; RUN: opt < %s -inline -S | FileCheck %s

; Inlining a byval struct should cause an explicit copy into an alloca.
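; A byval argument is passed as a pointer, but semantically the callee gets its
; own private copy of the pointed-to memory. Once the call is inlined that
; implicit copy disappears, so the inliner has to materialize it: it creates a
; new alloca in the caller and memcpy's the original object into it, unless it
; can prove the callee never writes the memory (see @test2 and @test4 below).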

%struct.ss = type { i32, i64 }
@.str = internal constant [10 x i8] c"%d, %lld\0A\00"  ; <[10 x i8]*> [#uses=1]

define internal void @f(%struct.ss* byval %b) nounwind {
entry:
  %tmp = getelementptr %struct.ss, %struct.ss* %b, i32 0, i32 0  ; <i32*> [#uses=2]
  %tmp1 = load i32, i32* %tmp, align 4  ; <i32> [#uses=1]
  %tmp2 = add i32 %tmp1, 1  ; <i32> [#uses=1]
  store i32 %tmp2, i32* %tmp, align 4
  ret void
}

declare i32 @printf(i8*, ...) nounwind

define i32 @test1() nounwind {
entry:
  %S = alloca %struct.ss  ; <%struct.ss*> [#uses=4]
  %tmp1 = getelementptr %struct.ss, %struct.ss* %S, i32 0, i32 0  ; <i32*> [#uses=1]
  store i32 1, i32* %tmp1, align 8
  %tmp4 = getelementptr %struct.ss, %struct.ss* %S, i32 0, i32 1  ; <i64*> [#uses=1]
  store i64 2, i64* %tmp4, align 4
  call void @f(%struct.ss* byval %S) nounwind
  ret i32 0
; CHECK: @test1()
; CHECK: %S1 = alloca %struct.ss
; CHECK: %S = alloca %struct.ss
; CHECK: call void @llvm.memcpy
; CHECK: ret i32 0
}
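
; After inlining, @test1 should look roughly like this (names follow the CHECK
; lines above; the memcpy operands and size are illustrative, not exact):
;   %S1 = alloca %struct.ss        ; private copy for the inlined byval argument
;   %S = alloca %struct.ss
;   ...stores that initialize %S...
;   call void @llvm.memcpy(...)    ; copy %S into %S1
;   ...the inlined body of @f, operating on %S1...
;   ret i32 0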

; Inlining a byval struct should NOT cause an explicit copy
; into an alloca if the function is readonly.
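; With the whole callee marked readonly, the callee cannot modify the byval
; memory (or anything else), so letting the inlined code read the caller's %S
; directly is equivalent to reading a copy, and no alloca/memcpy is needed.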

define internal i32 @f2(%struct.ss* byval %b) nounwind readonly {
entry:
  %tmp = getelementptr %struct.ss, %struct.ss* %b, i32 0, i32 0  ; <i32*> [#uses=2]
  %tmp1 = load i32, i32* %tmp, align 4  ; <i32> [#uses=1]
  %tmp2 = add i32 %tmp1, 1  ; <i32> [#uses=1]
  ret i32 %tmp2
}

define i32 @test2() nounwind {
entry:
  %S = alloca %struct.ss  ; <%struct.ss*> [#uses=4]
  %tmp1 = getelementptr %struct.ss, %struct.ss* %S, i32 0, i32 0  ; <i32*> [#uses=1]
  store i32 1, i32* %tmp1, align 8
  %tmp4 = getelementptr %struct.ss, %struct.ss* %S, i32 0, i32 1  ; <i64*> [#uses=1]
  store i64 2, i64* %tmp4, align 4
  %X = call i32 @f2(%struct.ss* byval %S) nounwind
  ret i32 %X
; CHECK: @test2()
; CHECK: %S = alloca %struct.ss
; CHECK-NOT: call void @llvm.memcpy
; CHECK: ret i32
}


; Inlining a byval argument with an explicit alignment needs to use *at least*
; that alignment on the generated alloca.
; PR8769
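; The callee may hand the pointer on to code (here @g3) that was free to assume
; the 64-byte alignment promised by the byval attribute, so the copy made by
; the inliner has to honor that promise even though the caller's %S is only
; align 1.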
declare void @g3(%struct.ss* %p)

define internal void @f3(%struct.ss* byval align 64 %b) nounwind {
  call void @g3(%struct.ss* %b)  ;; Could make alignment assumptions!
  ret void
}

define void @test3() nounwind {
entry:
  %S = alloca %struct.ss, align 1  ;; May not be aligned.
  call void @f3(%struct.ss* byval align 64 %S) nounwind
  ret void
; CHECK: @test3()
; CHECK: %S1 = alloca %struct.ss, align 64
; CHECK: %S = alloca %struct.ss
; CHECK: call void @llvm.memcpy
; CHECK: call void @g3(%struct.ss* %S1)
; CHECK: ret void
}


; Inlining a byval struct should NOT cause an explicit copy
; into an alloca if the function is readonly, but should increase an alloca's
; alignment to satisfy an explicit alignment request.
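; (Raising the alignment of the caller's existing %S is enough here: an
; alloca's alignment can always be increased, and with a readonly callee no
; separate copy is needed, as in @test2.)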

define internal i32 @f4(%struct.ss* byval align 64 %b) nounwind readonly {
  call void @g3(%struct.ss* %b)
  ret i32 4
}

define i32 @test4() nounwind {
entry:
  %S = alloca %struct.ss, align 2  ; <%struct.ss*> [#uses=4]
  %X = call i32 @f4(%struct.ss* byval align 64 %S) nounwind
  ret i32 %X
; CHECK: @test4()
; CHECK: %S = alloca %struct.ss, align 64
; CHECK-NOT: call void @llvm.memcpy
; CHECK: call void @g3
; CHECK: ret i32 4
}

%struct.S0 = type { i32 }

@b = global %struct.S0 { i32 1 }, align 4
@a = common global i32 0, align 4
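
; Only the byval *argument* is readonly here; @f5 itself still writes @b, and
; in @test5 @b is exactly the object being passed byval. The copy therefore
; must not be elided: after inlining, the load of the field should come from
; the copy rather than from @b (which has just been stored to), which is what
; the CHECK-NOT below enforces.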
define internal void @f5(%struct.S0* byval nocapture readonly align 4 %p) {
entry:
  store i32 0, i32* getelementptr inbounds (%struct.S0, %struct.S0* @b, i64 0, i32 0), align 4
  %f2 = getelementptr inbounds %struct.S0, %struct.S0* %p, i64 0, i32 0
  %0 = load i32, i32* %f2, align 4
  store i32 %0, i32* @a, align 4
  ret void
}

define i32 @test5() {
entry:
  tail call void @f5(%struct.S0* byval align 4 @b)
  %0 = load i32, i32* @a, align 4
  ret i32 %0
; CHECK: @test5()
; CHECK: store i32 0, i32* getelementptr inbounds (%struct.S0, %struct.S0* @b, i64 0, i32 0), align 4
; CHECK-NOT: load i32, i32* getelementptr inbounds (%struct.S0, %struct.S0* @b, i64 0, i32 0), align 4
}