; RUN: opt < %s -inline -S | FileCheck %s

; Inlining a byval struct should cause an explicit copy into an alloca.

%struct.ss = type { i32, i64 }
@.str = internal constant [10 x i8] c"%d, %lld\0A\00"		; <[10 x i8]*> [#uses=1]
; Callee stores through its byval argument, so the inliner must give the
; caller an explicit copy (alloca + memcpy) to preserve value semantics.
define internal void @f(%struct.ss* byval %b) nounwind {
entry:
  %tmp = getelementptr %struct.ss* %b, i32 0, i32 0		; <i32*> [#uses=2]
  %tmp1 = load i32* %tmp, align 4		; <i32> [#uses=1]
  %tmp2 = add i32 %tmp1, 1		; <i32> [#uses=1]
  store i32 %tmp2, i32* %tmp, align 4
  ret void
}

declare i32 @printf(i8*, ...) nounwind

define i32 @test1() nounwind {
entry:
  %S = alloca %struct.ss		; <%struct.ss*> [#uses=4]
  %tmp1 = getelementptr %struct.ss* %S, i32 0, i32 0		; <i32*> [#uses=1]
  store i32 1, i32* %tmp1, align 8
  %tmp4 = getelementptr %struct.ss* %S, i32 0, i32 1		; <i64*> [#uses=1]
  store i64 2, i64* %tmp4, align 4
  call void @f( %struct.ss* byval %S ) nounwind
  ret i32 0
; CHECK: @test1()
; CHECK: %S1 = alloca %struct.ss
; CHECK: %S = alloca %struct.ss
; CHECK: call void @llvm.memcpy
; CHECK: ret i32 0
}

; Inlining a byval struct should NOT cause an explicit copy
; into an alloca if the function is readonly

; Callee only reads its byval argument (readonly), so no defensive copy
; is needed when it is inlined.
define internal i32 @f2(%struct.ss* byval %b) nounwind readonly {
entry:
  %tmp = getelementptr %struct.ss* %b, i32 0, i32 0		; <i32*> [#uses=2]
  %tmp1 = load i32* %tmp, align 4		; <i32> [#uses=1]
  %tmp2 = add i32 %tmp1, 1		; <i32> [#uses=1]
  ret i32 %tmp2
}

define i32 @test2() nounwind {
entry:
  %S = alloca %struct.ss		; <%struct.ss*> [#uses=4]
  %tmp1 = getelementptr %struct.ss* %S, i32 0, i32 0		; <i32*> [#uses=1]
  store i32 1, i32* %tmp1, align 8
  %tmp4 = getelementptr %struct.ss* %S, i32 0, i32 1		; <i64*> [#uses=1]
  store i64 2, i64* %tmp4, align 4
  %X = call i32 @f2( %struct.ss* byval %S ) nounwind
  ret i32 %X
; CHECK: @test2()
; CHECK: %S = alloca %struct.ss
; CHECK-NOT: call void @llvm.memcpy
; CHECK: ret i32
}


; Inlining a byval with an explicit alignment needs to use *at least* that
; alignment on the generated alloca.
; PR8769
declare void @g3(%struct.ss* %p)

define internal void @f3(%struct.ss* byval align 64 %b) nounwind {
  call void @g3(%struct.ss* %b)  ;; Could make alignment assumptions!
  ret void
}

define void @test3() nounwind {
entry:
  %S = alloca %struct.ss, align 1  ;; May not be aligned.
  call void @f3( %struct.ss* byval align 64 %S) nounwind
  ret void
; CHECK: @test3()
; CHECK: %S1 = alloca %struct.ss, align 64
; CHECK: %S = alloca %struct.ss
; CHECK: call void @llvm.memcpy
; CHECK: call void @g3(%struct.ss* %S1)
; CHECK: ret void
}


; Inlining a byval struct should NOT cause an explicit copy
; into an alloca if the function is readonly, but should increase an alloca's
; alignment to satisfy an explicit alignment request.

; readonly byval callee, but it demands align 64: inlining must not copy,
; only raise the caller alloca's alignment.
define internal i32 @f4(%struct.ss* byval align 64 %b) nounwind readonly {
  call void @g3(%struct.ss* %b)
  ret i32 4
}

define i32 @test4() nounwind {
entry:
  %S = alloca %struct.ss, align 2		; <%struct.ss*> [#uses=4]
  %X = call i32 @f4( %struct.ss* byval align 64 %S ) nounwind
  ret i32 %X
; CHECK: @test4()
; CHECK: %S = alloca %struct.ss, align 64
; CHECK-NOT: call void @llvm.memcpy
; CHECK: call void @g3
; CHECK: ret i32 4
}
