; RUN: llc < %s -mtriple=x86_64-linux -O0 | FileCheck %s --check-prefix=X64
; RUN: llc < %s -mtriple=x86_64-win32 -O0 | FileCheck %s --check-prefix=X64
; RUN: llc < %s -march=x86 -O0 | FileCheck %s --check-prefix=X32

; GEP indices are interpreted as signed integers, so they
; should be sign-extended to 64 bits on 64-bit targets.
; PR3181
define i32 @test1(i32 %t3, i32* %t1) nounwind {
  %t9 = getelementptr i32* %t1, i32 %t3           ; <i32*> [#uses=1]
  %t15 = load i32* %t9                            ; <i32> [#uses=1]
  ret i32 %t15
; X32: test1:
; X32: movl (%eax,%ecx,4), %eax
; X32: ret

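; The X64 prefix covers both the x86_64-linux and x86_64-win32 RUN lines, so
; the FileCheck variables below capture whichever argument registers the
; calling convention uses: %rdi/%rsi for Linux (SysV), %rcx/%rdx for Win64.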
; X64: test1:
; X64: movslq %e[[A0:di|cx]], %rax
; X64: movl (%r[[A1:si|dx]],%rax,4), %eax
; X64: ret

}
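; With an i64 index, no sign extension is needed and the index can be used
; directly in the addressing mode.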
define i32 @test2(i64 %t3, i32* %t1) nounwind {
  %t9 = getelementptr i32* %t1, i64 %t3           ; <i32*> [#uses=1]
  %t15 = load i32* %t9                            ; <i32> [#uses=1]
  ret i32 %t15
; X32: test2:
; X32: movl (%edx,%ecx,4), %e
; X32: ret

; X64: test2:
; X64: movl (%r[[A1]],%r[[A0]],4), %eax
; X64: ret
}

; PR4984
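; A negative constant index should be folded into the addressing-mode
; displacement.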
define i8 @test3(i8* %start) nounwind {
entry:
  %A = getelementptr i8* %start, i64 -2           ; <i8*> [#uses=1]
  %B = load i8* %A, align 1                       ; <i8> [#uses=1]
  ret i8 %B

; X32: test3:
; X32: movl 4(%esp), %eax
; X32: movb -2(%eax), %al
; X32: ret

; X64: test3:
; X64: movb -2(%r[[A0]]), %al
; X64: ret
}

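; The constant part of the address (index 16 scaled by the 8-byte element
; size) should be folded into the addressing mode as a 128-byte displacement.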
define double @test4(i64 %x, double* %p) nounwind {
entry:
  %x.addr = alloca i64, align 8                   ; <i64*> [#uses=2]
  %p.addr = alloca double*, align 8               ; <double**> [#uses=2]
  store i64 %x, i64* %x.addr
  store double* %p, double** %p.addr
  %tmp = load i64* %x.addr                        ; <i64> [#uses=1]
  %add = add nsw i64 %tmp, 16                     ; <i64> [#uses=1]
  %tmp1 = load double** %p.addr                   ; <double*> [#uses=1]
  %arrayidx = getelementptr inbounds double* %tmp1, i64 %add ; <double*> [#uses=1]
  %tmp2 = load double* %arrayidx                  ; <double> [#uses=1]
  ret double %tmp2

; X32: test4:
; X32: 128(%e{{.*}},%e{{.*}},8)
; X64: test4:
; X64: 128(%r{{.*}},%r{{.*}},8)
}

; PR8961 - Make sure the sext for the GEP addressing comes before the load that
; is folded.
define i64 @test5(i8* %A, i32 %I, i64 %B) nounwind {
  %v8 = getelementptr i8* %A, i32 %I
  %v9 = bitcast i8* %v8 to i64*
  %v10 = load i64* %v9
  %v11 = add i64 %B, %v10
  ret i64 %v11
; X64: test5:
; X64: movslq %e[[A1]], %rax
; X64-NEXT: (%r[[A0]],%rax),
; X64: ret
}

; PR9500, rdar://9156159 - Don't do non-local address mode folding,
; because it may require values which wouldn't otherwise be live out
; of their blocks.
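; In this test, folding the add into the address of the store in
; %invoke.cont16 would require %tmp15 to be live across the invoke.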
define void @test6() {
if.end:                                           ; preds = %if.then, %invoke.cont
  %tmp15 = load i64* undef
  %dec = add i64 %tmp15, 13
  store i64 %dec, i64* undef
  %call17 = invoke i8* @_ZNK18G__FastAllocString4dataEv()
          to label %invoke.cont16 unwind label %lpad

invoke.cont16:                                    ; preds = %if.then14
  %arrayidx18 = getelementptr inbounds i8* %call17, i64 %dec
  store i8 0, i8* %arrayidx18
  unreachable

lpad:                                             ; preds = %if.end19, %if.then14, %if.end, %entry
  %exn = landingpad { i8*, i32 } personality i32 (...)* @__gxx_personality_v0
          cleanup
  unreachable
}
declare i8* @_ZNK18G__FastAllocString4dataEv() nounwind

; PR10605 / rdar://9930964 - Don't fold loads incorrectly. The load should
; happen before the store.
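; Both GEPs compute the address of the same field (offset 8), so folding the
; load past the store would incorrectly read the value 4 that was just stored.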
define i32 @test7({i32,i32,i32}* %tmp1, i32 %tmp71, i32 %tmp63) nounwind {
; X64: test7:
; X64: movl 8({{%rdi|%rcx}}), %eax
; X64: movl $4, 8({{%rdi|%rcx}})

  %tmp29 = getelementptr inbounds {i32,i32,i32}* %tmp1, i32 0, i32 2
  %tmp30 = load i32* %tmp29, align 4

  %p2 = getelementptr inbounds {i32,i32,i32}* %tmp1, i32 0, i32 2
  store i32 4, i32* %p2

  %tmp72 = or i32 %tmp71, %tmp30
  %tmp73 = icmp ne i32 %tmp63, 32
  br i1 %tmp73, label %T, label %F

T:
  ret i32 %tmp72

F:
  ret i32 4
}

declare i32 @__gxx_personality_v0(...)