; RUN: llc < %s -march=systemz | FileCheck %s

; Fold load + add into 'a' (add word, RX form) with positive displacement 4.
define signext i32 @foo1(i32 %a, i32 *%b, i64 %idx) {
; CHECK: foo1:
; CHECK: a %r2, 4(%r1,%r3)
entry:
  %off = add i64 %idx, 1
  %addr = getelementptr i32* %b, i64 %off
  %val = load i32* %addr
  %sum = add i32 %a, %val
  ret i32 %sum
}

; Fold load + add into 'ay' (add word, RXY form) for a negative displacement.
define signext i32 @foo2(i32 %a, i32 *%b, i64 %idx) {
; CHECK: foo2:
; CHECK: ay %r2, -4(%r1,%r3)
entry:
  %off = add i64 %idx, -1
  %addr = getelementptr i32* %b, i64 %off
  %val = load i32* %addr
  %sum = add i32 %a, %val
  ret i32 %sum
}

; Fold 64-bit load + add into 'ag' (add doubleword) with displacement 8.
define signext i64 @foo3(i64 %a, i64 *%b, i64 %idx) {
; CHECK: foo3:
; CHECK: ag %r2, 8(%r1,%r3)
entry:
  %off = add i64 %idx, 1
  %addr = getelementptr i64* %b, i64 %off
  %val = load i64* %addr
  %sum = add i64 %a, %val
  ret i64 %sum
}

; Fold load + and into 'n' (and word, RX form) with positive displacement 4.
define signext i32 @foo4(i32 %a, i32 *%b, i64 %idx) {
; CHECK: foo4:
; CHECK: n %r2, 4(%r1,%r3)
entry:
  %off = add i64 %idx, 1
  %addr = getelementptr i32* %b, i64 %off
  %val = load i32* %addr
  %res = and i32 %a, %val
  ret i32 %res
}

; Fold load + and into 'ny' (and word, RXY form) for a negative displacement.
define signext i32 @foo5(i32 %a, i32 *%b, i64 %idx) {
; CHECK: foo5:
; CHECK: ny %r2, -4(%r1,%r3)
entry:
  %off = add i64 %idx, -1
  %addr = getelementptr i32* %b, i64 %off
  %val = load i32* %addr
  %res = and i32 %a, %val
  ret i32 %res
}

; Fold 64-bit load + and into 'ng' (and doubleword) with displacement 8.
define signext i64 @foo6(i64 %a, i64 *%b, i64 %idx) {
; CHECK: foo6:
; CHECK: ng %r2, 8(%r1,%r3)
entry:
  %off = add i64 %idx, 1
  %addr = getelementptr i64* %b, i64 %off
  %val = load i64* %addr
  %res = and i64 %a, %val
  ret i64 %res
}

; Fold load + or into 'o' (or word, RX form) with positive displacement 4.
define signext i32 @foo7(i32 %a, i32 *%b, i64 %idx) {
; CHECK: foo7:
; CHECK: o %r2, 4(%r1,%r3)
entry:
  %off = add i64 %idx, 1
  %addr = getelementptr i32* %b, i64 %off
  %val = load i32* %addr
  %res = or i32 %a, %val
  ret i32 %res
}

; Fold load + or into 'oy' (or word, RXY form) for a negative displacement.
define signext i32 @foo8(i32 %a, i32 *%b, i64 %idx) {
; CHECK: foo8:
; CHECK: oy %r2, -4(%r1,%r3)
entry:
  %off = add i64 %idx, -1
  %addr = getelementptr i32* %b, i64 %off
  %val = load i32* %addr
  %res = or i32 %a, %val
  ret i32 %res
}

; Fold 64-bit load + or into 'og' (or doubleword) with displacement 8.
define signext i64 @foo9(i64 %a, i64 *%b, i64 %idx) {
; CHECK: foo9:
; CHECK: og %r2, 8(%r1,%r3)
entry:
  %off = add i64 %idx, 1
  %addr = getelementptr i64* %b, i64 %off
  %val = load i64* %addr
  %res = or i64 %a, %val
  ret i64 %res
}

; Fold load + xor into 'x' (exclusive or word, RX form) with displacement 4.
define signext i32 @foo10(i32 %a, i32 *%b, i64 %idx) {
; CHECK: foo10:
; CHECK: x %r2, 4(%r1,%r3)
entry:
  %off = add i64 %idx, 1
  %addr = getelementptr i32* %b, i64 %off
  %val = load i32* %addr
  %res = xor i32 %a, %val
  ret i32 %res
}

; Fold load + xor into 'xy' (exclusive or word, RXY form) for a negative displacement.
define signext i32 @foo11(i32 %a, i32 *%b, i64 %idx) {
; CHECK: foo11:
; CHECK: xy %r2, -4(%r1,%r3)
entry:
  %off = add i64 %idx, -1
  %addr = getelementptr i32* %b, i64 %off
  %val = load i32* %addr
  %res = xor i32 %a, %val
  ret i32 %res
}

; Fold 64-bit load + xor into 'xg' (exclusive or doubleword) with displacement 8.
define signext i64 @foo12(i64 %a, i64 *%b, i64 %idx) {
; CHECK: foo12:
; CHECK: xg %r2, 8(%r1,%r3)
entry:
  %off = add i64 %idx, 1
  %addr = getelementptr i64* %b, i64 %off
  %val = load i64* %addr
  %res = xor i64 %a, %val
  ret i64 %res
}