; RUN: llc < %s -mtriple=armv7-apple-ios -verify-machineinstrs | FileCheck %s
; RUN: llc < %s -mtriple=thumbv7-apple-ios -verify-machineinstrs | FileCheck %s
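; Lowering of atomicrmw operations to ldrex/strex loops on ARMv7 and Thumb2.
; Each group of three CHECK lines below matches the load-exclusive, the ALU
; or compare step, and the store-exclusive of one expanded read-modify-write.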

define void @func(i32 %argc, i8** %argv) nounwind {
entry:
  %argc.addr = alloca i32 ; <i32*> [#uses=1]
  %argv.addr = alloca i8** ; <i8***> [#uses=1]
  %val1 = alloca i32 ; <i32*> [#uses=2]
  %val2 = alloca i32 ; <i32*> [#uses=15]
  %andt = alloca i32 ; <i32*> [#uses=2]
  %ort = alloca i32 ; <i32*> [#uses=2]
  %xort = alloca i32 ; <i32*> [#uses=2]
  %old = alloca i32 ; <i32*> [#uses=18]
  %temp = alloca i32 ; <i32*> [#uses=2]
  store i32 %argc, i32* %argc.addr
  store i8** %argv, i8*** %argv.addr
  store i32 0, i32* %val1
  store i32 31, i32* %val2
  store i32 3855, i32* %andt
  store i32 3855, i32* %ort
  store i32 3855, i32* %xort
  store i32 4, i32* %temp
  %tmp = load i32* %temp
; CHECK: ldrex
; CHECK: add
; CHECK: strex
  %0 = atomicrmw add i32* %val1, i32 %tmp monotonic
  store i32 %0, i32* %old
; CHECK: ldrex
; CHECK: sub
; CHECK: strex
  %1 = atomicrmw sub i32* %val2, i32 30 monotonic
  store i32 %1, i32* %old
; CHECK: ldrex
; CHECK: add
; CHECK: strex
  %2 = atomicrmw add i32* %val2, i32 1 monotonic
  store i32 %2, i32* %old
; CHECK: ldrex
; CHECK: sub
; CHECK: strex
  %3 = atomicrmw sub i32* %val2, i32 1 monotonic
  store i32 %3, i32* %old
; CHECK: ldrex
; CHECK: and
; CHECK: strex
  %4 = atomicrmw and i32* %andt, i32 4080 monotonic
  store i32 %4, i32* %old
; CHECK: ldrex
; CHECK: or
; CHECK: strex
  %5 = atomicrmw or i32* %ort, i32 4080 monotonic
  store i32 %5, i32* %old
; CHECK: ldrex
; CHECK: eor
; CHECK: strex
  %6 = atomicrmw xor i32* %xort, i32 4080 monotonic
  store i32 %6, i32* %old
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %7 = atomicrmw min i32* %val2, i32 16 monotonic
  store i32 %7, i32* %old
  %neg = sub i32 0, 1
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %8 = atomicrmw min i32* %val2, i32 %neg monotonic
  store i32 %8, i32* %old
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %9 = atomicrmw max i32* %val2, i32 1 monotonic
  store i32 %9, i32* %old
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %10 = atomicrmw max i32* %val2, i32 0 monotonic
  store i32 %10, i32* %old
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %11 = atomicrmw umin i32* %val2, i32 16 monotonic
  store i32 %11, i32* %old
  %uneg = sub i32 0, 1
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %12 = atomicrmw umin i32* %val2, i32 %uneg monotonic
  store i32 %12, i32* %old
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %13 = atomicrmw umax i32* %val2, i32 1 monotonic
  store i32 %13, i32* %old
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %14 = atomicrmw umax i32* %val2, i32 0 monotonic
  store i32 %14, i32* %old

  ret void
}

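; The same unsigned min/max expansions are checked for 16-bit (i16) values.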
define void @func2() nounwind {
entry:
  %val = alloca i16
  %old = alloca i16
  store i16 31, i16* %val
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %0 = atomicrmw umin i16* %val, i16 16 monotonic
  store i16 %0, i16* %old
  %uneg = sub i16 0, 1
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %1 = atomicrmw umin i16* %val, i16 %uneg monotonic
  store i16 %1, i16* %old
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %2 = atomicrmw umax i16* %val, i16 1 monotonic
  store i16 %2, i16* %old
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %3 = atomicrmw umax i16* %val, i16 0 monotonic
  store i16 %3, i16* %old
  ret void
}

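; And again for 8-bit (i8) values.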
define void @func3() nounwind {
entry:
  %val = alloca i8
  %old = alloca i8
  store i8 31, i8* %val
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %0 = atomicrmw umin i8* %val, i8 16 monotonic
  store i8 %0, i8* %old
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %uneg = sub i8 0, 1
  %1 = atomicrmw umin i8* %val, i8 %uneg monotonic
  store i8 %1, i8* %old
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %2 = atomicrmw umax i8* %val, i8 1 monotonic
  store i8 %2, i8* %old
; CHECK: ldrex
; CHECK: cmp
; CHECK: strex
  %3 = atomicrmw umax i8* %val, i8 0 monotonic
  store i8 %3, i8* %old
  ret void
}