; RUN: llc -verify-machineinstrs < %s -mtriple=powerpc-unknown-linux-gnu -ppc-asm-full-reg-names | FileCheck %s --check-prefix=CHECK --check-prefix=PPC32
; This is already checked for in Atomics-64.ll
; RUN: llc -verify-machineinstrs < %s -mtriple=powerpc64-unknown-linux-gnu -ppc-asm-full-reg-names | FileCheck %s --check-prefix=CHECK --check-prefix=PPC64

; FIXME: We do not currently check the operations themselves with CHECK-NEXT,
; because they are expanded into fairly verbose lwarx/stwcx. sequences.
; This should be addressed in a follow-up patch.
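;
; For orientation, the expansion of, e.g., an atomicrmw add on an i32 is a
; load-reserve/store-conditional retry loop roughly like the sketch below
; (register numbers and the label name are illustrative assumptions, not
; output this test checks for):
;
; .LBB0_1:
;   lwarx  r5, 0, r3        ; load the current value and take a reservation
;   add    r5, r5, r4       ; apply the read-modify-write operation
;   stwcx. r5, 0, r3        ; store conditionally; sets cr0.eq on success
;   bne-   cr0, .LBB0_1     ; reservation was lost, retry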

; We first check loads, for all sizes from i8 to i64.
; We also vary orderings to check for barriers.
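; Roughly, the barrier placement checked for below is: unordered and monotonic
; accesses need no barrier; an acquire load is followed by an lwsync (or a
; cmp/bne-/isync sequence on PPC64); a release store is preceded by an lwsync;
; and seq_cst accesses are preceded by a full sync, with loads also getting the
; acquire-style trailing barrier. This is a summary of the checks, not an
; exhaustive statement of the lowering.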
define i8 @load_i8_unordered(i8* %mem) {
; CHECK-LABEL: load_i8_unordered
; CHECK: lbz
; CHECK-NOT: sync
  %val = load atomic i8, i8* %mem unordered, align 1
  ret i8 %val
}
define i16 @load_i16_monotonic(i16* %mem) {
; CHECK-LABEL: load_i16_monotonic
; CHECK: lhz
; CHECK-NOT: sync
  %val = load atomic i16, i16* %mem monotonic, align 2
  ret i16 %val
}
define i32 @load_i32_acquire(i32* %mem) {
; CHECK-LABEL: load_i32_acquire
; CHECK: lwz [[VAL:r[0-9]+]]
  %val = load atomic i32, i32* %mem acquire, align 4
; PPC32: lwsync
; PPC64: cmpw [[CR:cr[0-9]+]], [[VAL]], [[VAL]]
; PPC64: bne- [[CR]], .+4
; PPC64: isync
  ret i32 %val
}
define i64 @load_i64_seq_cst(i64* %mem) {
; CHECK-LABEL: load_i64_seq_cst
; CHECK: sync
; PPC32: __sync_
; PPC64-NOT: __sync_
; PPC64: ld [[VAL:r[0-9]+]]
  %val = load atomic i64, i64* %mem seq_cst, align 8
; PPC32: lwsync
; PPC64: cmpw [[CR:cr[0-9]+]], [[VAL]], [[VAL]]
; PPC64: bne- [[CR]], .+4
; PPC64: isync
  ret i64 %val
}

; Stores
define void @store_i8_unordered(i8* %mem) {
; CHECK-LABEL: store_i8_unordered
; CHECK-NOT: sync
; CHECK: stb
  store atomic i8 42, i8* %mem unordered, align 1
  ret void
}
define void @store_i16_monotonic(i16* %mem) {
; CHECK-LABEL: store_i16_monotonic
; CHECK-NOT: sync
; CHECK: sth
  store atomic i16 42, i16* %mem monotonic, align 2
  ret void
}
define void @store_i32_release(i32* %mem) {
; CHECK-LABEL: store_i32_release
; CHECK: lwsync
; CHECK: stw
  store atomic i32 42, i32* %mem release, align 4
  ret void
}
define void @store_i64_seq_cst(i64* %mem) {
; CHECK-LABEL: store_i64_seq_cst
; CHECK: sync
; PPC32: __sync_
; PPC64-NOT: __sync_
; PPC64: std
  store atomic i64 42, i64* %mem seq_cst, align 8
  ret void
}

; Atomic CmpXchg
define i8 @cas_strong_i8_sc_sc(i8* %mem) {
; CHECK-LABEL: cas_strong_i8_sc_sc
; CHECK: sync
  %val = cmpxchg i8* %mem, i8 0, i8 1 seq_cst seq_cst
; CHECK: lwsync
  %loaded = extractvalue { i8, i1 } %val, 0
  ret i8 %loaded
}
define i16 @cas_weak_i16_acquire_acquire(i16* %mem) {
; CHECK-LABEL: cas_weak_i16_acquire_acquire
; CHECK-NOT: sync
  %val = cmpxchg weak i16* %mem, i16 0, i16 1 acquire acquire
; CHECK: lwsync
  %loaded = extractvalue { i16, i1 } %val, 0
  ret i16 %loaded
}
define i32 @cas_strong_i32_acqrel_acquire(i32* %mem) {
; CHECK-LABEL: cas_strong_i32_acqrel_acquire
; CHECK: lwsync
  %val = cmpxchg i32* %mem, i32 0, i32 1 acq_rel acquire
; CHECK: lwsync
  %loaded = extractvalue { i32, i1 } %val, 0
  ret i32 %loaded
}
define i64 @cas_weak_i64_release_monotonic(i64* %mem) {
; CHECK-LABEL: cas_weak_i64_release_monotonic
; CHECK: lwsync
  %val = cmpxchg weak i64* %mem, i64 0, i64 1 release monotonic
; CHECK-NOT: sync
  %loaded = extractvalue { i64, i1 } %val, 0
  ret i64 %loaded
}

; AtomicRMW
define i8 @add_i8_monotonic(i8* %mem, i8 %operand) {
; CHECK-LABEL: add_i8_monotonic
; CHECK-NOT: sync
  %val = atomicrmw add i8* %mem, i8 %operand monotonic
  ret i8 %val
}
define i16 @xor_i16_seq_cst(i16* %mem, i16 %operand) {
; CHECK-LABEL: xor_i16_seq_cst
; CHECK: sync
  %val = atomicrmw xor i16* %mem, i16 %operand seq_cst
; CHECK: lwsync
  ret i16 %val
}
define i32 @xchg_i32_acq_rel(i32* %mem, i32 %operand) {
; CHECK-LABEL: xchg_i32_acq_rel
; CHECK: lwsync
  %val = atomicrmw xchg i32* %mem, i32 %operand acq_rel
; CHECK: lwsync
  ret i32 %val
}
define i64 @and_i64_release(i64* %mem, i64 %operand) {
; CHECK-LABEL: and_i64_release
; CHECK: lwsync
  %val = atomicrmw and i64* %mem, i64 %operand release
; CHECK-NOT: sync
  ret i64 %val
}