; RUN: opt < %s -tsan -S | FileCheck %s
; Check that atomic memory operations are converted to calls into the ThreadSanitizer runtime.
target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"

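; The trailing i32 argument of every __tsan_atomic* call below encodes the
; memory ordering. Judging from the expected calls in this file, the encoding
; matches the C++11 memory_order enumeration: 0 = relaxed (used for both
; unordered and monotonic), 2 = acquire, 3 = release, 4 = acq_rel,
; 5 = seq_cst.
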
define i8 @atomic8_load_unordered(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a unordered, align 1
  ret i8 %0
}
; CHECK: atomic8_load_unordered
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0)

define i8 @atomic8_load_monotonic(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a monotonic, align 1
  ret i8 %0
}
; CHECK: atomic8_load_monotonic
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0)

define i8 @atomic8_load_acquire(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a acquire, align 1
  ret i8 %0
}
; CHECK: atomic8_load_acquire
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 2)

define i8 @atomic8_load_seq_cst(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a seq_cst, align 1
  ret i8 %0
}
; CHECK: atomic8_load_seq_cst
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 5)

define void @atomic8_store_unordered(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a unordered, align 1
  ret void
}
; CHECK: atomic8_store_unordered
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0)

define void @atomic8_store_monotonic(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a monotonic, align 1
  ret void
}
; CHECK: atomic8_store_monotonic
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0)

define void @atomic8_store_release(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a release, align 1
  ret void
}
; CHECK: atomic8_store_release
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 3)

define void @atomic8_store_seq_cst(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a seq_cst, align 1
  ret void
}
; CHECK: atomic8_store_seq_cst
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 5)

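; atomicrmw instructions are expected to lower to the matching
; __tsan_atomic<N>_<op> runtime call: xchg maps to _exchange, and the
; arithmetic/bitwise operations map to _fetch_<op>, with the ordering passed
; as the trailing i32 argument as above.
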
define void @atomic8_xchg_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xchg_monotonic
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 0)

define void @atomic8_add_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_add_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 0)

define void @atomic8_sub_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_sub_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 0)

define void @atomic8_and_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_and_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 0)

define void @atomic8_or_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_or_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 0)

define void @atomic8_xor_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xor_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 0)

define void @atomic8_xchg_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xchg_acquire
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 2)

define void @atomic8_add_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_add_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 2)

define void @atomic8_sub_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_sub_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 2)

define void @atomic8_and_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_and_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 2)

define void @atomic8_or_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_or_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 2)

define void @atomic8_xor_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xor_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 2)

define void @atomic8_xchg_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xchg_release
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 3)

define void @atomic8_add_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_add_release
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 3)

define void @atomic8_sub_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_sub_release
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 3)

define void @atomic8_and_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_and_release
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 3)

define void @atomic8_or_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_or_release
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 3)

define void @atomic8_xor_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xor_release
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 3)

define void @atomic8_xchg_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xchg_acq_rel
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 4)

define void @atomic8_add_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_add_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 4)

define void @atomic8_sub_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_sub_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 4)

define void @atomic8_and_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_and_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 4)

define void @atomic8_or_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_or_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 4)

define void @atomic8_xor_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xor_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 4)

define void @atomic8_xchg_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xchg_seq_cst
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 5)

define void @atomic8_add_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_add_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 5)

define void @atomic8_sub_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_sub_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 5)

define void @atomic8_and_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_and_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 5)

define void @atomic8_or_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_or_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 5)

define void @atomic8_xor_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xor_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 5)

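; cmpxchg is expected to lower to
; __tsan_atomic<N>_compare_exchange_val(ptr, expected, desired, order);
; judging from its return type in the expected calls, it yields the value
; observed at ptr.
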
define void @atomic8_cas_monotonic(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 monotonic
  ret void
}
; CHECK: atomic8_cas_monotonic
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 0)

define void @atomic8_cas_acquire(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acquire
  ret void
}
; CHECK: atomic8_cas_acquire
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 2)

define void @atomic8_cas_release(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 release
  ret void
}
; CHECK: atomic8_cas_release
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 3)

define void @atomic8_cas_acq_rel(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acq_rel
  ret void
}
; CHECK: atomic8_cas_acq_rel
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 4)

define void @atomic8_cas_seq_cst(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 seq_cst
  ret void
}
; CHECK: atomic8_cas_seq_cst
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 5)

define i16 @atomic16_load_unordered(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a unordered, align 2
  ret i16 %0
}
; CHECK: atomic16_load_unordered
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0)

define i16 @atomic16_load_monotonic(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a monotonic, align 2
  ret i16 %0
}
; CHECK: atomic16_load_monotonic
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0)

define i16 @atomic16_load_acquire(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a acquire, align 2
  ret i16 %0
}
; CHECK: atomic16_load_acquire
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 2)

define i16 @atomic16_load_seq_cst(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a seq_cst, align 2
  ret i16 %0
}
; CHECK: atomic16_load_seq_cst
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 5)

define void @atomic16_store_unordered(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a unordered, align 2
  ret void
}
; CHECK: atomic16_store_unordered
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0)

define void @atomic16_store_monotonic(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a monotonic, align 2
  ret void
}
; CHECK: atomic16_store_monotonic
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0)

define void @atomic16_store_release(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a release, align 2
  ret void
}
; CHECK: atomic16_store_release
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 3)

define void @atomic16_store_seq_cst(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a seq_cst, align 2
  ret void
}
; CHECK: atomic16_store_seq_cst
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 5)

define void @atomic16_xchg_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xchg_monotonic
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 0)

define void @atomic16_add_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_add_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 0)

define void @atomic16_sub_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_sub_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 0)

define void @atomic16_and_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_and_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 0)

define void @atomic16_or_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_or_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 0)

define void @atomic16_xor_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xor_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 0)

define void @atomic16_xchg_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xchg_acquire
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 2)

define void @atomic16_add_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_add_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 2)

define void @atomic16_sub_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_sub_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 2)

define void @atomic16_and_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_and_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 2)

define void @atomic16_or_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_or_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 2)

define void @atomic16_xor_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xor_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 2)

define void @atomic16_xchg_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xchg_release
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 3)

define void @atomic16_add_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_add_release
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 3)

define void @atomic16_sub_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_sub_release
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 3)

define void @atomic16_and_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_and_release
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 3)

define void @atomic16_or_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_or_release
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 3)

define void @atomic16_xor_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xor_release
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 3)

define void @atomic16_xchg_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xchg_acq_rel
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 4)

define void @atomic16_add_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_add_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 4)

define void @atomic16_sub_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_sub_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 4)

define void @atomic16_and_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_and_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 4)

define void @atomic16_or_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_or_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 4)

define void @atomic16_xor_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xor_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 4)

define void @atomic16_xchg_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xchg_seq_cst
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 5)

define void @atomic16_add_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_add_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 5)

define void @atomic16_sub_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_sub_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 5)

define void @atomic16_and_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_and_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 5)

define void @atomic16_or_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_or_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 5)

define void @atomic16_xor_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xor_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 5)

define void @atomic16_cas_monotonic(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 monotonic
  ret void
}
; CHECK: atomic16_cas_monotonic
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 0)

define void @atomic16_cas_acquire(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acquire
  ret void
}
; CHECK: atomic16_cas_acquire
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 2)

define void @atomic16_cas_release(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 release
  ret void
}
; CHECK: atomic16_cas_release
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 3)

define void @atomic16_cas_acq_rel(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acq_rel
  ret void
}
; CHECK: atomic16_cas_acq_rel
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 4)

define void @atomic16_cas_seq_cst(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 seq_cst
  ret void
}
; CHECK: atomic16_cas_seq_cst
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 5)

define i32 @atomic32_load_unordered(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a unordered, align 4
  ret i32 %0
}
; CHECK: atomic32_load_unordered
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0)

define i32 @atomic32_load_monotonic(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a monotonic, align 4
  ret i32 %0
}
; CHECK: atomic32_load_monotonic
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0)

define i32 @atomic32_load_acquire(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a acquire, align 4
  ret i32 %0
}
; CHECK: atomic32_load_acquire
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 2)

define i32 @atomic32_load_seq_cst(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a seq_cst, align 4
  ret i32 %0
}
; CHECK: atomic32_load_seq_cst
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 5)

define void @atomic32_store_unordered(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a unordered, align 4
  ret void
}
; CHECK: atomic32_store_unordered
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0)

define void @atomic32_store_monotonic(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a monotonic, align 4
  ret void
}
; CHECK: atomic32_store_monotonic
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0)

define void @atomic32_store_release(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a release, align 4
  ret void
}
; CHECK: atomic32_store_release
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 3)

define void @atomic32_store_seq_cst(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a seq_cst, align 4
  ret void
}
; CHECK: atomic32_store_seq_cst
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 5)

Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 757 | define void @atomic32_xchg_monotonic(i32* %a) nounwind uwtable { |
| 758 | entry: |
| 759 | atomicrmw xchg i32* %a, i32 0 monotonic |
| 760 | ret void |
| 761 | } |
| 762 | ; CHECK: atomic32_xchg_monotonic |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 763 | ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 0) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 764 | |
| 765 | define void @atomic32_add_monotonic(i32* %a) nounwind uwtable { |
| 766 | entry: |
| 767 | atomicrmw add i32* %a, i32 0 monotonic |
| 768 | ret void |
| 769 | } |
| 770 | ; CHECK: atomic32_add_monotonic |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 771 | ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 0) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 772 | |
| 773 | define void @atomic32_sub_monotonic(i32* %a) nounwind uwtable { |
| 774 | entry: |
| 775 | atomicrmw sub i32* %a, i32 0 monotonic |
| 776 | ret void |
| 777 | } |
| 778 | ; CHECK: atomic32_sub_monotonic |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 779 | ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 0) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 780 | |
| 781 | define void @atomic32_and_monotonic(i32* %a) nounwind uwtable { |
| 782 | entry: |
| 783 | atomicrmw and i32* %a, i32 0 monotonic |
| 784 | ret void |
| 785 | } |
| 786 | ; CHECK: atomic32_and_monotonic |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 787 | ; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 0) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 788 | |
| 789 | define void @atomic32_or_monotonic(i32* %a) nounwind uwtable { |
| 790 | entry: |
| 791 | atomicrmw or i32* %a, i32 0 monotonic |
| 792 | ret void |
| 793 | } |
| 794 | ; CHECK: atomic32_or_monotonic |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 795 | ; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 0) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 796 | |
| 797 | define void @atomic32_xor_monotonic(i32* %a) nounwind uwtable { |
| 798 | entry: |
| 799 | atomicrmw xor i32* %a, i32 0 monotonic |
| 800 | ret void |
| 801 | } |
| 802 | ; CHECK: atomic32_xor_monotonic |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 803 | ; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 0) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 804 | |
| 805 | define void @atomic32_xchg_acquire(i32* %a) nounwind uwtable { |
| 806 | entry: |
| 807 | atomicrmw xchg i32* %a, i32 0 acquire |
| 808 | ret void |
| 809 | } |
| 810 | ; CHECK: atomic32_xchg_acquire |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 811 | ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 2) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 812 | |
| 813 | define void @atomic32_add_acquire(i32* %a) nounwind uwtable { |
| 814 | entry: |
| 815 | atomicrmw add i32* %a, i32 0 acquire |
| 816 | ret void |
| 817 | } |
| 818 | ; CHECK: atomic32_add_acquire |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 819 | ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 2) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 820 | |
| 821 | define void @atomic32_sub_acquire(i32* %a) nounwind uwtable { |
| 822 | entry: |
| 823 | atomicrmw sub i32* %a, i32 0 acquire |
| 824 | ret void |
| 825 | } |
| 826 | ; CHECK: atomic32_sub_acquire |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 827 | ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 2) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 828 | |
| 829 | define void @atomic32_and_acquire(i32* %a) nounwind uwtable { |
| 830 | entry: |
| 831 | atomicrmw and i32* %a, i32 0 acquire |
| 832 | ret void |
| 833 | } |
| 834 | ; CHECK: atomic32_and_acquire |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 835 | ; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 2) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 836 | |
| 837 | define void @atomic32_or_acquire(i32* %a) nounwind uwtable { |
| 838 | entry: |
| 839 | atomicrmw or i32* %a, i32 0 acquire |
| 840 | ret void |
| 841 | } |
| 842 | ; CHECK: atomic32_or_acquire |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 843 | ; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 2) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 844 | |
| 845 | define void @atomic32_xor_acquire(i32* %a) nounwind uwtable { |
| 846 | entry: |
| 847 | atomicrmw xor i32* %a, i32 0 acquire |
| 848 | ret void |
| 849 | } |
| 850 | ; CHECK: atomic32_xor_acquire |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 851 | ; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 2) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 852 | |
| 853 | define void @atomic32_xchg_release(i32* %a) nounwind uwtable { |
| 854 | entry: |
| 855 | atomicrmw xchg i32* %a, i32 0 release |
| 856 | ret void |
| 857 | } |
| 858 | ; CHECK: atomic32_xchg_release |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 859 | ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 3) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 860 | |
| 861 | define void @atomic32_add_release(i32* %a) nounwind uwtable { |
| 862 | entry: |
| 863 | atomicrmw add i32* %a, i32 0 release |
| 864 | ret void |
| 865 | } |
| 866 | ; CHECK: atomic32_add_release |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 867 | ; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 3) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 868 | |
| 869 | define void @atomic32_sub_release(i32* %a) nounwind uwtable { |
| 870 | entry: |
| 871 | atomicrmw sub i32* %a, i32 0 release |
| 872 | ret void |
| 873 | } |
| 874 | ; CHECK: atomic32_sub_release |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 875 | ; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 3) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 876 | |
| 877 | define void @atomic32_and_release(i32* %a) nounwind uwtable { |
| 878 | entry: |
| 879 | atomicrmw and i32* %a, i32 0 release |
| 880 | ret void |
| 881 | } |
| 882 | ; CHECK: atomic32_and_release |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 883 | ; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 3) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 884 | |
| 885 | define void @atomic32_or_release(i32* %a) nounwind uwtable { |
| 886 | entry: |
| 887 | atomicrmw or i32* %a, i32 0 release |
| 888 | ret void |
| 889 | } |
| 890 | ; CHECK: atomic32_or_release |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 891 | ; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 3) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 892 | |
| 893 | define void @atomic32_xor_release(i32* %a) nounwind uwtable { |
| 894 | entry: |
| 895 | atomicrmw xor i32* %a, i32 0 release |
| 896 | ret void |
| 897 | } |
| 898 | ; CHECK: atomic32_xor_release |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 899 | ; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 3) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 900 | |
| 901 | define void @atomic32_xchg_acq_rel(i32* %a) nounwind uwtable { |
| 902 | entry: |
| 903 | atomicrmw xchg i32* %a, i32 0 acq_rel |
| 904 | ret void |
| 905 | } |
| 906 | ; CHECK: atomic32_xchg_acq_rel |
Dmitry Vyukov | 0044e38 | 2012-11-09 14:12:16 +0000 | [diff] [blame^] | 907 | ; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 4) |
Dmitry Vyukov | 92b9e1d | 2012-11-09 12:55:36 +0000 | [diff] [blame] | 908 | |
| 909 | define void @atomic32_add_acq_rel(i32* %a) nounwind uwtable { |
| 910 | entry: |
  atomicrmw add i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_add_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 4)

define void @atomic32_sub_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_sub_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 4)

define void @atomic32_and_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_and_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 4)

define void @atomic32_or_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_or_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 4)

define void @atomic32_xor_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_xor_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 4)

define void @atomic32_xchg_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_xchg_seq_cst
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 5)

define void @atomic32_add_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_add_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 5)

define void @atomic32_sub_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_sub_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 5)

define void @atomic32_and_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_and_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 5)

define void @atomic32_or_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_or_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 5)

define void @atomic32_xor_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_xor_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 5)

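; cmpxchg is lowered to __tsan_atomic<N>_compare_exchange_val(ptr,
; expected, desired, memord), which (as the CHECK lines below assume)
; returns the value observed at ptr rather than a success flag; these
; tests only match the call itself, not its result.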
define void @atomic32_cas_monotonic(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 monotonic
  ret void
}
; CHECK: atomic32_cas_monotonic
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 0)

define void @atomic32_cas_acquire(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 acquire
  ret void
}
; CHECK: atomic32_cas_acquire
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 2)

define void @atomic32_cas_release(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 release
  ret void
}
; CHECK: atomic32_cas_release
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 3)

define void @atomic32_cas_acq_rel(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 acq_rel
  ret void
}
; CHECK: atomic32_cas_acq_rel
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 4)

define void @atomic32_cas_seq_cst(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 seq_cst
  ret void
}
; CHECK: atomic32_cas_seq_cst
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 5)

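; 64-bit atomics mirror the narrower widths above: atomic load/store map
; to __tsan_atomic64_load/__tsan_atomic64_store, and atomicrmw maps to
; __tsan_atomic64_exchange or the matching __tsan_atomic64_fetch_* call.
; The trailing i32 argument encodes the ordering (presumably the C++11
; memory_order values: unordered/monotonic=0, acquire=2, release=3,
; acq_rel=4, seq_cst=5), which is what the CHECK lines verify.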
define i64 @atomic64_load_unordered(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a unordered, align 8
  ret i64 %0
}
; CHECK: atomic64_load_unordered
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0)

define i64 @atomic64_load_monotonic(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a monotonic, align 8
  ret i64 %0
}
; CHECK: atomic64_load_monotonic
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0)

define i64 @atomic64_load_acquire(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a acquire, align 8
  ret i64 %0
}
; CHECK: atomic64_load_acquire
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 2)

define i64 @atomic64_load_seq_cst(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a seq_cst, align 8
  ret i64 %0
}
; CHECK: atomic64_load_seq_cst
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 5)

define void @atomic64_store_unordered(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a unordered, align 8
  ret void
}
; CHECK: atomic64_store_unordered
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0)

define void @atomic64_store_monotonic(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a monotonic, align 8
  ret void
}
; CHECK: atomic64_store_monotonic
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0)

define void @atomic64_store_release(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a release, align 8
  ret void
}
; CHECK: atomic64_store_release
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 3)

define void @atomic64_store_seq_cst(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a seq_cst, align 8
  ret void
}
; CHECK: atomic64_store_seq_cst
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 5)

define void @atomic64_xchg_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_xchg_monotonic
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 0)

define void @atomic64_add_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_add_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 0)

define void @atomic64_sub_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_sub_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 0)

define void @atomic64_and_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_and_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 0)

define void @atomic64_or_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_or_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 0)

define void @atomic64_xor_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_xor_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 0)

define void @atomic64_xchg_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_xchg_acquire
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 2)

define void @atomic64_add_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_add_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 2)

define void @atomic64_sub_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_sub_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 2)

define void @atomic64_and_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_and_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 2)

define void @atomic64_or_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_or_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 2)

define void @atomic64_xor_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_xor_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 2)

define void @atomic64_xchg_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_xchg_release
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 3)

define void @atomic64_add_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_add_release
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 3)

define void @atomic64_sub_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_sub_release
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 3)

define void @atomic64_and_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_and_release
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 3)

define void @atomic64_or_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_or_release
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 3)

define void @atomic64_xor_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_xor_release
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 3)

define void @atomic64_xchg_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_xchg_acq_rel
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 4)

define void @atomic64_add_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_add_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 4)

define void @atomic64_sub_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_sub_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 4)

define void @atomic64_and_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_and_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 4)

define void @atomic64_or_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_or_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 4)

define void @atomic64_xor_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_xor_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 4)

define void @atomic64_xchg_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_xchg_seq_cst
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 5)

define void @atomic64_add_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_add_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 5)

define void @atomic64_sub_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_sub_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 5)

define void @atomic64_and_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_and_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 5)

define void @atomic64_or_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_or_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 5)

define void @atomic64_xor_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_xor_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 5)

define void @atomic64_cas_monotonic(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 monotonic
  ret void
}
; CHECK: atomic64_cas_monotonic
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 0)

define void @atomic64_cas_acquire(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 acquire
  ret void
}
; CHECK: atomic64_cas_acquire
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 2)

define void @atomic64_cas_release(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 release
  ret void
}
; CHECK: atomic64_cas_release
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 3)

define void @atomic64_cas_acq_rel(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 acq_rel
  ret void
}
; CHECK: atomic64_cas_acq_rel
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 4)

define void @atomic64_cas_seq_cst(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 seq_cst
  ret void
}
; CHECK: atomic64_cas_seq_cst
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 5)

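; 128-bit variants repeat the same pattern with i128 operands. Note the
; align 16 on the loads and stores below; the __tsan_atomic128_* entry
; points are assumed to require naturally aligned 16-byte accesses and
; to be available only where the target supports 128-bit atomics.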
define i128 @atomic128_load_unordered(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a unordered, align 16
  ret i128 %0
}
; CHECK: atomic128_load_unordered
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0)

define i128 @atomic128_load_monotonic(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a monotonic, align 16
  ret i128 %0
}
; CHECK: atomic128_load_monotonic
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0)

define i128 @atomic128_load_acquire(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a acquire, align 16
  ret i128 %0
}
; CHECK: atomic128_load_acquire
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 2)

define i128 @atomic128_load_seq_cst(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a seq_cst, align 16
  ret i128 %0
}
; CHECK: atomic128_load_seq_cst
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 5)

define void @atomic128_store_unordered(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a unordered, align 16
  ret void
}
; CHECK: atomic128_store_unordered
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0)

define void @atomic128_store_monotonic(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a monotonic, align 16
  ret void
}
; CHECK: atomic128_store_monotonic
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0)

define void @atomic128_store_release(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a release, align 16
  ret void
}
; CHECK: atomic128_store_release
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 3)

define void @atomic128_store_seq_cst(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a seq_cst, align 16
  ret void
}
; CHECK: atomic128_store_seq_cst
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 5)

define void @atomic128_xchg_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_xchg_monotonic
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 0)

define void @atomic128_add_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_add_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 0)

define void @atomic128_sub_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_sub_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 0)

define void @atomic128_and_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_and_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 0)

define void @atomic128_or_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_or_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 0)

define void @atomic128_xor_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_xor_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 0)

define void @atomic128_xchg_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_xchg_acquire
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 2)

define void @atomic128_add_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_add_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 2)

define void @atomic128_sub_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_sub_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 2)

define void @atomic128_and_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_and_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 2)

define void @atomic128_or_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_or_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 2)

define void @atomic128_xor_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_xor_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 2)

define void @atomic128_xchg_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_xchg_release
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 3)

define void @atomic128_add_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_add_release
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 3)

define void @atomic128_sub_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_sub_release
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 3)

define void @atomic128_and_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_and_release
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 3)

define void @atomic128_or_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_or_release
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 3)

define void @atomic128_xor_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_xor_release
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 3)

define void @atomic128_xchg_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_xchg_acq_rel
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 4)

define void @atomic128_add_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_add_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 4)

define void @atomic128_sub_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_sub_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 4)

define void @atomic128_and_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_and_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 4)

define void @atomic128_or_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_or_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 4)

define void @atomic128_xor_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_xor_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 4)

define void @atomic128_xchg_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_xchg_seq_cst
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 5)

define void @atomic128_add_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_add_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 5)

define void @atomic128_sub_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_sub_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 5)

define void @atomic128_and_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_and_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 5)

define void @atomic128_or_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_or_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 5)

define void @atomic128_xor_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_xor_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 5)

define void @atomic128_cas_monotonic(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 monotonic
  ret void
}
; CHECK: atomic128_cas_monotonic
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 0)

define void @atomic128_cas_acquire(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 acquire
  ret void
}
; CHECK: atomic128_cas_acquire
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 2)

define void @atomic128_cas_release(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 release
  ret void
}
; CHECK: atomic128_cas_release
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 3)

define void @atomic128_cas_acq_rel(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 acq_rel
  ret void
}
; CHECK: atomic128_cas_acq_rel
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 4)

define void @atomic128_cas_seq_cst(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 seq_cst
  ret void
}
; CHECK: atomic128_cas_seq_cst
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 5)

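; Fences: a singlethread fence is lowered to __tsan_atomic_signal_fence
; and a cross-thread fence to __tsan_atomic_thread_fence, both taking
; the same i32 ordering encoding used by the calls above.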
define void @atomic_signal_fence_acquire() nounwind uwtable {
entry:
  fence singlethread acquire
  ret void
}
; CHECK: atomic_signal_fence_acquire
; CHECK: call void @__tsan_atomic_signal_fence(i32 2)

define void @atomic_thread_fence_acquire() nounwind uwtable {
entry:
  fence acquire
  ret void
}
; CHECK: atomic_thread_fence_acquire
; CHECK: call void @__tsan_atomic_thread_fence(i32 2)

define void @atomic_signal_fence_release() nounwind uwtable {
entry:
  fence singlethread release
  ret void
}
; CHECK: atomic_signal_fence_release
; CHECK: call void @__tsan_atomic_signal_fence(i32 3)

define void @atomic_thread_fence_release() nounwind uwtable {
entry:
  fence release
  ret void
}
; CHECK: atomic_thread_fence_release
; CHECK: call void @__tsan_atomic_thread_fence(i32 3)

define void @atomic_signal_fence_acq_rel() nounwind uwtable {
entry:
  fence singlethread acq_rel
  ret void
}
; CHECK: atomic_signal_fence_acq_rel
; CHECK: call void @__tsan_atomic_signal_fence(i32 4)

define void @atomic_thread_fence_acq_rel() nounwind uwtable {
entry:
  fence acq_rel
  ret void
}
; CHECK: atomic_thread_fence_acq_rel
; CHECK: call void @__tsan_atomic_thread_fence(i32 4)

define void @atomic_signal_fence_seq_cst() nounwind uwtable {
entry:
  fence singlethread seq_cst
  ret void
}
; CHECK: atomic_signal_fence_seq_cst
; CHECK: call void @__tsan_atomic_signal_fence(i32 5)

define void @atomic_thread_fence_seq_cst() nounwind uwtable {
entry:
  fence seq_cst
  ret void
}
; CHECK: atomic_thread_fence_seq_cst
; CHECK: call void @__tsan_atomic_thread_fence(i32 5)