; RUN: opt < %s -tsan -S | FileCheck %s
; Check that atomic memory operations are converted to calls into ThreadSanitizer runtime.
target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
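
; The trailing i32 argument of each __tsan_atomic* call checked below encodes
; the memory order. Judging by the CHECK lines, it follows the C++11
; memory_order numbering: 0 = relaxed (also used for unordered accesses),
; 2 = acquire, 3 = release, 4 = acq_rel, 5 = seq_cst.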

define i8 @atomic8_load_unordered(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a unordered, align 1
  ret i8 %0
}
; CHECK: atomic8_load_unordered
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0)

define i8 @atomic8_load_monotonic(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a monotonic, align 1
  ret i8 %0
}
; CHECK: atomic8_load_monotonic
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 0)

define i8 @atomic8_load_acquire(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a acquire, align 1
  ret i8 %0
}
; CHECK: atomic8_load_acquire
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 2)

define i8 @atomic8_load_seq_cst(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a seq_cst, align 1
  ret i8 %0
}
; CHECK: atomic8_load_seq_cst
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 5)

define void @atomic8_store_unordered(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a unordered, align 1
  ret void
}
; CHECK: atomic8_store_unordered
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0)

define void @atomic8_store_monotonic(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a monotonic, align 1
  ret void
}
; CHECK: atomic8_store_monotonic
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 0)

define void @atomic8_store_release(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a release, align 1
  ret void
}
; CHECK: atomic8_store_release
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 3)

define void @atomic8_store_seq_cst(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a seq_cst, align 1
  ret void
}
; CHECK: atomic8_store_seq_cst
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 5)

define void @atomic8_xchg_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xchg_monotonic
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 0)

define void @atomic8_add_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_add_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 0)

define void @atomic8_sub_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_sub_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 0)

define void @atomic8_and_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_and_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 0)

define void @atomic8_or_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_or_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 0)

define void @atomic8_xor_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xor_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 0)

define void @atomic8_nand_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_nand_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 0)

define void @atomic8_xchg_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xchg_acquire
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 2)

define void @atomic8_add_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_add_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 2)

define void @atomic8_sub_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_sub_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 2)

define void @atomic8_and_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_and_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 2)

define void @atomic8_or_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_or_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 2)

define void @atomic8_xor_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xor_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 2)

define void @atomic8_nand_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_nand_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 2)

define void @atomic8_xchg_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xchg_release
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 3)

define void @atomic8_add_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_add_release
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 3)

define void @atomic8_sub_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_sub_release
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 3)

define void @atomic8_and_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_and_release
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 3)

define void @atomic8_or_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_or_release
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 3)

define void @atomic8_xor_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xor_release
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 3)

define void @atomic8_nand_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_nand_release
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 3)

define void @atomic8_xchg_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xchg_acq_rel
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 4)

define void @atomic8_add_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_add_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 4)

define void @atomic8_sub_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_sub_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 4)

define void @atomic8_and_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_and_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 4)

define void @atomic8_or_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_or_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 4)

define void @atomic8_xor_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xor_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 4)

define void @atomic8_nand_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_nand_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 4)

define void @atomic8_xchg_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xchg_seq_cst
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 5)

define void @atomic8_add_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_add_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 5)

define void @atomic8_sub_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_sub_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 5)

define void @atomic8_and_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_and_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 5)

define void @atomic8_or_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_or_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 5)

define void @atomic8_xor_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xor_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 5)

define void @atomic8_nand_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw nand i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_nand_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_nand(i8* %a, i8 0, i32 5)

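; For cmpxchg, the instrumented call takes two ordering arguments; judging by
; the CHECK lines, the first encodes the success ordering and the second the
; failure ordering of the original instruction.
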
define void @atomic8_cas_monotonic(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 monotonic monotonic
  ret void
}
; CHECK: atomic8_cas_monotonic
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 0, i32 0)

define void @atomic8_cas_acquire(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acquire acquire
  ret void
}
; CHECK: atomic8_cas_acquire
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 2, i32 2)

define void @atomic8_cas_release(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 release monotonic
  ret void
}
; CHECK: atomic8_cas_release
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 3, i32 0)

define void @atomic8_cas_acq_rel(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acq_rel acquire
  ret void
}
; CHECK: atomic8_cas_acq_rel
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 4, i32 2)

define void @atomic8_cas_seq_cst(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 seq_cst seq_cst
  ret void
}
; CHECK: atomic8_cas_seq_cst
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 5, i32 5)

define i16 @atomic16_load_unordered(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a unordered, align 2
  ret i16 %0
}
; CHECK: atomic16_load_unordered
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0)

define i16 @atomic16_load_monotonic(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a monotonic, align 2
  ret i16 %0
}
; CHECK: atomic16_load_monotonic
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 0)

define i16 @atomic16_load_acquire(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a acquire, align 2
  ret i16 %0
}
; CHECK: atomic16_load_acquire
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 2)

define i16 @atomic16_load_seq_cst(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a seq_cst, align 2
  ret i16 %0
}
; CHECK: atomic16_load_seq_cst
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 5)

define void @atomic16_store_unordered(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a unordered, align 2
  ret void
}
; CHECK: atomic16_store_unordered
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0)

define void @atomic16_store_monotonic(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a monotonic, align 2
  ret void
}
; CHECK: atomic16_store_monotonic
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 0)

define void @atomic16_store_release(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a release, align 2
  ret void
}
; CHECK: atomic16_store_release
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 3)

define void @atomic16_store_seq_cst(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a seq_cst, align 2
  ret void
}
; CHECK: atomic16_store_seq_cst
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 5)

define void @atomic16_xchg_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xchg_monotonic
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 0)

define void @atomic16_add_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_add_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 0)

define void @atomic16_sub_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_sub_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 0)

define void @atomic16_and_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_and_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 0)

define void @atomic16_or_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_or_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 0)

define void @atomic16_xor_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xor_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 0)

define void @atomic16_nand_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_nand_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 0)

define void @atomic16_xchg_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xchg_acquire
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 2)

define void @atomic16_add_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_add_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 2)

define void @atomic16_sub_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_sub_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 2)

define void @atomic16_and_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_and_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 2)

define void @atomic16_or_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_or_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 2)

define void @atomic16_xor_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xor_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 2)

define void @atomic16_nand_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_nand_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 2)

define void @atomic16_xchg_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xchg_release
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 3)

define void @atomic16_add_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_add_release
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 3)

define void @atomic16_sub_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_sub_release
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 3)

define void @atomic16_and_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_and_release
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 3)

define void @atomic16_or_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_or_release
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 3)

define void @atomic16_xor_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xor_release
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 3)

define void @atomic16_nand_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_nand_release
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 3)

define void @atomic16_xchg_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xchg_acq_rel
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 4)

define void @atomic16_add_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_add_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 4)

define void @atomic16_sub_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_sub_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 4)

define void @atomic16_and_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_and_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 4)

define void @atomic16_or_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_or_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 4)

define void @atomic16_xor_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xor_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 4)

define void @atomic16_nand_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_nand_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 4)

define void @atomic16_xchg_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xchg_seq_cst
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 5)

define void @atomic16_add_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_add_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 5)

define void @atomic16_sub_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_sub_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 5)

define void @atomic16_and_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_and_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 5)

define void @atomic16_or_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_or_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 5)

define void @atomic16_xor_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xor_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 5)

define void @atomic16_nand_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw nand i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_nand_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_nand(i16* %a, i16 0, i32 5)

define void @atomic16_cas_monotonic(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 monotonic monotonic
  ret void
}
; CHECK: atomic16_cas_monotonic
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 0, i32 0)

define void @atomic16_cas_acquire(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acquire acquire
  ret void
}
; CHECK: atomic16_cas_acquire
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 2, i32 2)

define void @atomic16_cas_release(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 release monotonic
  ret void
}
; CHECK: atomic16_cas_release
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 3, i32 0)

define void @atomic16_cas_acq_rel(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acq_rel acquire
  ret void
}
; CHECK: atomic16_cas_acq_rel
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 4, i32 2)

define void @atomic16_cas_seq_cst(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 seq_cst seq_cst
  ret void
}
; CHECK: atomic16_cas_seq_cst
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 5, i32 5)

define i32 @atomic32_load_unordered(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a unordered, align 4
  ret i32 %0
}
; CHECK: atomic32_load_unordered
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0)

define i32 @atomic32_load_monotonic(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a monotonic, align 4
  ret i32 %0
}
; CHECK: atomic32_load_monotonic
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 0)

define i32 @atomic32_load_acquire(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a acquire, align 4
  ret i32 %0
}
; CHECK: atomic32_load_acquire
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 2)

define i32 @atomic32_load_seq_cst(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a seq_cst, align 4
  ret i32 %0
}
; CHECK: atomic32_load_seq_cst
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 5)

define void @atomic32_store_unordered(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a unordered, align 4
  ret void
}
; CHECK: atomic32_store_unordered
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0)

define void @atomic32_store_monotonic(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a monotonic, align 4
  ret void
}
; CHECK: atomic32_store_monotonic
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 0)

define void @atomic32_store_release(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a release, align 4
  ret void
}
; CHECK: atomic32_store_release
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 3)

define void @atomic32_store_seq_cst(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a seq_cst, align 4
  ret void
}
; CHECK: atomic32_store_seq_cst
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 5)

define void @atomic32_xchg_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_xchg_monotonic
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 0)

define void @atomic32_add_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_add_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 0)

define void @atomic32_sub_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_sub_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 0)

define void @atomic32_and_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_and_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 0)

define void @atomic32_or_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_or_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 0)

define void @atomic32_xor_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_xor_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 0)

define void @atomic32_nand_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_nand_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 0)

define void @atomic32_xchg_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_xchg_acquire
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 2)

define void @atomic32_add_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_add_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 2)

define void @atomic32_sub_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_sub_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 2)

define void @atomic32_and_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_and_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 2)

define void @atomic32_or_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_or_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 2)

define void @atomic32_xor_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_xor_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 2)

define void @atomic32_nand_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_nand_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 2)

define void @atomic32_xchg_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_xchg_release
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 3)

define void @atomic32_add_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_add_release
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 3)

define void @atomic32_sub_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_sub_release
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 3)

define void @atomic32_and_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_and_release
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 3)

define void @atomic32_or_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_or_release
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 3)

define void @atomic32_xor_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_xor_release
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 3)

define void @atomic32_nand_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_nand_release
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 3)

define void @atomic32_xchg_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_xchg_acq_rel
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 4)

define void @atomic32_add_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_add_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 4)

define void @atomic32_sub_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_sub_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 4)

define void @atomic32_and_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_and_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 4)

define void @atomic32_or_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_or_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 4)

define void @atomic32_xor_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_xor_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 4)

define void @atomic32_nand_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_nand_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 4)

define void @atomic32_xchg_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_xchg_seq_cst
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 5)

define void @atomic32_add_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_add_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 5)

define void @atomic32_sub_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_sub_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 5)

define void @atomic32_and_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_and_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 5)

define void @atomic32_or_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_or_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 5)

define void @atomic32_xor_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_xor_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 5)

define void @atomic32_nand_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw nand i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_nand_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_nand(i32* %a, i32 0, i32 5)

define void @atomic32_cas_monotonic(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 monotonic monotonic
  ret void
}
; CHECK: atomic32_cas_monotonic
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 0, i32 0)

define void @atomic32_cas_acquire(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 acquire acquire
  ret void
}
; CHECK: atomic32_cas_acquire
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 2, i32 2)

define void @atomic32_cas_release(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 release monotonic
  ret void
}
; CHECK: atomic32_cas_release
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 3, i32 0)

define void @atomic32_cas_acq_rel(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 acq_rel acquire
  ret void
}
; CHECK: atomic32_cas_acq_rel
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 4, i32 2)

define void @atomic32_cas_seq_cst(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 seq_cst seq_cst
  ret void
}
; CHECK: atomic32_cas_seq_cst
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 5, i32 5)

Kostya Serebryanya1259772012-04-27 07:31:53 +00001157define i64 @atomic64_load_unordered(i64* %a) nounwind uwtable {
1158entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001159 %0 = load atomic i64* %a unordered, align 8
Kostya Serebryanya1259772012-04-27 07:31:53 +00001160 ret i64 %0
1161}
1162; CHECK: atomic64_load_unordered
Dmitry Vyukov0044e382012-11-09 14:12:16 +00001163; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001164
1165define i64 @atomic64_load_monotonic(i64* %a) nounwind uwtable {
1166entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001167 %0 = load atomic i64* %a monotonic, align 8
Kostya Serebryanya1259772012-04-27 07:31:53 +00001168 ret i64 %0
1169}
1170; CHECK: atomic64_load_monotonic
Dmitry Vyukov0044e382012-11-09 14:12:16 +00001171; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 0)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001172
1173define i64 @atomic64_load_acquire(i64* %a) nounwind uwtable {
1174entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001175 %0 = load atomic i64* %a acquire, align 8
Kostya Serebryanya1259772012-04-27 07:31:53 +00001176 ret i64 %0
1177}
1178; CHECK: atomic64_load_acquire
Dmitry Vyukov0044e382012-11-09 14:12:16 +00001179; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 2)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001180
1181define i64 @atomic64_load_seq_cst(i64* %a) nounwind uwtable {
1182entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001183 %0 = load atomic i64* %a seq_cst, align 8
Kostya Serebryanya1259772012-04-27 07:31:53 +00001184 ret i64 %0
1185}
1186; CHECK: atomic64_load_seq_cst
Dmitry Vyukov0044e382012-11-09 14:12:16 +00001187; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 5)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001188
1189define void @atomic64_store_unordered(i64* %a) nounwind uwtable {
1190entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001191 store atomic i64 0, i64* %a unordered, align 8
Kostya Serebryanya1259772012-04-27 07:31:53 +00001192 ret void
1193}
1194; CHECK: atomic64_store_unordered
Dmitry Vyukov0044e382012-11-09 14:12:16 +00001195; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001196
1197define void @atomic64_store_monotonic(i64* %a) nounwind uwtable {
1198entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001199 store atomic i64 0, i64* %a monotonic, align 8
Kostya Serebryanya1259772012-04-27 07:31:53 +00001200 ret void
1201}
1202; CHECK: atomic64_store_monotonic
Dmitry Vyukov0044e382012-11-09 14:12:16 +00001203; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 0)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001204
1205define void @atomic64_store_release(i64* %a) nounwind uwtable {
1206entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001207 store atomic i64 0, i64* %a release, align 8
Kostya Serebryanya1259772012-04-27 07:31:53 +00001208 ret void
1209}
1210; CHECK: atomic64_store_release
Dmitry Vyukov0044e382012-11-09 14:12:16 +00001211; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 3)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001212
1213define void @atomic64_store_seq_cst(i64* %a) nounwind uwtable {
1214entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001215 store atomic i64 0, i64* %a seq_cst, align 8
Kostya Serebryanya1259772012-04-27 07:31:53 +00001216 ret void
1217}
1218; CHECK: atomic64_store_seq_cst
Dmitry Vyukov0044e382012-11-09 14:12:16 +00001219; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 5)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001220
define void @atomic64_xchg_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_xchg_monotonic
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 0)

define void @atomic64_add_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_add_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 0)

define void @atomic64_sub_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_sub_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 0)

define void @atomic64_and_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_and_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 0)

define void @atomic64_or_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_or_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 0)

define void @atomic64_xor_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_xor_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 0)

define void @atomic64_nand_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw nand i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_nand_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 0)

define void @atomic64_xchg_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_xchg_acquire
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 2)

define void @atomic64_add_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_add_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 2)

define void @atomic64_sub_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_sub_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 2)

define void @atomic64_and_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_and_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 2)

define void @atomic64_or_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_or_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 2)

define void @atomic64_xor_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_xor_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 2)

define void @atomic64_nand_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw nand i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_nand_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 2)

define void @atomic64_xchg_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_xchg_release
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 3)

define void @atomic64_add_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_add_release
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 3)

define void @atomic64_sub_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_sub_release
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 3)

define void @atomic64_and_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_and_release
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 3)

define void @atomic64_or_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_or_release
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 3)

define void @atomic64_xor_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_xor_release
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 3)

define void @atomic64_nand_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw nand i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_nand_release
; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 3)

define void @atomic64_xchg_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_xchg_acq_rel
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 4)

define void @atomic64_add_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_add_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 4)

define void @atomic64_sub_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_sub_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 4)

define void @atomic64_and_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_and_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 4)

define void @atomic64_or_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_or_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 4)

define void @atomic64_xor_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_xor_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 4)

define void @atomic64_nand_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw nand i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_nand_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 4)

define void @atomic64_xchg_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_xchg_seq_cst
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 5)

define void @atomic64_add_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_add_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 5)

define void @atomic64_sub_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_sub_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 5)

define void @atomic64_and_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_and_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 5)

define void @atomic64_or_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_or_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 5)

define void @atomic64_xor_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_xor_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 5)

define void @atomic64_nand_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw nand i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_nand_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_nand(i64* %a, i64 0, i32 5)

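; cmpxchg carries separate success and failure orderings; both are forwarded
; as the last two i32 arguments of __tsan_atomic64_compare_exchange_val.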
define void @atomic64_cas_monotonic(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 monotonic monotonic
  ret void
}
; CHECK: atomic64_cas_monotonic
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 0, i32 0)

define void @atomic64_cas_acquire(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 acquire acquire
  ret void
}
; CHECK: atomic64_cas_acquire
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 2, i32 2)

define void @atomic64_cas_release(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 release monotonic
  ret void
}
; CHECK: atomic64_cas_release
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 3, i32 0)

define void @atomic64_cas_acq_rel(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 acq_rel acquire
  ret void
}
; CHECK: atomic64_cas_acq_rel
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 4, i32 2)

define void @atomic64_cas_seq_cst(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 seq_cst seq_cst
  ret void
}
; CHECK: atomic64_cas_seq_cst
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 5, i32 5)

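; The i128 tests below mirror the i64 ones at 16-byte alignment, exercising
; the __tsan_atomic128_* entry points.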
define i128 @atomic128_load_unordered(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a unordered, align 16
  ret i128 %0
}
; CHECK: atomic128_load_unordered
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0)

define i128 @atomic128_load_monotonic(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a monotonic, align 16
  ret i128 %0
}
; CHECK: atomic128_load_monotonic
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 0)

define i128 @atomic128_load_acquire(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a acquire, align 16
  ret i128 %0
}
; CHECK: atomic128_load_acquire
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 2)

define i128 @atomic128_load_seq_cst(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a seq_cst, align 16
  ret i128 %0
}
; CHECK: atomic128_load_seq_cst
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 5)

define void @atomic128_store_unordered(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a unordered, align 16
  ret void
}
; CHECK: atomic128_store_unordered
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0)

define void @atomic128_store_monotonic(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a monotonic, align 16
  ret void
}
; CHECK: atomic128_store_monotonic
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 0)

define void @atomic128_store_release(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a release, align 16
  ret void
}
; CHECK: atomic128_store_release
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 3)

define void @atomic128_store_seq_cst(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a seq_cst, align 16
  ret void
}
; CHECK: atomic128_store_seq_cst
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 5)

define void @atomic128_xchg_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_xchg_monotonic
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 0)

define void @atomic128_add_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_add_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 0)

define void @atomic128_sub_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_sub_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 0)

define void @atomic128_and_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_and_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 0)

define void @atomic128_or_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_or_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 0)

define void @atomic128_xor_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_xor_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 0)

define void @atomic128_nand_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw nand i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_nand_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 0)

define void @atomic128_xchg_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_xchg_acquire
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 2)

define void @atomic128_add_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_add_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 2)

define void @atomic128_sub_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_sub_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 2)

define void @atomic128_and_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_and_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 2)

define void @atomic128_or_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_or_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 2)

define void @atomic128_xor_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_xor_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 2)

define void @atomic128_nand_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw nand i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_nand_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 2)

define void @atomic128_xchg_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_xchg_release
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 3)

define void @atomic128_add_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_add_release
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 3)

define void @atomic128_sub_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_sub_release
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 3)

define void @atomic128_and_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_and_release
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 3)

define void @atomic128_or_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_or_release
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 3)

define void @atomic128_xor_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_xor_release
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 3)

define void @atomic128_nand_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw nand i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_nand_release
; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 3)

define void @atomic128_xchg_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_xchg_acq_rel
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 4)

define void @atomic128_add_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_add_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 4)

define void @atomic128_sub_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_sub_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 4)

define void @atomic128_and_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_and_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 4)

define void @atomic128_or_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_or_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 4)

define void @atomic128_xor_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_xor_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 4)

define void @atomic128_nand_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw nand i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_nand_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 4)

define void @atomic128_xchg_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_xchg_seq_cst
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 5)

define void @atomic128_add_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_add_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 5)

define void @atomic128_sub_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_sub_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 5)

define void @atomic128_and_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_and_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 5)

define void @atomic128_or_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_or_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 5)

define void @atomic128_xor_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_xor_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 5)

define void @atomic128_nand_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw nand i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_nand_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_nand(i128* %a, i128 0, i32 5)

define void @atomic128_cas_monotonic(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 monotonic monotonic
  ret void
}
; CHECK: atomic128_cas_monotonic
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 0, i32 0)

define void @atomic128_cas_acquire(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 acquire acquire
  ret void
}
; CHECK: atomic128_cas_acquire
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 2, i32 2)

define void @atomic128_cas_release(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 release monotonic
  ret void
}
; CHECK: atomic128_cas_release
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 3, i32 0)

define void @atomic128_cas_acq_rel(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 acq_rel acquire
  ret void
}
; CHECK: atomic128_cas_acq_rel
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 4, i32 2)

define void @atomic128_cas_seq_cst(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 seq_cst seq_cst
  ret void
}
; CHECK: atomic128_cas_seq_cst
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 5, i32 5)

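; Fences: "fence singlethread" lowers to __tsan_atomic_signal_fence, a plain
; "fence" to __tsan_atomic_thread_fence, with the same memory-order encoding.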
define void @atomic_signal_fence_acquire() nounwind uwtable {
entry:
  fence singlethread acquire
  ret void
}
; CHECK: atomic_signal_fence_acquire
; CHECK: call void @__tsan_atomic_signal_fence(i32 2)

define void @atomic_thread_fence_acquire() nounwind uwtable {
entry:
  fence acquire
  ret void
}
; CHECK: atomic_thread_fence_acquire
; CHECK: call void @__tsan_atomic_thread_fence(i32 2)

define void @atomic_signal_fence_release() nounwind uwtable {
entry:
  fence singlethread release
  ret void
}
; CHECK: atomic_signal_fence_release
; CHECK: call void @__tsan_atomic_signal_fence(i32 3)

define void @atomic_thread_fence_release() nounwind uwtable {
entry:
  fence release
  ret void
}
; CHECK: atomic_thread_fence_release
; CHECK: call void @__tsan_atomic_thread_fence(i32 3)

define void @atomic_signal_fence_acq_rel() nounwind uwtable {
entry:
  fence singlethread acq_rel
  ret void
}
; CHECK: atomic_signal_fence_acq_rel
; CHECK: call void @__tsan_atomic_signal_fence(i32 4)

define void @atomic_thread_fence_acq_rel() nounwind uwtable {
entry:
  fence acq_rel
  ret void
}
; CHECK: atomic_thread_fence_acq_rel
; CHECK: call void @__tsan_atomic_thread_fence(i32 4)

define void @atomic_signal_fence_seq_cst() nounwind uwtable {
entry:
  fence singlethread seq_cst
  ret void
}
; CHECK: atomic_signal_fence_seq_cst
; CHECK: call void @__tsan_atomic_signal_fence(i32 5)

define void @atomic_thread_fence_seq_cst() nounwind uwtable {
entry:
  fence seq_cst
  ret void
}
; CHECK: atomic_thread_fence_seq_cst
; CHECK: call void @__tsan_atomic_thread_fence(i32 5)