; RUN: opt < %s -tsan -S | FileCheck %s
; Check that atomic memory operations are converted to calls into the ThreadSanitizer runtime.
target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"

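; The trailing i32 argument on every __tsan_atomic* call below encodes the
; memory order. Judging only from the CHECK lines in this test, the encoding
; appears to be a one-hot bit OR'd into a magic base of 100500 (presumably a
; guard against compiler/runtime ABI skew):
;   unordered/monotonic -> 100501 (100500 | 1<<0)
;   acquire             -> 100504 (100500 | 1<<2)
;   release             -> 100508 (100500 | 1<<3)
;   acq_rel             -> 100516 (100500 | 1<<4)
;   seq_cst             -> 100532 (100500 | 1<<5)
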
define i8 @atomic8_load_unordered(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a unordered, align 1
  ret i8 %0
}
; CHECK: atomic8_load_unordered
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 100501)

define i8 @atomic8_load_monotonic(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a monotonic, align 1
  ret i8 %0
}
; CHECK: atomic8_load_monotonic
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 100501)

define i8 @atomic8_load_acquire(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a acquire, align 1
  ret i8 %0
}
; CHECK: atomic8_load_acquire
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 100504)

define i8 @atomic8_load_seq_cst(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a seq_cst, align 1
  ret i8 %0
}
; CHECK: atomic8_load_seq_cst
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 100532)

define void @atomic8_store_unordered(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a unordered, align 1
  ret void
}
; CHECK: atomic8_store_unordered
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 100501)

define void @atomic8_store_monotonic(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a monotonic, align 1
  ret void
}
; CHECK: atomic8_store_monotonic
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 100501)

define void @atomic8_store_release(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a release, align 1
  ret void
}
; CHECK: atomic8_store_release
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 100508)

define void @atomic8_store_seq_cst(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a seq_cst, align 1
  ret void
}
; CHECK: atomic8_store_seq_cst
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 100532)

define void @atomic8_xchg_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xchg_monotonic
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 100501)

define void @atomic8_add_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_add_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 100501)

define void @atomic8_sub_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_sub_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 100501)

define void @atomic8_and_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_and_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 100501)

define void @atomic8_or_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_or_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 100501)

define void @atomic8_xor_monotonic(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 monotonic
  ret void
}
; CHECK: atomic8_xor_monotonic
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 100501)

define void @atomic8_xchg_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xchg_acquire
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 100504)

define void @atomic8_add_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_add_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 100504)

define void @atomic8_sub_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_sub_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 100504)

define void @atomic8_and_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_and_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 100504)

define void @atomic8_or_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_or_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 100504)

define void @atomic8_xor_acquire(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acquire
  ret void
}
; CHECK: atomic8_xor_acquire
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 100504)

define void @atomic8_xchg_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xchg_release
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 100508)

define void @atomic8_add_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_add_release
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 100508)

define void @atomic8_sub_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_sub_release
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 100508)

define void @atomic8_and_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_and_release
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 100508)

define void @atomic8_or_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_or_release
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 100508)

define void @atomic8_xor_release(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 release
  ret void
}
; CHECK: atomic8_xor_release
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 100508)

define void @atomic8_xchg_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xchg_acq_rel
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 100516)

define void @atomic8_add_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_add_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 100516)

define void @atomic8_sub_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_sub_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 100516)

define void @atomic8_and_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_and_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 100516)

define void @atomic8_or_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_or_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 100516)

define void @atomic8_xor_acq_rel(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 acq_rel
  ret void
}
; CHECK: atomic8_xor_acq_rel
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 100516)

define void @atomic8_xchg_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xchg i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xchg_seq_cst
; CHECK: call i8 @__tsan_atomic8_exchange(i8* %a, i8 0, i32 100532)

define void @atomic8_add_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw add i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_add_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_add(i8* %a, i8 0, i32 100532)

define void @atomic8_sub_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw sub i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_sub_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_sub(i8* %a, i8 0, i32 100532)

define void @atomic8_and_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw and i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_and_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_and(i8* %a, i8 0, i32 100532)

define void @atomic8_or_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw or i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_or_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_or(i8* %a, i8 0, i32 100532)

define void @atomic8_xor_seq_cst(i8* %a) nounwind uwtable {
entry:
  atomicrmw xor i8* %a, i8 0 seq_cst
  ret void
}
; CHECK: atomic8_xor_seq_cst
; CHECK: call i8 @__tsan_atomic8_fetch_xor(i8* %a, i8 0, i32 100532)

define void @atomic8_cas_monotonic(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 monotonic
  ret void
}
; CHECK: atomic8_cas_monotonic
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 100501)

define void @atomic8_cas_acquire(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acquire
  ret void
}
; CHECK: atomic8_cas_acquire
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 100504)

define void @atomic8_cas_release(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 release
  ret void
}
; CHECK: atomic8_cas_release
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 100508)

define void @atomic8_cas_acq_rel(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 acq_rel
  ret void
}
; CHECK: atomic8_cas_acq_rel
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 100516)

define void @atomic8_cas_seq_cst(i8* %a) nounwind uwtable {
entry:
  cmpxchg i8* %a, i8 0, i8 1 seq_cst
  ret void
}
; CHECK: atomic8_cas_seq_cst
; CHECK: call i8 @__tsan_atomic8_compare_exchange_val(i8* %a, i8 0, i8 1, i32 100532)

define i16 @atomic16_load_unordered(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a unordered, align 2
  ret i16 %0
}
; CHECK: atomic16_load_unordered
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 100501)

define i16 @atomic16_load_monotonic(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a monotonic, align 2
  ret i16 %0
}
; CHECK: atomic16_load_monotonic
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 100501)

define i16 @atomic16_load_acquire(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a acquire, align 2
  ret i16 %0
}
; CHECK: atomic16_load_acquire
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 100504)

define i16 @atomic16_load_seq_cst(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a seq_cst, align 2
  ret i16 %0
}
; CHECK: atomic16_load_seq_cst
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 100532)

define void @atomic16_store_unordered(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a unordered, align 2
  ret void
}
; CHECK: atomic16_store_unordered
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 100501)

define void @atomic16_store_monotonic(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a monotonic, align 2
  ret void
}
; CHECK: atomic16_store_monotonic
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 100501)

define void @atomic16_store_release(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a release, align 2
  ret void
}
; CHECK: atomic16_store_release
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 100508)

define void @atomic16_store_seq_cst(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a seq_cst, align 2
  ret void
}
; CHECK: atomic16_store_seq_cst
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 100532)

define void @atomic16_xchg_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xchg_monotonic
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 100501)

define void @atomic16_add_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_add_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 100501)

define void @atomic16_sub_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_sub_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 100501)

define void @atomic16_and_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_and_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 100501)

define void @atomic16_or_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_or_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 100501)

define void @atomic16_xor_monotonic(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 monotonic
  ret void
}
; CHECK: atomic16_xor_monotonic
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 100501)

define void @atomic16_xchg_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xchg_acquire
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 100504)

define void @atomic16_add_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_add_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 100504)

define void @atomic16_sub_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_sub_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 100504)

define void @atomic16_and_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_and_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 100504)

define void @atomic16_or_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_or_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 100504)

define void @atomic16_xor_acquire(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acquire
  ret void
}
; CHECK: atomic16_xor_acquire
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 100504)

define void @atomic16_xchg_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xchg_release
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 100508)

define void @atomic16_add_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_add_release
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 100508)

define void @atomic16_sub_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_sub_release
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 100508)

define void @atomic16_and_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_and_release
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 100508)

define void @atomic16_or_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_or_release
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 100508)

define void @atomic16_xor_release(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 release
  ret void
}
; CHECK: atomic16_xor_release
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 100508)

define void @atomic16_xchg_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xchg_acq_rel
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 100516)

define void @atomic16_add_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_add_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 100516)

define void @atomic16_sub_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_sub_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 100516)

define void @atomic16_and_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_and_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 100516)

define void @atomic16_or_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_or_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 100516)

define void @atomic16_xor_acq_rel(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 acq_rel
  ret void
}
; CHECK: atomic16_xor_acq_rel
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 100516)

define void @atomic16_xchg_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xchg i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xchg_seq_cst
; CHECK: call i16 @__tsan_atomic16_exchange(i16* %a, i16 0, i32 100532)

define void @atomic16_add_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw add i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_add_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_add(i16* %a, i16 0, i32 100532)

define void @atomic16_sub_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw sub i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_sub_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_sub(i16* %a, i16 0, i32 100532)

define void @atomic16_and_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw and i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_and_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_and(i16* %a, i16 0, i32 100532)

define void @atomic16_or_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw or i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_or_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_or(i16* %a, i16 0, i32 100532)

define void @atomic16_xor_seq_cst(i16* %a) nounwind uwtable {
entry:
  atomicrmw xor i16* %a, i16 0 seq_cst
  ret void
}
; CHECK: atomic16_xor_seq_cst
; CHECK: call i16 @__tsan_atomic16_fetch_xor(i16* %a, i16 0, i32 100532)

define void @atomic16_cas_monotonic(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 monotonic
  ret void
}
; CHECK: atomic16_cas_monotonic
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 100501)

define void @atomic16_cas_acquire(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acquire
  ret void
}
; CHECK: atomic16_cas_acquire
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 100504)

define void @atomic16_cas_release(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 release
  ret void
}
; CHECK: atomic16_cas_release
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 100508)

define void @atomic16_cas_acq_rel(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 acq_rel
  ret void
}
; CHECK: atomic16_cas_acq_rel
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 100516)

define void @atomic16_cas_seq_cst(i16* %a) nounwind uwtable {
entry:
  cmpxchg i16* %a, i16 0, i16 1 seq_cst
  ret void
}
; CHECK: atomic16_cas_seq_cst
; CHECK: call i16 @__tsan_atomic16_compare_exchange_val(i16* %a, i16 0, i16 1, i32 100532)

define i32 @atomic32_load_unordered(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a unordered, align 4
  ret i32 %0
}
; CHECK: atomic32_load_unordered
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 100501)

define i32 @atomic32_load_monotonic(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a monotonic, align 4
  ret i32 %0
}
; CHECK: atomic32_load_monotonic
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 100501)

define i32 @atomic32_load_acquire(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a acquire, align 4
  ret i32 %0
}
; CHECK: atomic32_load_acquire
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 100504)

define i32 @atomic32_load_seq_cst(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a seq_cst, align 4
  ret i32 %0
}
; CHECK: atomic32_load_seq_cst
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 100532)

define void @atomic32_store_unordered(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a unordered, align 4
  ret void
}
; CHECK: atomic32_store_unordered
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 100501)

define void @atomic32_store_monotonic(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a monotonic, align 4
  ret void
}
; CHECK: atomic32_store_monotonic
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 100501)

define void @atomic32_store_release(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a release, align 4
  ret void
}
; CHECK: atomic32_store_release
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 100508)

define void @atomic32_store_seq_cst(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a seq_cst, align 4
  ret void
}
; CHECK: atomic32_store_seq_cst
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 100532)

define void @atomic32_xchg_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_xchg_monotonic
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 100501)

define void @atomic32_add_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_add_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 100501)

define void @atomic32_sub_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_sub_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 100501)

define void @atomic32_and_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_and_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 100501)

define void @atomic32_or_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_or_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 100501)

define void @atomic32_xor_monotonic(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 monotonic
  ret void
}
; CHECK: atomic32_xor_monotonic
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 100501)

define void @atomic32_xchg_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_xchg_acquire
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 100504)

define void @atomic32_add_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_add_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 100504)

define void @atomic32_sub_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_sub_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 100504)

define void @atomic32_and_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_and_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 100504)

define void @atomic32_or_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_or_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 100504)

define void @atomic32_xor_acquire(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 acquire
  ret void
}
; CHECK: atomic32_xor_acquire
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 100504)

define void @atomic32_xchg_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_xchg_release
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 100508)

define void @atomic32_add_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_add_release
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 100508)

define void @atomic32_sub_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_sub_release
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 100508)

define void @atomic32_and_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_and_release
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 100508)

define void @atomic32_or_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_or_release
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 100508)

define void @atomic32_xor_release(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 release
  ret void
}
; CHECK: atomic32_xor_release
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 100508)

define void @atomic32_xchg_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_xchg_acq_rel
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 100516)

define void @atomic32_add_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_add_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 100516)

define void @atomic32_sub_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_sub_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 100516)

define void @atomic32_and_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_and_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 100516)

define void @atomic32_or_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_or_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 100516)

define void @atomic32_xor_acq_rel(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 acq_rel
  ret void
}
; CHECK: atomic32_xor_acq_rel
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 100516)

define void @atomic32_xchg_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw xchg i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_xchg_seq_cst
; CHECK: call i32 @__tsan_atomic32_exchange(i32* %a, i32 0, i32 100532)

define void @atomic32_add_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw add i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_add_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_add(i32* %a, i32 0, i32 100532)

define void @atomic32_sub_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw sub i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_sub_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_sub(i32* %a, i32 0, i32 100532)

define void @atomic32_and_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw and i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_and_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_and(i32* %a, i32 0, i32 100532)

define void @atomic32_or_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw or i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_or_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_or(i32* %a, i32 0, i32 100532)

define void @atomic32_xor_seq_cst(i32* %a) nounwind uwtable {
entry:
  atomicrmw xor i32* %a, i32 0 seq_cst
  ret void
}
; CHECK: atomic32_xor_seq_cst
; CHECK: call i32 @__tsan_atomic32_fetch_xor(i32* %a, i32 0, i32 100532)

define void @atomic32_cas_monotonic(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 monotonic
  ret void
}
; CHECK: atomic32_cas_monotonic
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 100501)

define void @atomic32_cas_acquire(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 acquire
  ret void
}
; CHECK: atomic32_cas_acquire
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 100504)

define void @atomic32_cas_release(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 release
  ret void
}
; CHECK: atomic32_cas_release
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 100508)

define void @atomic32_cas_acq_rel(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 acq_rel
  ret void
}
; CHECK: atomic32_cas_acq_rel
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 100516)

define void @atomic32_cas_seq_cst(i32* %a) nounwind uwtable {
entry:
  cmpxchg i32* %a, i32 0, i32 1 seq_cst
  ret void
}
; CHECK: atomic32_cas_seq_cst
; CHECK: call i32 @__tsan_atomic32_compare_exchange_val(i32* %a, i32 0, i32 1, i32 100532)

define i64 @atomic64_load_unordered(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a unordered, align 8
  ret i64 %0
}
; CHECK: atomic64_load_unordered
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 100501)

define i64 @atomic64_load_monotonic(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a monotonic, align 8
  ret i64 %0
}
; CHECK: atomic64_load_monotonic
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 100501)

define i64 @atomic64_load_acquire(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a acquire, align 8
  ret i64 %0
}
; CHECK: atomic64_load_acquire
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 100504)

define i64 @atomic64_load_seq_cst(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a seq_cst, align 8
  ret i64 %0
}
; CHECK: atomic64_load_seq_cst
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 100532)

define void @atomic64_store_unordered(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a unordered, align 8
  ret void
}
; CHECK: atomic64_store_unordered
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 100501)

define void @atomic64_store_monotonic(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a monotonic, align 8
  ret void
}
; CHECK: atomic64_store_monotonic
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 100501)

define void @atomic64_store_release(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a release, align 8
  ret void
}
; CHECK: atomic64_store_release
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 100508)

define void @atomic64_store_seq_cst(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a seq_cst, align 8
  ret void
}
; CHECK: atomic64_store_seq_cst
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 100532)

define void @atomic64_xchg_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_xchg_monotonic
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 100501)

define void @atomic64_add_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_add_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 100501)

define void @atomic64_sub_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_sub_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 100501)

define void @atomic64_and_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_and_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 100501)

define void @atomic64_or_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_or_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 100501)

define void @atomic64_xor_monotonic(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 monotonic
  ret void
}
; CHECK: atomic64_xor_monotonic
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 100501)

define void @atomic64_xchg_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_xchg_acquire
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 100504)

define void @atomic64_add_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_add_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 100504)

define void @atomic64_sub_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_sub_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 100504)

define void @atomic64_and_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_and_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 100504)

define void @atomic64_or_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_or_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 100504)

define void @atomic64_xor_acquire(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 acquire
  ret void
}
; CHECK: atomic64_xor_acquire
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 100504)

define void @atomic64_xchg_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_xchg_release
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 100508)

define void @atomic64_add_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_add_release
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 100508)

define void @atomic64_sub_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_sub_release
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 100508)

define void @atomic64_and_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_and_release
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 100508)

define void @atomic64_or_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_or_release
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 100508)

define void @atomic64_xor_release(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 release
  ret void
}
; CHECK: atomic64_xor_release
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 100508)

define void @atomic64_xchg_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_xchg_acq_rel
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 100516)

define void @atomic64_add_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_add_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 100516)

define void @atomic64_sub_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_sub_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 100516)

define void @atomic64_and_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_and_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 100516)

define void @atomic64_or_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_or_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 100516)

define void @atomic64_xor_acq_rel(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 acq_rel
  ret void
}
; CHECK: atomic64_xor_acq_rel
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 100516)

define void @atomic64_xchg_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw xchg i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_xchg_seq_cst
; CHECK: call i64 @__tsan_atomic64_exchange(i64* %a, i64 0, i32 100532)

define void @atomic64_add_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw add i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_add_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_add(i64* %a, i64 0, i32 100532)

define void @atomic64_sub_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw sub i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_sub_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_sub(i64* %a, i64 0, i32 100532)

define void @atomic64_and_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw and i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_and_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_and(i64* %a, i64 0, i32 100532)

define void @atomic64_or_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw or i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_or_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_or(i64* %a, i64 0, i32 100532)

define void @atomic64_xor_seq_cst(i64* %a) nounwind uwtable {
entry:
  atomicrmw xor i64* %a, i64 0 seq_cst
  ret void
}
; CHECK: atomic64_xor_seq_cst
; CHECK: call i64 @__tsan_atomic64_fetch_xor(i64* %a, i64 0, i32 100532)

define void @atomic64_cas_monotonic(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 monotonic
  ret void
}
; CHECK: atomic64_cas_monotonic
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 100501)

define void @atomic64_cas_acquire(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 acquire
  ret void
}
; CHECK: atomic64_cas_acquire
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 100504)

define void @atomic64_cas_release(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 release
  ret void
}
; CHECK: atomic64_cas_release
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 100508)

define void @atomic64_cas_acq_rel(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 acq_rel
  ret void
}
; CHECK: atomic64_cas_acq_rel
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 100516)

define void @atomic64_cas_seq_cst(i64* %a) nounwind uwtable {
entry:
  cmpxchg i64* %a, i64 0, i64 1 seq_cst
  ret void
}
; CHECK: atomic64_cas_seq_cst
; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 100532)

Kostya Serebryanya1259772012-04-27 07:31:53 +00001381define i128 @atomic128_load_unordered(i128* %a) nounwind uwtable {
1382entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001383 %0 = load atomic i128* %a unordered, align 16
Kostya Serebryanya1259772012-04-27 07:31:53 +00001384 ret i128 %0
1385}
1386; CHECK: atomic128_load_unordered
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001387; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 100501)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001388
1389define i128 @atomic128_load_monotonic(i128* %a) nounwind uwtable {
1390entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001391 %0 = load atomic i128* %a monotonic, align 16
Kostya Serebryanya1259772012-04-27 07:31:53 +00001392 ret i128 %0
1393}
1394; CHECK: atomic128_load_monotonic
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001395; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 100501)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001396
1397define i128 @atomic128_load_acquire(i128* %a) nounwind uwtable {
1398entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001399 %0 = load atomic i128* %a acquire, align 16
Kostya Serebryanya1259772012-04-27 07:31:53 +00001400 ret i128 %0
1401}
1402; CHECK: atomic128_load_acquire
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001403; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 100504)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001404
1405define i128 @atomic128_load_seq_cst(i128* %a) nounwind uwtable {
1406entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001407 %0 = load atomic i128* %a seq_cst, align 16
Kostya Serebryanya1259772012-04-27 07:31:53 +00001408 ret i128 %0
1409}
1410; CHECK: atomic128_load_seq_cst
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001411; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 100532)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001412
1413define void @atomic128_store_unordered(i128* %a) nounwind uwtable {
1414entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001415 store atomic i128 0, i128* %a unordered, align 16
Kostya Serebryanya1259772012-04-27 07:31:53 +00001416 ret void
1417}
1418; CHECK: atomic128_store_unordered
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001419; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 100501)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001420
1421define void @atomic128_store_monotonic(i128* %a) nounwind uwtable {
1422entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001423 store atomic i128 0, i128* %a monotonic, align 16
Kostya Serebryanya1259772012-04-27 07:31:53 +00001424 ret void
1425}
1426; CHECK: atomic128_store_monotonic
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001427; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 100501)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001428
1429define void @atomic128_store_release(i128* %a) nounwind uwtable {
1430entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001431 store atomic i128 0, i128* %a release, align 16
Kostya Serebryanya1259772012-04-27 07:31:53 +00001432 ret void
1433}
1434; CHECK: atomic128_store_release
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001435; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 100508)
Kostya Serebryanya1259772012-04-27 07:31:53 +00001436
1437define void @atomic128_store_seq_cst(i128* %a) nounwind uwtable {
1438entry:
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001439 store atomic i128 0, i128* %a seq_cst, align 16
Kostya Serebryanya1259772012-04-27 07:31:53 +00001440 ret void
1441}
1442; CHECK: atomic128_store_seq_cst
Dmitry Vyukov84d75cd2012-10-03 13:19:20 +00001443; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 100532)
Dmitry Vyukov92b9e1d2012-11-09 12:55:36 +00001444
1445define void @atomic128_xchg_monotonic(i128* %a) nounwind uwtable {
1446entry:
1447 atomicrmw xchg i128* %a, i128 0 monotonic
1448 ret void
1449}
1450; CHECK: atomic128_xchg_monotonic
1451; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 100501)
1452
define void @atomic128_add_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_add_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 100501)

define void @atomic128_sub_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_sub_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 100501)

define void @atomic128_and_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_and_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 100501)

define void @atomic128_or_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_or_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 100501)

define void @atomic128_xor_monotonic(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 monotonic
  ret void
}
; CHECK: atomic128_xor_monotonic
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 100501)

define void @atomic128_xchg_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_xchg_acquire
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 100504)

define void @atomic128_add_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_add_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 100504)

define void @atomic128_sub_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_sub_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 100504)

define void @atomic128_and_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_and_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 100504)

define void @atomic128_or_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_or_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 100504)

define void @atomic128_xor_acquire(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 acquire
  ret void
}
; CHECK: atomic128_xor_acquire
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 100504)

define void @atomic128_xchg_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_xchg_release
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 100508)

define void @atomic128_add_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_add_release
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 100508)

define void @atomic128_sub_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_sub_release
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 100508)

define void @atomic128_and_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_and_release
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 100508)

define void @atomic128_or_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_or_release
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 100508)

define void @atomic128_xor_release(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 release
  ret void
}
; CHECK: atomic128_xor_release
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 100508)

define void @atomic128_xchg_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_xchg_acq_rel
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 100516)

define void @atomic128_add_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_add_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 100516)

define void @atomic128_sub_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_sub_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 100516)

define void @atomic128_and_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_and_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 100516)

define void @atomic128_or_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_or_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 100516)

define void @atomic128_xor_acq_rel(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 acq_rel
  ret void
}
; CHECK: atomic128_xor_acq_rel
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 100516)

define void @atomic128_xchg_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw xchg i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_xchg_seq_cst
; CHECK: call i128 @__tsan_atomic128_exchange(i128* %a, i128 0, i32 100532)

define void @atomic128_add_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw add i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_add_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_add(i128* %a, i128 0, i32 100532)

define void @atomic128_sub_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw sub i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_sub_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_sub(i128* %a, i128 0, i32 100532)

define void @atomic128_and_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw and i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_and_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_and(i128* %a, i128 0, i32 100532)

define void @atomic128_or_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw or i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_or_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_or(i128* %a, i128 0, i32 100532)

define void @atomic128_xor_seq_cst(i128* %a) nounwind uwtable {
entry:
  atomicrmw xor i128* %a, i128 0 seq_cst
  ret void
}
; CHECK: atomic128_xor_seq_cst
; CHECK: call i128 @__tsan_atomic128_fetch_xor(i128* %a, i128 0, i32 100532)

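; The cmpxchg tests below check lowering to
; __tsan_atomic128_compare_exchange_val(addr, expected, desired, order);
; judging from the i128 return type in the expected calls, the runtime
; function returns the value that was actually loaded from memory.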
define void @atomic128_cas_monotonic(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 monotonic
  ret void
}
; CHECK: atomic128_cas_monotonic
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 100501)

define void @atomic128_cas_acquire(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 acquire
  ret void
}
; CHECK: atomic128_cas_acquire
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 100504)

define void @atomic128_cas_release(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 release
  ret void
}
; CHECK: atomic128_cas_release
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 100508)

define void @atomic128_cas_acq_rel(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 acq_rel
  ret void
}
; CHECK: atomic128_cas_acq_rel
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 100516)

define void @atomic128_cas_seq_cst(i128* %a) nounwind uwtable {
entry:
  cmpxchg i128* %a, i128 0, i128 1 seq_cst
  ret void
}
; CHECK: atomic128_cas_seq_cst
; CHECK: call i128 @__tsan_atomic128_compare_exchange_val(i128* %a, i128 0, i128 1, i32 100532)

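; Fence tests: "fence singlethread" (a compiler-only/signal fence) is
; expected to lower to __tsan_atomic_signal_fence, while a plain cross-thread
; "fence" lowers to __tsan_atomic_thread_fence; both take just the memory
; order.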
define void @atomic_signal_fence_acquire() nounwind uwtable {
entry:
  fence singlethread acquire
  ret void
}
; CHECK: atomic_signal_fence_acquire
; CHECK: call void @__tsan_atomic_signal_fence(i32 100504)

define void @atomic_thread_fence_acquire() nounwind uwtable {
entry:
  fence acquire
  ret void
}
; CHECK: atomic_thread_fence_acquire
; CHECK: call void @__tsan_atomic_thread_fence(i32 100504)

define void @atomic_signal_fence_release() nounwind uwtable {
entry:
  fence singlethread release
  ret void
}
; CHECK: atomic_signal_fence_release
; CHECK: call void @__tsan_atomic_signal_fence(i32 100508)

define void @atomic_thread_fence_release() nounwind uwtable {
entry:
  fence release
  ret void
}
; CHECK: atomic_thread_fence_release
; CHECK: call void @__tsan_atomic_thread_fence(i32 100508)

define void @atomic_signal_fence_acq_rel() nounwind uwtable {
entry:
  fence singlethread acq_rel
  ret void
}
; CHECK: atomic_signal_fence_acq_rel
; CHECK: call void @__tsan_atomic_signal_fence(i32 100516)

define void @atomic_thread_fence_acq_rel() nounwind uwtable {
entry:
  fence acq_rel
  ret void
}
; CHECK: atomic_thread_fence_acq_rel
; CHECK: call void @__tsan_atomic_thread_fence(i32 100516)

define void @atomic_signal_fence_seq_cst() nounwind uwtable {
entry:
  fence singlethread seq_cst
  ret void
}
; CHECK: atomic_signal_fence_seq_cst
; CHECK: call void @__tsan_atomic_signal_fence(i32 100532)

define void @atomic_thread_fence_seq_cst() nounwind uwtable {
entry:
  fence seq_cst
  ret void
}
; CHECK: atomic_thread_fence_seq_cst
; CHECK: call void @__tsan_atomic_thread_fence(i32 100532)