; RUN: opt < %s -tsan -S | FileCheck %s
; Check that atomic memory operations are converted to calls into ThreadSanitizer runtime.
target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
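; Note on the ordering argument: the trailing i32 constant in each
; __tsan_atomic*_load/__tsan_atomic*_store call checked below appears to encode
; the memory order as 100500 + (1 << order) -- an assumption inferred from the
; CHECK lines in this file: unordered/monotonic -> 100501, acquire -> 100504,
; release -> 100508, seq_cst -> 100532.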

define i8 @atomic8_load_unordered(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a unordered, align 1
  ret i8 %0
}
; CHECK: atomic8_load_unordered
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 100501)

define i8 @atomic8_load_monotonic(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a monotonic, align 1
  ret i8 %0
}
; CHECK: atomic8_load_monotonic
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 100501)

define i8 @atomic8_load_acquire(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a acquire, align 1
  ret i8 %0
}
; CHECK: atomic8_load_acquire
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 100504)

define i8 @atomic8_load_seq_cst(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a seq_cst, align 1
  ret i8 %0
}
; CHECK: atomic8_load_seq_cst
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 100532)

define void @atomic8_store_unordered(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a unordered, align 1
  ret void
}
; CHECK: atomic8_store_unordered
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 100501)

define void @atomic8_store_monotonic(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a monotonic, align 1
  ret void
}
; CHECK: atomic8_store_monotonic
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 100501)

define void @atomic8_store_release(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a release, align 1
  ret void
}
; CHECK: atomic8_store_release
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 100508)

define void @atomic8_store_seq_cst(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a seq_cst, align 1
  ret void
}
; CHECK: atomic8_store_seq_cst
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 100532)

define i16 @atomic16_load_unordered(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a unordered, align 2
  ret i16 %0
}
; CHECK: atomic16_load_unordered
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 100501)

define i16 @atomic16_load_monotonic(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a monotonic, align 2
  ret i16 %0
}
; CHECK: atomic16_load_monotonic
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 100501)

define i16 @atomic16_load_acquire(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a acquire, align 2
  ret i16 %0
}
; CHECK: atomic16_load_acquire
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 100504)

define i16 @atomic16_load_seq_cst(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a seq_cst, align 2
  ret i16 %0
}
; CHECK: atomic16_load_seq_cst
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 100532)

define void @atomic16_store_unordered(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a unordered, align 2
  ret void
}
; CHECK: atomic16_store_unordered
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 100501)

define void @atomic16_store_monotonic(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a monotonic, align 2
  ret void
}
; CHECK: atomic16_store_monotonic
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 100501)

define void @atomic16_store_release(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a release, align 2
  ret void
}
; CHECK: atomic16_store_release
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 100508)

define void @atomic16_store_seq_cst(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a seq_cst, align 2
  ret void
}
; CHECK: atomic16_store_seq_cst
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 100532)

define i32 @atomic32_load_unordered(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a unordered, align 4
  ret i32 %0
}
; CHECK: atomic32_load_unordered
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 100501)

define i32 @atomic32_load_monotonic(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a monotonic, align 4
  ret i32 %0
}
; CHECK: atomic32_load_monotonic
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 100501)

define i32 @atomic32_load_acquire(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a acquire, align 4
  ret i32 %0
}
; CHECK: atomic32_load_acquire
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 100504)

define i32 @atomic32_load_seq_cst(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a seq_cst, align 4
  ret i32 %0
}
; CHECK: atomic32_load_seq_cst
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 100532)

define void @atomic32_store_unordered(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a unordered, align 4
  ret void
}
; CHECK: atomic32_store_unordered
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 100501)

define void @atomic32_store_monotonic(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a monotonic, align 4
  ret void
}
; CHECK: atomic32_store_monotonic
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 100501)

define void @atomic32_store_release(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a release, align 4
  ret void
}
; CHECK: atomic32_store_release
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 100508)

define void @atomic32_store_seq_cst(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a seq_cst, align 4
  ret void
}
; CHECK: atomic32_store_seq_cst
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 100532)

define i64 @atomic64_load_unordered(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a unordered, align 8
  ret i64 %0
}
; CHECK: atomic64_load_unordered
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 100501)

define i64 @atomic64_load_monotonic(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a monotonic, align 8
  ret i64 %0
}
; CHECK: atomic64_load_monotonic
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 100501)

define i64 @atomic64_load_acquire(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a acquire, align 8
  ret i64 %0
}
; CHECK: atomic64_load_acquire
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 100504)

define i64 @atomic64_load_seq_cst(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a seq_cst, align 8
  ret i64 %0
}
; CHECK: atomic64_load_seq_cst
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 100532)

define void @atomic64_store_unordered(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a unordered, align 8
  ret void
}
; CHECK: atomic64_store_unordered
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 100501)

define void @atomic64_store_monotonic(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a monotonic, align 8
  ret void
}
; CHECK: atomic64_store_monotonic
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 100501)

define void @atomic64_store_release(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a release, align 8
  ret void
}
; CHECK: atomic64_store_release
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 100508)

define void @atomic64_store_seq_cst(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a seq_cst, align 8
  ret void
}
; CHECK: atomic64_store_seq_cst
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 100532)

define i128 @atomic128_load_unordered(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a unordered, align 16
  ret i128 %0
}
; CHECK: atomic128_load_unordered
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 100501)

define i128 @atomic128_load_monotonic(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a monotonic, align 16
  ret i128 %0
}
; CHECK: atomic128_load_monotonic
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 100501)

define i128 @atomic128_load_acquire(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a acquire, align 16
  ret i128 %0
}
; CHECK: atomic128_load_acquire
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 100504)

define i128 @atomic128_load_seq_cst(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a seq_cst, align 16
  ret i128 %0
}
; CHECK: atomic128_load_seq_cst
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 100532)

define void @atomic128_store_unordered(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a unordered, align 16
  ret void
}
; CHECK: atomic128_store_unordered
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 100501)

define void @atomic128_store_monotonic(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a monotonic, align 16
  ret void
}
; CHECK: atomic128_store_monotonic
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 100501)

define void @atomic128_store_release(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a release, align 16
  ret void
}
; CHECK: atomic128_store_release
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 100508)

define void @atomic128_store_seq_cst(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a seq_cst, align 16
  ret void
}
; CHECK: atomic128_store_seq_cst
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 100532)