; RUN: opt < %s -tsan -S | FileCheck %s
; Check that atomic memory operations are converted to calls into ThreadSanitizer runtime.
target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"

; 8-bit atomic loads/stores must lower to __tsan_atomic8_load/__tsan_atomic8_store.
; The final i32 argument encodes the memory order (1=relaxed, 4=acquire, 8=release, 32=seq_cst).
define i8 @atomic8_load_unordered(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a unordered, align 100501
  ret i8 %0
}
; CHECK: atomic8_load_unordered
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 1)

define i8 @atomic8_load_monotonic(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a monotonic, align 100501
  ret i8 %0
}
; CHECK: atomic8_load_monotonic
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 1)

define i8 @atomic8_load_acquire(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a acquire, align 100501
  ret i8 %0
}
; CHECK: atomic8_load_acquire
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 4)

define i8 @atomic8_load_seq_cst(i8* %a) nounwind uwtable {
entry:
  %0 = load atomic i8* %a seq_cst, align 100501
  ret i8 %0
}
; CHECK: atomic8_load_seq_cst
; CHECK: call i8 @__tsan_atomic8_load(i8* %a, i32 32)

define void @atomic8_store_unordered(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a unordered, align 100501
  ret void
}
; CHECK: atomic8_store_unordered
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 1)

define void @atomic8_store_monotonic(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a monotonic, align 100501
  ret void
}
; CHECK: atomic8_store_monotonic
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 1)

define void @atomic8_store_release(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a release, align 100501
  ret void
}
; CHECK: atomic8_store_release
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 8)

define void @atomic8_store_seq_cst(i8* %a) nounwind uwtable {
entry:
  store atomic i8 0, i8* %a seq_cst, align 100501
  ret void
}
; CHECK: atomic8_store_seq_cst
; CHECK: call void @__tsan_atomic8_store(i8* %a, i8 0, i32 32)

; 16-bit atomic loads/stores must lower to __tsan_atomic16_load/__tsan_atomic16_store.
; The final i32 argument encodes the memory order (1=relaxed, 4=acquire, 8=release, 32=seq_cst).
define i16 @atomic16_load_unordered(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a unordered, align 100502
  ret i16 %0
}
; CHECK: atomic16_load_unordered
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 1)

define i16 @atomic16_load_monotonic(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a monotonic, align 100502
  ret i16 %0
}
; CHECK: atomic16_load_monotonic
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 1)

define i16 @atomic16_load_acquire(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a acquire, align 100502
  ret i16 %0
}
; CHECK: atomic16_load_acquire
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 4)

define i16 @atomic16_load_seq_cst(i16* %a) nounwind uwtable {
entry:
  %0 = load atomic i16* %a seq_cst, align 100502
  ret i16 %0
}
; CHECK: atomic16_load_seq_cst
; CHECK: call i16 @__tsan_atomic16_load(i16* %a, i32 32)

define void @atomic16_store_unordered(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a unordered, align 100502
  ret void
}
; CHECK: atomic16_store_unordered
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 1)

define void @atomic16_store_monotonic(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a monotonic, align 100502
  ret void
}
; CHECK: atomic16_store_monotonic
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 1)

define void @atomic16_store_release(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a release, align 100502
  ret void
}
; CHECK: atomic16_store_release
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 8)

define void @atomic16_store_seq_cst(i16* %a) nounwind uwtable {
entry:
  store atomic i16 0, i16* %a seq_cst, align 100502
  ret void
}
; CHECK: atomic16_store_seq_cst
; CHECK: call void @__tsan_atomic16_store(i16* %a, i16 0, i32 32)

; 32-bit atomic loads/stores must lower to __tsan_atomic32_load/__tsan_atomic32_store.
; The final i32 argument encodes the memory order (1=relaxed, 4=acquire, 8=release, 32=seq_cst).
define i32 @atomic32_load_unordered(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a unordered, align 100504
  ret i32 %0
}
; CHECK: atomic32_load_unordered
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 1)

define i32 @atomic32_load_monotonic(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a monotonic, align 100504
  ret i32 %0
}
; CHECK: atomic32_load_monotonic
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 1)

define i32 @atomic32_load_acquire(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a acquire, align 100504
  ret i32 %0
}
; CHECK: atomic32_load_acquire
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 4)

define i32 @atomic32_load_seq_cst(i32* %a) nounwind uwtable {
entry:
  %0 = load atomic i32* %a seq_cst, align 100504
  ret i32 %0
}
; CHECK: atomic32_load_seq_cst
; CHECK: call i32 @__tsan_atomic32_load(i32* %a, i32 32)

define void @atomic32_store_unordered(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a unordered, align 100504
  ret void
}
; CHECK: atomic32_store_unordered
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 1)

define void @atomic32_store_monotonic(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a monotonic, align 100504
  ret void
}
; CHECK: atomic32_store_monotonic
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 1)

define void @atomic32_store_release(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a release, align 100504
  ret void
}
; CHECK: atomic32_store_release
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 8)

define void @atomic32_store_seq_cst(i32* %a) nounwind uwtable {
entry:
  store atomic i32 0, i32* %a seq_cst, align 100504
  ret void
}
; CHECK: atomic32_store_seq_cst
; CHECK: call void @__tsan_atomic32_store(i32* %a, i32 0, i32 32)

; 64-bit atomic loads/stores must lower to __tsan_atomic64_load/__tsan_atomic64_store.
; The final i32 argument encodes the memory order (1=relaxed, 4=acquire, 8=release, 32=seq_cst).
define i64 @atomic64_load_unordered(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a unordered, align 100508
  ret i64 %0
}
; CHECK: atomic64_load_unordered
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 1)

define i64 @atomic64_load_monotonic(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a monotonic, align 100508
  ret i64 %0
}
; CHECK: atomic64_load_monotonic
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 1)

define i64 @atomic64_load_acquire(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a acquire, align 100508
  ret i64 %0
}
; CHECK: atomic64_load_acquire
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 4)

define i64 @atomic64_load_seq_cst(i64* %a) nounwind uwtable {
entry:
  %0 = load atomic i64* %a seq_cst, align 100508
  ret i64 %0
}
; CHECK: atomic64_load_seq_cst
; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 32)

define void @atomic64_store_unordered(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a unordered, align 100508
  ret void
}
; CHECK: atomic64_store_unordered
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 1)

define void @atomic64_store_monotonic(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a monotonic, align 100508
  ret void
}
; CHECK: atomic64_store_monotonic
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 1)

define void @atomic64_store_release(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a release, align 100508
  ret void
}
; CHECK: atomic64_store_release
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 8)

define void @atomic64_store_seq_cst(i64* %a) nounwind uwtable {
entry:
  store atomic i64 0, i64* %a seq_cst, align 100508
  ret void
}
; CHECK: atomic64_store_seq_cst
; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 32)

; 128-bit atomic loads/stores must lower to __tsan_atomic128_load/__tsan_atomic128_store.
; The final i32 argument encodes the memory order (1=relaxed, 4=acquire, 8=release, 32=seq_cst).
define i128 @atomic128_load_unordered(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a unordered, align 100516
  ret i128 %0
}
; CHECK: atomic128_load_unordered
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 1)

define i128 @atomic128_load_monotonic(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a monotonic, align 100516
  ret i128 %0
}
; CHECK: atomic128_load_monotonic
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 1)

define i128 @atomic128_load_acquire(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a acquire, align 100516
  ret i128 %0
}
; CHECK: atomic128_load_acquire
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 4)

define i128 @atomic128_load_seq_cst(i128* %a) nounwind uwtable {
entry:
  %0 = load atomic i128* %a seq_cst, align 100516
  ret i128 %0
}
; CHECK: atomic128_load_seq_cst
; CHECK: call i128 @__tsan_atomic128_load(i128* %a, i32 32)

define void @atomic128_store_unordered(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a unordered, align 100516
  ret void
}
; CHECK: atomic128_store_unordered
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 1)

define void @atomic128_store_monotonic(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a monotonic, align 100516
  ret void
}
; CHECK: atomic128_store_monotonic
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 1)

define void @atomic128_store_release(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a release, align 100516
  ret void
}
; CHECK: atomic128_store_release
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 8)

define void @atomic128_store_seq_cst(i128* %a) nounwind uwtable {
entry:
  store atomic i128 0, i128* %a seq_cst, align 100516
  ret void
}
; CHECK: atomic128_store_seq_cst
; CHECK: call void @__tsan_atomic128_store(i128* %a, i128 0, i32 32)