// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.


// This file is an internal atomic implementation for compiler-based
// ThreadSanitizer. Use base/atomicops.h instead.

#ifndef V8_BASE_ATOMICOPS_INTERNALS_TSAN_H_
#define V8_BASE_ATOMICOPS_INTERNALS_TSAN_H_

namespace v8 {
namespace base {

#ifndef TSAN_INTERFACE_ATOMIC_H
#define TSAN_INTERFACE_ATOMIC_H


extern "C" {
typedef char __tsan_atomic8;
typedef short __tsan_atomic16;  // NOLINT
typedef int __tsan_atomic32;
typedef long __tsan_atomic64;  // NOLINT

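// __tsan_atomic128 is only a genuine 128-bit type when the compiler provides
// __int128; otherwise it is a one-byte placeholder, and the 128-bit entry
// points declared below are not used by this file.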
#if defined(__SIZEOF_INT128__) \
    || (__clang_major__ * 100 + __clang_minor__ >= 302)
typedef __int128 __tsan_atomic128;
#define __TSAN_HAS_INT128 1
#else
typedef char __tsan_atomic128;
#define __TSAN_HAS_INT128 0
#endif

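// These orderings correspond one-to-one to the C++11 std::memory_order
// enumerators (relaxed, consume, acquire, release, acq_rel, seq_cst).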
typedef enum {
  __tsan_memory_order_relaxed,
  __tsan_memory_order_consume,
  __tsan_memory_order_acquire,
  __tsan_memory_order_release,
  __tsan_memory_order_acq_rel,
  __tsan_memory_order_seq_cst,
} __tsan_memory_order;

__tsan_atomic8 __tsan_atomic8_load(const volatile __tsan_atomic8* a,
    __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_load(const volatile __tsan_atomic16* a,
    __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_load(const volatile __tsan_atomic32* a,
    __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_load(const volatile __tsan_atomic64* a,
    __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_load(const volatile __tsan_atomic128* a,
    __tsan_memory_order mo);

void __tsan_atomic8_store(volatile __tsan_atomic8* a, __tsan_atomic8 v,
    __tsan_memory_order mo);
void __tsan_atomic16_store(volatile __tsan_atomic16* a, __tsan_atomic16 v,
    __tsan_memory_order mo);
void __tsan_atomic32_store(volatile __tsan_atomic32* a, __tsan_atomic32 v,
    __tsan_memory_order mo);
void __tsan_atomic64_store(volatile __tsan_atomic64* a, __tsan_atomic64 v,
    __tsan_memory_order mo);
void __tsan_atomic128_store(volatile __tsan_atomic128* a, __tsan_atomic128 v,
    __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_exchange(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_exchange(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_exchange(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_exchange(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_exchange(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_add(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_add(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_add(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_add(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_add(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_and(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_and(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_and(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_and(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_and(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_or(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_or(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_or(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_or(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_or(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_xor(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_xor(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_xor(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_xor(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_xor(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

__tsan_atomic8 __tsan_atomic8_fetch_nand(volatile __tsan_atomic8* a,
    __tsan_atomic8 v, __tsan_memory_order mo);
__tsan_atomic16 __tsan_atomic16_fetch_nand(volatile __tsan_atomic16* a,
    __tsan_atomic16 v, __tsan_memory_order mo);
__tsan_atomic32 __tsan_atomic32_fetch_nand(volatile __tsan_atomic32* a,
    __tsan_atomic32 v, __tsan_memory_order mo);
__tsan_atomic64 __tsan_atomic64_fetch_nand(volatile __tsan_atomic64* a,
    __tsan_atomic64 v, __tsan_memory_order mo);
__tsan_atomic128 __tsan_atomic128_fetch_nand(volatile __tsan_atomic128* a,
    __tsan_atomic128 v, __tsan_memory_order mo);

int __tsan_atomic8_compare_exchange_weak(volatile __tsan_atomic8* a,
    __tsan_atomic8* c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_weak(volatile __tsan_atomic16* a,
    __tsan_atomic16* c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_weak(volatile __tsan_atomic32* a,
    __tsan_atomic32* c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_weak(volatile __tsan_atomic64* a,
    __tsan_atomic64* c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic128_compare_exchange_weak(volatile __tsan_atomic128* a,
    __tsan_atomic128* c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);

int __tsan_atomic8_compare_exchange_strong(volatile __tsan_atomic8* a,
    __tsan_atomic8* c, __tsan_atomic8 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic16_compare_exchange_strong(volatile __tsan_atomic16* a,
    __tsan_atomic16* c, __tsan_atomic16 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic32_compare_exchange_strong(volatile __tsan_atomic32* a,
    __tsan_atomic32* c, __tsan_atomic32 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic64_compare_exchange_strong(volatile __tsan_atomic64* a,
    __tsan_atomic64* c, __tsan_atomic64 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);
int __tsan_atomic128_compare_exchange_strong(volatile __tsan_atomic128* a,
    __tsan_atomic128* c, __tsan_atomic128 v, __tsan_memory_order mo,
    __tsan_memory_order fail_mo);

__tsan_atomic8 __tsan_atomic8_compare_exchange_val(
    volatile __tsan_atomic8* a, __tsan_atomic8 c, __tsan_atomic8 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic16 __tsan_atomic16_compare_exchange_val(
    volatile __tsan_atomic16* a, __tsan_atomic16 c, __tsan_atomic16 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic32 __tsan_atomic32_compare_exchange_val(
    volatile __tsan_atomic32* a, __tsan_atomic32 c, __tsan_atomic32 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic64 __tsan_atomic64_compare_exchange_val(
    volatile __tsan_atomic64* a, __tsan_atomic64 c, __tsan_atomic64 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);
__tsan_atomic128 __tsan_atomic128_compare_exchange_val(
    volatile __tsan_atomic128* a, __tsan_atomic128 c, __tsan_atomic128 v,
    __tsan_memory_order mo, __tsan_memory_order fail_mo);

void __tsan_atomic_thread_fence(__tsan_memory_order mo);
void __tsan_atomic_signal_fence(__tsan_memory_order mo);
}  // extern "C"

#endif  // #ifndef TSAN_INTERFACE_ATOMIC_H

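// The wrappers below implement the atomicops primitives in terms of the
// __tsan_* builtins so that ThreadSanitizer observes every atomic access.
// Each CompareAndSwap returns the value observed in |*ptr|: |old_value| when
// the swap succeeds, or the conflicting current value when it fails.
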
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
  return cmp;
}

inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                         Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_relaxed);
}

inline Atomic32 Acquire_AtomicExchange(volatile Atomic32* ptr,
                                       Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_acquire);
}

inline Atomic32 Release_AtomicExchange(volatile Atomic32* ptr,
                                       Atomic32 new_value) {
  return __tsan_atomic32_exchange(ptr, new_value,
      __tsan_memory_order_release);
}

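// __tsan_atomic32_fetch_add returns the value the word held before the
// addition, so adding |increment| back yields the post-increment value
// that AtomicIncrement is expected to return.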
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                          Atomic32 increment) {
  return increment + __tsan_atomic32_fetch_add(ptr, increment,
      __tsan_memory_order_relaxed);
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  return increment + __tsan_atomic32_fetch_add(ptr, increment,
      __tsan_memory_order_acq_rel);
}

inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_acquire, __tsan_memory_order_acquire);
  return cmp;
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 cmp = old_value;
  __tsan_atomic32_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_release, __tsan_memory_order_relaxed);
  return cmp;
}

inline void NoBarrier_Store(volatile Atomic8* ptr, Atomic8 value) {
  __tsan_atomic8_store(ptr, value, __tsan_memory_order_relaxed);
}

inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
}

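// Acquire_Store pairs a relaxed store with a trailing full fence, and
// Release_Load (below) pairs a leading full fence with a relaxed load;
// this preserves the legacy atomicops contract for those two operations.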
inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_relaxed);
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  __tsan_atomic32_store(ptr, value, __tsan_memory_order_release);
}

inline Atomic8 NoBarrier_Load(volatile const Atomic8* ptr) {
  return __tsan_atomic8_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  return __tsan_atomic32_load(ptr, __tsan_memory_order_acquire);
}

inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
  return __tsan_atomic32_load(ptr, __tsan_memory_order_relaxed);
}
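
// 64-bit counterparts of the 32-bit operations above.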

inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_relaxed, __tsan_memory_order_relaxed);
  return cmp;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
                                         Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_AtomicExchange(volatile Atomic64* ptr,
                                       Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_acquire);
}

inline Atomic64 Release_AtomicExchange(volatile Atomic64* ptr,
                                       Atomic64 new_value) {
  return __tsan_atomic64_exchange(ptr, new_value, __tsan_memory_order_release);
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                          Atomic64 increment) {
  return increment + __tsan_atomic64_fetch_add(ptr, increment,
      __tsan_memory_order_relaxed);
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
                                        Atomic64 increment) {
  return increment + __tsan_atomic64_fetch_add(ptr, increment,
      __tsan_memory_order_acq_rel);
}

inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
}

inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_relaxed);
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}

inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
  __tsan_atomic64_store(ptr, value, __tsan_memory_order_release);
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  return __tsan_atomic64_load(ptr, __tsan_memory_order_acquire);
}

inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
  return __tsan_atomic64_load(ptr, __tsan_memory_order_relaxed);
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_acquire, __tsan_memory_order_acquire);
  return cmp;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 cmp = old_value;
  __tsan_atomic64_compare_exchange_strong(ptr, &cmp, new_value,
      __tsan_memory_order_release, __tsan_memory_order_relaxed);
  return cmp;
}

inline void MemoryBarrier() {
  __tsan_atomic_thread_fence(__tsan_memory_order_seq_cst);
}
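
// Usage sketch (hypothetical names; callers should reach these operations
// through base/atomicops.h rather than including this header directly):
//
//   Atomic32 g_flag = 0;               // shared between two threads
//   Release_Store(&g_flag, 1);         // writer: publish
//   if (Acquire_Load(&g_flag) == 1) {  // reader: observe
//     // Writes made before the Release_Store are visible here.
//   }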

}  // namespace base
}  // namespace v8

#endif  // V8_BASE_ATOMICOPS_INTERNALS_TSAN_H_