//===-- tsan_interface_atomic.cc ------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//

// ThreadSanitizer atomic operations are based on C++11/C1x standards.
// For background see C++11 standard. A slightly older, publicly
// available draft of the standard (not entirely up-to-date, but close enough
// for casual browsing) is available here:
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2011/n3242.pdf
// The following page contains more background information:
// http://www.hpl.hp.com/personal/Hans_Boehm/c++mm/

#include "sanitizer_common/sanitizer_placement_new.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "sanitizer_common/sanitizer_mutex.h"
#include "tsan_flags.h"
#include "tsan_rtl.h"

using namespace __tsan;  // NOLINT

// These should match declarations from public tsan_interface_atomic.h header.
typedef unsigned char      a8;
typedef unsigned short     a16;  // NOLINT
typedef unsigned int       a32;
typedef unsigned long long a64;  // NOLINT
#if !defined(SANITIZER_GO) && (defined(__SIZEOF_INT128__) \
    || (__clang_major__ * 100 + __clang_minor__ >= 302)) && !defined(__mips64)
__extension__ typedef __int128 a128;
# define __TSAN_HAS_INT128 1
#else
# define __TSAN_HAS_INT128 0
#endif

#if !defined(SANITIZER_GO) && __TSAN_HAS_INT128
// Protects emulation of 128-bit atomic operations.
static StaticSpinMutex mutex128;
#endif

// Part of ABI, do not change.
// http://llvm.org/viewvc/llvm-project/libcxx/trunk/include/atomic?view=markup
typedef enum {
  mo_relaxed,
  mo_consume,
  mo_acquire,
  mo_release,
  mo_acq_rel,
  mo_seq_cst
} morder;

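// The IsXxxOrder predicates below classify memory orders: the first two
// validate the order argument of loads/stores, the others select the
// acquire/release processing performed by the operations further down.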
static bool IsLoadOrder(morder mo) {
  return mo == mo_relaxed || mo == mo_consume
      || mo == mo_acquire || mo == mo_seq_cst;
}

static bool IsStoreOrder(morder mo) {
  return mo == mo_relaxed || mo == mo_release || mo == mo_seq_cst;
}

static bool IsReleaseOrder(morder mo) {
  return mo == mo_release || mo == mo_acq_rel || mo == mo_seq_cst;
}

static bool IsAcquireOrder(morder mo) {
  return mo == mo_consume || mo == mo_acquire
      || mo == mo_acq_rel || mo == mo_seq_cst;
}

static bool IsAcqRelOrder(morder mo) {
  return mo == mo_acq_rel || mo == mo_seq_cst;
}

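// The func_xxx templates wrap the compiler __sync builtins; they are the
// uninstrumented primitives that AtomicRMW below receives as a template
// parameter.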
template<typename T> T func_xchg(volatile T *v, T op) {
  T res = __sync_lock_test_and_set(v, op);
  // __sync_lock_test_and_set does not contain full barrier.
  __sync_synchronize();
  return res;
}

template<typename T> T func_add(volatile T *v, T op) {
  return __sync_fetch_and_add(v, op);
}

template<typename T> T func_sub(volatile T *v, T op) {
  return __sync_fetch_and_sub(v, op);
}

template<typename T> T func_and(volatile T *v, T op) {
  return __sync_fetch_and_and(v, op);
}

template<typename T> T func_or(volatile T *v, T op) {
  return __sync_fetch_and_or(v, op);
}

template<typename T> T func_xor(volatile T *v, T op) {
  return __sync_fetch_and_xor(v, op);
}

template<typename T> T func_nand(volatile T *v, T op) {
  // clang does not support __sync_fetch_and_nand.
  T cmp = *v;
  for (;;) {
    T newv = ~(cmp & op);
    T cur = __sync_val_compare_and_swap(v, cmp, newv);
    if (cmp == cur)
      return cmp;
    cmp = cur;
  }
}

template<typename T> T func_cas(volatile T *v, T cmp, T xch) {
  return __sync_val_compare_and_swap(v, cmp, xch);
}

// clang does not support 128-bit atomic ops.
// Atomic ops are executed under tsan internal mutex,
// here we assume that the atomic variables are not accessed
// from non-instrumented code.
#if !defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16) && !defined(SANITIZER_GO) \
    && __TSAN_HAS_INT128
a128 func_xchg(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = op;
  return cmp;
}

a128 func_add(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = cmp + op;
  return cmp;
}

a128 func_sub(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = cmp - op;
  return cmp;
}

a128 func_and(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = cmp & op;
  return cmp;
}

a128 func_or(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = cmp | op;
  return cmp;
}

a128 func_xor(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = cmp ^ op;
  return cmp;
}

a128 func_nand(volatile a128 *v, a128 op) {
  SpinMutexLock lock(&mutex128);
  a128 cmp = *v;
  *v = ~(cmp & op);
  return cmp;
}

a128 func_cas(volatile a128 *v, a128 cmp, a128 xch) {
  SpinMutexLock lock(&mutex128);
  a128 cur = *v;
  if (cur == cmp)
    *v = xch;
  return cur;
}
#endif

template<typename T>
static int SizeLog() {
  if (sizeof(T) <= 1)
    return kSizeLog1;
  else if (sizeof(T) <= 2)
    return kSizeLog2;
  else if (sizeof(T) <= 4)
    return kSizeLog4;
  else
    return kSizeLog8;
  // For 16-byte atomics we also use 8-byte memory access,
  // this leads to false negatives only in very obscure cases.
}

#ifndef SANITIZER_GO
static atomic_uint8_t *to_atomic(const volatile a8 *a) {
  return reinterpret_cast<atomic_uint8_t *>(const_cast<a8 *>(a));
}

static atomic_uint16_t *to_atomic(const volatile a16 *a) {
  return reinterpret_cast<atomic_uint16_t *>(const_cast<a16 *>(a));
}
#endif

static atomic_uint32_t *to_atomic(const volatile a32 *a) {
  return reinterpret_cast<atomic_uint32_t *>(const_cast<a32 *>(a));
}

static atomic_uint64_t *to_atomic(const volatile a64 *a) {
  return reinterpret_cast<atomic_uint64_t *>(const_cast<a64 *>(a));
}

static memory_order to_mo(morder mo) {
  switch (mo) {
  case mo_relaxed: return memory_order_relaxed;
  case mo_consume: return memory_order_consume;
  case mo_acquire: return memory_order_acquire;
  case mo_release: return memory_order_release;
  case mo_acq_rel: return memory_order_acq_rel;
  case mo_seq_cst: return memory_order_seq_cst;
  }
  CHECK(0);
  return memory_order_seq_cst;
}

template<typename T>
static T NoTsanAtomicLoad(const volatile T *a, morder mo) {
  return atomic_load(to_atomic(a), to_mo(mo));
}

#if __TSAN_HAS_INT128 && !defined(SANITIZER_GO)
static a128 NoTsanAtomicLoad(const volatile a128 *a, morder mo) {
  SpinMutexLock lock(&mutex128);
  return *a;
}
#endif

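// For acquire loads the SyncVar of the address is held under a read-lock
// while its clock is acquired, so concurrent acquire-loads of the same
// variable do not serialize with each other.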
template<typename T>
static T AtomicLoad(ThreadState *thr, uptr pc, const volatile T *a,
    morder mo) {
  CHECK(IsLoadOrder(mo));
  // This fast-path is critical for performance.
  // Assume the access is atomic.
  if (!IsAcquireOrder(mo)) {
    MemoryReadAtomic(thr, pc, (uptr)a, SizeLog<T>());
    return NoTsanAtomicLoad(a, mo);
  }
  SyncVar *s = ctx->metamap.GetOrCreateAndLock(thr, pc, (uptr)a, false);
  AcquireImpl(thr, pc, &s->clock);
  T v = NoTsanAtomicLoad(a, mo);
  s->mtx.ReadUnlock();
  MemoryReadAtomic(thr, pc, (uptr)a, SizeLog<T>());
  return v;
}

template<typename T>
static void NoTsanAtomicStore(volatile T *a, T v, morder mo) {
  atomic_store(to_atomic(a), v, to_mo(mo));
}

#if __TSAN_HAS_INT128 && !defined(SANITIZER_GO)
static void NoTsanAtomicStore(volatile a128 *a, a128 v, morder mo) {
  SpinMutexLock lock(&mutex128);
  *a = v;
}
#endif

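// A release store must publish the current epoch: the SyncVar is taken
// write-locked, the epoch is advanced and traced, and the thread clock is
// released into s->clock before the store itself is performed.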
275template<typename T>
Kostya Serebryany4ad375f2012-05-10 13:48:04 +0000276static void AtomicStore(ThreadState *thr, uptr pc, volatile T *a, T v,
277 morder mo) {
Dmitry Vyukovbe687832012-10-03 13:00:13 +0000278 CHECK(IsStoreOrder(mo));
Dmitry Vyukov87c6bb92013-02-01 14:41:58 +0000279 MemoryWriteAtomic(thr, pc, (uptr)a, SizeLog<T>());
Dmitry Vyukov10362c42012-11-27 07:25:50 +0000280 // This fast-path is critical for performance.
281 // Assume the access is atomic.
282 // Strictly saying even relaxed store cuts off release sequence,
283 // so must reset the clock.
Dmitry Vyukov16e7a752014-01-24 12:33:35 +0000284 if (!IsReleaseOrder(mo)) {
285 NoTsanAtomicStore(a, v, mo);
Dmitry Vyukov10362c42012-11-27 07:25:50 +0000286 return;
287 }
Dmitry Vyukov4b82b2b2012-12-04 14:50:10 +0000288 __sync_synchronize();
Dmitry Vyukovbde4c9c2014-05-29 13:50:54 +0000289 SyncVar *s = ctx->metamap.GetOrCreateAndLock(thr, pc, (uptr)a, true);
Dmitry Vyukovfbb194f2013-10-10 15:58:12 +0000290 thr->fast_state.IncrementEpoch();
291 // Can't increment epoch w/o writing to the trace as well.
292 TraceAddEvent(thr, thr->fast_state, EventTypeMop, 0);
293 ReleaseImpl(thr, pc, &s->clock);
Dmitry Vyukov16e7a752014-01-24 12:33:35 +0000294 NoTsanAtomicStore(a, v, mo);
Dmitry Vyukov10362c42012-11-27 07:25:50 +0000295 s->mtx.Unlock();
296}
297
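// Generic read-modify-write: F is one of the func_xxx primitives above.
// For non-relaxed orders the SyncVar clock is updated according to the
// acquire/release components of mo before F is applied.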
template<typename T, T (*F)(volatile T *v, T op)>
static T AtomicRMW(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) {
  MemoryWriteAtomic(thr, pc, (uptr)a, SizeLog<T>());
  SyncVar *s = 0;
  if (mo != mo_relaxed) {
    s = ctx->metamap.GetOrCreateAndLock(thr, pc, (uptr)a, true);
    thr->fast_state.IncrementEpoch();
    // Can't increment epoch w/o writing to the trace as well.
    TraceAddEvent(thr, thr->fast_state, EventTypeMop, 0);
    if (IsAcqRelOrder(mo))
      AcquireReleaseImpl(thr, pc, &s->clock);
    else if (IsReleaseOrder(mo))
      ReleaseImpl(thr, pc, &s->clock);
    else if (IsAcquireOrder(mo))
      AcquireImpl(thr, pc, &s->clock);
  }
  v = F(a, v);
  if (s)
    s->mtx.Unlock();
  return v;
}

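// Thin dispatchers: each NoTsanAtomicXxx applies the primitive directly,
// while the corresponding AtomicXxx routes through AtomicRMW with the
// matching func_xxx.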
template<typename T>
static T NoTsanAtomicExchange(volatile T *a, T v, morder mo) {
  return func_xchg(a, v);
}

template<typename T>
static T NoTsanAtomicFetchAdd(volatile T *a, T v, morder mo) {
  return func_add(a, v);
}

template<typename T>
static T NoTsanAtomicFetchSub(volatile T *a, T v, morder mo) {
  return func_sub(a, v);
}

template<typename T>
static T NoTsanAtomicFetchAnd(volatile T *a, T v, morder mo) {
  return func_and(a, v);
}

template<typename T>
static T NoTsanAtomicFetchOr(volatile T *a, T v, morder mo) {
  return func_or(a, v);
}

template<typename T>
static T NoTsanAtomicFetchXor(volatile T *a, T v, morder mo) {
  return func_xor(a, v);
}

template<typename T>
static T NoTsanAtomicFetchNand(volatile T *a, T v, morder mo) {
  return func_nand(a, v);
}

template<typename T>
static T AtomicExchange(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_xchg>(thr, pc, a, v, mo);
}

template<typename T>
static T AtomicFetchAdd(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_add>(thr, pc, a, v, mo);
}

template<typename T>
static T AtomicFetchSub(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_sub>(thr, pc, a, v, mo);
}

template<typename T>
static T AtomicFetchAnd(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_and>(thr, pc, a, v, mo);
}

template<typename T>
static T AtomicFetchOr(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_or>(thr, pc, a, v, mo);
}

template<typename T>
static T AtomicFetchXor(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_xor>(thr, pc, a, v, mo);
}

template<typename T>
static T AtomicFetchNand(ThreadState *thr, uptr pc, volatile T *a, T v,
    morder mo) {
  return AtomicRMW<T, func_nand>(thr, pc, a, v, mo);
}

template<typename T>
static bool NoTsanAtomicCAS(volatile T *a, T *c, T v, morder mo, morder fmo) {
  return atomic_compare_exchange_strong(to_atomic(a), c, v, to_mo(mo));
}

#if __TSAN_HAS_INT128
static bool NoTsanAtomicCAS(volatile a128 *a, a128 *c, a128 v,
    morder mo, morder fmo) {
  a128 old = *c;
  a128 cur = func_cas(a, old, v);
  if (cur == old)
    return true;
  *c = cur;
  return false;
}
#endif

template<typename T>
static T NoTsanAtomicCAS(volatile T *a, T c, T v, morder mo, morder fmo) {
  NoTsanAtomicCAS(a, &c, v, mo, fmo);
  return c;
}

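// CAS takes the SyncVar write-locked only when the operation may release
// (and thus mutate the clock); pure acquire/consume orders only need a
// read-lock.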
template<typename T>
static bool AtomicCAS(ThreadState *thr, uptr pc,
    volatile T *a, T *c, T v, morder mo, morder fmo) {
  (void)fmo;  // Unused because llvm does not pass it yet.
  MemoryWriteAtomic(thr, pc, (uptr)a, SizeLog<T>());
  SyncVar *s = 0;
  bool write_lock = mo != mo_acquire && mo != mo_consume;
  if (mo != mo_relaxed) {
    s = ctx->metamap.GetOrCreateAndLock(thr, pc, (uptr)a, write_lock);
    thr->fast_state.IncrementEpoch();
    // Can't increment epoch w/o writing to the trace as well.
    TraceAddEvent(thr, thr->fast_state, EventTypeMop, 0);
    if (IsAcqRelOrder(mo))
      AcquireReleaseImpl(thr, pc, &s->clock);
    else if (IsReleaseOrder(mo))
      ReleaseImpl(thr, pc, &s->clock);
    else if (IsAcquireOrder(mo))
      AcquireImpl(thr, pc, &s->clock);
  }
  T cc = *c;
  T pr = func_cas(a, cc, v);
  if (s) {
    if (write_lock)
      s->mtx.Unlock();
    else
      s->mtx.ReadUnlock();
  }
  if (pr == cc)
    return true;
  *c = pr;
  return false;
}

template<typename T>
static T AtomicCAS(ThreadState *thr, uptr pc,
    volatile T *a, T c, T v, morder mo, morder fmo) {
  AtomicCAS(thr, pc, a, &c, v, mo, fmo);
  return c;
}

#ifndef SANITIZER_GO
static void NoTsanAtomicFence(morder mo) {
  __sync_synchronize();
}

static void AtomicFence(ThreadState *thr, uptr pc, morder mo) {
  // FIXME(dvyukov): not implemented.
  __sync_synchronize();
}
#endif

// Interface functions follow.
#ifndef SANITIZER_GO

// C/C++

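// Common prologue for the C/C++ entry points below: capture the caller PC,
// optionally force mo to seq_cst (force_seq_cst_atomics flag), bypass
// instrumentation entirely when interceptors are ignored, and otherwise run
// the operation inside a ScopedAtomic scope (FuncEntry/FuncExit plus
// pending signal processing).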
#define SCOPED_ATOMIC(func, ...) \
    const uptr callpc = (uptr)__builtin_return_address(0); \
    uptr pc = StackTrace::GetCurrentPc(); \
    mo = flags()->force_seq_cst_atomics ? (morder)mo_seq_cst : mo; \
    ThreadState *const thr = cur_thread(); \
    if (thr->ignore_interceptors) \
      return NoTsanAtomic##func(__VA_ARGS__); \
    AtomicStatInc(thr, sizeof(*a), mo, StatAtomic##func); \
    ScopedAtomic sa(thr, callpc, a, mo, __func__); \
    return Atomic##func(thr, pc, __VA_ARGS__); \
/**/

class ScopedAtomic {
 public:
  ScopedAtomic(ThreadState *thr, uptr pc, const volatile void *a,
               morder mo, const char *func)
      : thr_(thr) {
    FuncEntry(thr_, pc);
    DPrintf("#%d: %s(%p, %d)\n", thr_->tid, func, a, mo);
  }
  ~ScopedAtomic() {
    ProcessPendingSignals(thr_);
    FuncExit(thr_);
  }
 private:
  ThreadState *thr_;
};

static void AtomicStatInc(ThreadState *thr, uptr size, morder mo, StatType t) {
  StatInc(thr, StatAtomic);
  StatInc(thr, t);
  StatInc(thr, size == 1 ? StatAtomic1
             : size == 2 ? StatAtomic2
             : size == 4 ? StatAtomic4
             : size == 8 ? StatAtomic8
             :             StatAtomic16);
  StatInc(thr, mo == mo_relaxed ? StatAtomicRelaxed
             : mo == mo_consume ? StatAtomicConsume
             : mo == mo_acquire ? StatAtomicAcquire
             : mo == mo_release ? StatAtomicRelease
             : mo == mo_acq_rel ? StatAtomicAcq_Rel
             :                    StatAtomicSeq_Cst);
}

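// The __tsan_atomicNN_xxx entry points below are thin wrappers: each one
// expands SCOPED_ATOMIC, which dispatches to the matching AtomicXxx
// implementation above.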
extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_load(const volatile a8 *a, morder mo) {
  SCOPED_ATOMIC(Load, a, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_load(const volatile a16 *a, morder mo) {
  SCOPED_ATOMIC(Load, a, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_load(const volatile a32 *a, morder mo) {
  SCOPED_ATOMIC(Load, a, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_load(const volatile a64 *a, morder mo) {
  SCOPED_ATOMIC(Load, a, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_load(const volatile a128 *a, morder mo) {
  SCOPED_ATOMIC(Load, a, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic8_store(volatile a8 *a, a8 v, morder mo) {
  SCOPED_ATOMIC(Store, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic16_store(volatile a16 *a, a16 v, morder mo) {
  SCOPED_ATOMIC(Store, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic32_store(volatile a32 *a, a32 v, morder mo) {
  SCOPED_ATOMIC(Store, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic64_store(volatile a64 *a, a64 v, morder mo) {
  SCOPED_ATOMIC(Store, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic128_store(volatile a128 *a, a128 v, morder mo) {
  SCOPED_ATOMIC(Store, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_exchange(volatile a8 *a, a8 v, morder mo) {
  SCOPED_ATOMIC(Exchange, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_exchange(volatile a16 *a, a16 v, morder mo) {
  SCOPED_ATOMIC(Exchange, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_exchange(volatile a32 *a, a32 v, morder mo) {
  SCOPED_ATOMIC(Exchange, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_exchange(volatile a64 *a, a64 v, morder mo) {
  SCOPED_ATOMIC(Exchange, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_exchange(volatile a128 *a, a128 v, morder mo) {
  SCOPED_ATOMIC(Exchange, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_fetch_add(volatile a8 *a, a8 v, morder mo) {
  SCOPED_ATOMIC(FetchAdd, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_fetch_add(volatile a16 *a, a16 v, morder mo) {
  SCOPED_ATOMIC(FetchAdd, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_fetch_add(volatile a32 *a, a32 v, morder mo) {
  SCOPED_ATOMIC(FetchAdd, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_fetch_add(volatile a64 *a, a64 v, morder mo) {
  SCOPED_ATOMIC(FetchAdd, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_fetch_add(volatile a128 *a, a128 v, morder mo) {
  SCOPED_ATOMIC(FetchAdd, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_fetch_sub(volatile a8 *a, a8 v, morder mo) {
  SCOPED_ATOMIC(FetchSub, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_fetch_sub(volatile a16 *a, a16 v, morder mo) {
  SCOPED_ATOMIC(FetchSub, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_fetch_sub(volatile a32 *a, a32 v, morder mo) {
  SCOPED_ATOMIC(FetchSub, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_fetch_sub(volatile a64 *a, a64 v, morder mo) {
  SCOPED_ATOMIC(FetchSub, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_fetch_sub(volatile a128 *a, a128 v, morder mo) {
  SCOPED_ATOMIC(FetchSub, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_fetch_and(volatile a8 *a, a8 v, morder mo) {
  SCOPED_ATOMIC(FetchAnd, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_fetch_and(volatile a16 *a, a16 v, morder mo) {
  SCOPED_ATOMIC(FetchAnd, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_fetch_and(volatile a32 *a, a32 v, morder mo) {
  SCOPED_ATOMIC(FetchAnd, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_fetch_and(volatile a64 *a, a64 v, morder mo) {
  SCOPED_ATOMIC(FetchAnd, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_fetch_and(volatile a128 *a, a128 v, morder mo) {
  SCOPED_ATOMIC(FetchAnd, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_fetch_or(volatile a8 *a, a8 v, morder mo) {
  SCOPED_ATOMIC(FetchOr, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_fetch_or(volatile a16 *a, a16 v, morder mo) {
  SCOPED_ATOMIC(FetchOr, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_fetch_or(volatile a32 *a, a32 v, morder mo) {
  SCOPED_ATOMIC(FetchOr, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_fetch_or(volatile a64 *a, a64 v, morder mo) {
  SCOPED_ATOMIC(FetchOr, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_fetch_or(volatile a128 *a, a128 v, morder mo) {
  SCOPED_ATOMIC(FetchOr, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_fetch_xor(volatile a8 *a, a8 v, morder mo) {
  SCOPED_ATOMIC(FetchXor, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_fetch_xor(volatile a16 *a, a16 v, morder mo) {
  SCOPED_ATOMIC(FetchXor, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_fetch_xor(volatile a32 *a, a32 v, morder mo) {
  SCOPED_ATOMIC(FetchXor, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_fetch_xor(volatile a64 *a, a64 v, morder mo) {
  SCOPED_ATOMIC(FetchXor, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_fetch_xor(volatile a128 *a, a128 v, morder mo) {
  SCOPED_ATOMIC(FetchXor, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_fetch_nand(volatile a8 *a, a8 v, morder mo) {
  SCOPED_ATOMIC(FetchNand, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_fetch_nand(volatile a16 *a, a16 v, morder mo) {
  SCOPED_ATOMIC(FetchNand, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_fetch_nand(volatile a32 *a, a32 v, morder mo) {
  SCOPED_ATOMIC(FetchNand, a, v, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_fetch_nand(volatile a64 *a, a64 v, morder mo) {
  SCOPED_ATOMIC(FetchNand, a, v, mo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_fetch_nand(volatile a128 *a, a128 v, morder mo) {
  SCOPED_ATOMIC(FetchNand, a, v, mo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic8_compare_exchange_strong(volatile a8 *a, a8 *c, a8 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic16_compare_exchange_strong(volatile a16 *a, a16 *c, a16 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic32_compare_exchange_strong(volatile a32 *a, a32 *c, a32 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic64_compare_exchange_strong(volatile a64 *a, a64 *c, a64 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic128_compare_exchange_strong(volatile a128 *a, a128 *c, a128 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic8_compare_exchange_weak(volatile a8 *a, a8 *c, a8 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic16_compare_exchange_weak(volatile a16 *a, a16 *c, a16 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic32_compare_exchange_weak(volatile a32 *a, a32 *c, a32 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic64_compare_exchange_weak(volatile a64 *a, a64 *c, a64 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
int __tsan_atomic128_compare_exchange_weak(volatile a128 *a, a128 *c, a128 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
a8 __tsan_atomic8_compare_exchange_val(volatile a8 *a, a8 c, a8 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a16 __tsan_atomic16_compare_exchange_val(volatile a16 *a, a16 c, a16 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a32 __tsan_atomic32_compare_exchange_val(volatile a32 *a, a32 c, a32 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}

SANITIZER_INTERFACE_ATTRIBUTE
a64 __tsan_atomic64_compare_exchange_val(volatile a64 *a, a64 c, a64 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}

#if __TSAN_HAS_INT128
SANITIZER_INTERFACE_ATTRIBUTE
a128 __tsan_atomic128_compare_exchange_val(volatile a128 *a, a128 c, a128 v,
    morder mo, morder fmo) {
  SCOPED_ATOMIC(CAS, a, c, v, mo, fmo);
}
#endif

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic_thread_fence(morder mo) {
  char* a = 0;
  SCOPED_ATOMIC(Fence, mo);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_atomic_signal_fence(morder mo) {
}
}  // extern "C"

#else  // #ifndef SANITIZER_GO

// Go

#define ATOMIC(func, ...) \
    if (thr->ignore_sync) { \
      NoTsanAtomic##func(__VA_ARGS__); \
    } else { \
      FuncEntry(thr, cpc); \
      Atomic##func(thr, pc, __VA_ARGS__); \
      FuncExit(thr); \
    } \
/**/

#define ATOMIC_RET(func, ret, ...) \
    if (thr->ignore_sync) { \
      (ret) = NoTsanAtomic##func(__VA_ARGS__); \
    } else { \
      FuncEntry(thr, cpc); \
      (ret) = Atomic##func(thr, pc, __VA_ARGS__); \
      FuncExit(thr); \
    } \
/**/

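// The Go runtime passes a packed argument block in a: the address of the
// atomic variable at offset 0, operands after it, and the result slot last
// (e.g. __tsan_go_atomic32_load stores the loaded value back at a+8).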
extern "C" {
SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic32_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(Load, *(a32*)(a+8), *(a32**)a, mo_acquire);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic64_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(Load, *(a64*)(a+8), *(a64**)a, mo_acquire);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic32_store(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC(Store, *(a32**)a, *(a32*)(a+8), mo_release);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic64_store(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC(Store, *(a64**)a, *(a64*)(a+8), mo_release);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic32_fetch_add(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(FetchAdd, *(a32*)(a+16), *(a32**)a, *(a32*)(a+8), mo_acq_rel);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic64_fetch_add(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(FetchAdd, *(a64*)(a+16), *(a64**)a, *(a64*)(a+8), mo_acq_rel);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic32_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(Exchange, *(a32*)(a+16), *(a32**)a, *(a32*)(a+8), mo_acq_rel);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic64_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  ATOMIC_RET(Exchange, *(a64*)(a+16), *(a64**)a, *(a64*)(a+8), mo_acq_rel);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic32_compare_exchange(
    ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  a32 cur = 0;
  a32 cmp = *(a32*)(a+8);
  ATOMIC_RET(CAS, cur, *(a32**)a, cmp, *(a32*)(a+12), mo_acq_rel, mo_acquire);
  *(bool*)(a+16) = (cur == cmp);
}

SANITIZER_INTERFACE_ATTRIBUTE
void __tsan_go_atomic64_compare_exchange(
    ThreadState *thr, uptr cpc, uptr pc, u8 *a) {
  a64 cur = 0;
  a64 cmp = *(a64*)(a+8);
  ATOMIC_RET(CAS, cur, *(a64**)a, cmp, *(a64*)(a+16), mo_acq_rel, mo_acquire);
  *(bool*)(a+24) = (cur == cmp);
}
}  // extern "C"
#endif  // #ifndef SANITIZER_GO