blob: 6d94056d8c681c3c3dd619c218162026c84072f7 [file] [log] [blame]
Dmitry Vyukove8cee122012-06-29 18:00:38 +00001//===-- sanitizer_atomic_msvc.h ---------------------------------*- C++ -*-===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
11// Not intended for direct inclusion. Include sanitizer_atomic.h.
12//
13//===----------------------------------------------------------------------===//
14
15#ifndef SANITIZER_ATOMIC_MSVC_H
16#define SANITIZER_ATOMIC_MSVC_H
17
Dmitry Vyukov3c5c9e72012-06-29 18:37:45 +000018extern "C" void _ReadWriteBarrier();
Dmitry Vyukovb379fe52012-06-29 18:28:02 +000019#pragma intrinsic(_ReadWriteBarrier)
Dmitry Vyukov3c5c9e72012-06-29 18:37:45 +000020extern "C" void _mm_mfence();
Dmitry Vyukovb379fe52012-06-29 18:28:02 +000021#pragma intrinsic(_mm_mfence)
Dmitry Vyukov3c5c9e72012-06-29 18:37:45 +000022extern "C" void _mm_pause();
Dmitry Vyukovb379fe52012-06-29 18:28:02 +000023#pragma intrinsic(_mm_pause)
Peter Collingbourned3b99172015-07-02 01:44:34 +000024extern "C" char _InterlockedExchange8( // NOLINT
25 char volatile *Addend, char Value); // NOLINT
26#pragma intrinsic(_InterlockedExchange8)
27extern "C" short _InterlockedExchange16( // NOLINT
28 short volatile *Addend, short Value); // NOLINT
29#pragma intrinsic(_InterlockedExchange16)
30extern "C" long _InterlockedExchange( // NOLINT
31 long volatile *Addend, long Value); // NOLINT
32#pragma intrinsic(_InterlockedExchange)
Dmitry Vyukov3c5c9e72012-06-29 18:37:45 +000033extern "C" long _InterlockedExchangeAdd( // NOLINT
34 long volatile * Addend, long Value); // NOLINT
Dmitry Vyukovb379fe52012-06-29 18:28:02 +000035#pragma intrinsic(_InterlockedExchangeAdd)
Etienne Bergeron65c00a22016-07-06 16:33:57 +000036extern "C" char _InterlockedCompareExchange8( // NOLINT
37 char volatile *Destination, // NOLINT
38 char Exchange, char Comparand); // NOLINT
39#pragma intrinsic(_InterlockedCompareExchange8)
Timur Iskhodzhanov5ce39372014-05-13 14:23:25 +000040extern "C" short _InterlockedCompareExchange16( // NOLINT
41 short volatile *Destination, // NOLINT
42 short Exchange, short Comparand); // NOLINT
43#pragma intrinsic(_InterlockedCompareExchange16)
44extern "C"
45long long _InterlockedCompareExchange64( // NOLINT
46 long long volatile *Destination, // NOLINT
47 long long Exchange, long long Comparand); // NOLINT
48#pragma intrinsic(_InterlockedCompareExchange64)
Alexey Samsonovc9142812012-12-26 09:25:09 +000049extern "C" void *_InterlockedCompareExchangePointer(
Dmitry Vyukovfa90fa32012-08-31 14:01:33 +000050 void *volatile *Destination,
51 void *Exchange, void *Comparand);
Alexey Samsonovc9142812012-12-26 09:25:09 +000052#pragma intrinsic(_InterlockedCompareExchangePointer)
Timur Iskhodzhanov73e484a2013-01-09 12:03:27 +000053extern "C"
Timur Iskhodzhanov1d1f74c2013-01-09 12:43:16 +000054long __cdecl _InterlockedCompareExchange( // NOLINT
55 long volatile *Destination, // NOLINT
56 long Exchange, long Comparand); // NOLINT
Timur Iskhodzhanov73e484a2013-01-09 12:03:27 +000057#pragma intrinsic(_InterlockedCompareExchange)
58
Timur Iskhodzhanov575180d2014-07-07 16:22:04 +000059#ifdef _WIN64
60extern "C" long long _InterlockedExchangeAdd64( // NOLINT
61 long long volatile * Addend, long long Value); // NOLINT
62#pragma intrinsic(_InterlockedExchangeAdd64)
Timur Iskhodzhanov73e484a2013-01-09 12:03:27 +000063#endif
Dmitry Vyukove8cee122012-06-29 18:00:38 +000064
65namespace __sanitizer {
66
67INLINE void atomic_signal_fence(memory_order) {
68 _ReadWriteBarrier();
69}
70
71INLINE void atomic_thread_fence(memory_order) {
72 _mm_mfence();
73}
74
75INLINE void proc_yield(int cnt) {
76 for (int i = 0; i < cnt; i++)
77 _mm_pause();
78}
79
80template<typename T>
81INLINE typename T::Type atomic_load(
82 const volatile T *a, memory_order mo) {
83 DCHECK(mo & (memory_order_relaxed | memory_order_consume
84 | memory_order_acquire | memory_order_seq_cst));
85 DCHECK(!((uptr)a % sizeof(*a)));
86 typename T::Type v;
Dmitry Vyukov316faf72013-01-14 08:12:47 +000087 // FIXME(dvyukov): 64-bit load is not atomic on 32-bits.
Dmitry Vyukove8cee122012-06-29 18:00:38 +000088 if (mo == memory_order_relaxed) {
89 v = a->val_dont_use;
90 } else {
91 atomic_signal_fence(memory_order_seq_cst);
92 v = a->val_dont_use;
93 atomic_signal_fence(memory_order_seq_cst);
94 }
95 return v;
96}
97
98template<typename T>
99INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) {
100 DCHECK(mo & (memory_order_relaxed | memory_order_release
101 | memory_order_seq_cst));
102 DCHECK(!((uptr)a % sizeof(*a)));
Dmitry Vyukov316faf72013-01-14 08:12:47 +0000103 // FIXME(dvyukov): 64-bit store is not atomic on 32-bits.
Dmitry Vyukove8cee122012-06-29 18:00:38 +0000104 if (mo == memory_order_relaxed) {
105 a->val_dont_use = v;
106 } else {
107 atomic_signal_fence(memory_order_seq_cst);
108 a->val_dont_use = v;
109 atomic_signal_fence(memory_order_seq_cst);
110 }
111 if (mo == memory_order_seq_cst)
112 atomic_thread_fence(memory_order_seq_cst);
113}
114
115INLINE u32 atomic_fetch_add(volatile atomic_uint32_t *a,
116 u32 v, memory_order mo) {
117 (void)mo;
118 DCHECK(!((uptr)a % sizeof(*a)));
119 return (u32)_InterlockedExchangeAdd(
120 (volatile long*)&a->val_dont_use, (long)v); // NOLINT
121}
122
Hans Wennborg8f80ccc2014-04-14 17:43:49 +0000123INLINE uptr atomic_fetch_add(volatile atomic_uintptr_t *a,
124 uptr v, memory_order mo) {
125 (void)mo;
126 DCHECK(!((uptr)a % sizeof(*a)));
127#ifdef _WIN64
128 return (uptr)_InterlockedExchangeAdd64(
129 (volatile long long*)&a->val_dont_use, (long long)v); // NOLINT
130#else
131 return (uptr)_InterlockedExchangeAdd(
132 (volatile long*)&a->val_dont_use, (long)v); // NOLINT
133#endif
134}
135
Dmitry Vyukov9e3a2172014-03-04 11:57:25 +0000136INLINE u32 atomic_fetch_sub(volatile atomic_uint32_t *a,
137 u32 v, memory_order mo) {
138 (void)mo;
139 DCHECK(!((uptr)a % sizeof(*a)));
140 return (u32)_InterlockedExchangeAdd(
141 (volatile long*)&a->val_dont_use, -(long)v); // NOLINT
142}
143
Hans Wennborg8f80ccc2014-04-14 17:43:49 +0000144INLINE uptr atomic_fetch_sub(volatile atomic_uintptr_t *a,
145 uptr v, memory_order mo) {
146 (void)mo;
147 DCHECK(!((uptr)a % sizeof(*a)));
148#ifdef _WIN64
149 return (uptr)_InterlockedExchangeAdd64(
150 (volatile long long*)&a->val_dont_use, -(long long)v); // NOLINT
151#else
152 return (uptr)_InterlockedExchangeAdd(
153 (volatile long*)&a->val_dont_use, -(long)v); // NOLINT
154#endif
155}
156
Dmitry Vyukove8cee122012-06-29 18:00:38 +0000157INLINE u8 atomic_exchange(volatile atomic_uint8_t *a,
158 u8 v, memory_order mo) {
159 (void)mo;
160 DCHECK(!((uptr)a % sizeof(*a)));
Peter Collingbourned3b99172015-07-02 01:44:34 +0000161 return (u8)_InterlockedExchange8((volatile char*)&a->val_dont_use, v);
Dmitry Vyukove8cee122012-06-29 18:00:38 +0000162}
163
164INLINE u16 atomic_exchange(volatile atomic_uint16_t *a,
165 u16 v, memory_order mo) {
166 (void)mo;
167 DCHECK(!((uptr)a % sizeof(*a)));
Peter Collingbourned3b99172015-07-02 01:44:34 +0000168 return (u16)_InterlockedExchange16((volatile short*)&a->val_dont_use, v);
Dmitry Vyukove8cee122012-06-29 18:00:38 +0000169}
170
Peter Collingbourned3b99172015-07-02 01:44:34 +0000171INLINE u32 atomic_exchange(volatile atomic_uint32_t *a,
172 u32 v, memory_order mo) {
173 (void)mo;
174 DCHECK(!((uptr)a % sizeof(*a)));
175 return (u32)_InterlockedExchange((volatile long*)&a->val_dont_use, v);
176}
177
// 8-bit strong compare-and-swap: if *a == *cmp, stores xchgv and returns
// true; otherwise writes the observed value back into *cmp and returns
// false. The interlocked operation is a full barrier, so `mo` is ignored.
INLINE bool atomic_compare_exchange_strong(volatile atomic_uint8_t *a,
                                           u8 *cmp,
                                           u8 xchgv,
                                           memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  u8 cmpv = *cmp;
#ifdef _WIN64
  u8 prev = (u8)_InterlockedCompareExchange8(
      (volatile char*)&a->val_dont_use, (char)xchgv, (char)cmpv);
#else
  // 32-bit build uses hand-written LOCK CMPXCHG — presumably because
  // _InterlockedCompareExchange8 is unavailable on older x86 toolchains;
  // TODO(review): confirm and drop the asm once the intrinsic is universal.
  u8 prev;
  __asm {
    mov al, cmpv           ; AL = expected value (CMPXCHG compares with AL)
    mov ecx, a             ; ECX = address of the atomic cell
    mov dl, xchgv          ; DL = replacement value
    lock cmpxchg [ecx], dl ; if [ECX]==AL then [ECX]=DL else AL=[ECX]
    mov prev, al           ; AL now holds the previous memory value
  }
#endif
  if (prev == cmpv)
    return true;
  *cmp = prev;
  return false;
}
Peter Collingbourned3b99172015-07-02 01:44:34 +0000203
Dmitry Vyukov179e5dda2012-08-31 14:11:33 +0000204INLINE bool atomic_compare_exchange_strong(volatile atomic_uintptr_t *a,
Dmitry Vyukovfa90fa32012-08-31 14:01:33 +0000205 uptr *cmp,
206 uptr xchg,
207 memory_order mo) {
208 uptr cmpv = *cmp;
Alexey Samsonovc9142812012-12-26 09:25:09 +0000209 uptr prev = (uptr)_InterlockedCompareExchangePointer(
Dmitry Vyukovfa90fa32012-08-31 14:01:33 +0000210 (void*volatile*)&a->val_dont_use, (void*)xchg, (void*)cmpv);
211 if (prev == cmpv)
212 return true;
213 *cmp = prev;
214 return false;
215}
216
Timur Iskhodzhanov5ce39372014-05-13 14:23:25 +0000217INLINE bool atomic_compare_exchange_strong(volatile atomic_uint16_t *a,
218 u16 *cmp,
219 u16 xchg,
220 memory_order mo) {
221 u16 cmpv = *cmp;
222 u16 prev = (u16)_InterlockedCompareExchange16(
223 (volatile short*)&a->val_dont_use, (short)xchg, (short)cmpv);
224 if (prev == cmpv)
225 return true;
226 *cmp = prev;
227 return false;
228}
229
Dmitry Vyukov30076b02014-03-04 14:21:42 +0000230INLINE bool atomic_compare_exchange_strong(volatile atomic_uint32_t *a,
231 u32 *cmp,
232 u32 xchg,
233 memory_order mo) {
234 u32 cmpv = *cmp;
235 u32 prev = (u32)_InterlockedCompareExchange(
236 (volatile long*)&a->val_dont_use, (long)xchg, (long)cmpv);
237 if (prev == cmpv)
238 return true;
239 *cmp = prev;
240 return false;
241}
242
Timur Iskhodzhanov5ce39372014-05-13 14:23:25 +0000243INLINE bool atomic_compare_exchange_strong(volatile atomic_uint64_t *a,
244 u64 *cmp,
245 u64 xchg,
246 memory_order mo) {
247 u64 cmpv = *cmp;
248 u64 prev = (u64)_InterlockedCompareExchange64(
249 (volatile long long*)&a->val_dont_use, (long long)xchg, (long long)cmpv);
250 if (prev == cmpv)
251 return true;
252 *cmp = prev;
253 return false;
254}
255
Dmitry Vyukovfa90fa32012-08-31 14:01:33 +0000256template<typename T>
257INLINE bool atomic_compare_exchange_weak(volatile T *a,
Alexey Samsonova208c392013-03-25 09:23:30 +0000258 typename T::Type *cmp,
259 typename T::Type xchg,
260 memory_order mo) {
Dmitry Vyukovfa90fa32012-08-31 14:01:33 +0000261 return atomic_compare_exchange_strong(a, cmp, xchg, mo);
262}
263
Dmitry Vyukove8cee122012-06-29 18:00:38 +0000264} // namespace __sanitizer
265
#endif  // SANITIZER_ATOMIC_MSVC_H