blob: 12ffef3ba105e842464216c6dfa78feebfa2344d [file] [log] [blame]
Dmitry Vyukovb6eb56f2012-06-29 18:00:38 +00001//===-- sanitizer_atomic_msvc.h ---------------------------------*- C++ -*-===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
11// Not intended for direct inclusion. Include sanitizer_atomic.h.
12//
13//===----------------------------------------------------------------------===//
14
15#ifndef SANITIZER_ATOMIC_MSVC_H
16#define SANITIZER_ATOMIC_MSVC_H
17
// Forward declarations of the MSVC compiler intrinsics used below, plus
// #pragma intrinsic to force the compiler to expand them inline instead of
// calling CRT library stubs.

// Compiler-only reordering barrier (no CPU instruction emitted).
extern "C" void _ReadWriteBarrier();
#pragma intrinsic(_ReadWriteBarrier)
// Full hardware memory fence (MFENCE).
extern "C" void _mm_mfence();
#pragma intrinsic(_mm_mfence)
// PAUSE hint for spin-wait loops.
extern "C" void _mm_pause();
#pragma intrinsic(_mm_pause)
// Atomic add; returns the value the addend had before the addition.
extern "C" long _InterlockedExchangeAdd(  // NOLINT
    long volatile * Addend, long Value);  // NOLINT
#pragma intrinsic(_InterlockedExchangeAdd)
// Atomic compare-and-swap on 16-bit values; returns the initial value.
extern "C" short _InterlockedCompareExchange16(  // NOLINT
    short volatile *Destination,  // NOLINT
    short Exchange, short Comparand);  // NOLINT
#pragma intrinsic(_InterlockedCompareExchange16)
// Atomic compare-and-swap on 64-bit values; returns the initial value.
extern "C"
long long _InterlockedCompareExchange64(  // NOLINT
    long long volatile *Destination,  // NOLINT
    long long Exchange, long long Comparand);  // NOLINT
#pragma intrinsic(_InterlockedCompareExchange64)
// Atomic compare-and-swap on pointer-sized values; returns the initial value.
extern "C" void *_InterlockedCompareExchangePointer(
    void *volatile *Destination,
    void *Exchange, void *Comparand);
#pragma intrinsic(_InterlockedCompareExchangePointer)
// Atomic compare-and-swap on 32-bit values; returns the initial value.
extern "C"
long __cdecl _InterlockedCompareExchange(  // NOLINT
    long volatile *Destination,  // NOLINT
    long Exchange, long Comparand);  // NOLINT
#pragma intrinsic(_InterlockedCompareExchange)

#ifdef _WIN64
// 64-bit atomic add; only available when targeting x64.
extern "C" long long _InterlockedExchangeAdd64(  // NOLINT
    long long volatile * Addend, long long Value);  // NOLINT
#pragma intrinsic(_InterlockedExchangeAdd64)
#endif
Dmitry Vyukovb6eb56f2012-06-29 18:00:38 +000051
52namespace __sanitizer {
53
// Compiler-only fence: prevents the compiler from moving memory accesses
// across this point. Emits no CPU fence instruction, so it only orders
// accesses with respect to signal handlers on the same thread.
INLINE void atomic_signal_fence(memory_order) {
  _ReadWriteBarrier();
}
57
// Full hardware memory fence (MFENCE): orders all preceding loads and stores
// before all subsequent ones, visible across threads.
INLINE void atomic_thread_fence(memory_order) {
  _mm_mfence();
}
61
62INLINE void proc_yield(int cnt) {
63 for (int i = 0; i < cnt; i++)
64 _mm_pause();
65}
66
67template<typename T>
68INLINE typename T::Type atomic_load(
69 const volatile T *a, memory_order mo) {
70 DCHECK(mo & (memory_order_relaxed | memory_order_consume
71 | memory_order_acquire | memory_order_seq_cst));
72 DCHECK(!((uptr)a % sizeof(*a)));
73 typename T::Type v;
Dmitry Vyukov1a3503b2013-01-14 08:12:47 +000074 // FIXME(dvyukov): 64-bit load is not atomic on 32-bits.
Dmitry Vyukovb6eb56f2012-06-29 18:00:38 +000075 if (mo == memory_order_relaxed) {
76 v = a->val_dont_use;
77 } else {
78 atomic_signal_fence(memory_order_seq_cst);
79 v = a->val_dont_use;
80 atomic_signal_fence(memory_order_seq_cst);
81 }
82 return v;
83}
84
85template<typename T>
86INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) {
87 DCHECK(mo & (memory_order_relaxed | memory_order_release
88 | memory_order_seq_cst));
89 DCHECK(!((uptr)a % sizeof(*a)));
Dmitry Vyukov1a3503b2013-01-14 08:12:47 +000090 // FIXME(dvyukov): 64-bit store is not atomic on 32-bits.
Dmitry Vyukovb6eb56f2012-06-29 18:00:38 +000091 if (mo == memory_order_relaxed) {
92 a->val_dont_use = v;
93 } else {
94 atomic_signal_fence(memory_order_seq_cst);
95 a->val_dont_use = v;
96 atomic_signal_fence(memory_order_seq_cst);
97 }
98 if (mo == memory_order_seq_cst)
99 atomic_thread_fence(memory_order_seq_cst);
100}
101
102INLINE u32 atomic_fetch_add(volatile atomic_uint32_t *a,
103 u32 v, memory_order mo) {
104 (void)mo;
105 DCHECK(!((uptr)a % sizeof(*a)));
106 return (u32)_InterlockedExchangeAdd(
107 (volatile long*)&a->val_dont_use, (long)v); // NOLINT
108}
109
Stephen Hines2d1fdb22014-05-28 23:58:16 -0700110INLINE uptr atomic_fetch_add(volatile atomic_uintptr_t *a,
111 uptr v, memory_order mo) {
112 (void)mo;
113 DCHECK(!((uptr)a % sizeof(*a)));
114#ifdef _WIN64
115 return (uptr)_InterlockedExchangeAdd64(
116 (volatile long long*)&a->val_dont_use, (long long)v); // NOLINT
117#else
118 return (uptr)_InterlockedExchangeAdd(
119 (volatile long*)&a->val_dont_use, (long)v); // NOLINT
120#endif
121}
122
123INLINE u32 atomic_fetch_sub(volatile atomic_uint32_t *a,
124 u32 v, memory_order mo) {
125 (void)mo;
126 DCHECK(!((uptr)a % sizeof(*a)));
127 return (u32)_InterlockedExchangeAdd(
128 (volatile long*)&a->val_dont_use, -(long)v); // NOLINT
129}
130
131INLINE uptr atomic_fetch_sub(volatile atomic_uintptr_t *a,
132 uptr v, memory_order mo) {
133 (void)mo;
134 DCHECK(!((uptr)a % sizeof(*a)));
135#ifdef _WIN64
136 return (uptr)_InterlockedExchangeAdd64(
137 (volatile long long*)&a->val_dont_use, -(long long)v); // NOLINT
138#else
139 return (uptr)_InterlockedExchangeAdd(
140 (volatile long*)&a->val_dont_use, -(long)v); // NOLINT
141#endif
142}
143
// Atomically swaps *a with v and returns the previous value. There is no
// 8-bit interlocked-exchange intrinsic here, so this uses MSVC inline
// assembly; XCHG with a memory operand is implicitly locked on x86.
// NOTE(review): __asm blocks are only supported by MSVC when targeting
// 32-bit x86, which is presumably the only configuration that reaches this
// code — confirm against the build setup.
INLINE u8 atomic_exchange(volatile atomic_uint8_t *a,
                          u8 v, memory_order mo) {
  (void)mo;  // XCHG provides full ordering regardless of mo.
  DCHECK(!((uptr)a % sizeof(*a)));
  __asm {
    mov eax, a        ; eax = target address
    mov cl, v         ; cl = new value
    xchg [eax], cl    ; atomically swap; cl receives the old value  // NOLINT
    mov v, cl         ; reuse v to carry the old value out of the asm block
  }
  return v;
}
156
// Atomically swaps *a with v and returns the previous value (16-bit variant).
// XCHG with a memory operand is implicitly locked on x86.
// NOTE(review): __asm blocks are only supported by MSVC when targeting
// 32-bit x86 — confirm against the build setup.
INLINE u16 atomic_exchange(volatile atomic_uint16_t *a,
                           u16 v, memory_order mo) {
  (void)mo;  // XCHG provides full ordering regardless of mo.
  DCHECK(!((uptr)a % sizeof(*a)));
  __asm {
    mov eax, a        ; eax = target address
    mov cx, v         ; cx = new value
    xchg [eax], cx    ; atomically swap; cx receives the old value  // NOLINT
    mov v, cx         ; reuse v to carry the old value out of the asm block
  }
  return v;
}
169
// Atomically compares *a with *cmp and, if equal, stores xchgv into *a.
// Returns true on success; on failure writes the observed value back into
// *cmp (standard compare-exchange contract). There is no 8-bit interlocked
// CAS intrinsic used here, so this is hand-written CMPXCHG: AL holds the
// comparand, DL the new value, and AL receives the previous memory value.
// NOTE(review): __asm blocks are only supported by MSVC when targeting
// 32-bit x86 — confirm against the build setup.
INLINE bool atomic_compare_exchange_strong(volatile atomic_uint8_t *a,
                                           u8 *cmp,
                                           u8 xchgv,
                                           memory_order mo) {
  (void)mo;  // LOCK CMPXCHG provides full ordering regardless of mo.
  DCHECK(!((uptr)a % sizeof(*a)));
  u8 cmpv = *cmp;
  u8 prev;
  __asm {
    mov al, cmpv          ; al = expected value
    mov ecx, a            ; ecx = target address
    mov dl, xchgv         ; dl = desired value
    lock cmpxchg [ecx], dl ; if [ecx]==al, [ecx]=dl; al = old [ecx]
    mov prev, al          ; capture the previous value
  }
  if (prev == cmpv)
    return true;
  *cmp = prev;
  return false;
}
190
Dmitry Vyukov1b1de032012-08-31 14:11:33 +0000191INLINE bool atomic_compare_exchange_strong(volatile atomic_uintptr_t *a,
Dmitry Vyukov0d9c08d2012-08-31 14:01:33 +0000192 uptr *cmp,
193 uptr xchg,
194 memory_order mo) {
195 uptr cmpv = *cmp;
Alexey Samsonov806c4912012-12-26 09:25:09 +0000196 uptr prev = (uptr)_InterlockedCompareExchangePointer(
Dmitry Vyukov0d9c08d2012-08-31 14:01:33 +0000197 (void*volatile*)&a->val_dont_use, (void*)xchg, (void*)cmpv);
198 if (prev == cmpv)
199 return true;
200 *cmp = prev;
201 return false;
202}
203
Stephen Hines2d1fdb22014-05-28 23:58:16 -0700204INLINE bool atomic_compare_exchange_strong(volatile atomic_uint16_t *a,
205 u16 *cmp,
206 u16 xchg,
207 memory_order mo) {
208 u16 cmpv = *cmp;
209 u16 prev = (u16)_InterlockedCompareExchange16(
210 (volatile short*)&a->val_dont_use, (short)xchg, (short)cmpv);
211 if (prev == cmpv)
212 return true;
213 *cmp = prev;
214 return false;
215}
216
217INLINE bool atomic_compare_exchange_strong(volatile atomic_uint32_t *a,
218 u32 *cmp,
219 u32 xchg,
220 memory_order mo) {
221 u32 cmpv = *cmp;
222 u32 prev = (u32)_InterlockedCompareExchange(
223 (volatile long*)&a->val_dont_use, (long)xchg, (long)cmpv);
224 if (prev == cmpv)
225 return true;
226 *cmp = prev;
227 return false;
228}
229
230INLINE bool atomic_compare_exchange_strong(volatile atomic_uint64_t *a,
231 u64 *cmp,
232 u64 xchg,
233 memory_order mo) {
234 u64 cmpv = *cmp;
235 u64 prev = (u64)_InterlockedCompareExchange64(
236 (volatile long long*)&a->val_dont_use, (long long)xchg, (long long)cmpv);
237 if (prev == cmpv)
238 return true;
239 *cmp = prev;
240 return false;
241}
242
// Weak CAS forwarded to strong CAS: the weak form is allowed (but not
// required) to fail spuriously, so a strong implementation satisfies the
// weak contract.
template<typename T>
INLINE bool atomic_compare_exchange_weak(volatile T *a,
                                         typename T::Type *cmp,
                                         typename T::Type xchg,
                                         memory_order mo) {
  return atomic_compare_exchange_strong(a, cmp, xchg, mo);
}
250
Dmitry Vyukovb6eb56f2012-06-29 18:00:38 +0000251} // namespace __sanitizer
252
#endif  // SANITIZER_ATOMIC_MSVC_H