//===-- sanitizer_atomic_msvc.h ---------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Not intended for direct inclusion. Include sanitizer_atomic.h.
//
//===----------------------------------------------------------------------===//

15#ifndef SANITIZER_ATOMIC_MSVC_H
16#define SANITIZER_ATOMIC_MSVC_H
17
Dmitry Vyukov3c5c9e72012-06-29 18:37:45 +000018extern "C" void _ReadWriteBarrier();
Dmitry Vyukovb379fe52012-06-29 18:28:02 +000019#pragma intrinsic(_ReadWriteBarrier)
Dmitry Vyukov3c5c9e72012-06-29 18:37:45 +000020extern "C" void _mm_mfence();
Dmitry Vyukovb379fe52012-06-29 18:28:02 +000021#pragma intrinsic(_mm_mfence)
Dmitry Vyukov3c5c9e72012-06-29 18:37:45 +000022extern "C" void _mm_pause();
Dmitry Vyukovb379fe52012-06-29 18:28:02 +000023#pragma intrinsic(_mm_pause)
Dmitry Vyukov3c5c9e72012-06-29 18:37:45 +000024extern "C" long _InterlockedExchangeAdd( // NOLINT
25 long volatile * Addend, long Value); // NOLINT
Dmitry Vyukovb379fe52012-06-29 18:28:02 +000026#pragma intrinsic(_InterlockedExchangeAdd)
Timur Iskhodzhanov73e484a2013-01-09 12:03:27 +000027
28#ifdef _WIN64
Alexey Samsonovc9142812012-12-26 09:25:09 +000029extern "C" void *_InterlockedCompareExchangePointer(
Dmitry Vyukovfa90fa32012-08-31 14:01:33 +000030 void *volatile *Destination,
31 void *Exchange, void *Comparand);
Alexey Samsonovc9142812012-12-26 09:25:09 +000032#pragma intrinsic(_InterlockedCompareExchangePointer)
Timur Iskhodzhanov73e484a2013-01-09 12:03:27 +000033#else
34// There's no _InterlockedCompareExchangePointer intrinsic on x86,
35// so call _InterlockedCompareExchange instead.
36extern "C"
Timur Iskhodzhanov1d1f74c2013-01-09 12:43:16 +000037long __cdecl _InterlockedCompareExchange( // NOLINT
38 long volatile *Destination, // NOLINT
39 long Exchange, long Comparand); // NOLINT
Timur Iskhodzhanov73e484a2013-01-09 12:03:27 +000040#pragma intrinsic(_InterlockedCompareExchange)
41
42inline static void *_InterlockedCompareExchangePointer(
43 void *volatile *Destination,
44 void *Exchange, void *Comparand) {
45 return reinterpret_cast<void*>(
46 _InterlockedCompareExchange(
Timur Iskhodzhanov1d1f74c2013-01-09 12:43:16 +000047 reinterpret_cast<long volatile*>(Destination), // NOLINT
48 reinterpret_cast<long>(Exchange), // NOLINT
49 reinterpret_cast<long>(Comparand))); // NOLINT
Timur Iskhodzhanov73e484a2013-01-09 12:03:27 +000050}
51#endif
Dmitry Vyukove8cee122012-06-29 18:00:38 +000052
53namespace __sanitizer {
54
55INLINE void atomic_signal_fence(memory_order) {
56 _ReadWriteBarrier();
57}
58
59INLINE void atomic_thread_fence(memory_order) {
60 _mm_mfence();
61}
62
63INLINE void proc_yield(int cnt) {
64 for (int i = 0; i < cnt; i++)
65 _mm_pause();
66}
67
68template<typename T>
69INLINE typename T::Type atomic_load(
70 const volatile T *a, memory_order mo) {
71 DCHECK(mo & (memory_order_relaxed | memory_order_consume
72 | memory_order_acquire | memory_order_seq_cst));
73 DCHECK(!((uptr)a % sizeof(*a)));
74 typename T::Type v;
75 if (mo == memory_order_relaxed) {
76 v = a->val_dont_use;
77 } else {
78 atomic_signal_fence(memory_order_seq_cst);
79 v = a->val_dont_use;
80 atomic_signal_fence(memory_order_seq_cst);
81 }
82 return v;
83}
84
85template<typename T>
86INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) {
87 DCHECK(mo & (memory_order_relaxed | memory_order_release
88 | memory_order_seq_cst));
89 DCHECK(!((uptr)a % sizeof(*a)));
90 if (mo == memory_order_relaxed) {
91 a->val_dont_use = v;
92 } else {
93 atomic_signal_fence(memory_order_seq_cst);
94 a->val_dont_use = v;
95 atomic_signal_fence(memory_order_seq_cst);
96 }
97 if (mo == memory_order_seq_cst)
98 atomic_thread_fence(memory_order_seq_cst);
99}
100
101INLINE u32 atomic_fetch_add(volatile atomic_uint32_t *a,
102 u32 v, memory_order mo) {
103 (void)mo;
104 DCHECK(!((uptr)a % sizeof(*a)));
105 return (u32)_InterlockedExchangeAdd(
106 (volatile long*)&a->val_dont_use, (long)v); // NOLINT
107}
108
109INLINE u8 atomic_exchange(volatile atomic_uint8_t *a,
110 u8 v, memory_order mo) {
111 (void)mo;
112 DCHECK(!((uptr)a % sizeof(*a)));
113 __asm {
114 mov eax, a
Dmitry Vyukovb379fe52012-06-29 18:28:02 +0000115 mov cl, v
116 xchg [eax], cl // NOLINT
117 mov v, cl
Dmitry Vyukove8cee122012-06-29 18:00:38 +0000118 }
119 return v;
120}
121
122INLINE u16 atomic_exchange(volatile atomic_uint16_t *a,
123 u16 v, memory_order mo) {
124 (void)mo;
125 DCHECK(!((uptr)a % sizeof(*a)));
126 __asm {
127 mov eax, a
Dmitry Vyukovb379fe52012-06-29 18:28:02 +0000128 mov cx, v
129 xchg [eax], cx // NOLINT
130 mov v, cx
Dmitry Vyukove8cee122012-06-29 18:00:38 +0000131 }
132 return v;
133}
134
Dmitry Vyukov179e5dda2012-08-31 14:11:33 +0000135INLINE bool atomic_compare_exchange_strong(volatile atomic_uintptr_t *a,
Dmitry Vyukovfa90fa32012-08-31 14:01:33 +0000136 uptr *cmp,
137 uptr xchg,
138 memory_order mo) {
139 uptr cmpv = *cmp;
Alexey Samsonovc9142812012-12-26 09:25:09 +0000140 uptr prev = (uptr)_InterlockedCompareExchangePointer(
Dmitry Vyukovfa90fa32012-08-31 14:01:33 +0000141 (void*volatile*)&a->val_dont_use, (void*)xchg, (void*)cmpv);
142 if (prev == cmpv)
143 return true;
144 *cmp = prev;
145 return false;
146}
147
148template<typename T>
149INLINE bool atomic_compare_exchange_weak(volatile T *a,
150 typename T::Type *cmp,
151 typename T::Type xchg,
152 memory_order mo) {
153 return atomic_compare_exchange_strong(a, cmp, xchg, mo);
154}
155
Dmitry Vyukove8cee122012-06-29 18:00:38 +0000156} // namespace __sanitizer

#endif  // SANITIZER_ATOMIC_MSVC_H