//===-- sanitizer_atomic_msvc.h ---------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer/AddressSanitizer runtime.
// Not intended for direct inclusion. Include sanitizer_atomic.h.
//
//===----------------------------------------------------------------------===//

#ifndef SANITIZER_ATOMIC_MSVC_H
#define SANITIZER_ATOMIC_MSVC_H

#include <intrin.h>

namespace __sanitizer {

INLINE void atomic_signal_fence(memory_order) {
  _ReadWriteBarrier();
}

INLINE void atomic_thread_fence(memory_order) {
  _mm_mfence();
}

INLINE void proc_yield(int cnt) {
  for (int i = 0; i < cnt; i++)
    _mm_pause();
}
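
// Note: _ReadWriteBarrier() is a compiler-only fence - it prevents the
// compiler from reordering memory accesses across it but emits no
// instruction - whereas _mm_mfence() issues a full hardware MFENCE.
// On x86/x64, aligned loads and stores are atomic and already provide
// acquire/release ordering in hardware, which is presumably why the
// operations below only suppress compiler reordering, plus an MFENCE
// after sequentially consistent stores.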

template<typename T>
INLINE typename T::Type atomic_load(
    const volatile T *a, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_consume
      | memory_order_acquire | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  typename T::Type v;
  if (mo == memory_order_relaxed) {
    v = a->val_dont_use;
  } else {
    atomic_signal_fence(memory_order_seq_cst);
    v = a->val_dont_use;
    atomic_signal_fence(memory_order_seq_cst);
  }
  return v;
}

template<typename T>
INLINE void atomic_store(volatile T *a, typename T::Type v, memory_order mo) {
  DCHECK(mo & (memory_order_relaxed | memory_order_release
      | memory_order_seq_cst));
  DCHECK(!((uptr)a % sizeof(*a)));
  if (mo == memory_order_relaxed) {
    a->val_dont_use = v;
  } else {
    atomic_signal_fence(memory_order_seq_cst);
    a->val_dont_use = v;
    atomic_signal_fence(memory_order_seq_cst);
  }
  if (mo == memory_order_seq_cst)
    atomic_thread_fence(memory_order_seq_cst);
}
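
// Illustrative only (not part of this header): a typical release/acquire
// publication pattern built from these primitives. The variables below are
// hypothetical.
//
//   atomic_uint32_t flag;
//   u32 payload;
//
//   // Producer:
//   payload = 42;
//   atomic_store(&flag, 1, memory_order_release);
//
//   // Consumer:
//   while (atomic_load(&flag, memory_order_acquire) == 0)
//     proc_yield(10);
//   // payload is now guaranteed to be visible as 42.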

INLINE u32 atomic_fetch_add(volatile atomic_uint32_t *a,
    u32 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  return (u32)_InterlockedExchangeAdd(
      (volatile long*)&a->val_dont_use, (long)v);  // NOLINT
}
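
// _InterlockedExchangeAdd compiles to a LOCK-prefixed XADD, which is a full
// barrier on x86, so ignoring the memory_order argument above is safe, if
// conservative. Illustrative use (hypothetical counter, not part of this
// header):
//
//   atomic_uint32_t counter;
//   u32 prev = atomic_fetch_add(&counter, 1, memory_order_relaxed);
//   // prev holds the value of counter before the increment.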

INLINE u8 atomic_exchange(volatile atomic_uint8_t *a,
    u8 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  __asm {
    mov eax, a
    mov cl, v
    xchg [eax], cl  // NOLINT
    mov v, cl
  }
  return v;
}

INLINE u16 atomic_exchange(volatile atomic_uint16_t *a,
    u16 v, memory_order mo) {
  (void)mo;
  DCHECK(!((uptr)a % sizeof(*a)));
  __asm {
    mov eax, a
    mov cx, v
    xchg [eax], cx  // NOLINT
    mov v, cx
  }
  return v;
}
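
// The inline assembly above is x86-32 only: the x64 MSVC compiler does not
// accept __asm blocks. Newer MSVC versions also provide _InterlockedExchange8
// and _InterlockedExchange16 intrinsics, which could presumably replace the
// assembly, e.g. (sketch, assuming those intrinsics are available):
//
//   return (u8)_InterlockedExchange8(
//       (volatile char*)&a->val_dont_use, (char)v);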

}  // namespace __sanitizer

#endif  // SANITIZER_ATOMIC_MSVC_H