// Copyright 2008, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

30// This file is an internal atomic implementation, use base/atomicops.h instead.
31
32#ifndef BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_
33#define BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_
34
35#include <libkern/OSAtomic.h>
36
37namespace base {
38namespace subtle {
39
// Atomically: if (*ptr == old_value) store new_value.  Returns the value the
// operation observed in *ptr: |old_value| when the swap succeeded, otherwise
// the differing value read from *ptr.  Uses the non-Barrier libkern CAS, so
// no memory-ordering guarantees are implied.
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32(old_value, new_value,
                                 const_cast<Atomic32*>(ptr))) {
      // Swap took place, so the value present before it was |old_value|.
      return old_value;
    }
    // The CAS failed, but libkern only reports a bool, not the conflicting
    // value; re-read *ptr to obtain it.  If *ptr now equals |old_value|
    // again, the failure cannot be reported coherently — retry the CAS.
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}
53
// Atomically stores |new_value| into *ptr and returns the value that was
// there before.  Implemented as a CAS loop because libkern offers no plain
// exchange primitive.  No barrier semantics.
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
                                         Atomic32 new_value) {
  Atomic32 old_value;
  do {
    // Snapshot the current value, then retry until the snapshot is still
    // current at the instant the CAS commits.
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap32(old_value, new_value,
                                     const_cast<Atomic32*>(ptr)));
  return old_value;
}
63
64inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
65 Atomic32 increment) {
66 return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
67}
68
69inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
70 Atomic32 increment) {
71 return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
72}
73
// Issues a full memory barrier (loads and stores) via libkern.
inline void MemoryBarrier() {
  OSMemoryBarrier();
}
77
// Same contract as NoBarrier_CompareAndSwap but uses the Barrier flavor of
// the libkern CAS, giving the operation full-barrier semantics.
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
                                        const_cast<Atomic32*>(ptr))) {
      // Swap took place, so the value present before it was |old_value|.
      return old_value;
    }
    // CAS failed: re-read *ptr to find the conflicting value; if it has
    // changed back to |old_value|, retry the CAS (see NoBarrier variant).
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}
91
inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  // The lib kern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent
  // (same rationale as the 64-bit version below).
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}
97
// Plain (no-barrier) store of |value| into *ptr.
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}
101
// Store, then a full barrier.  Note the order: the store comes first and
// the barrier keeps later operations from moving ahead of it.
inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}
106
// Full barrier, then the store: all earlier memory operations complete
// before |value| becomes visible in *ptr.
inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}
111
// Plain (no-barrier) load of *ptr.
inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}
115
// Load, then a full barrier: the value is read before any subsequent
// memory operations are allowed to execute.
inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}
121
// Full barrier, then the load.
inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
  MemoryBarrier();
  return *ptr;
}
126
127#ifdef __LP64__
128
129// 64-bit implementation on 64-bit platform
130
// 64-bit twin of the Atomic32 NoBarrier_CompareAndSwap above: atomic CAS
// returning the value observed in *ptr, with no barrier semantics.
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64(old_value, new_value,
                                 const_cast<Atomic64*>(ptr))) {
      // Swap took place, so the value present before it was |old_value|.
      return old_value;
    }
    // CAS failed: re-read *ptr for the conflicting value; retry if it has
    // meanwhile changed back to |old_value|.
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}
144
// Atomically stores |new_value| into *ptr and returns the previous value.
// CAS loop, as libkern has no 64-bit exchange primitive.  No barriers.
inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
                                         Atomic64 new_value) {
  Atomic64 old_value;
  do {
    // Snapshot, then retry until the snapshot is still current when the
    // CAS commits.
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap64(old_value, new_value,
                                     const_cast<Atomic64*>(ptr)));
  return old_value;
}
154
155inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
156 Atomic64 increment) {
157 return OSAtomicAdd64(increment, const_cast<Atomic64*>(ptr));
158}
159
160inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
161 Atomic64 increment) {
162 return OSAtomicAdd64Barrier(increment, const_cast<Atomic64*>(ptr));
163}
164
// Barrier flavor of the 64-bit CAS: same contract as the NoBarrier version
// above, with full-barrier semantics from OSAtomicCompareAndSwap64Barrier.
inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64Barrier(old_value, new_value,
                                        const_cast<Atomic64*>(ptr))) {
      // Swap took place, so the value present before it was |old_value|.
      return old_value;
    }
    // CAS failed: re-read *ptr for the conflicting value; retry if it has
    // meanwhile changed back to |old_value|.
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}
178
inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  // The lib kern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}
186
// Plain (no-barrier) 64-bit store of |value| into *ptr.
inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}
190
// Store, then a full barrier (see the 32-bit Acquire_Store for ordering
// rationale).
inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}
195
// Full barrier, then the store: all earlier memory operations complete
// before |value| becomes visible in *ptr.
inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
  MemoryBarrier();
  *ptr = value;
}
200
// Plain (no-barrier) 64-bit load of *ptr.
inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}
204
// Load, then a full barrier: the value is read before any subsequent
// memory operations are allowed to execute.
inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
  Atomic64 value = *ptr;
  MemoryBarrier();
  return value;
}
210
// Full barrier, then the load.
inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
  MemoryBarrier();
  return *ptr;
}
215
216#endif // defined(__LP64__)
217
218// MacOS uses long for intptr_t, AtomicWord and Atomic32 are always different
219// on the Mac, even when they are the same size. We need to explicitly cast
220// from AtomicWord to Atomic32/64 to implement the AtomicWord interface.
221#ifdef __LP64__
222#define AtomicWordCastType Atomic64
223#else
224#define AtomicWordCastType Atomic32
225#endif
226
227inline AtomicWord NoBarrier_CompareAndSwap(volatile AtomicWord* ptr,
228 AtomicWord old_value,
229 AtomicWord new_value) {
230 return NoBarrier_CompareAndSwap(
231 reinterpret_cast<volatile AtomicWordCastType*>(ptr),
232 old_value, new_value);
233}
234
235inline AtomicWord NoBarrier_AtomicExchange(volatile AtomicWord* ptr,
236 AtomicWord new_value) {
237 return NoBarrier_AtomicExchange(
238 reinterpret_cast<volatile AtomicWordCastType*>(ptr), new_value);
239}
240
241inline AtomicWord NoBarrier_AtomicIncrement(volatile AtomicWord* ptr,
242 AtomicWord increment) {
243 return NoBarrier_AtomicIncrement(
244 reinterpret_cast<volatile AtomicWordCastType*>(ptr), increment);
245}
246
247inline AtomicWord Barrier_AtomicIncrement(volatile AtomicWord* ptr,
248 AtomicWord increment) {
249 return Barrier_AtomicIncrement(
250 reinterpret_cast<volatile AtomicWordCastType*>(ptr), increment);
251}
252
253inline AtomicWord Acquire_CompareAndSwap(volatile AtomicWord* ptr,
254 AtomicWord old_value,
255 AtomicWord new_value) {
256 return base::subtle::Acquire_CompareAndSwap(
257 reinterpret_cast<volatile AtomicWordCastType*>(ptr),
258 old_value, new_value);
259}
260
261inline AtomicWord Release_CompareAndSwap(volatile AtomicWord* ptr,
262 AtomicWord old_value,
263 AtomicWord new_value) {
264 return base::subtle::Release_CompareAndSwap(
265 reinterpret_cast<volatile AtomicWordCastType*>(ptr),
266 old_value, new_value);
267}
268
269inline void NoBarrier_Store(volatile AtomicWord *ptr, AtomicWord value) {
270 NoBarrier_Store(
271 reinterpret_cast<volatile AtomicWordCastType*>(ptr), value);
272}
273
274inline void Acquire_Store(volatile AtomicWord* ptr, AtomicWord value) {
275 return base::subtle::Acquire_Store(
276 reinterpret_cast<volatile AtomicWordCastType*>(ptr), value);
277}
278
279inline void Release_Store(volatile AtomicWord* ptr, AtomicWord value) {
280 return base::subtle::Release_Store(
281 reinterpret_cast<volatile AtomicWordCastType*>(ptr), value);
282}
283
284inline AtomicWord NoBarrier_Load(volatile const AtomicWord *ptr) {
285 return NoBarrier_Load(
286 reinterpret_cast<volatile const AtomicWordCastType*>(ptr));
287}
288
289inline AtomicWord Acquire_Load(volatile const AtomicWord* ptr) {
290 return base::subtle::Acquire_Load(
291 reinterpret_cast<volatile const AtomicWordCastType*>(ptr));
292}
293
294inline AtomicWord Release_Load(volatile const AtomicWord* ptr) {
295 return base::subtle::Release_Load(
296 reinterpret_cast<volatile const AtomicWordCastType*>(ptr));
297}
298
299#undef AtomicWordCastType
300
301} // namespace base::subtle
302} // namespace base
303
304#endif // BASE_ATOMICOPS_INTERNALS_X86_MACOSX_H_