// Copyright 2013 Red Hat Inc.  All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Red Hat Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// This file is an internal atomic implementation, use atomicops.h instead.
#ifndef GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_GENERIC_GCC_H_
#define GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_GENERIC_GCC_H_

namespace google {
namespace protobuf {
namespace internal {

38inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
39 Atomic32 old_value,
40 Atomic32 new_value) {
41 __atomic_compare_exchange_n(ptr, &old_value, new_value, true,
42 __ATOMIC_RELAXED, __ATOMIC_RELAXED);
43 return old_value;
44}
45
46inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
47 Atomic32 new_value) {
48 return __atomic_exchange_n(ptr, new_value, __ATOMIC_RELAXED);
49}
50
51inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
52 Atomic32 increment) {
53 return __atomic_add_fetch(ptr, increment, __ATOMIC_RELAXED);
54}
55
56inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
57 Atomic32 increment) {
58 return __atomic_add_fetch(ptr, increment, __ATOMIC_SEQ_CST);
59}
60
61inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
62 Atomic32 old_value,
63 Atomic32 new_value) {
64 __atomic_compare_exchange(ptr, &old_value, &new_value, true,
65 __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
66 return old_value;
67}
68
69inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
70 Atomic32 old_value,
71 Atomic32 new_value) {
72 __atomic_compare_exchange_n(ptr, &old_value, new_value, true,
73 __ATOMIC_RELEASE, __ATOMIC_ACQUIRE);
74 return old_value;
75}
76
77inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
78 __atomic_store_n(ptr, value, __ATOMIC_RELAXED);
79}
80
// Issues a full (sequentially consistent) memory barrier.
//
// Consistency fix: this was the only legacy __sync_* builtin in a file
// otherwise written with the __atomic_* builtins.  GCC documents
// __atomic_thread_fence(__ATOMIC_SEQ_CST) as a full barrier, equivalent
// to __sync_synchronize().
inline void MemoryBarrier() {
  __atomic_thread_fence(__ATOMIC_SEQ_CST);
}

85inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
86 __atomic_store_n(ptr, value, __ATOMIC_SEQ_CST);
87}
88
89inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
90 __atomic_store_n(ptr, value, __ATOMIC_RELEASE);
91}
92
93inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
94 return __atomic_load_n(ptr, __ATOMIC_RELAXED);
95}
96
97inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
98 return __atomic_load_n(ptr, __ATOMIC_ACQUIRE);
99}
100
101inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
102 return __atomic_load_n(ptr, __ATOMIC_SEQ_CST);
103}
104
#ifdef __LP64__

107inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
108 __atomic_store_n(ptr, value, __ATOMIC_RELEASE);
109}
110
111inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
112 return __atomic_load_n(ptr, __ATOMIC_ACQUIRE);
113}
114
115inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
116 Atomic64 old_value,
117 Atomic64 new_value) {
118 __atomic_compare_exchange_n(ptr, &old_value, new_value, true,
119 __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
120 return old_value;
121}
122
123inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
124 Atomic64 old_value,
125 Atomic64 new_value) {
126 __atomic_compare_exchange_n(ptr, &old_value, new_value, true,
127 __ATOMIC_RELAXED, __ATOMIC_RELAXED);
128 return old_value;
129}
130
#endif  // defined(__LP64__)

}  // namespace internal
}  // namespace protobuf
}  // namespace google

#endif  // GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_GENERIC_GCC_H_