/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/barrier.h>


#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic64_read(v)	READ_ONCE((v)->counter)

#define atomic_set(v,i)		WRITE_ONCE(((v)->counter), (i))
#define atomic64_set(v,i)	WRITE_ONCE(((v)->counter), (i))

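/*
 * Apart from the fetchadd fast path further below, every operation is
 * built as a cmpxchg.acq retry loop: read the counter, compute the new
 * value, and retry until the compare-and-exchange observes no
 * concurrent update.  ATOMIC_OP returns the new value, ATOMIC_FETCH_OP
 * returns the value the counter held before the update.
 */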
#define ATOMIC_OP(op, c_op)						\
static __inline__ int							\
ia64_atomic_##op (int i, atomic_t *v)					\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return new;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static __inline__ int							\
ia64_atomic_fetch_##op (int i, atomic_t *v)				\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return old;							\
}

#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(add, +)
ATOMIC_OPS(sub, -)

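/*
 * The ia64 fetchadd instruction only accepts a small set of immediate
 * increments (+/-1, 4, 8, 16).  __ia64_atomic_const() checks whether
 * the operand is a compile-time constant from that set; if so,
 * atomic_add_return/atomic_sub_return use the fetchadd fast path,
 * otherwise they fall back to the cmpxchg loop above.  The selection
 * relies on constant folding, hence the __OPTIMIZE__ guard.
 */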
#ifdef __OPTIMIZE__
#define __ia64_atomic_const(i)	__builtin_constant_p(i) ?		\
		((i) == 1 || (i) == 4 || (i) == 8 || (i) == 16 ||	\
		 (i) == -1 || (i) == -4 || (i) == -8 || (i) == -16) : 0

#define atomic_add_return(i, v)						\
({									\
	int __i = (i);							\
	static const int __ia64_atomic_p = __ia64_atomic_const(i);	\
	__ia64_atomic_p ? ia64_fetch_and_add(__i, &(v)->counter) :	\
				ia64_atomic_add(__i, v);		\
})

#define atomic_sub_return(i, v)						\
({									\
	int __i = (i);							\
	static const int __ia64_atomic_p = __ia64_atomic_const(i);	\
	__ia64_atomic_p ? ia64_fetch_and_add(-__i, &(v)->counter) :	\
				ia64_atomic_sub(__i, v);		\
})
#else
#define atomic_add_return(i, v)	ia64_atomic_add(i, v)
#define atomic_sub_return(i, v)	ia64_atomic_sub(i, v)
#endif

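/*
 * atomic_fetch_add()/atomic_fetch_sub() return the counter's previous
 * value.  Constant operands that fetchadd can encode go through
 * ia64_fetchadd() with acquire ordering; anything else falls back to
 * the ia64_atomic_fetch_add/sub() loops above.
 */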
#define atomic_fetch_add(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_add(__ia64_aar_i, v);		\
})

#define atomic_fetch_sub(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_sub(__ia64_asr_i, v);		\
})

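/*
 * Bitwise and/or/xor have no fetchadd equivalent, so they always use
 * the cmpxchg retry loop.  The plain atomic_and/or/xor forms simply
 * discard the returned old value.
 */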
ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

#define atomic_and(i,v)	(void)ia64_atomic_fetch_and(i,v)
#define atomic_or(i,v)	(void)ia64_atomic_fetch_or(i,v)
#define atomic_xor(i,v)	(void)ia64_atomic_fetch_xor(i,v)

#define atomic_fetch_and(i,v)	ia64_atomic_fetch_and(i,v)
#define atomic_fetch_or(i,v)	ia64_atomic_fetch_or(i,v)
#define atomic_fetch_xor(i,v)	ia64_atomic_fetch_xor(i,v)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP

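/*
 * The 64-bit variants below mirror the 32-bit ones above, operating on
 * atomic64_t with __s64 operands and an 8-byte cmpxchg/fetchadd.
 */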
#define ATOMIC64_OP(op, c_op)						\
static __inline__ long							\
ia64_atomic64_##op (__s64 i, atomic64_t *v)				\
{									\
	__s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return new;							\
}

#define ATOMIC64_FETCH_OP(op, c_op)					\
static __inline__ long							\
ia64_atomic64_fetch_##op (__s64 i, atomic64_t *v)			\
{									\
	__s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return old;							\
}

#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(add, +)
ATOMIC64_OPS(sub, -)

#define atomic64_add_return(i,v)					\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic64_add(__ia64_aar_i, v);			\
})

#define atomic64_sub_return(i,v)					\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic64_sub(__ia64_asr_i, v);			\
})

#define atomic64_fetch_add(i,v)						\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_add(__ia64_aar_i, v);		\
})

#define atomic64_fetch_sub(i,v)						\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_sub(__ia64_asr_i, v);		\
})

ATOMIC64_FETCH_OP(and, &)
ATOMIC64_FETCH_OP(or, |)
ATOMIC64_FETCH_OP(xor, ^)

#define atomic64_and(i,v)	(void)ia64_atomic64_fetch_and(i,v)
#define atomic64_or(i,v)	(void)ia64_atomic64_fetch_or(i,v)
#define atomic64_xor(i,v)	(void)ia64_atomic64_fetch_xor(i,v)

#define atomic64_fetch_and(i,v)	ia64_atomic64_fetch_and(i,v)
#define atomic64_fetch_or(i,v)	ia64_atomic64_fetch_or(i,v)
#define atomic64_fetch_xor(i,v)	ia64_atomic64_fetch_xor(i,v)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP

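/*
 * atomic{,64}_cmpxchg() and atomic{,64}_xchg() simply forward the
 * embedded counter word to the generic cmpxchg()/xchg() helpers.
 */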
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) \
	(cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

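/*
 * __atomic_add_unless(): add @a to @v unless @v currently holds @u.
 * Returns the value @v held before the (possible) addition, so the
 * caller can tell whether the add happened.
 */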
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}


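/*
 * atomic64_add_unless(): same retry loop as above, but with a
 * different return convention: it returns non-zero if the addition was
 * performed (i.e. the old value was not @u), not the old value itself.
 */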
static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

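/*
 * atomic64_dec_if_positive(): decrement @v only if the result would
 * not go negative.  Returns the decremented value; a negative return
 * means @v was left untouched.
 */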
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long c, old, dec;
	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}

/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static __inline__ long
atomic64_add_negative (__s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

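/*
 * inc/dec and the *_and_test helpers are expressed as add/sub of 1;
 * the *_and_test forms additionally report whether the result is zero.
 */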
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
#define atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)

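/* The plain add/sub/inc/dec forms discard the value computed by *_return. */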
#define atomic_add(i,v)			(void)atomic_add_return((i), (v))
#define atomic_sub(i,v)			(void)atomic_sub_return((i), (v))
#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))

#define atomic64_add(i,v)		(void)atomic64_add_return((i), (v))
#define atomic64_sub(i,v)		(void)atomic64_sub_return((i), (v))
#define atomic64_inc(v)			atomic64_add(1, (v))
#define atomic64_dec(v)			atomic64_sub(1, (v))

#endif /* _ASM_IA64_ATOMIC_H */