/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/barrier.h>


#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic64_read(v)	READ_ONCE((v)->counter)

#define atomic_set(v,i)		WRITE_ONCE(((v)->counter), (i))
#define atomic64_set(v,i)	WRITE_ONCE(((v)->counter), (i))

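/*
 * ATOMIC_OP() and ATOMIC_FETCH_OP() build the arithmetic atomics out of a
 * compare-and-exchange retry loop: read the counter, compute "old c_op i",
 * and retry the acquire cmpxchg until no other CPU has modified the counter
 * in between.  ATOMIC_OP() returns the new value, ATOMIC_FETCH_OP() the
 * value observed before the operation.
 */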
#define ATOMIC_OP(op, c_op)						\
static __inline__ int							\
ia64_atomic_##op (int i, atomic_t *v)					\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return new;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static __inline__ int							\
ia64_atomic_fetch_##op (int i, atomic_t *v)				\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return old;							\
}

#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(add, +)
ATOMIC_OPS(sub, -)

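/*
 * For the *_return() and fetch_*() wrappers below: the IA-64 fetchadd
 * instruction only accepts the immediate increments -16, -8, -4, -1, 1, 4,
 * 8 and 16, so a compile-time constant increment from that set takes the
 * single-instruction ia64_fetch_and_add()/ia64_fetchadd() path; everything
 * else falls back to the cmpxchg-based ia64_atomic_*() helpers above.
 */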
#define atomic_add_return(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic_add(__ia64_aar_i, v);			\
})

#define atomic_sub_return(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic_sub(__ia64_asr_i, v);			\
})

#define atomic_fetch_add(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_add(__ia64_aar_i, v);		\
})

#define atomic_fetch_sub(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_sub(__ia64_asr_i, v);		\
})

ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

#define atomic_and(i,v)	(void)ia64_atomic_fetch_and(i,v)
#define atomic_or(i,v)	(void)ia64_atomic_fetch_or(i,v)
#define atomic_xor(i,v)	(void)ia64_atomic_fetch_xor(i,v)

#define atomic_fetch_and(i,v)	ia64_atomic_fetch_and(i,v)
#define atomic_fetch_or(i,v)	ia64_atomic_fetch_or(i,v)
#define atomic_fetch_xor(i,v)	ia64_atomic_fetch_xor(i,v)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP

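/*
 * The 64-bit variants below mirror the 32-bit ones above, operating on the
 * 64-bit counter of atomic64_t via an 8-byte cmpxchg/fetchadd.
 */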
#define ATOMIC64_OP(op, c_op)						\
static __inline__ long							\
ia64_atomic64_##op (__s64 i, atomic64_t *v)				\
{									\
	__s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return new;							\
}

#define ATOMIC64_FETCH_OP(op, c_op)					\
static __inline__ long							\
ia64_atomic64_fetch_##op (__s64 i, atomic64_t *v)			\
{									\
	__s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return old;							\
}

#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(add, +)
ATOMIC64_OPS(sub, -)

#define atomic64_add_return(i,v)					\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic64_add(__ia64_aar_i, v);			\
})

#define atomic64_sub_return(i,v)					\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic64_sub(__ia64_asr_i, v);			\
})

#define atomic64_fetch_add(i,v)						\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_add(__ia64_aar_i, v);		\
})

#define atomic64_fetch_sub(i,v)						\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_sub(__ia64_asr_i, v);		\
})

ATOMIC64_FETCH_OP(and, &)
ATOMIC64_FETCH_OP(or, |)
ATOMIC64_FETCH_OP(xor, ^)

#define atomic64_and(i,v)	(void)ia64_atomic64_fetch_and(i,v)
#define atomic64_or(i,v)	(void)ia64_atomic64_fetch_or(i,v)
#define atomic64_xor(i,v)	(void)ia64_atomic64_fetch_xor(i,v)

#define atomic64_fetch_and(i,v)	ia64_atomic64_fetch_and(i,v)
#define atomic64_fetch_or(i,v)	ia64_atomic64_fetch_or(i,v)
#define atomic64_fetch_xor(i,v)	ia64_atomic64_fetch_xor(i,v)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) \
	(cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

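/*
 * Add @a to @v unless @v was already @u; returns the value observed before
 * the (possible) add.  Generic wrappers such as atomic_add_unless()
 * typically compare this result against @u to tell whether the add
 * happened.
 */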
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

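/*
 * Unlike __atomic_add_unless() above, atomic64_add_unless() returns whether
 * the add was carried out: true unless the counter already held @u.
 */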
static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

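/*
 * Decrement @v only if the result would remain non-negative.  Returns the
 * decremented value, or a negative value (old value - 1) if the decrement
 * was not performed.
 */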
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long c, old, dec;
	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}

/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static __inline__ long
atomic64_add_negative (__s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
#define atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)

#define atomic_add(i,v)			(void)atomic_add_return((i), (v))
#define atomic_sub(i,v)			(void)atomic_sub_return((i), (v))
#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))

#define atomic64_add(i,v)		(void)atomic64_add_return((i), (v))
#define atomic64_sub(i,v)		(void)atomic64_sub_return((i), (v))
#define atomic64_inc(v)			atomic64_add(1, (v))
#define atomic64_dec(v)			atomic64_sub(1, (v))
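
/*
 * Illustrative usage sketch (hypothetical caller, not part of this header):
 *
 *	static atomic_t refcnt = ATOMIC_INIT(1);
 *
 *	atomic_inc(&refcnt);			// take a reference
 *	if (atomic_dec_and_test(&refcnt))	// drop it; true once it reaches zero
 *		release_object();		// hypothetical cleanup hook
 */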

#endif /* _ASM_IA64_ATOMIC_H */