blob: 5c5620ae5a356278301b977cffe4954380a388ad [file] [log] [blame]
Greg Kroah-Hartmanb2441312017-11-01 15:07:57 +01001/* SPDX-License-Identifier: GPL-2.0 */
Arun Sharmaacac43e2011-07-26 16:09:08 -07002/* Atomic operations usable in machine independent code */
Eric Dumazet3f9d35b2010-11-11 14:05:08 -08003#ifndef _LINUX_ATOMIC_H
4#define _LINUX_ATOMIC_H
5#include <asm/atomic.h>
Will Deacon654672d2015-08-06 17:54:37 +01006#include <asm/barrier.h>
7
8/*
9 * Relaxed variants of xchg, cmpxchg and some atomic operations.
10 *
11 * We support four variants:
12 *
13 * - Fully ordered: The default implementation, no suffix required.
14 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
15 * - Release: Provides RELEASE semantics, _release suffix.
16 * - Relaxed: No ordering guarantees, _relaxed suffix.
17 *
18 * For compound atomics performing both a load and a store, ACQUIRE
19 * semantics apply only to the load and RELEASE semantics only to the
20 * store portion of the operation. Note that a failed cmpxchg_acquire
21 * does -not- imply any memory ordering constraints.
22 *
23 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
24 */
25
/*
 * Acquire/release accessors for the counter itself, built directly on the
 * generic smp_load_acquire()/smp_store_release() barriers; an architecture
 * may pre-define either one to provide a cheaper implementation.
 */
#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
33
34/*
35 * The idea here is to build acquire/release variants by adding explicit
36 * barriers on top of the relaxed variant. In the case where the relaxed
37 * variant is already fully ordered, no additional barriers are needed.
Boqun Fenge1ab7f32015-12-15 22:24:14 +080038 *
39 * Besides, if an arch has a special barrier for acquire/release, it could
40 * implement its own __atomic_op_* and use the same framework for building
41 * variants
Peter Zijlstrad89e588c2016-09-05 11:37:53 +020042 *
43 * If an architecture overrides __atomic_op_acquire() it will probably want
44 * to define smp_mb__after_spinlock().
Will Deacon654672d2015-08-06 17:54:37 +010045 */
#ifndef __atomic_op_acquire
/*
 * ACQUIRE form of 'op': perform the _relaxed op first, then order it
 * before all subsequent memory accesses with smp_mb__after_atomic().
 * The op's result is passed through unchanged.
 */
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
/*
 * RELEASE form of 'op': order all prior memory accesses before the
 * _relaxed op with smp_mb__before_atomic(); the statement expression
 * evaluates to the op's own result.
 */
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
/*
 * Fully ordered form of 'op': full barriers on both sides of the
 * _relaxed op.
 */
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif
Will Deacon654672d2015-08-06 17:54:37 +010073
/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
/*
 * The architecture only provides the fully ordered atomic_add_return();
 * the weaker variants can safely alias it (stronger ordering than asked
 * for is always correct).
 */
#define atomic_add_return_relaxed atomic_add_return
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return

#else /* atomic_add_return_relaxed */

/*
 * The architecture provides _relaxed; build any variant it did not
 * define itself from the __atomic_op_*() barrier wrappers above.
 */
#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */
97
Davidlohr Bueso63ab7bd2015-09-30 13:03:11 -070098/* atomic_inc_return_relaxed */
99#ifndef atomic_inc_return_relaxed
100#define atomic_inc_return_relaxed atomic_inc_return
101#define atomic_inc_return_acquire atomic_inc_return
102#define atomic_inc_return_release atomic_inc_return
103
104#else /* atomic_inc_return_relaxed */
105
106#ifndef atomic_inc_return_acquire
107#define atomic_inc_return_acquire(...) \
108 __atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
109#endif
110
111#ifndef atomic_inc_return_release
112#define atomic_inc_return_release(...) \
113 __atomic_op_release(atomic_inc_return, __VA_ARGS__)
114#endif
115
116#ifndef atomic_inc_return
117#define atomic_inc_return(...) \
118 __atomic_op_fence(atomic_inc_return, __VA_ARGS__)
119#endif
120#endif /* atomic_inc_return_relaxed */
121
Will Deacon654672d2015-08-06 17:54:37 +0100122/* atomic_sub_return_relaxed */
123#ifndef atomic_sub_return_relaxed
124#define atomic_sub_return_relaxed atomic_sub_return
125#define atomic_sub_return_acquire atomic_sub_return
126#define atomic_sub_return_release atomic_sub_return
127
128#else /* atomic_sub_return_relaxed */
129
130#ifndef atomic_sub_return_acquire
131#define atomic_sub_return_acquire(...) \
132 __atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
133#endif
134
135#ifndef atomic_sub_return_release
136#define atomic_sub_return_release(...) \
137 __atomic_op_release(atomic_sub_return, __VA_ARGS__)
138#endif
139
140#ifndef atomic_sub_return
141#define atomic_sub_return(...) \
142 __atomic_op_fence(atomic_sub_return, __VA_ARGS__)
143#endif
144#endif /* atomic_sub_return_relaxed */
145
Davidlohr Bueso63ab7bd2015-09-30 13:03:11 -0700146/* atomic_dec_return_relaxed */
147#ifndef atomic_dec_return_relaxed
148#define atomic_dec_return_relaxed atomic_dec_return
149#define atomic_dec_return_acquire atomic_dec_return
150#define atomic_dec_return_release atomic_dec_return
151
152#else /* atomic_dec_return_relaxed */
153
154#ifndef atomic_dec_return_acquire
155#define atomic_dec_return_acquire(...) \
156 __atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
157#endif
158
159#ifndef atomic_dec_return_release
160#define atomic_dec_return_release(...) \
161 __atomic_op_release(atomic_dec_return, __VA_ARGS__)
162#endif
163
164#ifndef atomic_dec_return
165#define atomic_dec_return(...) \
166 __atomic_op_fence(atomic_dec_return, __VA_ARGS__)
167#endif
168#endif /* atomic_dec_return_relaxed */
169
Peter Zijlstra28aa2bd2016-04-18 00:54:38 +0200170
171/* atomic_fetch_add_relaxed */
172#ifndef atomic_fetch_add_relaxed
173#define atomic_fetch_add_relaxed atomic_fetch_add
174#define atomic_fetch_add_acquire atomic_fetch_add
175#define atomic_fetch_add_release atomic_fetch_add
176
177#else /* atomic_fetch_add_relaxed */
178
179#ifndef atomic_fetch_add_acquire
180#define atomic_fetch_add_acquire(...) \
181 __atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
182#endif
183
184#ifndef atomic_fetch_add_release
185#define atomic_fetch_add_release(...) \
186 __atomic_op_release(atomic_fetch_add, __VA_ARGS__)
187#endif
188
189#ifndef atomic_fetch_add
190#define atomic_fetch_add(...) \
191 __atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
192#endif
193#endif /* atomic_fetch_add_relaxed */
194
Davidlohr Buesof0662862016-06-28 14:56:51 -0700195/* atomic_fetch_inc_relaxed */
196#ifndef atomic_fetch_inc_relaxed
197
198#ifndef atomic_fetch_inc
199#define atomic_fetch_inc(v) atomic_fetch_add(1, (v))
200#define atomic_fetch_inc_relaxed(v) atomic_fetch_add_relaxed(1, (v))
201#define atomic_fetch_inc_acquire(v) atomic_fetch_add_acquire(1, (v))
202#define atomic_fetch_inc_release(v) atomic_fetch_add_release(1, (v))
203#else /* atomic_fetch_inc */
204#define atomic_fetch_inc_relaxed atomic_fetch_inc
205#define atomic_fetch_inc_acquire atomic_fetch_inc
206#define atomic_fetch_inc_release atomic_fetch_inc
207#endif /* atomic_fetch_inc */
208
209#else /* atomic_fetch_inc_relaxed */
210
211#ifndef atomic_fetch_inc_acquire
212#define atomic_fetch_inc_acquire(...) \
213 __atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
214#endif
215
216#ifndef atomic_fetch_inc_release
217#define atomic_fetch_inc_release(...) \
218 __atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
219#endif
220
221#ifndef atomic_fetch_inc
222#define atomic_fetch_inc(...) \
223 __atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
224#endif
225#endif /* atomic_fetch_inc_relaxed */
226
Peter Zijlstra28aa2bd2016-04-18 00:54:38 +0200227/* atomic_fetch_sub_relaxed */
228#ifndef atomic_fetch_sub_relaxed
229#define atomic_fetch_sub_relaxed atomic_fetch_sub
230#define atomic_fetch_sub_acquire atomic_fetch_sub
231#define atomic_fetch_sub_release atomic_fetch_sub
232
233#else /* atomic_fetch_sub_relaxed */
234
235#ifndef atomic_fetch_sub_acquire
236#define atomic_fetch_sub_acquire(...) \
237 __atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
238#endif
239
240#ifndef atomic_fetch_sub_release
241#define atomic_fetch_sub_release(...) \
242 __atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
243#endif
244
245#ifndef atomic_fetch_sub
246#define atomic_fetch_sub(...) \
247 __atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
248#endif
249#endif /* atomic_fetch_sub_relaxed */
250
Davidlohr Buesof0662862016-06-28 14:56:51 -0700251/* atomic_fetch_dec_relaxed */
252#ifndef atomic_fetch_dec_relaxed
253
254#ifndef atomic_fetch_dec
255#define atomic_fetch_dec(v) atomic_fetch_sub(1, (v))
256#define atomic_fetch_dec_relaxed(v) atomic_fetch_sub_relaxed(1, (v))
257#define atomic_fetch_dec_acquire(v) atomic_fetch_sub_acquire(1, (v))
258#define atomic_fetch_dec_release(v) atomic_fetch_sub_release(1, (v))
259#else /* atomic_fetch_dec */
260#define atomic_fetch_dec_relaxed atomic_fetch_dec
261#define atomic_fetch_dec_acquire atomic_fetch_dec
262#define atomic_fetch_dec_release atomic_fetch_dec
263#endif /* atomic_fetch_dec */
264
265#else /* atomic_fetch_dec_relaxed */
266
267#ifndef atomic_fetch_dec_acquire
268#define atomic_fetch_dec_acquire(...) \
269 __atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
270#endif
271
272#ifndef atomic_fetch_dec_release
273#define atomic_fetch_dec_release(...) \
274 __atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
275#endif
276
277#ifndef atomic_fetch_dec
278#define atomic_fetch_dec(...) \
279 __atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
280#endif
281#endif /* atomic_fetch_dec_relaxed */
282
Peter Zijlstra28aa2bd2016-04-18 00:54:38 +0200283/* atomic_fetch_or_relaxed */
284#ifndef atomic_fetch_or_relaxed
285#define atomic_fetch_or_relaxed atomic_fetch_or
286#define atomic_fetch_or_acquire atomic_fetch_or
287#define atomic_fetch_or_release atomic_fetch_or
288
289#else /* atomic_fetch_or_relaxed */
290
291#ifndef atomic_fetch_or_acquire
292#define atomic_fetch_or_acquire(...) \
293 __atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
294#endif
295
296#ifndef atomic_fetch_or_release
297#define atomic_fetch_or_release(...) \
298 __atomic_op_release(atomic_fetch_or, __VA_ARGS__)
299#endif
300
301#ifndef atomic_fetch_or
302#define atomic_fetch_or(...) \
303 __atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
304#endif
305#endif /* atomic_fetch_or_relaxed */
306
307/* atomic_fetch_and_relaxed */
308#ifndef atomic_fetch_and_relaxed
309#define atomic_fetch_and_relaxed atomic_fetch_and
310#define atomic_fetch_and_acquire atomic_fetch_and
311#define atomic_fetch_and_release atomic_fetch_and
312
313#else /* atomic_fetch_and_relaxed */
314
315#ifndef atomic_fetch_and_acquire
316#define atomic_fetch_and_acquire(...) \
317 __atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
318#endif
319
320#ifndef atomic_fetch_and_release
321#define atomic_fetch_and_release(...) \
322 __atomic_op_release(atomic_fetch_and, __VA_ARGS__)
323#endif
324
325#ifndef atomic_fetch_and
326#define atomic_fetch_and(...) \
327 __atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
328#endif
329#endif /* atomic_fetch_and_relaxed */
330
331#ifdef atomic_andnot
332/* atomic_fetch_andnot_relaxed */
333#ifndef atomic_fetch_andnot_relaxed
334#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
335#define atomic_fetch_andnot_acquire atomic_fetch_andnot
336#define atomic_fetch_andnot_release atomic_fetch_andnot
337
338#else /* atomic_fetch_andnot_relaxed */
339
340#ifndef atomic_fetch_andnot_acquire
341#define atomic_fetch_andnot_acquire(...) \
342 __atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
343#endif
344
345#ifndef atomic_fetch_andnot_release
346#define atomic_fetch_andnot_release(...) \
347 __atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
348#endif
349
350#ifndef atomic_fetch_andnot
351#define atomic_fetch_andnot(...) \
352 __atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
353#endif
354#endif /* atomic_fetch_andnot_relaxed */
355#endif /* atomic_andnot */
356
357/* atomic_fetch_xor_relaxed */
358#ifndef atomic_fetch_xor_relaxed
359#define atomic_fetch_xor_relaxed atomic_fetch_xor
360#define atomic_fetch_xor_acquire atomic_fetch_xor
361#define atomic_fetch_xor_release atomic_fetch_xor
362
363#else /* atomic_fetch_xor_relaxed */
364
365#ifndef atomic_fetch_xor_acquire
366#define atomic_fetch_xor_acquire(...) \
367 __atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
368#endif
369
370#ifndef atomic_fetch_xor_release
371#define atomic_fetch_xor_release(...) \
372 __atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
373#endif
374
375#ifndef atomic_fetch_xor
376#define atomic_fetch_xor(...) \
377 __atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
378#endif
379#endif /* atomic_fetch_xor_relaxed */
380
381
Will Deacon654672d2015-08-06 17:54:37 +0100382/* atomic_xchg_relaxed */
383#ifndef atomic_xchg_relaxed
384#define atomic_xchg_relaxed atomic_xchg
385#define atomic_xchg_acquire atomic_xchg
386#define atomic_xchg_release atomic_xchg
387
388#else /* atomic_xchg_relaxed */
389
390#ifndef atomic_xchg_acquire
391#define atomic_xchg_acquire(...) \
392 __atomic_op_acquire(atomic_xchg, __VA_ARGS__)
393#endif
394
395#ifndef atomic_xchg_release
396#define atomic_xchg_release(...) \
397 __atomic_op_release(atomic_xchg, __VA_ARGS__)
398#endif
399
400#ifndef atomic_xchg
401#define atomic_xchg(...) \
402 __atomic_op_fence(atomic_xchg, __VA_ARGS__)
403#endif
404#endif /* atomic_xchg_relaxed */
405
406/* atomic_cmpxchg_relaxed */
407#ifndef atomic_cmpxchg_relaxed
408#define atomic_cmpxchg_relaxed atomic_cmpxchg
409#define atomic_cmpxchg_acquire atomic_cmpxchg
410#define atomic_cmpxchg_release atomic_cmpxchg
411
412#else /* atomic_cmpxchg_relaxed */
413
414#ifndef atomic_cmpxchg_acquire
415#define atomic_cmpxchg_acquire(...) \
416 __atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
417#endif
418
419#ifndef atomic_cmpxchg_release
420#define atomic_cmpxchg_release(...) \
421 __atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
422#endif
423
424#ifndef atomic_cmpxchg
425#define atomic_cmpxchg(...) \
426 __atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
427#endif
428#endif /* atomic_cmpxchg_relaxed */
429
#ifndef atomic_try_cmpxchg

/*
 * Fallback atomic_try_cmpxchg(): emulate the boolean-returning compare-
 * and-exchange on top of atomic_cmpxchg##type.
 *
 * @_po points at the caller's expected-old value; on failure it is
 * updated with the value actually found, so the caller can retry
 * without issuing a separate atomic_read().  @_po is evaluated into a
 * local first so the macro argument is only expanded once.
 *
 * Evaluates to true if the exchange happened, false otherwise.
 */
#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
/* Arch provides the fully ordered op; it covers the weaker orderings too. */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
452
Will Deacon654672d2015-08-06 17:54:37 +0100453/* cmpxchg_relaxed */
454#ifndef cmpxchg_relaxed
455#define cmpxchg_relaxed cmpxchg
456#define cmpxchg_acquire cmpxchg
457#define cmpxchg_release cmpxchg
458
459#else /* cmpxchg_relaxed */
460
461#ifndef cmpxchg_acquire
462#define cmpxchg_acquire(...) \
463 __atomic_op_acquire(cmpxchg, __VA_ARGS__)
464#endif
465
466#ifndef cmpxchg_release
467#define cmpxchg_release(...) \
468 __atomic_op_release(cmpxchg, __VA_ARGS__)
469#endif
470
471#ifndef cmpxchg
472#define cmpxchg(...) \
473 __atomic_op_fence(cmpxchg, __VA_ARGS__)
474#endif
475#endif /* cmpxchg_relaxed */
476
477/* cmpxchg64_relaxed */
478#ifndef cmpxchg64_relaxed
479#define cmpxchg64_relaxed cmpxchg64
480#define cmpxchg64_acquire cmpxchg64
481#define cmpxchg64_release cmpxchg64
482
483#else /* cmpxchg64_relaxed */
484
485#ifndef cmpxchg64_acquire
486#define cmpxchg64_acquire(...) \
487 __atomic_op_acquire(cmpxchg64, __VA_ARGS__)
488#endif
489
490#ifndef cmpxchg64_release
491#define cmpxchg64_release(...) \
492 __atomic_op_release(cmpxchg64, __VA_ARGS__)
493#endif
494
495#ifndef cmpxchg64
496#define cmpxchg64(...) \
497 __atomic_op_fence(cmpxchg64, __VA_ARGS__)
498#endif
499#endif /* cmpxchg64_relaxed */
500
501/* xchg_relaxed */
502#ifndef xchg_relaxed
503#define xchg_relaxed xchg
504#define xchg_acquire xchg
505#define xchg_release xchg
506
507#else /* xchg_relaxed */
508
509#ifndef xchg_acquire
510#define xchg_acquire(...) __atomic_op_acquire(xchg, __VA_ARGS__)
511#endif
512
513#ifndef xchg_release
514#define xchg_release(...) __atomic_op_release(xchg, __VA_ARGS__)
515#endif
516
517#ifndef xchg
518#define xchg(...) __atomic_op_fence(xchg, __VA_ARGS__)
519#endif
520#endif /* xchg_relaxed */
Eric Dumazet3f9d35b2010-11-11 14:05:08 -0800521
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	/*
	 * atomic_fetch_add_unless() returns the pre-add value; it equals
	 * @u exactly when the add did not happen.
	 */
	return atomic_fetch_add_unless(v, a, u) != u;
}
535
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
/* Generic form: "not zero" is just add_unless with @u == 0. */
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
Arun Sharma600634972011-07-26 16:09:06 -0700546
#ifndef atomic_andnot
/*
 * Fallbacks for architectures without a native andnot (clear-bits)
 * primitive: implement v &= ~i and its fetch_ variants by forwarding
 * to the corresponding atomic_and op with the mask inverted.
 */
static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#endif
573
Al Viro07b8ce12011-06-20 10:52:57 -0400574#ifndef atomic_inc_unless_negative
575static inline int atomic_inc_unless_negative(atomic_t *p)
576{
577 int v, v1;
578 for (v = 0; v >= 0; v = v1) {
579 v1 = atomic_cmpxchg(p, v, v + 1);
580 if (likely(v1 == v))
581 return 1;
582 }
583 return 0;
584}
585#endif
586
587#ifndef atomic_dec_unless_positive
588static inline int atomic_dec_unless_positive(atomic_t *p)
589{
590 int v, v1;
591 for (v = 0; v <= 0; v = v1) {
592 v1 = atomic_cmpxchg(p, v, v - 1);
593 if (likely(v1 == v))
594 return 1;
595 }
596 return 0;
597}
598#endif
599
Shaohua Lie79bee22012-10-08 16:32:18 -0700600/*
601 * atomic_dec_if_positive - decrement by 1 if old value positive
602 * @v: pointer of type atomic_t
603 *
604 * The function returns the old value of *v minus 1, even if
605 * the atomic variable, v, was not decremented.
606 */
607#ifndef atomic_dec_if_positive
608static inline int atomic_dec_if_positive(atomic_t *v)
609{
610 int c, old, dec;
611 c = atomic_read(v);
612 for (;;) {
613 dec = c - 1;
614 if (unlikely(dec < 0))
615 break;
616 old = atomic_cmpxchg((v), c, dec);
617 if (likely(old == c))
618 break;
619 c = old;
620 }
621 return dec;
622}
623#endif
624
/* Spin until the condition expression @c on @v->counter becomes true. */
#define atomic_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))
627
Arun Sharma78477772011-07-26 16:09:08 -0700628#ifdef CONFIG_GENERIC_ATOMIC64
629#include <asm-generic/atomic64.h>
630#endif
Peter Zijlstrade9e4322015-04-24 01:12:32 +0200631
Peter Zijlstrae1213332016-04-18 00:52:13 +0200632#ifndef atomic64_read_acquire
633#define atomic64_read_acquire(v) smp_load_acquire(&(v)->counter)
634#endif
635
636#ifndef atomic64_set_release
637#define atomic64_set_release(v, i) smp_store_release(&(v)->counter, (i))
638#endif
639
640/* atomic64_add_return_relaxed */
641#ifndef atomic64_add_return_relaxed
642#define atomic64_add_return_relaxed atomic64_add_return
643#define atomic64_add_return_acquire atomic64_add_return
644#define atomic64_add_return_release atomic64_add_return
645
646#else /* atomic64_add_return_relaxed */
647
648#ifndef atomic64_add_return_acquire
649#define atomic64_add_return_acquire(...) \
650 __atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
651#endif
652
653#ifndef atomic64_add_return_release
654#define atomic64_add_return_release(...) \
655 __atomic_op_release(atomic64_add_return, __VA_ARGS__)
656#endif
657
658#ifndef atomic64_add_return
659#define atomic64_add_return(...) \
660 __atomic_op_fence(atomic64_add_return, __VA_ARGS__)
661#endif
662#endif /* atomic64_add_return_relaxed */
663
664/* atomic64_inc_return_relaxed */
665#ifndef atomic64_inc_return_relaxed
666#define atomic64_inc_return_relaxed atomic64_inc_return
667#define atomic64_inc_return_acquire atomic64_inc_return
668#define atomic64_inc_return_release atomic64_inc_return
669
670#else /* atomic64_inc_return_relaxed */
671
672#ifndef atomic64_inc_return_acquire
673#define atomic64_inc_return_acquire(...) \
674 __atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
675#endif
676
677#ifndef atomic64_inc_return_release
678#define atomic64_inc_return_release(...) \
679 __atomic_op_release(atomic64_inc_return, __VA_ARGS__)
680#endif
681
682#ifndef atomic64_inc_return
683#define atomic64_inc_return(...) \
684 __atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
685#endif
686#endif /* atomic64_inc_return_relaxed */
687
688
689/* atomic64_sub_return_relaxed */
690#ifndef atomic64_sub_return_relaxed
691#define atomic64_sub_return_relaxed atomic64_sub_return
692#define atomic64_sub_return_acquire atomic64_sub_return
693#define atomic64_sub_return_release atomic64_sub_return
694
695#else /* atomic64_sub_return_relaxed */
696
697#ifndef atomic64_sub_return_acquire
698#define atomic64_sub_return_acquire(...) \
699 __atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
700#endif
701
702#ifndef atomic64_sub_return_release
703#define atomic64_sub_return_release(...) \
704 __atomic_op_release(atomic64_sub_return, __VA_ARGS__)
705#endif
706
707#ifndef atomic64_sub_return
708#define atomic64_sub_return(...) \
709 __atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
710#endif
711#endif /* atomic64_sub_return_relaxed */
712
713/* atomic64_dec_return_relaxed */
714#ifndef atomic64_dec_return_relaxed
715#define atomic64_dec_return_relaxed atomic64_dec_return
716#define atomic64_dec_return_acquire atomic64_dec_return
717#define atomic64_dec_return_release atomic64_dec_return
718
719#else /* atomic64_dec_return_relaxed */
720
721#ifndef atomic64_dec_return_acquire
722#define atomic64_dec_return_acquire(...) \
723 __atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
724#endif
725
726#ifndef atomic64_dec_return_release
727#define atomic64_dec_return_release(...) \
728 __atomic_op_release(atomic64_dec_return, __VA_ARGS__)
729#endif
730
731#ifndef atomic64_dec_return
732#define atomic64_dec_return(...) \
733 __atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
734#endif
735#endif /* atomic64_dec_return_relaxed */
736
Peter Zijlstra28aa2bd2016-04-18 00:54:38 +0200737
738/* atomic64_fetch_add_relaxed */
739#ifndef atomic64_fetch_add_relaxed
740#define atomic64_fetch_add_relaxed atomic64_fetch_add
741#define atomic64_fetch_add_acquire atomic64_fetch_add
742#define atomic64_fetch_add_release atomic64_fetch_add
743
744#else /* atomic64_fetch_add_relaxed */
745
746#ifndef atomic64_fetch_add_acquire
747#define atomic64_fetch_add_acquire(...) \
748 __atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
749#endif
750
751#ifndef atomic64_fetch_add_release
752#define atomic64_fetch_add_release(...) \
753 __atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
754#endif
755
756#ifndef atomic64_fetch_add
757#define atomic64_fetch_add(...) \
758 __atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
759#endif
760#endif /* atomic64_fetch_add_relaxed */
761
Davidlohr Buesof0662862016-06-28 14:56:51 -0700762/* atomic64_fetch_inc_relaxed */
763#ifndef atomic64_fetch_inc_relaxed
764
765#ifndef atomic64_fetch_inc
766#define atomic64_fetch_inc(v) atomic64_fetch_add(1, (v))
767#define atomic64_fetch_inc_relaxed(v) atomic64_fetch_add_relaxed(1, (v))
768#define atomic64_fetch_inc_acquire(v) atomic64_fetch_add_acquire(1, (v))
769#define atomic64_fetch_inc_release(v) atomic64_fetch_add_release(1, (v))
770#else /* atomic64_fetch_inc */
771#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
772#define atomic64_fetch_inc_acquire atomic64_fetch_inc
773#define atomic64_fetch_inc_release atomic64_fetch_inc
774#endif /* atomic64_fetch_inc */
775
776#else /* atomic64_fetch_inc_relaxed */
777
778#ifndef atomic64_fetch_inc_acquire
779#define atomic64_fetch_inc_acquire(...) \
780 __atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
781#endif
782
783#ifndef atomic64_fetch_inc_release
784#define atomic64_fetch_inc_release(...) \
785 __atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
786#endif
787
788#ifndef atomic64_fetch_inc
789#define atomic64_fetch_inc(...) \
790 __atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
791#endif
792#endif /* atomic64_fetch_inc_relaxed */
793
Peter Zijlstra28aa2bd2016-04-18 00:54:38 +0200794/* atomic64_fetch_sub_relaxed */
795#ifndef atomic64_fetch_sub_relaxed
796#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
797#define atomic64_fetch_sub_acquire atomic64_fetch_sub
798#define atomic64_fetch_sub_release atomic64_fetch_sub
799
800#else /* atomic64_fetch_sub_relaxed */
801
802#ifndef atomic64_fetch_sub_acquire
803#define atomic64_fetch_sub_acquire(...) \
804 __atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
805#endif
806
807#ifndef atomic64_fetch_sub_release
808#define atomic64_fetch_sub_release(...) \
809 __atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
810#endif
811
812#ifndef atomic64_fetch_sub
813#define atomic64_fetch_sub(...) \
814 __atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
815#endif
816#endif /* atomic64_fetch_sub_relaxed */
817
Davidlohr Buesof0662862016-06-28 14:56:51 -0700818/* atomic64_fetch_dec_relaxed */
819#ifndef atomic64_fetch_dec_relaxed
820
821#ifndef atomic64_fetch_dec
822#define atomic64_fetch_dec(v) atomic64_fetch_sub(1, (v))
823#define atomic64_fetch_dec_relaxed(v) atomic64_fetch_sub_relaxed(1, (v))
824#define atomic64_fetch_dec_acquire(v) atomic64_fetch_sub_acquire(1, (v))
825#define atomic64_fetch_dec_release(v) atomic64_fetch_sub_release(1, (v))
826#else /* atomic64_fetch_dec */
827#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
828#define atomic64_fetch_dec_acquire atomic64_fetch_dec
829#define atomic64_fetch_dec_release atomic64_fetch_dec
830#endif /* atomic64_fetch_dec */
831
832#else /* atomic64_fetch_dec_relaxed */
833
834#ifndef atomic64_fetch_dec_acquire
835#define atomic64_fetch_dec_acquire(...) \
836 __atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
837#endif
838
839#ifndef atomic64_fetch_dec_release
840#define atomic64_fetch_dec_release(...) \
841 __atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
842#endif
843
844#ifndef atomic64_fetch_dec
845#define atomic64_fetch_dec(...) \
846 __atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
847#endif
848#endif /* atomic64_fetch_dec_relaxed */
849
Peter Zijlstra28aa2bd2016-04-18 00:54:38 +0200850/* atomic64_fetch_or_relaxed */
851#ifndef atomic64_fetch_or_relaxed
852#define atomic64_fetch_or_relaxed atomic64_fetch_or
853#define atomic64_fetch_or_acquire atomic64_fetch_or
854#define atomic64_fetch_or_release atomic64_fetch_or
855
856#else /* atomic64_fetch_or_relaxed */
857
858#ifndef atomic64_fetch_or_acquire
859#define atomic64_fetch_or_acquire(...) \
860 __atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
861#endif
862
863#ifndef atomic64_fetch_or_release
864#define atomic64_fetch_or_release(...) \
865 __atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
866#endif
867
868#ifndef atomic64_fetch_or
869#define atomic64_fetch_or(...) \
870 __atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
871#endif
872#endif /* atomic64_fetch_or_relaxed */
873
874/* atomic64_fetch_and_relaxed */
875#ifndef atomic64_fetch_and_relaxed
876#define atomic64_fetch_and_relaxed atomic64_fetch_and
877#define atomic64_fetch_and_acquire atomic64_fetch_and
878#define atomic64_fetch_and_release atomic64_fetch_and
879
880#else /* atomic64_fetch_and_relaxed */
881
882#ifndef atomic64_fetch_and_acquire
883#define atomic64_fetch_and_acquire(...) \
884 __atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
885#endif
886
887#ifndef atomic64_fetch_and_release
888#define atomic64_fetch_and_release(...) \
889 __atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
890#endif
891
892#ifndef atomic64_fetch_and
893#define atomic64_fetch_and(...) \
894 __atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
895#endif
896#endif /* atomic64_fetch_and_relaxed */
897
898#ifdef atomic64_andnot
899/* atomic64_fetch_andnot_relaxed */
900#ifndef atomic64_fetch_andnot_relaxed
901#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
902#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
903#define atomic64_fetch_andnot_release atomic64_fetch_andnot
904
905#else /* atomic64_fetch_andnot_relaxed */
906
907#ifndef atomic64_fetch_andnot_acquire
908#define atomic64_fetch_andnot_acquire(...) \
909 __atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
910#endif
911
912#ifndef atomic64_fetch_andnot_release
913#define atomic64_fetch_andnot_release(...) \
914 __atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
915#endif
916
917#ifndef atomic64_fetch_andnot
918#define atomic64_fetch_andnot(...) \
919 __atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
920#endif
921#endif /* atomic64_fetch_andnot_relaxed */
922#endif /* atomic64_andnot */
923
924/* atomic64_fetch_xor_relaxed */
925#ifndef atomic64_fetch_xor_relaxed
926#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
927#define atomic64_fetch_xor_acquire atomic64_fetch_xor
928#define atomic64_fetch_xor_release atomic64_fetch_xor
929
930#else /* atomic64_fetch_xor_relaxed */
931
932#ifndef atomic64_fetch_xor_acquire
933#define atomic64_fetch_xor_acquire(...) \
934 __atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
935#endif
936
937#ifndef atomic64_fetch_xor_release
938#define atomic64_fetch_xor_release(...) \
939 __atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
940#endif
941
942#ifndef atomic64_fetch_xor
943#define atomic64_fetch_xor(...) \
944 __atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
945#endif
946#endif /* atomic64_fetch_xor_relaxed */
947
948
Peter Zijlstrae1213332016-04-18 00:52:13 +0200949/* atomic64_xchg_relaxed */
950#ifndef atomic64_xchg_relaxed
951#define atomic64_xchg_relaxed atomic64_xchg
952#define atomic64_xchg_acquire atomic64_xchg
953#define atomic64_xchg_release atomic64_xchg
954
955#else /* atomic64_xchg_relaxed */
956
957#ifndef atomic64_xchg_acquire
958#define atomic64_xchg_acquire(...) \
959 __atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
960#endif
961
962#ifndef atomic64_xchg_release
963#define atomic64_xchg_release(...) \
964 __atomic_op_release(atomic64_xchg, __VA_ARGS__)
965#endif
966
967#ifndef atomic64_xchg
968#define atomic64_xchg(...) \
969 __atomic_op_fence(atomic64_xchg, __VA_ARGS__)
970#endif
971#endif /* atomic64_xchg_relaxed */
972
973/* atomic64_cmpxchg_relaxed */
974#ifndef atomic64_cmpxchg_relaxed
975#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
976#define atomic64_cmpxchg_acquire atomic64_cmpxchg
977#define atomic64_cmpxchg_release atomic64_cmpxchg
978
979#else /* atomic64_cmpxchg_relaxed */
980
981#ifndef atomic64_cmpxchg_acquire
982#define atomic64_cmpxchg_acquire(...) \
983 __atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
984#endif
985
986#ifndef atomic64_cmpxchg_release
987#define atomic64_cmpxchg_release(...) \
988 __atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
989#endif
990
991#ifndef atomic64_cmpxchg
992#define atomic64_cmpxchg(...) \
993 __atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
994#endif
995#endif /* atomic64_cmpxchg_relaxed */
996
#ifndef atomic64_try_cmpxchg

/*
 * 64-bit mirror of __atomic_try_cmpxchg(): emulate the boolean-returning
 * compare-and-exchange on top of atomic64_cmpxchg##type.  On failure the
 * caller's expected-old value at @_po is updated with the value actually
 * found, avoiding a separate re-read on retry loops.
 *
 * Evaluates to true if the exchange happened, false otherwise.
 */
#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
/* Arch provides the fully ordered op; it covers the weaker orderings too. */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */
1019
#ifndef atomic64_andnot
/*
 * 64-bit fallbacks for architectures without a native andnot (clear-bits)
 * primitive: implement v &= ~i and its fetch_ variants by forwarding to
 * the corresponding atomic64_and op with the mask inverted.
 */
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#endif
1046
Will Deaconfcfdfe302018-04-26 11:34:15 +01001047#define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
Will Deacon4df714b2017-10-12 13:20:48 +01001048#define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))
1049
Peter Zijlstra90fe6512015-09-18 15:04:59 +02001050#include <asm-generic/atomic-long.h>
1051
Eric Dumazet3f9d35b2010-11-11 14:05:08 -08001052#endif /* _LINUX_ATOMIC_H */