/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <linux/types.h>

#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
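
/*
 * Illustrative sketch (not part of this header): the two helpers above
 * implement the classic message-passing pattern. A producer publishes
 * with RELEASE semantics and a consumer observes with ACQUIRE
 * semantics; "payload" and "ready" are hypothetical names.
 *
 *	static int payload;
 *	static atomic_t ready = ATOMIC_INIT(0);
 *
 *	void producer(void)
 *	{
 *		payload = 42;
 *		atomic_set_release(&ready, 1);	// orders the payload store
 *	}
 *
 *	void consumer(void)
 *	{
 *		if (atomic_read_acquire(&ready))
 *			BUG_ON(payload != 42);	// pairs with the release
 *	}
 */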

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * In addition, if an architecture has a special barrier for
 * acquire/release, it can implement its own __atomic_op_*() helpers and
 * use the same framework for building the variants.
 *
 * If an architecture overrides __atomic_op_acquire() it will probably
 * want to define smp_mb__after_spinlock().
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif
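
/*
 * For illustration only: given a relaxed primitive, the helpers above
 * derive the other orderings mechanically. For example,
 *
 *	atomic_add_return_acquire(i, v)
 *
 * expands (via __atomic_op_acquire) to
 *
 *	({
 *		int __ret = atomic_add_return_relaxed(i, v);
 *		smp_mb__after_atomic();
 *		__ret;
 *	})
 *
 * so an architecture only has to supply the relaxed form, overriding
 * the generic builders where it has cheaper barriers.
 */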

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed	atomic_add_return
#define atomic_add_return_acquire	atomic_add_return
#define atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define atomic_inc_return_relaxed	atomic_inc_return
#define atomic_inc_return_acquire	atomic_inc_return
#define atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return
#define atomic_sub_return_acquire	atomic_sub_return
#define atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */
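
/*
 * Usage sketch (hypothetical names): pick the weakest variant that is
 * still correct for the surrounding code.
 *
 *	// statistics only; the value orders nothing
 *	total = atomic_add_return_relaxed(len, &stats->bytes);
 *
 *	// dropping the last unit must publish all prior stores
 *	if (atomic_sub_return_release(1, &work->pending) == 0)
 *		complete(&work->done);
 */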

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define atomic_dec_return_relaxed	atomic_dec_return
#define atomic_dec_return_acquire	atomic_dec_return
#define atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */

/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or
#define atomic_fetch_or_acquire		atomic_fetch_or
#define atomic_fetch_or_release		atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)				\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)					\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */

/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed		atomic_xchg
#define atomic_xchg_acquire		atomic_xchg
#define atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed		atomic_cmpxchg
#define atomic_cmpxchg_acquire		atomic_cmpxchg
#define atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
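
/*
 * Usage sketch: atomic_try_cmpxchg() updates the old-value pointer on
 * failure, which keeps compare-and-swap loops compact. A hypothetical
 * saturating increment:
 *
 *	static void atomic_inc_saturated(atomic_t *v)
 *	{
 *		int old = atomic_read(v);
 *
 *		do {
 *			if (old == INT_MAX)
 *				return;
 *		} while (!atomic_try_cmpxchg(v, &old, old + 1));
 *	}
 *
 * On failure "old" already holds the current value, so the loop needs
 * no explicit re-read.
 */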

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed			cmpxchg
#define cmpxchg_acquire			cmpxchg
#define cmpxchg_release			cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed		cmpxchg64
#define cmpxchg64_acquire		cmpxchg64
#define cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed			xchg
#define xchg_acquire			xchg
#define xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic_fetch_add_unless
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif
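
/*
 * Usage sketch (hypothetical): a counter capped at MAX_SLOTS, built on
 * atomic_fetch_add_unless(); the add is refused once the cap is hit and
 * the old value tells the caller whether a slot was taken.
 *
 *	static bool take_slot(atomic_t *nr_slots)
 *	{
 *		return atomic_fetch_add_unless(nr_slots, 1, MAX_SLOTS) != MAX_SLOTS;
 *	}
 */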

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic_add_unless(atomic_t *v, int a, int u)
{
	return atomic_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
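
/*
 * Usage sketch: atomic_inc_not_zero() is the usual way to take a
 * reference on an object found by a lockless lookup, where a zero
 * refcount means the object is already being torn down ("obj", "tree"
 * and "index" are hypothetical):
 *
 *	rcu_read_lock();
 *	obj = radix_tree_lookup(&tree, index);
 *	if (obj && !atomic_inc_not_zero(&obj->refcnt))
 *		obj = NULL;	// lost the race against the final put
 *	rcu_read_unlock();
 */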

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_inc_and_test
static inline bool atomic_inc_and_test(atomic_t *v)
{
	return atomic_inc_return(v) == 0;
}
#endif

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic_dec_and_test
static inline bool atomic_dec_and_test(atomic_t *v)
{
	return atomic_dec_return(v) == 0;
}
#endif
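
/*
 * Usage sketch: atomic_dec_and_test() implements the put side of a
 * reference count; only the caller that drops the count to zero frees
 * the object ("struct obj" is hypothetical):
 *
 *	static void obj_put(struct obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcnt))
 *			kfree(obj);
 *	}
 */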

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_sub_and_test
static inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	return atomic_sub_return(i, v) == 0;
}
#endif

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when the
 * result is greater than or equal to zero.
 */
#ifndef atomic_add_negative
static inline bool atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
#endif

#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#endif
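
/*
 * Usage sketch: atomic_andnot() clears a mask of bits in one atomic
 * step, and the fetch_ variants also return the old value, e.g. to
 * tell whether this caller was the one to clear a flag (FLAG_PENDING,
 * "state" and process_pending() are hypothetical):
 *
 *	if (atomic_fetch_andnot(FLAG_PENDING, &state) & FLAG_PENDING)
 *		process_pending();	// we observed and cleared the flag
 */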

#ifndef atomic_inc_unless_negative
static inline bool atomic_inc_unless_negative(atomic_t *p)
{
	int v, v1;

	for (v = 0; v >= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v + 1);
		if (likely(v1 == v))
			return true;
	}
	return false;
}
#endif

#ifndef atomic_dec_unless_positive
static inline bool atomic_dec_unless_positive(atomic_t *p)
{
	int v, v1;

	for (v = 0; v <= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v - 1);
		if (likely(v1 == v))
			return true;
	}
	return false;
}
#endif

/**
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * Returns the old value of @v minus 1, even if @v was not
 * decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;

	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
#endif
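
/*
 * Usage sketch: atomic_dec_if_positive() acts like a "trydown" on a
 * counting resource; a negative return means nothing was taken and the
 * counter is untouched ("sem" is a hypothetical atomic_t, not a
 * struct semaphore):
 *
 *	if (atomic_dec_if_positive(&sem) < 0)
 *		return -EBUSY;
 */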

#define atomic_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))
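
/*
 * Usage sketch: spin until another CPU publishes a value, taking
 * ACQUIRE ordering on the read that finally observes it. As with
 * smp_cond_load_acquire(), VAL names the freshly loaded value inside
 * the condition ("pending" is hypothetical):
 *
 *	atomic_cond_read_acquire(&pending, VAL == 0);
 *	// all stores made before "pending" reached zero are now visible
 */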

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed	atomic64_add_return
#define atomic64_add_return_acquire	atomic64_add_return
#define atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define atomic64_inc_return_relaxed	atomic64_inc_return
#define atomic64_inc_return_acquire	atomic64_inc_return
#define atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return
#define atomic64_sub_return_acquire	atomic64_sub_return
#define atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define atomic64_dec_return_relaxed	atomic64_dec_return
#define atomic64_dec_return_acquire	atomic64_dec_return
#define atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */

/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)				\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)					\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */

/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed		atomic64_xchg
#define atomic64_xchg_acquire		atomic64_xchg
#define atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic64_fetch_add_unless
static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
						  long long u)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif

/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	return atomic64_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic64_inc_not_zero
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
#endif

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_inc_and_test
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
#endif

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic64_dec_and_test
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
#endif

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_sub_and_test
static inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
#endif

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when the
 * result is greater than or equal to zero.
 */
#ifndef atomic64_add_negative
static inline bool atomic64_add_negative(long long i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
#endif

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#endif

#define atomic64_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic64_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */