/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <linux/types.h>

#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

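/*
 * Illustrative sketch (not part of this interface): a hypothetical
 * producer/consumer pair showing how the _release/_acquire suffixes are
 * typically paired.  The @ready flag and the prepare_data()/use_data()
 * helpers are made-up names for this example only.
 *
 *	static atomic_t ready = ATOMIC_INIT(0);
 *
 *	void producer(void)
 *	{
 *		prepare_data();
 *		atomic_set_release(&ready, 1);
 *	}
 *
 *	void consumer(void)
 *	{
 *		while (!atomic_read_acquire(&ready))
 *			cpu_relax();
 *		use_data();
 *	}
 *
 * The RELEASE store orders prepare_data() before the flag update and the
 * ACQUIRE load orders the flag test before use_data(), so the consumer
 * always observes fully initialised data without further barriers.
 */
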
#ifndef atomic_read_acquire
#define atomic_read_acquire(v) smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i) smp_store_release(&(v)->counter, (i))
#endif

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * In addition, if an arch has a special barrier for acquire/release, it
 * can implement its own __atomic_op_* helpers and reuse the same framework
 * for building the variants.
 *
 * If an architecture overrides __atomic_op_acquire() it will probably want
 * to define smp_mb__after_spinlock().
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...) \
({ \
        typeof(op##_relaxed(args)) __ret = op##_relaxed(args); \
        smp_mb__after_atomic(); \
        __ret; \
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...) \
({ \
        smp_mb__before_atomic(); \
        op##_relaxed(args); \
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...) \
({ \
        typeof(op##_relaxed(args)) __ret; \
        smp_mb__before_atomic(); \
        __ret = op##_relaxed(args); \
        smp_mb__after_atomic(); \
        __ret; \
})
#endif

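/*
 * Illustrative sketch (an assumption, not taken from any particular
 * architecture): if an arch supplies only atomic_add_return_relaxed(),
 * the templates above expand
 *
 *	atomic_add_return_acquire(i, v)
 *
 * into roughly
 *
 *	({
 *		int __ret = atomic_add_return_relaxed(i, v);
 *		smp_mb__after_atomic();
 *		__ret;
 *	})
 *
 * i.e. the relaxed primitive followed by an explicit barrier, while the
 * fully ordered atomic_add_return() gets barriers on both sides.
 */
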
/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed atomic_add_return
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...) \
        __atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define atomic_add_return_release(...) \
        __atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define atomic_add_return(...) \
        __atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

#ifndef atomic_inc
#define atomic_inc(v) atomic_add(1, (v))
#endif

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed

#ifndef atomic_inc_return
#define atomic_inc_return(v) atomic_add_return(1, (v))
#define atomic_inc_return_relaxed(v) atomic_add_return_relaxed(1, (v))
#define atomic_inc_return_acquire(v) atomic_add_return_acquire(1, (v))
#define atomic_inc_return_release(v) atomic_add_return_release(1, (v))
#else /* atomic_inc_return */
#define atomic_inc_return_relaxed atomic_inc_return
#define atomic_inc_return_acquire atomic_inc_return
#define atomic_inc_return_release atomic_inc_return
#endif /* atomic_inc_return */

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...) \
        __atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...) \
        __atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define atomic_inc_return(...) \
        __atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed atomic_sub_return
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...) \
        __atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...) \
        __atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define atomic_sub_return(...) \
        __atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

#ifndef atomic_dec
#define atomic_dec(v) atomic_sub(1, (v))
#endif

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed

#ifndef atomic_dec_return
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_dec_return_relaxed(v) atomic_sub_return_relaxed(1, (v))
#define atomic_dec_return_acquire(v) atomic_sub_return_acquire(1, (v))
#define atomic_dec_return_release(v) atomic_sub_return_release(1, (v))
#else /* atomic_dec_return */
#define atomic_dec_return_relaxed atomic_dec_return
#define atomic_dec_return_acquire atomic_dec_return
#define atomic_dec_return_release atomic_dec_return
#endif /* atomic_dec_return */

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...) \
        __atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...) \
        __atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define atomic_dec_return(...) \
        __atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */


/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed atomic_fetch_add
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...) \
        __atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...) \
        __atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...) \
        __atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v) atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v) atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v) atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v) atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed atomic_fetch_inc
#define atomic_fetch_inc_acquire atomic_fetch_inc
#define atomic_fetch_inc_release atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...) \
        __atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...) \
        __atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...) \
        __atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...) \
        __atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...) \
        __atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...) \
        __atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v) atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v) atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v) atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v) atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed atomic_fetch_dec
#define atomic_fetch_dec_acquire atomic_fetch_dec
#define atomic_fetch_dec_release atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...) \
        __atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...) \
        __atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...) \
        __atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed atomic_fetch_or
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...) \
        __atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...) \
        __atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...) \
        __atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed atomic_fetch_and
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...) \
        __atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...) \
        __atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...) \
        __atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
#define atomic_fetch_andnot_acquire atomic_fetch_andnot
#define atomic_fetch_andnot_release atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...) \
        __atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...) \
        __atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...) \
        __atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...) \
        __atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...) \
        __atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...) \
        __atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */


/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed atomic_xchg
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...) \
        __atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define atomic_xchg_release(...) \
        __atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define atomic_xchg(...) \
        __atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...) \
        __atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...) \
        __atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...) \
        __atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n) \
({ \
        typeof(_po) __po = (_po); \
        typeof(*(_po)) __r, __o = *__po; \
        __r = atomic_cmpxchg##type((_p), __o, (_n)); \
        if (unlikely(__r != __o)) \
                *__po = __r; \
        likely(__r == __o); \
})

#define atomic_try_cmpxchg(_p, _po, _n) __atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n) __atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n) __atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n) __atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
#define atomic_try_cmpxchg_release atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */

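/*
 * Illustrative sketch (assumption, mirroring the helpers later in this
 * file): atomic_try_cmpxchg() writes the observed value back through the
 * @_po pointer on failure, which keeps compare-and-swap loops compact
 * because the caller never has to re-read the variable by hand.  The
 * add_unless_negative() name is made up for this example:
 *
 *	static inline bool add_unless_negative(atomic_t *v, int a)
 *	{
 *		int c = atomic_read(v);
 *
 *		do {
 *			if (c < 0)
 *				return false;
 *		} while (!atomic_try_cmpxchg(v, &c, c + a));
 *
 *		return true;
 *	}
 */
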
/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
        __atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...) \
        __atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...) \
        __atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

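/*
 * Illustrative sketch (assumption): unlike the atomic_*() operations
 * above, the bare cmpxchg()/xchg() families operate on ordinary scalar
 * lvalues of a machine-supported size.  The @owner slot below is made up
 * for the example:
 *
 *	static unsigned long owner;
 *
 *	if (cmpxchg(&owner, 0UL, (unsigned long)current) == 0UL)
 *		;	// slot was free and is now ours
 *
 * cmpxchg() returns the value previously held by @owner, so comparing it
 * with the expected old value tells the caller whether the swap happened.
 */
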
/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
        __atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
        __atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...) \
        __atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...) __atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...) __atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...) __atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic_fetch_add_unless
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
        int c = atomic_read(v);

        do {
                if (unlikely(c == u))
                        break;
        } while (!atomic_try_cmpxchg(v, &c, c + a));

        return c;
}
#endif

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic_add_unless(atomic_t *v, int a, int u)
{
        return atomic_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
#endif

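/*
 * Illustrative sketch (assumption): atomic_inc_not_zero() is the usual
 * building block for taking a reference on an object whose final
 * reference may be dropped concurrently.  The obj structure and
 * obj_get() helper are made up for this example:
 *
 *	struct obj {
 *		atomic_t refs;
 *	};
 *
 *	static struct obj *obj_get(struct obj *o)
 *	{
 *		if (o && !atomic_inc_not_zero(&o->refs))
 *			o = NULL;	// count already hit zero; object is dying
 *		return o;
 *	}
 */
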
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_inc_and_test
static inline bool atomic_inc_and_test(atomic_t *v)
{
        return atomic_inc_return(v) == 0;
}
#endif

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic_dec_and_test
static inline bool atomic_dec_and_test(atomic_t *v)
{
        return atomic_dec_return(v) == 0;
}
#endif

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_sub_and_test
static inline bool atomic_sub_and_test(int i, atomic_t *v)
{
        return atomic_sub_return(i, v) == 0;
}
#endif

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#ifndef atomic_add_negative
static inline bool atomic_add_negative(int i, atomic_t *v)
{
        return atomic_add_return(i, v) < 0;
}
#endif

#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
        atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
        return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
        return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
        return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
        return atomic_fetch_and_release(~i, v);
}
#endif

#ifndef atomic_inc_unless_negative
static inline bool atomic_inc_unless_negative(atomic_t *v)
{
        int c = atomic_read(v);

        do {
                if (unlikely(c < 0))
                        return false;
        } while (!atomic_try_cmpxchg(v, &c, c + 1));

        return true;
}
#endif

#ifndef atomic_dec_unless_positive
static inline bool atomic_dec_unless_positive(atomic_t *v)
{
        int c = atomic_read(v);

        do {
                if (unlikely(c > 0))
                        return false;
        } while (!atomic_try_cmpxchg(v, &c, c - 1));

        return true;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
        int dec, c = atomic_read(v);

        do {
                dec = c - 1;
                if (unlikely(dec < 0))
                        break;
        } while (!atomic_try_cmpxchg(v, &c, dec));

        return dec;
}
#endif

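/*
 * Illustrative sketch (assumption): atomic_dec_if_positive() suits
 * counting-semaphore style "take one if any are left" logic.  The
 * try_take_slot() name is made up for this example:
 *
 *	static bool try_take_slot(atomic_t *available)
 *	{
 *		return atomic_dec_if_positive(available) >= 0;
 *	}
 *
 * A negative return value means the old value was zero or below and no
 * slot was taken; zero or above means the decrement succeeded.
 */
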
#define atomic_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v) smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i) smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed atomic64_add_return
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...) \
        __atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...) \
        __atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define atomic64_add_return(...) \
        __atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

#ifndef atomic64_inc
#define atomic64_inc(v) atomic64_add(1, (v))
#endif

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed

#ifndef atomic64_inc_return
#define atomic64_inc_return(v) atomic64_add_return(1, (v))
#define atomic64_inc_return_relaxed(v) atomic64_add_return_relaxed(1, (v))
#define atomic64_inc_return_acquire(v) atomic64_add_return_acquire(1, (v))
#define atomic64_inc_return_release(v) atomic64_add_return_release(1, (v))
#else /* atomic64_inc_return */
#define atomic64_inc_return_relaxed atomic64_inc_return
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return
#endif /* atomic64_inc_return */

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...) \
        __atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...) \
        __atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define atomic64_inc_return(...) \
        __atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */


/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed atomic64_sub_return
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...) \
        __atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...) \
        __atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define atomic64_sub_return(...) \
        __atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

#ifndef atomic64_dec
#define atomic64_dec(v) atomic64_sub(1, (v))
#endif

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed

#ifndef atomic64_dec_return
#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_dec_return_relaxed(v) atomic64_sub_return_relaxed(1, (v))
#define atomic64_dec_return_acquire(v) atomic64_sub_return_acquire(1, (v))
#define atomic64_dec_return_release(v) atomic64_sub_return_release(1, (v))
#else /* atomic64_dec_return */
#define atomic64_dec_return_relaxed atomic64_dec_return
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return
#endif /* atomic64_dec_return */

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...) \
        __atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...) \
        __atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define atomic64_dec_return(...) \
        __atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */


/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...) \
        __atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...) \
        __atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v) atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v) atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v) atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v) atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
#define atomic64_fetch_inc_acquire atomic64_fetch_inc
#define atomic64_fetch_inc_release atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...) \
        __atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...) \
        __atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...) \
        __atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...) \
        __atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v) atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v) atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v) atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v) atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
#define atomic64_fetch_dec_acquire atomic64_fetch_dec
#define atomic64_fetch_dec_release atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...) \
        __atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...) \
        __atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...) \
        __atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...) \
        __atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...) \
        __atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...) \
        __atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...) \
        __atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...) \
        __atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...) \
        __atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...) \
        __atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...) \
        __atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */


/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed atomic64_xchg
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...) \
        __atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...) \
        __atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...) \
        __atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...) \
        __atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...) \
        __atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...) \
        __atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n) \
({ \
        typeof(_po) __po = (_po); \
        typeof(*(_po)) __r, __o = *__po; \
        __r = atomic64_cmpxchg##type((_p), __o, (_n)); \
        if (unlikely(__r != __o)) \
                *__po = __r; \
        likely(__r == __o); \
})

#define atomic64_try_cmpxchg(_p, _po, _n) __atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n) __atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n) __atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n) __atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic64_fetch_add_unless
static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
                                                  long long u)
{
        long long c = atomic64_read(v);

        do {
                if (unlikely(c == u))
                        break;
        } while (!atomic64_try_cmpxchg(v, &c, c + a));

        return c;
}
#endif

/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
        return atomic64_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic64_inc_not_zero
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
#endif

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_inc_and_test
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
        return atomic64_inc_return(v) == 0;
}
#endif

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic64_dec_and_test
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
        return atomic64_dec_return(v) == 0;
}
#endif

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_sub_and_test
static inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
{
        return atomic64_sub_return(i, v) == 0;
}
#endif

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#ifndef atomic64_add_negative
static inline bool atomic64_add_negative(long long i, atomic64_t *v)
{
        return atomic64_add_return(i, v) < 0;
}
#endif

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
        atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
        return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
        return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
        return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
        return atomic64_fetch_and_release(~i, v);
}
#endif

#ifndef atomic64_inc_unless_negative
static inline bool atomic64_inc_unless_negative(atomic64_t *v)
{
        long long c = atomic64_read(v);

        do {
                if (unlikely(c < 0))
                        return false;
        } while (!atomic64_try_cmpxchg(v, &c, c + 1));

        return true;
}
#endif

#ifndef atomic64_dec_unless_positive
static inline bool atomic64_dec_unless_positive(atomic64_t *v)
{
        long long c = atomic64_read(v);

        do {
                if (unlikely(c > 0))
                        return false;
        } while (!atomic64_try_cmpxchg(v, &c, c - 1));

        return true;
}
#endif

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic64 variable, v, was not decremented.
 */
#ifndef atomic64_dec_if_positive
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
        long long dec, c = atomic64_read(v);

        do {
                dec = c - 1;
                if (unlikely(dec < 0))
                        break;
        } while (!atomic64_try_cmpxchg(v, &c, dec));

        return dec;
}
#endif

#define atomic64_cond_read_relaxed(v, c) smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic64_cond_read_acquire(v, c) smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */