blob: dad68bf46c77012e144857898093b99dabea1cd7 [file] [log] [blame]
/*
 * Generic implementation of 64-bit atomics using spinlocks,
 * useful on processors that don't have 64-bit atomic instructions.
 *
 * Copyright © 2009 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
12#ifndef _ASM_GENERIC_ATOMIC64_H
13#define _ASM_GENERIC_ATOMIC64_H
14
15typedef struct {
16 long long counter;
17} atomic64_t;
18
19#define ATOMIC64_INIT(i) { (i) }
20
21extern long long atomic64_read(const atomic64_t *v);
22extern void atomic64_set(atomic64_t *v, long long i);
Peter Zijlstra560cb122014-04-23 16:12:30 +020023
24#define ATOMIC64_OP(op) \
25extern void atomic64_##op(long long a, atomic64_t *v);
26
27#define ATOMIC64_OP_RETURN(op) \
28extern long long atomic64_##op##_return(long long a, atomic64_t *v);
29
Peter Zijlstra28aa2bd2016-04-18 00:54:38 +020030#define ATOMIC64_FETCH_OP(op) \
31extern long long atomic64_fetch_##op(long long a, atomic64_t *v);
32
33#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op) ATOMIC64_FETCH_OP(op)
Peter Zijlstra560cb122014-04-23 16:12:30 +020034
35ATOMIC64_OPS(add)
36ATOMIC64_OPS(sub)
37
Peter Zijlstra28aa2bd2016-04-18 00:54:38 +020038#undef ATOMIC64_OPS
39#define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_FETCH_OP(op)
40
41ATOMIC64_OPS(and)
42ATOMIC64_OPS(or)
43ATOMIC64_OPS(xor)
Peter Zijlstrae6942b72014-04-23 19:32:50 +020044
Peter Zijlstra560cb122014-04-23 16:12:30 +020045#undef ATOMIC64_OPS
Peter Zijlstra28aa2bd2016-04-18 00:54:38 +020046#undef ATOMIC64_FETCH_OP
Peter Zijlstra560cb122014-04-23 16:12:30 +020047#undef ATOMIC64_OP_RETURN
48#undef ATOMIC64_OP
49
Paul Mackerras09d4e0e2009-06-12 21:10:05 +000050extern long long atomic64_dec_if_positive(atomic64_t *v);
51extern long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n);
52extern long long atomic64_xchg(atomic64_t *v, long long new);
53extern int atomic64_add_unless(atomic64_t *v, long long a, long long u);
54
55#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
56#define atomic64_inc(v) atomic64_add(1LL, (v))
57#define atomic64_inc_return(v) atomic64_add_return(1LL, (v))
58#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
59#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
60#define atomic64_dec(v) atomic64_sub(1LL, (v))
61#define atomic64_dec_return(v) atomic64_sub_return(1LL, (v))
62#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
63#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL)
64
65#endif /* _ASM_GENERIC_ATOMIC64_H */