/*
 * Generic implementation of 64-bit atomics using spinlocks,
 * useful on processors that don't have 64-bit atomic instructions.
 *
 * Copyright © 2009 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
12#ifndef _ASM_GENERIC_ATOMIC64_H
13#define _ASM_GENERIC_ATOMIC64_H
14
/*
 * 64-bit atomic type for machines without native 64-bit atomic
 * instructions; per the file header, accesses are serialized with
 * spinlocks by the out-of-line implementation.  Never touch
 * ->counter directly — go through the atomic64_* accessors below.
 */
typedef struct {
	long long counter;
} atomic64_t;

/* Static initializer: atomic64_t x = ATOMIC64_INIT(0); */
#define ATOMIC64_INIT(i)	{ (i) }
20
/*
 * Load/store primitives, defined out of line.  Plain 64-bit loads and
 * stores are not single instructions on the targets this header serves,
 * hence even read and set need the locked implementation.
 */
extern long long atomic64_read(const atomic64_t *v);
extern void	 atomic64_set(atomic64_t *v, long long i);
Peter Zijlstra560cb122014-04-23 16:12:30 +020023
/*
 * Declaration templates: each expansion emits an extern prototype for
 * one out-of-line operation, e.g. ATOMIC64_OP(add) declares
 * atomic64_add(a, v).
 */
#define ATOMIC64_OP(op)							\
extern void	 atomic64_##op(long long a, atomic64_t *v);

/* Same, but for the value-returning form (atomic64_<op>_return). */
#define ATOMIC64_OP_RETURN(op)						\
extern long long atomic64_##op##_return(long long a, atomic64_t *v);

/* Declare both the void and the value-returning variant of <op>. */
#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op)

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)

/* Bitwise ops are only declared in the non-value-returning flavour. */
ATOMIC64_OP(and)
ATOMIC64_OP(or)
ATOMIC64_OP(xor)

/* The templates are internal scaffolding; don't leak them to users. */
#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
42
/* Remaining out-of-line primitives. */

/* Decrement *v only if the result stays >= 0; returns the new value. */
extern long long atomic64_dec_if_positive(atomic64_t *v);
/* Compare-and-swap: if *v == o, set *v = n; returns the old value. */
extern long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n);
/* Unconditionally set *v = new; returns the old value. */
extern long long atomic64_xchg(atomic64_t *v, long long new);
/* Add a to *v unless *v == u; returns nonzero iff the add happened. */
extern int	 atomic64_add_unless(atomic64_t *v, long long a, long long u);

/*
 * Convenience wrappers, all expressed in terms of the primitives above.
 * Each macro evaluates its arguments exactly once.
 */
#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
#define atomic64_inc(v)			atomic64_add(1LL, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec(v)			atomic64_sub(1LL, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)
57
58#endif /* _ASM_GENERIC_ATOMIC64_H */