#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

#include <linux/types.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */
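
/*
 * The H8/300 port is uniprocessor-only, so these helpers get their
 * atomicity by disabling interrupts around each read-modify-write
 * sequence (arch_local_irq_save()/arch_local_irq_restore()) rather
 * than with special CPU instructions.
 */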

#define ATOMIC_INIT(i)	{ (i) }

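/*
 * READ_ONCE()/WRITE_ONCE() force single, untorn accesses to ->counter
 * and keep the compiler from caching or re-reading it; on this
 * uniprocessor port that is all plain reads and writes need.
 */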
#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic_set(v, i)	WRITE_ONCE(((v)->counter), (i))

#include <linux/kernel.h>

#define ATOMIC_OP_RETURN(op, c_op)				\
static inline int atomic_##op##_return(int i, atomic_t *v)	\
{								\
	h8300flags flags;					\
	int ret;						\
								\
	flags = arch_local_irq_save();				\
	ret = v->counter c_op i;				\
	arch_local_irq_restore(flags);				\
	return ret;						\
}

#define ATOMIC_FETCH_OP(op, c_op)				\
static inline int atomic_fetch_##op(int i, atomic_t *v)	\
{								\
	h8300flags flags;					\
	int ret;						\
								\
	flags = arch_local_irq_save();				\
	ret = v->counter;					\
	v->counter c_op i;					\
	arch_local_irq_restore(flags);				\
	return ret;						\
}
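
/*
 * Note the difference between the two templates above:
 * atomic_##op##_return() hands back the value *after* the operation,
 * while atomic_fetch_##op() hands back the value observed *before* it.
 * For a counter holding 1, atomic_add_return(1, v) yields 2, whereas
 * atomic_fetch_add(1, v) yields 1.
 */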

#define ATOMIC_OP(op, c_op)					\
static inline void atomic_##op(int i, atomic_t *v)		\
{								\
	h8300flags flags;					\
								\
	flags = arch_local_irq_save();				\
	v->counter c_op i;					\
	arch_local_irq_restore(flags);				\
}

ATOMIC_OP_RETURN(add, +=)
ATOMIC_OP_RETURN(sub, -=)

#define ATOMIC_OPS(op, c_op)					\
	ATOMIC_OP(op, c_op)					\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &=)
ATOMIC_OPS(or,  |=)
ATOMIC_OPS(xor, ^=)
ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)
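
/*
 * The expansions above stamp out atomic_and/or/xor/add/sub() and an
 * atomic_fetch_##op() variant of each; add and sub additionally have
 * _return forms from ATOMIC_OP_RETURN() earlier, so the templates can
 * now be discarded.
 */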

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
#define atomic_sub_and_test(i, v)	(atomic_sub_return(i, v) == 0)

#define atomic_inc_return(v)		atomic_add_return(1, v)
#define atomic_dec_return(v)		atomic_sub_return(1, v)

#define atomic_inc(v)			(void)atomic_inc_return(v)
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)

#define atomic_dec(v)			(void)atomic_dec_return(v)
#define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	h8300flags flags;

	flags = arch_local_irq_save();
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	arch_local_irq_restore(flags);
	return ret;
}
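
/*
 * Example (a sketch, not part of this header; the names are made up):
 * taking a one-shot claim on a resource.  atomic_cmpxchg() returns the
 * value it observed before any store, so comparing that with the
 * expected old value tells the caller whether the swap happened:
 *
 *	static atomic_t claimed = ATOMIC_INIT(0);
 *
 *	if (atomic_cmpxchg(&claimed, 0, 1) == 0)
 *		do_one_time_setup();
 */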

static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	h8300flags flags;

	flags = arch_local_irq_save();
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	arch_local_irq_restore(flags);
	return ret;
}
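
/*
 * __atomic_add_unless() returns the old value of @v; the generic
 * atomic_add_unless() wrapper in <linux/atomic.h> turns that into a
 * "did the add happen" boolean, and atomic_inc_not_zero(v) is the
 * atomic_add_unless(v, 1, 0) special case.
 */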

#endif /* __ARCH_H8300_ATOMIC__ */