#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

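/*
 * Declaration sketch (illustrative only; the variable name below is not
 * part of this header):
 *
 *	static atomic_t pending = ATOMIC_INIT(0);
 */
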
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int atomic_read(const atomic_t *v)
{
	return READ_ONCE((v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void atomic_set(atomic_t *v, int i)
{
	WRITE_ONCE(v->counter, i);
}

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}
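
/*
 * Note: LOCK_PREFIX (from <asm/alternative.h>) supplies the "lock" prefix
 * that makes the read-modify-write above atomic with respect to other CPUs
 * on SMP configurations; on uniprocessor builds it expands to nothing.
 */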

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline int atomic_sub_and_test(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", "e");
}
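
/*
 * GEN_BINARY_RMWcc (from <asm/rmwcc.h>) emits the locked instruction and
 * hands back the state of the requested condition flag -- here "e" (ZF),
 * i.e. whether the result of the subtraction is zero.
 */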

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline int atomic_dec_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", "e");
}
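
/*
 * Typical reference-count style usage (sketch only; the object and
 * release_obj() below are hypothetical, and new code should normally use
 * the kref interface instead):
 *
 *	if (atomic_dec_and_test(&obj->refs))
 *		release_obj(obj);
 */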

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline int atomic_inc_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", "e");
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static __always_inline int atomic_add_negative(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", "s");
}

/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	return i + xadd(&v->counter, i);
}
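
/*
 * Note: xadd() above returns the value @v held before the addition; adding
 * @i to that old value produces the post-add result that
 * atomic_add_return() reports.
 */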

/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
	return xadd(&v->counter, i);
}

static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
	return xadd(&v->counter, -i);
}

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}
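
/*
 * Note: xchg() needs no explicit LOCK prefix; the x86 xchg instruction is
 * implicitly locked when one operand is in memory.
 */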

#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	asm volatile(LOCK_PREFIX #op"l %1,%0"				\
			: "+m" (v->counter)				\
			: "ir" (i)					\
			: "memory");					\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int old, val = atomic_read(v);					\
	for (;;) {							\
		old = atomic_cmpxchg(v, val, val c_op i);		\
		if (old == val)						\
			break;						\
		val = old;						\
	}								\
	return old;							\
}

#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op)							\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &)
ATOMIC_OPS(or , |)
ATOMIC_OPS(xor, ^)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP
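
/*
 * After the expansions above, atomic_and/or/xor() apply the operation with
 * a single locked instruction, while atomic_fetch_and/or/xor() loop on
 * atomic_cmpxchg() and return the value @v held before the operation.
 * Illustrative use (the variable and mask below are arbitrary):
 *
 *	int old_flags = atomic_fetch_or(0x1, &flags);
 */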

/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
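
/*
 * Callers usually only care whether the add actually happened, e.g.
 * (sketch; the generic atomic_add_unless() wrapper in <linux/atomic.h>
 * does effectively this):
 *
 *	added = (__atomic_add_unless(v, a, u) != u);
 */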

/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer to a short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static __always_inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}

#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#endif /* _ASM_X86_ATOMIC_H */