blob: 3e0cd7d3833569bab88b4e7414201aafa0d0dab2 [file] [log] [blame]
Linus Torvalds1da177e2005-04-16 15:20:36 -07001#ifndef __ARCH_X86_64_ATOMIC__
2#define __ARCH_X86_64_ATOMIC__
3
Gerd Hoffmannd167a512006-06-26 13:56:16 +02004#include <asm/alternative.h>
Jeff Dikea436ed92007-05-08 00:35:02 -07005#include <asm/cmpxchg.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -07006
7/* atomic_t should be 32 bit signed type */
8
9/*
10 * Atomic operations that C can't guarantee us. Useful for
11 * resource counting etc..
12 */
13
14#ifdef CONFIG_SMP
15#define LOCK "lock ; "
16#else
17#define LOCK ""
18#endif
19
/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct {
	int counter;
} atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 *
 * NOTE(review): counter is not volatile-qualified here, so the
 * compiler may cache reads across iterations -- later kernels added
 * a volatile access; confirm against in-tree callers.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  A plain aligned 32-bit
 * store is atomic on x86-64; no lock prefix is needed.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))
Linus Torvalds1da177e2005-04-16 15:20:36 -070047
48/**
49 * atomic_add - add integer to atomic variable
50 * @i: integer value to add
51 * @v: pointer of type atomic_t
Joe Perches7edb3cd2008-03-23 01:01:42 -070052 *
Linus Torvalds1da177e2005-04-16 15:20:36 -070053 * Atomically adds @i to @v.
54 */
Joe Perches7edb3cd2008-03-23 01:01:42 -070055static inline void atomic_add(int i, atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -070056{
Joe Perches7edb3cd2008-03-23 01:01:42 -070057 asm volatile(LOCK_PREFIX "addl %1,%0"
58 : "=m" (v->counter)
59 : "ir" (i), "m" (v->counter));
Linus Torvalds1da177e2005-04-16 15:20:36 -070060}
61
62/**
63 * atomic_sub - subtract the atomic variable
64 * @i: integer value to subtract
65 * @v: pointer of type atomic_t
Joe Perches7edb3cd2008-03-23 01:01:42 -070066 *
Linus Torvalds1da177e2005-04-16 15:20:36 -070067 * Atomically subtracts @i from @v.
68 */
Joe Perches7edb3cd2008-03-23 01:01:42 -070069static inline void atomic_sub(int i, atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -070070{
Joe Perches7edb3cd2008-03-23 01:01:42 -070071 asm volatile(LOCK_PREFIX "subl %1,%0"
72 : "=m" (v->counter)
73 : "ir" (i), "m" (v->counter));
Linus Torvalds1da177e2005-04-16 15:20:36 -070074}
75
76/**
77 * atomic_sub_and_test - subtract value from variable and test result
78 * @i: integer value to subtract
79 * @v: pointer of type atomic_t
Joe Perches7edb3cd2008-03-23 01:01:42 -070080 *
Linus Torvalds1da177e2005-04-16 15:20:36 -070081 * Atomically subtracts @i from @v and returns
82 * true if the result is zero, or false for all
83 * other cases.
84 */
Joe Perches7edb3cd2008-03-23 01:01:42 -070085static inline int atomic_sub_and_test(int i, atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -070086{
87 unsigned char c;
88
Joe Perches7edb3cd2008-03-23 01:01:42 -070089 asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
90 : "=m" (v->counter), "=qm" (c)
91 : "ir" (i), "m" (v->counter) : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -070092 return c;
93}
94
95/**
96 * atomic_inc - increment atomic variable
97 * @v: pointer of type atomic_t
Joe Perches7edb3cd2008-03-23 01:01:42 -070098 *
Linus Torvalds1da177e2005-04-16 15:20:36 -070099 * Atomically increments @v by 1.
Joe Perches7edb3cd2008-03-23 01:01:42 -0700100 */
101static inline void atomic_inc(atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700102{
Joe Perches7edb3cd2008-03-23 01:01:42 -0700103 asm volatile(LOCK_PREFIX "incl %0"
104 : "=m" (v->counter)
105 : "m" (v->counter));
Linus Torvalds1da177e2005-04-16 15:20:36 -0700106}
107
108/**
109 * atomic_dec - decrement atomic variable
110 * @v: pointer of type atomic_t
Joe Perches7edb3cd2008-03-23 01:01:42 -0700111 *
Linus Torvalds1da177e2005-04-16 15:20:36 -0700112 * Atomically decrements @v by 1.
Joe Perches7edb3cd2008-03-23 01:01:42 -0700113 */
114static inline void atomic_dec(atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700115{
Joe Perches7edb3cd2008-03-23 01:01:42 -0700116 asm volatile(LOCK_PREFIX "decl %0"
117 : "=m" (v->counter)
118 : "m" (v->counter));
Linus Torvalds1da177e2005-04-16 15:20:36 -0700119}
120
121/**
122 * atomic_dec_and_test - decrement and test
123 * @v: pointer of type atomic_t
Joe Perches7edb3cd2008-03-23 01:01:42 -0700124 *
Linus Torvalds1da177e2005-04-16 15:20:36 -0700125 * Atomically decrements @v by 1 and
126 * returns true if the result is 0, or false for all other
127 * cases.
Joe Perches7edb3cd2008-03-23 01:01:42 -0700128 */
129static inline int atomic_dec_and_test(atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700130{
131 unsigned char c;
132
Joe Perches7edb3cd2008-03-23 01:01:42 -0700133 asm volatile(LOCK_PREFIX "decl %0; sete %1"
134 : "=m" (v->counter), "=qm" (c)
135 : "m" (v->counter) : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700136 return c != 0;
137}
138
139/**
Joe Perches7edb3cd2008-03-23 01:01:42 -0700140 * atomic_inc_and_test - increment and test
Linus Torvalds1da177e2005-04-16 15:20:36 -0700141 * @v: pointer of type atomic_t
Joe Perches7edb3cd2008-03-23 01:01:42 -0700142 *
Linus Torvalds1da177e2005-04-16 15:20:36 -0700143 * Atomically increments @v by 1
144 * and returns true if the result is zero, or false for all
145 * other cases.
Joe Perches7edb3cd2008-03-23 01:01:42 -0700146 */
147static inline int atomic_inc_and_test(atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700148{
149 unsigned char c;
150
Joe Perches7edb3cd2008-03-23 01:01:42 -0700151 asm volatile(LOCK_PREFIX "incl %0; sete %1"
152 : "=m" (v->counter), "=qm" (c)
153 : "m" (v->counter) : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700154 return c != 0;
155}
156
157/**
158 * atomic_add_negative - add and test if negative
Linus Torvalds1da177e2005-04-16 15:20:36 -0700159 * @i: integer value to add
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800160 * @v: pointer of type atomic_t
Joe Perches7edb3cd2008-03-23 01:01:42 -0700161 *
Linus Torvalds1da177e2005-04-16 15:20:36 -0700162 * Atomically adds @i to @v and returns true
163 * if the result is negative, or false when
164 * result is greater than or equal to zero.
Joe Perches7edb3cd2008-03-23 01:01:42 -0700165 */
166static inline int atomic_add_negative(int i, atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700167{
168 unsigned char c;
169
Joe Perches7edb3cd2008-03-23 01:01:42 -0700170 asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
171 : "=m" (v->counter), "=qm" (c)
172 : "ir" (i), "m" (v->counter) : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700173 return c;
174}
175
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800176/**
177 * atomic_add_return - add and return
178 * @i: integer value to add
179 * @v: pointer of type atomic_t
180 *
181 * Atomically adds @i to @v and returns @i + @v
182 */
Joe Perches7edb3cd2008-03-23 01:01:42 -0700183static inline int atomic_add_return(int i, atomic_t *v)
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800184{
185 int __i = i;
Joe Perches7edb3cd2008-03-23 01:01:42 -0700186 asm volatile(LOCK_PREFIX "xaddl %0, %1"
187 : "+r" (i), "+m" (v->counter)
188 : : "memory");
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800189 return i + __i;
190}
191
Joe Perches7edb3cd2008-03-23 01:01:42 -0700192static inline int atomic_sub_return(int i, atomic_t *v)
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800193{
Joe Perches7edb3cd2008-03-23 01:01:42 -0700194 return atomic_add_return(-i, v);
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800195}
196
Joe Perches7edb3cd2008-03-23 01:01:42 -0700197#define atomic_inc_return(v) (atomic_add_return(1, v))
198#define atomic_dec_return(v) (atomic_sub_return(1, v))
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800199
/* An 64bit atomic type */

typedef struct {
	long counter;
} atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)	((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  A plain aligned 64-bit
 * store is atomic on x86-64.
 */
#define atomic64_set(v, i)	(((v)->counter) = (i))
Linus Torvalds1da177e2005-04-16 15:20:36 -0700225
226/**
227 * atomic64_add - add integer to atomic64 variable
228 * @i: integer value to add
229 * @v: pointer to type atomic64_t
230 *
231 * Atomically adds @i to @v.
232 */
Joe Perches7edb3cd2008-03-23 01:01:42 -0700233static inline void atomic64_add(long i, atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700234{
Joe Perches7edb3cd2008-03-23 01:01:42 -0700235 asm volatile(LOCK_PREFIX "addq %1,%0"
236 : "=m" (v->counter)
237 : "ir" (i), "m" (v->counter));
Linus Torvalds1da177e2005-04-16 15:20:36 -0700238}
239
240/**
241 * atomic64_sub - subtract the atomic64 variable
242 * @i: integer value to subtract
243 * @v: pointer to type atomic64_t
244 *
245 * Atomically subtracts @i from @v.
246 */
Joe Perches7edb3cd2008-03-23 01:01:42 -0700247static inline void atomic64_sub(long i, atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700248{
Joe Perches7edb3cd2008-03-23 01:01:42 -0700249 asm volatile(LOCK_PREFIX "subq %1,%0"
250 : "=m" (v->counter)
251 : "ir" (i), "m" (v->counter));
Linus Torvalds1da177e2005-04-16 15:20:36 -0700252}
253
254/**
255 * atomic64_sub_and_test - subtract value from variable and test result
256 * @i: integer value to subtract
257 * @v: pointer to type atomic64_t
258 *
259 * Atomically subtracts @i from @v and returns
260 * true if the result is zero, or false for all
261 * other cases.
262 */
Joe Perches7edb3cd2008-03-23 01:01:42 -0700263static inline int atomic64_sub_and_test(long i, atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700264{
265 unsigned char c;
266
Joe Perches7edb3cd2008-03-23 01:01:42 -0700267 asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
268 : "=m" (v->counter), "=qm" (c)
269 : "ir" (i), "m" (v->counter) : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700270 return c;
271}
272
273/**
274 * atomic64_inc - increment atomic64 variable
275 * @v: pointer to type atomic64_t
276 *
277 * Atomically increments @v by 1.
278 */
Joe Perches7edb3cd2008-03-23 01:01:42 -0700279static inline void atomic64_inc(atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700280{
Joe Perches7edb3cd2008-03-23 01:01:42 -0700281 asm volatile(LOCK_PREFIX "incq %0"
282 : "=m" (v->counter)
283 : "m" (v->counter));
Linus Torvalds1da177e2005-04-16 15:20:36 -0700284}
285
286/**
287 * atomic64_dec - decrement atomic64 variable
288 * @v: pointer to type atomic64_t
289 *
290 * Atomically decrements @v by 1.
291 */
Joe Perches7edb3cd2008-03-23 01:01:42 -0700292static inline void atomic64_dec(atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700293{
Joe Perches7edb3cd2008-03-23 01:01:42 -0700294 asm volatile(LOCK_PREFIX "decq %0"
295 : "=m" (v->counter)
296 : "m" (v->counter));
Linus Torvalds1da177e2005-04-16 15:20:36 -0700297}
298
299/**
300 * atomic64_dec_and_test - decrement and test
301 * @v: pointer to type atomic64_t
302 *
303 * Atomically decrements @v by 1 and
304 * returns true if the result is 0, or false for all other
305 * cases.
306 */
Joe Perches7edb3cd2008-03-23 01:01:42 -0700307static inline int atomic64_dec_and_test(atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700308{
309 unsigned char c;
310
Joe Perches7edb3cd2008-03-23 01:01:42 -0700311 asm volatile(LOCK_PREFIX "decq %0; sete %1"
312 : "=m" (v->counter), "=qm" (c)
313 : "m" (v->counter) : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700314 return c != 0;
315}
316
317/**
318 * atomic64_inc_and_test - increment and test
319 * @v: pointer to type atomic64_t
320 *
321 * Atomically increments @v by 1
322 * and returns true if the result is zero, or false for all
323 * other cases.
324 */
Joe Perches7edb3cd2008-03-23 01:01:42 -0700325static inline int atomic64_inc_and_test(atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700326{
327 unsigned char c;
328
Joe Perches7edb3cd2008-03-23 01:01:42 -0700329 asm volatile(LOCK_PREFIX "incq %0; sete %1"
330 : "=m" (v->counter), "=qm" (c)
331 : "m" (v->counter) : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700332 return c != 0;
333}
334
335/**
336 * atomic64_add_negative - add and test if negative
Linus Torvalds1da177e2005-04-16 15:20:36 -0700337 * @i: integer value to add
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800338 * @v: pointer to type atomic64_t
Linus Torvalds1da177e2005-04-16 15:20:36 -0700339 *
340 * Atomically adds @i to @v and returns true
341 * if the result is negative, or false when
342 * result is greater than or equal to zero.
343 */
Joe Perches7edb3cd2008-03-23 01:01:42 -0700344static inline int atomic64_add_negative(long i, atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700345{
346 unsigned char c;
347
Joe Perches7edb3cd2008-03-23 01:01:42 -0700348 asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
349 : "=m" (v->counter), "=qm" (c)
350 : "ir" (i), "m" (v->counter) : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700351 return c;
352}
353
354/**
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800355 * atomic64_add_return - add and return
Linus Torvalds1da177e2005-04-16 15:20:36 -0700356 * @i: integer value to add
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800357 * @v: pointer to type atomic64_t
Linus Torvalds1da177e2005-04-16 15:20:36 -0700358 *
359 * Atomically adds @i to @v and returns @i + @v
360 */
Joe Perches7edb3cd2008-03-23 01:01:42 -0700361static inline long atomic64_add_return(long i, atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700362{
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800363 long __i = i;
Joe Perches7edb3cd2008-03-23 01:01:42 -0700364 asm volatile(LOCK_PREFIX "xaddq %0, %1;"
365 : "+r" (i), "+m" (v->counter)
366 : : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700367 return i + __i;
368}
369
Joe Perches7edb3cd2008-03-23 01:01:42 -0700370static inline long atomic64_sub_return(long i, atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700371{
Joe Perches7edb3cd2008-03-23 01:01:42 -0700372 return atomic64_add_return(-i, v);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700373}
374
Joe Perches7edb3cd2008-03-23 01:01:42 -0700375#define atomic64_inc_return(v) (atomic64_add_return(1, (v)))
376#define atomic64_dec_return(v) (atomic64_sub_return(1, (v)))
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800377
/* cmpxchg/xchg wrappers; all arguments parenthesized for macro hygiene
 * (the original left atomic64_xchg's 'new' bare). */
#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
Nick Piggin4a6dae62005-11-13 16:07:24 -0800383
Nick Piggin8426e1f2005-11-13 16:07:25 -0800384/**
385 * atomic_add_unless - add unless the number is a given value
386 * @v: pointer of type atomic_t
387 * @a: the amount to add to v...
388 * @u: ...unless v is equal to u.
389 *
390 * Atomically adds @a to @v, so long as it was not @u.
391 * Returns non-zero if @v was not @u, and zero otherwise.
392 */
Joe Perches7edb3cd2008-03-23 01:01:42 -0700393static inline int atomic_add_unless(atomic_t *v, int a, int u)
Mathieu Desnoyers2856f5e2007-05-08 00:34:38 -0700394{
395 int c, old;
396 c = atomic_read(v);
397 for (;;) {
398 if (unlikely(c == (u)))
399 break;
400 old = atomic_cmpxchg((v), c, c + (a));
401 if (likely(old == c))
402 break;
403 c = old;
404 }
405 return c != (u);
406}
407
Nick Piggin8426e1f2005-11-13 16:07:25 -0800408#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
409
Mathieu Desnoyers79d365a2007-05-08 00:34:36 -0700410/**
411 * atomic64_add_unless - add unless the number is a given value
412 * @v: pointer of type atomic64_t
413 * @a: the amount to add to v...
414 * @u: ...unless v is equal to u.
415 *
416 * Atomically adds @a to @v, so long as it was not @u.
417 * Returns non-zero if @v was not @u, and zero otherwise.
418 */
Joe Perches7edb3cd2008-03-23 01:01:42 -0700419static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
Mathieu Desnoyers2856f5e2007-05-08 00:34:38 -0700420{
421 long c, old;
422 c = atomic64_read(v);
423 for (;;) {
424 if (unlikely(c == (u)))
425 break;
426 old = atomic64_cmpxchg((v), c, c + (a));
427 if (likely(old == c))
428 break;
429 c = old;
430 }
431 return c != (u);
432}
433
Mathieu Desnoyers79d365a2007-05-08 00:34:36 -0700434#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
435
/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)					\
	asm volatile(LOCK_PREFIX "andl %0,%1"				\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)					\
	asm volatile(LOCK_PREFIX "orl %0,%1"				\
		     : : "r" ((unsigned)(mask)), "m" (*(addr))		\
		     : "memory")
Linus Torvalds1da177e2005-04-16 15:20:36 -0700445
446/* Atomic operations are already serializing on x86 */
447#define smp_mb__before_atomic_dec() barrier()
448#define smp_mb__after_atomic_dec() barrier()
449#define smp_mb__before_atomic_inc() barrier()
450#define smp_mb__after_atomic_inc() barrier()
451
Christoph Lameterd3cb4872006-01-06 00:11:20 -0800452#include <asm-generic/atomic.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -0700453#endif