blob: f2e64634fa48c59a04d70a6d3b2e8e0f55b573cb [file] [log] [blame]
Linus Torvalds1da177e2005-04-16 15:20:36 -07001#ifndef __ARCH_X86_64_ATOMIC__
2#define __ARCH_X86_64_ATOMIC__
3
Gerd Hoffmannd167a512006-06-26 13:56:16 +02004#include <asm/alternative.h>
Jeff Dikea436ed92007-05-08 00:35:02 -07005#include <asm/cmpxchg.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -07006
7/* atomic_t should be 32 bit signed type */
8
9/*
10 * Atomic operations that C can't guarantee us. Useful for
11 * resource counting etc..
12 */
13
/*
 * The "lock" prefix is only required for SMP-safe read-modify-write
 * sequences; uniprocessor kernels can skip it.
 */
#ifdef CONFIG_SMP
#define LOCK "lock ; "
#else
#define LOCK ""
#endif
19
/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }
28
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 *
 * The volatile cast forces a real load on every use, so the compiler
 * cannot cache the counter across iterations of a busy-wait loop
 * (atomic_t's counter is a plain int, unlike atomic64_t's volatile one).
 */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)
36
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  A plain aligned store is
 * already atomic on x86, so no lock prefix is needed.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))
45
46/**
47 * atomic_add - add integer to atomic variable
48 * @i: integer value to add
49 * @v: pointer of type atomic_t
50 *
51 * Atomically adds @i to @v.
52 */
53static __inline__ void atomic_add(int i, atomic_t *v)
54{
55 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +020056 LOCK_PREFIX "addl %1,%0"
Linus Torvalds1da177e2005-04-16 15:20:36 -070057 :"=m" (v->counter)
58 :"ir" (i), "m" (v->counter));
59}
60
61/**
62 * atomic_sub - subtract the atomic variable
63 * @i: integer value to subtract
64 * @v: pointer of type atomic_t
65 *
66 * Atomically subtracts @i from @v.
67 */
68static __inline__ void atomic_sub(int i, atomic_t *v)
69{
70 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +020071 LOCK_PREFIX "subl %1,%0"
Linus Torvalds1da177e2005-04-16 15:20:36 -070072 :"=m" (v->counter)
73 :"ir" (i), "m" (v->counter));
74}
75
76/**
77 * atomic_sub_and_test - subtract value from variable and test result
78 * @i: integer value to subtract
79 * @v: pointer of type atomic_t
80 *
81 * Atomically subtracts @i from @v and returns
82 * true if the result is zero, or false for all
83 * other cases.
84 */
85static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
86{
87 unsigned char c;
88
89 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +020090 LOCK_PREFIX "subl %2,%0; sete %1"
Linus Torvalds1da177e2005-04-16 15:20:36 -070091 :"=m" (v->counter), "=qm" (c)
92 :"ir" (i), "m" (v->counter) : "memory");
93 return c;
94}
95
96/**
97 * atomic_inc - increment atomic variable
98 * @v: pointer of type atomic_t
99 *
100 * Atomically increments @v by 1.
101 */
102static __inline__ void atomic_inc(atomic_t *v)
103{
104 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200105 LOCK_PREFIX "incl %0"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700106 :"=m" (v->counter)
107 :"m" (v->counter));
108}
109
110/**
111 * atomic_dec - decrement atomic variable
112 * @v: pointer of type atomic_t
113 *
114 * Atomically decrements @v by 1.
115 */
116static __inline__ void atomic_dec(atomic_t *v)
117{
118 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200119 LOCK_PREFIX "decl %0"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700120 :"=m" (v->counter)
121 :"m" (v->counter));
122}
123
124/**
125 * atomic_dec_and_test - decrement and test
126 * @v: pointer of type atomic_t
127 *
128 * Atomically decrements @v by 1 and
129 * returns true if the result is 0, or false for all other
130 * cases.
131 */
132static __inline__ int atomic_dec_and_test(atomic_t *v)
133{
134 unsigned char c;
135
136 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200137 LOCK_PREFIX "decl %0; sete %1"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700138 :"=m" (v->counter), "=qm" (c)
139 :"m" (v->counter) : "memory");
140 return c != 0;
141}
142
143/**
144 * atomic_inc_and_test - increment and test
145 * @v: pointer of type atomic_t
146 *
147 * Atomically increments @v by 1
148 * and returns true if the result is zero, or false for all
149 * other cases.
150 */
151static __inline__ int atomic_inc_and_test(atomic_t *v)
152{
153 unsigned char c;
154
155 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200156 LOCK_PREFIX "incl %0; sete %1"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700157 :"=m" (v->counter), "=qm" (c)
158 :"m" (v->counter) : "memory");
159 return c != 0;
160}
161
162/**
163 * atomic_add_negative - add and test if negative
Linus Torvalds1da177e2005-04-16 15:20:36 -0700164 * @i: integer value to add
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800165 * @v: pointer of type atomic_t
Linus Torvalds1da177e2005-04-16 15:20:36 -0700166 *
167 * Atomically adds @i to @v and returns true
168 * if the result is negative, or false when
169 * result is greater than or equal to zero.
170 */
171static __inline__ int atomic_add_negative(int i, atomic_t *v)
172{
173 unsigned char c;
174
175 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200176 LOCK_PREFIX "addl %2,%0; sets %1"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700177 :"=m" (v->counter), "=qm" (c)
178 :"ir" (i), "m" (v->counter) : "memory");
179 return c;
180}
181
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800182/**
183 * atomic_add_return - add and return
184 * @i: integer value to add
185 * @v: pointer of type atomic_t
186 *
187 * Atomically adds @i to @v and returns @i + @v
188 */
189static __inline__ int atomic_add_return(int i, atomic_t *v)
190{
191 int __i = i;
192 __asm__ __volatile__(
Andi Kleen9dc452b2006-12-07 02:14:13 +0100193 LOCK_PREFIX "xaddl %0, %1"
194 :"+r" (i), "+m" (v->counter)
195 : : "memory");
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800196 return i + __i;
197}
198
199static __inline__ int atomic_sub_return(int i, atomic_t *v)
200{
201 return atomic_add_return(-i,v);
202}
203
204#define atomic_inc_return(v) (atomic_add_return(1,v))
205#define atomic_dec_return(v) (atomic_sub_return(1,v))
206
/* A 64bit atomic type */

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }
212
/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)		((v)->counter)
221
/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  An aligned quadword store
 * is atomic on x86-64, so no lock prefix is required.
 */
#define atomic64_set(v,i)		(((v)->counter) = (i))
230
231/**
232 * atomic64_add - add integer to atomic64 variable
233 * @i: integer value to add
234 * @v: pointer to type atomic64_t
235 *
236 * Atomically adds @i to @v.
237 */
238static __inline__ void atomic64_add(long i, atomic64_t *v)
239{
240 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200241 LOCK_PREFIX "addq %1,%0"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700242 :"=m" (v->counter)
243 :"ir" (i), "m" (v->counter));
244}
245
246/**
247 * atomic64_sub - subtract the atomic64 variable
248 * @i: integer value to subtract
249 * @v: pointer to type atomic64_t
250 *
251 * Atomically subtracts @i from @v.
252 */
253static __inline__ void atomic64_sub(long i, atomic64_t *v)
254{
255 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200256 LOCK_PREFIX "subq %1,%0"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700257 :"=m" (v->counter)
258 :"ir" (i), "m" (v->counter));
259}
260
261/**
262 * atomic64_sub_and_test - subtract value from variable and test result
263 * @i: integer value to subtract
264 * @v: pointer to type atomic64_t
265 *
266 * Atomically subtracts @i from @v and returns
267 * true if the result is zero, or false for all
268 * other cases.
269 */
270static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
271{
272 unsigned char c;
273
274 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200275 LOCK_PREFIX "subq %2,%0; sete %1"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700276 :"=m" (v->counter), "=qm" (c)
277 :"ir" (i), "m" (v->counter) : "memory");
278 return c;
279}
280
281/**
282 * atomic64_inc - increment atomic64 variable
283 * @v: pointer to type atomic64_t
284 *
285 * Atomically increments @v by 1.
286 */
287static __inline__ void atomic64_inc(atomic64_t *v)
288{
289 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200290 LOCK_PREFIX "incq %0"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700291 :"=m" (v->counter)
292 :"m" (v->counter));
293}
294
295/**
296 * atomic64_dec - decrement atomic64 variable
297 * @v: pointer to type atomic64_t
298 *
299 * Atomically decrements @v by 1.
300 */
301static __inline__ void atomic64_dec(atomic64_t *v)
302{
303 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200304 LOCK_PREFIX "decq %0"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700305 :"=m" (v->counter)
306 :"m" (v->counter));
307}
308
309/**
310 * atomic64_dec_and_test - decrement and test
311 * @v: pointer to type atomic64_t
312 *
313 * Atomically decrements @v by 1 and
314 * returns true if the result is 0, or false for all other
315 * cases.
316 */
317static __inline__ int atomic64_dec_and_test(atomic64_t *v)
318{
319 unsigned char c;
320
321 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200322 LOCK_PREFIX "decq %0; sete %1"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700323 :"=m" (v->counter), "=qm" (c)
324 :"m" (v->counter) : "memory");
325 return c != 0;
326}
327
328/**
329 * atomic64_inc_and_test - increment and test
330 * @v: pointer to type atomic64_t
331 *
332 * Atomically increments @v by 1
333 * and returns true if the result is zero, or false for all
334 * other cases.
335 */
336static __inline__ int atomic64_inc_and_test(atomic64_t *v)
337{
338 unsigned char c;
339
340 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200341 LOCK_PREFIX "incq %0; sete %1"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700342 :"=m" (v->counter), "=qm" (c)
343 :"m" (v->counter) : "memory");
344 return c != 0;
345}
346
347/**
348 * atomic64_add_negative - add and test if negative
Linus Torvalds1da177e2005-04-16 15:20:36 -0700349 * @i: integer value to add
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800350 * @v: pointer to type atomic64_t
Linus Torvalds1da177e2005-04-16 15:20:36 -0700351 *
352 * Atomically adds @i to @v and returns true
353 * if the result is negative, or false when
354 * result is greater than or equal to zero.
355 */
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800356static __inline__ int atomic64_add_negative(long i, atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700357{
358 unsigned char c;
359
360 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200361 LOCK_PREFIX "addq %2,%0; sets %1"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700362 :"=m" (v->counter), "=qm" (c)
363 :"ir" (i), "m" (v->counter) : "memory");
364 return c;
365}
366
367/**
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800368 * atomic64_add_return - add and return
Linus Torvalds1da177e2005-04-16 15:20:36 -0700369 * @i: integer value to add
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800370 * @v: pointer to type atomic64_t
Linus Torvalds1da177e2005-04-16 15:20:36 -0700371 *
372 * Atomically adds @i to @v and returns @i + @v
373 */
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800374static __inline__ long atomic64_add_return(long i, atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700375{
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800376 long __i = i;
Linus Torvalds1da177e2005-04-16 15:20:36 -0700377 __asm__ __volatile__(
Gerd Hoffmannd167a512006-06-26 13:56:16 +0200378 LOCK_PREFIX "xaddq %0, %1;"
Mathieu Desnoyers79d365a2007-05-08 00:34:36 -0700379 :"+r" (i), "+m" (v->counter)
380 : : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700381 return i + __i;
382}
383
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800384static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700385{
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800386 return atomic64_add_return(-i,v);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700387}
388
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800389#define atomic64_inc_return(v) (atomic64_add_return(1,v))
390#define atomic64_dec_return(v) (atomic64_sub_return(1,v))
391
/* Compare-and-swap / exchange, layered on the generic cmpxchg/xchg helpers. */
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
Nick Piggin4a6dae62005-11-13 16:07:24 -0800397
Nick Piggin8426e1f2005-11-13 16:07:25 -0800398/**
399 * atomic_add_unless - add unless the number is a given value
400 * @v: pointer of type atomic_t
401 * @a: the amount to add to v...
402 * @u: ...unless v is equal to u.
403 *
404 * Atomically adds @a to @v, so long as it was not @u.
405 * Returns non-zero if @v was not @u, and zero otherwise.
406 */
Mathieu Desnoyers2856f5e2007-05-08 00:34:38 -0700407static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
408{
409 int c, old;
410 c = atomic_read(v);
411 for (;;) {
412 if (unlikely(c == (u)))
413 break;
414 old = atomic_cmpxchg((v), c, c + (a));
415 if (likely(old == c))
416 break;
417 c = old;
418 }
419 return c != (u);
420}
421
Nick Piggin8426e1f2005-11-13 16:07:25 -0800422#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
423
Mathieu Desnoyers79d365a2007-05-08 00:34:36 -0700424/**
425 * atomic64_add_unless - add unless the number is a given value
426 * @v: pointer of type atomic64_t
427 * @a: the amount to add to v...
428 * @u: ...unless v is equal to u.
429 *
430 * Atomically adds @a to @v, so long as it was not @u.
431 * Returns non-zero if @v was not @u, and zero otherwise.
432 */
Mathieu Desnoyers2856f5e2007-05-08 00:34:38 -0700433static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
434{
435 long c, old;
436 c = atomic64_read(v);
437 for (;;) {
438 if (unlikely(c == (u)))
439 break;
440 old = atomic64_cmpxchg((v), c, c + (a));
441 if (likely(old == c))
442 break;
443 c = old;
444 }
445 return c != (u);
446}
447
Mathieu Desnoyers79d365a2007-05-08 00:34:36 -0700448#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
449
/*
 * These are x86-specific, used by some header files.
 * Macro hygiene fix: parenthesize the addr/mask arguments everywhere
 * (atomic_clear_mask used a bare *addr, atomic_set_mask a bare
 * (unsigned)mask, which mis-expand for compound argument expressions).
 */
#define atomic_clear_mask(mask, addr) \
__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
: : "r" (~(mask)),"m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr) \
__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
: : "r" ((unsigned)(mask)),"m" (*(addr)) : "memory")
458
/*
 * Atomic operations are already serializing on x86, so these barriers
 * only need to keep the compiler from reordering around them.
 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
464
Christoph Lameterd3cb4872006-01-06 00:11:20 -0800465#include <asm-generic/atomic.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -0700466#endif