blob: cecbf7baa6aa346297efa3bb14480bddd5e4236a [file] [log] [blame]
Linus Torvalds1da177e2005-04-16 15:20:36 -07001#ifndef __ARCH_X86_64_ATOMIC__
2#define __ARCH_X86_64_ATOMIC__
3
4#include <linux/config.h>
Andi Kleen99f7b772006-01-11 22:46:12 +01005#include <asm/types.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -07006
7/* atomic_t should be 32 bit signed type */
8
9/*
10 * Atomic operations that C can't guarantee us. Useful for
11 * resource counting etc..
12 */
13
#ifdef CONFIG_SMP
#define LOCK "lock ; "	/* bus-lock prefix: makes the RMW atomic across CPUs */
#else
#define LOCK ""		/* UP: one instruction is already atomic vs. interrupts */
#endif

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer, e.g.: static atomic_t refs = ATOMIC_INIT(1); */
#define ATOMIC_INIT(i)	{ (i) }
28
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  This is a plain load of the
 * volatile counter and implies no memory barrier.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  Plain store of the volatile
 * counter; implies no memory barrier.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))
45
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.  Does not return the result.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		/* LOCK expands to "lock ; " on SMP (see above), "" on UP. */
		LOCK "addl %1,%0"
		:"=m" (v->counter)
		/* "ir": @i may be an immediate or register.  The extra "m"
		 * input tells gcc the old memory value is consumed too, so
		 * the asm is not treated as a pure store to v->counter. */
		:"ir" (i), "m" (v->counter));
}
60
/**
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.  Does not return the result.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		/* Locked on SMP via the LOCK macro. */
		LOCK "subl %1,%0"
		:"=m" (v->counter)
		/* Dummy "m" input: old value is read as well as written. */
		:"ir" (i), "m" (v->counter));
}
75
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;	/* sete stores 1 here iff the new value is 0 */

	__asm__ __volatile__(
		/* Locked subtract, then capture ZF of that same subtract.
		 * "=qm": any byte-addressable register or memory for @c. */
		LOCK "subl %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		/* "memory" clobber: compiler barrier, gcc must not cache
		 * other memory values across this operation. */
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
95
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "incl %0"		/* locked on SMP via the LOCK macro */
		:"=m" (v->counter)
		:"m" (v->counter));	/* dummy input: old value is read too */
}
109
/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "decl %0"		/* locked on SMP via the LOCK macro */
		:"=m" (v->counter)
		:"m" (v->counter));	/* dummy input: old value is read too */
}
123
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;	/* sete stores 1 here iff the new value is 0 */

	__asm__ __volatile__(
		/* Locked decrement, then capture ZF of that decrement. */
		LOCK "decl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");	/* memory = compiler barrier */
	return c != 0;
}
142
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;	/* sete stores 1 here iff the new value is 0 */

	__asm__ __volatile__(
		/* Locked increment, then capture ZF of that increment. */
		LOCK "incl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");	/* memory = compiler barrier */
	return c != 0;
}
161
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;	/* sets stores 1 here iff the sign flag is set */

	__asm__ __volatile__(
		/* Locked add, then capture SF (result < 0) of that add. */
		LOCK "addl %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
181
/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;	/* keep the addend: xadd leaves the OLD value in @i */
	__asm__ __volatile__(
		/* xaddl atomically exchanges %0 with the memory word and
		 * adds them; afterwards @i holds the previous counter. */
		LOCK "xaddl %0, %1;"
		:"=r"(i)
		:"m"(v->counter), "0"(i));	/* "0": same register as %0 */
	/* old value + addend == the new value of the counter */
	return i + __i;
}
198
199static __inline__ int atomic_sub_return(int i, atomic_t *v)
200{
201 return atomic_add_return(-i,v);
202}
203
204#define atomic_inc_return(v) (atomic_add_return(1,v))
205#define atomic_dec_return(v) (atomic_sub_return(1,v))
206
/* An 64bit atomic type */

/* long is 64 bits on x86-64, matching the "q"-suffixed ops below. */
typedef struct { volatile long counter; } atomic64_t;

/* Static initializer, e.g.: static atomic64_t n = ATOMIC64_INIT(0); */
#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)		((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  Plain store; no barrier.
 */
#define atomic64_set(v,i)		(((v)->counter) = (i))
230
/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.  Does not return the result.
 */
static __inline__ void atomic64_add(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		/* 64-bit add; locked on SMP via the LOCK macro. */
		LOCK "addq %1,%0"
		:"=m" (v->counter)
		/* Dummy "m" input: old value is read as well as written. */
		:"ir" (i), "m" (v->counter));
}
245
/**
 * atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.  Does not return the result.
 */
static __inline__ void atomic64_sub(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		/* 64-bit subtract; locked on SMP via the LOCK macro. */
		LOCK "subq %1,%0"
		:"=m" (v->counter)
		/* Dummy "m" input: old value is read as well as written. */
		:"ir" (i), "m" (v->counter));
}
260
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;	/* sete stores 1 here iff the new value is 0 */

	__asm__ __volatile__(
		/* Locked 64-bit subtract, then capture ZF of it. */
		LOCK "subq %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");	/* barrier */
	return c;
}
280
/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic64_inc(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "incq %0"		/* locked on SMP via the LOCK macro */
		:"=m" (v->counter)
		:"m" (v->counter));	/* dummy input: old value is read too */
}
294
/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic64_dec(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "decq %0"		/* locked on SMP via the LOCK macro */
		:"=m" (v->counter)
		:"m" (v->counter));	/* dummy input: old value is read too */
}
308
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;	/* sete stores 1 here iff the new value is 0 */

	__asm__ __volatile__(
		/* Locked 64-bit decrement, then capture ZF of it. */
		LOCK "decq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");	/* memory = compiler barrier */
	return c != 0;
}
327
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;	/* sete stores 1 here iff the new value is 0 */

	__asm__ __volatile__(
		/* Locked 64-bit increment, then capture ZF of it. */
		LOCK "incq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");	/* memory = compiler barrier */
	return c != 0;
}
346
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;	/* sets stores 1 here iff the sign flag is set */

	__asm__ __volatile__(
		/* Locked 64-bit add, then capture SF (result < 0) of it. */
		LOCK "addq %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}
366
/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ long atomic64_add_return(long i, atomic64_t *v)
{
	long __i = i;	/* keep the addend: xadd leaves the OLD value in @i */
	__asm__ __volatile__(
		/* xaddq atomically exchanges %0 with the memory word and
		 * adds them; afterwards @i holds the previous counter. */
		LOCK "xaddq %0, %1;"
		:"=r"(i)
		:"m"(v->counter), "0"(i));	/* "0": same register as %0 */
	/* old value + addend == the new value of the counter */
	return i + __i;
}
383
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800384static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700385{
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800386 return atomic64_add_return(-i,v);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700387}
388
Hugh Dickins7c72aaf2005-11-23 13:37:40 -0800389#define atomic64_inc_return(v) (atomic64_add_return(1,v))
390#define atomic64_dec_return(v) (atomic64_sub_return(1,v))
391
/* Compare-and-swap: if *v == old set it to new; returns the value that was
 * actually read (callers compare it against old to detect success — see
 * atomic_add_unless below).  cmpxchg()/xchg() are defined elsewhere. */
#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
/* Unconditional atomic exchange; returns the previous value. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
Nick Piggin4a6dae62005-11-13 16:07:24 -0800394
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 *
 * Open-coded cmpxchg loop: retry with the freshly observed value
 * whenever another CPU changed the counter between the read and the
 * cmpxchg.  NOTE: being a macro, @v, @a and @u may be evaluated more
 * than once — avoid arguments with side effects.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	for (;;) {						\
		if (unlikely(c == (u)))				\
			break;					\
		old = atomic_cmpxchg((v), c, c + (a));		\
		if (likely(old == c))				\
			break;					\
		c = old;					\
	}							\
	c != (u);						\
})
/* Common special case: increment only if the counter is non-zero. */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
419
/* These are x86-specific, used by some header files */

/**
 * atomic_clear_mask - atomically clear bits in a memory word
 * @mask: bits to clear
 * @addr: pointer to the int-sized word to operate on
 *
 * *(addr) &= ~(mask), atomically (LOCKed on SMP).
 * Fix: @addr is now parenthesized in the expansion; the old "m" (*addr)
 * dereferenced incorrectly for expressions such as (p + 1).
 */
#define atomic_clear_mask(mask, addr) \
__asm__ __volatile__(LOCK "andl %0,%1" \
: : "r" (~(mask)),"m" (*(addr)) : "memory")

/**
 * atomic_set_mask - atomically set bits in a memory word
 * @mask: bits to set
 * @addr: pointer to the int-sized word to operate on
 *
 * *(addr) |= (mask), atomically (LOCKed on SMP).
 * Fix: @mask is parenthesized before the cast; the old (unsigned)mask
 * cast only the first operand of expressions such as (a | b).
 */
#define atomic_set_mask(mask, addr) \
__asm__ __volatile__(LOCK "orl %0,%1" \
: : "r" ((unsigned)(mask)),"m" (*(addr)) : "memory")
428
/* Atomic operations are already serializing on x86 */
/* Hence these hooks only need to stop compiler reordering: barrier()
 * is a pure compiler barrier, no fence instruction is emitted. */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
434
Christoph Lameterd3cb4872006-01-06 00:11:20 -0800435#include <asm-generic/atomic.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -0700436#endif