blob: aff9f1fcdcd7e2be9d86049fc7f9522bafd4b35d [file] [log] [blame]
H. Peter Anvin1965aae2008-10-22 22:26:29 -07001#ifndef _ASM_X86_ATOMIC_32_H
2#define _ASM_X86_ATOMIC_32_H
Linus Torvalds1da177e2005-04-16 15:20:36 -07003
Linus Torvalds1da177e2005-04-16 15:20:36 -07004#include <linux/compiler.h>
Matthew Wilcoxea4354672009-01-06 14:40:39 -08005#include <linux/types.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -07006#include <asm/processor.h>
Jeff Dikea436ed92007-05-08 00:35:02 -07007#include <asm/cmpxchg.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -07008
9/*
10 * Atomic operations that C can't guarantee us. Useful for
11 * resource counting etc..
12 */
13
#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 *
 * The volatile cast forces a real load on every use; without it the
 * compiler may legally cache v->counter in a register across a
 * busy-wait loop (this matches the later upstream fix to atomic_read).
 */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  A plain aligned 32-bit store
 * is atomic on x86.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))
Linus Torvalds1da177e2005-04-16 15:20:36 -070032
33/**
34 * atomic_add - add integer to atomic variable
35 * @i: integer value to add
36 * @v: pointer of type atomic_t
Joe Perches78ff12e2008-03-23 01:01:41 -070037 *
Linus Torvalds1da177e2005-04-16 15:20:36 -070038 * Atomically adds @i to @v.
39 */
Joe Perches78ff12e2008-03-23 01:01:41 -070040static inline void atomic_add(int i, atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -070041{
Joe Perches78ff12e2008-03-23 01:01:41 -070042 asm volatile(LOCK_PREFIX "addl %1,%0"
43 : "+m" (v->counter)
44 : "ir" (i));
Linus Torvalds1da177e2005-04-16 15:20:36 -070045}
46
47/**
Robert P. J. Daycc386822007-05-08 00:35:08 -070048 * atomic_sub - subtract integer from atomic variable
Linus Torvalds1da177e2005-04-16 15:20:36 -070049 * @i: integer value to subtract
50 * @v: pointer of type atomic_t
Joe Perches78ff12e2008-03-23 01:01:41 -070051 *
Linus Torvalds1da177e2005-04-16 15:20:36 -070052 * Atomically subtracts @i from @v.
53 */
Joe Perches78ff12e2008-03-23 01:01:41 -070054static inline void atomic_sub(int i, atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -070055{
Joe Perches78ff12e2008-03-23 01:01:41 -070056 asm volatile(LOCK_PREFIX "subl %1,%0"
57 : "+m" (v->counter)
58 : "ir" (i));
Linus Torvalds1da177e2005-04-16 15:20:36 -070059}
60
61/**
62 * atomic_sub_and_test - subtract value from variable and test result
63 * @i: integer value to subtract
64 * @v: pointer of type atomic_t
Joe Perches78ff12e2008-03-23 01:01:41 -070065 *
Linus Torvalds1da177e2005-04-16 15:20:36 -070066 * Atomically subtracts @i from @v and returns
67 * true if the result is zero, or false for all
68 * other cases.
69 */
Joe Perches78ff12e2008-03-23 01:01:41 -070070static inline int atomic_sub_and_test(int i, atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -070071{
72 unsigned char c;
73
Joe Perches78ff12e2008-03-23 01:01:41 -070074 asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
75 : "+m" (v->counter), "=qm" (c)
76 : "ir" (i) : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -070077 return c;
78}
79
80/**
81 * atomic_inc - increment atomic variable
82 * @v: pointer of type atomic_t
Joe Perches78ff12e2008-03-23 01:01:41 -070083 *
Linus Torvalds1da177e2005-04-16 15:20:36 -070084 * Atomically increments @v by 1.
Joe Perches78ff12e2008-03-23 01:01:41 -070085 */
86static inline void atomic_inc(atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -070087{
Joe Perches78ff12e2008-03-23 01:01:41 -070088 asm volatile(LOCK_PREFIX "incl %0"
89 : "+m" (v->counter));
Linus Torvalds1da177e2005-04-16 15:20:36 -070090}
91
92/**
93 * atomic_dec - decrement atomic variable
94 * @v: pointer of type atomic_t
Joe Perches78ff12e2008-03-23 01:01:41 -070095 *
Linus Torvalds1da177e2005-04-16 15:20:36 -070096 * Atomically decrements @v by 1.
Joe Perches78ff12e2008-03-23 01:01:41 -070097 */
98static inline void atomic_dec(atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -070099{
Joe Perches78ff12e2008-03-23 01:01:41 -0700100 asm volatile(LOCK_PREFIX "decl %0"
101 : "+m" (v->counter));
Linus Torvalds1da177e2005-04-16 15:20:36 -0700102}
103
104/**
105 * atomic_dec_and_test - decrement and test
106 * @v: pointer of type atomic_t
Joe Perches78ff12e2008-03-23 01:01:41 -0700107 *
Linus Torvalds1da177e2005-04-16 15:20:36 -0700108 * Atomically decrements @v by 1 and
109 * returns true if the result is 0, or false for all other
110 * cases.
Joe Perches78ff12e2008-03-23 01:01:41 -0700111 */
112static inline int atomic_dec_and_test(atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700113{
114 unsigned char c;
115
Joe Perches78ff12e2008-03-23 01:01:41 -0700116 asm volatile(LOCK_PREFIX "decl %0; sete %1"
117 : "+m" (v->counter), "=qm" (c)
118 : : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700119 return c != 0;
120}
121
122/**
Joe Perches78ff12e2008-03-23 01:01:41 -0700123 * atomic_inc_and_test - increment and test
Linus Torvalds1da177e2005-04-16 15:20:36 -0700124 * @v: pointer of type atomic_t
Joe Perches78ff12e2008-03-23 01:01:41 -0700125 *
Linus Torvalds1da177e2005-04-16 15:20:36 -0700126 * Atomically increments @v by 1
127 * and returns true if the result is zero, or false for all
128 * other cases.
Joe Perches78ff12e2008-03-23 01:01:41 -0700129 */
130static inline int atomic_inc_and_test(atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700131{
132 unsigned char c;
133
Joe Perches78ff12e2008-03-23 01:01:41 -0700134 asm volatile(LOCK_PREFIX "incl %0; sete %1"
135 : "+m" (v->counter), "=qm" (c)
136 : : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700137 return c != 0;
138}
139
140/**
141 * atomic_add_negative - add and test if negative
142 * @v: pointer of type atomic_t
143 * @i: integer value to add
Joe Perches78ff12e2008-03-23 01:01:41 -0700144 *
Linus Torvalds1da177e2005-04-16 15:20:36 -0700145 * Atomically adds @i to @v and returns true
146 * if the result is negative, or false when
147 * result is greater than or equal to zero.
Joe Perches78ff12e2008-03-23 01:01:41 -0700148 */
149static inline int atomic_add_negative(int i, atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700150{
151 unsigned char c;
152
Joe Perches78ff12e2008-03-23 01:01:41 -0700153 asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
154 : "+m" (v->counter), "=qm" (c)
155 : "ir" (i) : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700156 return c;
157}
158
159/**
Robert P. J. Daycc386822007-05-08 00:35:08 -0700160 * atomic_add_return - add integer and return
Linus Torvalds1da177e2005-04-16 15:20:36 -0700161 * @v: pointer of type atomic_t
162 * @i: integer value to add
163 *
164 * Atomically adds @i to @v and returns @i + @v
165 */
Joe Perches78ff12e2008-03-23 01:01:41 -0700166static inline int atomic_add_return(int i, atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700167{
168 int __i;
169#ifdef CONFIG_M386
lepton1bb858f2006-04-18 22:21:10 -0700170 unsigned long flags;
Joe Perches78ff12e2008-03-23 01:01:41 -0700171 if (unlikely(boot_cpu_data.x86 <= 3))
Linus Torvalds1da177e2005-04-16 15:20:36 -0700172 goto no_xadd;
173#endif
174 /* Modern 486+ processor */
175 __i = i;
Joe Perches78ff12e2008-03-23 01:01:41 -0700176 asm volatile(LOCK_PREFIX "xaddl %0, %1"
177 : "+r" (i), "+m" (v->counter)
178 : : "memory");
Linus Torvalds1da177e2005-04-16 15:20:36 -0700179 return i + __i;
180
181#ifdef CONFIG_M386
182no_xadd: /* Legacy 386 processor */
lepton1bb858f2006-04-18 22:21:10 -0700183 local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700184 __i = atomic_read(v);
185 atomic_set(v, i + __i);
lepton1bb858f2006-04-18 22:21:10 -0700186 local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700187 return i + __i;
188#endif
189}
190
Robert P. J. Daycc386822007-05-08 00:35:08 -0700191/**
192 * atomic_sub_return - subtract integer and return
193 * @v: pointer of type atomic_t
194 * @i: integer value to subtract
195 *
196 * Atomically subtracts @i from @v and returns @v - @i
197 */
Joe Perches78ff12e2008-03-23 01:01:41 -0700198static inline int atomic_sub_return(int i, atomic_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700199{
Joe Perches78ff12e2008-03-23 01:01:41 -0700200 return atomic_add_return(-i, v);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700201}
202
/* 32-bit compare-and-swap / exchange, layered on the generic cmpxchg/xchg */
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
Nick Piggin4a6dae62005-11-13 16:07:24 -0800205
Nick Piggin8426e1f2005-11-13 16:07:25 -0800206/**
Robert P. J. Day72fd4a32007-02-10 01:45:59 -0800207 * atomic_add_unless - add unless the number is already a given value
Nick Piggin8426e1f2005-11-13 16:07:25 -0800208 * @v: pointer of type atomic_t
209 * @a: the amount to add to v...
210 * @u: ...unless v is equal to u.
211 *
Robert P. J. Day72fd4a32007-02-10 01:45:59 -0800212 * Atomically adds @a to @v, so long as @v was not already @u.
Nick Piggin8426e1f2005-11-13 16:07:25 -0800213 * Returns non-zero if @v was not @u, and zero otherwise.
214 */
Joe Perches78ff12e2008-03-23 01:01:41 -0700215static inline int atomic_add_unless(atomic_t *v, int a, int u)
Mathieu Desnoyers2856f5e2007-05-08 00:34:38 -0700216{
217 int c, old;
218 c = atomic_read(v);
219 for (;;) {
220 if (unlikely(c == (u)))
221 break;
222 old = atomic_cmpxchg((v), c, c + (a));
223 if (likely(old == c))
224 break;
225 c = old;
226 }
227 return c != (u);
228}
229
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_inc_return(v) (atomic_add_return(1, v))
#define atomic_dec_return(v) (atomic_sub_return(1, v))

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"			\
		     : : "r" (mask), "m" (*(addr)) : "memory")

/*
 * Atomic operations are already serializing on x86, so the
 * smp_mb__{before,after}_atomic_* hooks only need a compiler barrier.
 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
249
/* A 64-bit atomic type */

typedef struct {
	unsigned long long counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }

/**
 * __atomic64_read - raw read of atomic64 variable
 * @ptr: pointer of type atomic64_t
 *
 * Plain 64-bit load; on 32-bit x86 this may be torn (two 32-bit
 * loads).  Callers must tolerate torn values — the cmpxchg8b loops
 * below retry until a consistent value is confirmed.
 * Doesn't imply a read memory barrier.
 */
#define __atomic64_read(ptr)		((ptr)->counter)
266
267static inline unsigned long long
268cmpxchg8b(unsigned long long *ptr, unsigned long long old, unsigned long long new)
269{
270 asm volatile(
271
272 LOCK_PREFIX "cmpxchg8b (%[ptr])\n"
273
274 : "=A" (old)
275
276 : [ptr] "D" (ptr),
277 "A" (old),
278 "b" (ll_low(new)),
279 "c" (ll_high(new))
280
281 : "memory");
282
283 return old;
284}
285
286static inline unsigned long long
287atomic64_cmpxchg(atomic64_t *ptr, unsigned long long old_val,
288 unsigned long long new_val)
289{
290 return cmpxchg8b(&ptr->counter, old_val, new_val);
291}
292
293/**
Ingo Molnar98c2aaf2009-04-07 11:30:17 +0200294 * atomic64_xchg - xchg atomic64 variable
295 * @ptr: pointer to type atomic64_t
296 * @new_val: value to assign
297 * @old_val: old value that was there
298 *
299 * Atomically xchgs the value of @ptr to @new_val and returns
300 * the old value.
301 */
302
303static inline unsigned long long
304atomic64_xchg(atomic64_t *ptr, unsigned long long new_val)
305{
306 unsigned long long old_val;
307
308 do {
309 old_val = atomic_read(ptr);
310 } while (atomic64_cmpxchg(ptr, old_val, new_val) != old_val);
311
312 return old_val;
313}
314
315/**
Ingo Molnar9b194e82008-12-14 20:22:35 +0100316 * atomic64_set - set atomic64 variable
317 * @ptr: pointer to type atomic64_t
318 * @new_val: value to assign
319 *
320 * Atomically sets the value of @ptr to @new_val.
321 */
322static inline void atomic64_set(atomic64_t *ptr, unsigned long long new_val)
323{
Ingo Molnar98c2aaf2009-04-07 11:30:17 +0200324 atomic64_xchg(ptr, new_val);
Ingo Molnar9b194e82008-12-14 20:22:35 +0100325}
326
327/**
328 * atomic64_read - read atomic64 variable
329 * @ptr: pointer to type atomic64_t
330 *
331 * Atomically reads the value of @ptr and returns it.
332 */
333static inline unsigned long long atomic64_read(atomic64_t *ptr)
334{
335 unsigned long long curr_val;
336
337 do {
338 curr_val = __atomic64_read(ptr);
339 } while (atomic64_cmpxchg(ptr, curr_val, curr_val) != curr_val);
340
341 return curr_val;
342}
343
344/**
345 * atomic64_add_return - add and return
346 * @delta: integer value to add
347 * @ptr: pointer to type atomic64_t
348 *
349 * Atomically adds @delta to @ptr and returns @delta + *@ptr
350 */
351static inline unsigned long long
352atomic64_add_return(unsigned long long delta, atomic64_t *ptr)
353{
354 unsigned long long old_val, new_val;
355
356 do {
357 old_val = atomic_read(ptr);
358 new_val = old_val + delta;
359
360 } while (atomic64_cmpxchg(ptr, old_val, new_val) != old_val);
361
362 return new_val;
363}
364
365static inline long atomic64_sub_return(unsigned long long delta, atomic64_t *ptr)
366{
367 return atomic64_add_return(-delta, ptr);
368}
369
370static inline long atomic64_inc_return(atomic64_t *ptr)
371{
372 return atomic64_add_return(1, ptr);
373}
374
375static inline long atomic64_dec_return(atomic64_t *ptr)
376{
377 return atomic64_sub_return(1, ptr);
378}
379
380/**
381 * atomic64_add - add integer to atomic64 variable
382 * @delta: integer value to add
383 * @ptr: pointer to type atomic64_t
384 *
385 * Atomically adds @delta to @ptr.
386 */
387static inline void atomic64_add(unsigned long long delta, atomic64_t *ptr)
388{
389 atomic64_add_return(delta, ptr);
390}
391
392/**
393 * atomic64_sub - subtract the atomic64 variable
394 * @delta: integer value to subtract
395 * @ptr: pointer to type atomic64_t
396 *
397 * Atomically subtracts @delta from @ptr.
398 */
399static inline void atomic64_sub(unsigned long long delta, atomic64_t *ptr)
400{
401 atomic64_add(-delta, ptr);
402}
403
404/**
405 * atomic64_sub_and_test - subtract value from variable and test result
406 * @delta: integer value to subtract
407 * @ptr: pointer to type atomic64_t
408 *
409 * Atomically subtracts @delta from @ptr and returns
410 * true if the result is zero, or false for all
411 * other cases.
412 */
413static inline int
414atomic64_sub_and_test(unsigned long long delta, atomic64_t *ptr)
415{
416 unsigned long long old_val = atomic64_sub_return(delta, ptr);
417
418 return old_val == 0;
419}
420
421/**
422 * atomic64_inc - increment atomic64 variable
423 * @ptr: pointer to type atomic64_t
424 *
425 * Atomically increments @ptr by 1.
426 */
427static inline void atomic64_inc(atomic64_t *ptr)
428{
429 atomic64_add(1, ptr);
430}
431
432/**
433 * atomic64_dec - decrement atomic64 variable
434 * @ptr: pointer to type atomic64_t
435 *
436 * Atomically decrements @ptr by 1.
437 */
438static inline void atomic64_dec(atomic64_t *ptr)
439{
440 atomic64_sub(1, ptr);
441}
442
443/**
444 * atomic64_dec_and_test - decrement and test
445 * @ptr: pointer to type atomic64_t
446 *
447 * Atomically decrements @ptr by 1 and
448 * returns true if the result is 0, or false for all other
449 * cases.
450 */
451static inline int atomic64_dec_and_test(atomic64_t *ptr)
452{
453 return atomic64_sub_and_test(1, ptr);
454}
455
456/**
457 * atomic64_inc_and_test - increment and test
458 * @ptr: pointer to type atomic64_t
459 *
460 * Atomically increments @ptr by 1
461 * and returns true if the result is zero, or false for all
462 * other cases.
463 */
464static inline int atomic64_inc_and_test(atomic64_t *ptr)
465{
466 return atomic64_sub_and_test(-1, ptr);
467}
468
469/**
470 * atomic64_add_negative - add and test if negative
471 * @delta: integer value to add
472 * @ptr: pointer to type atomic64_t
473 *
474 * Atomically adds @delta to @ptr and returns true
475 * if the result is negative, or false when
476 * result is greater than or equal to zero.
477 */
478static inline int
479atomic64_add_negative(unsigned long long delta, atomic64_t *ptr)
480{
481 long long old_val = atomic64_add_return(delta, ptr);
482
483 return old_val < 0;
484}
485
Christoph Lameterd3cb4872006-01-06 00:11:20 -0800486#include <asm-generic/atomic.h>
H. Peter Anvin1965aae2008-10-22 22:26:29 -0700487#endif /* _ASM_X86_ATOMIC_32_H */