/*
 * x86 version of "atomic_dec_and_lock()" using
 * the atomic "cmpxchg" instruction.
 *
 * (For CPUs lacking cmpxchg, we use the slow
 * generic version, and this one never even gets
 * compiled).
 */

#include <linux/spinlock.h>
#include <linux/module.h>
#include <asm/atomic.h>

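/*
 * Decrement "atomic"; only if the count hits zero, take "lock" and
 * return 1 with the lock held (the caller unlocks after disposing of
 * the object).  If the count stays positive, return 0 without ever
 * touching the lock.
 */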
int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
{
	int counter;
	int newcount;

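	/*
	 * Fast path: as long as the decremented value stays non-zero we
	 * can publish it with a locked cmpxchg and never take the spinlock.
	 */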
repeat:
	counter = atomic_read(atomic);
	newcount = counter-1;

	if (!newcount)
		goto slow_path;

	asm volatile("lock; cmpxchgl %1,%2"
		:"=a" (newcount)
		:"r" (newcount), "m" (atomic->counter), "0" (counter));

	/* If the above failed, "eax" will have changed */
	if (newcount != counter)
		goto repeat;
	return 0;

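	/*
	 * Slow path: the count is about to reach zero, so take the lock
	 * and redo the decrement under it.
	 */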
slow_path:
	spin_lock(lock);
	if (atomic_dec_and_test(atomic))
		return 1;
	spin_unlock(lock);
	return 0;
}
EXPORT_SYMBOL(_atomic_dec_and_lock);
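
/*
 * A minimal caller sketch, with hypothetical names ("struct foo",
 * "foo_lock", "foo_put") that are illustrative only and not part of
 * this file.  Callers use the atomic_dec_and_lock() wrapper; the lock
 * is taken only when the reference count actually reaches zero:
 *
 *	void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_lock(&f->refcount, &foo_lock)) {
 *			list_del(&f->node);
 *			spin_unlock(&foo_lock);
 *			kfree(f);
 *		}
 *	}
 */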