#ifndef __ASM_SH_ATOMIC_H
#define __ASM_SH_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 */

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	( (atomic_t) { (i) } )

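/*
 * The counter is a plain (volatile) int, and an aligned word load or
 * store is already atomic on SH, so atomic_read() and atomic_set() need
 * no locking.
 */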
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		((v)->counter = (i))

#include <linux/compiler.h>
#include <asm/system.h>

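/*
 * SH-4A has a load-locked/store-conditional pair (movli.l/movco.l), so
 * it gets lock-free atomics from atomic-llsc.h; every other CPU falls
 * back to the interrupt-disabling implementation in atomic-irq.h.
 */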
#ifdef CONFIG_CPU_SH4A
#include <asm/atomic-llsc.h>
#else
#include <asm/atomic-irq.h>
#endif

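/*
 * atomic_add()/atomic_sub() and their *_return() forms come from the
 * header selected above; the helpers below are layered on top of them.
 */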
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

#define atomic_dec_return(v)		atomic_sub_return(1,(v))
#define atomic_inc_return(v)		atomic_add_return(1,(v))

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

#define atomic_inc(v) atomic_add(1,(v))
#define atomic_dec(v) atomic_sub(1,(v))

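/*
 * atomic_cmpxchg - compare and exchange
 * @v: pointer of type atomic_t
 * @old: expected value
 * @new: value to store if @v still holds @old
 *
 * Returns the value that was read from @v; the store only happens when
 * that value equals @old.  An illustrative caller retries until it wins
 * the race, e.g.:
 *
 *	do {
 *		old = atomic_read(v);
 *		new = old * 2;
 *	} while (atomic_cmpxchg(v, old, new) != old);
 */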
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);

	return ret;
}

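/*
 * atomic_xchg() is built on the xchg() primitive pulled in via the
 * <asm/system.h> include above: the counter is atomically replaced with
 * @new and the previous value is returned.
 */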
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

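/*
 * atomic_add_unless - add to the counter unless it has a given value
 * @v: pointer of type atomic_t
 * @a: amount to add
 * @u: value at which the add is skipped
 *
 * Returns non-zero if the add was performed, zero if @v already held @u.
 * atomic_inc_not_zero() below builds on this for the common refcounting
 * pattern of only taking a reference while the count is still non-zero.
 */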
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);

	return ret != u;
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

/* Atomic operations are already serializing on SH */
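/*
 * (The implementations above either disable interrupts around the update
 * or use an LL/SC sequence, which is why a plain compiler barrier() is
 * considered sufficient here.)
 */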
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif /* __ASM_SH_ATOMIC_H */