#ifndef __ARCH_BLACKFIN_ATOMIC__
#define __ARCH_BLACKFIN_ATOMIC__

#ifndef CONFIG_SMP
# include <asm-generic/atomic.h>
#else

#include <linux/types.h>
#include <asm/system.h>	/* local_irq_XXX() */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }
#define atomic_set(v, i)	(((v)->counter) = (i))

#define atomic_read(v)	__raw_uncached_fetch_asm(&(v)->counter)

asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);
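
/*
 * Note that atomic_set() is a plain store while atomic_read() goes
 * through an uncached fetch.  The Blackfin SMP port has no hardware
 * cache coherency between cores, so reading a possibly stale copy of
 * the counter out of the local L1 data cache would be unsafe
 * (rationale as inferred from the SMP port).
 */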

asmlinkage int __raw_atomic_update_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_clear_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_set_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);
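
/*
 * The __raw_*_asm helpers above are implemented in assembly elsewhere
 * in arch/blackfin/.  Judging from their use below, each atomically
 * applies its operation to *ptr and returns the resulting value: add
 * for _update_, AND-NOT for _clear_, OR for _set_, XOR for _xor_;
 * _test_ returns *ptr AND value without modifying the counter.
 */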

static inline void atomic_add(int i, atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, i);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, -i);
}

static inline int atomic_add_return(int i, atomic_t *v)
{
	return __raw_atomic_update_asm(&v->counter, i);
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return __raw_atomic_update_asm(&v->counter, -i);
}
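
/*
 * atomic_sub() is just atomic_add() of the negated operand, and the
 * *_return variants hand back the post-operation value.  A typical
 * (hypothetical) use:
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_add(2, &nr_users);
 *	...
 *	if (atomic_sub_return(2, &nr_users) == 0)
 *		...	// we were the last users
 */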

static inline void atomic_inc(volatile atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, 1);
}

static inline void atomic_dec(volatile atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, -1);
}

static inline void atomic_clear_mask(int mask, atomic_t *v)
{
	__raw_atomic_clear_asm(&v->counter, mask);
}

static inline void atomic_set_mask(int mask, atomic_t *v)
{
	__raw_atomic_set_asm(&v->counter, mask);
}

static inline int atomic_test_mask(int mask, atomic_t *v)
{
	return __raw_atomic_test_asm(&v->counter, mask);
}
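
/*
 * The mask helpers treat the counter as a bit mask rather than a
 * count: atomic_set_mask() ORs bits in, atomic_clear_mask() clears
 * them, and atomic_test_mask() reads them back.  A sketch (the flag
 * value is made up for illustration):
 *
 *	#define IRQ_PENDING	0x1
 *
 *	atomic_set_mask(IRQ_PENDING, &flags);
 *	if (atomic_test_mask(IRQ_PENDING, &flags))
 *		atomic_clear_mask(IRQ_PENDING, &flags);
 */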

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
#define atomic_dec_return(v)	atomic_sub_return(1, (v))
#define atomic_inc_return(v)	atomic_add_return(1, (v))

#define atomic_cmpxchg(v, o, n)	((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))
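
/*
 * atomic_cmpxchg() writes @n to the counter only if it still reads as
 * @o, and returns the value it actually observed, so callers can tell
 * whether their update won.  atomic_xchg() swaps unconditionally and
 * returns the old value.  Both simply defer to the generic
 * cmpxchg()/xchg() on the counter word.
 */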

#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
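
/*
 * atomic_add_unless() is the usual cmpxchg retry loop: if another CPU
 * updates the counter between our atomic_read() and atomic_cmpxchg(),
 * the cmpxchg fails and we retry with the freshly observed value.  It
 * adds @a to @v unless @v was @u, and evaluates non-zero iff the add
 * happened.  For example, with the counter at 3, atomic_add_unless(v,
 * 1, 0) stores 4 and yields true; with the counter at 0 it stores
 * nothing and yields false -- which is exactly what
 * atomic_inc_not_zero() below relies on.
 */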
#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns true if the result is
 * zero, or false for all other cases.
 */
#define atomic_inc_and_test(v)	(atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)	(atomic_sub_return(1, (v)) == 0)
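
/*
 * The *_and_test() forms report whether the counter reached zero,
 * which is the classic shape of a reference-count release (obj and
 * free_object() below are hypothetical):
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		free_object(obj);	// last reference dropped
 */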

#include <asm-generic/atomic-long.h>

#endif /* !CONFIG_SMP */

#endif /* __ARCH_BLACKFIN_ATOMIC__ */