/*
 * include/asm-v850/atomic.h -- Atomic operations
 *
 * Copyright (C) 2001,02 NEC Corporation
 * Copyright (C) 2001,02 Miles Bader <miles@gnu.org>
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file COPYING in the main directory of this
 * archive for more details.
 *
 * Written by Miles Bader <miles@gnu.org>
 */

#ifndef __V850_ATOMIC_H__
#define __V850_ATOMIC_H__

#include <linux/config.h>

#include <asm/system.h>

#ifdef CONFIG_SMP
#error SMP not supported
#endif
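
/* Note: this port is uniprocessor-only (the CONFIG_SMP check above
   enforces this), so "atomic" here means atomic with respect to
   interrupts: each read-modify-write below simply runs with local
   interrupts disabled via local_irq_save()/local_irq_restore().  */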

typedef struct { int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
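
/*
 * A minimal usage sketch; the identifiers are hypothetical, purely
 * for illustration:
 *
 *	static atomic_t example_count = ATOMIC_INIT(0);
 *
 *	atomic_set (&example_count, 5);
 *	if (atomic_read (&example_count) == 5)
 *		atomic_inc (&example_count);
 */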

/* Atomically add I to V, returning the new value.  */
static inline int atomic_add_return (int i, volatile atomic_t *v)
{
	unsigned long flags;
	int res;

	local_irq_save (flags);
	res = v->counter + i;
	v->counter = res;
	local_irq_restore (flags);

	return res;
}

/* Atomically subtract I from V, returning the new value.  */
static inline int atomic_sub_return (int i, volatile atomic_t *v)
{
	unsigned long flags;
	int res;

	local_irq_save (flags);
	res = v->counter - i;
	v->counter = res;
	local_irq_restore (flags);

	return res;
}

/* Atomically clear the bits set in MASK from the word at ADDR.  */
static inline void atomic_clear_mask (unsigned long mask, unsigned long *addr)
{
	unsigned long flags;

	local_irq_save (flags);
	*addr &= ~mask;
	local_irq_restore (flags);
}
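
/*
 * For example (example_flags is hypothetical), atomically clearing
 * bit 0 of a flags word:
 *
 *	static unsigned long example_flags;
 *
 *	atomic_clear_mask (1UL << 0, &example_flags);
 */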

#endif /* __KERNEL__ */

#define atomic_add(i, v)	atomic_add_return ((i), (v))
#define atomic_sub(i, v)	atomic_sub_return ((i), (v))

#define atomic_dec_return(v)	atomic_sub_return (1, (v))
#define atomic_inc_return(v)	atomic_add_return (1, (v))
#define atomic_inc(v)		atomic_inc_return (v)
#define atomic_dec(v)		atomic_dec_return (v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)	(atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i,v)	(atomic_sub_return ((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return (1, (v)) == 0)
#define atomic_add_negative(i,v)	(atomic_add_return ((i), (v)) < 0)
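
/*
 * atomic_dec_and_test() is the classic building block for reference
 * counting: free the object only when the last reference is dropped.
 * A sketch, where example_obj and example_put are hypothetical and
 * kfree() comes from <linux/slab.h>:
 *
 *	struct example_obj {
 *		atomic_t refcount;
 *	};
 *
 *	void example_put (struct example_obj *obj)
 *	{
 *		if (atomic_dec_and_test (&obj->refcount))
 *			kfree (obj);
 *	}
 */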

/* Atomically compare the value in V with OLD; if they are equal, set V
   to NEW.  Returns the value V held before the operation.  */
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);

	return ret;
}
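
/*
 * atomic_cmpxchg() is normally consumed via a read/retry loop.  A
 * minimal sketch (example_add_via_cmpxchg is hypothetical, not part
 * of this header):
 *
 *	static inline void example_add_via_cmpxchg (atomic_t *v, int a)
 *	{
 *		int old;
 *		do {
 *			old = atomic_read (v);
 *		} while (atomic_cmpxchg (v, old, old + a) != old);
 *	}
 */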

/* Atomically add A to V unless V's value is U.  Returns true if the
   addition was performed.  */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);

	return ret != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
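
/*
 * atomic_inc_not_zero() is the usual "try to get a reference" idiom:
 * the count is only incremented if the object is still live.  A
 * sketch with hypothetical identifiers:
 *
 *	if (atomic_inc_not_zero (&obj->refcount))
 *		use_object (obj);
 */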

/* Atomic operations are already serializing on the v850, since they
   are implemented with interrupts disabled, so these are just compiler
   barriers.  */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif /* __V850_ATOMIC_H__ */