#ifndef __M32R_LOCAL_H
#define __M32R_LOCAL_H

/*
 * linux/include/asm-m32r/local.h
 *
 * M32R version:
 *   Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *   Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 *   Copyright (C) 2007  Mathieu Desnoyers <mathieu.desnoyers@polymtl.ca>
 */

#include <linux/percpu.h>
#include <asm/assembler.h>
#include <asm/local.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting, etc.
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } local_t;

#define LOCAL_INIT(i) { (i) }

/**
 * local_read - read local variable
 * @l: pointer of type local_t
 *
 * Atomically reads the value of @l.
 */
#define local_read(l) ((l)->counter)

/**
 * local_set - set local variable
 * @l: pointer of type local_t
 * @i: required value
 *
 * Atomically sets the value of @l to @i.
 */
#define local_set(l, i) (((l)->counter) = (i))
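
/*
 * Usage sketch (illustrative only; the name "hypothetical_count" is made
 * up for this example).  A local_t is normally declared per cpu and only
 * ever modified by the cpu that owns it:
 *
 *	static DEFINE_PER_CPU(local_t, hypothetical_count) = LOCAL_INIT(0);
 *
 *	local_t *c = &get_cpu_var(hypothetical_count);
 *	local_set(c, 0);
 *	printk(KERN_DEBUG "count=%d\n", local_read(c));
 *	put_cpu_var(hypothetical_count);
 */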

/**
 * local_add_return - add long to local variable and return it
 * @i: long value to add
 * @l: pointer of type local_t
 *
 * Atomically adds @i to @l and returns (@i + @l).
 */
static inline long local_add_return(long i, local_t *l)
{
	unsigned long flags;
	long result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# local_add_return	\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		"ld %0, @%1;		\n\t"
		"add %0, %2;		\n\t"
		"st %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&l->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
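
/*
 * The load/modify/store sequence above is made atomic with respect to
 * this cpu by disabling interrupts around it rather than by a locked
 * instruction.  On CONFIG_CHIP_M32700_TS1 parts, DCACHE_CLEAR() is
 * expected to expand to a cache workaround that uses "r4" as a scratch
 * register (and to nothing on other chips), which is why "r4" is only
 * added to the clobber list for that configuration.
 */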

/**
 * local_sub_return - subtract long from local variable and return it
 * @i: long value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns (@l - @i).
 */
static inline long local_sub_return(long i, local_t *l)
{
	unsigned long flags;
	long result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# local_sub_return	\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		"ld %0, @%1;		\n\t"
		"sub %0, %2;		\n\t"
		"st %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&l->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}

/**
 * local_add - add long to local variable
 * @i: long value to add
 * @l: pointer of type local_t
 *
 * Atomically adds @i to @l.
 */
#define local_add(i, l) ((void) local_add_return((i), (l)))

/**
 * local_sub - subtract long from local variable
 * @i: long value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l.
 */
#define local_sub(i, l) ((void) local_sub_return((i), (l)))

/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/**
 * local_inc_return - increment local variable and return it
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1 and returns the result.
 */
static inline long local_inc_return(local_t *l)
{
	unsigned long flags;
	long result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# local_inc_return	\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		"ld %0, @%1;		\n\t"
		"addi %0, #1;		\n\t"
		"st %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&l->counter)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}

/**
 * local_dec_return - decrement local variable and return it
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and returns the result.
 */
static inline long local_dec_return(local_t *l)
{
	unsigned long flags;
	long result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# local_dec_return	\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		"ld %0, @%1;		\n\t"
		"addi %0, #-1;		\n\t"
		"st %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&l->counter)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"
#endif /* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}

/**
 * local_inc - increment local variable
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1.
 */
#define local_inc(l) ((void)local_inc_return(l))

/**
 * local_dec - decrement local variable
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1.
 */
#define local_dec(l) ((void)local_dec_return(l))

/**
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/**
 * local_dec_and_test - decrement and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define local_dec_and_test(l) (local_dec_return(l) == 0)
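
/*
 * Sketch (hypothetical names): leave a per-cpu nesting section and do
 * the deferred work only when the outermost level is exited, i.e. when
 * the counter returns to zero.
 *
 *	if (local_dec_and_test(&__get_cpu_var(hypothetical_nesting)))
 *		hypothetical_flush();
 */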

/**
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return((i), (l)) < 0)

#define local_cmpxchg(l, o, n) (cmpxchg_local(&((l)->counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->counter), (n)))

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
static inline int local_add_unless(local_t *l, long a, long u)
{
	long c, old;
	c = local_read(l);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = local_cmpxchg((l), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
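
/*
 * Sketch (hypothetical names): local_inc_not_zero() can be used to take
 * a reference only while the per-cpu object is still live, i.e. the
 * increment is skipped once the counter has already dropped to zero.
 *
 *	if (!local_inc_not_zero(&hypothetical_buf->active))
 *		return -ENODEV;
 */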

static inline void local_clear_mask(unsigned long mask, local_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# local_clear_mask	\n\t"
		DCACHE_CLEAR("%0", "r5", "%1")
		"ld %0, @%1;		\n\t"
		"and %0, %2;		\n\t"
		"st %0, @%1;		\n\t"
		: "=&r" (tmp)
		: "r" (addr), "r" (~mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r5"
#endif /* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}

static inline void local_set_mask(unsigned long mask, local_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# local_set_mask	\n\t"
		DCACHE_CLEAR("%0", "r5", "%1")
		"ld %0, @%1;		\n\t"
		"or %0, %2;		\n\t"
		"st %0, @%1;		\n\t"
		: "=&r" (tmp)
		: "r" (addr), "r" (mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r5"
#endif /* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}
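
/*
 * Sketch (hypothetical names and flag value): local_set_mask() and
 * local_clear_mask() treat the counter as a bit mask rather than a
 * number; the mask passed to local_clear_mask() names the bits to clear.
 *
 *	static local_t hypothetical_state = LOCAL_INIT(0);
 *	#define HYPOTHETICAL_FLAG_BUSY 0x01
 *
 *	local_set_mask(HYPOTHETICAL_FLAG_BUSY, &hypothetical_state);
 *	...
 *	local_clear_mask(HYPOTHETICAL_FLAG_BUSY, &hypothetical_state);
 */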

/* Atomic operations are already serializing on m32r */
#define smp_mb__before_local_dec() barrier()
#define smp_mb__after_local_dec() barrier()
#define smp_mb__before_local_inc() barrier()
#define smp_mb__after_local_inc() barrier()

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a pointer to the local_t, like the functions above, and are not safe
 * against interrupts.
 */

#define __local_inc(l) ((l)->counter++)
#define __local_dec(l) ((l)->counter--)
#define __local_add(i, l) ((l)->counter += (i))
#define __local_sub(i, l) ((l)->counter -= (i))
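
/*
 * Sketch (hypothetical name): the non-atomic variants above are only
 * appropriate when nothing can interleave with the update, e.g. a
 * counter that is only ever touched from one context on the local cpu:
 *
 *	__local_inc(&__get_cpu_var(hypothetical_events));
 */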
#endif /* __M32R_LOCAL_H */