#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

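/*
 * Each arithmetic routine below is a load-reserve/store-conditional
 * retry loop: lwarx loads the counter and takes a reservation, stwcx.
 * stores back only if the reservation is still held, and bne- retries
 * when another CPU got in between.  PPC405_ERR77 emits a workaround
 * for PPC405 erratum #77 and expands to nothing on unaffected CPUs.
 */
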
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
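
/*
 * Unlike the void variants, the value-returning variants (above and
 * below) have memory-barrier semantics: EIEIO_ON_SMP orders accesses
 * before the update and ISYNC_ON_SMP keeps later operations from
 * starting until it completes.  Both expand to nothing on !CONFIG_SMP.
 */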

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
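
/*
 * Illustrative sketch, not part of the kernel API: the function name is
 * hypothetical and cmpxchg() must already be visible (historically from
 * <asm/system.h>).  atomic_cmpxchg() lets exactly one caller win a
 * 0 -> 1 transition.
 */
static __inline__ int example_atomic_claim(atomic_t *flag)
{
	/* atomic_cmpxchg() returns the old value: only one caller sees 0 */
	return atomic_cmpxchg(flag, 0, 1) == 0;
}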

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)			\
({							\
	int c, old;					\
	c = atomic_read(v);				\
	for (;;) {					\
		if (unlikely(c == (u)))			\
			break;				\
		old = atomic_cmpxchg((v), c, c + (a));	\
		if (likely(old == c))			\
			break;				\
		c = old;				\
	}						\
	c != (u);					\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
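
/*
 * Illustrative sketch (hypothetical name, not part of the kernel API):
 * atomic_inc_not_zero() is the classic way to take a reference only
 * while the object is still live.
 */
static __inline__ int example_get_ref(atomic_t *refcount)
{
	/* fails (returns 0) once the count has already reached zero */
	return atomic_inc_not_zero(refcount);
}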

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
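
/*
 * Illustrative sketch (hypothetical names, not part of the kernel API):
 * the usual reference-count "put" pairs atomic_dec_and_test() with the
 * destructor, so exactly one caller frees the object.
 */
static __inline__ void example_put_ref(atomic_t *refcount, void *obj,
				       void (*release)(void *))
{
	if (atomic_dec_and_test(refcount))
		release(obj);	/* we dropped the last reference */
}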

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
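
/*
 * Illustrative sketch (hypothetical name, not part of the kernel API):
 * atomic_dec_if_positive() behaves like a non-blocking semaphore
 * "trydown", consuming one unit only if one is available.
 */
static __inline__ int example_trydown(atomic_t *count)
{
	/* old value minus 1 is >= 0 exactly when a unit was consumed */
	return atomic_dec_if_positive(count) >= 0;
}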

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
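
/*
 * Illustrative sketch (hypothetical name, not part of the kernel API):
 * plain atomic_dec()/atomic_inc() imply no memory barrier, so callers
 * that rely on ordering bracket them with the macros above.
 */
static __inline__ void example_ordered_dec(atomic_t *v)
{
	smp_mb__before_atomic_dec();	/* order prior accesses first */
	atomic_dec(v);
	smp_mb__after_atomic_dec();	/* ...and later accesses after */
}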

#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))
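
/*
 * The 64-bit operations below mirror the 32-bit ones exactly, using the
 * doubleword reservation pair ldarx/stdcx. instead of lwarx/stwcx.; the
 * PPC405 erratum workaround is unneeded here, as PPC405 is 32-bit only.
 */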

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#endif /* __powerpc64__ */

#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */