#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

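/*
 * All of the routines below use the same load-reserve/store-conditional
 * sequence: lwarx loads the counter and places a reservation on it, and
 * stwcx. stores the new value only if the reservation is still held,
 * otherwise the bne- branches back and the update is retried.  As a
 * C-level sketch (illustrative only; the real work is in the asm, and
 * store_conditional() is a made-up name for the stwcx./bne- pair):
 *
 *	do {
 *		old = v->counter;	// lwarx
 *		new = old + a;		// add
 *	} while (!store_conditional(&v->counter, new));
 *
 * PPC405_ERR77() expands to a workaround for PPC405 erratum #77 on the
 * affected CPUs and to nothing everywhere else.
 */
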
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

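/*
 * The value-returning variants below act as full memory barriers:
 * LWSYNC_ON_SMP (lwsync, or sync on processors without lwsync) orders
 * everything before the update, and ISYNC_ON_SMP keeps later accesses
 * from being performed ahead of it.  Both expand to nothing on UP builds.
 */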
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
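
/*
 * Example (illustrative only, not part of this header): waking a waiter
 * once every outstanding callback has finished, with a hypothetical
 * 'pending' counter initialized to -nr_callbacks:
 *
 *	if (atomic_inc_and_test(&pending))
 *		complete(&all_done);
 */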

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
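
/*
 * atomic_cmpxchg()/atomic_xchg() are built on the generic cmpxchg()/
 * xchg() primitives.  A typical compare-and-swap update loop looks like
 * this (illustrative only; 'ceiling' is a made-up bound):
 *
 *	int old, new;
 *	do {
 *		old = atomic_read(v);
 *		new = old < ceiling ? old + 1 : old;
 *	} while (atomic_cmpxchg(v, old, new) != old);
 */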

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
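
/*
 * atomic_inc_not_zero() is the usual way to take a reference to an
 * object found under RCU, where a refcount of zero means the object is
 * already being torn down (illustrative sketch only; lookup() is a
 * made-up helper):
 *
 *	rcu_read_lock();
 *	obj = lookup(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refcnt))
 *		obj = NULL;	// lost the race with the last put
 *	rcu_read_unlock();
 */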

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement it if it is greater than 0.
 * Returns the old value of *v minus 1, even if @v was not
 * decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

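/*
 * Example (illustrative only): consuming one token from a counting
 * 'tokens' counter without blocking.  A negative return value means
 * the counter was already zero (or negative) and was left untouched:
 *
 *	if (atomic_dec_if_positive(&tokens) < 0)
 *		return -EAGAIN;
 */
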
/*
 * atomic_dec() and atomic_inc() are not themselves ordered; callers
 * that need ordering bracket them with these macros, all of which
 * powerpc implements as a full smp_mb().
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement it if it is greater than 0.
 * Returns the old value of *v minus 1, even if @v was not
 * decremented.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#endif /* __powerpc64__ */

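/* asm-generic/atomic.h builds the atomic_long_t operations on top of these. */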
#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */