#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

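/*
 * Implementation note: each operation below is a load-reserve /
 * store-conditional (lwarx/stwcx., or ldarx/stdcx. in the 64-bit
 * variants) loop that retries until the store-conditional succeeds.
 * The value-returning variants are bracketed by EIEIO_ON_SMP and
 * ISYNC_ON_SMP so that they also order memory accesses on SMP; the
 * void variants provide no such ordering.  PPC405_ERR77() emits a
 * workaround for a PPC405 core erratum affecting stwcx.
 */
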
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

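/*
 * Illustrative usage sketch: handing out monotonically increasing
 * sequence numbers with atomic_inc_return(), which returns the value
 * after the increment.  The helper name is hypothetical, not part of
 * the kernel API.
 */
static __inline__ int atomic_example_next_seq(atomic_t *seq)
{
	/* Concurrent callers each see a distinct value (until the counter wraps). */
	return atomic_inc_return(seq);
}
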
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)			\
({							\
	int c, old;					\
	c = atomic_read(v);				\
	for (;;) {					\
		if (unlikely(c == (u)))			\
			break;				\
		old = atomic_cmpxchg((v), c, c + (a));	\
		if (likely(old == c))			\
			break;				\
		c = old;				\
	}						\
	c != (u);					\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
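
/*
 * Illustrative usage sketch: atomic_inc_not_zero() takes a reference
 * only while the object is still live, failing once the count has
 * already dropped to zero.  The helper and the ->refcount member are
 * hypothetical, not part of the kernel API.
 */
#define atomic_example_tryget(obj)				\
	(atomic_inc_not_zero(&(obj)->refcount) ? (obj) : NULL)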

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
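
/*
 * Illustrative usage sketch: the classic "drop a reference and free on
 * the final put" pattern built on atomic_dec_and_test().  The helper
 * and its release callback are hypothetical, not part of the kernel API.
 */
static __inline__ void atomic_example_put_ref(atomic_t *refcount,
					      void (*release)(atomic_t *refcount))
{
	if (atomic_dec_and_test(refcount))	/* true only for the final put */
		release(refcount);
}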

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
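
/*
 * Illustrative usage sketch: claiming one unit of a counted resource
 * with atomic_dec_if_positive().  Returns 1 if a unit was taken, 0 if
 * none were available.  The helper name is hypothetical, not part of
 * the kernel API.
 */
static __inline__ int atomic_example_try_take(atomic_t *available)
{
	/* atomic_dec_if_positive() returns the old value minus one. */
	return atomic_dec_if_positive(available) >= 0;
}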

#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()

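/*
 * Illustrative usage sketch: the void atomic_dec() above is not a
 * memory barrier, so callers that need their earlier stores ordered
 * before the decrement pair it with smp_mb__before_atomic_dec().
 * The helper name is hypothetical, not part of the kernel API.
 */
#define atomic_example_ordered_dec(v)		\
do {						\
	smp_mb__before_atomic_dec();		\
	atomic_dec(v);				\
} while (0)
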
#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
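
/*
 * Illustrative usage sketch: keeping a 64-bit running total (for
 * example a byte counter) with atomic64_add_return(), which returns
 * the value after the addition.  The helper name is hypothetical, not
 * part of the kernel API.
 */
static __inline__ long atomic64_example_account(atomic64_t *total, long bytes)
{
	return atomic64_add_return(bytes, total);
}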

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#endif /* __powerpc64__ */

#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */