#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
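
/*
 * Illustrative sketch, not part of the kernel API: basic use of
 * ATOMIC_INIT(), atomic_read() and atomic_set().  The names below are
 * hypothetical.  Note that atomic_read()/atomic_set() are plain
 * single-copy-atomic loads and stores and imply no memory barrier.
 *
 *	static atomic_t hits = ATOMIC_INIT(0);
 *
 *	static void reset_hits(void)
 *	{
 *		atomic_set(&hits, 0);
 *	}
 *
 *	static int snapshot_hits(void)
 *	{
 *		return atomic_read(&hits);
 *	}
 */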

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
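
/*
 * Illustrative sketch, not part of the kernel API: atomic_add() when
 * the updated value is not needed, atomic_add_return() when it is;
 * only the *_return variant has full barrier semantics.  The example
 * below (hypothetical names) backs the addition out with atomic_sub()
 * when a soft limit is overshot.
 *
 *	static atomic_t bytes_queued = ATOMIC_INIT(0);
 *
 *	static int queue_bytes(int n, int limit)
 *	{
 *		if (atomic_add_return(n, &bytes_queued) > limit) {
 *			atomic_sub(n, &bytes_queued);
 *			return -1;
 *		}
 *		return 0;
 *	}
 */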

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
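
/*
 * Illustrative sketch, not part of the kernel API: with a counter
 * biased to -1, atomic_inc_and_test() reports true for exactly the
 * first increment.  Names are hypothetical.
 *
 *	static atomic_t users = ATOMIC_INIT(-1);
 *
 *	static int first_user(void)
 *	{
 *		return atomic_inc_and_test(&users);
 *	}
 */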

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
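
/*
 * Illustrative sketch, not part of the kernel API: the usual
 * compare-and-swap loop over atomic_cmpxchg().  cmpxchg() returns the
 * value seen in *v before the attempt, so the loop retries until the
 * swap succeeds or the (hypothetical) ceiling is reached.
 *
 *	static int saturating_inc(atomic_t *v, int max)
 *	{
 *		int old, cur = atomic_read(v);
 *
 *		while (cur < max) {
 *			old = atomic_cmpxchg(v, cur, cur + 1);
 *			if (old == cur)
 *				return cur + 1;
 *			cur = old;
 *		}
 *		return max;
 *	}
 */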

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}
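
/*
 * Illustrative sketch, not part of this file: __atomic_add_unless()
 * returns the *old* value, so callers test it against @u to learn
 * whether the add happened; the generic atomic_add_unless() wrapper in
 * <linux/atomic.h> does exactly that comparison.  Hypothetical names:
 *
 *	static int get_ref_unless_banned(atomic_t *refs, int banned)
 *	{
 *		return __atomic_add_unless(refs, 1, banned) != banned;
 *	}
 */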

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
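
/*
 * The self-referential #define above lets generic code detect that an
 * arch-optimized atomic_inc_not_zero() exists and skip its fallback.
 *
 * Illustrative sketch, not part of the kernel API: the usual
 * lookup-side refcount pattern, taking a reference only while the
 * object is still live.  Names are hypothetical.
 *
 *	static struct obj *obj_tryget(struct obj *o)
 *	{
 *		if (o && !atomic_inc_not_zero(&o->refcnt))
 *			o = NULL;
 *		return o;
 *	}
 */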

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive
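
/*
 * Illustrative sketch, not part of the kernel API: because
 * atomic_dec_if_positive() returns the old value minus 1 even when it
 * does not decrement, a negative result means the counter was already
 * zero (or below).  Names are hypothetical.
 *
 *	static int take_token(atomic_t *pool)
 *	{
 *		return atomic_dec_if_positive(pool) >= 0;
 *	}
 */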

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add occurred, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}
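
/*
 * Illustrative sketch, not part of the kernel API: unlike the 32-bit
 * __atomic_add_unless() above, atomic64_add_unless() already returns
 * the boolean result (t != u) and can be used directly.  Names are
 * hypothetical; the sentinel value pins the counter once reached.
 *
 *	static int charge_quota(atomic64_t *quota, long n, long frozen)
 *	{
 *		return atomic64_add_unless(quota, n, frozen);
 *	}
 */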

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
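
/*
 * Illustrative sketch, not part of the kernel API: the 64-bit variant
 * follows the same lookup-side pattern as atomic_inc_not_zero() above,
 * for objects whose reference count is an atomic64_t.  Names are
 * hypothetical.
 *
 *	static int big_obj_tryget(struct big_obj *o)
 *	{
 *		return atomic64_inc_not_zero(&o->refcnt) != 0;
 *	}
 */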

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */