#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }
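
/*
 * All of the read-modify-write routines below are built on the
 * lwarx/stwcx. (load-and-reserve/store-conditional) pair: the update
 * is retried until the store-conditional succeeds, i.e. until no
 * other CPU has written the reservation granule in the meantime.
 * The value-returning variants are bracketed by
 * PPC_ATOMIC_ENTRY_BARRIER/PPC_ATOMIC_EXIT_BARRIER so that they
 * order like full memory barriers; the void variants impose no
 * ordering.
 */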
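
/*
 * atomic_read()/atomic_set() are plain loads and stores.  The
 * %U<n>%X<n> output modifiers let gcc pick the update ("lwzu"/"stwu")
 * or indexed ("lwzx"/"stwx") form of the instruction when the chosen
 * memory operand calls for it.
 */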
static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
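
/*
 * PPC405_ERR77() expands to a workaround instruction ahead of the
 * stwcx. on CPUs affected by PPC405 erratum #77, and to nothing on
 * everything else.
 */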

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
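
/*
 * addic modifies the carry (CA) field of the XER, which is why the
 * inc/dec routines list "xer" among their clobbers.
 */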

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
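
/*
 * cmpxchg() and xchg() come from asm/cmpxchg.h and are built on the
 * same lwarx/stwcx. loops as the routines above.
 */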

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}
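
/*
 * A minimal usage sketch (hypothetical caller): take a reference only
 * if one is already held, which is essentially what the generic
 * atomic_inc_not_zero() does with this primitive:
 *
 *	if (__atomic_add_unless(&obj->refs, 1, 0) != 0)
 *		... got a reference ...
 */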

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
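/*
 * The self-referential #define below tells generic code that this
 * architecture supplies its own atomic_inc_not_zero().
 */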
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive
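
/*
 * Note the "=&b" constraint above: addi reads r0 as the constant 0
 * rather than as a register, so the temporary must not land in r0.
 */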

#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
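
/*
 * The 64-bit routines mirror their 32-bit counterparts, using
 * ldarx/stdcx. instead of lwarx/stwcx.  No PPC405_ERR77() workaround
 * is needed here: the erratum is specific to the 32-bit 405 core,
 * which never runs this code.
 */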

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */