#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

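/*
 * A note on the plain load/store above: aligned 32-bit accesses are
 * single-copy atomic on PowerPC, so atomic_read() and atomic_set() need no
 * lwarx/stwcx. loop.  The %U and %X operand modifiers merely let the
 * compiler emit update-form (e.g. lwzu) or indexed-form (e.g. lwzx)
 * addressing when the "m" operand calls for it.
 */
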
#define ATOMIC_OP(op, asm_op)						\
static __inline__ void atomic_##op(int a, atomic_t *v)			\
{									\
	int t;								\
									\
	__asm__ __volatile__(						\
"1:	lwarx	%0,0,%3		# atomic_" #op "\n"			\
	#asm_op " %0,%2,%0\n"						\
	PPC405_ERR77(0,%3)						\
"	stwcx.	%0,0,%3 \n"						\
"	bne-	1b\n"							\
	: "=&r" (t), "+m" (v->counter)					\
	: "r" (a), "r" (&v->counter)					\
	: "cc");							\
}									\

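/*
 * The lwarx/stwcx. pair above is a load-linked/store-conditional loop:
 * lwarx loads v->counter and takes out a reservation, stwcx. stores the
 * updated value only if the reservation is still held, and bne- retries
 * from label 1 otherwise.  PPC405_ERR77() inserts a dcbt ahead of the
 * stwcx. on kernels built with CONFIG_IBM405_ERR77, working around an
 * erratum in the IBM 405 core, and expands to nothing elsewhere.
 */
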
#define ATOMIC_OP_RETURN(op, asm_op)					\
static __inline__ int atomic_##op##_return(int a, atomic_t *v)		\
{									\
	int t;								\
									\
	__asm__ __volatile__(						\
	PPC_ATOMIC_ENTRY_BARRIER					\
"1:	lwarx	%0,0,%2		# atomic_" #op "_return\n"		\
	#asm_op " %0,%1,%0\n"						\
	PPC405_ERR77(0,%2)						\
"	stwcx.	%0,0,%2 \n"						\
"	bne-	1b\n"							\
	PPC_ATOMIC_EXIT_BARRIER						\
	: "=&r" (t)							\
	: "r" (a), "r" (&v->counter)					\
	: "cc", "memory");						\
									\
	return t;							\
}

#define ATOMIC_OPS(op, asm_op) ATOMIC_OP(op, asm_op) ATOMIC_OP_RETURN(op, asm_op)

ATOMIC_OPS(add, add)
ATOMIC_OPS(sub, subf)

ATOMIC_OP(and, and)
ATOMIC_OP(or, or)
ATOMIC_OP(xor, xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

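/*
 * For reference, ATOMIC_OPS(add, add) above expands to roughly the
 * following (a sketch, with the asm bodies elided):
 *
 *	static __inline__ void atomic_add(int a, atomic_t *v) { ... }
 *	static __inline__ int atomic_add_return(int a, atomic_t *v) { ... }
 *
 * The void ops are unordered (no barriers, only a "cc" clobber), while the
 * *_return variants are bracketed by PPC_ATOMIC_ENTRY_BARRIER and
 * PPC_ATOMIC_EXIT_BARRIER and clobber "memory", giving them the full
 * memory-barrier semantics the kernel expects of value-returning atomics.
 */
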
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)\
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

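/*
 * atomic_cmpxchg() returns the value @v held before the exchange, which is
 * what makes the usual compare-and-swap retry loop work.  A minimal sketch
 * (illustrative only, not an interface defined by this header):
 *
 *	int old, tmp;
 *
 *	old = atomic_read(&v);
 *	while ((tmp = atomic_cmpxchg(&v, old, old + 1)) != old)
 *		old = tmp;
 */
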
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}

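/*
 * Note the subf after the loop: on a successful store the register holds
 * the new value, so @a is subtracted back out to return the old value; on
 * the @u match, "beq- 2f" skips the subtract and the old value is returned
 * as loaded.  The generic wrapper in <linux/atomic.h> derives the boolean
 * form from this, roughly:
 *
 *	atomic_add_unless(v, a, u) := (__atomic_add_unless(v, a, u) != u)
 */
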
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))

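/*
 * The self-referential #define above is the usual idiom for telling
 * generic code that the architecture provides its own
 * atomic_inc_not_zero(): <linux/atomic.h> only installs its fallback when
 * the name is not already defined as a macro.
 */
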
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

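/*
 * atomic_dec_and_test() is the usual reference-count drop primitive.  An
 * illustrative pattern (obj and free_obj() are hypothetical, not defined
 * here):
 *
 *	if (atomic_dec_and_test(&obj->refcount))
 *		free_obj(obj);
 *
 * The full barrier implied by atomic_dec_return() is what makes it safe
 * to free the object once the count reaches zero.
 */
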
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
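/*
 * The "=&b" output constraint below asks for a "base" register, i.e.
 * anything but r0: addi reads its source operand as the literal 0 when
 * that operand is r0, which would corrupt the result.
 */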
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive

#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

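/*
 * The 64-bit variants rely on ld/std being single-copy atomic for aligned
 * doublewords, which only holds on 64-bit implementations; hence the whole
 * atomic64_t family lives under #ifdef __powerpc64__.
 */
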
#define ATOMIC64_OP(op, asm_op)						\
static __inline__ void atomic64_##op(long a, atomic64_t *v)		\
{									\
	long t;								\
									\
	__asm__ __volatile__(						\
"1:	ldarx	%0,0,%3		# atomic64_" #op "\n"			\
	#asm_op " %0,%2,%0\n"						\
"	stdcx.	%0,0,%3 \n"						\
"	bne-	1b\n"							\
	: "=&r" (t), "+m" (v->counter)					\
	: "r" (a), "r" (&v->counter)					\
	: "cc");							\
}

#define ATOMIC64_OP_RETURN(op, asm_op)					\
static __inline__ long atomic64_##op##_return(long a, atomic64_t *v)	\
{									\
	long t;								\
									\
	__asm__ __volatile__(						\
	PPC_ATOMIC_ENTRY_BARRIER					\
"1:	ldarx	%0,0,%2		# atomic64_" #op "_return\n"		\
	#asm_op " %0,%1,%0\n"						\
"	stdcx.	%0,0,%2 \n"						\
"	bne-	1b\n"							\
	PPC_ATOMIC_EXIT_BARRIER						\
	: "=&r" (t)							\
	: "r" (a), "r" (&v->counter)					\
	: "cc", "memory");						\
									\
	return t;							\
}

#define ATOMIC64_OPS(op, asm_op) ATOMIC64_OP(op, asm_op) ATOMIC64_OP_RETURN(op, asm_op)

ATOMIC64_OPS(add, add)
ATOMIC64_OPS(sub, subf)
ATOMIC64_OP(and, and)
ATOMIC64_OP(or, or)
ATOMIC64_OP(xor, xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
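/*
 * Unlike the 32-bit version, the loop below uses addic., which records the
 * result in CR0, so no separate compare is needed before the blt-.  And
 * since addic. (unlike addi) does not treat r0 as the literal 0, a plain
 * "=&r" output constraint suffices here.
 */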
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was performed, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

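/*
 * Note the asymmetry with the 32-bit side: __atomic_add_unless() returns
 * the old value and lets <linux/atomic.h> derive the boolean, whereas
 * atomic64_add_unless() computes "t != u" itself and is used by generic
 * code directly.
 */
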
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */