blob: 1b60624dab7e4aacca7518818aeaf2c6214baa33 [file] [log] [blame]
Linus Torvalds1da177e2005-04-16 15:20:36 -07001/*
2 * Atomic operations that C can't guarantee us. Useful for
3 * resource counting etc..
4 *
5 * But use these as seldom as possible since they are much more slower
6 * than regular operations.
7 *
8 * This file is subject to the terms and conditions of the GNU General Public
9 * License. See the file "COPYING" in the main directory of this archive
10 * for more details.
11 *
Ralf Baechlee303e082006-11-30 01:14:50 +000012 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
Linus Torvalds1da177e2005-04-16 15:20:36 -070013 */
Linus Torvalds1da177e2005-04-16 15:20:36 -070014#ifndef _ASM_ATOMIC_H
15#define _ASM_ATOMIC_H
16
Ralf Baechle192ef362006-07-07 14:07:18 +010017#include <linux/irqflags.h>
Ralf Baechle0004a9d2006-10-31 03:45:07 +000018#include <asm/barrier.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -070019#include <asm/cpu-features.h>
20#include <asm/war.h>
Mathieu Desnoyers2856f5e2007-05-08 00:34:38 -070021#include <asm/system.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -070022
/*
 * Wrapper struct so the counter can only be touched through the
 * atomic_* API; volatile forces the compiler to re-read/re-write
 * memory on every access rather than caching it in a register.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }
26
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  This is a plain volatile load:
 * no memory barrier is implied (only the *_return operations are
 * serializing on MIPS).
 */
#define atomic_read(v)		((v)->counter)
34
/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  A plain volatile store;
 * no memory barrier is implied.
 */
#define atomic_set(v,i)		((v)->counter = (i))
43
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.  Not serializing: no memory barrier is
 * implied.
 *
 * Three run-time selected implementations:
 *  - LL/SC with a branch-likely (beqzl) retry, required as a
 *    workaround on R10000 (R10000_LLSC_WAR),
 *  - plain LL/SC with the retry branch moved out of line into
 *    .subsection 2 so the common (success) path is straight-line,
 *  - fallback for CPUs without LL/SC, protected by disabling
 *    interrupts.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
88
/*
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.  Not serializing: no memory
 * barrier is implied.
 *
 * Implementation selection mirrors atomic_add(): R10000 branch-likely
 * LL/SC, plain LL/SC with out-of-line retry, or an irq-disable
 * fallback for CPUs without LL/SC.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 2f					\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}
133
/*
 * Same as above, but return the result value.
 *
 * The *_return variants are fully serializing: smp_mb() before and
 * after the update.
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		/* sc clobbered %0 with the success flag; recompute result */
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		/* sc clobbered %0 with the success flag; recompute result */
		"	addu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_mb();

	return result;
}
188
/*
 * atomic_sub_return - subtract @i from @v and return the new value.
 *
 * Fully serializing: smp_mb() before and after the update.
 */
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		/* sc clobbered %0 with the success flag; recompute result */
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 2f					\n"
		/* sc clobbered %0 with the success flag; recompute result */
		"	subu	%0, %1, %3				\n"
		"	.subsection 2					\n"
		"2:	b	1b					\n"
		"	.previous					\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_mb();

	return result;
}
240
241/*
Arnaud Gierschf10d14d2005-11-13 00:38:18 +0100242 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
243 * @i: integer value to subtract
Linus Torvalds1da177e2005-04-16 15:20:36 -0700244 * @v: pointer of type atomic_t
245 *
Arnaud Gierschf10d14d2005-11-13 00:38:18 +0100246 * Atomically test @v and subtract @i if @v is greater or equal than @i.
247 * The function returns the old value of @v minus @i.
Linus Torvalds1da177e2005-04-16 15:20:36 -0700248 */
249static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
250{
251 unsigned long result;
252
Ralf Baechle0004a9d2006-10-31 03:45:07 +0000253 smp_mb();
254
Linus Torvalds1da177e2005-04-16 15:20:36 -0700255 if (cpu_has_llsc && R10000_LLSC_WAR) {
256 unsigned long temp;
257
258 __asm__ __volatile__(
Maciej W. Rozyckic4559f62005-06-23 15:57:15 +0000259 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700260 "1: ll %1, %2 # atomic_sub_if_positive\n"
261 " subu %0, %1, %3 \n"
262 " bltz %0, 1f \n"
263 " sc %0, %2 \n"
Ralf Baechle92f22c12006-02-23 14:10:53 +0000264 " .set noreorder \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700265 " beqzl %0, 1b \n"
Ralf Baechle92f22c12006-02-23 14:10:53 +0000266 " subu %0, %1, %3 \n"
267 " .set reorder \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700268 "1: \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000269 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700270 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
271 : "Ir" (i), "m" (v->counter)
272 : "memory");
273 } else if (cpu_has_llsc) {
274 unsigned long temp;
275
276 __asm__ __volatile__(
Maciej W. Rozyckic4559f62005-06-23 15:57:15 +0000277 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700278 "1: ll %1, %2 # atomic_sub_if_positive\n"
279 " subu %0, %1, %3 \n"
280 " bltz %0, 1f \n"
281 " sc %0, %2 \n"
Ralf Baechle92f22c12006-02-23 14:10:53 +0000282 " .set noreorder \n"
Ralf Baechlef65e4fa2006-09-28 01:45:21 +0100283 " beqz %0, 2f \n"
Ralf Baechle92f22c12006-02-23 14:10:53 +0000284 " subu %0, %1, %3 \n"
285 " .set reorder \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700286 "1: \n"
Ralf Baechlef65e4fa2006-09-28 01:45:21 +0100287 " .subsection 2 \n"
288 "2: b 1b \n"
289 " .previous \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000290 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700291 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
292 : "Ir" (i), "m" (v->counter)
293 : "memory");
294 } else {
295 unsigned long flags;
296
Ralf Baechle49edd092007-03-16 16:10:36 +0000297 raw_local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700298 result = v->counter;
299 result -= i;
300 if (result >= 0)
301 v->counter = result;
Ralf Baechle49edd092007-03-16 16:10:36 +0000302 raw_local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700303 }
304
Ralf Baechle0004a9d2006-10-31 03:45:07 +0000305 smp_mb();
306
Linus Torvalds1da177e2005-04-16 15:20:36 -0700307 return result;
308}
309
/* Built on the generic word-sized cmpxchg/xchg primitives. */
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
Nick Piggin8426e1f2005-11-13 16:07:25 -0800313/**
314 * atomic_add_unless - add unless the number is a given value
315 * @v: pointer of type atomic_t
316 * @a: the amount to add to v...
317 * @u: ...unless v is equal to u.
318 *
319 * Atomically adds @a to @v, so long as it was not @u.
320 * Returns non-zero if @v was not @u, and zero otherwise.
321 */
Mathieu Desnoyers2856f5e2007-05-08 00:34:38 -0700322static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
323{
324 int c, old;
325 c = atomic_read(v);
326 for (;;) {
327 if (unlikely(c == (u)))
328 break;
329 old = atomic_cmpxchg((v), c, c + (a));
330 if (likely(old == c))
331 break;
332 c = old;
333 }
334 return c != (u);
335}
/*
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

/* Serializing: built on the fully-barriered *_return primitives. */
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
404
Ralf Baechle875d43e2005-09-03 15:56:16 -0700405#ifdef CONFIG_64BIT
Linus Torvalds1da177e2005-04-16 15:20:36 -0700406
/* 64-bit counterpart of atomic_t; volatile for the same reason. */
typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Plain volatile load; no barrier implied.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Plain volatile store; no barrier implied.
 */
#define atomic64_set(v,i)	((v)->counter = (i))
424
425/*
426 * atomic64_add - add integer to atomic variable
427 * @i: integer value to add
428 * @v: pointer of type atomic64_t
429 *
430 * Atomically adds @i to @v.
431 */
432static __inline__ void atomic64_add(long i, atomic64_t * v)
433{
434 if (cpu_has_llsc && R10000_LLSC_WAR) {
435 unsigned long temp;
436
437 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000438 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700439 "1: lld %0, %1 # atomic64_add \n"
440 " addu %0, %2 \n"
441 " scd %0, %1 \n"
442 " beqzl %0, 1b \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000443 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700444 : "=&r" (temp), "=m" (v->counter)
445 : "Ir" (i), "m" (v->counter));
446 } else if (cpu_has_llsc) {
447 unsigned long temp;
448
449 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000450 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700451 "1: lld %0, %1 # atomic64_add \n"
452 " addu %0, %2 \n"
453 " scd %0, %1 \n"
Ralf Baechlef65e4fa2006-09-28 01:45:21 +0100454 " beqz %0, 2f \n"
455 " .subsection 2 \n"
456 "2: b 1b \n"
457 " .previous \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000458 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700459 : "=&r" (temp), "=m" (v->counter)
460 : "Ir" (i), "m" (v->counter));
461 } else {
462 unsigned long flags;
463
Ralf Baechle49edd092007-03-16 16:10:36 +0000464 raw_local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700465 v->counter += i;
Ralf Baechle49edd092007-03-16 16:10:36 +0000466 raw_local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700467 }
468}
469
470/*
471 * atomic64_sub - subtract the atomic variable
472 * @i: integer value to subtract
473 * @v: pointer of type atomic64_t
474 *
475 * Atomically subtracts @i from @v.
476 */
477static __inline__ void atomic64_sub(long i, atomic64_t * v)
478{
479 if (cpu_has_llsc && R10000_LLSC_WAR) {
480 unsigned long temp;
481
482 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000483 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700484 "1: lld %0, %1 # atomic64_sub \n"
485 " subu %0, %2 \n"
486 " scd %0, %1 \n"
487 " beqzl %0, 1b \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000488 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700489 : "=&r" (temp), "=m" (v->counter)
490 : "Ir" (i), "m" (v->counter));
491 } else if (cpu_has_llsc) {
492 unsigned long temp;
493
494 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000495 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700496 "1: lld %0, %1 # atomic64_sub \n"
497 " subu %0, %2 \n"
498 " scd %0, %1 \n"
Ralf Baechlef65e4fa2006-09-28 01:45:21 +0100499 " beqz %0, 2f \n"
500 " .subsection 2 \n"
501 "2: b 1b \n"
502 " .previous \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000503 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700504 : "=&r" (temp), "=m" (v->counter)
505 : "Ir" (i), "m" (v->counter));
506 } else {
507 unsigned long flags;
508
Ralf Baechle49edd092007-03-16 16:10:36 +0000509 raw_local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700510 v->counter -= i;
Ralf Baechle49edd092007-03-16 16:10:36 +0000511 raw_local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700512 }
513}
514
515/*
516 * Same as above, but return the result value
517 */
518static __inline__ long atomic64_add_return(long i, atomic64_t * v)
519{
520 unsigned long result;
521
Ralf Baechle0004a9d2006-10-31 03:45:07 +0000522 smp_mb();
523
Linus Torvalds1da177e2005-04-16 15:20:36 -0700524 if (cpu_has_llsc && R10000_LLSC_WAR) {
525 unsigned long temp;
526
527 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000528 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700529 "1: lld %1, %2 # atomic64_add_return \n"
530 " addu %0, %1, %3 \n"
531 " scd %0, %2 \n"
532 " beqzl %0, 1b \n"
533 " addu %0, %1, %3 \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000534 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700535 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
536 : "Ir" (i), "m" (v->counter)
537 : "memory");
538 } else if (cpu_has_llsc) {
539 unsigned long temp;
540
541 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000542 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700543 "1: lld %1, %2 # atomic64_add_return \n"
544 " addu %0, %1, %3 \n"
545 " scd %0, %2 \n"
Ralf Baechlef65e4fa2006-09-28 01:45:21 +0100546 " beqz %0, 2f \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700547 " addu %0, %1, %3 \n"
Ralf Baechlef65e4fa2006-09-28 01:45:21 +0100548 " .subsection 2 \n"
549 "2: b 1b \n"
550 " .previous \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000551 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700552 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
553 : "Ir" (i), "m" (v->counter)
554 : "memory");
555 } else {
556 unsigned long flags;
557
Ralf Baechle49edd092007-03-16 16:10:36 +0000558 raw_local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700559 result = v->counter;
560 result += i;
561 v->counter = result;
Ralf Baechle49edd092007-03-16 16:10:36 +0000562 raw_local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700563 }
564
Ralf Baechle0004a9d2006-10-31 03:45:07 +0000565 smp_mb();
566
Linus Torvalds1da177e2005-04-16 15:20:36 -0700567 return result;
568}
569
570static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
571{
572 unsigned long result;
573
Ralf Baechle0004a9d2006-10-31 03:45:07 +0000574 smp_mb();
575
Linus Torvalds1da177e2005-04-16 15:20:36 -0700576 if (cpu_has_llsc && R10000_LLSC_WAR) {
577 unsigned long temp;
578
579 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000580 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700581 "1: lld %1, %2 # atomic64_sub_return \n"
582 " subu %0, %1, %3 \n"
583 " scd %0, %2 \n"
584 " beqzl %0, 1b \n"
585 " subu %0, %1, %3 \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000586 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700587 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
588 : "Ir" (i), "m" (v->counter)
589 : "memory");
590 } else if (cpu_has_llsc) {
591 unsigned long temp;
592
593 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000594 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700595 "1: lld %1, %2 # atomic64_sub_return \n"
596 " subu %0, %1, %3 \n"
597 " scd %0, %2 \n"
Ralf Baechlef65e4fa2006-09-28 01:45:21 +0100598 " beqz %0, 2f \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700599 " subu %0, %1, %3 \n"
Ralf Baechlef65e4fa2006-09-28 01:45:21 +0100600 " .subsection 2 \n"
601 "2: b 1b \n"
602 " .previous \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000603 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700604 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
605 : "Ir" (i), "m" (v->counter)
606 : "memory");
607 } else {
608 unsigned long flags;
609
Ralf Baechle49edd092007-03-16 16:10:36 +0000610 raw_local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700611 result = v->counter;
612 result -= i;
613 v->counter = result;
Ralf Baechle49edd092007-03-16 16:10:36 +0000614 raw_local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700615 }
616
Ralf Baechle0004a9d2006-10-31 03:45:07 +0000617 smp_mb();
618
Linus Torvalds1da177e2005-04-16 15:20:36 -0700619 return result;
620}
621
622/*
Arnaud Gierschf10d14d2005-11-13 00:38:18 +0100623 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
624 * @i: integer value to subtract
Linus Torvalds1da177e2005-04-16 15:20:36 -0700625 * @v: pointer of type atomic64_t
626 *
Arnaud Gierschf10d14d2005-11-13 00:38:18 +0100627 * Atomically test @v and subtract @i if @v is greater or equal than @i.
628 * The function returns the old value of @v minus @i.
Linus Torvalds1da177e2005-04-16 15:20:36 -0700629 */
630static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
631{
632 unsigned long result;
633
Ralf Baechle0004a9d2006-10-31 03:45:07 +0000634 smp_mb();
635
Linus Torvalds1da177e2005-04-16 15:20:36 -0700636 if (cpu_has_llsc && R10000_LLSC_WAR) {
637 unsigned long temp;
638
639 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000640 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700641 "1: lld %1, %2 # atomic64_sub_if_positive\n"
642 " dsubu %0, %1, %3 \n"
643 " bltz %0, 1f \n"
644 " scd %0, %2 \n"
Ralf Baechle92f22c12006-02-23 14:10:53 +0000645 " .set noreorder \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700646 " beqzl %0, 1b \n"
Ralf Baechle92f22c12006-02-23 14:10:53 +0000647 " dsubu %0, %1, %3 \n"
648 " .set reorder \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700649 "1: \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000650 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700651 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
652 : "Ir" (i), "m" (v->counter)
653 : "memory");
654 } else if (cpu_has_llsc) {
655 unsigned long temp;
656
657 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000658 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700659 "1: lld %1, %2 # atomic64_sub_if_positive\n"
660 " dsubu %0, %1, %3 \n"
661 " bltz %0, 1f \n"
662 " scd %0, %2 \n"
Ralf Baechle92f22c12006-02-23 14:10:53 +0000663 " .set noreorder \n"
Ralf Baechlef65e4fa2006-09-28 01:45:21 +0100664 " beqz %0, 2f \n"
Ralf Baechle92f22c12006-02-23 14:10:53 +0000665 " dsubu %0, %1, %3 \n"
666 " .set reorder \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700667 "1: \n"
Ralf Baechlef65e4fa2006-09-28 01:45:21 +0100668 " .subsection 2 \n"
669 "2: b 1b \n"
670 " .previous \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000671 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700672 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
673 : "Ir" (i), "m" (v->counter)
674 : "memory");
675 } else {
676 unsigned long flags;
677
Ralf Baechle49edd092007-03-16 16:10:36 +0000678 raw_local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700679 result = v->counter;
680 result -= i;
681 if (result >= 0)
682 v->counter = result;
Ralf Baechle49edd092007-03-16 16:10:36 +0000683 raw_local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700684 }
685
Ralf Baechle0004a9d2006-10-31 03:45:07 +0000686 smp_mb();
687
Linus Torvalds1da177e2005-04-16 15:20:36 -0700688 return result;
689}
690
/* Built on the generic cmpxchg/xchg; the cast restores the counter type. */
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
694
695/**
696 * atomic64_add_unless - add unless the number is a given value
697 * @v: pointer of type atomic64_t
698 * @a: the amount to add to v...
699 * @u: ...unless v is equal to u.
700 *
701 * Atomically adds @a to @v, so long as it was not @u.
702 * Returns non-zero if @v was not @u, and zero otherwise.
703 */
Mathieu Desnoyers2856f5e2007-05-08 00:34:38 -0700704static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
705{
706 long c, old;
707 c = atomic64_read(v);
708 for (;;) {
709 if (unlikely(c == (u)))
710 break;
711 old = atomic64_cmpxchg((v), c, c + (a));
712 if (likely(old == c))
713 break;
714 c = old;
715 }
716 return c != (u);
717}
718
/*
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/* Serializing: built on the fully-barriered *_return primitives. */
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
787
Ralf Baechle875d43e2005-09-03 15:56:16 -0700788#endif /* CONFIG_64BIT */
Linus Torvalds1da177e2005-04-16 15:20:36 -0700789
/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions, so callers that need ordering around a plain inc/dec must
 * use these explicit barrier hooks.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
798
Christoph Lameterd3cb4872006-01-06 00:11:20 -0800799#include <asm-generic/atomic.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -0700800#endif /* _ASM_ATOMIC_H */