/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}

/*
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}
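
/*
 * Illustrative usage sketch (not part of this header): the *_return
 * variants are what callers want when the updated value itself matters,
 * e.g. handing out monotonically increasing sequence numbers where every
 * caller must receive a distinct value.  The names below are hypothetical.
 */
#if 0	/* example only, never compiled */
static atomic_t example_seq = ATOMIC_INIT(0);

static int example_next_seq(void)
{
	/* Each caller gets a unique, ordered value, even under SMP. */
	return atomic_add_return(1, &example_seq);
}
#endif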

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

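/*
 * Illustrative usage sketch (not part of this header): atomic_cmpxchg()
 * is the building block for update loops that cannot be expressed as a
 * plain add or subtract.  The helper below, which keeps a running maximum,
 * is hypothetical.
 */
#if 0	/* example only, never compiled */
static void example_track_max(atomic_t *max, int sample)
{
	int old = atomic_read(max);

	/* Retry until we either install @sample or observe a larger value. */
	while (old < sample) {
		int seen = atomic_cmpxchg(max, old, sample);

		if (seen == old)
			break;		/* our update won the race */
		old = seen;		/* somebody else raced us, re-check */
	}
}
#endif
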
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
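
/*
 * Illustrative usage sketch (not part of this header): atomic_inc_not_zero()
 * is typically used to take a new reference only while an object is still
 * live, i.e. its reference count has not already dropped to zero.  The
 * structure and function names here are hypothetical.
 */
#if 0	/* example only, never compiled */
struct example_obj {
	atomic_t refcnt;
};

/* Returns non-zero if a reference was obtained, zero if the object is dying. */
static int example_obj_tryget(struct example_obj *obj)
{
	return atomic_inc_not_zero(&obj->refcnt);
}
#endif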

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
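
/*
 * Illustrative usage sketch (not part of this header): the classic
 * reference-count release pattern.  Exactly one caller sees
 * atomic_dec_and_test() return true and is therefore responsible for
 * freeing the object.  The names are hypothetical (struct example_obj
 * as in the sketch above).
 */
#if 0	/* example only, never compiled */
static void example_obj_put(struct example_obj *obj)
{
	if (atomic_dec_and_test(&obj->refcnt))
		kfree(obj);	/* last reference gone, release the memory */
}
#endif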

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
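
/*
 * Illustrative usage sketch (not part of this header): a trylock-style
 * consumer of a limited resource count.  atomic_dec_if_positive() only
 * performs the decrement when the counter stays non-negative, so it can
 * claim one of a fixed number of slots without ever going below zero.
 * The names below are hypothetical.
 */
#if 0	/* example only, never compiled */
static atomic_t example_available = ATOMIC_INIT(4);

/* Returns non-zero if a slot was claimed, zero if none were left. */
static int example_claim_slot(void)
{
	return atomic_dec_if_positive(&example_available) >= 0;
}
#endif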

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v,i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}

/*
 * atomic64_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */