/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/cpu-features.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)		{ (i) }
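
/*
 * A minimal usage sketch (illustrative only; "foo_count" is a hypothetical
 * counter, not part of this header): declare a statically initialized
 * atomic counter and bump it.
 *
 *	static atomic_t foo_count = ATOMIC_INIT(0);
 *
 *	atomic_inc(&foo_count);
 *	printk(KERN_DEBUG "foo instances: %d\n", atomic_read(&foo_count));
 */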

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}
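
/*
 * The LL/SC sequence above is in effect a retry loop; a rough C-level
 * sketch of what it does (illustrative only -- the real code must stay in
 * assembly so that nothing can separate the load-linked from the
 * store-conditional):
 *
 *	do {
 *		temp = v->counter;	ll:   load-linked
 *		temp += i;		addu: modify the private copy
 *	} while (the sc store-conditional of temp back to v->counter fails);
 *
 * The beqzl (branch-likely) form is only used on the R10000_LLSC_WAR
 * workaround path; plain beqz is used otherwise.
 */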

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}
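
/*
 * Usage sketch for atomic_sub_if_positive() (illustrative only; "budget",
 * "n" and the -EBUSY handling are hypothetical): take n credits from a
 * budget without ever letting it go negative.
 *
 *	if (atomic_sub_if_positive(n, &budget) < 0)
 *		return -EBUSY;		budget was too small and is unchanged
 */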

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
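
/*
 * A typical use of atomic_inc_not_zero() is taking an additional reference
 * only while at least one reference is still held (illustrative only;
 * "obj" and its "refcnt" field are hypothetical):
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;		object is already on its way out
 */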

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
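
/*
 * The classic reference-count release pattern built on atomic_dec_and_test()
 * (illustrative only; "obj", "refcnt" and free_object() are hypothetical):
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		free_object(obj);	we just dropped the last reference
 */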

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v,i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
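
/*
 * Example of pairing these barriers with a non-returning atomic op
 * (illustrative only; "obj", "state" and STATE_DEAD are hypothetical).
 * atomic_dec() itself is not serializing, so ordering of the preceding
 * store has to be requested explicitly:
 *
 *	obj->state = STATE_DEAD;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->refcnt);
 */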

#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */