blob: 654b97d3e13a405302cdf0e96d04f83977841bf5 [file] [log] [blame]
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */
14
/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
20#include <linux/config.h>
21#include <linux/spinlock.h>
22
23#ifndef _ASM_ATOMIC_H
24#define _ASM_ATOMIC_H
25
26#include <asm/cpu-features.h>
Ralf Baechleb2d28b72005-12-07 18:57:52 +000027#include <asm/interrupt.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -070028#include <asm/war.h>
29
/* 32-bit atomic counter.  volatile so the compiler reloads on every access. */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer: atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  A plain volatile load is atomic
 * for a naturally aligned word.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  A plain volatile store is
 * atomic for a naturally aligned word.
 */
#define atomic_set(v,i)		((v)->counter = (i))
50
51/*
52 * atomic_add - add integer to atomic variable
53 * @i: integer value to add
54 * @v: pointer of type atomic_t
55 *
56 * Atomically adds @i to @v.
57 */
/*
 * Atomically adds @i to v->counter; no return value, hence no barrier.
 * Three implementations are selected at run time:
 *  - LL/SC loop retried with branch-likely (beqzl) for CPUs needing the
 *    R10000 LL/SC errata workaround,
 *  - plain LL/SC loop for other LL/SC-capable CPUs,
 *  - interrupt-masked read-modify-write for CPUs without LL/SC
 *    (NOTE(review): presumably such CPUs are uniprocessor-only, since
 *    masking local interrupts does not give SMP atomicity -- confirm).
 * The asm never sets noreorder, so gas fills branch delay slots itself.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}
92
93/*
94 * atomic_sub - subtract the atomic variable
95 * @i: integer value to subtract
96 * @v: pointer of type atomic_t
97 *
98 * Atomically subtracts @i from @v.
99 */
/*
 * Atomically subtracts @i from v->counter; no return value, hence no
 * barrier.  Implementation selection mirrors atomic_add(): branch-likely
 * LL/SC loop (R10000 workaround), plain LL/SC loop, or interrupt-masked
 * read-modify-write fallback for CPUs without LL/SC.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}
134
135/*
136 * Same as above, but return the result value
137 */
/*
 * Same as atomic_add(), but returns the new value.  Serializing: the
 * trailing "sync" acts as a memory barrier (see the smp_mb__* note at
 * the bottom of this file).
 *
 * The sc instruction overwrites %0 (result) with its success flag, so
 * the "addu %0, %1, %3" after the retry branch recomputes the new value
 * from the loaded old value (%1 = temp) once the store has succeeded.
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}
184
/*
 * Same as atomic_sub(), but returns the new value.  Serializing via the
 * trailing "sync".  As in atomic_add_return(), sc clobbers %0 with its
 * success flag, so the subu after the retry branch recomputes the new
 * value from the loaded old value in %1.
 */
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}
231
232/*
Arnaud Gierschf10d14d2005-11-13 00:38:18 +0100233 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
234 * @i: integer value to subtract
Linus Torvalds1da177e2005-04-16 15:20:36 -0700235 * @v: pointer of type atomic_t
236 *
Arnaud Gierschf10d14d2005-11-13 00:38:18 +0100237 * Atomically test @v and subtract @i if @v is greater or equal than @i.
238 * The function returns the old value of @v minus @i.
Linus Torvalds1da177e2005-04-16 15:20:36 -0700239 */
240static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
241{
242 unsigned long result;
243
244 if (cpu_has_llsc && R10000_LLSC_WAR) {
245 unsigned long temp;
246
247 __asm__ __volatile__(
Maciej W. Rozyckic4559f62005-06-23 15:57:15 +0000248 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700249 "1: ll %1, %2 # atomic_sub_if_positive\n"
250 " subu %0, %1, %3 \n"
251 " bltz %0, 1f \n"
252 " sc %0, %2 \n"
253 " beqzl %0, 1b \n"
254 " sync \n"
255 "1: \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000256 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700257 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
258 : "Ir" (i), "m" (v->counter)
259 : "memory");
260 } else if (cpu_has_llsc) {
261 unsigned long temp;
262
263 __asm__ __volatile__(
Maciej W. Rozyckic4559f62005-06-23 15:57:15 +0000264 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700265 "1: ll %1, %2 # atomic_sub_if_positive\n"
266 " subu %0, %1, %3 \n"
267 " bltz %0, 1f \n"
268 " sc %0, %2 \n"
269 " beqz %0, 1b \n"
270 " sync \n"
271 "1: \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000272 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700273 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
274 : "Ir" (i), "m" (v->counter)
275 : "memory");
276 } else {
277 unsigned long flags;
278
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000279 local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700280 result = v->counter;
281 result -= i;
282 if (result >= 0)
283 v->counter = result;
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000284 local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700285 }
286
287 return result;
288}
289
/* Compare-and-swap / exchange on the counter word via the generic
 * cmpxchg()/xchg() primitives. */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 *
 * Classic cmpxchg loop: retry with the freshly observed value until the
 * swap succeeds or the excluded value @u is seen.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
311
/* Derived operations, all expressed via the primitives above.  The
 * *_return-based ones inherit their serializing behaviour. */
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
378
Ralf Baechle875d43e2005-09-03 15:56:16 -0700379#ifdef CONFIG_64BIT
Linus Torvalds1da177e2005-04-16 15:20:36 -0700380
/* 64-bit atomic counter; only available on CONFIG_64BIT kernels. */
typedef struct { volatile __s64 counter; } atomic64_t;

/* Static initializer: atomic64_t a = ATOMIC64_INIT(0); */
#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v (a naturally aligned 64-bit
 * volatile load is atomic on 64-bit kernels).
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v,i)	((v)->counter = (i))
398
399/*
400 * atomic64_add - add integer to atomic variable
401 * @i: integer value to add
402 * @v: pointer of type atomic64_t
403 *
404 * Atomically adds @i to @v.
405 */
406static __inline__ void atomic64_add(long i, atomic64_t * v)
407{
408 if (cpu_has_llsc && R10000_LLSC_WAR) {
409 unsigned long temp;
410
411 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000412 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700413 "1: lld %0, %1 # atomic64_add \n"
414 " addu %0, %2 \n"
415 " scd %0, %1 \n"
416 " beqzl %0, 1b \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000417 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700418 : "=&r" (temp), "=m" (v->counter)
419 : "Ir" (i), "m" (v->counter));
420 } else if (cpu_has_llsc) {
421 unsigned long temp;
422
423 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000424 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700425 "1: lld %0, %1 # atomic64_add \n"
426 " addu %0, %2 \n"
427 " scd %0, %1 \n"
428 " beqz %0, 1b \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000429 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700430 : "=&r" (temp), "=m" (v->counter)
431 : "Ir" (i), "m" (v->counter));
432 } else {
433 unsigned long flags;
434
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000435 local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700436 v->counter += i;
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000437 local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700438 }
439}
440
441/*
442 * atomic64_sub - subtract the atomic variable
443 * @i: integer value to subtract
444 * @v: pointer of type atomic64_t
445 *
446 * Atomically subtracts @i from @v.
447 */
448static __inline__ void atomic64_sub(long i, atomic64_t * v)
449{
450 if (cpu_has_llsc && R10000_LLSC_WAR) {
451 unsigned long temp;
452
453 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000454 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700455 "1: lld %0, %1 # atomic64_sub \n"
456 " subu %0, %2 \n"
457 " scd %0, %1 \n"
458 " beqzl %0, 1b \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000459 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700460 : "=&r" (temp), "=m" (v->counter)
461 : "Ir" (i), "m" (v->counter));
462 } else if (cpu_has_llsc) {
463 unsigned long temp;
464
465 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000466 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700467 "1: lld %0, %1 # atomic64_sub \n"
468 " subu %0, %2 \n"
469 " scd %0, %1 \n"
470 " beqz %0, 1b \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000471 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700472 : "=&r" (temp), "=m" (v->counter)
473 : "Ir" (i), "m" (v->counter));
474 } else {
475 unsigned long flags;
476
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000477 local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700478 v->counter -= i;
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000479 local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700480 }
481}
482
483/*
484 * Same as above, but return the result value
485 */
486static __inline__ long atomic64_add_return(long i, atomic64_t * v)
487{
488 unsigned long result;
489
490 if (cpu_has_llsc && R10000_LLSC_WAR) {
491 unsigned long temp;
492
493 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000494 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700495 "1: lld %1, %2 # atomic64_add_return \n"
496 " addu %0, %1, %3 \n"
497 " scd %0, %2 \n"
498 " beqzl %0, 1b \n"
499 " addu %0, %1, %3 \n"
500 " sync \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000501 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700502 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
503 : "Ir" (i), "m" (v->counter)
504 : "memory");
505 } else if (cpu_has_llsc) {
506 unsigned long temp;
507
508 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000509 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700510 "1: lld %1, %2 # atomic64_add_return \n"
511 " addu %0, %1, %3 \n"
512 " scd %0, %2 \n"
513 " beqz %0, 1b \n"
514 " addu %0, %1, %3 \n"
515 " sync \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000516 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700517 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
518 : "Ir" (i), "m" (v->counter)
519 : "memory");
520 } else {
521 unsigned long flags;
522
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000523 local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700524 result = v->counter;
525 result += i;
526 v->counter = result;
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000527 local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700528 }
529
530 return result;
531}
532
533static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
534{
535 unsigned long result;
536
537 if (cpu_has_llsc && R10000_LLSC_WAR) {
538 unsigned long temp;
539
540 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000541 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700542 "1: lld %1, %2 # atomic64_sub_return \n"
543 " subu %0, %1, %3 \n"
544 " scd %0, %2 \n"
545 " beqzl %0, 1b \n"
546 " subu %0, %1, %3 \n"
547 " sync \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000548 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700549 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
550 : "Ir" (i), "m" (v->counter)
551 : "memory");
552 } else if (cpu_has_llsc) {
553 unsigned long temp;
554
555 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000556 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700557 "1: lld %1, %2 # atomic64_sub_return \n"
558 " subu %0, %1, %3 \n"
559 " scd %0, %2 \n"
560 " beqz %0, 1b \n"
561 " subu %0, %1, %3 \n"
562 " sync \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000563 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700564 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
565 : "Ir" (i), "m" (v->counter)
566 : "memory");
567 } else {
568 unsigned long flags;
569
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000570 local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700571 result = v->counter;
572 result -= i;
573 v->counter = result;
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000574 local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700575 }
576
577 return result;
578}
579
580/*
Arnaud Gierschf10d14d2005-11-13 00:38:18 +0100581 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
582 * @i: integer value to subtract
Linus Torvalds1da177e2005-04-16 15:20:36 -0700583 * @v: pointer of type atomic64_t
584 *
Arnaud Gierschf10d14d2005-11-13 00:38:18 +0100585 * Atomically test @v and subtract @i if @v is greater or equal than @i.
586 * The function returns the old value of @v minus @i.
Linus Torvalds1da177e2005-04-16 15:20:36 -0700587 */
588static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
589{
590 unsigned long result;
591
592 if (cpu_has_llsc && R10000_LLSC_WAR) {
593 unsigned long temp;
594
595 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000596 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700597 "1: lld %1, %2 # atomic64_sub_if_positive\n"
598 " dsubu %0, %1, %3 \n"
599 " bltz %0, 1f \n"
600 " scd %0, %2 \n"
601 " beqzl %0, 1b \n"
602 " sync \n"
603 "1: \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000604 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700605 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
606 : "Ir" (i), "m" (v->counter)
607 : "memory");
608 } else if (cpu_has_llsc) {
609 unsigned long temp;
610
611 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000612 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700613 "1: lld %1, %2 # atomic64_sub_if_positive\n"
614 " dsubu %0, %1, %3 \n"
615 " bltz %0, 1f \n"
616 " scd %0, %2 \n"
617 " beqz %0, 1b \n"
618 " sync \n"
619 "1: \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000620 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700621 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
622 : "Ir" (i), "m" (v->counter)
623 : "memory");
624 } else {
625 unsigned long flags;
626
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000627 local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700628 result = v->counter;
629 result -= i;
630 if (result >= 0)
631 v->counter = result;
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000632 local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700633 }
634
635 return result;
636}
637
/* Derived 64-bit operations, all expressed via the primitives above.
 * The *_return-based ones inherit their serializing behaviour. */
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
704
Ralf Baechle875d43e2005-09-03 15:56:16 -0700705#endif /* CONFIG_64BIT */
Linus Torvalds1da177e2005-04-16 15:20:36 -0700706
/*
 * atomic*_return operations are serializing (they end in "sync") but
 * the non-*_return versions are not, so explicit full barriers are
 * needed around bare atomic_inc()/atomic_dec().
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
715
Christoph Lameterd3cb4872006-01-06 00:11:20 -0800716#include <asm-generic/atomic.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -0700717#endif /* _ASM_ATOMIC_H */