blob: 2c8b853376c995892680d5cf3f9b139b57bc5b5b [file] [log] [blame]
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/config.h>
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/cpu-features.h>
#include <asm/interrupt.h>
#include <asm/war.h>
/*
 * The counter is wrapped in a struct so that plain ints cannot be passed
 * by accident; "volatile" forces the compiler to re-read/re-write memory
 * on every access.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))
50
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.  No return value and no memory barrier.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		/* Branch-likely (beqzl) retry loop for CPUs needing the
		 * R10000 ll/sc workaround. */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		/* Standard ll/sc retry loop: redo the add if sc failed. */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		/* CPUs without ll/sc: mask interrupts around the update. */
		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}
92
/*
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.  No return value and no memory barrier.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		/* Branch-likely (beqzl) retry loop for CPUs needing the
		 * R10000 ll/sc workaround. */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		/* Standard ll/sc retry loop: redo the subtract if sc failed. */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		/* CPUs without ll/sc: mask interrupts around the update. */
		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}
134
/*
 * Same as above, but return the result value (new value of the counter).
 * The trailing sync makes the *_return variants serializing, matching the
 * barrier comment at the end of this file.
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n" /* sc clobbered %0; recompute result */
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n" /* sc clobbered %0; recompute result */
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		/* CPUs without ll/sc: mask interrupts around the update. */
		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}
184
/*
 * atomic_sub_return - subtract and return the new value of the counter.
 * Serializing, like atomic_add_return, due to the trailing sync.
 */
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n" /* sc clobbered %0; recompute result */
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n" /* sc clobbered %0; recompute result */
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		/* CPUs without ll/sc: mask interrupts around the update. */
		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}
231
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater or equal than @i.
 * The function returns the old value of @v minus @i.
 * Note: the difference is returned even when the store was skipped, so
 * a negative return means @v was left unchanged.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n" /* would go negative: skip the store */
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n" /* place the subu in beqzl's delay slot */
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n" /* would go negative: skip the store */
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n" /* place the subu in beqz's delay slot */
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		/* CPUs without ll/sc: mask interrupts around the update. */
		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}
295
/* Compare-and-swap / exchange on the raw counter word. */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 *
 * Implemented as a cmpxchg retry loop: reload the current value whenever
 * another CPU changed the counter between the read and the cmpxchg.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
317
/* Derived operations, all built on the primitives above. */
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
384
Ralf Baechle875d43e2005-09-03 15:56:16 -0700385#ifdef CONFIG_64BIT
Linus Torvalds1da177e2005-04-16 15:20:36 -0700386
/* 64-bit counterpart of atomic_t; only available on 64-bit kernels. */
typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	((v)->counter = (i))
404
405/*
406 * atomic64_add - add integer to atomic variable
407 * @i: integer value to add
408 * @v: pointer of type atomic64_t
409 *
410 * Atomically adds @i to @v.
411 */
412static __inline__ void atomic64_add(long i, atomic64_t * v)
413{
414 if (cpu_has_llsc && R10000_LLSC_WAR) {
415 unsigned long temp;
416
417 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000418 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700419 "1: lld %0, %1 # atomic64_add \n"
420 " addu %0, %2 \n"
421 " scd %0, %1 \n"
422 " beqzl %0, 1b \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000423 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700424 : "=&r" (temp), "=m" (v->counter)
425 : "Ir" (i), "m" (v->counter));
426 } else if (cpu_has_llsc) {
427 unsigned long temp;
428
429 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000430 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700431 "1: lld %0, %1 # atomic64_add \n"
432 " addu %0, %2 \n"
433 " scd %0, %1 \n"
434 " beqz %0, 1b \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000435 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700436 : "=&r" (temp), "=m" (v->counter)
437 : "Ir" (i), "m" (v->counter));
438 } else {
439 unsigned long flags;
440
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000441 local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700442 v->counter += i;
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000443 local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700444 }
445}
446
447/*
448 * atomic64_sub - subtract the atomic variable
449 * @i: integer value to subtract
450 * @v: pointer of type atomic64_t
451 *
452 * Atomically subtracts @i from @v.
453 */
454static __inline__ void atomic64_sub(long i, atomic64_t * v)
455{
456 if (cpu_has_llsc && R10000_LLSC_WAR) {
457 unsigned long temp;
458
459 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000460 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700461 "1: lld %0, %1 # atomic64_sub \n"
462 " subu %0, %2 \n"
463 " scd %0, %1 \n"
464 " beqzl %0, 1b \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000465 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700466 : "=&r" (temp), "=m" (v->counter)
467 : "Ir" (i), "m" (v->counter));
468 } else if (cpu_has_llsc) {
469 unsigned long temp;
470
471 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000472 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700473 "1: lld %0, %1 # atomic64_sub \n"
474 " subu %0, %2 \n"
475 " scd %0, %1 \n"
476 " beqz %0, 1b \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000477 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700478 : "=&r" (temp), "=m" (v->counter)
479 : "Ir" (i), "m" (v->counter));
480 } else {
481 unsigned long flags;
482
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000483 local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700484 v->counter -= i;
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000485 local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700486 }
487}
488
489/*
490 * Same as above, but return the result value
491 */
492static __inline__ long atomic64_add_return(long i, atomic64_t * v)
493{
494 unsigned long result;
495
496 if (cpu_has_llsc && R10000_LLSC_WAR) {
497 unsigned long temp;
498
499 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000500 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700501 "1: lld %1, %2 # atomic64_add_return \n"
502 " addu %0, %1, %3 \n"
503 " scd %0, %2 \n"
504 " beqzl %0, 1b \n"
505 " addu %0, %1, %3 \n"
506 " sync \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000507 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700508 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
509 : "Ir" (i), "m" (v->counter)
510 : "memory");
511 } else if (cpu_has_llsc) {
512 unsigned long temp;
513
514 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000515 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700516 "1: lld %1, %2 # atomic64_add_return \n"
517 " addu %0, %1, %3 \n"
518 " scd %0, %2 \n"
519 " beqz %0, 1b \n"
520 " addu %0, %1, %3 \n"
521 " sync \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000522 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700523 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
524 : "Ir" (i), "m" (v->counter)
525 : "memory");
526 } else {
527 unsigned long flags;
528
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000529 local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700530 result = v->counter;
531 result += i;
532 v->counter = result;
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000533 local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700534 }
535
536 return result;
537}
538
539static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
540{
541 unsigned long result;
542
543 if (cpu_has_llsc && R10000_LLSC_WAR) {
544 unsigned long temp;
545
546 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000547 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700548 "1: lld %1, %2 # atomic64_sub_return \n"
549 " subu %0, %1, %3 \n"
550 " scd %0, %2 \n"
551 " beqzl %0, 1b \n"
552 " subu %0, %1, %3 \n"
553 " sync \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000554 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700555 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
556 : "Ir" (i), "m" (v->counter)
557 : "memory");
558 } else if (cpu_has_llsc) {
559 unsigned long temp;
560
561 __asm__ __volatile__(
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000562 " .set mips3 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700563 "1: lld %1, %2 # atomic64_sub_return \n"
564 " subu %0, %1, %3 \n"
565 " scd %0, %2 \n"
566 " beqz %0, 1b \n"
567 " subu %0, %1, %3 \n"
568 " sync \n"
Maciej W. Rozyckiaac8aa72005-06-14 17:35:03 +0000569 " .set mips0 \n"
Linus Torvalds1da177e2005-04-16 15:20:36 -0700570 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
571 : "Ir" (i), "m" (v->counter)
572 : "memory");
573 } else {
574 unsigned long flags;
575
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000576 local_irq_save(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700577 result = v->counter;
578 result -= i;
579 v->counter = result;
Ralf Baechleb2d28b72005-12-07 18:57:52 +0000580 local_irq_restore(flags);
Linus Torvalds1da177e2005-04-16 15:20:36 -0700581 }
582
583 return result;
584}
585
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater or equal than @i.
 * The function returns the old value of @v minus @i.
 * Note: the difference is returned even when the store was skipped, so
 * a negative return means @v was left unchanged.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n" /* would go negative: skip the store */
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n" /* place the dsubu in beqzl's delay slot */
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n" /* would go negative: skip the store */
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n" /* place the dsubu in beqz's delay slot */
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		/* CPUs without ll/sc: mask interrupts around the update. */
		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}
649
/* Derived 64-bit operations, all built on the primitives above. */
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
716
Ralf Baechle875d43e2005-09-03 15:56:16 -0700717#endif /* CONFIG_64BIT */
Linus Torvalds1da177e2005-04-16 15:20:36 -0700718
/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions, so callers needing ordering around plain inc/dec must use
 * these explicit barriers.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
727
Christoph Lameterd3cb4872006-01-06 00:11:20 -0800728#include <asm-generic/atomic.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -0700729#endif /* _ASM_ATOMIC_H */