/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/cpu-features.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))

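/*
 * Illustrative (non-kernel) usage sketch of the basic accessors, using a
 * hypothetical driver-private counter; not part of this header's API:
 *
 *	static atomic_t my_refcnt = ATOMIC_INIT(1);
 *
 *	atomic_set(&my_refcnt, 5);
 *	if (atomic_read(&my_refcnt) > 0)
 *		atomic_inc(&my_refcnt);
 */
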
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}

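/*
 * The ll/sc sequences above retry until the store-conditional succeeds,
 * i.e. until no other CPU has written the counter between the load-linked
 * and the store-conditional.  A rough C-level sketch of the pattern
 * (illustrative only, not how the routines are implemented):
 *
 *	int temp;
 *	do {
 *		temp = load_linked(&v->counter);
 *		temp += i;
 *	} while (!store_conditional(&v->counter, temp));
 *
 * load_linked()/store_conditional() are hypothetical helpers standing in
 * for the ll/sc (or lld/scd) instruction pair.
 */
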
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

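/*
 * Illustrative (non-kernel) usage sketch: consume 'want' tokens from a
 * hypothetical budget counter only if enough remain; on failure the
 * counter is left unchanged.  Purely an example of the calling
 * convention, not part of this header's API:
 *
 *	static atomic_t budget = ATOMIC_INIT(16);
 *
 *	if (atomic_sub_if_positive(want, &budget) < 0)
 *		return -EBUSY;
 */
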
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

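/*
 * Illustrative (non-kernel) usage sketch of the cmpxchg-based helpers,
 * with a hypothetical object whose last reference may already be gone:
 *
 *	struct my_obj { atomic_t refcnt; };
 *
 *	static struct my_obj *my_obj_get(struct my_obj *p)
 *	{
 *		return atomic_inc_not_zero(&p->refcnt) ? p : NULL;
 *	}
 */
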
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

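/*
 * Illustrative (non-kernel) sketch of when the explicit barriers matter:
 * plain atomic_dec() is not serializing, so a store that must be visible
 * to other CPUs before the decrement needs smp_mb__before_atomic_dec().
 * The flag and counter names are hypothetical:
 *
 *	obj->done = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 */
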
#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */