/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)          { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          (*(volatile int *)&(v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)        ((v)->counter = (i))

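/*
 * Example usage (an illustrative sketch only, not part of this header;
 * the "nr_widgets" counter and the functions below are hypothetical):
 *
 *      static atomic_t nr_widgets = ATOMIC_INIT(0);
 *
 *      void widget_created(void)
 *      {
 *              atomic_inc(&nr_widgets);
 *      }
 *
 *      int widgets_outstanding(void)
 *      {
 *              return atomic_read(&nr_widgets);
 *      }
 */
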
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     ll      %0, %1          # atomic_add    \n"
                "       addu    %0, %2                          \n"
                "       sc      %0, %1                          \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     ll      %0, %1          # atomic_add    \n"
                "       addu    %0, %2                          \n"
                "       sc      %0, %1                          \n"
                "       beqz    %0, 2f                          \n"
                "       .subsection 2                           \n"
                "2:     b       1b                              \n"
                "       .previous                               \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}

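/*
 * Note on the structure above (an explanatory sketch of what the code in
 * this file does, not an authoritative statement of the errata involved):
 * each operation picks one of three strategies at compile time.  CPUs that
 * need R10000_LLSC_WAR retry the ll/sc sequence with the branch-likely form
 * (beqzl) looping straight back to the ll.  Other ll/sc-capable CPUs use a
 * plain beqz with the retry branch moved out of line (.subsection 2) so the
 * expected fall-through path stays short.  CPUs without ll/sc fall back to a
 * plain read-modify-write with interrupts disabled, which relies on such
 * systems being uniprocessor.
 */
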
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     ll      %0, %1          # atomic_sub    \n"
                "       subu    %0, %2                          \n"
                "       sc      %0, %1                          \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     ll      %0, %1          # atomic_sub    \n"
                "       subu    %0, %2                          \n"
                "       sc      %0, %1                          \n"
                "       beqz    %0, 2f                          \n"
                "       .subsection 2                           \n"
                "2:     b       1b                              \n"
                "       .previous                               \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 2f                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

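/*
 * Illustrative sketch only (the "sem" pointer below is hypothetical):
 * atomic_sub_if_positive() can be used to claim a unit of a counted
 * resource only when one is available, somewhat like a semaphore trydown:
 *
 *      if (atomic_sub_if_positive(1, &sem->count) < 0)
 *              return -EBUSY;
 *
 * A negative return value means the counter was below @i and was left
 * unchanged.
 */
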
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

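/*
 * Illustrative sketch only (the object and lookup below are hypothetical):
 * atomic_inc_not_zero() is the usual way to take a reference on an object
 * whose last reference may be dropped concurrently; if the count has
 * already reached zero the object must not be used:
 *
 *      obj = lookup_object(key);
 *      if (obj && !atomic_inc_not_zero(&obj->refcnt))
 *              obj = NULL;
 */
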
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

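/*
 * Illustrative sketch only (the object and its destructor are hypothetical):
 * atomic_dec_and_test() is commonly used to free an object when its last
 * reference is dropped; exactly one caller sees the counter reach zero:
 *
 *      if (atomic_dec_and_test(&obj->refcnt))
 *              free_object(obj);
 */
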
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)        { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        (*(volatile long *)&(v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)      ((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     lld     %0, %1          # atomic64_add  \n"
                "       daddu   %0, %2                          \n"
                "       scd     %0, %1                          \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     lld     %0, %1          # atomic64_add  \n"
                "       daddu   %0, %2                          \n"
                "       scd     %0, %1                          \n"
                "       beqz    %0, 2f                          \n"
                "       .subsection 2                           \n"
                "2:     b       1b                              \n"
                "       .previous                               \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     lld     %0, %1          # atomic64_sub  \n"
                "       dsubu   %0, %2                          \n"
                "       scd     %0, %1                          \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     lld     %0, %1          # atomic64_sub  \n"
                "       dsubu   %0, %2                          \n"
                "       scd     %0, %1                          \n"
                "       beqz    %0, 2f                          \n"
                "       .subsection 2                           \n"
                "2:     b       1b                              \n"
                "       .previous                               \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 2f                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()     smp_mb__before_llsc()
#define smp_mb__after_atomic_dec()      smp_llsc_mb()
#define smp_mb__before_atomic_inc()     smp_mb__before_llsc()
#define smp_mb__after_atomic_inc()      smp_llsc_mb()
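
/*
 * Illustrative sketch only (the flag and refcount fields are hypothetical):
 * because the non-returning atomic_dec()/atomic_inc() are not serializing,
 * a store that must be visible before the counter update needs an explicit
 * barrier:
 *
 *      obj->dead = 1;
 *      smp_mb__before_atomic_dec();
 *      atomic_dec(&obj->refcnt);
 */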

#include <asm-generic/atomic-long.h>

#endif /* _ASM_ATOMIC_H */