/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)		{ (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		ACCESS_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))

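/*
 * Illustrative use of the basic accessors (hypothetical code, not
 * part of this header):
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 1);
 *	pr_info("users: %d\n", atomic_read(&nr_users));
 */
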
#define ATOMIC_OP(op, c_op, asm_op)					      \
static __inline__ void atomic_##op(int i, atomic_t * v)		      \
{									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	ll	%0, %1		# atomic_" #op "	\n"   \
		"	" #asm_op " %0, %2				\n"   \
		"	sc	%0, %1					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)	      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		int temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	arch=r4000			\n"   \
			"	ll	%0, %1		# atomic_" #op "\n"   \
			"	" #asm_op " %0, %2			\n"   \
			"	sc	%0, %1				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)      \
			: "Ir" (i));					      \
		} while (unlikely(!temp));				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
}

#define ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
static __inline__ int atomic_##op##_return(int i, atomic_t * v)	      \
{									      \
	int result;							      \
									      \
	smp_mb__before_llsc();						      \
									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	ll	%1, %2		# atomic_" #op "_return	\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	sc	%0, %2					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	 " #asm_op " %0, %1, %3				\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF12_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		int temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	arch=r4000			\n"   \
			"	ll	%1, %2	# atomic_" #op "_return	\n"   \
			"	" #asm_op " %0, %1, %3			\n"   \
			"	sc	%0, %2				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (result), "=&r" (temp),			      \
			  "+" GCC_OFF12_ASM() (v->counter)		      \
			: "Ir" (i));					      \
		} while (unlikely(!result));				      \
									      \
		result = temp; result c_op i;				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		result c_op i;						      \
		v->counter = result;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	smp_llsc_mb();							      \
									      \
	return result;							      \
}

#define ATOMIC_OPS(op, c_op, asm_op)					      \
	ATOMIC_OP(op, c_op, asm_op)					      \
	ATOMIC_OP_RETURN(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

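/*
 * The two macros above expand to atomic_add(), atomic_sub(),
 * atomic_add_return() and atomic_sub_return().  Ignoring the
 * barriers and the R10000 workaround, the LL/SC variant of
 * atomic_add_return() behaves like this illustrative C sketch
 * (store_conditional() is a hypothetical stand-in for the sc
 * instruction's success flag):
 *
 *	int atomic_add_return_sketch(int i, atomic_t *v)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = v->counter;	// ll: load-linked
 *			new = old + i;
 *		} while (!store_conditional(&v->counter, new));
 *		return new;			// the updated value
 *	}
 */
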
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF12_ASM() (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF12_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

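/*
 * Illustrative use (hypothetical): a "try down" operation for a
 * counting-semaphore style slot counter, built on
 * atomic_sub_if_positive():
 *
 *	static atomic_t slots = ATOMIC_INIT(4);
 *
 *	int try_take_slot(void)
 *	{
 *		// consumes a slot and returns true only if one
 *		// was available
 *		return atomic_sub_if_positive(1, &slots) >= 0;
 *	}
 */
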
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

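/*
 * Illustrative use of atomic_xchg() (hypothetical, not from this
 * file): a one-shot latch; only the first caller sees the old
 * value 0:
 *
 *	static atomic_t oops_reported = ATOMIC_INIT(0);
 *
 *	if (atomic_xchg(&oops_reported, 1) == 0)
 *		report_oops();		// runs at most once
 */
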
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

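/*
 * This is the canonical cmpxchg() retry loop: re-read the counter
 * until either the guard value @u is observed or the cmpxchg()
 * succeeds.  The generic atomic_add_unless() and
 * atomic_inc_not_zero() wrappers in <linux/atomic.h> are built on
 * it.  Illustrative use (obj and its refcnt are hypothetical):
 *
 *	// take a reference only if the object is still live
 *	if (!__atomic_add_unless(&obj->refcnt, 1, 0))
 *		obj = NULL;
 */
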
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

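/*
 * The classic use of atomic_dec_and_test() is reference counting
 * (illustrative sketch; struct obj and release_obj() are
 * hypothetical):
 *
 *	void put_obj(struct obj *obj)
 *	{
 *		// only the thread that drops the count to zero
 *		// may free the object
 *		if (atomic_dec_and_test(&obj->refcnt))
 *			release_obj(obj);
 *	}
 */
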
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return((i), (v)) < 0)

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	ACCESS_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	((v)->counter = (i))

#define ATOMIC64_OP(op, c_op, asm_op)					      \
static __inline__ void atomic64_##op(long i, atomic64_t * v)		      \
{									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	lld	%0, %1		# atomic64_" #op "	\n"   \
		"	" #asm_op " %0, %2				\n"   \
		"	scd	%0, %1					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)	      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		long temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	arch=r4000			\n"   \
			"	lld	%0, %1	# atomic64_" #op "\n"	      \
			"	" #asm_op " %0, %2			\n"   \
			"	scd	%0, %1				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)      \
			: "Ir" (i));					      \
		} while (unlikely(!temp));				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				      \
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	      \
{									      \
	long result;							      \
									      \
	smp_mb__before_llsc();						      \
									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000				\n"   \
		"1:	lld	%1, %2		# atomic64_" #op "_return\n"  \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	scd	%0, %2					\n"   \
		"	beqzl	%0, 1b					\n"   \
		"	 " #asm_op " %0, %1, %3				\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF12_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		long temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	arch=r4000			\n"   \
			"	lld	%1, %2	# atomic64_" #op "_return\n"  \
			"	" #asm_op " %0, %1, %3			\n"   \
			"	scd	%0, %2				\n"   \
			"	.set	mips0				\n"   \
			: "=&r" (result), "=&r" (temp),			      \
			  "+" GCC_OFF12_ASM() (v->counter)		      \
			: "Ir" (i));					      \
		} while (unlikely(!result));				      \
									      \
		result = temp; result c_op i;				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		result c_op i;						      \
		v->counter = result;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	smp_llsc_mb();							      \
									      \
	return result;							      \
}

#define ATOMIC64_OPS(op, c_op, asm_op)					      \
	ATOMIC64_OP(op, c_op, asm_op)					      \
	ATOMIC64_OP_RETURN(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

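/*
 * The 64-bit variants generated above mirror their 32-bit
 * counterparts, substituting lld/scd for ll/sc and daddu/dsubu for
 * addu/subu.  Illustrative use (hypothetical names):
 *
 *	static atomic64_t bytes_rx = ATOMIC64_INIT(0);
 *
 *	// running total after accounting this buffer
 *	long total = atomic64_add_return(len, &bytes_rx);
 */
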
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF12_ASM() (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF12_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if the add was done, zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return((i), (v)) < 0)

#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */