/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are slower than
 * regular operations.
 *
 * Copyright (C) 2004-2006 Atmel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __ASM_AVR32_ATOMIC_H
#define __ASM_AVR32_ATOMIC_H

#include <linux/types.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		(*(volatile int *)&(v)->counter)
#define atomic_set(v, i)	(((v)->counter) = i)

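/*
 * All of the routines below share the same store-conditional retry loop:
 * "ssrf 5" sets the lock (L) flag in the status register, the value is
 * loaded and modified, and "stcond" performs the store only if L is still
 * set.  Returning from an interrupt or exception clears L, so a sequence
 * that was preempted fails its store and "brne 1b" restarts it from the
 * top.
 */
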
/*
 * atomic_sub_return - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v. Returns the resulting value.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	int result;

	asm volatile(
		"/* atomic_sub_return */\n"
		"1:	ssrf	5\n"
		"	ld.w	%0, %2\n"
		"	sub	%0, %3\n"
		"	stcond	%1, %0\n"
		"	brne	1b"
		: "=&r"(result), "=o"(v->counter)
		: "m"(v->counter), "rKs21"(i)
		: "cc");

	return result;
}

/*
 * atomic_add_return - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v. Returns the resulting value.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int result;

	if (__builtin_constant_p(i) && (i >= -1048575) && (i <= 1048576))
		result = atomic_sub_return(-i, v);
	else
		asm volatile(
			"/* atomic_add_return */\n"
			"1:	ssrf	5\n"
			"	ld.w	%0, %1\n"
			"	add	%0, %3\n"
			"	stcond	%2, %0\n"
			"	brne	1b"
			: "=&r"(result), "=o"(v->counter)
			: "m"(v->counter), "r"(i)
			: "cc", "memory");

	return result;
}

/*
 * atomic_sub_unless - sub unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to subtract from v...
 * @u: ...unless v is equal to u.
 *
 * Atomically subtracts @a from @v, so long as @v was not @u.
 */
static inline void atomic_sub_unless(atomic_t *v, int a, int u)
{
	int tmp;

	asm volatile(
		"/* atomic_sub_unless */\n"
		"1:	ssrf	5\n"
		"	ld.w	%0, %2\n"
		"	cp.w	%0, %4\n"
		"	breq	1f\n"
		"	sub	%0, %3\n"
		"	stcond	%1, %0\n"
		"	brne	1b\n"
		"1:"
		: "=&r"(tmp), "=o"(v->counter)
		: "m"(v->counter), "rKs21"(a), "rKs21"(u)
		: "cc", "memory");
}

/*
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not @u.
 * Returns the old value of @v.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int tmp, old = atomic_read(v);

	if (__builtin_constant_p(a) && (a >= -1048575) && (a <= 1048576))
		atomic_sub_unless(v, -a, u);
	else {
		asm volatile(
			"/* __atomic_add_unless */\n"
			"1:	ssrf	5\n"
			"	ld.w	%0, %2\n"
			"	cp.w	%0, %4\n"
			"	breq	1f\n"
			"	add	%0, %3\n"
			"	stcond	%1, %0\n"
			"	brne	1b\n"
			"1:"
			: "=&r"(tmp), "=o"(v->counter)
			: "m"(v->counter), "r"(a), "ir"(u)
			: "cc", "memory");
	}

	return old;
}
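
/*
 * Note: the generic atomic_add_unless() in <linux/atomic.h> is expected to
 * wrap this as "__atomic_add_unless(v, a, u) != u", i.e. it reports whether
 * the add actually happened, and atomic_inc_not_zero() builds on that in
 * turn.
 */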

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static inline int atomic_sub_if_positive(int i, atomic_t *v)
{
	int result;

	asm volatile(
		"/* atomic_sub_if_positive */\n"
		"1:	ssrf	5\n"
		"	ld.w	%0, %2\n"
		"	sub	%0, %3\n"
		"	brlt	1f\n"
		"	stcond	%1, %0\n"
		"	brne	1b\n"
		"1:"
		: "=&r"(result), "=o"(v->counter)
		: "m"(v->counter), "ir"(i)
		: "cc", "memory");

	return result;
}

#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))
#define atomic_cmpxchg(v, o, n)	(cmpxchg(&((v)->counter), (o), (n)))

#define atomic_sub(i, v)	(void)atomic_sub_return(i, v)
#define atomic_add(i, v)	(void)atomic_add_return(i, v)
#define atomic_dec(v)		atomic_sub(1, (v))
#define atomic_inc(v)		atomic_add(1, (v))

#define atomic_dec_return(v)	atomic_sub_return(1, v)
#define atomic_inc_return(v)	atomic_add_return(1, v)

#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)
#define atomic_inc_and_test(v) (atomic_add_return(1, v) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
#define atomic_add_negative(i, v) (atomic_add_return(i, v) < 0)

#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

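/*
 * Typical use is plain resource counting, e.g. (illustrative sketch only,
 * release_object() is a made-up helper):
 *
 *	atomic_t refs = ATOMIC_INIT(1);
 *
 *	atomic_inc(&refs);		take an extra reference
 *	if (atomic_dec_and_test(&refs))	drop one; true when the count hits zero
 *		release_object();
 */
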
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#endif /* __ASM_AVR32_ATOMIC_H */