#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
 * except that the local versions do not have the expensive memory barriers.
 * So this file is included twice from asm/cmpxchg.h.
 */
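
/*
 * Illustrative sketch (an assumption about the includer, not part of this
 * header): asm/cmpxchg.h is expected to define the ____xchg()/____cmpxchg()
 * token-pasting macros before each inclusion, roughly as below, so that the
 * one body here expands into both the _local and the fully ordered variants:
 *
 *	#define ____xchg(type, args...)	__xchg ## type ## _local(args)
 *	#include <asm/xchg.h>
 *	#undef ____xchg
 *	#define ____xchg(type, args...)	__xchg ## type(args)
 *	#include <asm/xchg.h>
 */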

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 *
 * The leading and the trailing memory barriers guarantee that these
 * operations are fully ordered.
 */
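
/*
 * Minimal usage sketch, assuming the generic xchg() wrapper in asm/cmpxchg.h
 * dispatches to ____xchg() by operand size (hypothetical caller, for
 * illustration only):
 *
 *	unsigned long lock_word = 0;
 *
 *	// Atomically store 1 and return the previous value; exactly one
 *	// concurrent caller observes 0.
 *	unsigned long old = xchg(&lock_word, 1UL);
 */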

static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	insbl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extbl	%2,%4,%0\n"
	"	mskbl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}
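
/*
 * Note on the _u8/_u16 variants above and below: Alpha provides
 * load-locked/store-conditional only for 32- and 64-bit quantities, so the
 * byte and word forms operate on the containing aligned quadword (andnot
 * aligns the address, ldq_l/stq_c perform the atomic update) and splice the
 * new value in with the insert/extract/mask byte-manipulation instructions.
 */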

static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%4,7,%3\n"
	"	inswl	%1,%4,%1\n"
	"1:	ldq_l	%2,0(%3)\n"
	"	extwl	%2,%4,%0\n"
	"	mskwl	%2,%4,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%3)\n"
	"	beq	%2,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br	1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
	unsigned long dummy;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
	unsigned long dummy;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l %0,%4\n"
	"	bis $31,%3,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg(). */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
		return ____xchg(_u8, ptr, x);
	case 2:
		return ____xchg(_u16, ptr, x);
	case 4:
		return ____xchg(_u32, ptr, x);
	case 8:
		return ____xchg(_u64, ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}

/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 *
 * The leading and the trailing memory barriers guarantee that these
 * operations are fully ordered.
 *
 * The trailing memory barrier is placed in SMP unconditionally, in
 * order to guarantee that dependency ordering is preserved when a
 * dependency is headed by an unsuccessful operation.
 */
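
/*
 * Minimal usage sketch, assuming the generic cmpxchg() wrapper in
 * asm/cmpxchg.h routes to ____cmpxchg() by operand size (hypothetical
 * caller, for illustration only):
 *
 *	int owner = 0;
 *
 *	// Atomically change 0 -> 1; the return value tells us whether we
 *	// were the caller that actually performed the transition.
 *	if (cmpxchg(&owner, 0, 1) == 0)
 *		take_ownership();	// hypothetical helper
 */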

static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	insbl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extbl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskbl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
		__ASM__MB
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	smp_mb();
	__asm__ __volatile__(
	"	andnot	%5,7,%4\n"
	"	inswl	%1,%5,%1\n"
	"1:	ldq_l	%2,0(%4)\n"
	"	extwl	%2,%5,%0\n"
	"	cmpeq	%0,%6,%3\n"
	"	beq	%3,2f\n"
	"	mskwl	%2,%5,%2\n"
	"	or	%1,%2,%2\n"
	"	stq_c	%2,0(%4)\n"
	"	beq	%2,3f\n"
	"2:\n"
		__ASM__MB
	".subsection 2\n"
	"3:	br	1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stl_c %1,%2\n"
	"	beq %1,3f\n"
	"2:\n"
		__ASM__MB
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l %0,%5\n"
	"	cmpeq %0,%3,%1\n"
	"	beq %1,2f\n"
	"	mov %4,%1\n"
	"	stq_c %1,%2\n"
	"	beq %1,3f\n"
	"2:\n"
		__ASM__MB
	".subsection 2\n"
	"3:	br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
	    int size)
{
	switch (size) {
	case 1:
		return ____cmpxchg(_u8, ptr, old, new);
	case 2:
		return ____cmpxchg(_u16, ptr, old, new);
	case 4:
		return ____cmpxchg(_u32, ptr, old, new);
	case 8:
		return ____cmpxchg(_u64, ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#endif