#ifndef _ASM_POWERPC_CMPXCHG_H_
#define _ASM_POWERPC_CMPXCHG_H_

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
#include <linux/bug.h>

#ifdef __BIG_ENDIAN
#define BITOFF_CAL(size, off)	((sizeof(u32) - size - off) * BITS_PER_BYTE)
#else
#define BITOFF_CAL(size, off)	(off * BITS_PER_BYTE)
#endif

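/*
 * For example, on a big-endian kernel a u8 at byte offset 1 of its aligned
 * u32 gets BITOFF_CAL(1, 1) = (4 - 1 - 1) * 8 = 16, i.e. the value lives in
 * bits 16-23 of the word (mask 0x00ff0000); on little-endian the same byte
 * maps to bits 8-15 (mask 0x0000ff00).
 */
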
#define XCHG_GEN(type, sfx, cl)					\
static inline u32 __xchg_##type##sfx(volatile void *p, u32 val)	\
{								\
	unsigned int prev, prev_mask, tmp, bitoff, off;		\
								\
	off = (unsigned long)p % sizeof(u32);			\
	bitoff = BITOFF_CAL(sizeof(type), off);			\
	p -= off;						\
	val <<= bitoff;						\
	prev_mask = (u32)(type)-1 << bitoff;			\
								\
	__asm__ __volatile__(					\
"1:	lwarx	%0,0,%3\n"					\
"	andc	%1,%0,%5\n"					\
"	or	%1,%1,%4\n"					\
	PPC405_ERR77(0,%3)					\
"	stwcx.	%1,0,%3\n"					\
"	bne-	1b\n"						\
	: "=&r" (prev), "=&r" (tmp), "+m" (*(u32*)p)		\
	: "r" (p), "r" (val), "r" (prev_mask)			\
	: "cc", cl);						\
								\
	return prev >> bitoff;					\
}

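/*
 * XCHG_GEN above emulates a byte/halfword exchange with a word-sized
 * lwarx/stwcx. loop on the aligned u32 that contains the value: the old
 * word is loaded, the target lane is cleared with andc using prev_mask,
 * the pre-shifted new value is or-ed in, and the store-conditional is
 * retried until it succeeds.  Bytes outside the lane are written back
 * unchanged.
 */
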
#define CMPXCHG_GEN(type, sfx, br, br2, cl)			\
static inline							\
u32 __cmpxchg_##type##sfx(volatile void *p, u32 old, u32 new)	\
{								\
	unsigned int prev, prev_mask, tmp, bitoff, off;		\
								\
	off = (unsigned long)p % sizeof(u32);			\
	bitoff = BITOFF_CAL(sizeof(type), off);			\
	p -= off;						\
	old <<= bitoff;						\
	new <<= bitoff;						\
	prev_mask = (u32)(type)-1 << bitoff;			\
								\
	__asm__ __volatile__(					\
	br							\
"1:	lwarx	%0,0,%3\n"					\
"	and	%1,%0,%6\n"					\
"	cmpw	0,%1,%4\n"					\
"	bne-	2f\n"						\
"	andc	%1,%0,%6\n"					\
"	or	%1,%1,%5\n"					\
	PPC405_ERR77(0,%3)					\
"	stwcx.	%1,0,%3\n"					\
"	bne-	1b\n"						\
	br2							\
	"\n"							\
"2:"								\
	: "=&r" (prev), "=&r" (tmp), "+m" (*(u32*)p)		\
	: "r" (p), "r" (old), "r" (new), "r" (prev_mask)	\
	: "cc", cl);						\
								\
	return prev >> bitoff;					\
}

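/*
 * CMPXCHG_GEN reuses the same sub-word technique and takes its entry
 * barrier (br), exit barrier (br2) and clobber list (cl) as parameters, so
 * the fully ordered, _local, _acquire and _relaxed variants instantiated
 * further down can share a single ll/sc template.
 */
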
/*
 * Atomic exchange
 *
 * Changes the memory location '*p' to be val and returns
 * the previous value stored there.
 */

XCHG_GEN(u8, _local, "memory");
XCHG_GEN(u8, _relaxed, "cc");
XCHG_GEN(u16, _local, "memory");
XCHG_GEN(u16, _relaxed, "cc");

static __always_inline unsigned long
__xchg_u32_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%3,0,%2 \n\
	bne-	1b"
	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__xchg_u32_relaxed(u32 *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2\n"
	PPC405_ERR77(0, %2)
"	stwcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc");

	return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__xchg_u64_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2 \n"
	PPC405_ERR77(0,%2)
"	stdcx.	%3,0,%2 \n\
	bne-	1b"
	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__xchg_u64_relaxed(u64 *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2\n"
	PPC405_ERR77(0, %2)
"	stdcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc");

	return prev;
}
#endif

static __always_inline unsigned long
__xchg_local(void *ptr, unsigned long x, unsigned int size)
{
	switch (size) {
	case 1:
		return __xchg_u8_local(ptr, x);
	case 2:
		return __xchg_u16_local(ptr, x);
	case 4:
		return __xchg_u32_local(ptr, x);
#ifdef CONFIG_PPC64
	case 8:
		return __xchg_u64_local(ptr, x);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg");
	return x;
}

static __always_inline unsigned long
__xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
{
	switch (size) {
	case 1:
		return __xchg_u8_relaxed(ptr, x);
	case 2:
		return __xchg_u16_relaxed(ptr, x);
	case 4:
		return __xchg_u32_relaxed(ptr, x);
#ifdef CONFIG_PPC64
	case 8:
		return __xchg_u64_relaxed(ptr, x);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_relaxed");
	return x;
}
#define xchg_local(ptr,x)						\
  ({									\
     __typeof__(*(ptr)) _x_ = (x);					\
     (__typeof__(*(ptr))) __xchg_local((ptr),				\
		(unsigned long)_x_, sizeof(*(ptr)));			\
   })

#define xchg_relaxed(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_relaxed((ptr),			\
			(unsigned long)_x_, sizeof(*(ptr)));		\
})
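
/*
 * Illustrative use, with hypothetical variables (not taken from this file):
 *
 *	u16 flag = 1;
 *	u16 old = xchg_relaxed(&flag, 0);
 *
 * old now holds 1 and flag holds 0; no ordering beyond the bare
 * lwarx/stwcx. sequence is implied by the _relaxed variant.
 */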
/*
 * Compare and exchange - if *p == old, set it to new,
 * and return the old value of *p.
 */

CMPXCHG_GEN(u8, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
CMPXCHG_GEN(u8, _local, , , "memory");
CMPXCHG_GEN(u8, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
CMPXCHG_GEN(u8, _relaxed, , , "cc");
CMPXCHG_GEN(u16, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
CMPXCHG_GEN(u16, _local, , , "memory");
CMPXCHG_GEN(u16, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
CMPXCHG_GEN(u16, _relaxed, , , "cc");

static __always_inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
	cmpw	0,%0,%3\n\
	bne-	2f\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%4,0,%2\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
			unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
	cmpw	0,%0,%3\n\
	bne-	2f\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%4,0,%2\n\
	bne-	1b"
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u32_relaxed(u32 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32_relaxed\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
	PPC405_ERR77(0, %2)
"	stwcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc");

	return prev;
}

/*
 * The cmpxchg family provides no ordering guarantee if the compare fails,
 * so implementing cmpxchg() and cmpxchg_acquire() in assembly lets us avoid
 * superfluous barriers.  We don't do the same for cmpxchg_release(), because
 * that would put a barrier in the middle of the ll/sc loop, which is
 * probably a bad idea: for example, it might make the conditional store more
 * likely to fail.
 */
static __always_inline unsigned long
__cmpxchg_u32_acquire(u32 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32_acquire\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
	PPC405_ERR77(0, %2)
"	stwcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ACQUIRE_BARRIER
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
	cmpd	0,%0,%3\n\
	bne-	2f\n\
	stdcx.	%4,0,%2\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
			unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
	cmpd	0,%0,%3\n\
	bne-	2f\n\
	stdcx.	%4,0,%2\n\
	bne-	1b"
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_relaxed(u64 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64_relaxed\n"
"	cmpd	0,%0,%3\n"
"	bne-	2f\n"
"	stdcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_acquire(u64 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64_acquire\n"
"	cmpd	0,%0,%3\n"
"	bne-	2f\n"
"	stdcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ACQUIRE_BARRIER
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
#endif

static __always_inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
	  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8(ptr, old, new);
	case 2:
		return __cmpxchg_u16(ptr, old, new);
	case 4:
		return __cmpxchg_u32(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg");
	return old;
}

static __always_inline unsigned long
__cmpxchg_local(void *ptr, unsigned long old, unsigned long new,
	  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8_local(ptr, old, new);
	case 2:
		return __cmpxchg_u16_local(ptr, old, new);
	case 4:
		return __cmpxchg_u32_local(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_local(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_local");
	return old;
}

static __always_inline unsigned long
__cmpxchg_relaxed(void *ptr, unsigned long old, unsigned long new,
		  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8_relaxed(ptr, old, new);
	case 2:
		return __cmpxchg_u16_relaxed(ptr, old, new);
	case 4:
		return __cmpxchg_u32_relaxed(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_relaxed(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_relaxed");
	return old;
}

static __always_inline unsigned long
__cmpxchg_acquire(void *ptr, unsigned long old, unsigned long new,
		  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8_acquire(ptr, old, new);
	case 2:
		return __cmpxchg_u16_acquire(ptr, old, new);
	case 4:
		return __cmpxchg_u32_acquire(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_acquire(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_acquire");
	return old;
}
#define cmpxchg(ptr, o, n)						\
  ({									\
     __typeof__(*(ptr)) _o_ = (o);					\
     __typeof__(*(ptr)) _n_ = (n);					\
     (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,		\
				    (unsigned long)_n_, sizeof(*(ptr))); \
  })


#define cmpxchg_local(ptr, o, n)					\
  ({									\
     __typeof__(*(ptr)) _o_ = (o);					\
     __typeof__(*(ptr)) _n_ = (n);					\
     (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_,	\
				    (unsigned long)_n_, sizeof(*(ptr))); \
  })

#define cmpxchg_relaxed(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_relaxed((ptr),			\
			(unsigned long)_o_, (unsigned long)_n_,		\
			sizeof(*(ptr)));				\
})

#define cmpxchg_acquire(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_acquire((ptr),			\
			(unsigned long)_o_, (unsigned long)_n_,		\
			sizeof(*(ptr)));				\
})
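
/*
 * Illustrative use, with a hypothetical u32 counter (not taken from this
 * file): a lock-free increment built on the fully ordered cmpxchg(),
 * retrying while another CPU updates the value first.
 *
 *	u32 old, cur = READ_ONCE(counter);
 *
 *	do {
 *		old = cur;
 *		cur = cmpxchg(&counter, old, old + 1);
 *	} while (cur != old);
 */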
#ifdef CONFIG_PPC64
#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })
#define cmpxchg64_relaxed(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_relaxed((ptr), (o), (n));				\
})
#define cmpxchg64_acquire(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_acquire((ptr), (o), (n));				\
})
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CMPXCHG_H_ */