blob: a19359649b60bf432d325011d8f2dcc770ec4094 [file] [log] [blame]
Ralf Baechlefef74702007-10-01 04:15:00 +01001/*
2 * This file is subject to the terms and conditions of the GNU General Public
3 * License. See the file "COPYING" in the main directory of this archive
4 * for more details.
5 *
6 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
7 */
8#ifndef __ASM_CMPXCHG_H
9#define __ASM_CMPXCHG_H
10
Aaro Koskinen5520e422012-07-19 09:11:15 +020011#include <linux/bug.h>
Ralf Baechlefef74702007-10-01 04:15:00 +010012#include <linux/irqflags.h>
Maciej W. Rozyckib0984c42014-11-15 22:08:48 +000013#include <asm/compiler.h>
David Howellsb81947c2012-03-28 18:30:02 +010014#include <asm/war.h>
15
Paul Burton6b1e7622017-06-09 17:26:33 -070016/*
17 * Using a branch-likely instruction to check the result of an sc instruction
18 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
19 * cause ll-sc sequences to execute non-atomically.
20 */
21#if R10000_LLSC_WAR
22# define __scbeqz "beqzl"
23#else
24# define __scbeqz "beqz"
25#endif
26
Paul Burton5154f3b2017-06-09 17:26:34 -070027#define __xchg_asm(ld, st, m, val) \
28({ \
29 __typeof(*(m)) __ret; \
30 \
31 if (kernel_uses_llsc) { \
32 __asm__ __volatile__( \
33 " .set push \n" \
34 " .set noat \n" \
35 " .set " MIPS_ISA_ARCH_LEVEL " \n" \
36 "1: " ld " %0, %2 # __xchg_asm \n" \
37 " .set mips0 \n" \
38 " move $1, %z3 \n" \
39 " .set " MIPS_ISA_ARCH_LEVEL " \n" \
40 " " st " $1, %1 \n" \
41 "\t" __scbeqz " $1, 1b \n" \
42 " .set pop \n" \
43 : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
44 : GCC_OFF_SMALL_ASM() (*m), "Jr" (val) \
45 : "memory"); \
46 } else { \
47 unsigned long __flags; \
48 \
49 raw_local_irq_save(__flags); \
50 __ret = *m; \
51 *m = val; \
52 raw_local_irq_restore(__flags); \
53 } \
54 \
55 __ret; \
56})
57
David Howellsb81947c2012-03-28 18:30:02 +010058static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
59{
60 __u32 retval;
61
62 smp_mb__before_llsc();
Paul Burton5154f3b2017-06-09 17:26:34 -070063 retval = __xchg_asm("ll", "sc", m, val);
David Howellsb81947c2012-03-28 18:30:02 +010064 smp_llsc_mb();
65
66 return retval;
67}
68
69#ifdef CONFIG_64BIT
70static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
71{
72 __u64 retval;
73
74 smp_mb__before_llsc();
Paul Burton5154f3b2017-06-09 17:26:34 -070075 retval = __xchg_asm("lld", "scd", m, val);
David Howellsb81947c2012-03-28 18:30:02 +010076 smp_llsc_mb();
77
78 return retval;
79}
80#else
81extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 * m, __u64 val);
82#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
83#endif
84
/*
 * Size-dispatching helper behind the xchg() macro.  @size is always a
 * compile-time constant (sizeof the operand), so for the supported sizes
 * the switch collapses to a single call and the bad-size path is dead
 * code.  Previously an unsupported size silently returned @x unmodified;
 * now it references a deliberately undefined function so that any such
 * use fails at link time, matching __cmpxchg_called_with_bad_pointer()
 * used by __cmpxchg() below.
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	/* Doesn't exist anywhere; calling it causes a link error. */
	extern void __xchg_called_with_bad_pointer(void);

	switch (size) {
	case 4:
		return __xchg_u32(ptr, x);
	case 8:
		return __xchg_u64(ptr, x);
	default:
		__xchg_called_with_bad_pointer();
	}

	return x;
}
96
/*
 * xchg - atomically exchange *(ptr) for x, returning the old value.
 *
 * The BUILD_BUG_ON rejects any operand whose size has bits outside 0xc
 * set, i.e. in practice only 4- and 8-byte objects are accepted; 1- and
 * 2-byte exchanges are not implemented here.
 */
#define xchg(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc);				\
									\
	((__typeof__(*(ptr)))						\
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));	\
})
Ralf Baechlefef74702007-10-01 04:15:00 +0100104
Ralf Baechlefef74702007-10-01 04:15:00 +0100105#define __cmpxchg_asm(ld, st, m, old, new) \
106({ \
107 __typeof(*(m)) __ret; \
108 \
Paul Burton6b1e7622017-06-09 17:26:33 -0700109 if (kernel_uses_llsc) { \
Ralf Baechlefef74702007-10-01 04:15:00 +0100110 __asm__ __volatile__( \
111 " .set push \n" \
112 " .set noat \n" \
Markos Chandrasfa998eb2014-11-20 13:31:48 +0000113 " .set "MIPS_ISA_ARCH_LEVEL" \n" \
Ralf Baechle70342282013-01-22 12:59:30 +0100114 "1: " ld " %0, %2 # __cmpxchg_asm \n" \
Ralf Baechlefef74702007-10-01 04:15:00 +0100115 " bne %0, %z3, 2f \n" \
116 " .set mips0 \n" \
117 " move $1, %z4 \n" \
Markos Chandrasfa998eb2014-11-20 13:31:48 +0000118 " .set "MIPS_ISA_ARCH_LEVEL" \n" \
Ralf Baechlefef74702007-10-01 04:15:00 +0100119 " " st " $1, %1 \n" \
Paul Burton6b1e7622017-06-09 17:26:33 -0700120 "\t" __scbeqz " $1, 1b \n" \
Ralf Baechlefef74702007-10-01 04:15:00 +0100121 " .set pop \n" \
Ralf Baechle78373142010-10-29 19:08:24 +0100122 "2: \n" \
Markos Chandras94bfb752015-01-26 12:44:11 +0000123 : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
124 : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \
Ralf Baechlefef74702007-10-01 04:15:00 +0100125 : "memory"); \
126 } else { \
127 unsigned long __flags; \
128 \
129 raw_local_irq_save(__flags); \
130 __ret = *m; \
131 if (__ret == old) \
132 *m = new; \
133 raw_local_irq_restore(__flags); \
134 } \
135 \
136 __ret; \
137})
138
139/*
140 * This function doesn't exist, so you'll get a linker error
141 * if something tries to do an invalid cmpxchg().
142 */
143extern void __cmpxchg_called_with_bad_pointer(void);
144
David Daneyf252ffd2010-01-08 17:17:43 -0800145#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier) \
Ralf Baechlefef74702007-10-01 04:15:00 +0100146({ \
147 __typeof__(ptr) __ptr = (ptr); \
148 __typeof__(*(ptr)) __old = (old); \
149 __typeof__(*(ptr)) __new = (new); \
150 __typeof__(*(ptr)) __res = 0; \
151 \
David Daneyf252ffd2010-01-08 17:17:43 -0800152 pre_barrier; \
Ralf Baechlefef74702007-10-01 04:15:00 +0100153 \
154 switch (sizeof(*(__ptr))) { \
155 case 4: \
Ralf Baechle70342282013-01-22 12:59:30 +0100156 __res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new); \
Ralf Baechlefef74702007-10-01 04:15:00 +0100157 break; \
158 case 8: \
159 if (sizeof(long) == 8) { \
160 __res = __cmpxchg_asm("lld", "scd", __ptr, \
161 __old, __new); \
162 break; \
163 } \
164 default: \
165 __cmpxchg_called_with_bad_pointer(); \
166 break; \
167 } \
168 \
David Daneyf252ffd2010-01-08 17:17:43 -0800169 post_barrier; \
Ralf Baechlefef74702007-10-01 04:15:00 +0100170 \
171 __res; \
172})
173
David Daneyf252ffd2010-01-08 17:17:43 -0800174#define cmpxchg(ptr, old, new) __cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
175#define cmpxchg_local(ptr, old, new) __cmpxchg(ptr, old, new, , )
Ralf Baechlefef74702007-10-01 04:15:00 +0100176
Mathieu Desnoyers3b96a562008-02-07 00:16:09 -0800177#ifdef CONFIG_64BIT
178#define cmpxchg64_local(ptr, o, n) \
179 ({ \
180 BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
181 cmpxchg_local((ptr), (o), (n)); \
182 })
Deng-Cheng Zhue2093c72015-03-07 10:30:20 -0800183
184#define cmpxchg64(ptr, o, n) \
185 ({ \
186 BUILD_BUG_ON(sizeof(*(ptr)) != 8); \
187 cmpxchg((ptr), (o), (n)); \
188 })
Mathieu Desnoyers3b96a562008-02-07 00:16:09 -0800189#else
190#include <asm-generic/cmpxchg-local.h>
191#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
Deng-Cheng Zhue2093c72015-03-07 10:30:20 -0800192#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
Mathieu Desnoyers3b96a562008-02-07 00:16:09 -0800193#endif
194
Paul Burton6b1e7622017-06-09 17:26:33 -0700195#undef __scbeqz
196
Ralf Baechlefef74702007-10-01 04:15:00 +0100197#endif /* __ASM_CMPXCHG_H */