/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/war.h>

/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif

/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");

#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

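/*
 * Illustrative sketch only: load_linked()/store_conditional() below are
 * hypothetical helpers standing in for the ll/sc (or lld/scd) instructions.
 * The LL/SC path of __xchg_asm above behaves roughly like:
 *
 *	do {
 *		__ret = load_linked(m);			// "ld" %0, %2
 *		tmp = val;				// move $1, %z3
 *	} while (!store_conditional(m, tmp));		// "st" $1, %1; __scbeqz $1, 1b
 *
 * i.e. the store is retried until it completes without an intervening
 * write to *m, which is what makes the exchange atomic.
 */
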
extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);

static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
				   int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small(ptr, x, size);

	case 4:
		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

	case 8:
		/* lld/scd are only available for MIPS64 */
		if (!IS_ENABLED(CONFIG_64BIT))
			return __xchg_called_with_bad_pointer();

		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

	default:
		return __xchg_called_with_bad_pointer();
	}
}

#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})

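/*
 * Usage sketch (illustrative only; example_lock and example_take() are
 * hypothetical and not part of this header): xchg() returns the previous
 * value, so it can implement a simple test-and-set style claim:
 *
 *	static unsigned int example_lock;
 *
 *	static bool example_take(void)
 *	{
 *		return xchg(&example_lock, 1) == 0;
 *	}
 *
 * The smp_mb__before_llsc()/smp_llsc_mb() pair above is what gives xchg()
 * its full memory barrier semantics on SMP.
 */
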
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

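/*
 * Illustrative sketch only (same hypothetical load_linked() and
 * store_conditional() helpers as above): the LL/SC path of __cmpxchg_asm
 * mirrors the irq-disabled fallback in its else branch:
 *
 *	__ret = load_linked(m);				// "ld" %0, %2
 *	if (__ret == old)				// bne  %0, %z3, 2f
 *		store_conditional(m, new);		// "st" $1, %1; retry from 1b on failure
 *
 * The old value is returned in either case; the store only happens when
 * the current value matched 'old'.
 */
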
extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, unsigned int size)
{
	switch (size) {
	case 1:
	case 2:
		return __cmpxchg_small(ptr, old, new, size);

	case 4:
		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
				     (u32)old, new);

	case 8:
		/* lld/scd are only available for MIPS64 */
		if (!IS_ENABLED(CONFIG_64BIT))
			return __cmpxchg_called_with_bad_pointer();

		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
				     (u64)old, new);

	default:
		return __cmpxchg_called_with_bad_pointer();
	}
}

#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))

#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
	__res = cmpxchg_local((ptr), (old), (new));			\
	smp_llsc_mb();							\
									\
	__res;								\
})

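/*
 * Usage sketch (illustrative only; example_count and example_add() are
 * hypothetical): the canonical lock-free update loop retries cmpxchg()
 * until no other CPU modified the value between the read and the update:
 *
 *	static unsigned long example_count;
 *
 *	static void example_add(unsigned long delta)
 *	{
 *		unsigned long old;
 *
 *		do {
 *			old = READ_ONCE(example_count);
 *		} while (cmpxchg(&example_count, old, old + delta) != old);
 *	}
 *
 * cmpxchg_local() above performs the same operation but without the
 * smp_mb__before_llsc()/smp_llsc_mb() ordering barriers.
 */
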
#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#ifndef CONFIG_SMP
#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
#endif
#endif
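/*
 * Note that 32-bit SMP kernels get no cmpxchg64() here: the generic
 * __cmpxchg64_local_generic() used above is only atomic with respect to
 * the local CPU, so it cannot stand in for a cross-CPU cmpxchg64().
 */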

#undef __scbeqz

#endif /* __ASM_CMPXCHG_H */