blob: ee0214e00ab1c392ad1423e13fc2e7560b297dad [file] [log] [blame]
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/war.h>

/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 *
 * __scbeqz is the branch mnemonic used after every store-conditional in this
 * header; it is #undef'd at the bottom of the file so the name doesn't leak.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif
26
/*
 * __xchg_asm() - atomically exchange a memory location's value
 * @ld:  load-linked mnemonic ("ll" for 32-bit, "lld" for 64-bit)
 * @st:  store-conditional mnemonic ("sc" or "scd")
 * @m:   pointer to the word to exchange
 * @val: new value to store
 *
 * Evaluates to the value *@m held before the exchange.
 *
 * On CPUs with LL/SC the exchange is a load-linked/store-conditional loop:
 * if the sc fails (writes 0 to $1) the __scbeqz branch retries from 1:.
 * Without LL/SC, atomicity is approximated by disabling local interrupts
 * around a plain read-then-write (presumably sufficient because such CPUs
 * are uniprocessor — NOTE(review): not provable from this header alone).
 */
#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
57
/*
 * __xchg_u32() - atomically exchange a 32-bit quantity, returning the old
 * value. The barriers before and after the ll/sc sequence give xchg() its
 * fully-ordered semantics.
 */
static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
{
	__u32 retval;

	smp_mb__before_llsc();
	retval = __xchg_asm("ll", "sc", m, val);
	smp_llsc_mb();

	return retval;
}
68
#ifdef CONFIG_64BIT
/*
 * __xchg_u64() - 64-bit counterpart of __xchg_u32(), using lld/scd, with
 * the same full ordering.
 */
static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
{
	__u64 retval;

	smp_mb__before_llsc();
	retval = __xchg_asm("lld", "scd", m, val);
	smp_llsc_mb();

	return retval;
}
#else
/* Deliberately undefined: any 64-bit xchg on a 32-bit kernel fails to link. */
extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 * m, __u64 val);
#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
#endif
84
/*
 * __xchg() - dispatch an atomic exchange by operand size (4 or 8 bytes).
 *
 * For any other @size this silently returns @x with *@ptr untouched; the
 * xchg() wrapper below is relied upon to reject such sizes at compile time.
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 4:
		return __xchg_u32(ptr, x);
	case 8:
		return __xchg_u64(ptr, x);
	}

	return x;
}
96
/*
 * xchg() - atomically exchange *@ptr with @x, returning the previous value.
 *
 * Only 4- and 8-byte operands are implemented (see __xchg()), so reject
 * every other size at compile time. The previous guard,
 * BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc), accepted any size whose set bits are
 * a subset of 0xc (e.g. 12), for which __xchg() would silently return @x
 * without touching memory; the explicit check closes that hole.
 */
#define xchg(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 4 && sizeof(*(ptr)) != 8);	\
									\
	((__typeof__(*(ptr)))						\
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));	\
})
Ralf Baechlefef74702007-10-01 04:15:00 +0100104
/*
 * __cmpxchg_asm() - atomically compare-and-exchange a memory location
 * @ld:  load-linked mnemonic ("ll" or "lld")
 * @st:  store-conditional mnemonic ("sc" or "scd")
 * @m:   pointer to the word to operate on
 * @old: value *@m must currently hold for the store to take place
 * @new: value stored when *@m == @old
 *
 * Evaluates to the value *@m held before the operation; the store happened
 * iff that value equals @old.
 *
 * LL/SC CPUs: a mismatch with @old branches straight out (2:) without
 * storing; a failed sc (wrote 0 to $1) retries from 1: via __scbeqz.
 * CPUs without LL/SC fall back to an interrupt-disabled
 * read/compare/write.
 */
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm \n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
138
/*
 * This function doesn't exist, so if it is called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 *
 * It is reached only from the default case of __cmpxchg() below, i.e. for
 * operand sizes that cmpxchg() does not implement.
 */
extern void __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
Ralf Baechlefef74702007-10-01 04:15:00 +0100151
/*
 * __cmpxchg() - size-dispatched compare-and-exchange.
 *
 * @pre_barrier / @post_barrier are statements emitted before and after the
 * operation — full barriers for cmpxchg(), empty for cmpxchg_local().
 * Unsupported sizes — including 8 bytes on a 32-bit kernel, where case 8
 * deliberately falls through — hit the default case and trigger a compile-
 * or link-time error via __cmpxchg_called_with_bad_pointer().
 */
#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)		\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __res = 0;					\
									\
	pre_barrier;							\
									\
	switch (sizeof(*(__ptr))) {					\
	case 4:								\
		__res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new); \
		break;							\
	case 8:								\
		if (sizeof(long) == 8) {				\
			__res = __cmpxchg_asm("lld", "scd", __ptr,	\
					      __old, __new);		\
			break;						\
		}							\
		/* fall through: 8-byte cmpxchg on a 32-bit kernel */	\
	default:							\
		__cmpxchg_called_with_bad_pointer();			\
		break;							\
	}								\
									\
	post_barrier;							\
									\
	__res;								\
})
180
/* cmpxchg() is fully ordered; cmpxchg_local() adds no barriers at all. */
#define cmpxchg(ptr, old, new) __cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
#define cmpxchg_local(ptr, old, new) __cmpxchg(ptr, old, new, , )
Ralf Baechlefef74702007-10-01 04:15:00 +0100183
#ifdef CONFIG_64BIT
/*
 * On 64-bit kernels the 64-bit variants are just the generic operations
 * plus a compile-time check that the operand really is 8 bytes wide.
 */
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
/*
 * NOTE(review): on 32-bit kernels cmpxchg64() aliases the _local variant,
 * so it presumably lacks SMP atomicity/ordering — confirm against callers.
 */
#define cmpxchg64(ptr, o, n)  cmpxchg64_local((ptr), (o), (n))
#endif

/* Only needed to assemble the ll/sc loops above; don't leak the name. */
#undef __scbeqz

#endif /* __ASM_CMPXCHG_H */