/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/types.h>

extern void __xchg_called_with_bad_pointer(void);

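/*
 * __xchg() unconditionally exchanges a value with memory.  The machine has
 * no 1- or 2-byte exchange instruction, so those sizes are emulated with a
 * compare-and-swap (CS) loop on the aligned 4-byte word that contains the
 * operand, shifting and masking the byte or halfword into place.
 */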
static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
{
	unsigned long addr, old;
	int shift;

	switch (size) {
	case 1:
		addr = (unsigned long) ptr;
		shift = (3 ^ (addr & 3)) << 3;
		addr ^= addr & 3;
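		/*
		 * shift is the bit offset of the byte within its aligned
		 * 4-byte word; s390 is big-endian, so byte 0 sits in the
		 * most significant bits.  addr is rounded down to the
		 * word boundary for the CS loop below.
		 */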
		asm volatile(
			"	l	%0,%4\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%4\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) addr)
			: "d" ((x & 0xff) << shift), "d" (~(0xff << shift)),
			  "Q" (*(int *) addr) : "memory", "cc", "0");
		return old >> shift;
	case 2:
		addr = (unsigned long) ptr;
		shift = (2 ^ (addr & 2)) << 3;
		addr ^= addr & 2;
		asm volatile(
			"	l	%0,%4\n"
			"0:	lr	0,%0\n"
			"	nr	0,%3\n"
			"	or	0,%2\n"
			"	cs	%0,0,%4\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) addr)
			: "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift)),
			  "Q" (*(int *) addr) : "memory", "cc", "0");
		return old >> shift;
	case 4:
		asm volatile(
			"	l	%0,%3\n"
			"0:	cs	%0,%2,%3\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(int *) ptr)
			: "d" (x), "Q" (*(int *) ptr)
			: "memory", "cc");
		return old;
#ifdef CONFIG_64BIT
	case 8:
		asm volatile(
			"	lg	%0,%3\n"
			"0:	csg	%0,%2,%3\n"
			"	jl	0b\n"
			: "=&d" (old), "=Q" (*(long *) ptr)
			: "d" (x), "Q" (*(long *) ptr)
			: "memory", "cc");
		return old;
#endif /* CONFIG_64BIT */
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#define xchg(ptr, x)						\
({								\
	__typeof__(*(ptr)) __ret;				\
	__ret = (__typeof__(*(ptr)))				\
		__xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
	__ret;							\
})
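
/*
 * Example usage (illustrative only; "lock" is a hypothetical variable):
 *
 *	unsigned int prev = xchg(&lock, 1);
 *	if (prev == 0)
 *		...the lock was free and is now taken...
 */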

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

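/*
 * Typical retry loop built on these semantics (illustrative only;
 * "counter" is a hypothetical variable):
 *
 *	int old, prev;
 *
 *	old = counter;
 *	while ((prev = cmpxchg(&counter, old, old + 1)) != old)
 *		old = prev;
 */
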
#define __HAVE_ARCH_CMPXCHG

extern void __cmpxchg_called_with_bad_pointer(void);

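/*
 * Like __xchg(), __cmpxchg() emulates 1- and 2-byte compare-and-swap with
 * a CS loop on the containing aligned word.  The loop retries only when CS
 * failed because one of the *other* bytes in the word changed underneath
 * us; if the target byte itself does not match the expected value, the
 * actual old contents are returned and the caller observes the failure.
 */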
static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long addr, prev, tmp;
	int shift;

	switch (size) {
	case 1:
		addr = (unsigned long) ptr;
		shift = (3 ^ (addr & 3)) << 3;
		addr ^= addr & 3;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
			: "d" ((old & 0xff) << shift),
			  "d" ((new & 0xff) << shift),
			  "d" (~(0xff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 2:
		addr = (unsigned long) ptr;
		shift = (2 ^ (addr & 2)) << 3;
		addr ^= addr & 2;
		asm volatile(
			"	l	%0,%2\n"
			"0:	nr	%0,%5\n"
			"	lr	%1,%0\n"
			"	or	%0,%3\n"
			"	or	%1,%4\n"
			"	cs	%0,%1,%2\n"
			"	jnl	1f\n"
			"	xr	%1,%0\n"
			"	nr	%1,%5\n"
			"	jnz	0b\n"
			"1:"
			: "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
			: "d" ((old & 0xffff) << shift),
			  "d" ((new & 0xffff) << shift),
			  "d" (~(0xffff << shift))
			: "memory", "cc");
		return prev >> shift;
	case 4:
		asm volatile(
			"	cs	%0,%3,%1\n"
			: "=&d" (prev), "=Q" (*(int *) ptr)
			: "0" (old), "d" (new), "Q" (*(int *) ptr)
			: "memory", "cc");
		return prev;
#ifdef CONFIG_64BIT
	case 8:
		asm volatile(
			"	csg	%0,%3,%1\n"
			: "=&d" (prev), "=Q" (*(long *) ptr)
			: "0" (old), "d" (new), "Q" (*(long *) ptr)
			: "memory", "cc");
		return prev;
#endif /* CONFIG_64BIT */
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr, o, n)					\
({								\
	__typeof__(*(ptr)) __ret;				\
	__ret = (__typeof__(*(ptr)))				\
		__cmpxchg((ptr), (unsigned long)(o), (unsigned long)(n), \
			  sizeof(*(ptr)));			\
	__ret;							\
})

#ifdef CONFIG_64BIT
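/* With 64-bit longs, cmpxchg() already handles 8-byte operands directly. */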
#define cmpxchg64(ptr, o, n)					\
({								\
	cmpxchg((ptr), (o), (n));				\
})
#else /* CONFIG_64BIT */
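/*
 * On 31-bit kernels a 64-bit compare-and-swap uses COMPARE DOUBLE AND SWAP
 * (CDS), which operates on an even/odd pair of 32-bit registers; the
 * register_pair union maps a 64-bit value onto such a pair.
 */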
static inline unsigned long long __cmpxchg64(void *ptr,
					     unsigned long long old,
					     unsigned long long new)
{
	register_pair rp_old = {.pair = old};
	register_pair rp_new = {.pair = new};

	asm volatile(
		"	cds	%0,%2,%1"
		: "+&d" (rp_old), "+Q" (*(unsigned long long *) ptr)
		: "d" (rp_new)
		: "memory", "cc");
	return rp_old.pair;
}

#define cmpxchg64(ptr, o, n)					\
({								\
	__typeof__(*(ptr)) __ret;				\
	__ret = (__typeof__(*(ptr)))				\
		__cmpxchg64((ptr),				\
			    (unsigned long long)(o),		\
			    (unsigned long long)(n));		\
	__ret;							\
})
#endif /* CONFIG_64BIT */

#include <asm-generic/cmpxchg-local.h>

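/*
 * s390 has no cheaper CPU-local variant of compare-and-swap, so the
 * natively supported sizes simply reuse the fully atomic __cmpxchg();
 * any other size falls back to the generic, interrupt-disabling local
 * implementation.
 */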
static inline unsigned long __cmpxchg_local(void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	switch (size) {
	case 1:
	case 2:
	case 4:
#ifdef CONFIG_64BIT
	case 8:
#endif
		return __cmpxchg(ptr, old, new, size);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}

	return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU.  Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)				\
({								\
	__typeof__(*(ptr)) __ret;				\
	__ret = (__typeof__(*(ptr)))				\
		__cmpxchg_local((ptr), (unsigned long)(o),	\
				(unsigned long)(n), sizeof(*(ptr))); \
	__ret;							\
})

#define cmpxchg64_local(ptr, o, n)	cmpxchg64((ptr), (o), (n))

#endif /* __ASM_CMPXCHG_H */