#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

/*
 * Deliberately never defined anywhere: an xchg()/cmpxchg() on an
 * operand of an unsupported size becomes a link-time error instead
 * of silently non-atomic code.
 */
extern void __xchg_wrong_size(void);
extern void __cmpxchg_wrong_size(void);

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 */
#define __xchg(x, ptr, size)						\
({									\
	__typeof(*(ptr)) __x = (x);					\
	switch (size) {							\
	case 1:								\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile("xchgb %0,%1"				\
			     : "=q" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case 2:								\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile("xchgw %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case 4:								\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile("xchgl %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case 8:								\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile("xchgq %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__xchg_wrong_size();					\
	}								\
	__x;								\
})

#define xchg(ptr, v)							\
	__xchg((v), (ptr), sizeof(*ptr))

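/*
 * Usage sketch (illustrative, not part of the original header): a
 * minimal xchg()-based hand-off lock, assuming a hypothetical
 * "my_lock" word.  xchg() implies a full memory barrier.
 *
 *	static unsigned long my_lock;
 *
 *	while (xchg(&my_lock, 1UL) != 0UL)
 *		cpu_relax();		// old value was 1: keep spinning
 *	// ... critical section ...
 *	xchg(&my_lock, 0UL);		// release the lock
 */
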
/*
 * On x86-64 a naturally aligned 64-bit store is atomic, so a plain
 * volatile store is sufficient here.
 */
static inline void set_64bit(volatile u64 *ptr, u64 val)
{
	*ptr = val;
}

#define __HAVE_ARCH_CMPXCHG 1

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case 1:								\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile(lock "cmpxchgb %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "q" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case 2:								\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile(lock "cmpxchgw %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case 4:								\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile(lock "cmpxchgl %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case 8:								\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile(lock "cmpxchgq %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})

#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

/* Always locked, even on !CONFIG_SMP: also syncs with external agents
 * (e.g. a hypervisor). */
#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

/* No lock prefix: atomic only with respect to the current CPU. */
#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")

#define cmpxchg(ptr, old, new)						\
	__cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define sync_cmpxchg(ptr, old, new)					\
	__sync_cmpxchg((ptr), (old), (new), sizeof(*ptr))

#define cmpxchg_local(ptr, old, new)					\
	__cmpxchg_local((ptr), (old), (new), sizeof(*ptr))

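/*
 * Usage sketch (illustrative only): the canonical cmpxchg() retry
 * loop, here bumping a hypothetical u64 counter without a lock.
 *
 *	static u64 counter;
 *
 *	u64 old, new;
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 */
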
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})

#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})

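/*
 * cmpxchg16b compares rdx:rax against the 16-byte operand at %2 and,
 * if equal, stores rcx:rbx there; ZF reports the outcome and setz
 * copies it into __ret.  __junk only absorbs the rdx output so the
 * compiler knows that register is clobbered.
 */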
#define cmpxchg16b(ptr, o1, o2, n1, n2)					\
({									\
	char __ret;							\
	__typeof__(o2) __junk;						\
	__typeof__(*(ptr)) __old1 = (o1);				\
	__typeof__(o2) __old2 = (o2);					\
	__typeof__(*(ptr)) __new1 = (n1);				\
	__typeof__(o2) __new2 = (n2);					\
	asm volatile(LOCK_PREFIX "cmpxchg16b %2;setz %1"		\
		     : "=d"(__junk), "=a"(__ret), "+m" (*ptr)		\
		     : "b"(__new1), "c"(__new2),			\
		       "a"(__old1), "d"(__old2));			\
	__ret; })

#define cmpxchg16b_local(ptr, o1, o2, n1, n2)				\
({									\
	char __ret;							\
	__typeof__(o2) __junk;						\
	__typeof__(*(ptr)) __old1 = (o1);				\
	__typeof__(o2) __old2 = (o2);					\
	__typeof__(*(ptr)) __new1 = (n1);				\
	__typeof__(o2) __new2 = (n2);					\
	asm volatile("cmpxchg16b %2;setz %1"				\
		     : "=d"(__junk), "=a"(__ret), "+m" (*ptr)		\
		     : "b"(__new1), "c"(__new2),			\
		       "a"(__old1), "d"(__old2));			\
	__ret; })

#define cmpxchg_double(ptr, o1, o2, n1, n2)				\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	VM_BUG_ON((unsigned long)(ptr) % 16);				\
	cmpxchg16b((ptr), (o1), (o2), (n1), (n2));			\
})

#define cmpxchg_double_local(ptr, o1, o2, n1, n2)			\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	VM_BUG_ON((unsigned long)(ptr) % 16);				\
	cmpxchg16b_local((ptr), (o1), (o2), (n1), (n2));		\
})

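/*
 * Usage sketch (illustrative only): updating a pointer/sequence pair
 * atomically with cmpxchg_double().  The "pair" struct is
 * hypothetical; the real requirements are two adjacent 8-byte words,
 * 16-byte alignment, and a CPU with CX16
 * (check system_has_cmpxchg_double() first).
 *
 *	struct pair {
 *		void *ptr;
 *		unsigned long seq;
 *	} __attribute__((aligned(16)));
 *
 *	static struct pair p;
 *
 *	if (cmpxchg_double(&p.ptr, old_ptr, old_seq,
 *			   new_ptr, old_seq + 1))
 *		success = true;	// both words swapped atomically
 */
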
#define system_has_cmpxchg_double() cpu_has_cx16

#endif /* _ASM_X86_CMPXCHG_64_H */