blob: 9596e7c6196014ede2d37f9f773bb2592c8bf085 [file] [log] [blame]
H. Peter Anvin1965aae2008-10-22 22:26:29 -07001#ifndef _ASM_X86_CMPXCHG_64_H
2#define _ASM_X86_CMPXCHG_64_H
Jeff Dikea436ed92007-05-08 00:35:02 -07003
4#include <asm/alternative.h> /* Provides LOCK_PREFIX */
5
/* View an arbitrary pointer as a pointer to volatile long, so the asm
 * "m" constraints below reference the target memory directly. */
#define __xg(ptr)	((volatile long *)(ptr))
7
/*
 * Store a 64-bit value to *ptr.  On x86-64 a plain store is a single
 * instruction (an aligned movq is atomic), so unlike the 32-bit
 * implementation no cmpxchg loop is needed here.
 */
static inline void set_64bit(volatile u64 *ptr, u64 val)
{
	*ptr = val;
}
12
/*
 * Deliberately never defined anywhere: these are referenced only from the
 * default cases of __xchg()/__raw_cmpxchg() below, so using the macros on
 * an operand of unsupported size turns into a link-time error.
 */
extern void __xchg_wrong_size(void);
extern void __cmpxchg_wrong_size(void);
15
Jeff Dikea436ed92007-05-08 00:35:02 -070016/*
17 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
18 * Note 2: xchg has side effect, so that attribute volatile is necessary,
19 * but generally the primitive is invalid, *ptr is output argument. --ANK
20 */
/*
 * __xchg() - atomically exchange *ptr with x, evaluating to the previous
 * value of *ptr.  Dispatches on the operand size (1/2/4/8 bytes); any
 * other size fails to link via __xchg_wrong_size().  No LOCK prefix is
 * required: xchg with a memory operand is implicitly locked (see the
 * comment above).  The "0" input constraint feeds x in through the same
 * register as the "=q"/"=r" output, and "+m" makes the memory operand an
 * explicit in/out so the compiler cannot cache *ptr around the exchange.
 */
#define __xchg(x, ptr, size)						\
({									\
	__typeof(*(ptr)) __x = (x);					\
	switch (size) {							\
	case 1:								\
		asm volatile("xchgb %b0,%1"				\
			     : "=q" (__x), "+m" (*__xg(ptr))		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	case 2:								\
		asm volatile("xchgw %w0,%1"				\
			     : "=r" (__x), "+m" (*__xg(ptr))		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	case 4:								\
		asm volatile("xchgl %k0,%1"				\
			     : "=r" (__x), "+m" (*__xg(ptr))		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	case 8:								\
		asm volatile("xchgq %0,%1"				\
			     : "=r" (__x), "+m" (*__xg(ptr))		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	default:							\
		__xchg_wrong_size();					\
	}								\
	__x;								\
})
54
/*
 * xchg() - atomically swap v into *ptr; evaluates to the old *ptr.
 *
 * Fix: parenthesize the macro argument inside sizeof.  With the old
 * sizeof(*ptr), an argument such as "p + i" expanded to sizeof(*p + i)
 * — the size of the wrong expression — silently selecting the wrong
 * xchg width.  sizeof(*(ptr)) sizes the actual target object.
 */
#define xchg(ptr, v)							\
	__xchg((v), (ptr), sizeof(*(ptr)))
57
58#define __HAVE_ARCH_CMPXCHG 1
Jeff Dikea436ed92007-05-08 00:35:02 -070059
60/*
61 * Atomic compare and exchange. Compare OLD with MEM, if identical,
62 * store NEW in MEM. Return the initial value in MEM. Success is
63 * indicated by comparing RETURN with OLD.
64 */
/*
 * __raw_cmpxchg() - size-dispatched compare-and-exchange core.
 * "lock" is pasted in front of the instruction mnemonic by the callers
 * below, selecting LOCK_PREFIX, an unconditional "lock; ", or nothing.
 * cmpxchg implicitly uses %rax: "=a"/"0" place old in eax/rax and return
 * the value found in memory there; __new goes in a separate register
 * ("q" for the byte case, where a byte-addressable register is needed).
 * Unsupported sizes become a link error via __cmpxchg_wrong_size().
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case 1:								\
		asm volatile(lock "cmpxchgb %b2,%1"			\
			     : "=a" (__ret), "+m" (*__xg(ptr))		\
			     : "q" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	case 2:								\
		asm volatile(lock "cmpxchgw %w2,%1"			\
			     : "=a" (__ret), "+m" (*__xg(ptr))		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	case 4:								\
		asm volatile(lock "cmpxchgl %k2,%1"			\
			     : "=a" (__ret), "+m" (*__xg(ptr))		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	case 8:								\
		asm volatile(lock "cmpxchgq %2,%1"			\
			     : "=a" (__ret), "+m" (*__xg(ptr))		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})
Jeff Dikea436ed92007-05-08 00:35:02 -0700100
/* SMP-safe variant: uses LOCK_PREFIX from <asm/alternative.h>. */
#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

/* Always emits a literal "lock" prefix, independent of LOCK_PREFIX. */
#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

/* No lock prefix at all: atomic only with respect to the local CPU. */
#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")
Jeremy Fitzhardinge15878c02008-06-25 00:19:10 -0400109
/*
 * cmpxchg() - if *ptr == old, store new into *ptr.  Evaluates to the
 * value previously in *ptr; success is indicated by it comparing equal
 * to old.  SMP-safe (LOCK_PREFIX).
 *
 * Fix applied to all three wrappers: parenthesize the macro argument
 * inside sizeof.  The old sizeof(*ptr) mis-expanded for pointer
 * expressions — e.g. cmpxchg(p + i, o, n) gave sizeof(*p + i) — and
 * could pick the wrong operand width.  sizeof(*(ptr)) is hygienic.
 */
#define cmpxchg(ptr, old, new)						\
	__cmpxchg((ptr), (old), (new), sizeof(*(ptr)))

/* As cmpxchg(), but with an unconditional "lock" prefix. */
#define sync_cmpxchg(ptr, old, new)					\
	__sync_cmpxchg((ptr), (old), (new), sizeof(*(ptr)))

/* As cmpxchg(), but unlocked: atomic only vs. the local CPU. */
#define cmpxchg_local(ptr, old, new)					\
	__cmpxchg_local((ptr), (old), (new), sizeof(*(ptr)))
118
/*
 * 64-bit compare-and-exchange.  On x86-64 the generic cmpxchg() already
 * handles 8-byte operands, so these only add a compile-time check that
 * the target really is 64 bits wide before delegating.
 */
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})

/* As cmpxchg64(), but without the lock prefix (local-CPU only). */
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
Jeff Dikea436ed92007-05-08 00:35:02 -0700130
H. Peter Anvin1965aae2008-10-22 22:26:29 -0700131#endif /* _ASM_X86_CMPXCHG_64_H */