blob: efe3ec778a586ff707ba372b0b00c91ad1861a79 [file] [log] [blame]
Jeremy Fitzhardingee9826382011-08-18 11:48:06 -07001#ifndef ASM_X86_CMPXCHG_H
2#define ASM_X86_CMPXCHG_H
3
4#include <asm/alternative.h> /* Provides LOCK_PREFIX */
5
6/* Non-existant functions to indicate usage errors at link time. */
7extern void __xchg_wrong_size(void);
8extern void __cmpxchg_wrong_size(void);
9
10/*
11 * Constants for operation sizes. On 32-bit, the 64-bit size it set to
12 * -1 because sizeof will never return -1, thereby making those switch
13 * case statements guaranteeed dead code which the compiler will
14 * eliminate, and allowing the "missing symbol in the default case" to
15 * indicate a usage error.
16 */
17#define __X86_CASE_B 1
18#define __X86_CASE_W 2
19#define __X86_CASE_L 4
20#ifdef CONFIG_64BIT
21#define __X86_CASE_Q 8
22#else
23#define __X86_CASE_Q -1 /* sizeof will never return -1 */
24#endif
25
/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 *
 * Atomically exchanges *ptr with x and evaluates to the old value of
 * *ptr. The operand size is selected by the compile-time constant
 * "size"; an unsupported size leaves only the default case live, whose
 * undefined __xchg_wrong_size() symbol turns the misuse into a link
 * error.
 */
#define __xchg(x, ptr, size)						\
({									\
	__typeof(*(ptr)) __x = (x);					\
	switch (size) {							\
	case __X86_CASE_B:						\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile("xchgb %0,%1"				\
			     : "=q" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_W:						\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile("xchgw %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_L:						\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile("xchgl %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_Q:						\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile("xchgq %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__xchg_wrong_size();					\
	}								\
	__x;								\
})
77
/*
 * xchg(ptr, v) - atomically exchange *ptr with v, returning the old
 * value. Operand size is inferred from the pointed-to type.
 *
 * Note: (ptr) must be parenthesized inside sizeof so that an argument
 * such as "p + 1" dereferences the intended object.
 */
#define xchg(ptr, v)							\
	__xchg((v), (ptr), sizeof(*(ptr)))
80
/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 *
 * "lock" is the literal instruction prefix string ("" for the
 * CPU-local variant, LOCK_PREFIX for the SMP-safe one); "size" selects
 * the operand width at compile time, with unsupported sizes diverted
 * to the undefined __cmpxchg_wrong_size() for a link-time error.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case __X86_CASE_B:						\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile(lock "cmpxchgb %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "q" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_W:						\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile(lock "cmpxchgw %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_L:						\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile(lock "cmpxchgl %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_Q:						\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile(lock "cmpxchgq %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})
133
/* SMP-safe cmpxchg: uses LOCK_PREFIX (patched out on UP kernels). */
#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

/* Always-locked cmpxchg, even on UP (e.g. for talking to hypervisors). */
#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

/* CPU-local cmpxchg: no lock prefix, not safe against other CPUs. */
#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")
142
Thomas Gleixner96a388d2007-10-11 11:20:03 +0200143#ifdef CONFIG_X86_32
144# include "cmpxchg_32.h"
145#else
146# include "cmpxchg_64.h"
147#endif
Jeremy Fitzhardingee9826382011-08-18 11:48:06 -0700148
149#ifdef __HAVE_ARCH_CMPXCHG
150#define cmpxchg(ptr, old, new) \
151 __cmpxchg((ptr), (old), (new), sizeof(*ptr))
152
153#define sync_cmpxchg(ptr, old, new) \
154 __sync_cmpxchg((ptr), (old), (new), sizeof(*ptr))
155
156#define cmpxchg_local(ptr, old, new) \
157 __cmpxchg_local((ptr), (old), (new), sizeof(*ptr))
158#endif
159
160#endif /* ASM_X86_CMPXCHG_H */