#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

#define xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), \
						  (ptr), sizeof(*(ptr))))

#define __xg(x) ((volatile long *)(x))

/* An aligned 64-bit store is naturally atomic on x86-64, so a plain write suffices. */
static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
{
	*ptr = val;
}

#define _set_64bit set_64bit

/*
 * Note: no "lock" prefix even on SMP: xchg always implies a lock anyway.
 * Note 2: xchg has a side effect, so the volatile attribute is necessary;
 * without it the primitive would generally be invalid, since *ptr is an
 * output argument. --ANK
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	switch (size) {
	case 1:
		asm volatile("xchgb %b0,%1"
			     : "=q" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	case 2:
		asm volatile("xchgw %w0,%1"
			     : "=r" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	case 4:
		asm volatile("xchgl %k0,%1"
			     : "=r" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	case 8:
		asm volatile("xchgq %0,%1"
			     : "=r" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	}
	return x;
}
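
/*
 * Illustrative usage sketch (not part of this header): xchg() atomically
 * swaps a new value into *ptr and returns the previous contents, so the
 * caller below is the only one to observe the old value.  The names
 * "pending" and "handle_work" are hypothetical.
 *
 *	static unsigned long pending;
 *
 *	void consume_pending(void)
 *	{
 *		unsigned long old = xchg(&pending, 0);
 *
 *		if (old)
 *			handle_work(old);
 *	}
 */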

/*
 * Atomic compare and exchange.  Compare OLD with MEM; if they are
 * identical, store NEW in MEM.  Return the initial value in MEM.
 * Success is indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG 1

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		asm volatile(LOCK_PREFIX "cmpxchgb %b1,%2"
			     : "=a"(prev)
			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile(LOCK_PREFIX "cmpxchgw %w1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile(LOCK_PREFIX "cmpxchgl %k1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 8:
		asm volatile(LOCK_PREFIX "cmpxchgq %1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}

/*
 * Non-locked variant of __cmpxchg(): no LOCK prefix, so it is atomic only
 * with respect to the current CPU (e.g. vs. interrupts), not across CPUs.
 */
static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		asm volatile("cmpxchgb %b1,%2"
			     : "=a"(prev)
			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile("cmpxchgw %w1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile("cmpxchgl %k1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 8:
		asm volatile("cmpxchgq %1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}

#define cmpxchg(ptr, o, n)						\
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
				       (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
					     (unsigned long)(n),	\
					     sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
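
/*
 * Illustrative usage sketch (not part of this header): the usual cmpxchg()
 * retry loop, here for a lock-free increment.  Success is detected by
 * comparing the returned value with the expected old value.  The name
 * "counter" is hypothetical.
 *
 *	static unsigned long counter;
 *
 *	void counter_inc(void)
 *	{
 *		unsigned long old, prev;
 *
 *		do {
 *			old = counter;
 *			prev = cmpxchg(&counter, old, old + 1);
 *		} while (prev != old);
 *	}
 */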

#endif /* __ASM_CMPXCHG_H */