/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */
| 6 | |
| 7 | #ifndef __ASM_CMPXCHG_H |
| 8 | #define __ASM_CMPXCHG_H |
| 9 | |
| 10 | #include <linux/types.h> |
| 11 | |
| 12 | extern void __xchg_called_with_bad_pointer(void); |
| 13 | |
| 14 | static inline unsigned long __xchg(unsigned long x, void *ptr, int size) |
| 15 | { |
| 16 | unsigned long addr, old; |
| 17 | int shift; |
| 18 | |
| 19 | switch (size) { |
| 20 | case 1: |
| 21 | addr = (unsigned long) ptr; |
| 22 | shift = (3 ^ (addr & 3)) << 3; |
| 23 | addr ^= addr & 3; |
| 24 | asm volatile( |
| 25 | " l %0,%4\n" |
| 26 | "0: lr 0,%0\n" |
| 27 | " nr 0,%3\n" |
| 28 | " or 0,%2\n" |
| 29 | " cs %0,0,%4\n" |
| 30 | " jl 0b\n" |
| 31 | : "=&d" (old), "=Q" (*(int *) addr) |
| 32 | : "d" (x << shift), "d" (~(255 << shift)), |
| 33 | "Q" (*(int *) addr) : "memory", "cc", "0"); |
| 34 | return old >> shift; |
| 35 | case 2: |
| 36 | addr = (unsigned long) ptr; |
| 37 | shift = (2 ^ (addr & 2)) << 3; |
| 38 | addr ^= addr & 2; |
| 39 | asm volatile( |
| 40 | " l %0,%4\n" |
| 41 | "0: lr 0,%0\n" |
| 42 | " nr 0,%3\n" |
| 43 | " or 0,%2\n" |
| 44 | " cs %0,0,%4\n" |
| 45 | " jl 0b\n" |
| 46 | : "=&d" (old), "=Q" (*(int *) addr) |
| 47 | : "d" (x << shift), "d" (~(65535 << shift)), |
| 48 | "Q" (*(int *) addr) : "memory", "cc", "0"); |
| 49 | return old >> shift; |
| 50 | case 4: |
| 51 | asm volatile( |
| 52 | " l %0,%3\n" |
| 53 | "0: cs %0,%2,%3\n" |
| 54 | " jl 0b\n" |
| 55 | : "=&d" (old), "=Q" (*(int *) ptr) |
| 56 | : "d" (x), "Q" (*(int *) ptr) |
| 57 | : "memory", "cc"); |
| 58 | return old; |
| 59 | #ifdef CONFIG_64BIT |
| 60 | case 8: |
| 61 | asm volatile( |
| 62 | " lg %0,%3\n" |
| 63 | "0: csg %0,%2,%3\n" |
| 64 | " jl 0b\n" |
| 65 | : "=&d" (old), "=m" (*(long *) ptr) |
| 66 | : "d" (x), "Q" (*(long *) ptr) |
| 67 | : "memory", "cc"); |
| 68 | return old; |
| 69 | #endif /* CONFIG_64BIT */ |
| 70 | } |
| 71 | __xchg_called_with_bad_pointer(); |
| 72 | return x; |
| 73 | } |
| 74 | |
/*
 * xchg(ptr, x) - atomically store x in *ptr and return the previous
 * contents of *ptr, cast back to the pointed-to type.
 */
#define xchg(ptr, x)						  \
({								  \
	__typeof__(*(ptr)) __ret;				  \
	__ret = (__typeof__(*(ptr)))				  \
		__xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
	__ret;							  \
})
| 82 | |
| 83 | /* |
| 84 | * Atomic compare and exchange. Compare OLD with MEM, if identical, |
| 85 | * store NEW in MEM. Return the initial value in MEM. Success is |
| 86 | * indicated by comparing RETURN with OLD. |
| 87 | */ |
| 88 | |
| 89 | #define __HAVE_ARCH_CMPXCHG |
| 90 | |
| 91 | extern void __cmpxchg_called_with_bad_pointer(void); |
| 92 | |
| 93 | static inline unsigned long __cmpxchg(void *ptr, unsigned long old, |
| 94 | unsigned long new, int size) |
| 95 | { |
| 96 | unsigned long addr, prev, tmp; |
| 97 | int shift; |
| 98 | |
| 99 | switch (size) { |
| 100 | case 1: |
| 101 | addr = (unsigned long) ptr; |
| 102 | shift = (3 ^ (addr & 3)) << 3; |
| 103 | addr ^= addr & 3; |
| 104 | asm volatile( |
| 105 | " l %0,%2\n" |
| 106 | "0: nr %0,%5\n" |
| 107 | " lr %1,%0\n" |
| 108 | " or %0,%3\n" |
| 109 | " or %1,%4\n" |
| 110 | " cs %0,%1,%2\n" |
| 111 | " jnl 1f\n" |
| 112 | " xr %1,%0\n" |
| 113 | " nr %1,%5\n" |
| 114 | " jnz 0b\n" |
| 115 | "1:" |
| 116 | : "=&d" (prev), "=&d" (tmp), "=Q" (*(int *) ptr) |
| 117 | : "d" (old << shift), "d" (new << shift), |
| 118 | "d" (~(255 << shift)), "Q" (*(int *) ptr) |
| 119 | : "memory", "cc"); |
| 120 | return prev >> shift; |
| 121 | case 2: |
| 122 | addr = (unsigned long) ptr; |
| 123 | shift = (2 ^ (addr & 2)) << 3; |
| 124 | addr ^= addr & 2; |
| 125 | asm volatile( |
| 126 | " l %0,%2\n" |
| 127 | "0: nr %0,%5\n" |
| 128 | " lr %1,%0\n" |
| 129 | " or %0,%3\n" |
| 130 | " or %1,%4\n" |
| 131 | " cs %0,%1,%2\n" |
| 132 | " jnl 1f\n" |
| 133 | " xr %1,%0\n" |
| 134 | " nr %1,%5\n" |
| 135 | " jnz 0b\n" |
| 136 | "1:" |
| 137 | : "=&d" (prev), "=&d" (tmp), "=Q" (*(int *) ptr) |
| 138 | : "d" (old << shift), "d" (new << shift), |
| 139 | "d" (~(65535 << shift)), "Q" (*(int *) ptr) |
| 140 | : "memory", "cc"); |
| 141 | return prev >> shift; |
| 142 | case 4: |
| 143 | asm volatile( |
| 144 | " cs %0,%3,%1\n" |
| 145 | : "=&d" (prev), "=Q" (*(int *) ptr) |
| 146 | : "0" (old), "d" (new), "Q" (*(int *) ptr) |
| 147 | : "memory", "cc"); |
| 148 | return prev; |
| 149 | #ifdef CONFIG_64BIT |
| 150 | case 8: |
| 151 | asm volatile( |
| 152 | " csg %0,%3,%1\n" |
| 153 | : "=&d" (prev), "=Q" (*(long *) ptr) |
| 154 | : "0" (old), "d" (new), "Q" (*(long *) ptr) |
| 155 | : "memory", "cc"); |
| 156 | return prev; |
| 157 | #endif /* CONFIG_64BIT */ |
| 158 | } |
| 159 | __cmpxchg_called_with_bad_pointer(); |
| 160 | return old; |
| 161 | } |
| 162 | |
/*
 * cmpxchg(ptr, o, n) - if *ptr == o, store n in *ptr.  Returns the value
 * found in *ptr, cast to the pointed-to type; callers test success by
 * comparing the return value against o.
 */
#define cmpxchg(ptr, o, n)						\
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
				       (unsigned long)(n), sizeof(*(ptr))))
| 166 | |
Heiko Carstens | 54eaae3 | 2011-03-23 10:16:06 +0100 | [diff] [blame] | 167 | #ifdef CONFIG_64BIT |
| 168 | #define cmpxchg64(ptr, o, n) \ |
| 169 | ({ \ |
| 170 | BUILD_BUG_ON(sizeof(*(ptr)) != 8); \ |
| 171 | cmpxchg((ptr), (o), (n)); \ |
| 172 | }) |
| 173 | #else /* CONFIG_64BIT */ |
| 174 | static inline unsigned long long __cmpxchg64(void *ptr, |
| 175 | unsigned long long old, |
| 176 | unsigned long long new) |
| 177 | { |
| 178 | register_pair rp_old = {.pair = old}; |
| 179 | register_pair rp_new = {.pair = new}; |
| 180 | |
| 181 | asm volatile( |
| 182 | " cds %0,%2,%1" |
| 183 | : "+&d" (rp_old), "=Q" (ptr) |
| 184 | : "d" (rp_new), "Q" (ptr) |
| 185 | : "cc"); |
| 186 | return rp_old.pair; |
| 187 | } |
| 188 | #define cmpxchg64(ptr, o, n) \ |
| 189 | ((__typeof__(*(ptr)))__cmpxchg64((ptr), \ |
| 190 | (unsigned long long)(o), \ |
| 191 | (unsigned long long)(n))) |
| 192 | #endif /* CONFIG_64BIT */ |
| 193 | |
Heiko Carstens | a2c9dbe | 2011-03-23 10:16:05 +0100 | [diff] [blame] | 194 | #include <asm-generic/cmpxchg-local.h> |
| 195 | |
/*
 * __cmpxchg_local - compare-and-exchange that only needs to be atomic
 * with respect to the current CPU.  The CS/CSG based __cmpxchg() is
 * fully atomic anyway, so the hardware-supported sizes simply reuse it;
 * any other size falls back to the generic local implementation.
 */
static inline unsigned long __cmpxchg_local(void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	switch (size) {
	case 1:
	case 2:
	case 4:
#ifdef CONFIG_64BIT
	case 8:
#endif
		return __cmpxchg(ptr, old, new, size);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}
	/* unreachable "return old;" removed: every switch path returns */
}
| 214 | |
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
			(unsigned long)(n), sizeof(*(ptr))))

/* The fully atomic cmpxchg64() trivially satisfies the local contract. */
#define cmpxchg64_local(ptr, o, n)	cmpxchg64((ptr), (o), (n))
Heiko Carstens | a2c9dbe | 2011-03-23 10:16:05 +0100 | [diff] [blame] | 224 | |
| 225 | #endif /* __ASM_CMPXCHG_H */ |