#ifndef _ASM_POWERPC_CMPXCHG_H_
#define _ASM_POWERPC_CMPXCHG_H_

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
#include <linux/bug.h>

#ifdef __BIG_ENDIAN
#define BITOFF_CAL(size, off)	((sizeof(u32) - size - off) * BITS_PER_BYTE)
#else
#define BITOFF_CAL(size, off)	(off * BITS_PER_BYTE)
#endif
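
/*
 * Illustrative example: for a u8 at byte offset 1 within its aligned u32
 * word, BITOFF_CAL(1, 1) is (4 - 1 - 1) * 8 = 16 on big-endian and
 * 1 * 8 = 8 on little-endian, i.e. the shift needed to line the byte up
 * inside the 32-bit word that lwarx/stwcx. operate on below.
 */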

#define XCHG_GEN(type, sfx, cl)					\
static inline u32 __xchg_##type##sfx(volatile void *p, u32 val)	\
{								\
	unsigned int prev, prev_mask, tmp, bitoff, off;		\
								\
	off = (unsigned long)p % sizeof(u32);			\
	bitoff = BITOFF_CAL(sizeof(type), off);			\
	p -= off;						\
	val <<= bitoff;						\
	prev_mask = (u32)(type)-1 << bitoff;			\
								\
	__asm__ __volatile__(					\
"1:	lwarx	%0,0,%3\n"					\
"	andc	%1,%0,%5\n"					\
"	or	%1,%1,%4\n"					\
	PPC405_ERR77(0,%3)					\
"	stwcx.	%1,0,%3\n"					\
"	bne-	1b\n"						\
	: "=&r" (prev), "=&r" (tmp), "+m" (*(u32*)p)		\
	: "r" (p), "r" (val), "r" (prev_mask)			\
	: "cc", cl);						\
								\
	return prev >> bitoff;					\
}
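
/*
 * XCHG_GEN emulates a byte/halfword xchg with a word-sized lwarx/stwcx.
 * loop on the containing, aligned u32: the old word is loaded, the bytes
 * of interest are cleared with andc using prev_mask and replaced with the
 * shifted new value, then the whole word is conditionally stored.
 * PPC405_ERR77() expands to the PowerPC 405 erratum #77 workaround ahead
 * of the store-conditional when that workaround is configured, and to
 * nothing otherwise.  XCHG_GEN(u8, _local, "memory"), for example,
 * generates __xchg_u8_local().
 */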

#define CMPXCHG_GEN(type, sfx, br, br2, cl)			\
static inline							\
u32 __cmpxchg_##type##sfx(volatile void *p, u32 old, u32 new)	\
{								\
	unsigned int prev, prev_mask, tmp, bitoff, off;		\
								\
	off = (unsigned long)p % sizeof(u32);			\
	bitoff = BITOFF_CAL(sizeof(type), off);			\
	p -= off;						\
	old <<= bitoff;						\
	new <<= bitoff;						\
	prev_mask = (u32)(type)-1 << bitoff;			\
								\
	__asm__ __volatile__(					\
	br							\
"1:	lwarx	%0,0,%3\n"					\
"	and	%1,%0,%6\n"					\
"	cmpw	0,%1,%4\n"					\
"	bne-	2f\n"						\
"	andc	%1,%0,%6\n"					\
"	or	%1,%1,%5\n"					\
	PPC405_ERR77(0,%3)					\
"	stwcx.	%1,0,%3\n"					\
"	bne-	1b\n"						\
	br2							\
	"\n"							\
"2:"								\
	: "=&r" (prev), "=&r" (tmp), "+m" (*(u32*)p)		\
	: "r" (p), "r" (old), "r" (new), "r" (prev_mask)	\
	: "cc", cl);						\
								\
	return prev >> bitoff;					\
}
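
/*
 * CMPXCHG_GEN follows the same word-sized ll/sc scheme as XCHG_GEN, but
 * first masks out the bytes of interest (and) and compares them against
 * 'old' before attempting the store.  'br' and 'br2' are the entry and
 * exit barriers for the given ordering variant (both empty for the _local
 * and _relaxed forms), and 'cl' is the extra clobber to declare.
 */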

/*
 * Atomic exchange
 *
 * Changes the memory location '*p' to be val and returns
 * the previous value stored there.
 */

XCHG_GEN(u8, _local, "memory");
XCHG_GEN(u8, _relaxed, "cc");
XCHG_GEN(u16, _local, "memory");
XCHG_GEN(u16, _relaxed, "cc");

static __always_inline unsigned long
__xchg_u32_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%3,0,%2 \n\
	bne-	1b"
	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__xchg_u32_relaxed(u32 *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2\n"
	PPC405_ERR77(0, %2)
"	stwcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc");

	return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__xchg_u64_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2 \n"
	PPC405_ERR77(0,%2)
"	stdcx.	%3,0,%2 \n\
	bne-	1b"
	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__xchg_u64_relaxed(u64 *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2\n"
	PPC405_ERR77(0, %2)
"	stdcx.	%3,0,%2\n"
"	bne-	1b"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc");

	return prev;
}
#endif

static __always_inline unsigned long
__xchg_local(void *ptr, unsigned long x, unsigned int size)
{
	switch (size) {
	case 1:
		return __xchg_u8_local(ptr, x);
	case 2:
		return __xchg_u16_local(ptr, x);
	case 4:
		return __xchg_u32_local(ptr, x);
#ifdef CONFIG_PPC64
	case 8:
		return __xchg_u64_local(ptr, x);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_local");
	return x;
}

static __always_inline unsigned long
__xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
{
	switch (size) {
	case 1:
		return __xchg_u8_relaxed(ptr, x);
	case 2:
		return __xchg_u16_relaxed(ptr, x);
	case 4:
		return __xchg_u32_relaxed(ptr, x);
#ifdef CONFIG_PPC64
	case 8:
		return __xchg_u64_relaxed(ptr, x);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_relaxed");
	return x;
}

#define xchg_local(ptr,x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_local((ptr),			\
			(unsigned long)_x_, sizeof(*(ptr)));		\
})

#define xchg_relaxed(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_relaxed((ptr),			\
			(unsigned long)_x_, sizeof(*(ptr)));		\
})
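
/*
 * Illustrative use (hypothetical caller): swap in a new state word with no
 * ordering guarantee and keep whatever was there before:
 *
 *	static u32 state;
 *	u32 old_state;
 *
 *	old_state = xchg_relaxed(&state, 1);
 *
 * xchg_local() performs the same operation but is only atomic with respect
 * to the local CPU, e.g. for per-cpu data.
 */
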
/*
 * Compare and exchange - if *p == old, set it to new,
 * and return the old value of *p.
 */

CMPXCHG_GEN(u8, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
CMPXCHG_GEN(u8, _local, , , "memory");
CMPXCHG_GEN(u8, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
CMPXCHG_GEN(u8, _relaxed, , , "cc");
CMPXCHG_GEN(u16, , PPC_ATOMIC_ENTRY_BARRIER, PPC_ATOMIC_EXIT_BARRIER, "memory");
CMPXCHG_GEN(u16, _local, , , "memory");
CMPXCHG_GEN(u16, _acquire, , PPC_ACQUIRE_BARRIER, "memory");
CMPXCHG_GEN(u16, _relaxed, , , "cc");

static __always_inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
	cmpw	0,%0,%3\n\
	bne-	2f\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%4,0,%2\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
			unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
	cmpw	0,%0,%3\n\
	bne-	2f\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%4,0,%2\n\
	bne-	1b"
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u32_relaxed(u32 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32_relaxed\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
	PPC405_ERR77(0, %2)
"	stwcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc");

	return prev;
}

/*
 * The cmpxchg family has no ordering guarantee when the compare part fails,
 * so implementing cmpxchg() and cmpxchg_acquire() in assembly lets us avoid
 * superfluous barriers on that path.  We don't do the same for
 * cmpxchg_release(), because that would put a barrier in the middle of the
 * ll/sc loop, which is probably a bad idea; for example, it might make the
 * conditional store more likely to fail.
 */
static __always_inline unsigned long
__cmpxchg_u32_acquire(u32 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32_acquire\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
	PPC405_ERR77(0, %2)
"	stwcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ACQUIRE_BARRIER
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

#ifdef CONFIG_PPC64
static __always_inline unsigned long
__cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
	cmpd	0,%0,%3\n\
	bne-	2f\n\
	stdcx.	%4,0,%2\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
			unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
	cmpd	0,%0,%3\n\
	bne-	2f\n\
	stdcx.	%4,0,%2\n\
	bne-	1b"
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_relaxed(u64 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64_relaxed\n"
"	cmpd	0,%0,%3\n"
"	bne-	2f\n"
"	stdcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc");

	return prev;
}

static __always_inline unsigned long
__cmpxchg_u64_acquire(u64 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64_acquire\n"
"	cmpd	0,%0,%3\n"
"	bne-	2f\n"
"	stdcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ACQUIRE_BARRIER
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
#endif

static __always_inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
	  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8(ptr, old, new);
	case 2:
		return __cmpxchg_u16(ptr, old, new);
	case 4:
		return __cmpxchg_u32(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg");
	return old;
}

static __always_inline unsigned long
__cmpxchg_local(void *ptr, unsigned long old, unsigned long new,
	  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8_local(ptr, old, new);
	case 2:
		return __cmpxchg_u16_local(ptr, old, new);
	case 4:
		return __cmpxchg_u32_local(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_local(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_local");
	return old;
}

static __always_inline unsigned long
__cmpxchg_relaxed(void *ptr, unsigned long old, unsigned long new,
		  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8_relaxed(ptr, old, new);
	case 2:
		return __cmpxchg_u16_relaxed(ptr, old, new);
	case 4:
		return __cmpxchg_u32_relaxed(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_relaxed(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_relaxed");
	return old;
}

static __always_inline unsigned long
__cmpxchg_acquire(void *ptr, unsigned long old, unsigned long new,
		  unsigned int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8_acquire(ptr, old, new);
	case 2:
		return __cmpxchg_u16_acquire(ptr, old, new);
	case 4:
		return __cmpxchg_u32_acquire(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_acquire(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_acquire");
	return old;
}
#define cmpxchg(ptr, o, n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,	\
				       (unsigned long)_n_, sizeof(*(ptr))); \
})


#define cmpxchg_local(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_,	\
				       (unsigned long)_n_, sizeof(*(ptr))); \
})

#define cmpxchg_relaxed(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_relaxed((ptr),			\
			(unsigned long)_o_, (unsigned long)_n_,		\
			sizeof(*(ptr)));				\
})

#define cmpxchg_acquire(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_acquire((ptr),			\
			(unsigned long)_o_, (unsigned long)_n_,		\
			sizeof(*(ptr)));				\
})
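
/*
 * Illustrative use (hypothetical caller): claim a flag word only if it is
 * currently clear, with acquire ordering when the claim succeeds:
 *
 *	static u32 flag;
 *
 *	if (cmpxchg_acquire(&flag, 0, 1) == 0) {
 *		... we observed 0 and installed 1 ...
 *	}
 *
 * As the comment above __cmpxchg_u32_acquire() explains, a failed compare
 * branches straight to the exit label and never executes the acquire
 * barrier, so no ordering is guaranteed on the failure path.
 */
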
#ifdef CONFIG_PPC64
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
#define cmpxchg64_relaxed(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_relaxed((ptr), (o), (n));				\
})
#define cmpxchg64_acquire(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_acquire((ptr), (o), (n));				\
})
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CMPXCHG_H_ */