#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>

/*
 *  include/asm-s390/atomic.h
 *
 *  S390 version
 *    Copyright (C) 1999-2005 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 *               Denis Joseph Barrow,
 *               Arnd Bergmann (arndb@de.ibm.com)
 *
 *  Derived from "include/asm-i386/bitops.h"
 *    Copyright (C) 1992, Linus Torvalds
 *
 */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 * S390 uses 'Compare And Swap' for atomicity in an SMP environment.
 */
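
/*
 * Typical usage (a minimal sketch; the counter and the helper
 * function are hypothetical, not part of this header):
 *
 *	static atomic_t pending = ATOMIC_INIT(0);
 *
 *	atomic_inc(&pending);
 *	...
 *	if (atomic_dec_and_test(&pending))
 *		wake_up_pending_waiters();	(hypothetical helper)
 */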

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__

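/*
 * gcc 3.3 and newer understand the "Q" operand constraint (a memory
 * operand addressed via base register plus short displacement, with
 * no index register), so the compare-and-swap loops below come in two
 * flavours: a "Q"-constraint version for newer compilers, and a
 * fallback that passes the address explicitly in an address ("a")
 * register for older ones.
 */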
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

#define __CS_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	asm volatile(							\
		"	l	%0,%2\n"				\
		"0:	lr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	cs	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=Q" (((atomic_t *)(ptr))->counter)			\
		: "d" (op_val), "Q" (((atomic_t *)(ptr))->counter)	\
		: "cc", "memory");					\
	new_val;							\
})
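
/*
 * __CS_LOOP is the classic compare-and-swap retry loop: load the old
 * value, derive the new value with op_string, then try to commit it
 * with CS.  If another CPU modified the counter in between, CS fails
 * (reloading %0 with the current value), "jl" branches back, and the
 * loop retries.  The fallback variant below differs only in how the
 * memory operand is addressed.  Roughly equivalent C (a sketch, not
 * what the compiler actually emits):
 *
 *	do {
 *		old_val = ptr->counter;
 *		new_val = old_val <op> op_val;
 *	} while (cmpxchg(&ptr->counter, old_val, new_val) != old_val);
 */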

#else /* __GNUC__ */

#define __CS_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	asm volatile(							\
		"	l	%0,0(%3)\n"				\
		"0:	lr	%1,%0\n"				\
		op_string "	%1,%4\n"				\
		"	cs	%0,%1,0(%3)\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=m" (((atomic_t *)(ptr))->counter)			\
		: "a" (ptr), "d" (op_val),				\
		  "m" (((atomic_t *)(ptr))->counter)			\
		: "cc", "memory");					\
	new_val;							\
})

#endif /* __GNUC__ */

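/*
 * atomic_read() and atomic_set() are plain accesses; an aligned
 * 32-bit load or store is atomic on s390 by itself.  barrier() is
 * only a compiler barrier here, keeping gcc from caching the counter
 * in a register or reordering the access.
 */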
static inline int atomic_read(const atomic_t *v)
{
	barrier();
	return v->counter;
}

static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
	barrier();
}

static inline int atomic_add_return(int i, atomic_t *v)
{
	return __CS_LOOP(v, i, "ar");
}
#define atomic_add(_i, _v)		atomic_add_return(_i, _v)
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add_return(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return __CS_LOOP(v, i, "sr");
}
#define atomic_sub(_i, _v)		atomic_sub_return(_i, _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub_return(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)

static inline void atomic_clear_mask(unsigned long mask, atomic_t *v)
{
	__CS_LOOP(v, ~mask, "nr");
}

static inline void atomic_set_mask(unsigned long mask, atomic_t *v)
{
	__CS_LOOP(v, mask, "or");
}

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
	asm volatile(
		"	cs	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
#else /* __GNUC__ */
	asm volatile(
		"	cs	%0,%3,0(%2)"
		: "+d" (old), "=m" (v->counter)
		: "a" (v), "d" (new), "m" (v->counter)
		: "cc", "memory");
#endif /* __GNUC__ */
	return old;
}

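/*
 * atomic_add_unless() is the canonical cmpxchg loop: read the
 * counter, give up if it already holds the forbidden value 'u',
 * otherwise try to install c + a.  Under contention atomic_cmpxchg()
 * returns whatever value another CPU installed, and the loop retries
 * with that.
 */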
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
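
/*
 * atomic_inc_not_zero() is the usual building block for "try to grab
 * a reference" schemes, e.g. (a sketch only; the object and its
 * refcount field are hypothetical):
 *
 *	if (!atomic_inc_not_zero(&obj->refcount))
 *		return NULL;	(object already being torn down)
 */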

#undef __CS_LOOP

#ifdef __s390x__
#define ATOMIC64_INIT(i)  { (i) }

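/*
 * The 64-bit variants below mirror the 32-bit ones one for one,
 * using the 64-bit instructions LG/LGR/CSG (and AGR/SGR/NGR/OGR for
 * the operations) instead of L/LR/CS.
 */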
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

#define __CSG_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	asm volatile(							\
		"	lg	%0,%2\n"				\
		"0:	lgr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	csg	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=Q" (((atomic64_t *)(ptr))->counter)			\
		: "d" (op_val), "Q" (((atomic64_t *)(ptr))->counter)	\
		: "cc", "memory");					\
	new_val;							\
})

#else /* __GNUC__ */

#define __CSG_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	asm volatile(							\
		"	lg	%0,0(%3)\n"				\
		"0:	lgr	%1,%0\n"				\
		op_string "	%1,%4\n"				\
		"	csg	%0,%1,0(%3)\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=m" (((atomic64_t *)(ptr))->counter)			\
		: "a" (ptr), "d" (op_val),				\
		  "m" (((atomic64_t *)(ptr))->counter)			\
		: "cc", "memory");					\
	new_val;							\
})

#endif /* __GNUC__ */

static inline long long atomic64_read(const atomic64_t *v)
{
	barrier();
	return v->counter;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
	v->counter = i;
	barrier();
}

static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	return __CSG_LOOP(v, i, "agr");
}
#define atomic64_add(_i, _v)		atomic64_add_return(_i, _v)
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)

static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	return __CSG_LOOP(v, i, "sgr");
}
#define atomic64_sub(_i, _v)		atomic64_sub_return(_i, _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)

static inline void atomic64_clear_mask(unsigned long mask, atomic64_t *v)
{
	__CSG_LOOP(v, ~mask, "ngr");
}

static inline void atomic64_set_mask(unsigned long mask, atomic64_t *v)
{
	__CSG_LOOP(v, mask, "ogr");
}

#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

static inline long long atomic64_cmpxchg(atomic64_t *v,
					  long long old, long long new)
{
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
	asm volatile(
		"	csg	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
#else /* __GNUC__ */
	asm volatile(
		"	csg	%0,%3,0(%2)"
		: "+d" (old), "=m" (v->counter)
		: "a" (v), "d" (new), "m" (v->counter)
		: "cc", "memory");
#endif /* __GNUC__ */
	return old;
}

static inline int atomic64_add_unless(atomic64_t *v,
				      long long a, long long u)
{
	long long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic64_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#undef __CSG_LOOP
#endif /* __s390x__ */
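
/*
 * COMPARE AND SWAP already serializes execution on s390, so defining
 * these barriers as full smp_mb() is presumably more conservative
 * than the CS-based atomics above strictly require.
 */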
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

#include <asm-generic/atomic-long.h>
#endif /* __KERNEL__ */
#endif /* __ARCH_S390_ATOMIC__ */