#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

/*
 * Copyright 1999,2009 IBM Corp.
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *	      Denis Joseph Barrow,
 *	      Arnd Bergmann <arndb@de.ibm.com>,
 *
 * Atomic operations that C can't guarantee us.
 * Useful for resource counting etc.
 * s390 uses 'Compare And Swap' for atomicity in SMP environment.
 *
 */

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)	{ (i) }

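/*
 * __CS_LOOP implements a read-modify-write on a 32-bit counter:
 * load the old value, apply op_string to a copy, then COMPARE AND
 * SWAP the result back.  If another CPU changed the counter in the
 * meantime, CS fails and the loop retries with the freshly loaded value.
 */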
#define __CS_LOOP(ptr, op_val, op_string) ({				\
	int old_val, new_val;						\
	asm volatile(							\
		"	l	%0,%2\n"				\
		"0:	lr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	cs	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=Q" (((atomic_t *)(ptr))->counter)			\
		: "d" (op_val), "Q" (((atomic_t *)(ptr))->counter)	\
		: "cc", "memory");					\
	new_val;							\
})

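/*
 * Aligned 32-bit loads and stores are atomic on s390, so atomic_read
 * and atomic_set need only a plain L/ST; no interlocked update is
 * required for them.
 */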
static inline int atomic_read(const atomic_t *v)
{
	int c;

	asm volatile(
		"	l	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}

static inline void atomic_set(atomic_t *v, int i)
{
	asm volatile(
		"	st	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}

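/*
 * All read-modify-write operations below are built on __CS_LOOP.
 * COMPARE AND SWAP is a serializing instruction, so they also act as
 * full memory barriers.
 */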
static inline int atomic_add_return(int i, atomic_t *v)
{
	return __CS_LOOP(v, i, "ar");
}
#define atomic_add(_i, _v)		atomic_add_return(_i, _v)
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add_return(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return __CS_LOOP(v, i, "sr");
}
#define atomic_sub(_i, _v)		atomic_sub_return(_i, _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub_return(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)

static inline void atomic_clear_mask(unsigned long mask, atomic_t *v)
{
	__CS_LOOP(v, ~mask, "nr");
}

static inline void atomic_set_mask(unsigned long mask, atomic_t *v)
{
	__CS_LOOP(v, mask, "or");
}

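/*
 * atomic_xchg is built on the generic xchg(); atomic_cmpxchg maps
 * directly onto a single CS: if *v still contains the expected old
 * value it is replaced by new, otherwise it is left alone.  Either
 * way the previous contents of the counter are returned.
 */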
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	asm volatile(
		"	cs	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
	return old;
}

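/*
 * __atomic_add_unless: add a to *v unless the counter is already u.
 * The cmpxchg loop retries until it either observes u or installs the
 * updated value; the value seen before the update (or u) is returned.
 */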
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

#undef __CS_LOOP

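/*
 * 64-bit counters.  On 64-bit kernels (CONFIG_64BIT) the same pattern
 * as above is used with the 64-bit instructions LG/CSG.  On 31-bit
 * kernels atomic64_t is emulated with an even/odd register pair and
 * COMPARE DOUBLE AND SWAP (CDS).
 */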
#define ATOMIC64_INIT(i)  { (i) }

#ifdef CONFIG_64BIT

#define __CSG_LOOP(ptr, op_val, op_string) ({				\
	long long old_val, new_val;					\
	asm volatile(							\
		"	lg	%0,%2\n"				\
		"0:	lgr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	csg	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=Q" (((atomic64_t *)(ptr))->counter)			\
		: "d" (op_val), "Q" (((atomic64_t *)(ptr))->counter)	\
		: "cc", "memory");					\
	new_val;							\
})

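/*
 * As with the 32-bit variants, aligned doubleword loads and stores are
 * atomic on 64-bit machines, so atomic64_read and atomic64_set are
 * plain LG/STG.
 */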
static inline long long atomic64_read(const atomic64_t *v)
{
	long long c;

	asm volatile(
		"	lg	%0,%1\n"
		: "=d" (c) : "Q" (v->counter));
	return c;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
	asm volatile(
		"	stg	%1,%0\n"
		: "=Q" (v->counter) : "d" (i));
}

static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	return __CSG_LOOP(v, i, "agr");
}

static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	return __CSG_LOOP(v, i, "sgr");
}

static inline void atomic64_clear_mask(unsigned long mask, atomic64_t *v)
{
	__CSG_LOOP(v, ~mask, "ngr");
}

static inline void atomic64_set_mask(unsigned long mask, atomic64_t *v)
{
	__CSG_LOOP(v, mask, "ogr");
}

#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

static inline long long atomic64_cmpxchg(atomic64_t *v,
					  long long old, long long new)
{
	asm volatile(
		"	csg	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
	return old;
}

#undef __CSG_LOOP

#else /* CONFIG_64BIT */

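/*
 * 31-bit emulation: the 64-bit register instructions are not available
 * to a 31-bit kernel, so a 64-bit counter is kept in an even/odd
 * register pair.  The pair is read and written with LM/STM, and all
 * read-modify-write operations are built on CDS (compare double and
 * swap), either directly or via the atomic64_cmpxchg() loops below.
 */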
typedef struct {
	long long counter;
} atomic64_t;

static inline long long atomic64_read(const atomic64_t *v)
{
	register_pair rp;

	asm volatile(
		"	lm	%0,%N0,%1"
		: "=&d" (rp) : "Q" (v->counter));
	return rp.pair;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
	register_pair rp = {.pair = i};

	asm volatile(
		"	stm	%1,%N1,%0"
		: "=Q" (v->counter) : "d" (rp));
}

static inline long long atomic64_xchg(atomic64_t *v, long long new)
{
	register_pair rp_new = {.pair = new};
	register_pair rp_old;

	asm volatile(
		"	lm	%0,%N0,%1\n"
		"0:	cds	%0,%2,%1\n"
		"	jl	0b\n"
		: "=&d" (rp_old), "=Q" (v->counter)
		: "d" (rp_new), "Q" (v->counter)
		: "cc");
	return rp_old.pair;
}

static inline long long atomic64_cmpxchg(atomic64_t *v,
					  long long old, long long new)
{
	register_pair rp_old = {.pair = old};
	register_pair rp_new = {.pair = new};

	asm volatile(
		"	cds	%0,%2,%1"
		: "+&d" (rp_old), "=Q" (v->counter)
		: "d" (rp_new), "Q" (v->counter)
		: "cc");
	return rp_old.pair;
}

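/*
 * The remaining 31-bit operations need no inline assembly: they are
 * plain retry loops on top of atomic64_read()/atomic64_cmpxchg().
 */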
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	long long old, new;

	do {
		old = atomic64_read(v);
		new = old + i;
	} while (atomic64_cmpxchg(v, old, new) != old);
	return new;
}

static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	long long old, new;

	do {
		old = atomic64_read(v);
		new = old - i;
	} while (atomic64_cmpxchg(v, old, new) != old);
	return new;
}

static inline void atomic64_set_mask(unsigned long long mask, atomic64_t *v)
{
	long long old, new;

	do {
		old = atomic64_read(v);
		new = old | mask;
	} while (atomic64_cmpxchg(v, old, new) != old);
}

static inline void atomic64_clear_mask(unsigned long long mask, atomic64_t *v)
{
	long long old, new;

	do {
		old = atomic64_read(v);
		new = old & ~mask;
	} while (atomic64_cmpxchg(v, old, new) != old);
}

#endif /* CONFIG_64BIT */

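/*
 * atomic64_add_unless: add a to *v unless the counter is already u.
 * Returns non-zero if the addition was performed.
 */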
static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	long long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic64_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

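/*
 * atomic64_dec_if_positive: decrement the counter unless that would
 * make it negative.  Returns the new value, or a negative value if
 * the counter was not decremented.
 */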
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long c, old, dec;

	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg(v, c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}

#define atomic64_add(_i, _v)		atomic64_add_return(_i, _v)
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)
#define atomic64_sub(_i, _v)		atomic64_sub_return(_i, _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)

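/*
 * Ordering hooks used by common code around atomic_inc()/atomic_dec();
 * defined as full SMP memory barriers here.
 */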
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

#endif /* __ARCH_S390_ATOMIC__ */