/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/barrier.h>


#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }
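/*
 * A minimal usage sketch (illustrative only, not part of this header):
 *
 *	static atomic_t   nr_users    = ATOMIC_INIT(0);
 *	static atomic64_t total_bytes = ATOMIC64_INIT(0);
 *
 * The *_INIT() forms are for static initialization; use atomic_set()
 * and atomic64_set() below to initialize at run time.
 */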

#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic64_read(v)	READ_ONCE((v)->counter)

#define atomic_set(v,i)		WRITE_ONCE(((v)->counter), (i))
#define atomic64_set(v,i)	WRITE_ONCE(((v)->counter), (i))

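/*
 * Apart from fetchadd, which only handles a few small immediates, ia64
 * implements read-modify-write atomics as compare-and-exchange retry
 * loops: read the counter, compute the new value, and try to install
 * it with an acquire-semantics cmpxchg, retrying if another CPU moved
 * the counter in the meantime.  ATOMIC_OP() returns the new value,
 * ATOMIC_FETCH_OP() the old one.
 */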
#define ATOMIC_OP(op, c_op)						\
static __inline__ int							\
ia64_atomic_##op (int i, atomic_t *v)					\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return new;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static __inline__ int							\
ia64_atomic_fetch_##op (int i, atomic_t *v)				\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return old;							\
}

#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op, c_op)						\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(add, +)
ATOMIC_OPS(sub, -)
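/*
 * Expansion sketch (illustrative only): ATOMIC_OPS(add, +) above
 * generates
 *
 *	static __inline__ int ia64_atomic_add(int i, atomic_t *v);
 *	static __inline__ int ia64_atomic_fetch_add(int i, atomic_t *v);
 *
 * and ATOMIC_OPS(sub, -) likewise generates ia64_atomic_sub() and
 * ia64_atomic_fetch_sub().
 */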

#ifdef __OPTIMIZE__
#define __ia64_atomic_const(i)	__builtin_constant_p(i) ?		\
	((i) == 1 || (i) == 4 || (i) == 8 || (i) == 16 ||		\
	 (i) == -1 || (i) == -4 || (i) == -8 || (i) == -16) : 0

#define atomic_add_return(i, v)						\
({									\
	int __i = (i);							\
	static const int __ia64_atomic_p = __ia64_atomic_const(i);	\
	__ia64_atomic_p ? ia64_fetch_and_add(__i, &(v)->counter) :	\
				ia64_atomic_add(__i, v);		\
})

#define atomic_sub_return(i, v)						\
({									\
	int __i = (i);							\
	static const int __ia64_atomic_p = __ia64_atomic_const(i);	\
	__ia64_atomic_p ? ia64_fetch_and_add(-__i, &(v)->counter) :	\
				ia64_atomic_sub(__i, v);		\
})
#else
#define atomic_add_return(i, v)	ia64_atomic_add(i, v)
#define atomic_sub_return(i, v)	ia64_atomic_sub(i, v)
#endif
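/*
 * Why these particular constants: the ia64 fetchadd instruction only
 * accepts the immediates -16, -8, -4, -1, 1, 4, 8 and 16, so the fast
 * path is taken only for compile-time constants in that set; anything
 * else falls back to the cmpxchg loop.  The "static const" test above
 * relies on the optimizer folding it away, hence the __OPTIMIZE__
 * guard around the whole block.
 */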

#define atomic_fetch_add(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_add(__ia64_aar_i, v);		\
})

#define atomic_fetch_sub(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic_fetch_sub(__ia64_asr_i, v);		\
})

ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

#define atomic_and(i,v)	(void)ia64_atomic_fetch_and(i,v)
#define atomic_or(i,v)	(void)ia64_atomic_fetch_or(i,v)
#define atomic_xor(i,v)	(void)ia64_atomic_fetch_xor(i,v)

#define atomic_fetch_and(i,v)	ia64_atomic_fetch_and(i,v)
#define atomic_fetch_or(i,v)	ia64_atomic_fetch_or(i,v)
#define atomic_fetch_xor(i,v)	ia64_atomic_fetch_xor(i,v)
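/*
 * fetchadd can only add, so the bitwise ops always take the cmpxchg
 * loop generated by ATOMIC_FETCH_OP() above; the void-returning
 * atomic_and()/atomic_or()/atomic_xor() simply discard the fetched
 * old value.
 */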

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP

#define ATOMIC64_OP(op, c_op)						\
static __inline__ long							\
ia64_atomic64_##op (__s64 i, atomic64_t *v)				\
{									\
	__s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return new;							\
}

#define ATOMIC64_FETCH_OP(op, c_op)					\
static __inline__ long							\
ia64_atomic64_fetch_##op (__s64 i, atomic64_t *v)			\
{									\
	__s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return old;							\
}

#define ATOMIC64_OPS(op, c_op)						\
	ATOMIC64_OP(op, c_op)						\
	ATOMIC64_FETCH_OP(op, c_op)

ATOMIC64_OPS(add, +)
ATOMIC64_OPS(sub, -)
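/*
 * The 64-bit generators mirror the 32-bit ones above exactly, with
 * __s64 operands and an 8-byte cmpxchg; ATOMIC64_OPS(add, +) and
 * ATOMIC64_OPS(sub, -) produce ia64_atomic64_add()/ia64_atomic64_sub()
 * and their fetch_ variants.
 */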

#define atomic64_add_return(i,v)					\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic64_add(__ia64_aar_i, v);			\
})

#define atomic64_sub_return(i,v)					\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic64_sub(__ia64_asr_i, v);			\
})

#define atomic64_fetch_add(i,v)						\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))	\
		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_add(__ia64_aar_i, v);		\
})

#define atomic64_fetch_sub(i,v)						\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
		: ia64_atomic64_fetch_sub(__ia64_asr_i, v);		\
})

ATOMIC64_FETCH_OP(and, &)
ATOMIC64_FETCH_OP(or, |)
ATOMIC64_FETCH_OP(xor, ^)

#define atomic64_and(i,v)	(void)ia64_atomic64_fetch_and(i,v)
#define atomic64_or(i,v)	(void)ia64_atomic64_fetch_or(i,v)
#define atomic64_xor(i,v)	(void)ia64_atomic64_fetch_xor(i,v)

#define atomic64_fetch_and(i,v)	ia64_atomic64_fetch_and(i,v)
#define atomic64_fetch_or(i,v)	ia64_atomic64_fetch_or(i,v)
#define atomic64_fetch_xor(i,v)	ia64_atomic64_fetch_xor(i,v)

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) \
	(cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
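/*
 * Illustrative use (not itself part of this header): __atomic_add_unless()
 * returns the value the counter held before the attempted add, so the
 * generic atomic_inc_not_zero(v) can be built on it as
 *
 *	__atomic_add_unless(v, 1, 0) != 0
 *
 * i.e. increment unless the counter is zero, reporting whether the
 * increment happened.
 */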


static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long c, old, dec;
	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic64_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
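/*
 * Semantics note: atomic64_dec_if_positive() only stores the
 * decremented value when it is still >= 0 and returns it; a negative
 * return means the counter was already zero (or negative) and was left
 * unchanged.  Unlike __atomic_add_unless() above, atomic64_add_unless()
 * returns a boolean: true if the add was performed.
 */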

/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static __inline__ long
atomic64_add_negative (__s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
#define atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)
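/*
 * Note that the *_and_test() helpers test the *new* value: e.g.
 * atomic_dec_and_test() returns true when the decrement took the
 * counter to zero, the usual "last reference dropped" check in
 * refcounting code.
 */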

#define atomic_add(i,v)			(void)atomic_add_return((i), (v))
#define atomic_sub(i,v)			(void)atomic_sub_return((i), (v))
#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))

#define atomic64_add(i,v)		(void)atomic64_add_return((i), (v))
#define atomic64_sub(i,v)		(void)atomic64_sub_return((i), (v))
#define atomic64_inc(v)			atomic64_add(1, (v))
#define atomic64_dec(v)			atomic64_sub(1, (v))

#endif /* _ASM_IA64_ATOMIC_H */