/*
 * Generic C implementation of atomic counter operations. Usable on
 * UP systems only. Do not include in machine independent code.
 *
 * Originally implemented for MN10300.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

/*
 * atomic_$op() - $op integer to atomic variable
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v. Does not imply a memory barrier; use
 * smp_mb__{before,after}_atomic() where ordering is required.
 */

/*
 * atomic_$op_return() - $op integer to atomic variable and return the result
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v and returns the result. Implies a full memory
 * barrier.
 */

#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */

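/*
 * The generated operations below follow the standard cmpxchg() retry loop:
 * read the current counter value, compute the new value, and attempt to
 * install it with cmpxchg(). If another CPU changed the counter in the
 * meantime, cmpxchg() returns the value it actually found and the loop
 * retries with that value until the exchange succeeds.
 */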
#define ATOMIC_OP(op, c_op) \
static inline void atomic_##op(int i, atomic_t *v) \
{ \
        int c, old; \
\
        c = v->counter; \
        while ((old = cmpxchg(&v->counter, c, c c_op i)) != c) \
                c = old; \
}

#define ATOMIC_OP_RETURN(op, c_op) \
static inline int atomic_##op##_return(int i, atomic_t *v) \
{ \
        int c, old; \
\
        c = v->counter; \
        while ((old = cmpxchg(&v->counter, c, c c_op i)) != c) \
                c = old; \
\
        return c c_op i; \
}

#else

#include <linux/irqflags.h>

#define ATOMIC_OP(op, c_op) \
static inline void atomic_##op(int i, atomic_t *v) \
{ \
        unsigned long flags; \
\
        raw_local_irq_save(flags); \
        v->counter = v->counter c_op i; \
        raw_local_irq_restore(flags); \
}

#define ATOMIC_OP_RETURN(op, c_op) \
static inline int atomic_##op##_return(int i, atomic_t *v) \
{ \
        unsigned long flags; \
        int ret; \
\
        raw_local_irq_save(flags); \
        ret = (v->counter = v->counter c_op i); \
        raw_local_irq_restore(flags); \
\
        return ret; \
}

#endif /* CONFIG_SMP */

#ifndef atomic_add_return
ATOMIC_OP_RETURN(add, +)
#endif

#ifndef atomic_sub_return
ATOMIC_OP_RETURN(sub, -)
#endif

#ifndef atomic_clear_mask
ATOMIC_OP(and, &)
#define atomic_clear_mask(i, v) atomic_and(~(i), (v))
#endif

#ifndef atomic_set_mask
#define CONFIG_ARCH_HAS_ATOMIC_OR
ATOMIC_OP(or, |)
#define atomic_set_mask(i, v)   atomic_or((i), (v))
#endif

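/*
 * Illustrative sketch, not part of the generic atomic API: using the mask
 * helpers defined above to manage a word of status flags. The helper names
 * and the flag bit below are hypothetical examples.
 */
static inline void example_mark_busy(atomic_t *flags)
{
        /* atomically set bit 0 in *flags; other bits are left untouched */
        atomic_set_mask(0x01, flags);
}

static inline void example_clear_busy(atomic_t *flags)
{
        /* atomically clear bit 0 in *flags; other bits are left untouched */
        atomic_clear_mask(0x01, flags);
}
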
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)  { (i) }

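/*
 * Illustrative sketch (hypothetical name): ATOMIC_INIT() is intended for
 * compile-time initialisation of an atomic_t, e.g. a statically allocated
 * counter starting at zero.
 */
static atomic_t __example_counter = ATOMIC_INIT(0);     /* example only */
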
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#ifndef atomic_read
#define atomic_read(v)  ACCESS_ONCE((v)->counter)
#endif

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) (((v)->counter) = (i))

#include <linux/irqflags.h>

static inline int atomic_add_negative(int i, atomic_t *v)
{
        return atomic_add_return(i, v) < 0;
}

static inline void atomic_add(int i, atomic_t *v)
{
        atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
        atomic_sub_return(i, v);
}

static inline void atomic_inc(atomic_t *v)
{
        atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
        atomic_sub_return(1, v);
}

#define atomic_dec_return(v)            atomic_sub_return(1, (v))
#define atomic_inc_return(v)            atomic_add_return(1, (v))

#define atomic_sub_and_test(i, v)       (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)          (atomic_dec_return(v) == 0)
#define atomic_inc_and_test(v)          (atomic_inc_return(v) == 0)

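/*
 * Illustrative sketch (hypothetical helpers): the classic reference counting
 * idiom built from the operations above. The caller frees the object once
 * example_put() reports that the last reference was dropped.
 */
static inline void example_get(atomic_t *refcount)
{
        atomic_inc(refcount);           /* take a reference */
}

static inline int example_put(atomic_t *refcount)
{
        /* non-zero means the count hit zero and the object may be freed */
        return atomic_dec_and_test(refcount);
}
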
#define atomic_xchg(ptr, v)             (xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)     (cmpxchg(&((v)->counter), (old), (new)))

static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
                c = old;
        return c;
}

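/*
 * Illustrative sketch (hypothetical helper): __atomic_add_unless() is the
 * building block for "increment unless zero" style operations. The helper
 * below takes a reference only if at least one reference is still held and
 * returns non-zero on success.
 */
static inline int example_get_unless_zero(atomic_t *refcount)
{
        /* the old value is returned; the add happened only if it was not 0 */
        return __atomic_add_unless(refcount, 1, 0) != 0;
}
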
#endif /* __ASM_GENERIC_ATOMIC_H */