#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <asm/atomic.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i)	atomic_long_set(&(l)->a, (i))

#define local_add(i, l)	atomic_long_add((i), (&(l)->a))
#define local_sub(i, l)	atomic_long_sub((i), (&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)
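
/*
 * Illustrative sketch, not part of this header: a local_t is a counter
 * private to one CPU, manipulated through the wrappers above.  The
 * variable names below are hypothetical examples, not kernel API.
 *
 *	static local_t pkt_count = LOCAL_INIT(0);
 *
 *	local_inc(&pkt_count);
 *	local_add(16, &pkt_count);
 *	printk("%ld packets\n", local_read(&pkt_count));
 */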

/*
 * Same as above, but return the result value
 */
static __inline__ long local_add_return(long i, local_t * l)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
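
/*
 * Illustrative sketch (hypothetical names): unlike local_add(), the
 * caller gets back the value of the counter after the addition.
 *
 *	static local_t bytes_sent = LOCAL_INIT(0);
 *
 *	long total = local_add_return(len, &bytes_sent);
 */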

static __inline__ long local_sub_return(long i, local_t * l)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

/*
 * local_sub_if_positive - conditionally subtract integer from local variable
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically test @l and subtract @i if @l is greater than or equal to @i.
 * The function returns the old value of @l minus @i.
 */
static __inline__ long local_sub_if_positive(long i, local_t * l)
{
	long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_if_positive	\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
			__SC	"%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_if_positive	\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
			__SC	"%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		if (result >= 0)
			l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
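
/*
 * Illustrative sketch (hypothetical names): because the return value is
 * the old count minus @i, a negative result means nothing was taken.
 *
 *	static local_t credits = LOCAL_INIT(8);
 *
 *	if (local_sub_if_positive(3, &credits) < 0)
 *		return -EBUSY;	(fewer than 3 credits were left)
 */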

#define local_cmpxchg(l, o, n) \
	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)					\
({									\
	long c, old;							\
	c = local_read(l);						\
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old;						\
	c != (u);							\
})
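
/*
 * Illustrative sketch (hypothetical variable): take a reference only
 * while the count has not already dropped to zero.
 *
 *	static local_t refs = LOCAL_INIT(1);
 *
 *	if (!local_add_unless(&refs, 1, 0))
 *		return -ENOENT;	(the object is already being torn down)
 */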
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)
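
/*
 * Illustrative sketch (hypothetical names): the usual "last put frees
 * the object" pattern, built on local_dec_and_test().
 *
 *	local_inc(&obj->users);
 *	...
 *	if (local_dec_and_test(&obj->users))
 *		free_obj(obj);	(free_obj is an assumed example helper)
 */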

/*
 * local_dec_if_positive - decrement by 1 if old value positive
 * @l: pointer of type local_t
 */
#define local_dec_if_positive(l)	local_sub_if_positive(1, l)

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a variable, not an address.
 */

#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i, l)	((l)->a.counter += (i))
#define __local_sub(i, l)	((l)->a.counter -= (i))

/* Need to disable preemption for the cpu local counters; otherwise we could
   still access a variable of a previous CPU in a non-atomic way. */
#define cpu_local_wrap_v(l)		\
	({ local_t res__;		\
	   preempt_disable();		\
	   res__ = (l);			\
	   preempt_enable();		\
	   res__; })
#define cpu_local_wrap(l)		\
	({ preempt_disable();		\
	   l;				\
	   preempt_enable(); })

#define cpu_local_read(l)	cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
#define cpu_local_set(l, i)	cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
#define cpu_local_inc(l)	cpu_local_wrap(local_inc(&__get_cpu_var(l)))
#define cpu_local_dec(l)	cpu_local_wrap(local_dec(&__get_cpu_var(l)))
#define cpu_local_add(i, l)	cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
#define cpu_local_sub(i, l)	cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))

#define __cpu_local_inc(l)	cpu_local_inc(l)
#define __cpu_local_dec(l)	cpu_local_dec(l)
#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))
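
/*
 * Illustrative sketch (hypothetical per-cpu variable): the cpu_local_*
 * helpers take the per-cpu variable itself, not its address.
 *
 *	static DEFINE_PER_CPU(local_t, hits) = LOCAL_INIT(0);
 *
 *	cpu_local_inc(hits);
 *	count = cpu_local_read(hits);	(count is an assumed long)
 */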

#endif /* _ARCH_MIPS_LOCAL_H */