| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 1 | /* atomic.h: atomic operation emulation for FR-V | 
|  | 2 | * | 
|  | 3 | * For an explanation of how atomic ops work in this arch, see: | 
|  | 4 | *   Documentation/fujitsu/frv/atomic-ops.txt | 
|  | 5 | * | 
|  | 6 | * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved. | 
|  | 7 | * Written by David Howells (dhowells@redhat.com) | 
|  | 8 | * | 
|  | 9 | * This program is free software; you can redistribute it and/or | 
|  | 10 | * modify it under the terms of the GNU General Public License | 
|  | 11 | * as published by the Free Software Foundation; either version | 
|  | 12 | * 2 of the License, or (at your option) any later version. | 
|  | 13 | */ | 
|  | 14 | #ifndef _ASM_ATOMIC_H | 
|  | 15 | #define _ASM_ATOMIC_H | 
|  | 16 |  | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 17 | #include <linux/types.h> | 
|  | 18 | #include <asm/spr-regs.h> | 
|  | 19 |  | 
|  | 20 | #ifdef CONFIG_SMP | 
|  | 21 | #error not SMP safe | 
|  | 22 | #endif | 
|  | 23 |  | 
|  | 24 | /* | 
|  | 25 | * Atomic operations that C can't guarantee us.  Useful for | 
|  | 26 | * resource counting etc.. | 
|  | 27 | * | 
|  | 28 | * We do not have SMP systems, so we don't have to deal with that. | 
|  | 29 | */ | 
|  | 30 |  | 
/* Atomic operations are already serializing on this (non-SMP) arch, so
 * these hooks only need to stop the compiler reordering across them,
 * hence barrier() rather than a real memory-barrier instruction */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
|  | 36 |  | 
/* atomic counter: a plain int wrapped in a struct so the compiler
 * type-checks atomic_t against ordinary integers */
typedef struct {
	int counter;
} atomic_t;

#define ATOMIC_INIT(i)		{ (i) }			/* static initialiser */
#define atomic_read(v)		((v)->counter)		/* plain read, no barrier implied */
#define atomic_set(v, i)	(((v)->counter) = (i))	/* plain write, no barrier implied */
|  | 44 |  | 
|  | 45 | #ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS | 
/*
 * Atomically add i to v->counter, returning the new counter value.
 *
 * Implemented with the FR-V conditional-store sequence: ICC3.Z is set,
 * the LD.P/ORCR pair (atomic with respect to interruption - see
 * Documentation/fujitsu/frv/atomic-ops.txt) loads the counter and arms
 * CC3, and CST.P stores the sum only if CC3 is still true; CORCC then
 * clears ICC3.Z so the BEQ falls through.  If the sequence was broken,
 * ICC3.Z remains set and the loop retries from label 0.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long val;

	asm("0:						\n"
	    "	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
	    "	ckeq		icc3,cc7		\n"
	    "	ld.p		%M0,%1			\n"	/* LD.P/ORCR must be atomic */
	    "	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
	    "	add%I2		%1,%2,%1		\n"
	    "	cst.p		%1,%M0		,cc3,#1	\n"
	    "	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* clear ICC3.Z if store happens */
	    "	beq		icc3,#0,0b		\n"
	    : "+U"(v->counter), "=&r"(val)
	    : "NPr"(i)
	    : "memory", "cc7", "cc3", "icc3"
	    );

	return val;
}
|  | 66 |  | 
/*
 * Atomically subtract i from v->counter, returning the new counter value.
 *
 * Same conditional-store retry loop as atomic_add_return(), with SUB in
 * place of ADD; the loop repeats until the CST.P succeeds unbroken.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long val;

	asm("0:						\n"
	    "	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
	    "	ckeq		icc3,cc7		\n"
	    "	ld.p		%M0,%1			\n"	/* LD.P/ORCR must be atomic */
	    "	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
	    "	sub%I2		%1,%2,%1		\n"
	    "	cst.p		%1,%M0		,cc3,#1	\n"
	    "	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* clear ICC3.Z if store happens */
	    "	beq		icc3,#0,0b		\n"
	    : "+U"(v->counter), "=&r"(val)
	    : "NPr"(i)
	    : "memory", "cc7", "cc3", "icc3"
	    );

	return val;
}
|  | 87 |  | 
|  | 88 | #else | 
|  | 89 |  | 
/* out-of-line versions, selected by CONFIG_FRV_OUTOFLINE_ATOMIC_OPS */
extern int atomic_add_return(int i, atomic_t *v);
extern int atomic_sub_return(int i, atomic_t *v);
|  | 92 |  | 
|  | 93 | #endif | 
|  | 94 |  | 
|  | 95 | static inline int atomic_add_negative(int i, atomic_t *v) | 
|  | 96 | { | 
|  | 97 | return atomic_add_return(i, v) < 0; | 
|  | 98 | } | 
|  | 99 |  | 
|  | 100 | static inline void atomic_add(int i, atomic_t *v) | 
|  | 101 | { | 
|  | 102 | atomic_add_return(i, v); | 
|  | 103 | } | 
|  | 104 |  | 
|  | 105 | static inline void atomic_sub(int i, atomic_t *v) | 
|  | 106 | { | 
|  | 107 | atomic_sub_return(i, v); | 
|  | 108 | } | 
|  | 109 |  | 
|  | 110 | static inline void atomic_inc(atomic_t *v) | 
|  | 111 | { | 
|  | 112 | atomic_add_return(1, v); | 
|  | 113 | } | 
|  | 114 |  | 
|  | 115 | static inline void atomic_dec(atomic_t *v) | 
|  | 116 | { | 
|  | 117 | atomic_sub_return(1, v); | 
|  | 118 | } | 
|  | 119 |  | 
/* inc/dec variants that return the new value */
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

/* true (non-zero) if the counter is zero after the operation */
#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
|  | 126 |  | 
|  | 127 | #ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS | 
/*
 * Atomically update *v to (*v & ~mask), returning the value of *v as it
 * stood before the operation.  Uses the same LD.P/ORCR/CST.P retry loop
 * as the arithmetic ops above; note the mask is complemented before
 * being fed to the AND.
 */
static inline
unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v)
{
	unsigned long old, tmp;

	asm volatile(
	    "0:						\n"
	    "	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
	    "	ckeq		icc3,cc7		\n"
	    "	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
	    "	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
	    "	and%I3		%1,%3,%2		\n"
	    "	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
	    "	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
	    "	beq		icc3,#0,0b		\n"
	    : "+U"(*v), "=&r"(old), "=r"(tmp)
	    : "NPr"(~mask)
	    : "memory", "cc7", "cc3", "icc3"
	    );

	return old;
}
|  | 150 |  | 
/*
 * Atomically update *v to (*v | mask), returning the value of *v as it
 * stood before the operation.
 */
static inline
unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v)
{
	unsigned long old, tmp;

	asm volatile(
	    "0:						\n"
	    "	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
	    "	ckeq		icc3,cc7		\n"
	    "	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
	    "	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
	    "	or%I3		%1,%3,%2		\n"
	    "	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
	    "	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
	    "	beq		icc3,#0,0b		\n"
	    : "+U"(*v), "=&r"(old), "=r"(tmp)
	    : "NPr"(mask)
	    : "memory", "cc7", "cc3", "icc3"
	    );

	return old;
}
|  | 173 |  | 
/*
 * Atomically update *v to (*v ^ mask), returning the value of *v as it
 * stood before the operation.
 */
static inline
unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v)
{
	unsigned long old, tmp;

	asm volatile(
	    "0:						\n"
	    "	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
	    "	ckeq		icc3,cc7		\n"
	    "	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
	    "	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
	    "	xor%I3		%1,%3,%2		\n"
	    "	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
	    "	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
	    "	beq		icc3,#0,0b		\n"
	    : "+U"(*v), "=&r"(old), "=r"(tmp)
	    : "NPr"(mask)
	    : "memory", "cc7", "cc3", "icc3"
	    );

	return old;
}
|  | 196 |  | 
|  | 197 | #else | 
|  | 198 |  | 
/* out-of-line versions of the mask operations, selected by
 * CONFIG_FRV_OUTOFLINE_ATOMIC_OPS; same contracts as the inline forms */
extern unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v);
extern unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v);
extern unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v);
|  | 202 |  | 
|  | 203 | #endif | 
|  | 204 |  | 
/* atomically clear/set mask bits in *v; note both forms also return the
 * pre-update value, courtesy of the underlying test_and ops */
#define atomic_clear_mask(mask, v)	atomic_test_and_ANDNOT_mask((mask), (v))
#define atomic_set_mask(mask, v)	atomic_test_and_OR_mask((mask), (v))
|  | 207 |  | 
|  | 208 | /*****************************************************************************/ | 
|  | 209 | /* | 
|  | 210 | * exchange value with memory | 
|  | 211 | */ | 
|  | 212 | #ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS | 
|  | 213 |  | 
/*
 * Atomically exchange *ptr with x, returning the previous contents of
 * *ptr.  Only 32-bit objects are handled (via the FR-V "swap"
 * instruction); any other size executes "break" (trapping) and the
 * result defaults to 0.
 */
#define xchg(ptr, x)								\
({										\
	__typeof__(ptr) __xg_ptr = (ptr);					\
	__typeof__(*(ptr)) __xg_orig;						\
										\
	switch (sizeof(__xg_orig)) {						\
	case 4:									\
		asm volatile(							\
			"swap%I0 %M0,%1"					\
			: "+m"(*__xg_ptr), "=r"(__xg_orig)			\
			: "1"(x)						\
			: "memory"						\
			);							\
		break;								\
										\
	default:								\
		__xg_orig = (__typeof__(__xg_orig))0;				\
		asm volatile("break");						\
		break;								\
	}									\
										\
	__xg_orig;								\
})
|  | 237 |  | 
|  | 238 | #else | 
|  | 239 |  | 
extern uint32_t __xchg_32(uint32_t i, volatile void *v);

/* out-of-line variant of xchg(): same contract, the 32-bit case is
 * delegated to __xchg_32(); other sizes trap via "break" */
#define xchg(ptr, x)										\
({												\
	__typeof__(ptr) __xg_ptr = (ptr);							\
	__typeof__(*(ptr)) __xg_orig;								\
												\
	switch (sizeof(__xg_orig)) {								\
	case 4: __xg_orig = (__typeof__(*(ptr))) __xchg_32((uint32_t) x, __xg_ptr);	break;	\
	default:										\
		__xg_orig = (__typeof__(__xg_orig))0;						\
		asm volatile("break");								\
		break;										\
	}											\
	__xg_orig;										\
})
|  | 256 |  | 
|  | 257 | #endif | 
|  | 258 |  | 
/* test-and-set: atomically store 1 in *ptr, returning the old value */
#define tas(ptr) (xchg((ptr), 1))
|  | 260 |  | 
|  | 261 | /*****************************************************************************/ | 
|  | 262 | /* | 
|  | 263 | * compare and conditionally exchange value with memory | 
|  | 264 | * - if (*ptr == test) then orig = *ptr; *ptr = test; | 
|  | 265 | * - if (*ptr != test) then orig = *ptr; | 
|  | 266 | */ | 
|  | 267 | #ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS | 
|  | 268 |  | 
/*
 * Atomically compare *ptr against test and, if they are equal, store new
 * in *ptr; in all cases the pre-operation value of *ptr is produced.
 * Only 32-bit objects are handled; any other size executes "break"
 * (trapping) and the result defaults to 0.
 *
 * The asm is the usual LD.P/ORCR/CST.P retry loop, with a SUBcc/BNE pair
 * skipping the conditional store when the comparison fails.
 */
#define cmpxchg(ptr, test, new)							\
({										\
	__typeof__(ptr) __xg_ptr = (ptr);					\
	__typeof__(*(ptr)) __xg_orig, __xg_tmp;					\
	__typeof__(*(ptr)) __xg_test = (test);					\
	__typeof__(*(ptr)) __xg_new = (new);					\
										\
	switch (sizeof(__xg_orig)) {						\
	case 4:									\
		asm volatile(							\
			"0:						\n"	\
			"	orcc		gr0,gr0,gr0,icc3	\n"	\
			"	ckeq		icc3,cc7		\n"	\
			"	ld.p		%M0,%1			\n"	\
			"	orcr		cc7,cc7,cc3		\n"	\
			"	sub%I4cc	%1,%4,%2,icc0		\n"	\
			"	bne		icc0,#0,1f		\n"	\
			"	cst.p		%3,%M0		,cc3,#1	\n"	\
			"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	\
			"	beq		icc3,#0,0b		\n"	\
			"1:						\n"	\
			: "+U"(*__xg_ptr), "=&r"(__xg_orig), "=&r"(__xg_tmp)	\
			: "r"(__xg_new), "NPr"(__xg_test)			\
			: "memory", "cc7", "cc3", "icc3", "icc0"		\
			);							\
		break;								\
										\
	default:								\
		/* cast matches the xchg() default case and keeps the	\
		 * expansion clean when *ptr has pointer type */	\
		__xg_orig = (__typeof__(__xg_orig))0;			\
		asm volatile("break");						\
		break;								\
	}									\
										\
	__xg_orig;								\
})
|  | 304 |  | 
|  | 305 | #else | 
|  | 306 |  | 
extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);

/* out-of-line variant of cmpxchg(): same contract as the inline form,
 * with the 32-bit case delegated to __cmpxchg_32(); other sizes trap
 * via "break" and the result defaults to 0 */
#define cmpxchg(ptr, test, new)							\
({										\
	__typeof__(ptr) __xg_ptr = (ptr);					\
	__typeof__(*(ptr)) __xg_orig;						\
	__typeof__(*(ptr)) __xg_test = (test);					\
	__typeof__(*(ptr)) __xg_new = (new);					\
										\
	switch (sizeof(__xg_orig)) {						\
	case 4: __xg_orig = __cmpxchg_32(__xg_ptr, __xg_test, __xg_new); break;	\
	default:								\
		/* cast matches the xchg() default case and keeps the	\
		 * expansion clean when *ptr has pointer type */	\
		__xg_orig = (__typeof__(__xg_orig))0;			\
		asm volatile("break");						\
		break;								\
	}									\
										\
	__xg_orig;								\
})
|  | 326 |  | 
|  | 327 | #endif | 
|  | 328 |  | 
/* cmpxchg/xchg applied to the counter embedded in an atomic_t */
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/*
 * Atomically add a to v, so long as v does not currently equal u;
 * produces true (non-zero) if the addition was performed.
 * NOTE: being a macro, the a and u arguments may be evaluated more
 * than once - avoid side effects in them.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})

/* increment v unless it is zero; true if the increment took place */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
|  | 342 |  | 
| Christoph Lameter | d3cb487 | 2006-01-06 00:11:20 -0800 | [diff] [blame] | 343 | #include <asm-generic/atomic.h> | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 344 | #endif /* _ASM_ATOMIC_H */ |