blob: ea486952f778cb323c4072c8aa1cefaea7f858b2 [file] [log] [blame]
Linus Torvalds1da177e2005-04-16 15:20:36 -07001#ifndef __ARCH_S390_ATOMIC__
2#define __ARCH_S390_ATOMIC__
3
Dave Jones5bd1db62006-04-10 22:53:51 -07004#include <linux/compiler.h>
5
Linus Torvalds1da177e2005-04-16 15:20:36 -07006/*
7 * include/asm-s390/atomic.h
8 *
9 * S390 version
Martin Schwidefsky973bd992006-01-06 00:19:07 -080010 * Copyright (C) 1999-2005 IBM Deutschland Entwicklung GmbH, IBM Corporation
Linus Torvalds1da177e2005-04-16 15:20:36 -070011 * Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
12 * Denis Joseph Barrow,
13 * Arnd Bergmann (arndb@de.ibm.com)
14 *
15 * Derived from "include/asm-i386/bitops.h"
16 * Copyright (C) 1992, Linus Torvalds
17 *
18 */
19
20/*
21 * Atomic operations that C can't guarantee us. Useful for
22 * resource counting etc..
 * S390 uses 'Compare And Swap' for atomicity in SMP environment
24 */
25
/*
 * 32-bit atomic counter.  Aligned to 4 bytes so the CS
 * (compare-and-swap) instruction can operate on it directly.
 */
typedef struct {
	int counter;
} __attribute__ ((aligned (4))) atomic_t;
#define ATOMIC_INIT(i)	{ (i) }
30
31#ifdef __KERNEL__
32
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

/*
 * Atomically apply the 32-bit RR instruction op_string (e.g. "ar",
 * "sr", "nr", "or") to ptr->counter and op_val: load the old value,
 * compute the new one, then retry with CS until no other CPU has
 * modified the counter in between ("jl 0b" loops while CS fails).
 * Evaluates to the new counter value.
 *
 * This variant uses the "Q" memory constraint, available in
 * gcc > 3.2, so no separate address register is needed.
 */
#define __CS_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	asm volatile(							\
		"	l	%0,%2\n"				\
		"0:	lr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	cs	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=Q" (((atomic_t *)(ptr))->counter)			\
		: "d" (op_val), "Q" (((atomic_t *)(ptr))->counter)	\
		: "cc", "memory");					\
	new_val;							\
})

#else /* __GNUC__ */

/*
 * Same compare-and-swap retry loop for older compilers that lack the
 * "Q" constraint: the address is passed in an address register ("a")
 * and an explicit 0(%reg) base-displacement operand is used.
 */
#define __CS_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	asm volatile(							\
		"	l	%0,0(%3)\n"				\
		"0:	lr	%1,%0\n"				\
		op_string "	%1,%4\n"				\
		"	cs	%0,%1,0(%3)\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=m" (((atomic_t *)(ptr))->counter)			\
		: "a" (ptr), "d" (op_val),				\
		  "m" (((atomic_t *)(ptr))->counter)			\
		: "cc", "memory");					\
	new_val;							\
})

#endif /* __GNUC__ */
69
/* Plain (non-RMW) accessors: a simple load and a simple store of the counter. */
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))
72
/* Atomically add i to *v via the CS loop ("ar") and return the new value. */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	return __CS_LOOP(v, i, "ar");
}
/* All add-family operations are derived from atomic_add_return. */
#define atomic_add(_i, _v)		atomic_add_return(_i, _v)
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add_return(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)
82
/* Atomically subtract i from *v via the CS loop ("sr") and return the new value. */
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	return __CS_LOOP(v, i, "sr");
}
/* All sub-family operations are derived from atomic_sub_return. */
#define atomic_sub(_i, _v)		atomic_sub_return(_i, _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub_return(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)
Linus Torvalds1da177e2005-04-16 15:20:36 -070092
/* Atomically clear the bits set in mask: counter &= ~mask, via AND ("nr"). */
static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t * v)
{
	__CS_LOOP(v, ~mask, "nr");
}
Martin Schwidefsky973bd992006-01-06 00:19:07 -080097
/* Atomically set the bits in mask: counter |= mask, via OR ("or"). */
static __inline__ void atomic_set_mask(unsigned long mask, atomic_t * v)
{
	__CS_LOOP(v, mask, "or");
}
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800102
/* Atomically store new into v->counter and return the previous value. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
104
/*
 * Compare-and-exchange: if v->counter equals old, atomically replace
 * it with new.  Returns the counter value observed before the
 * operation (equal to old iff the exchange happened).  A single CS
 * instruction does the whole job; "+d" (old) both supplies the
 * compare value and receives the observed value.
 */
static __inline__ int atomic_cmpxchg(atomic_t *v, int old, int new)
{
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
	asm volatile(
		"	cs	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
#else /* __GNUC__ */
	asm volatile(
		"	cs	%0,%3,0(%2)"
		: "+d" (old), "=m" (v->counter)
		: "a" (v), "d" (new), "m" (v->counter)
		: "cc", "memory");
#endif /* __GNUC__ */
	return old;
}
122
123static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
124{
125 int c, old;
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800126 c = atomic_read(v);
Nick Piggin0b2fcfd2006-03-23 03:01:02 -0800127 for (;;) {
128 if (unlikely(c == u))
129 break;
130 old = atomic_cmpxchg(v, c, c + a);
131 if (likely(old == c))
132 break;
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800133 c = old;
Nick Piggin0b2fcfd2006-03-23 03:01:02 -0800134 }
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800135 return c != u;
136}
137
/* Increment *v unless it is zero; returns non-zero if the increment happened. */
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
139
Linus Torvalds1da177e2005-04-16 15:20:36 -0700140#undef __CS_LOOP
141
142#ifdef __s390x__
/*
 * 64-bit atomic counter (64-bit kernels only).  Aligned to 8 bytes
 * so the CSG (64-bit compare-and-swap) instruction can operate on it.
 */
typedef struct {
	long long counter;
} __attribute__ ((aligned (8))) atomic64_t;
#define ATOMIC64_INIT(i)  { (i) }
147
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

/*
 * 64-bit counterpart of __CS_LOOP: atomically apply the 64-bit RRE
 * instruction op_string (e.g. "agr", "sgr", "ngr", "ogr") to
 * ptr->counter and op_val, retrying with CSG until no other CPU has
 * modified the counter in between.  Evaluates to the new value.
 *
 * Note: the memory operands must be typed through atomic64_t, not
 * atomic_t.  Casting to (atomic_t *) would declare only the 4-byte
 * int counter as read/written, although lg/csg access all 8 bytes,
 * letting the compiler make unsound assumptions about the upper half.
 */
#define __CSG_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	asm volatile(							\
		"	lg	%0,%2\n"				\
		"0:	lgr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	csg	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=Q" (((atomic64_t *)(ptr))->counter)			\
		: "d" (op_val), "Q" (((atomic64_t *)(ptr))->counter)	\
		: "cc", "memory");					\
	new_val;							\
})

#else /* __GNUC__ */

/*
 * Same CSG retry loop for older compilers without the "Q" constraint:
 * the address is passed in an address register ("a") with an explicit
 * 0(%reg) base-displacement operand.  The memory operands are typed
 * through atomic64_t for the same reason as above.
 */
#define __CSG_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	asm volatile(							\
		"	lg	%0,0(%3)\n"				\
		"0:	lgr	%1,%0\n"				\
		op_string "	%1,%4\n"				\
		"	csg	%0,%1,0(%3)\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=m" (((atomic64_t *)(ptr))->counter)			\
		: "a" (ptr), "d" (op_val),				\
		  "m" (((atomic64_t *)(ptr))->counter)			\
		: "cc", "memory");					\
	new_val;							\
})

#endif /* __GNUC__ */
184
/* Plain (non-RMW) accessors: simple 64-bit load and store of the counter. */
#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))
187
/* Atomically add i to *v via the CSG loop ("agr") and return the new value. */
static __inline__ long long atomic64_add_return(long long i, atomic64_t * v)
{
	return __CSG_LOOP(v, i, "agr");
}
/* All add-family operations are derived from atomic64_add_return. */
#define atomic64_add(_i, _v)		atomic64_add_return(_i, _v)
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)
197
/* Atomically subtract i from *v via the CSG loop ("sgr") and return the new value. */
static __inline__ long long atomic64_sub_return(long long i, atomic64_t * v)
{
	return __CSG_LOOP(v, i, "sgr");
}
/* All sub-family operations are derived from atomic64_sub_return. */
#define atomic64_sub(_i, _v)		atomic64_sub_return(_i, _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
207
/* Atomically clear the bits set in mask: counter &= ~mask, via 64-bit AND ("ngr"). */
static __inline__ void atomic64_clear_mask(unsigned long mask, atomic64_t * v)
{
	__CSG_LOOP(v, ~mask, "ngr");
}
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800212
/* Atomically set the bits in mask: counter |= mask, via 64-bit OR ("ogr"). */
static __inline__ void atomic64_set_mask(unsigned long mask, atomic64_t * v)
{
	__CSG_LOOP(v, mask, "ogr");
}
217
/* Atomically store new into v->counter and return the previous value. */
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
219
/*
 * 64-bit compare-and-exchange: if v->counter equals old, atomically
 * replace it with new.  Returns the counter value observed before the
 * operation (equal to old iff the exchange happened), using a single
 * CSG instruction; "+d" (old) supplies the compare value and receives
 * the observed value.
 */
static __inline__ long long atomic64_cmpxchg(atomic64_t *v,
					     long long old, long long new)
{
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
	asm volatile(
		"	csg	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
#else /* __GNUC__ */
	asm volatile(
		"	csg	%0,%3,0(%2)"
		: "+d" (old), "=m" (v->counter)
		: "a" (v), "d" (new), "m" (v->counter)
		: "cc", "memory");
#endif /* __GNUC__ */
	return old;
}
238
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800239static __inline__ int atomic64_add_unless(atomic64_t *v,
240 long long a, long long u)
241{
242 long long c, old;
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800243 c = atomic64_read(v);
Nick Piggin0b2fcfd2006-03-23 03:01:02 -0800244 for (;;) {
245 if (unlikely(c == u))
246 break;
247 old = atomic64_cmpxchg(v, c, c + a);
248 if (likely(old == c))
249 break;
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800250 c = old;
Nick Piggin0b2fcfd2006-03-23 03:01:02 -0800251 }
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800252 return c != u;
253}
254
/* Increment *v unless it is zero; returns non-zero if the increment happened. */
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
256
257#undef __CSG_LOOP
258#endif
Nick Piggin8426e1f2005-11-13 16:07:25 -0800259
/*
 * Ordering hooks around atomic_inc/atomic_dec; on this architecture
 * they simply expand to a full smp_mb().
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
264
Christoph Lameterd3cb4872006-01-06 00:11:20 -0800265#include <asm-generic/atomic.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -0700266#endif /* __KERNEL__ */
267#endif /* __ARCH_S390_ATOMIC__ */