blob: 2d184655bc5d9b3cad0e9c1bddec672af404c042 [file] [log] [blame]
Linus Torvalds1da177e2005-04-16 15:20:36 -07001#ifndef __ARCH_S390_ATOMIC__
2#define __ARCH_S390_ATOMIC__
3
Dave Jones5bd1db62006-04-10 22:53:51 -07004#include <linux/compiler.h>
5
Linus Torvalds1da177e2005-04-16 15:20:36 -07006/*
7 * include/asm-s390/atomic.h
8 *
9 * S390 version
Martin Schwidefsky973bd992006-01-06 00:19:07 -080010 * Copyright (C) 1999-2005 IBM Deutschland Entwicklung GmbH, IBM Corporation
Linus Torvalds1da177e2005-04-16 15:20:36 -070011 * Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
12 * Denis Joseph Barrow,
13 * Arnd Bergmann (arndb@de.ibm.com)
14 *
15 * Derived from "include/asm-i386/bitops.h"
16 * Copyright (C) 1992, Linus Torvalds
17 *
18 */
19
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 * S390 uses 'Compare And Swap' for atomicity in SMP environment
 */
25
/*
 * 32-bit atomic counter.  The 4-byte alignment is required so the
 * counter can be updated with the s390 CS (compare and swap)
 * instruction, which the __CS_LOOP helpers below rely on.
 */
typedef struct {
	int counter;
} __attribute__ ((aligned (4))) atomic_t;
#define ATOMIC_INIT(i)	{ (i) }
30
31#ifdef __KERNEL__
32
Martin Schwidefsky94c12cc2006-09-28 16:56:43 +020033#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
34
/*
 * Compare-and-swap update loop (gcc > 3.2 variant, "Q" memory
 * constraints): load the counter, apply op_string (e.g. "ar") with
 * op_val to a copy, then CS it back; retry from label 0 if another
 * CPU changed the counter in between.  Evaluates to the new value.
 */
#define __CS_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	asm volatile(							\
		"	l	%0,%2\n"				\
		"0:	lr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	cs	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=Q" (((atomic_t *)(ptr))->counter)			\
		: "d" (op_val), "Q" (((atomic_t *)(ptr))->counter)	\
		: "cc", "memory");					\
	new_val;							\
})
Martin Schwidefsky94c12cc2006-09-28 16:56:43 +020049
50#else /* __GNUC__ */
51
/*
 * Compare-and-swap update loop for gcc <= 3.2: the "Q" constraint
 * is unavailable, so the counter address is passed in an address
 * register ("a" constraint) and dereferenced explicitly as 0(%3).
 * Same retry semantics as the variant above; evaluates to the new
 * counter value.
 */
#define __CS_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	asm volatile(							\
		"	l	%0,0(%3)\n"				\
		"0:	lr	%1,%0\n"				\
		op_string "	%1,%4\n"				\
		"	cs	%0,%1,0(%3)\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=m" (((atomic_t *)(ptr))->counter)			\
		: "a" (ptr), "d" (op_val),				\
		  "m" (((atomic_t *)(ptr))->counter)			\
		: "cc", "memory");					\
	new_val;							\
})
67
68#endif /* __GNUC__ */
69
/*
 * Read the counter.  The leading barrier() (linux/compiler.h)
 * keeps the compiler from reusing a value loaded before this call;
 * it does not emit any CPU serialization instruction.
 */
static inline int atomic_read(const atomic_t *v)
{
	barrier();
	return v->counter;
}
75
/*
 * Set the counter to i.  The trailing barrier() keeps the compiler
 * from sinking the store past subsequent code.
 */
static inline void atomic_set(atomic_t *v, int i)
{
	v->counter = i;
	barrier();
}
Linus Torvalds1da177e2005-04-16 15:20:36 -070081
/* Atomically add i to *v via the CS loop; returns the new value. */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	return __CS_LOOP(v, i, "ar");
}
/* Convenience wrappers, all derived from the returned new value. */
#define atomic_add(_i, _v)		atomic_add_return(_i, _v)
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add_return(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)
91
/* Atomically subtract i from *v via the CS loop; returns the new value. */
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	return __CS_LOOP(v, i, "sr");
}
/* Convenience wrappers, all derived from the returned new value. */
#define atomic_sub(_i, _v)		atomic_sub_return(_i, _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub_return(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)
Linus Torvalds1da177e2005-04-16 15:20:36 -0700101
/* Atomically clear the bits set in mask ("nr" = AND with ~mask). */
static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t * v)
{
	__CS_LOOP(v, ~mask, "nr");
}
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800106
/* Atomically set the bits given in mask ("or" = OR with mask). */
static __inline__ void atomic_set_mask(unsigned long mask, atomic_t * v)
{
	__CS_LOOP(v, mask, "or");
}
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800111
/* Unconditionally exchange the counter with new; returns the old value. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
113
/*
 * Atomically compare *v with old and, if equal, store new.
 * Returns the counter value that was observed: old on success,
 * the differing current value on failure (CS loads the first
 * operand with the current memory contents when the compare fails,
 * which is why "old" is an in/out "+d" operand).
 */
static __inline__ int atomic_cmpxchg(atomic_t *v, int old, int new)
{
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
	/* gcc > 3.2: counter referenced directly via "Q" memory operands */
	asm volatile(
		"	cs	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
#else /* __GNUC__ */
	/* older gcc: address in register %2, explicit 0(%2) addressing */
	asm volatile(
		"	cs	%0,%3,0(%2)"
		: "+d" (old), "=m" (v->counter)
		: "a" (v), "d" (new), "m" (v->counter)
		: "cc", "memory");
#endif /* __GNUC__ */
	return old;
}
131
132static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
133{
134 int c, old;
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800135 c = atomic_read(v);
Nick Piggin0b2fcfd2006-03-23 03:01:02 -0800136 for (;;) {
137 if (unlikely(c == u))
138 break;
139 old = atomic_cmpxchg(v, c, c + a);
140 if (likely(old == c))
141 break;
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800142 c = old;
Nick Piggin0b2fcfd2006-03-23 03:01:02 -0800143 }
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800144 return c != u;
145}
146
147#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
148
Linus Torvalds1da177e2005-04-16 15:20:36 -0700149#undef __CS_LOOP
150
151#ifdef __s390x__
/*
 * 64-bit atomic counter (64-bit kernels only).  The 8-byte
 * alignment is required by the CSG instruction used to update it.
 */
typedef struct {
	long long counter;
} __attribute__ ((aligned (8))) atomic64_t;
#define ATOMIC64_INIT(i)  { (i) }
156
Martin Schwidefsky94c12cc2006-09-28 16:56:43 +0200157#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
158
/*
 * 64-bit compare-and-swap update loop (gcc > 3.2 variant): mirrors
 * __CS_LOOP but uses LG/LGR/CSG on the 64-bit counter.  Evaluates
 * to the new counter value.
 *
 * Fix: the memory operands must go through (atomic64_t *) — the
 * previous (atomic_t *) cast described only a 4-byte int to the
 * compiler while CSG reads and writes 8 bytes, giving the "Q"
 * operands the wrong type and size.
 */
#define __CSG_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	asm volatile(							\
		"	lg	%0,%2\n"				\
		"0:	lgr	%1,%0\n"				\
		op_string "	%1,%3\n"				\
		"	csg	%0,%1,%2\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=Q" (((atomic64_t *)(ptr))->counter)			\
		: "d" (op_val), "Q" (((atomic64_t *)(ptr))->counter)	\
		: "cc", "memory" );					\
	new_val;							\
})
Martin Schwidefsky94c12cc2006-09-28 16:56:43 +0200173
174#else /* __GNUC__ */
175
/*
 * 64-bit compare-and-swap update loop for gcc <= 3.2: counter
 * address passed in an address register and dereferenced as 0(%3).
 * Evaluates to the new counter value.
 *
 * Fix: the "m" operands must go through (atomic64_t *) — the
 * previous (atomic_t *) cast described only a 4-byte int to the
 * compiler while CSG reads and writes 8 bytes.
 */
#define __CSG_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	asm volatile(							\
		"	lg	%0,0(%3)\n"				\
		"0:	lgr	%1,%0\n"				\
		op_string "	%1,%4\n"				\
		"	csg	%0,%1,0(%3)\n"				\
		"	jl	0b"					\
		: "=&d" (old_val), "=&d" (new_val),			\
		  "=m" (((atomic64_t *)(ptr))->counter)			\
		: "a" (ptr), "d" (op_val),				\
		  "m" (((atomic64_t *)(ptr))->counter)			\
		: "cc", "memory" );					\
	new_val;							\
})
191
192#endif /* __GNUC__ */
193
/*
 * Read the 64-bit counter; barrier() keeps the compiler from
 * reusing a previously loaded value.
 */
static inline long long atomic64_read(const atomic64_t *v)
{
	barrier();
	return v->counter;
}
199
/*
 * Set the 64-bit counter to i; barrier() keeps the compiler from
 * sinking the store past subsequent code.
 */
static inline void atomic64_set(atomic64_t *v, long long i)
{
	v->counter = i;
	barrier();
}
Linus Torvalds1da177e2005-04-16 15:20:36 -0700205
/* Atomically add i to *v via the CSG loop; returns the new value. */
static __inline__ long long atomic64_add_return(long long i, atomic64_t * v)
{
	return __CSG_LOOP(v, i, "agr");
}
/* Convenience wrappers, all derived from the returned new value. */
#define atomic64_add(_i, _v)		atomic64_add_return(_i, _v)
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)
215
/* Atomically subtract i from *v via the CSG loop; returns the new value. */
static __inline__ long long atomic64_sub_return(long long i, atomic64_t * v)
{
	return __CSG_LOOP(v, i, "sgr");
}
/* Convenience wrappers, all derived from the returned new value. */
#define atomic64_sub(_i, _v)		atomic64_sub_return(_i, _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
225
/* Atomically clear the bits set in mask ("ngr" = 64-bit AND with ~mask). */
static __inline__ void atomic64_clear_mask(unsigned long mask, atomic64_t * v)
{
	__CSG_LOOP(v, ~mask, "ngr");
}
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800230
/* Atomically set the bits given in mask ("ogr" = 64-bit OR with mask). */
static __inline__ void atomic64_set_mask(unsigned long mask, atomic64_t * v)
{
	__CSG_LOOP(v, mask, "ogr");
}
235
/* Unconditionally exchange the 64-bit counter with new; returns the old value. */
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
237
/*
 * Atomically compare the 64-bit *v with old and, if equal, store
 * new.  Returns the counter value that was observed: old on
 * success, the differing current value on failure (CSG loads the
 * first operand with the current memory contents on a miscompare,
 * hence the in/out "+d" operand).
 */
static __inline__ long long atomic64_cmpxchg(atomic64_t *v,
					     long long old, long long new)
{
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
	/* gcc > 3.2: counter referenced directly via "Q" memory operands */
	asm volatile(
		"	csg	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
#else /* __GNUC__ */
	/* older gcc: address in register %2, explicit 0(%2) addressing */
	asm volatile(
		"	csg	%0,%3,0(%2)"
		: "+d" (old), "=m" (v->counter)
		: "a" (v), "d" (new), "m" (v->counter)
		: "cc", "memory");
#endif /* __GNUC__ */
	return old;
}
256
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800257static __inline__ int atomic64_add_unless(atomic64_t *v,
258 long long a, long long u)
259{
260 long long c, old;
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800261 c = atomic64_read(v);
Nick Piggin0b2fcfd2006-03-23 03:01:02 -0800262 for (;;) {
263 if (unlikely(c == u))
264 break;
265 old = atomic64_cmpxchg(v, c, c + a);
266 if (likely(old == c))
267 break;
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800268 c = old;
Nick Piggin0b2fcfd2006-03-23 03:01:02 -0800269 }
Martin Schwidefsky973bd992006-01-06 00:19:07 -0800270 return c != u;
271}
272
273#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
274
275#undef __CSG_LOOP
276#endif
Nick Piggin8426e1f2005-11-13 16:07:25 -0800277
/*
 * Barrier hooks that generic code issues around atomic inc/dec;
 * mapped to full SMP memory barriers on this architecture.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
282
Christoph Lameterd3cb4872006-01-06 00:11:20 -0800283#include <asm-generic/atomic.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -0700284#endif /* __KERNEL__ */
285#endif /* __ARCH_S390_ATOMIC__ */