#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>

/*
 *  include/asm-s390/atomic.h
 *
 *  S390 version
 *    Copyright (C) 1999-2005 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 *               Denis Joseph Barrow,
 *               Arnd Bergmann (arndb@de.ibm.com)
 *
 *  Derived from "include/asm-i386/bitops.h"
 *    Copyright (C) 1992, Linus Torvalds
 *
 */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 * S390 uses 'Compare And Swap' for atomicity in SMP environment.
 */

typedef struct {
	volatile int counter;
} __attribute__ ((aligned (4))) atomic_t;
#define ATOMIC_INIT(i)	{ (i) }
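
/*
 * Illustrative usage (a sketch, not part of the interface above): an
 * atomic_t is declared and initialized like any other counter, e.g. a
 * hypothetical reference count:
 *
 *	static atomic_t my_refcount = ATOMIC_INIT(1);
 */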

#ifdef __KERNEL__

#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

#define __CS_LOOP(ptr, op_val, op_string) ({			\
	typeof(ptr->counter) old_val, new_val;			\
	asm volatile(						\
		"	l	%0,%2\n"			\
		"0:	lr	%1,%0\n"			\
		op_string "	%1,%3\n"			\
		"	cs	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (old_val), "=&d" (new_val),		\
		  "=Q" (((atomic_t *)(ptr))->counter)		\
		: "d" (op_val), "Q" (((atomic_t *)(ptr))->counter) \
		: "cc", "memory");				\
	new_val;						\
})

#else /* __GNUC__ */

#define __CS_LOOP(ptr, op_val, op_string) ({			\
	typeof(ptr->counter) old_val, new_val;			\
	asm volatile(						\
		"	l	%0,0(%3)\n"			\
		"0:	lr	%1,%0\n"			\
		op_string "	%1,%4\n"			\
		"	cs	%0,%1,0(%3)\n"			\
		"	jl	0b"				\
		: "=&d" (old_val), "=&d" (new_val),		\
		  "=m" (((atomic_t *)(ptr))->counter)		\
		: "a" (ptr), "d" (op_val),			\
		  "m" (((atomic_t *)(ptr))->counter)		\
		: "cc", "memory");				\
	new_val;						\
})

#endif /* __GNUC__ */

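/*
 * Roughly speaking (a sketch, not the generated code), __CS_LOOP(v, i, "ar")
 * behaves like the following C, with the compare-and-swap step done
 * atomically by the CS instruction:
 *
 *	int old, new;
 *	do {
 *		old = v->counter;
 *		new = old + i;	// "ar" adds; "sr"/"nr"/"or" subtract/and/or
 *	} while (!compare_and_swap(&v->counter, old, new));
 *	return new;
 *
 * compare_and_swap() here is a hypothetical helper standing in for CS:
 * it stores 'new' only if the counter still equals 'old'.
 */
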
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	return __CS_LOOP(v, i, "ar");
}
#define atomic_add(_i, _v)		atomic_add_return(_i, _v)
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add_return(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	return __CS_LOOP(v, i, "sr");
}
#define atomic_sub(_i, _v)		atomic_sub_return(_i, _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub_return(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)
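
/*
 * Illustrative usage (a sketch; 'obj' and 'release_obj' are hypothetical):
 * the inc/dec helpers are typically used for reference counting, with
 * atomic_dec_and_test() telling the last user to free the object:
 *
 *	atomic_inc(&obj->refcount);		// take a reference
 *	...
 *	if (atomic_dec_and_test(&obj->refcount))
 *		release_obj(obj);		// dropped the last reference
 */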

static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t * v)
{
	__CS_LOOP(v, ~mask, "nr");
}

static __inline__ void atomic_set_mask(unsigned long mask, atomic_t * v)
{
	__CS_LOOP(v, mask, "or");
}
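
/*
 * Illustrative usage (a sketch; FLAG_BUSY and 'state' are hypothetical):
 * atomic_set_mask() ORs bits into the counter and atomic_clear_mask()
 * ANDs them out, each in a single atomic CS loop:
 *
 *	atomic_set_mask(FLAG_BUSY, &state);	// state.counter |= FLAG_BUSY
 *	atomic_clear_mask(FLAG_BUSY, &state);	// state.counter &= ~FLAG_BUSY
 */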

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static __inline__ int atomic_cmpxchg(atomic_t *v, int old, int new)
{
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
	asm volatile(
		"	cs	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
#else /* __GNUC__ */
	asm volatile(
		"	cs	%0,%3,0(%2)"
		: "+d" (old), "=m" (v->counter)
		: "a" (v), "d" (new), "m" (v->counter)
		: "cc", "memory");
#endif /* __GNUC__ */
	return old;
}
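
/*
 * Illustrative usage (a sketch): atomic_cmpxchg() stores 'new' only if the
 * counter still holds 'old', and returns the value it found there.  A
 * typical lock-free update therefore re-reads and retries on failure:
 *
 *	// 'v' is an atomic_t *; increment it only via cmpxchg:
 *	int old, seen;
 *	do {
 *		old = atomic_read(v);
 *		seen = atomic_cmpxchg(v, old, old + 1);
 *	} while (seen != old);		// lost the race, try again
 */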

static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
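
/*
 * Illustrative usage (a sketch; 'obj' is hypothetical): atomic_add_unless()
 * adds 'a' only while the counter is not 'u' and returns nonzero if it did.
 * atomic_inc_not_zero() is the common special case for taking a reference
 * to an object that may already be on its way to being freed:
 *
 *	if (!atomic_inc_not_zero(&obj->refcount))
 *		return NULL;	// object already dead, do not touch it
 */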

#undef __CS_LOOP

#ifdef __s390x__
typedef struct {
	volatile long long counter;
} __attribute__ ((aligned (8))) atomic64_t;
#define ATOMIC64_INIT(i)	{ (i) }

#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)

#define __CSG_LOOP(ptr, op_val, op_string) ({			\
	typeof(ptr->counter) old_val, new_val;			\
	asm volatile(						\
		"	lg	%0,%2\n"			\
		"0:	lgr	%1,%0\n"			\
		op_string "	%1,%3\n"			\
		"	csg	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (old_val), "=&d" (new_val),		\
		  "=Q" (((atomic64_t *)(ptr))->counter)		\
		: "d" (op_val), "Q" (((atomic64_t *)(ptr))->counter) \
		: "cc", "memory");				\
	new_val;						\
})

#else /* __GNUC__ */

#define __CSG_LOOP(ptr, op_val, op_string) ({			\
	typeof(ptr->counter) old_val, new_val;			\
	asm volatile(						\
		"	lg	%0,0(%3)\n"			\
		"0:	lgr	%1,%0\n"			\
		op_string "	%1,%4\n"			\
		"	csg	%0,%1,0(%3)\n"			\
		"	jl	0b"				\
		: "=&d" (old_val), "=&d" (new_val),		\
		  "=m" (((atomic64_t *)(ptr))->counter)		\
		: "a" (ptr), "d" (op_val),			\
		  "m" (((atomic64_t *)(ptr))->counter)		\
		: "cc", "memory");				\
	new_val;						\
})

#endif /* __GNUC__ */

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

static __inline__ long long atomic64_add_return(long long i, atomic64_t * v)
{
	return __CSG_LOOP(v, i, "agr");
}
#define atomic64_add(_i, _v)		atomic64_add_return(_i, _v)
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)

static __inline__ long long atomic64_sub_return(long long i, atomic64_t * v)
{
	return __CSG_LOOP(v, i, "sgr");
}
#define atomic64_sub(_i, _v)		atomic64_sub_return(_i, _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)

static __inline__ void atomic64_clear_mask(unsigned long mask, atomic64_t * v)
{
	__CSG_LOOP(v, ~mask, "ngr");
}

static __inline__ void atomic64_set_mask(unsigned long mask, atomic64_t * v)
{
	__CSG_LOOP(v, mask, "ogr");
}

static __inline__ long long atomic64_cmpxchg(atomic64_t *v,
					     long long old, long long new)
{
#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
	asm volatile(
		"	csg	%0,%2,%1"
		: "+d" (old), "=Q" (v->counter)
		: "d" (new), "Q" (v->counter)
		: "cc", "memory");
#else /* __GNUC__ */
	asm volatile(
		"	csg	%0,%3,0(%2)"
		: "+d" (old), "=m" (v->counter)
		: "a" (v), "d" (new), "m" (v->counter)
		: "cc", "memory");
#endif /* __GNUC__ */
	return old;
}

static __inline__ int atomic64_add_unless(atomic64_t *v,
					  long long a, long long u)
{
	long long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic64_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#undef __CSG_LOOP
#endif

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
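
/*
 * Illustrative usage (a sketch; 'obj->pending' is a hypothetical atomic_t):
 * callers that need an explicit memory barrier around an atomic increment
 * or decrement bracket the operation with these macros, which on s390
 * simply expand to smp_mb():
 *
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 */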

#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* __ARCH_S390_ATOMIC__ */