#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/barrier.h>


#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

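/*
 * Typical use is reference counting; an illustrative sketch only (the
 * object and release function below are hypothetical):
 *
 *	static atomic_t refcnt = ATOMIC_INIT(1);
 *
 *	atomic_inc(&refcnt);			// take a reference
 *	if (atomic_dec_and_test(&refcnt))	// drop it; true at zero
 *		release_object();
 */
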
#define atomic_read(v)		ACCESS_ONCE((v)->counter)
#define atomic64_read(v)	ACCESS_ONCE((v)->counter)

#define atomic_set(v,i)		(((v)->counter) = (i))
#define atomic64_set(v,i)	(((v)->counter) = (i))

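/*
 * ATOMIC_OP() generates ia64_atomic_<op>() as a compare-and-exchange
 * loop: read the counter, compute the new value, and retry with
 * ia64_cmpxchg() until no other CPU has changed the counter in the
 * meantime.  The "acq" completer gives the update acquire semantics.
 */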
#define ATOMIC_OP(op, c_op)						\
static __inline__ int							\
ia64_atomic_##op (int i, atomic_t *v)					\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return new;							\
}

ATOMIC_OP(add, +)
ATOMIC_OP(sub, -)

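/*
 * atomic_add_return()/atomic_sub_return() use the ia64 fetchadd
 * instruction when the increment is a compile-time constant that
 * fetchadd can encode (+/- 1, 4, 8 and 16 are the only immediates the
 * instruction accepts); anything else falls back to the cmpxchg loop
 * above.
 */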
#define atomic_add_return(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic_add(__ia64_aar_i, v);			\
})

#define atomic_sub_return(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))		\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic_sub(__ia64_asr_i, v);			\
})

#define CONFIG_ARCH_HAS_ATOMIC_OR

ATOMIC_OP(and, &)
ATOMIC_OP(or, |)
ATOMIC_OP(xor, ^)

#define atomic_and(i,v)	(void)ia64_atomic_and(i,v)
#define atomic_or(i,v)	(void)ia64_atomic_or(i,v)
#define atomic_xor(i,v)	(void)ia64_atomic_xor(i,v)

#undef ATOMIC_OP

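/*
 * The 64-bit operations mirror the 32-bit ones above, using the same
 * cmpxchg-loop construction on an atomic64_t.
 */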
#define ATOMIC64_OP(op, c_op)						\
static __inline__ long							\
ia64_atomic64_##op (__s64 i, atomic64_t *v)				\
{									\
	__s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return new;							\
}

ATOMIC64_OP(add, +)
ATOMIC64_OP(sub, -)

#define atomic64_add_return(i,v)					\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic64_add(__ia64_aar_i, v);			\
})

#define atomic64_sub_return(i,v)					\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==  1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==  8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i == -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i == -8) || (__ia64_asr_i == -16)))		\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic64_sub(__ia64_asr_i, v);			\
})

ATOMIC64_OP(and, &)
ATOMIC64_OP(or, |)
ATOMIC64_OP(xor, ^)

#define atomic64_and(i,v)	(void)ia64_atomic64_and(i,v)
#define atomic64_or(i,v)	(void)ia64_atomic64_or(i,v)
#define atomic64_xor(i,v)	(void)ia64_atomic64_xor(i,v)

#undef ATOMIC64_OP

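/*
 * cmpxchg/xchg on an atomic counter are just the generic cmpxchg()
 * and xchg() applied to the counter word.
 */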
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) \
	(cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

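/*
 * __atomic_add_unless - add @a to @v, unless @v is already @u.
 * Returns the old value of @v; the add happened iff the return value
 * differs from @u.
 */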
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
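/*
 * atomic64_add_unless - add @a to @v, unless @v is already @u.
 * Unlike __atomic_add_unless() above, this returns a boolean:
 * non-zero if the add was performed.
 */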
static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static __inline__ long
atomic64_add_negative (__s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
#define atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)

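/*
 * The void-returning forms simply discard the *_return() result; only
 * the side effect on the counter is wanted.
 */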
#define atomic_add(i,v)			(void)atomic_add_return((i), (v))
#define atomic_sub(i,v)			(void)atomic_sub_return((i), (v))
#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))

#define atomic64_add(i,v)		(void)atomic64_add_return((i), (v))
#define atomic64_sub(i,v)		(void)atomic64_sub_return((i), (v))
#define atomic64_inc(v)			atomic64_add(1, (v))
#define atomic64_dec(v)			atomic64_sub(1, (v))

#endif /* _ASM_IA64_ATOMIC_H */