/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

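/*
 * Illustrative use of ATOMIC_INIT() (editor's sketch, not part of the
 * original header; the counter name is hypothetical).  ATOMIC_INIT() is for
 * compile-time initialization; use arch_atomic_set() at run time:
 *
 *	static atomic_t nr_widgets = ATOMIC_INIT(0);
 */
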
/**
 * arch_atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int arch_atomic_read(const atomic_t *v)
{
	/*
	 * Note for KASAN: we deliberately don't use READ_ONCE_NOCHECK() here,
	 * since it is a non-inlined function that increases binary size and
	 * stack usage.
	 */
	return READ_ONCE((v)->counter);
}

/**
 * arch_atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void arch_atomic_set(atomic_t *v, int i)
{
	WRITE_ONCE(v->counter, i);
}

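/*
 * Illustrative usage of the read/set pair (editor's sketch, hypothetical
 * counter name).  Both are plain loads/stores, not read-modify-write
 * operations.  Kernel code normally reaches them through the instrumented
 * atomic_read()/atomic_set() wrappers from
 * <asm-generic/atomic-instrumented.h> rather than calling arch_atomic_*()
 * directly:
 *
 *	arch_atomic_set(&nr_widgets, 0);
 *	pr_info("widgets: %d\n", arch_atomic_read(&nr_widgets));
 */
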
/**
 * arch_atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void arch_atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * arch_atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void arch_atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

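/*
 * Illustrative usage (editor's sketch, hypothetical counters): the plain
 * add/sub forms return nothing, so they suit statistics counters where the
 * result is not needed:
 *
 *	arch_atomic_add(len, &bytes_queued);
 *	arch_atomic_sub(len, &bytes_free);
 */
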
/**
 * arch_atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);
}

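/*
 * Illustrative usage (editor's sketch, hypothetical object): drop @nr
 * references at once and free the object when the count reaches zero:
 *
 *	if (arch_atomic_sub_and_test(nr, &obj->refs))
 *		kfree(obj);
 */
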
/**
 * arch_atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void arch_atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * arch_atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void arch_atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * arch_atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", e);
}

/**
 * arch_atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", e);
}

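/*
 * Illustrative get/put pattern (editor's sketch, hypothetical object; real
 * reference counts should normally use refcount_t, which adds saturation
 * and overflow checking):
 *
 *	arch_atomic_inc(&obj->refs);			// get
 *	...
 *	if (arch_atomic_dec_and_test(&obj->refs))	// put
 *		kfree(obj);
 */
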
/**
 * arch_atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static __always_inline bool arch_atomic_add_negative(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", s);
}

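/*
 * Illustrative usage (editor's sketch; budget, cost and throttle() are
 * hypothetical): adjust a value and react when it goes below zero, without
 * needing a separate read of the result:
 *
 *	if (arch_atomic_add_negative(-cost, &budget))
 *		throttle();
 */
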
/**
 * arch_atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
{
	return i + xadd(&v->counter, i);
}

/**
 * arch_atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __always_inline int arch_atomic_sub_return(int i, atomic_t *v)
{
	return arch_atomic_add_return(-i, v);
}

#define arch_atomic_inc_return(v)  (arch_atomic_add_return(1, v))
#define arch_atomic_dec_return(v)  (arch_atomic_sub_return(1, v))

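/*
 * Illustrative usage (editor's sketch, hypothetical counter): the *_return
 * forms yield the new value, e.g. handing out increasing ids:
 *
 *	int id = arch_atomic_inc_return(&next_id);	// 1, 2, 3, ...
 */
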
static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
{
	return xadd(&v->counter, i);
}

static __always_inline int arch_atomic_fetch_sub(int i, atomic_t *v)
{
	return xadd(&v->counter, -i);
}

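/*
 * Illustrative usage (editor's sketch; tail and RING_SIZE are hypothetical):
 * the fetch_* forms return the value the variable held *before* the
 * operation, e.g. claiming the next free slot in a ring:
 *
 *	unsigned int slot = arch_atomic_fetch_add(1, &tail) % RING_SIZE;
 */
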
static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return arch_cmpxchg(&v->counter, old, new);
}

#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
static __always_inline bool arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	return try_cmpxchg(&v->counter, old, new);
}

static inline int arch_atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}

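/*
 * Illustrative compare-and-swap loop (editor's sketch; ctr and CEILING are
 * hypothetical): increment a counter but clamp it at a ceiling.
 * arch_atomic_try_cmpxchg() writes the current value back into @old on
 * failure, so the loop does not need to re-read the variable:
 *
 *	int old = arch_atomic_read(&ctr);
 *
 *	do {
 *		if (old >= CEILING)
 *			break;
 *	} while (!arch_atomic_try_cmpxchg(&ctr, &old, old + 1));
 */
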
static inline void arch_atomic_and(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "andl %1,%0"
			: "+m" (v->counter)
			: "ir" (i)
			: "memory");
}

static inline int arch_atomic_fetch_and(int i, atomic_t *v)
{
	int val = arch_atomic_read(v);

	do { } while (!arch_atomic_try_cmpxchg(v, &val, val & i));

	return val;
}

static inline void arch_atomic_or(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "orl %1,%0"
			: "+m" (v->counter)
			: "ir" (i)
			: "memory");
}

static inline int arch_atomic_fetch_or(int i, atomic_t *v)
{
	int val = arch_atomic_read(v);

	do { } while (!arch_atomic_try_cmpxchg(v, &val, val | i));

	return val;
}

static inline void arch_atomic_xor(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "xorl %1,%0"
			: "+m" (v->counter)
			: "ir" (i)
			: "memory");
}

static inline int arch_atomic_fetch_xor(int i, atomic_t *v)
{
	int val = arch_atomic_read(v);

	do { } while (!arch_atomic_try_cmpxchg(v, &val, val ^ i));

	return val;
}

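/*
 * Illustrative usage of the bitwise ops (editor's sketch; flags,
 * FLAG_PENDING, FLAG_POLARITY and process_pending() are hypothetical).
 * The fetch_* variants return the previous value, built above from an
 * arch_atomic_try_cmpxchg() loop:
 *
 *	arch_atomic_or(FLAG_PENDING, &flags);
 *	if (arch_atomic_fetch_and(~FLAG_PENDING, &flags) & FLAG_PENDING)
 *		process_pending();
 *	arch_atomic_xor(FLAG_POLARITY, &flags);
 */
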
/**
 * __arch_atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static __always_inline int __arch_atomic_add_unless(atomic_t *v, int a, int u)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));

	return c;
}

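/*
 * Illustrative usage (editor's sketch, hypothetical object): take a
 * reference only if the count has not already dropped to zero.  The old
 * value is returned, so the caller compares it against @u:
 *
 *	if (__arch_atomic_add_unless(&obj->refs, 1, 0) == 0)
 *		return NULL;		// object already being freed
 */
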
#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#include <asm-generic/atomic-instrumented.h>

#endif /* _ASM_X86_ATOMIC_H */