/*
 * Generic C implementation of atomic counter operations
 * Originally implemented for MN10300.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#ifdef CONFIG_SMP
#error not SMP safe
#endif
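
/*
 * Note: everything below serialises only against local interrupts,
 * via local_irq_save()/local_irq_restore().  That is sufficient on
 * uniprocessor configurations but not under SMP, hence the #error
 * above.
 */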

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

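/*
 * Example usage (an illustrative sketch only; free_my_object() is a
 * hypothetical placeholder for whatever cleanup the caller requires):
 *
 *	static atomic_t refcount = ATOMIC_INIT(1);
 *
 *	atomic_inc(&refcount);
 *	if (atomic_dec_and_test(&refcount))
 *		free_my_object();
 */
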
#ifdef __KERNEL__

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_read(v)	((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))

#include <asm/system.h>

/**
 * atomic_add_return - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns the result.
 * Note that the guaranteed useful range of an atomic_t is only 24 bits.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int temp;

	local_irq_save(flags);
	temp = v->counter;
	temp += i;
	v->counter = temp;
	local_irq_restore(flags);

	return temp;
}

/**
 * atomic_sub_return - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns the result.
 * Note that the guaranteed useful range of an atomic_t is only 24 bits.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int temp;

	local_irq_save(flags);
	temp = v->counter;
	temp -= i;
	v->counter = temp;
	local_irq_restore(flags);

	return temp;
}

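/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true if the result is
 * negative, or false when the result is greater than or equal to zero.
 */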
static inline int atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

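/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v; the result is discarded.
 */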
static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

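/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v; the result is discarded.
 */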
static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}

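/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */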
static inline void atomic_inc(atomic_t *v)
{
	atomic_add_return(1, v);
}

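/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */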
static inline void atomic_dec(atomic_t *v)
{
	atomic_sub_return(1, v);
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)

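/**
 * atomic_add_unless - add to atomic variable unless it has a given value
 * @v: pointer of type atomic_t
 * @a: amount to add to @v...
 * @u: ...unless @v is equal to @u
 *
 * Atomically adds @a to @v, so long as @v was not already @u, retrying
 * the cmpxchg() until it observes an unchanged value.  Returns non-zero
 * if @v was not @u, and zero otherwise.
 */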
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})

#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)

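/**
 * atomic_clear_mask - clear bits in a word
 * @mask: the bits to clear
 * @addr: the word to operate on
 *
 * Atomically clears the bits set in @mask from the word at @addr.
 */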
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags;

	mask = ~mask;
	local_irq_save(flags);
	*addr &= mask;
	local_irq_restore(flags);
}

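/*
 * atomic_xchg() and atomic_cmpxchg() simply forward to the xchg() and
 * cmpxchg() primitives made available through <asm/system.h> above.
 */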
#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))

#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))

#define cmpxchg64_local(ptr, o, n)	__cmpxchg64_local_generic((ptr), (o), (n))

/* Assume that atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic-long.h>

#endif /* __KERNEL__ */
#endif /* __ASM_GENERIC_ATOMIC_H */