#ifndef __ASM_SH_ATOMIC_H
#define __ASM_SH_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 */

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		( (atomic_t) { (i) } )

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		((v)->counter = (i))

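/*
 * atomic_read() and atomic_set() are plain, unlocked accesses; the
 * atomic read-modify-write operations come from one of the headers
 * selected below: gUSA register-bank sequences (CONFIG_GUSA_RB),
 * LL/SC via movli.l/movco.l on SH-4A, or an IRQ-disabling fallback
 * for everything else.
 */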
#if defined(CONFIG_GUSA_RB)
#include <asm/atomic-grb.h>
#elif defined(CONFIG_CPU_SH4A)
#include <asm/atomic-llsc.h>
#else
#include <asm/atomic-irq.h>
#endif

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
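
/*
 * Typical use of atomic_dec_and_test() (illustrative only; 'obj',
 * 'refcount' and release_obj() are hypothetical): drop a reference
 * and free the object on the final put:
 *
 *	if (atomic_dec_and_test(&obj->refcount))
 *		release_obj(obj);
 */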

#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))

#if !defined(CONFIG_GUSA_RB) && !defined(CONFIG_CPU_SH4A)
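/*
 * Fallbacks for parts with neither gUSA nor LL/SC: emulate cmpxchg and
 * add_unless by briefly disabling interrupts, which is enough to make
 * the read-modify-write appear atomic on the uniprocessor
 * configurations that take this path.
 */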
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);

	return ret;
}

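/*
 * atomic_add_unless - add @a to @v unless @v currently equals @u.
 * Returns nonzero if the add was performed, zero otherwise.
 */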
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);

	return ret != u;
}
#endif /* !CONFIG_GUSA_RB && !CONFIG_CPU_SH4A */

#define atomic_xchg(v, new)		(xchg(&((v)->counter), new))
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
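
/*
 * Example use of atomic_inc_not_zero() (illustrative only; 'obj' and
 * 'refcount' are hypothetical): take a reference during a lookup
 * without resurrecting an object whose count has already hit zero:
 *
 *	if (!atomic_inc_not_zero(&obj->refcount))
 *		return NULL;
 */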

/* Atomic operations are already serializing on SH */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

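/*
 * atomic_long_t wrappers and a generic atomic64_t implementation (the
 * latter emulated with spinlocks in lib/atomic64.c) come from the
 * asm-generic headers below.
 */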
#include <asm-generic/atomic-long.h>
#include <asm-generic/atomic64.h>

#endif /* __ASM_SH_ATOMIC_H */