/*
 * linux/include/asm-arm26/atomic.h
 *
 * Copyright (c) 1996 Russell King.
 * Modified for arm26 by Ian Molton
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Changelog:
 *   25-11-2004 IM  Updated for 2.6.9
 *   27-06-1996 RMK Created
 *   13-04-1997 RMK Made functions atomic!
 *   07-12-1997 RMK Upgraded for v2.1.
 *   26-08-1998 PJB Added #ifdef __KERNEL__
 *
 * FIXME - it's probably worth seeing what these compile into...
 */
20#ifndef __ASM_ARM_ATOMIC_H
21#define __ASM_ARM_ATOMIC_H
22
23#include <linux/config.h>
24
25#ifdef CONFIG_SMP
26#error SMP is NOT supported
27#endif
28
29typedef struct { volatile int counter; } atomic_t;
30
31#define ATOMIC_INIT(i) { (i) }
32
33#ifdef __KERNEL__
34#include <asm/system.h>
35
36#define atomic_read(v) ((v)->counter)
37#define atomic_set(v,i) (((v)->counter) = (i))
38
39static inline int atomic_add_return(int i, atomic_t *v)
40{
41 unsigned long flags;
42 int val;
43
44 local_irq_save(flags);
45 val = v->counter;
46 v->counter = val += i;
47 local_irq_restore(flags);
48
49 return val;
50}
51
52static inline int atomic_sub_return(int i, atomic_t *v)
53{
54 unsigned long flags;
55 int val;
56
57 local_irq_save(flags);
58 val = v->counter;
59 v->counter = val -= i;
60 local_irq_restore(flags);
61
62 return val;
63}
64
Nick Piggin4a6dae62005-11-13 16:07:24 -080065static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
66{
67 int ret;
68 unsigned long flags;
69
70 local_irq_save(flags);
71 ret = v->counter;
72 if (likely(ret == old))
73 v->counter = new;
74 local_irq_restore(flags);
75
76 return ret;
77}
78
Nick Piggin8426e1f2005-11-13 16:07:25 -080079static inline int atomic_add_unless(atomic_t *v, int a, int u)
80{
81 int ret;
82 unsigned long flags;
83
84 local_irq_save(flags);
85 ret = v->counter;
86 if (ret != u)
87 v->counter += a;
88 local_irq_restore(flags);
89
90 return ret != u;
91}
92#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
93
/*
 * Atomically clear the bits in @mask from the word at @addr.
 * Operates on a raw unsigned long rather than an atomic_t.
 */
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags;

	local_irq_save(flags);
	*addr = *addr & ~mask;
	local_irq_restore(flags);
}
102
103#define atomic_add(i, v) (void) atomic_add_return(i, v)
104#define atomic_inc(v) (void) atomic_add_return(1, v)
105#define atomic_sub(i, v) (void) atomic_sub_return(i, v)
106#define atomic_dec(v) (void) atomic_sub_return(1, v)
107
108#define atomic_inc_and_test(v) (atomic_add_return(1, v) == 0)
109#define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
110#define atomic_inc_return(v) (atomic_add_return(1, v))
111#define atomic_dec_return(v) (atomic_sub_return(1, v))
112
113#define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)
114
115/* Atomic operations are already serializing on ARM26 */
116#define smp_mb__before_atomic_dec() barrier()
117#define smp_mb__after_atomic_dec() barrier()
118#define smp_mb__before_atomic_inc() barrier()
119#define smp_mb__after_atomic_inc() barrier()
120
Christoph Lameterd3cb4872006-01-06 00:11:20 -0800121#include <asm-generic/atomic.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -0700122#endif
123#endif