blob: 1552c8653990e46bbba426c3dda5e0d05fa2190c [file] [log] [blame]
/*
 * linux/include/asm-arm26/atomic.h
 *
 * Copyright (c) 1996 Russell King.
 * Modified for arm26 by Ian Molton
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Changelog:
 *  25-11-2004 IM      Updated for 2.6.9
 *  27-06-1996 RMK     Created
 *  13-04-1997 RMK     Made functions atomic!
 *  07-12-1997 RMK     Upgraded for v2.1.
 *  26-08-1998 PJB     Added #ifdef __KERNEL__
 *
 * FIXME - it's probably worth seeing what these compile into...
 */
20#ifndef __ASM_ARM_ATOMIC_H
21#define __ASM_ARM_ATOMIC_H
22
23#include <linux/config.h>
24
25#ifdef CONFIG_SMP
26#error SMP is NOT supported
27#endif
28
/*
 * Atomic counter type.  The counter is declared volatile so the
 * compiler re-reads it from memory on every access rather than caching
 * it in a register.
 */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer for an atomic_t, e.g.: atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }
32
33#ifdef __KERNEL__
34#include <asm/system.h>
35
/*
 * Plain, unprotected read/write of the counter.  No irq disabling is
 * done here — a single aligned int access is assumed indivisible on
 * this (uniprocessor-only, see the CONFIG_SMP #error above) platform.
 */
#define atomic_read(v)	((v)->counter)
#define atomic_set(v,i)	(((v)->counter) = (i))
38
39static inline int atomic_add_return(int i, atomic_t *v)
40{
41 unsigned long flags;
42 int val;
43
44 local_irq_save(flags);
45 val = v->counter;
46 v->counter = val += i;
47 local_irq_restore(flags);
48
49 return val;
50}
51
52static inline int atomic_sub_return(int i, atomic_t *v)
53{
54 unsigned long flags;
55 int val;
56
57 local_irq_save(flags);
58 val = v->counter;
59 v->counter = val -= i;
60 local_irq_restore(flags);
61
62 return val;
63}
64
Nick Piggin4a6dae62005-11-13 16:07:24 -080065static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
66{
67 int ret;
68 unsigned long flags;
69
70 local_irq_save(flags);
71 ret = v->counter;
72 if (likely(ret == old))
73 v->counter = new;
74 local_irq_restore(flags);
75
76 return ret;
77}
78
/* Atomically store @new into the counter, returning the previous value. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
80
Nick Piggin8426e1f2005-11-13 16:07:25 -080081static inline int atomic_add_unless(atomic_t *v, int a, int u)
82{
83 int ret;
84 unsigned long flags;
85
86 local_irq_save(flags);
87 ret = v->counter;
88 if (ret != u)
89 v->counter += a;
90 local_irq_restore(flags);
91
92 return ret != u;
93}
/* Increment @v unless it is zero; non-zero return means it was incremented. */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
95
/*
 * Atomically clear the bits in @mask from the word at @addr.
 *
 * Note this operates on a plain unsigned long, not an atomic_t; the
 * irq disable makes the read-modify-write indivisible.
 */
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags;

	local_irq_save(flags);
	*addr = *addr & ~mask;
	local_irq_restore(flags);
}
104
/*
 * The remaining operations are all built on the *_return primitives
 * above; the (void) casts discard the returned value.
 */
#define atomic_add(i, v)	(void) atomic_add_return(i, v)
#define atomic_inc(v)		(void) atomic_add_return(1, v)
#define atomic_sub(i, v)	(void) atomic_sub_return(i, v)
#define atomic_dec(v)		(void) atomic_sub_return(1, v)

/* Non-zero when the counter is zero after the increment/decrement. */
#define atomic_inc_and_test(v)	(atomic_add_return(1, v) == 0)
#define atomic_dec_and_test(v)	(atomic_sub_return(1, v) == 0)
#define atomic_inc_return(v)	(atomic_add_return(1, v))
#define atomic_dec_return(v)	(atomic_sub_return(1, v))

/* Non-zero when the counter is negative after adding @i. */
#define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)
116
/*
 * Atomic operations are already serializing on ARM26 (uniprocessor
 * only — see the CONFIG_SMP #error above), so these only need to be
 * compiler barriers to prevent reordering, not hardware barriers.
 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
122
Christoph Lameterd3cb4872006-01-06 00:11:20 -0800123#include <asm-generic/atomic.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -0700124#endif
125#endif