blob: 97e944fe1cff4c9bf4d9c0eb7603fca6e43bf916 [file] [log] [blame]
Linus Torvalds1da177e2005-04-16 15:20:36 -07001/*
2 * linux/include/asm-arm26/atomic.h
3 *
4 * Copyright (c) 1996 Russell King.
5 * Modified for arm26 by Ian Molton
6 *
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License version 2 as
9 * published by the Free Software Foundation.
10 *
11 * Changelog:
12 * 25-11-2004 IM Updated for 2.6.9
13 * 27-06-1996 RMK Created
14 * 13-04-1997 RMK Made functions atomic!
15 * 07-12-1997 RMK Upgraded for v2.1.
16 * 26-08-1998 PJB Added #ifdef __KERNEL__
17 *
18 * FIXME - its probably worth seeing what these compile into...
19 */
20#ifndef __ASM_ARM_ATOMIC_H
21#define __ASM_ARM_ATOMIC_H
22
Linus Torvalds1da177e2005-04-16 15:20:36 -070023
24#ifdef CONFIG_SMP
25#error SMP is NOT supported
26#endif
27
/*
 * arm26 is uniprocessor-only (enforced by the CONFIG_SMP #error above),
 * so atomicity only needs to hold against interrupts.  The counter is a
 * plain int protected by irq-disable in the helpers below; 'volatile'
 * forces the compiler to re-read it on every access.
 */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer: atomic_t v = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }
31
32#ifdef __KERNEL__
33#include <asm/system.h>
34
/*
 * A single aligned int load/store is naturally atomic on a UP machine,
 * so plain accesses suffice here -- no irq masking needed.
 */
#define atomic_read(v) ((v)->counter)
#define atomic_set(v,i) (((v)->counter) = (i))
37
38static inline int atomic_add_return(int i, atomic_t *v)
39{
40 unsigned long flags;
41 int val;
42
43 local_irq_save(flags);
44 val = v->counter;
45 v->counter = val += i;
46 local_irq_restore(flags);
47
48 return val;
49}
50
51static inline int atomic_sub_return(int i, atomic_t *v)
52{
53 unsigned long flags;
54 int val;
55
56 local_irq_save(flags);
57 val = v->counter;
58 v->counter = val -= i;
59 local_irq_restore(flags);
60
61 return val;
62}
63
Nick Piggin4a6dae62005-11-13 16:07:24 -080064static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
65{
66 int ret;
67 unsigned long flags;
68
69 local_irq_save(flags);
70 ret = v->counter;
71 if (likely(ret == old))
72 v->counter = new;
73 local_irq_restore(flags);
74
75 return ret;
76}
77
Ingo Molnarffbf6702006-01-09 15:59:17 -080078#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
79
Nick Piggin8426e1f2005-11-13 16:07:25 -080080static inline int atomic_add_unless(atomic_t *v, int a, int u)
81{
82 int ret;
83 unsigned long flags;
84
85 local_irq_save(flags);
86 ret = v->counter;
87 if (ret != u)
88 v->counter += a;
89 local_irq_restore(flags);
90
91 return ret != u;
92}
93#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
94
Linus Torvalds1da177e2005-04-16 15:20:36 -070095static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
96{
97 unsigned long flags;
98
99 local_irq_save(flags);
100 *addr &= ~mask;
101 local_irq_restore(flags);
102}
103
/* Value-discarding wrappers around the *_return primitives above. */
#define atomic_add(i, v) (void) atomic_add_return(i, v)
#define atomic_inc(v) (void) atomic_add_return(1, v)
#define atomic_sub(i, v) (void) atomic_sub_return(i, v)
#define atomic_dec(v) (void) atomic_sub_return(1, v)

/* Tests on the post-operation value: true when the result is zero. */
#define atomic_inc_and_test(v) (atomic_add_return(1, v) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, v) == 0)
#define atomic_inc_return(v) (atomic_add_return(1, v))
#define atomic_dec_return(v) (atomic_sub_return(1, v))

/* True when @v goes negative after adding @i. */
#define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)

/* Atomic operations are already serializing on ARM26 */
#define smp_mb__before_atomic_dec() barrier()
#define smp_mb__after_atomic_dec() barrier()
#define smp_mb__before_atomic_inc() barrier()
#define smp_mb__after_atomic_inc() barrier()
121
Christoph Lameterd3cb4872006-01-06 00:11:20 -0800122#include <asm-generic/atomic.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -0700123#endif
124#endif