blob: 0177da80dde34f68b98dee1a533493b780c8d62f [file] [log] [blame]
#ifndef _ARCH_I386_LOCAL_H
#define _ARCH_I386_LOCAL_H

#include <linux/percpu.h>

/*
 * local_t: a counter intended for manipulation only by its owning CPU
 * (typically declared per-cpu).  On i386 the operations below compile to
 * a single memory read-modify-write instruction, which is atomic with
 * respect to interrupts on the local CPU.  There is no "lock" prefix,
 * so these are NOT atomic across CPUs -- do not share a local_t between
 * CPUs without additional synchronization.
 */
typedef struct
{
	volatile unsigned long counter;
} local_t;

/* Static initializer: local_t l = LOCAL_INIT(0); */
#define LOCAL_INIT(i)	{ (i) }

/* Plain (non-asm) read/write of the counter. */
#define local_read(v)	((v)->counter)
#define local_set(v,i)	(((v)->counter) = (i))
16static __inline__ void local_inc(local_t *v)
17{
18 __asm__ __volatile__(
19 "incl %0"
20 :"=m" (v->counter)
21 :"m" (v->counter));
22}
23
24static __inline__ void local_dec(local_t *v)
25{
26 __asm__ __volatile__(
27 "decl %0"
28 :"=m" (v->counter)
29 :"m" (v->counter));
30}
31
32static __inline__ void local_add(unsigned long i, local_t *v)
33{
34 __asm__ __volatile__(
35 "addl %1,%0"
36 :"=m" (v->counter)
37 :"ir" (i), "m" (v->counter));
38}
39
40static __inline__ void local_sub(unsigned long i, local_t *v)
41{
42 __asm__ __volatile__(
43 "subl %1,%0"
44 :"=m" (v->counter)
45 :"ir" (i), "m" (v->counter));
46}
47
/* On x86, these are no better than the atomic variants. */
/*
 * Generic code provides __local_* as relaxed versions for contexts that
 * need no interrupt safety; on x86 the interrupt-safe operations above
 * are already a single instruction, so the relaxed forms simply alias
 * them.
 */
#define __local_inc(l)		local_inc(l)
#define __local_dec(l)		local_dec(l)
#define __local_add(i,l)	local_add((i),(l))
#define __local_sub(i,l)	local_sub((i),(l))
53
/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations. Note they take
 * a variable, not an address.
 */
/* Each macro resolves this CPU's instance via __get_cpu_var() and then
 * applies the corresponding local_* operation to its address. */
#define cpu_local_read(v)	local_read(&__get_cpu_var(v))
#define cpu_local_set(v, i)	local_set(&__get_cpu_var(v), (i))
#define cpu_local_inc(v)	local_inc(&__get_cpu_var(v))
#define cpu_local_dec(v)	local_dec(&__get_cpu_var(v))
#define cpu_local_add(i, v)	local_add((i), &__get_cpu_var(v))
#define cpu_local_sub(i, v)	local_sub((i), &__get_cpu_var(v))

/* Relaxed per-cpu forms; same aliasing rationale as __local_* above. */
#define __cpu_local_inc(v)	cpu_local_inc(v)
#define __cpu_local_dec(v)	cpu_local_dec(v)
#define __cpu_local_add(i, v)	cpu_local_add((i), (v))
#define __cpu_local_sub(i, v)	cpu_local_sub((i), (v))

#endif /* _ARCH_I386_LOCAL_H */