blob: 12060e22f7e2a53204f4f8cfec20f95abfa6eeda [file] [log] [blame]
Linus Torvalds1da177e2005-04-16 15:20:36 -07001#ifndef _ARCH_I386_LOCAL_H
2#define _ARCH_I386_LOCAL_H
3
4#include <linux/percpu.h>
5
/*
 * local_t: a simple counter wrapped in a struct.  The accessors below
 * update it in memory; the volatile qualifier forces every read/write
 * to go to the counter itself rather than a cached copy.
 */
typedef struct
{
	volatile long counter;
} local_t;

/* Static initializer: local_t l = LOCAL_INIT(0); */
#define LOCAL_INIT(i)	{ (i) }

/* Plain read/write of the counter (no atomicity implied by these two). */
#define local_read(v)	((v)->counter)
#define local_set(v,i)	(((v)->counter) = (i))
15
16static __inline__ void local_inc(local_t *v)
17{
18 __asm__ __volatile__(
19 "incl %0"
Linus Torvaldsb862f3b2006-07-08 15:24:18 -070020 :"+m" (v->counter));
Linus Torvalds1da177e2005-04-16 15:20:36 -070021}
22
23static __inline__ void local_dec(local_t *v)
24{
25 __asm__ __volatile__(
26 "decl %0"
Linus Torvaldsb862f3b2006-07-08 15:24:18 -070027 :"+m" (v->counter));
Linus Torvalds1da177e2005-04-16 15:20:36 -070028}
29
Andrew Morton2cf8d822006-03-31 02:30:49 -080030static __inline__ void local_add(long i, local_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -070031{
32 __asm__ __volatile__(
33 "addl %1,%0"
Linus Torvaldsb862f3b2006-07-08 15:24:18 -070034 :"+m" (v->counter)
35 :"ir" (i));
Linus Torvalds1da177e2005-04-16 15:20:36 -070036}
37
Andrew Morton2cf8d822006-03-31 02:30:49 -080038static __inline__ void local_sub(long i, local_t *v)
Linus Torvalds1da177e2005-04-16 15:20:36 -070039{
40 __asm__ __volatile__(
41 "subl %1,%0"
Linus Torvaldsb862f3b2006-07-08 15:24:18 -070042 :"+m" (v->counter)
43 :"ir" (i));
Linus Torvalds1da177e2005-04-16 15:20:36 -070044}
45
/*
 * Non-atomic variants.  On x86 the forms above are no better than the
 * atomic variants, so the "relaxed" names simply alias them.
 */
#define __local_inc(l)		local_inc(l)
#define __local_dec(l)		local_dec(l)
#define __local_add(i,l)	local_add((i),(l))
#define __local_sub(i,l)	local_sub((i),(l))
51
/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note they take
 * a variable, not an address.
 */

/* Need to disable preemption for the cpu local counters otherwise we could
   still access a variable of a previous CPU in a non atomic way. */

/* Evaluate expression @v with preemption disabled; yields v's value. */
#define cpu_local_wrap_v(v)		\
	({ local_t res__;		\
	   preempt_disable();		\
	   res__ = (v);			\
	   preempt_enable();		\
	   res__; })
/* Run statement @v with preemption disabled; no value produced.
 * NOTE(review): the original definition carried a stray trailing '\'
 * after the closing "})", silently folding the next (blank) line into
 * the macro — removed here. */
#define cpu_local_wrap(v)		\
	({ preempt_disable();		\
	   v;				\
	   preempt_enable(); })

#define cpu_local_read(v)    cpu_local_wrap_v(local_read(&__get_cpu_var(v)))
#define cpu_local_set(v, i)  cpu_local_wrap(local_set(&__get_cpu_var(v), (i)))
#define cpu_local_inc(v)     cpu_local_wrap(local_inc(&__get_cpu_var(v)))
#define cpu_local_dec(v)     cpu_local_wrap(local_dec(&__get_cpu_var(v)))
#define cpu_local_add(i, v)  cpu_local_wrap(local_add((i), &__get_cpu_var(v)))
#define cpu_local_sub(i, v)  cpu_local_wrap(local_sub((i), &__get_cpu_var(v)))
Linus Torvalds1da177e2005-04-16 15:20:36 -070076
77#define __cpu_local_inc(v) cpu_local_inc(v)
78#define __cpu_local_dec(v) cpu_local_dec(v)
79#define __cpu_local_add(i, v) cpu_local_add((i), (v))
80#define __cpu_local_sub(i, v) cpu_local_sub((i), (v))
81
82#endif /* _ARCH_I386_LOCAL_H */