#ifndef __ALPHA_PERCPU_H
#define __ALPHA_PERCPU_H

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

/*
 * Determine the real variable name from the name visible in the
 * kernel sources.
 */
#define per_cpu_var(var) per_cpu__##var
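
/*
 * Example (illustrative): a variable created with DEFINE_PER_CPU(int, foo)
 * is emitted as the symbol per_cpu__foo, so per_cpu_var(foo) resolves to
 * that real name.
 */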

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 */
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])

#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif
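
/*
 * my_cpu_offset resolves the CPU via smp_processor_id(), so with
 * CONFIG_DEBUG_PREEMPT it inherits the "used in preemptible context"
 * check; __my_cpu_offset uses raw_smp_processor_id() and skips it.
 */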

#ifndef MODULE
#define SHIFT_PERCPU_PTR(var, offset) RELOC_HIDE(&per_cpu_var(var), (offset))
#else
/*
 * To calculate addresses of locally defined variables, GCC uses a
 * 32-bit displacement from the GP, which doesn't work for per cpu
 * variables in modules, as an offset to the kernel per cpu area is
 * way above 4G.
 *
 * This forces allocation of a GOT entry for the per cpu variable,
 * using an ldq instruction with a 'literal' relocation.
 */
#define SHIFT_PERCPU_PTR(var, offset) ({			\
	extern int simple_identifier_##var(void);		\
	unsigned long __ptr, tmp_gp;				\
	asm (	"br %1, 1f				\n\
	1:	ldgp %1, 0(%1)				\n\
		ldq %0, per_cpu__" #var"(%1)\t!literal"		\
		: "=&r"(__ptr), "=&r"(tmp_gp));			\
	(typeof(&per_cpu_var(var)))(__ptr + (offset)); })

#endif /* MODULE */
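
/*
 * Either way, SHIFT_PERCPU_PTR(var, off) evaluates to a pointer 'off'
 * bytes past the linked per_cpu__##var symbol. Illustrative example
 * (with a hypothetical per cpu variable 'foo'):
 *
 *	*SHIFT_PERCPU_PTR(foo, per_cpu_offset(3))	accesses CPU 3's copy
 */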

/*
 * A percpu variable may point to a discarded region. The following are
 * established ways to produce a usable pointer from the percpu variable
 * offset.
 */
#define per_cpu(var, cpu) \
	(*SHIFT_PERCPU_PTR(var, per_cpu_offset(cpu)))
#define __get_cpu_var(var) \
	(*SHIFT_PERCPU_PTR(var, my_cpu_offset))
#define __raw_get_cpu_var(var) \
	(*SHIFT_PERCPU_PTR(var, __my_cpu_offset))
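
/*
 * Usage sketch (illustrative; assumes a DEFINE_PER_CPU(long, stat)
 * defined elsewhere in the kernel proper):
 *
 *	long total = 0;
 *	int cpu;
 *
 *	for_each_possible_cpu(cpu)
 *		total += per_cpu(stat, cpu);
 *
 *	__get_cpu_var(stat)++;		increments this CPU's instance
 */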

#else /* ! SMP */

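/*
 * On UP there is only one instance of each per cpu variable, so the
 * accessors reduce to plain references; the (void)(cpu) comma expression
 * just evaluates 'cpu' and discards it to avoid unused-value warnings.
 */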
#define per_cpu(var, cpu)		(*((void)(cpu), &per_cpu_var(var)))
#define __get_cpu_var(var)		per_cpu_var(var)
#define __raw_get_cpu_var(var)		per_cpu_var(var)

#endif /* SMP */

#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data.percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif

#ifdef CONFIG_SMP

#ifdef MODULE
#define PER_CPU_SHARED_ALIGNED_SECTION ""
#else
#define PER_CPU_SHARED_ALIGNED_SECTION ".shared_aligned"
#endif
#define PER_CPU_FIRST_SECTION ".first"

#else

#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_FIRST_SECTION ""

#endif

#define PER_CPU_ATTRIBUTES
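
/*
 * The section names and attribute above override the generic defaults
 * and are picked up by the generic DEFINE_PER_CPU() machinery
 * (see <linux/percpu-defs.h>) when placing per cpu variables.
 */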

#endif /* __ALPHA_PERCPU_H */