#ifndef _ASM_X86_SYSTEM_H_
#define _ASM_X86_SYSTEM_H_

#include <asm/asm.h>

#include <linux/kernel.h>

#ifdef CONFIG_X86_32
# include "system_32.h"
#else
# include "system_64.h"
#endif

#ifdef __KERNEL__
#define _set_base(addr, base) do { unsigned long __pr; \
__asm__ __volatile__ ("movw %%dx,%1\n\t" \
	"rorl $16,%%edx\n\t" \
	"movb %%dl,%2\n\t" \
	"movb %%dh,%3" \
	:"=&d" (__pr) \
	:"m" (*((addr)+2)), \
	 "m" (*((addr)+4)), \
	 "m" (*((addr)+7)), \
	 "0" (base) \
	); } while (0)

#define _set_limit(addr, limit) do { unsigned long __lr; \
__asm__ __volatile__ ("movw %%dx,%1\n\t" \
	"rorl $16,%%edx\n\t" \
	"movb %2,%%dh\n\t" \
	"andb $0xf0,%%dh\n\t" \
	"orb %%dh,%%dl\n\t" \
	"movb %%dl,%2" \
	:"=&d" (__lr) \
	:"m" (*(addr)), \
	 "m" (*((addr)+6)), \
	 "0" (limit) \
	); } while (0)

#define set_base(ldt, base) _set_base(((char *)&(ldt)), (base))
#define set_limit(ldt, limit) _set_limit(((char *)&(ldt)), ((limit)-1))

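/*
 * Illustrative sketch, not part of the original header: "desc", "base"
 * and "size" are hypothetical names. set_base() rewrites descriptor
 * bytes 2, 3, 4 and 7 (base bits 0-23 and 24-31); set_limit() takes a
 * size in bytes and stores size-1 in bytes 0-1 plus the low nibble of
 * byte 6, preserving the flags in the high nibble:
 *
 *	set_base(*desc, (unsigned long)base);
 *	set_limit(*desc, size);
 */
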
extern void load_gs_index(unsigned);

/*
 * Load a segment. Fall back on loading the zero
 * segment if something goes wrong.
 */
#define loadsegment(seg, value)			\
	asm volatile("\n"			\
		"1:\t"				\
		"movl %k0,%%" #seg "\n"		\
		"2:\n"				\
		".section .fixup,\"ax\"\n"	\
		"3:\t"				\
		"movl %k1, %%" #seg "\n\t"	\
		"jmp 2b\n"			\
		".previous\n"			\
		".section __ex_table,\"a\"\n\t"	\
		_ASM_ALIGN "\n\t"		\
		_ASM_PTR " 1b,3b\n"		\
		".previous"			\
		: :"r" (value), "r" (0))
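
/*
 * Illustrative sketch, not part of the original header: load the null
 * selector into %gs. If the selector were invalid, the exception fixup
 * above would load 0 instead of faulting:
 *
 *	loadsegment(gs, 0);
 */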

/*
 * Save a segment register away
 */
#define savesegment(seg, value) \
	asm volatile("mov %%" #seg ",%0":"=rm" (value))

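/*
 * Illustrative sketch: stash the current %fs selector in a local
 * variable, e.g. before reloading it ("sel" is a hypothetical name):
 *
 *	unsigned int sel;
 *	savesegment(fs, sel);
 */
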
static inline unsigned long get_limit(unsigned long segment)
{
	unsigned long __limit;
	__asm__("lsll %1,%0"
		:"=r" (__limit):"r" (segment));
	return __limit+1;
}
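
/*
 * Illustrative sketch: lsl yields the segment limit in bytes (it scales
 * page-granular limits), so get_limit() returns the segment size, here
 * for the segment currently loaded in %ds:
 *
 *	unsigned long sel, size;
 *	savesegment(ds, sel);
 *	size = get_limit(sel);
 */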

static inline void native_clts(void)
{
	asm volatile ("clts");
}

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads and stores around it, which can hurt performance. The solution
 * is to use a variable and mimic reads and writes to it to enforce
 * serialization.
 */
static unsigned long __force_order;

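/*
 * Illustrative sketch: each accessor below names __force_order as an
 * operand, so the compiler treats every cr read/write as touching the
 * same variable and keeps them in program order ("pgd" and "val" are
 * hypothetical locals):
 *
 *	native_write_cr3(pgd);		"m" (__force_order) as input
 *	val = native_read_cr3();	"=m" (__force_order) as output
 */
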
static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" :"=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0": :"r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" :"=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": :"r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" :"=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": :"r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
	asm volatile("mov %%cr4,%0\n\t" :"=r" (val), "=m" (__force_order));
	return val;
}

static inline unsigned long native_read_cr4_safe(void)
{
	unsigned long val;
	/*
	 * This could fault if %cr4 does not exist. On x86_64, %cr4 always
	 * exists, so the read can never fault.
	 */
#ifdef CONFIG_X86_32
	asm volatile("1: mov %%cr4, %0		\n"
		"2:				\n"
		".section __ex_table,\"a\"	\n"
		".long 1b,2b			\n"
		".previous			\n"
		: "=r" (val), "=m" (__force_order) : "0" (0));
#else
	val = native_read_cr4();
#endif
	return val;
}
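
/*
 * Illustrative sketch: on a CPU that predates %cr4 (i486 and earlier),
 * native_read_cr4_safe() returns the 0 preloaded into the output
 * operand instead of faulting, so feature tests stay safe
 * ("pae_enabled" is a hypothetical variable):
 *
 *	if (native_read_cr4_safe() & X86_CR4_PAE)
 *		pae_enabled = 1;
 */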

static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4": :"r" (val), "m" (__force_order));
}

static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}
#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#define read_cr0()	(native_read_cr0())
#define write_cr0(x)	(native_write_cr0(x))
#define read_cr2()	(native_read_cr2())
#define write_cr2(x)	(native_write_cr2(x))
#define read_cr3()	(native_read_cr3())
#define write_cr3(x)	(native_write_cr3(x))
#define read_cr4()	(native_read_cr4())
#define read_cr4_safe()	(native_read_cr4_safe())
#define write_cr4(x)	(native_write_cr4(x))
#define wbinvd()	(native_wbinvd())

/* Clear the 'TS' bit */
#define clts()		(native_clts())

#endif /* CONFIG_PARAVIRT */

/* Set the 'TS' bit (0x8) in cr0 */
#define stts() write_cr0(8 | read_cr0())
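
/*
 * Illustrative sketch of lazy FPU context switching: stts() makes the
 * next FPU instruction trap, and the trap handler calls clts() once it
 * has restored the new task's FPU state:
 *
 *	stts();		device_not_available fires on the next FPU use
 *	...
 *	clts();		FPU is live again, no further traps
 */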

#endif /* __KERNEL__ */

static inline void clflush(void *__p)
{
	asm volatile("clflush %0" : "+m" (*(char __force *)__p));
}
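
/*
 * Illustrative sketch: write a buffer, then flush its cache line for a
 * non-coherent observer. clflush is only ordered by mfence, so fence
 * around it when ordering matters ("desc" and "READY" are hypothetical
 * names):
 *
 *	desc->status = READY;
 *	clflush(desc);
 */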

#define nop() __asm__ __volatile__ ("nop")

void disable_hlt(void);
void enable_hlt(void);

extern int es7000_plat;
void cpu_idle_wait(void);

extern unsigned long arch_align_stack(unsigned long sp);
extern void free_init_pages(char *what, unsigned long begin, unsigned long end);

void default_idle(void);

#endif