#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H


#ifdef __KERNEL__

#include <asm/nops.h>

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads and stores around it, which can hurt performance.  The solution
 * is to use a variable and mimic reads and writes to it to enforce
 * serialization.
 */
extern unsigned long __force_order;
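
/*
 * Each accessor below names __force_order as a dummy "=m" output (reads)
 * or "m" input (writes); the artificial dependency on that variable keeps
 * the compiler from reordering the asm statements against one another
 * without the cost of a full memory clobber.
 */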

static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
#ifdef CONFIG_X86_32
	/*
	 * This could fault if CR4 does not exist.  Non-existent CR4 is
	 * functionally equivalent to CR4 == 0, so keep it simple and
	 * pretend that CR4 == 0 on CPUs that don't have it: val is
	 * preloaded with 0 via the "0" (0) input, and the exception
	 * fixup resumes at label 2 with that 0 intact if the mov faults.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	/* CR4 always exists on x86_64. */
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
#endif
	return val;
}

static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
}

#ifdef CONFIG_X86_64
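/* On x86_64, CR8 mirrors the local APIC task-priority register (TPR). */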
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
static inline u32 __read_pkru(void)
{
	u32 ecx = 0;
	u32 edx, pkru;

	/*
	 * "rdpkru" instruction.  Places PKRU contents into EAX,
	 * clears EDX and requires that ecx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (pkru), "=d" (edx)
		     : "c" (ecx));
	return pkru;
}
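
/*
 * PKRU holds two bits per protection key: access-disable at bit 2*pkey
 * and write-disable at bit 2*pkey + 1.  Purely as an illustrative sketch
 * (not part of this header), a caller could test whether a key permits
 * writes like so:
 *
 *	u32 pkru = __read_pkru();
 *	bool pkey_writable = !(pkru & (1U << (2 * pkey + 1)));
 */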

static inline void __write_pkru(u32 pkru)
{
	u32 ecx = 0, edx = 0;

	/*
	 * "wrpkru" instruction.  Loads the contents of EAX into PKRU
	 * and requires that ecx = edx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (pkru), "c"(ecx), "d"(edx));
}
#else
static inline u32 __read_pkru(void)
{
	return 0;
}

static inline void __write_pkru(u32 pkru)
{
}
#endif

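/*
 * Write back and invalidate all cache levels.  WBINVD is a serializing
 * instruction and can take a very long time; the targeted clflush family
 * below is preferable when only specific lines need flushing.
 */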
static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

extern asmlinkage void native_load_gs_index(unsigned);
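/*
 * native_load_gs_index() lives in assembly (entry_64.S): reloading %gs
 * has to cope with faulting selectors and with swapgs handling.
 */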

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

static inline unsigned long read_cr3(void)
{
	return native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}

static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
	return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
	native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
	native_load_gs_index(selector);
}

#endif

#endif /* CONFIG_PARAVIRT */

static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

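/*
 * clflushopt is encoded as a 0x66-prefixed clflush.  The alternative
 * below pads the fallback clflush with a %ds prefix so both variants
 * are the same length and can be patched in place.
 */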
static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}

static inline void clwb(volatile void *__p)
{
	volatile struct { char x[64]; } *p = __p;

	asm volatile(ALTERNATIVE_2(
		".byte " __stringify(NOP_DS_PREFIX) "; clflush (%[pax])",
		".byte 0x66; clflush (%[pax])", /* clflushopt (%%rax) */
		X86_FEATURE_CLFLUSHOPT,
		".byte 0x66, 0x0f, 0xae, 0x30", /* clwb (%%rax) */
		X86_FEATURE_CLWB)
		: [p] "+m" (*p)
		: [pax] "a" (p));
}
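
/*
 * Unlike clflush/clflushopt, clwb may leave the line valid in the cache
 * after writing it back.  An illustrative sketch of writing back a whole
 * buffer (assuming 64-byte lines; real code should use
 * boot_cpu_data.x86_clflush_size):
 *
 *	for (char *vaddr = buf; vaddr < buf + size; vaddr += 64)
 *		clwb(vaddr);
 */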
234
David Howellsf05e7982012-03-28 18:11:12 +0100235#define nop() asm volatile ("nop")
236
237
238#endif /* __KERNEL__ */
239
240#endif /* _ASM_X86_SPECIAL_INSNS_H */