/* SPDX-License-Identifier: GPL-2.0 */

#ifndef _ASM_X86_NOSPEC_BRANCH_H_
#define _ASM_X86_NOSPEC_BRANCH_H_

#include <asm/alternative.h>
#include <asm/alternative-asm.h>
#include <asm/cpufeatures.h>

#ifdef __ASSEMBLY__

/*
 * This should be used immediately before a retpoline alternative. It tells
 * objtool where the retpolines are so that it can make sense of the control
 * flow by just reading the original instruction(s) and ignoring the
 * alternatives.
 */
.macro ANNOTATE_NOSPEC_ALTERNATIVE
	.Lannotate_\@:
	.pushsection .discard.nospec
	.long .Lannotate_\@ - .
	.popsection
.endm
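
/*
 * Note (added illustration, not from the original comment): the
 * ".long .Lannotate_\@ - ." directive emits a self-relative offset, so a
 * consumer walking .discard.nospec can recover each annotated address as
 * the entry's own location plus its stored value; that is presumably how
 * objtool maps these entries back to the alternatives they mark.
 */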

/*
 * These are the bare retpoline primitives for indirect jmp and call.
 * Do not use these directly; they only exist to make the ALTERNATIVE
 * invocation below less ugly.
 */
.macro RETPOLINE_JMP reg:req
	call	.Ldo_rop_\@
.Lspec_trap_\@:
	pause
	lfence
	jmp	.Lspec_trap_\@
.Ldo_rop_\@:
	mov	\reg, (%_ASM_SP)
	ret
.endm
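
/*
 * Worked illustration (annotation added here, not part of the original
 * comment): with retpolines enabled, "JMP_NOSPEC %rax" behaves roughly as
 *
 *	call	2f		# push address of 1f; RSB predicts return to 1f
 * 1:	pause			# speculative execution lands here ...
 *	lfence			# ... and is stopped dead
 *	jmp	1b
 * 2:	mov	%rax, (%rsp)	# replace the pushed address with the target
 *	ret			# architectural "return" jumps to *%rax
 *
 * The indirect branch is thus replaced by a ret whose prediction can only
 * reach the harmless pause/lfence loop.
 */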

/*
 * This is a wrapper around RETPOLINE_JMP so that the function called
 * through \reg returns to the instruction after the macro.
 */
.macro RETPOLINE_CALL reg:req
	jmp	.Ldo_call_\@
.Ldo_retpoline_jmp_\@:
	RETPOLINE_JMP	\reg
.Ldo_call_\@:
	call	.Ldo_retpoline_jmp_\@
.endm
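
/*
 * Control-flow trace (illustration added here): the initial jmp skips
 * forward to .Ldo_call_\@, whose call pushes the address of the first
 * instruction after the macro and branches back to .Ldo_retpoline_jmp_\@.
 * RETPOLINE_JMP then transfers to *\reg, so when the callee executes ret
 * it returns to the instruction following the macro, exactly as a plain
 * "call *\reg" would.
 */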

/*
 * JMP_NOSPEC and CALL_NOSPEC macros can be used instead of a simple
 * indirect jmp/call which may be susceptible to the Spectre variant 2
 * attack.
 */
.macro JMP_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
	ANNOTATE_NOSPEC_ALTERNATIVE
	ALTERNATIVE_2 __stringify(jmp *\reg),				\
		__stringify(RETPOLINE_JMP \reg), X86_FEATURE_RETPOLINE,	\
		__stringify(lfence; jmp *\reg), X86_FEATURE_RETPOLINE_AMD
#else
	jmp	*\reg
#endif
.endm

.macro CALL_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
	ANNOTATE_NOSPEC_ALTERNATIVE
	ALTERNATIVE_2 __stringify(call *\reg),				\
		__stringify(RETPOLINE_CALL \reg), X86_FEATURE_RETPOLINE,\
		__stringify(lfence; call *\reg), X86_FEATURE_RETPOLINE_AMD
#else
	call	*\reg
#endif
.endm
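
/*
 * Usage sketch (hypothetical .S caller, not part of this header): with
 * the target address loaded into a register, write e.g.
 *
 *	movq	handler(%rip), %r11	# "handler" is an assumed symbol
 *	CALL_NOSPEC %r11
 *
 * instead of "call *%r11"; the alternatives machinery then picks the
 * plain call, the retpoline, or lfence+call according to the CPU
 * feature bits at boot.
 */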

/* This clobbers the BX register */
.macro FILL_RETURN_BUFFER nr:req ftr:req
#ifdef CONFIG_RETPOLINE
	ALTERNATIVE "", "call __clear_rsb", \ftr
#endif
.endm
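
/*
 * Note (added illustration): the macro body emits nothing by default and
 * is patched at boot to "call __clear_rsb" when the feature bit passed in
 * \ftr is set. The out-of-line __clear_rsb routine performs the actual
 * RSB stuffing, which is why BX is clobbered; in this variant the \nr
 * argument is kept for the macro's interface but not used by the body.
 */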

#else /* __ASSEMBLY__ */

#define ANNOTATE_NOSPEC_ALTERNATIVE				\
	"999:\n\t"						\
	".pushsection .discard.nospec\n\t"			\
	".long 999b - .\n\t"					\
	".popsection\n\t"

#if defined(CONFIG_X86_64) && defined(RETPOLINE)

/*
 * Since the inline asm uses the %V modifier which is only in newer GCC,
 * the 64-bit variant depends on RETPOLINE, not CONFIG_RETPOLINE
 * (RETPOLINE is only defined when the compiler actually supports
 * retpolines).
 */
# define CALL_NOSPEC						\
	ANNOTATE_NOSPEC_ALTERNATIVE				\
	ALTERNATIVE(						\
	"call *%[thunk_target]\n",				\
	"call __x86_indirect_thunk_%V[thunk_target]\n",		\
	X86_FEATURE_RETPOLINE)
# define THUNK_TARGET(addr) [thunk_target] "r" (addr)
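
/*
 * Usage sketch (hypothetical C caller, not part of this header): given
 * "void (*fn)(void)", an indirect call through the thunk looks like
 *
 *	asm volatile(CALL_NOSPEC : : THUNK_TARGET(fn) : "memory");
 *
 * Any further clobbers depend on the called function's calling
 * convention and are the caller's responsibility.
 */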

#elif defined(CONFIG_X86_32) && defined(CONFIG_RETPOLINE)
/*
 * For i386 we use the original ret-equivalent retpoline, because
 * otherwise we'll run out of registers. We don't care about CET
 * here, anyway.
 */
# define CALL_NOSPEC ALTERNATIVE("call *%[thunk_target]\n",	\
	"       jmp    904f;\n"					\
	"       .align 16\n"					\
	"901:	call   903f;\n"					\
	"902:	pause;\n"					\
	"	lfence;\n"					\
	"       jmp    902b;\n"					\
	"       .align 16\n"					\
	"903:	addl   $4, %%esp;\n"				\
	"       pushl  %[thunk_target];\n"			\
	"       ret;\n"						\
	"       .align 16\n"					\
	"904:	call   901b;\n",				\
	X86_FEATURE_RETPOLINE)
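
/*
 * Control-flow trace (illustration added here): 904 calls back to 901,
 * pushing the address after the ALTERNATIVE as the real return address.
 * 901 calls 903, pushing 902 (the pause/lfence speculation trap) as the
 * predicted return. 903 then discards that slot with "addl $4, %%esp",
 * pushes the real target, and "ret" jumps to it; any speculation of that
 * ret is steered into the trap at 902.
 */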

# define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
#else /* No retpoline for C / inline asm */
# define CALL_NOSPEC "call *%[thunk_target]\n"
# define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
#endif

/* The Spectre V2 mitigation variants */
enum spectre_v2_mitigation {
	SPECTRE_V2_NONE,
	SPECTRE_V2_RETPOLINE_MINIMAL,
	SPECTRE_V2_RETPOLINE_MINIMAL_AMD,
	SPECTRE_V2_RETPOLINE_GENERIC,
	SPECTRE_V2_RETPOLINE_AMD,
	SPECTRE_V2_IBRS,
};

extern char __indirect_thunk_start[];
extern char __indirect_thunk_end[];

/*
 * On VMEXIT we must ensure that no RSB predictions learned in the guest
 * can be followed in the host, by overwriting the RSB completely. Both
 * retpoline and IBRS mitigations for Spectre v2 need this; only on future
 * CPUs with IBRS_ALL *might* it be avoided.
 */
static inline void vmexit_fill_RSB(void)
{
#ifdef CONFIG_RETPOLINE
	alternative_input("",
			  "call __fill_rsb",
			  X86_FEATURE_RETPOLINE,
			  ASM_NO_INPUT_CLOBBER(_ASM_BX, "memory"));
#endif
}
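
/*
 * Call-site sketch (assumption about usage, not defined here): a
 * hypervisor exit path would invoke this immediately after returning
 * from the guest, e.g.
 *
 *	... hardware VM exit ...
 *	vmexit_fill_RSB();	// stomp guest RSB entries before any ret
 *
 * so that no host ret can consume a guest-trained RSB entry.
 */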

static inline void indirect_branch_prediction_barrier(void)
{
	alternative_input("",
			  "call __ibp_barrier",
			  X86_FEATURE_USE_IBPB,
			  ASM_NO_INPUT_CLOBBER("eax", "ecx", "edx", "memory"));
}
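
/*
 * Note (added illustration): __ibp_barrier is expected to issue an IBPB,
 * i.e. a write of PRED_CMD_IBPB to MSR_IA32_PRED_CMD, flushing indirect
 * branch predictions; the eax/ecx/edx clobbers match wrmsr register
 * usage. A typical (assumed) call site is a switch to a mutually
 * distrusting task:
 *
 *	if (tsk_needs_ibpb)	// hypothetical condition
 *		indirect_branch_prediction_barrier();
 */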

#endif /* __ASSEMBLY__ */
#endif /* _ASM_X86_NOSPEC_BRANCH_H_ */