]>
Commit | Line | Data |
---|---|---|
ac0999a8 DW |
1 | /* SPDX-License-Identifier: GPL-2.0 */ |
2 | ||
3 | #ifndef __NOSPEC_BRANCH_H__ | |
4 | #define __NOSPEC_BRANCH_H__ | |
5 | ||
6 | #include <asm/alternative.h> | |
7 | #include <asm/alternative-asm.h> | |
8 | #include <asm/cpufeatures.h> | |
9 | ||
10 | #ifdef __ASSEMBLY__ | |
11 | ||
/*
 * This should be used immediately before a retpoline alternative. It tells
 * objtool where the retpolines are so that it can make sense of the control
 * flow by just reading the original instruction(s) and ignoring the
 * alternatives.
 *
 * It records a PC-relative offset to this location in the
 * .discard.nospec section (dropped from the final image).
 */
.macro ANNOTATE_NOSPEC_ALTERNATIVE
	.Lannotate_\@:
	.pushsection .discard.nospec
	.long .Lannotate_\@ - .
	.popsection
.endm
24 | ||
/*
 * This should be used immediately before an indirect jump/call. It tells
 * objtool the subsequent indirect jump/call is vouched safe for retpoline
 * builds.
 *
 * The annotated address is recorded (pointer-sized, via _ASM_PTR) in the
 * .discard.retpoline_safe section.
 */
.macro ANNOTATE_RETPOLINE_SAFE
	.Lannotate_\@:
	.pushsection .discard.retpoline_safe
	_ASM_PTR .Lannotate_\@
	.popsection
.endm
36 | ||
/*
 * These are the bare retpoline primitives for indirect jmp and call.
 * Do not use these directly; they only exist to make the ALTERNATIVE
 * invocation below less ugly.
 */
.macro RETPOLINE_JMP reg:req
	call	.Ldo_rop_\@		/* push address of the spec trap below */
.Lspec_trap_\@:
	/* Speculative execution lands here and spins harmlessly. */
	pause
	lfence
	jmp	.Lspec_trap_\@
.Ldo_rop_\@:
	/* Replace the pushed return address with the real target ... */
	mov	\reg, (%_ASM_SP)
	/* ... so this RET architecturally jumps to *\reg. */
	ret
.endm
52 | ||
/*
 * This is a wrapper around RETPOLINE_JMP so the called function in reg
 * returns to the instruction after the macro.
 */
.macro RETPOLINE_CALL reg:req
	jmp	.Ldo_call_\@		/* skip over the thunk body */
.Ldo_retpoline_jmp_\@:
	RETPOLINE_JMP	\reg
.Ldo_call_\@:
	/* Pushes the post-macro return address, then enters the thunk. */
	call	.Ldo_retpoline_jmp_\@
.endm
64 | ||
/*
 * JMP_NOSPEC and CALL_NOSPEC macros can be used instead of a simple
 * indirect jmp/call which may be susceptible to the Spectre variant 2
 * attack.
 *
 * Three runtime-patched variants:
 *  - default: annotated plain indirect jump,
 *  - X86_FEATURE_RETPOLINE: full retpoline thunk (RETPOLINE_JMP),
 *  - X86_FEATURE_RETPOLINE_AMD: lfence + annotated indirect jump.
 */
.macro JMP_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
	ANNOTATE_NOSPEC_ALTERNATIVE
	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *\reg),	\
		__stringify(RETPOLINE_JMP \reg), X86_FEATURE_RETPOLINE,	\
		__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *\reg), X86_FEATURE_RETPOLINE_AMD
#else
	jmp	*\reg
#endif
.endm
80 | ||
/*
 * Indirect-call counterpart of JMP_NOSPEC: same three alternatives,
 * using RETPOLINE_CALL so control returns after the macro.
 */
.macro CALL_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
	ANNOTATE_NOSPEC_ALTERNATIVE
	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *\reg),	\
		__stringify(RETPOLINE_CALL \reg), X86_FEATURE_RETPOLINE,\
		__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *\reg), X86_FEATURE_RETPOLINE_AMD
#else
	call	*\reg
#endif
.endm
91 | ||
/*
 * Fill the CPU return stack buffer to prevent RSB underflow attacks.
 * This clobbers the BX register.
 *
 * NOTE: \nr is accepted for interface compatibility but is unused here;
 * the actual stuffing loop lives in __clear_rsb (defined elsewhere).
 * The call is patched in only when \ftr is set.
 */
.macro FILL_RETURN_BUFFER nr:req ftr:req
#ifdef CONFIG_RETPOLINE
	ALTERNATIVE "", "call __clear_rsb", \ftr
#endif
.endm
98 | ||
99 | #else /* __ASSEMBLY__ */ | |
100 | ||
/*
 * Inline-asm (C) version of the ANNOTATE_NOSPEC_ALTERNATIVE assembler
 * macro above: emits a PC-relative entry into .discard.nospec so objtool
 * can locate the retpoline alternative that follows.
 */
#define ANNOTATE_NOSPEC_ALTERNATIVE				\
	"999:\n\t"						\
	".pushsection .discard.nospec\n\t"			\
	".long 999b - .\n\t"					\
	".popsection\n\t"
106 | ||
/*
 * Inline-asm (C) version of the ANNOTATE_RETPOLINE_SAFE assembler macro
 * above: records the address of the following indirect jump/call in
 * .discard.retpoline_safe, marking it as vouched safe to objtool.
 */
#define ANNOTATE_RETPOLINE_SAFE					\
	"999:\n\t"						\
	".pushsection .discard.retpoline_safe\n\t"		\
	_ASM_PTR " 999b\n\t"					\
	".popsection\n\t"
112 | ||
#if defined(CONFIG_X86_64) && defined(RETPOLINE)

/*
 * Since the inline asm uses the %V modifier which is only in newer GCC,
 * the 64-bit one is dependent on RETPOLINE not CONFIG_RETPOLINE.
 */
# define CALL_NOSPEC						\
	ANNOTATE_NOSPEC_ALTERNATIVE				\
	ALTERNATIVE(						\
	ANNOTATE_RETPOLINE_SAFE					\
	"call *%[thunk_target]\n",				\
	"call __x86_indirect_thunk_%V[thunk_target]\n",		\
	X86_FEATURE_RETPOLINE)
/* %V needs a register operand, hence "r" (not "rm") here. */
# define THUNK_TARGET(addr) [thunk_target] "r" (addr)

#elif defined(CONFIG_X86_32) && defined(CONFIG_RETPOLINE)
/*
 * For i386 we use the original ret-equivalent retpoline, because
 * otherwise we'll run out of registers. We don't care about CET
 * here, anyway.
 *
 * Labels: 904 is the entry, 901 the thunk, 902 the speculation trap,
 * 903 rewrites the return address on the stack before RET.
 */
# define CALL_NOSPEC						\
	ALTERNATIVE(						\
	ANNOTATE_RETPOLINE_SAFE					\
	"call *%[thunk_target]\n",				\
	" jmp 904f;\n"						\
	" .align 16\n"						\
	"901: call 903f;\n"					\
	"902: pause;\n"						\
	" lfence;\n"						\
	" jmp 902b;\n"						\
	" .align 16\n"						\
	"903: addl $4, %%esp;\n"				\
	" pushl %[thunk_target];\n"				\
	" ret;\n"						\
	" .align 16\n"						\
	"904: call 901b;\n",					\
	X86_FEATURE_RETPOLINE)

# define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
#else /* No retpoline for C / inline asm */
# define CALL_NOSPEC "call *%[thunk_target]\n"
# define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
#endif
157 | ||
/*
 * The Spectre V2 mitigation variants.  Which one is in effect is chosen
 * at boot by the mitigation-selection code (defined elsewhere).
 */
enum spectre_v2_mitigation {
	SPECTRE_V2_NONE,
	SPECTRE_V2_RETPOLINE_MINIMAL,
	SPECTRE_V2_RETPOLINE_MINIMAL_AMD,
	SPECTRE_V2_RETPOLINE_GENERIC,
	SPECTRE_V2_RETPOLINE_AMD,
	SPECTRE_V2_IBRS,
};
167 | ||
/*
 * The Intel specification for the SPEC_CTRL MSR requires that we
 * preserve any already set reserved bits at boot time (e.g. for
 * future additions that this kernel is not currently aware of).
 * We then set any additional mitigation bits that we want
 * ourselves and always use this as the base for SPEC_CTRL.
 * We also use this when handling guest entry/exit as below.
 */
/* Set SPEC_CTRL mitigation bits on top of the preserved base value. */
extern void x86_spec_ctrl_set(u64);
/* Return the host's baseline SPEC_CTRL value. */
extern u64 x86_spec_ctrl_get_default(void);
/*
 * On VMENTER we must preserve whatever view of the SPEC_CTRL MSR
 * the guest has, while on VMEXIT we restore the host view. This
 * would be easier if SPEC_CTRL were architecturally maskable or
 * shadowable for guests but this is not (currently) the case.
 * Takes the guest view of SPEC_CTRL MSR as a parameter.
 */
extern void x86_spec_ctrl_set_guest(u64);
extern void x86_spec_ctrl_restore_host(u64);
188 | ||
/*
 * On VMEXIT we must ensure that no RSB predictions learned in the guest
 * can be followed in the host, by overwriting the RSB completely. Both
 * retpoline and IBRS mitigations for Spectre v2 need this; only on future
 * CPUs with IBRS_ATT *might* it be avoided.
 */
static inline void vmexit_fill_RSB(void)
{
#ifdef CONFIG_RETPOLINE
	/*
	 * Patched to a call only when X86_FEATURE_RETPOLINE is set.
	 * __fill_rsb (defined elsewhere) clobbers BX, hence the explicit
	 * _ASM_BX entry in the clobber list.
	 */
	alternative_input("",
			  "call __fill_rsb",
			  X86_FEATURE_RETPOLINE,
			  ASM_NO_INPUT_CLOBBER(_ASM_BX, "memory"));
#endif
}
88af5c9c | 204 | |
e6c91974 LT |
205 | static __always_inline |
206 | void alternative_msr_write(unsigned int msr, u64 val, unsigned int feature) | |
207 | { | |
208 | asm volatile(ALTERNATIVE("", "wrmsr", %c[feature]) | |
209 | : : "c" (msr), | |
210 | "a" (val), | |
211 | "d" (val >> 32), | |
212 | [feature] "i" (feature) | |
213 | : "memory"); | |
214 | } | |
9a406175 | 215 | |
ac0999a8 DW |
216 | #endif /* __ASSEMBLY__ */ |
217 | #endif /* __NOSPEC_BRANCH_H__ */ |