1 /* SPDX-License-Identifier: GPL-2.0 */
3 #ifndef __NOSPEC_BRANCH_H__
4 #define __NOSPEC_BRANCH_H__
6 #include <asm/alternative.h>
7 #include <asm/alternative-asm.h>
8 #include <asm/cpufeatures.h>
13 * This should be used immediately before a retpoline alternative. It tells
14 * objtool where the retpolines are so that it can make sense of the control
15 * flow by just reading the original instruction(s) and ignoring the
18 .macro ANNOTATE_NOSPEC_ALTERNATIVE
20 .pushsection
.discard
.nospec
21 .long .Lannotate_\@
- .
26 * This should be used immediately before an indirect jump/call. It tells
27 * objtool the subsequent indirect jump/call is vouched safe for retpoline
30 .macro ANNOTATE_RETPOLINE_SAFE
32 .pushsection
.discard
.retpoline_safe
33 _ASM_PTR
.Lannotate_\@
38 * These are the bare retpoline primitives for indirect jmp and call.
39 * Do not use these directly; they only exist to make the ALTERNATIVE
40 * invocation below less ugly.
42 .macro RETPOLINE_JMP reg
:req
54 * This is a wrapper around RETPOLINE_JMP so the called function in reg
55 * returns to the instruction after the macro.
57 .macro RETPOLINE_CALL reg
:req
59 .Ldo_retpoline_jmp_\@
:
62 call
.Ldo_retpoline_jmp_\@
66 * JMP_NOSPEC and CALL_NOSPEC macros can be used instead of a simple
67 * indirect jmp/call which may be susceptible to the Spectre variant 2
70 .macro JMP_NOSPEC reg
:req
71 #ifdef CONFIG_RETPOLINE
72 ANNOTATE_NOSPEC_ALTERNATIVE
73 ALTERNATIVE_2
__stringify(ANNOTATE_RETPOLINE_SAFE
; jmp
*\reg
), \
74 __stringify(RETPOLINE_JMP
\reg
), X86_FEATURE_RETPOLINE
, \
75 __stringify(lfence
; ANNOTATE_RETPOLINE_SAFE
; jmp
*\reg
), X86_FEATURE_RETPOLINE_AMD
81 .macro CALL_NOSPEC reg
:req
82 #ifdef CONFIG_RETPOLINE
83 ANNOTATE_NOSPEC_ALTERNATIVE
84 ALTERNATIVE_2
__stringify(ANNOTATE_RETPOLINE_SAFE
; call
*\reg
), \
85 __stringify(RETPOLINE_CALL
\reg
), X86_FEATURE_RETPOLINE
,\
86 __stringify(lfence
; ANNOTATE_RETPOLINE_SAFE
; call
*\reg
), X86_FEATURE_RETPOLINE_AMD
92 /* This clobbers the BX register */
93 .macro FILL_RETURN_BUFFER nr
:req ftr
:req
94 #ifdef CONFIG_RETPOLINE
95 ALTERNATIVE
"", "call __clear_rsb", \ftr
99 #else /* __ASSEMBLY__ */
/* C/inline-asm version of the objtool annotation: emit a local 999 label
 * and record it (self-relative) in the discarded .discard.nospec section. */
#define ANNOTATE_NOSPEC_ALTERNATIVE				\
	"999:\n\t"						\
	".pushsection .discard.nospec\n\t"			\
	".long 999b - .\n\t"					\
	".popsection\n\t"
/* C/inline-asm version: mark the following indirect branch as vouched
 * retpoline-safe for objtool via .discard.retpoline_safe. */
#define ANNOTATE_RETPOLINE_SAFE					\
	"999:\n\t"						\
	".pushsection .discard.retpoline_safe\n\t"		\
	_ASM_PTR " 999b\n\t"					\
	".popsection\n\t"
113 #if defined(CONFIG_X86_64) && defined(RETPOLINE)
116 * Since the inline asm uses the %V modifier which is only in newer GCC,
117 * the 64-bit one is dependent on RETPOLINE not CONFIG_RETPOLINE.
119 # define CALL_NOSPEC \
120 ANNOTATE_NOSPEC_ALTERNATIVE \
122 ANNOTATE_RETPOLINE_SAFE \
123 "call *%[thunk_target]\n", \
124 "call __x86_indirect_thunk_%V[thunk_target]\n", \
125 X86_FEATURE_RETPOLINE)
126 # define THUNK_TARGET(addr) [thunk_target] "r" (addr)
128 #elif defined(CONFIG_X86_32) && defined(CONFIG_RETPOLINE)
130 * For i386 we use the original ret-equivalent retpoline, because
131 * otherwise we'll run out of registers. We don't care about CET
134 # define CALL_NOSPEC \
136 ANNOTATE_RETPOLINE_SAFE \
137 "call *%[thunk_target]\n", \
140 "901: call 903f;\n" \
145 "903: addl $4, %%esp;\n" \
146 " pushl %[thunk_target];\n" \
149 "904: call 901b;\n", \
150 X86_FEATURE_RETPOLINE)
152 # define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
153 #else /* No retpoline for C / inline asm */
154 # define CALL_NOSPEC "call *%[thunk_target]\n"
155 # define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
/* The Spectre V2 mitigation variants */
enum spectre_v2_mitigation {
	SPECTRE_V2_NONE,		/* no mitigation */
	SPECTRE_V2_RETPOLINE_MINIMAL,	/* minimal ASM retpoline, generic */
	SPECTRE_V2_RETPOLINE_MINIMAL_AMD,	/* minimal ASM retpoline, AMD lfence */
	SPECTRE_V2_RETPOLINE_GENERIC,	/* full compiler retpoline */
	SPECTRE_V2_RETPOLINE_AMD,	/* full retpoline, AMD lfence variant */
	SPECTRE_V2_IBRS,		/* Indirect Branch Restricted Speculation */
};
/*
 * The Intel specification for the SPEC_CTRL MSR requires that we
 * preserve any already set reserved bits at boot time (e.g. for
 * future additions that this kernel is not currently aware of).
 * We then set any additional mitigation bits that we want
 * ourselves and always use this as the base for SPEC_CTRL.
 * We also use this when handling guest entry/exit as below.
 */
/* Write the given mitigation bits on top of the preserved base value. */
extern void x86_spec_ctrl_set(u64);
/* Return the host's default SPEC_CTRL value (base + chosen mitigations). */
extern u64 x86_spec_ctrl_get_default(void);
/* The Speculative Store Bypass disable variants */
enum ssb_mitigation {
	SPEC_STORE_BYPASS_NONE,		/* SSB left enabled */
	SPEC_STORE_BYPASS_DISABLE,	/* SSB disabled system-wide */
	SPEC_STORE_BYPASS_PRCTL,	/* SSB disabled per task via prctl() */
};
186 static __always_inline
187 void alternative_msr_write(unsigned int msr
, u64 val
, unsigned int feature
)
189 asm volatile(ALTERNATIVE("", "wrmsr", %c
[feature
])
193 [feature
] "i" (feature
)
197 #endif /* __ASSEMBLY__ */
198 #endif /* __NOSPEC_BRANCH_H__ */