arch/x86/include/asm/nospec-branch.h (mirror_ubuntu-artful-kernel.git, blob at commit "x86/speculation: Make "seccomp" the default mode for Speculative Store Bypass")
/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __NOSPEC_BRANCH_H__
#define __NOSPEC_BRANCH_H__

#include <asm/alternative.h>
#include <asm/alternative-asm.h>
#include <asm/cpufeatures.h>

#ifdef __ASSEMBLY__

/*
 * This should be used immediately before a retpoline alternative. It tells
 * objtool where the retpolines are so that it can make sense of the control
 * flow by just reading the original instruction(s) and ignoring the
 * alternatives.
 */
.macro ANNOTATE_NOSPEC_ALTERNATIVE
	.Lannotate_\@:
	.pushsection .discard.nospec
	.long .Lannotate_\@ - .
	.popsection
.endm

/*
 * This should be used immediately before an indirect jump/call. It tells
 * objtool the subsequent indirect jump/call is vouched safe for retpoline
 * builds.
 */
.macro ANNOTATE_RETPOLINE_SAFE
	.Lannotate_\@:
	.pushsection .discard.retpoline_safe
	_ASM_PTR .Lannotate_\@
	.popsection
.endm

/*
 * These are the bare retpoline primitives for indirect jmp and call.
 * Do not use these directly; they only exist to make the ALTERNATIVE
 * invocation below less ugly.
 */
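/*
 * How RETPOLINE_JMP works: the "call" pushes the address of the
 * speculation trap (pause; lfence; jmp back) as a return address, the
 * "mov" then overwrites that return address on the stack with the real
 * target held in \reg, and the "ret" branches there.  A CPU that
 * speculates the "ret" from its return stack buffer lands harmlessly
 * in the trap loop instead of at a mispredicted indirect target.
 */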
.macro RETPOLINE_JMP reg:req
	call	.Ldo_rop_\@
.Lspec_trap_\@:
	pause
	lfence
	jmp	.Lspec_trap_\@
.Ldo_rop_\@:
	mov	\reg, (%_ASM_SP)
	ret
.endm

/*
 * This is a wrapper around RETPOLINE_JMP so the called function in reg
 * returns to the instruction after the macro.
 */
.macro RETPOLINE_CALL reg:req
	jmp	.Ldo_call_\@
.Ldo_retpoline_jmp_\@:
	RETPOLINE_JMP	\reg
.Ldo_call_\@:
	call	.Ldo_retpoline_jmp_\@
.endm

/*
 * JMP_NOSPEC and CALL_NOSPEC macros can be used instead of a simple
 * indirect jmp/call which may be susceptible to the Spectre variant 2
 * attack.
 */
.macro JMP_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
	ANNOTATE_NOSPEC_ALTERNATIVE
	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *\reg),	\
		__stringify(RETPOLINE_JMP \reg), X86_FEATURE_RETPOLINE,	\
		__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *\reg), X86_FEATURE_RETPOLINE_AMD
#else
	jmp	*\reg
#endif
.endm

.macro CALL_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE
	ANNOTATE_NOSPEC_ALTERNATIVE
	ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *\reg),	\
		__stringify(RETPOLINE_CALL \reg), X86_FEATURE_RETPOLINE,	\
		__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *\reg), X86_FEATURE_RETPOLINE_AMD
#else
	call	*\reg
#endif
.endm
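
/*
 * Illustrative use from assembly (the register choice is only an
 * example): instead of a bare indirect branch such as "jmp *%rax" or
 * "call *%rax", code uses
 *
 *	JMP_NOSPEC %rax
 *	CALL_NOSPEC %rax
 *
 * so retpoline kernels get the thunked sequence while !CONFIG_RETPOLINE
 * builds fall back to the plain indirect branch.
 */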

/* This clobbers the BX register */
.macro FILL_RETURN_BUFFER nr:req ftr:req
#ifdef CONFIG_RETPOLINE
	ALTERNATIVE "", "call __clear_rsb", \ftr
#endif
.endm
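
/*
 * Illustrative use (the argument values are examples only): RSB
 * stuffing on a context-switch or similar path might be requested as
 *
 *	FILL_RETURN_BUFFER 16, X86_FEATURE_RETPOLINE
 *
 * Note that in this variant the \nr argument is accepted but unused;
 * the actual stuffing is done by the out-of-line __clear_rsb helper,
 * which is patched in only when the feature bit \ftr is set.
 */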

#else /* __ASSEMBLY__ */

#define ANNOTATE_NOSPEC_ALTERNATIVE				\
	"999:\n\t"						\
	".pushsection .discard.nospec\n\t"			\
	".long 999b - .\n\t"					\
	".popsection\n\t"

#define ANNOTATE_RETPOLINE_SAFE					\
	"999:\n\t"						\
	".pushsection .discard.retpoline_safe\n\t"		\
	_ASM_PTR " 999b\n\t"					\
	".popsection\n\t"

#if defined(CONFIG_X86_64) && defined(RETPOLINE)

/*
 * Since the inline asm uses the %V modifier which is only in newer GCC,
 * the 64-bit one is dependent on RETPOLINE not CONFIG_RETPOLINE.
 */
# define CALL_NOSPEC						\
	ANNOTATE_NOSPEC_ALTERNATIVE				\
	ALTERNATIVE(						\
	ANNOTATE_RETPOLINE_SAFE					\
	"call *%[thunk_target]\n",				\
	"call __x86_indirect_thunk_%V[thunk_target]\n",		\
	X86_FEATURE_RETPOLINE)
# define THUNK_TARGET(addr) [thunk_target] "r" (addr)

#elif defined(CONFIG_X86_32) && defined(CONFIG_RETPOLINE)
/*
 * For i386 we use the original ret-equivalent retpoline, because
 * otherwise we'll run out of registers. We don't care about CET
 * here, anyway.
 */
# define CALL_NOSPEC						\
	ALTERNATIVE(						\
	ANNOTATE_RETPOLINE_SAFE					\
	"call *%[thunk_target]\n",				\
	"	jmp    904f;\n"					\
	"	.align 16\n"					\
	"901:	call   903f;\n"					\
	"902:	pause;\n"					\
	"	lfence;\n"					\
	"	jmp    902b;\n"					\
	"	.align 16\n"					\
	"903:	addl   $4, %%esp;\n"				\
	"	pushl  %[thunk_target];\n"			\
	"	ret;\n"						\
	"	.align 16\n"					\
	"904:	call   901b;\n",				\
	X86_FEATURE_RETPOLINE)

# define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
#else /* No retpoline for C / inline asm */
# define CALL_NOSPEC "call *%[thunk_target]\n"
# define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
#endif

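/*
 * Illustrative use from C (the function pointer name 'fn' is just an
 * example): an indirect call can be issued through the thunk with
 *
 *	asm volatile(CALL_NOSPEC
 *		     : : THUNK_TARGET(fn)
 *		     : "memory");
 *
 * Real callers additionally list any outputs and clobbered registers of
 * the called function; on 64-bit retpoline builds this expands to a
 * call through the matching __x86_indirect_thunk_* instead of "call *fn".
 */
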
/* The Spectre V2 mitigation variants */
enum spectre_v2_mitigation {
	SPECTRE_V2_NONE,
	SPECTRE_V2_RETPOLINE_MINIMAL,
	SPECTRE_V2_RETPOLINE_MINIMAL_AMD,
	SPECTRE_V2_RETPOLINE_GENERIC,
	SPECTRE_V2_RETPOLINE_AMD,
	SPECTRE_V2_IBRS,
};
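
/*
 * Which of these ends up selected is driven by the spectre_v2= command
 * line option (off, on, auto, retpoline, retpoline,generic,
 * retpoline,amd) together with the available CPU features; the
 * selection logic lives in arch/x86/kernel/cpu/bugs.c.
 */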

/*
 * The Intel specification for the SPEC_CTRL MSR requires that we
 * preserve any already set reserved bits at boot time (e.g. for
 * future additions that this kernel is not currently aware of).
 * We then set any additional mitigation bits that we want
 * ourselves and always use this as the base for SPEC_CTRL.
 * We also use this when handling guest entry/exit as below.
 */
extern void x86_spec_ctrl_set(u64);
extern u64 x86_spec_ctrl_get_default(void);

/* The Speculative Store Bypass disable variants */
enum ssb_mitigation {
	SPEC_STORE_BYPASS_NONE,
	SPEC_STORE_BYPASS_DISABLE,
	SPEC_STORE_BYPASS_PRCTL,
	SPEC_STORE_BYPASS_SECCOMP,
};
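
/*
 * These correspond to the spec_store_bypass_disable= command line
 * choices (off, on, auto, prctl, seccomp), with "seccomp" being the
 * default mode when the mitigation is available.
 */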

static __always_inline
void alternative_msr_write(unsigned int msr, u64 val, unsigned int feature)
{
	asm volatile(ALTERNATIVE("", "wrmsr", %c[feature])
		: : "c" (msr),
		    "a" ((u32)val),
		    "d" ((u32)(val >> 32)),
		    [feature] "i" (feature)
		: "memory");
}
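
/*
 * Illustrative use (assuming MSR_IA32_PRED_CMD, PRED_CMD_IBPB and
 * X86_FEATURE_USE_IBPB are available in this tree): an indirect branch
 * prediction barrier can be issued as
 *
 *	alternative_msr_write(MSR_IA32_PRED_CMD, PRED_CMD_IBPB,
 *			      X86_FEATURE_USE_IBPB);
 *
 * The ALTERNATIVE above only patches in the "wrmsr" when the feature
 * bit is set, so the call degrades to a no-op on CPUs without it.
 */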

#endif /* __ASSEMBLY__ */
#endif /* __NOSPEC_BRANCH_H__ */