/* SPDX-License-Identifier: GPL-2.0 */

#include <linux/stringify.h>
#include <linux/linkage.h>
#include <asm/dwarf2.h>
#include <asm/cpufeatures.h>
#include <asm/alternative-asm.h>
#include <asm/export.h>
#include <asm/nospec-branch.h>
#include <asm/bitsperlong.h>

.macro THUNK reg
	.section .text.__x86.indirect_thunk

ENTRY(__x86_indirect_thunk_\reg)
	CFI_STARTPROC
	JMP_NOSPEC %\reg
	CFI_ENDPROC
ENDPROC(__x86_indirect_thunk_\reg)
.endm
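
/*
 * For reference, a sketch of what JMP_NOSPEC resolves to when
 * X86_FEATURE_RETPOLINE is set (see RETPOLINE_JMP in
 * asm/nospec-branch.h; the labels are illustrative, the real
 * macro generates \@-suffixed local labels):
 *
 *	call	.Ldo_rop
 * .Lspec_trap:
 *	pause			# speculation of the call lands here
 *	lfence
 *	jmp	.Lspec_trap
 * .Ldo_rop:
 *	mov	%\reg, (%_ASM_SP)	# overwrite the pushed return address
 *	ret				# architecturally jumps to *%\reg
 *
 * Without the feature bit the alternative is a plain 'jmp *%\reg'.
 */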

/*
 * Despite being an assembler file we can't just use .irp here
 * because __KSYM_DEPS__ only uses the C preprocessor and would
 * only see one instance of "__x86_indirect_thunk_\reg" rather
 * than one per register with the correct names. So we do it
 * the simple and nasty way...
 */
#define __EXPORT_THUNK(sym) _ASM_NOKPROBE(sym); EXPORT_SYMBOL(sym)
#define EXPORT_THUNK(reg) __EXPORT_THUNK(__x86_indirect_thunk_ ## reg)
#define GENERATE_THUNK(reg) THUNK reg ; EXPORT_THUNK(reg)
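
/*
 * For example, on 64-bit GENERATE_THUNK(_ASM_AX) expands (via
 * _ASM_AX -> rax) to "THUNK rax ; EXPORT_THUNK(rax)", emitting
 * and exporting __x86_indirect_thunk_rax; on 32-bit the same
 * line yields __x86_indirect_thunk_eax.
 */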

GENERATE_THUNK(_ASM_AX)
GENERATE_THUNK(_ASM_BX)
GENERATE_THUNK(_ASM_CX)
GENERATE_THUNK(_ASM_DX)
GENERATE_THUNK(_ASM_SI)
GENERATE_THUNK(_ASM_DI)
GENERATE_THUNK(_ASM_BP)
#ifdef CONFIG_64BIT
GENERATE_THUNK(r8)
GENERATE_THUNK(r9)
GENERATE_THUNK(r10)
GENERATE_THUNK(r11)
GENERATE_THUNK(r12)
GENERATE_THUNK(r13)
GENERATE_THUNK(r14)
GENERATE_THUNK(r15)
#endif

/*
 * Fill the CPU return stack buffer.
 *
 * Each entry in the RSB, if used for a speculative 'ret', contains an
 * infinite 'pause; lfence; jmp' loop to capture speculative execution.
 *
 * This is required in various cases for retpoline and IBRS-based
 * mitigations for the Spectre variant 2 vulnerability. Sometimes to
 * eliminate potentially bogus entries from the RSB, and sometimes
 * purely to ensure that it doesn't get empty, which on some CPUs would
 * allow predictions from other (unwanted!) sources to be used.
 *
 * Google experimented with loop-unrolling and this turned out to be
 * the optimal version - two calls, each with their own speculation
 * trap should their return address end up getting used, in a loop.
 */
.macro STUFF_RSB nr:req sp:req
	mov	$(\nr / 2), %_ASM_BX	/* two calls per loop iteration */
	.align 16
771:
	call	772f
773:	/* speculation trap */
	pause
	lfence
	jmp	773b
	.align 16
772:
	call	774f
775:	/* speculation trap */
	pause
	lfence
	jmp	775b
	.align 16
774:
	dec	%_ASM_BX
	jnz	771b
	/* discard the \nr return addresses pushed by the calls above */
	add	$((BITS_PER_LONG/8) * \nr), \sp
.endm
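
/*
 * Worked example: "STUFF_RSB 32, %rsp" runs 16 iterations of two
 * calls each, i.e. 32 calls writing 32 RSB entries, and the final
 * add discards the 32 * 8 = 256 bytes of return addresses those
 * calls left on the 64-bit stack.
 */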

#define RSB_FILL_LOOPS		16	/* To avoid underflow */

ENTRY(__fill_rsb)
	STUFF_RSB RSB_FILL_LOOPS, %_ASM_SP
	ret
END(__fill_rsb)
EXPORT_SYMBOL_GPL(__fill_rsb)

#define RSB_CLEAR_LOOPS		32	/* To forcibly overwrite all entries */

ENTRY(__clear_rsb)
	STUFF_RSB RSB_CLEAR_LOOPS, %_ASM_SP
	ret
END(__clear_rsb)
EXPORT_SYMBOL_GPL(__clear_rsb)
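
/*
 * Note: STUFF_RSB clobbers %_ASM_BX, which the C ABI treats as
 * callee-saved, so these helpers cannot be called as ordinary C
 * functions. C callers must go through inline asm or alternatives
 * with an explicit _ASM_BX clobber; the vmexit_fill_RSB() inline
 * in asm/nospec-branch.h is one such wrapper.
 */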