;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2022, Intel Corporation. All rights reserved.<BR>
; Copyright (c) 2020, AMD Incorporated. All rights reserved.<BR>
; SPDX-License-Identifier: BSD-2-Clause-Patent
;
; Module Name:
;
;   SmiEntry.nasm
;
; Abstract:
;
;   Code template of the SMI handler for a particular processor
;
;-------------------------------------------------------------------------------

%include "StuffRsbNasm.inc"
%include "Nasm.inc"

%define MSR_IA32_S_CET                     0x6A2
%define   MSR_IA32_CET_SH_STK_EN           0x1
%define   MSR_IA32_CET_WR_SHSTK_EN         0x2
%define   MSR_IA32_CET_ENDBR_EN            0x4
%define   MSR_IA32_CET_LEG_IW_EN           0x8
%define   MSR_IA32_CET_NO_TRACK_EN         0x10
%define   MSR_IA32_CET_SUPPRESS_DIS        0x20
%define   MSR_IA32_CET_SUPPRESS            0x400
%define   MSR_IA32_CET_TRACKER             0x800
%define MSR_IA32_PL0_SSP                   0x6A4

%define CR4_CET                            0x800000

%define MSR_IA32_MISC_ENABLE               0x1A0
%define MSR_EFER                           0xc0000080
%define MSR_EFER_XD                        0x800

;
; Constants relating to PROCESSOR_SMM_DESCRIPTOR
;
%define DSC_OFFSET 0xfb00
%define DSC_GDTPTR 0x30
%define DSC_GDTSIZ 0x38
%define DSC_CS 14
%define DSC_DS 16
%define DSC_SS 18
%define DSC_OTHERSEG 20

%define PROTECT_MODE_CS 0x8
%define PROTECT_MODE_DS 0x20
%define TSS_SEGMENT 0x40

extern ASM_PFX(SmiRendezvous)
extern ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))
extern ASM_PFX(CpuSmmDebugEntry)
extern ASM_PFX(CpuSmmDebugExit)

global ASM_PFX(gcSmiHandlerTemplate)
global ASM_PFX(gcSmiHandlerSize)
global ASM_PFX(gPatchSmiCr3)
global ASM_PFX(gPatchSmiStack)
global ASM_PFX(gPatchSmbase)
extern ASM_PFX(mXdSupported)
global ASM_PFX(gPatchXdSupported)
global ASM_PFX(gPatchMsrIa32MiscEnableSupported)
extern ASM_PFX(gSmiHandlerIdtr)

extern ASM_PFX(mCetSupported)
global ASM_PFX(mPatchCetSupported)
global ASM_PFX(mPatchCetPl0Ssp)
global ASM_PFX(mPatchCetInterruptSsp)

    SECTION .text

BITS 16
ASM_PFX(gcSmiHandlerTemplate):
_SmiEntryPoint:
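    ;
    ; Note: on an SMI the processor begins execution here, at SMBASE + 0x8000,
    ; in a 16-bit real-mode-like environment; this stub builds a GDT
    ; descriptor, switches to 32-bit protected mode, and jumps to @32bit.
    ;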
    mov     bx, _GdtDesc - _SmiEntryPoint + 0x8000
    mov     ax, [cs:DSC_OFFSET + DSC_GDTSIZ]
    dec     ax
    mov     [cs:bx], ax
    mov     eax, [cs:DSC_OFFSET + DSC_GDTPTR]
    mov     [cs:bx + 2], eax
    mov     ebp, eax                      ; ebp = GDT base
o32 lgdt    [cs:bx]                       ; lgdt fword ptr cs:[bx]
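    ;
    ; The two stores below patch, in place, the operands of the far
    ; "jmp dword 0x0:0x0" that follows: the instruction ends immediately
    ; before _GdtDesc, so [cs:bx-0x6] is its 32-bit offset field and
    ; [cs:bx-0x2] is its 16-bit selector field.
    ;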
    mov     ax, PROTECT_MODE_CS
    mov     [cs:bx-0x2], ax
    mov     edi, strict dword 0           ; source operand will be patched
ASM_PFX(gPatchSmbase):
    lea     eax, [edi + (@32bit - _SmiEntryPoint) + 0x8000]
    mov     [cs:bx-0x6], eax
    mov     ebx, cr0
    and     ebx, 0x9ffafff3
    or      ebx, 0x23
    mov     cr0, ebx
    jmp     dword 0x0:0x0
_GdtDesc:
    DW 0
    DD 0

BITS 32
@32bit:
    mov     ax, PROTECT_MODE_DS
o16 mov     ds, ax
o16 mov     es, ax
o16 mov     fs, ax
o16 mov     gs, ax
o16 mov     ss, ax
    mov     esp, strict dword 0           ; source operand will be patched
ASM_PFX(gPatchSmiStack):
    mov     eax, ASM_PFX(gSmiHandlerIdtr)
    lidt    [eax]
    jmp     ProtFlatMode

ProtFlatMode:
    mov     eax, strict dword 0           ; source operand will be patched
ASM_PFX(gPatchSmiCr3):
    mov     cr3, eax
;
; Need to test for CR4 specific bit support
;
    mov     eax, 1
    cpuid                                 ; use CPUID to determine if specific CR4 bits are supported
    xor     eax, eax                      ; Clear EAX
    test    edx, BIT2                     ; Check for DE capabilities
    jz      .0
    or      eax, BIT3
.0:
    test    edx, BIT6                     ; Check for PAE capabilities
    jz      .1
    or      eax, BIT5
.1:
    test    edx, BIT7                     ; Check for MCE capabilities
    jz      .2
    or      eax, BIT6
.2:
    test    edx, BIT24                    ; Check for FXSR capabilities
    jz      .3
    or      eax, BIT9
.3:
    test    edx, BIT25                    ; Check for SSE capabilities
    jz      .4
    or      eax, BIT10
.4:                                       ; as cr4.PGE is not set here, refresh cr3
    mov     cr4, eax                      ; in PreModifyMtrrs() to flush TLB.

    cmp     byte [dword ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))], 0
    jz      .6
; Load TSS
    mov     byte [ebp + TSS_SEGMENT + 5], 0x89 ; clear busy flag
    mov     eax, TSS_SEGMENT
    ltr     ax
.6:

; enable NXE if supported
    mov     al, strict byte 1             ; source operand may be patched
ASM_PFX(gPatchXdSupported):
    cmp     al, 0
    jz      @SkipXd

; If MSR_IA32_MISC_ENABLE is supported, clear XD Disable bit
    mov     al, strict byte 1             ; source operand may be patched
ASM_PFX(gPatchMsrIa32MiscEnableSupported):
    cmp     al, 1
    jz      MsrIa32MiscEnableSupported

; MSR_IA32_MISC_ENABLE not supported
    xor     edx, edx
    push    edx                           ; don't try to restore the XD Disable bit just before RSM
    jmp     EnableNxe

;
; Check XD disable bit
;
MsrIa32MiscEnableSupported:
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    push    edx                           ; save MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2                     ; MSR_IA32_MISC_ENABLE[34]
    jz      EnableNxe
    and     dx, 0xFFFB                    ; clear XD Disable bit if it is set
    wrmsr
EnableNxe:
    mov     ecx, MSR_EFER
    rdmsr
    or      ax, MSR_EFER_XD               ; enable NXE
    wrmsr
    jmp     @XdDone
@SkipXd:
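    ; Consume one stack slot anyway so that "[esp + 4]" below still addresses
    ; CpuIndex, matching the paths above that push the saved high dword of
    ; MSR_IA32_MISC_ENABLE (or a zero placeholder).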
    sub     esp, 4
@XdDone:

    mov     ebx, cr0
    or      ebx, 0x80010023               ; enable paging + WP + NE + MP + PE
    mov     cr0, ebx
    lea     ebx, [edi + DSC_OFFSET]
    mov     ax, [ebx + DSC_DS]
    mov     ds, eax
    mov     ax, [ebx + DSC_OTHERSEG]
    mov     es, eax
    mov     fs, eax
    mov     gs, eax
    mov     ax, [ebx + DSC_SS]
    mov     ss, eax

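    ; CpuIndex was stored by the SMM CPU driver at the location the patched
    ; initial stack pointer addresses; one dword (the XD save slot above) has
    ; been pushed since, hence the "+ 4".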
    mov     ebx, [esp + 4]                ; ebx <- CpuIndex

; enable CET if supported
    mov     al, strict byte 1             ; source operand may be patched
ASM_PFX(mPatchCetSupported):
    cmp     al, 0
    jz      CetDone

    mov     ecx, MSR_IA32_S_CET
    rdmsr
    push    edx
    push    eax

    mov     ecx, MSR_IA32_PL0_SSP
    rdmsr
    push    edx
    push    eax

    mov     ecx, MSR_IA32_S_CET
    mov     eax, MSR_IA32_CET_SH_STK_EN
    xor     edx, edx
    wrmsr

    mov     ecx, MSR_IA32_PL0_SSP
    mov     eax, strict dword 0           ; source operand will be patched
ASM_PFX(mPatchCetPl0Ssp):
    xor     edx, edx
    wrmsr
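    ; CET requires shadow-stack pages to be mapped read-only, so temporarily
    ; clear CR0.WP to let the ordinary stores below initialize the
    ; shadow-stack tokens.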
    mov     ecx, cr0
    btr     ecx, 16                       ; clear WP
    mov     cr0, ecx
    mov     [eax], eax                    ; reload SSP, and clear busy flag.
    xor     ecx, ecx
    mov     [eax + 4], ecx

    mov     eax, strict dword 0           ; source operand will be patched
ASM_PFX(mPatchCetInterruptSsp):
    cmp     eax, 0
    jz      CetInterruptDone
    mov     [eax], eax                    ; reload SSP, and clear busy flag.
    xor     ecx, ecx
    mov     [eax + 4], ecx
CetInterruptDone:

    mov     ecx, cr0
    bts     ecx, 16                       ; set WP
    mov     cr0, ecx

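    ; 0x668 = CR4.DE | CR4.PAE | CR4.MCE | CR4.OSFXSR | CR4.OSXMMEXCPT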
    mov     eax, 0x668 | CR4_CET
    mov     cr4, eax

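    ; setssbsy verifies the supervisor shadow-stack token at the address held
    ; in MSR_IA32_PL0_SSP (written above), marks it busy, and loads SSP from
    ; it.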
    setssbsy

CetDone:

    push    ebx
    mov     eax, ASM_PFX(CpuSmmDebugEntry)
    call    eax
    add     esp, 4

    push    ebx
    mov     eax, ASM_PFX(SmiRendezvous)
    call    eax
    add     esp, 4

    push    ebx
    mov     eax, ASM_PFX(CpuSmmDebugExit)
    call    eax
    add     esp, 4

    mov     eax, ASM_PFX(mCetSupported)
    mov     al, [eax]
    cmp     al, 0
    jz      CetDone2

    mov     eax, 0x668
    mov     cr4, eax                      ; disable CET

    mov     ecx, MSR_IA32_PL0_SSP
    pop     eax
    pop     edx
    wrmsr

    mov     ecx, MSR_IA32_S_CET
    pop     eax
    pop     edx
    wrmsr
CetDone2:

    mov     eax, ASM_PFX(mXdSupported)
    mov     al, [eax]
    cmp     al, 0
    jz      .7
    pop     edx                           ; get saved MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2
    jz      .7
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    or      dx, BIT2                      ; set XD Disable bit if it was set before entering SMM
    wrmsr

.7:

    StuffRsb32
    rsm

ASM_PFX(gcSmiHandlerSize): DW $ - _SmiEntryPoint

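;
; Nothing in the IA32 entry template needs fixing up after it is installed,
; so this is an empty stub.
;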
global ASM_PFX(PiSmmCpuSmiEntryFixupAddress)
ASM_PFX(PiSmmCpuSmiEntryFixupAddress):
    ret