]> git.proxmox.com Git - mirror_edk2.git/blame - UefiCpuPkg/PiSmmCpuDxeSmm/Ia32/SmiEntry.nasm
UefiCpuPkg: Replace BSD License with BSD+Patent License
[mirror_edk2.git] / UefiCpuPkg / PiSmmCpuDxeSmm / Ia32 / SmiEntry.nasm
CommitLineData
63a4f460 1;------------------------------------------------------------------------------ ;\r
3eb69b08 2; Copyright (c) 2016 - 2019, Intel Corporation. All rights reserved.<BR>\r
0acd8697 3; SPDX-License-Identifier: BSD-2-Clause-Patent\r
63a4f460
LG
4;\r
5; Module Name:\r
6;\r
7; SmiEntry.nasm\r
8;\r
9; Abstract:\r
10;\r
11; Code template of the SMI handler for a particular processor\r
12;\r
13;-------------------------------------------------------------------------------\r
14\r
ada4a003 15%include "StuffRsbNasm.inc"\r
3eb69b08
JY
16%include "Nasm.inc"\r
17\r
; CET (Control-flow Enforcement Technology) shadow-stack MSRs and bit\r
; definitions; see Intel SDM for IA32_S_CET (0x6A2) / IA32_PL0_SSP (0x6A4).\r
18%define MSR_IA32_S_CET 0x6A2\r
19%define MSR_IA32_CET_SH_STK_EN 0x1\r
20%define MSR_IA32_CET_WR_SHSTK_EN 0x2\r
21%define MSR_IA32_CET_ENDBR_EN 0x4\r
22%define MSR_IA32_CET_LEG_IW_EN 0x8\r
23%define MSR_IA32_CET_NO_TRACK_EN 0x10\r
24%define MSR_IA32_CET_SUPPRESS_DIS 0x20\r
25%define MSR_IA32_CET_SUPPRESS 0x400\r
26%define MSR_IA32_CET_TRACKER 0x800\r
27%define MSR_IA32_PL0_SSP 0x6A4\r
28\r
; CR4.CET is bit 23.\r
29%define CR4_CET 0x800000\r
02f7fd15 30\r
717fb604
JY
; MSRs used to manage the XD (execute-disable) feature around SMM entry/exit.\r
; MSR_EFER_XD is EFER.NXE (bit 11).\r
31%define MSR_IA32_MISC_ENABLE 0x1A0\r
32%define MSR_EFER 0xc0000080\r
33%define MSR_EFER_XD 0x800\r
34\r
f12367a0
MK
35;\r
36; Constants relating to PROCESSOR_SMM_DESCRIPTOR\r
37;\r
63a4f460
LG
; DSC_OFFSET is the descriptor's fixed offset from SMBASE; the remaining\r
; DSC_* values are byte offsets of fields within the descriptor.\r
38%define DSC_OFFSET 0xfb00\r
39%define DSC_GDTPTR 0x30\r
40%define DSC_GDTSIZ 0x38\r
41%define DSC_CS 14\r
42%define DSC_DS 16\r
43%define DSC_SS 18\r
44%define DSC_OTHERSEG 20\r
45\r
; GDT selector values used during the mode transition (must match the GDT\r
; built by the SMM CPU driver -- TODO confirm against SmmInit).\r
46%define PROTECT_MODE_CS 0x8\r
47%define PROTECT_MODE_DS 0x20\r
48%define TSS_SEGMENT 0x40\r
49\r
; C-side entry points invoked once the processor is in flat protected mode.\r
50extern ASM_PFX(SmiRendezvous)\r
51extern ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))\r
52extern ASM_PFX(CpuSmmDebugEntry)\r
53extern ASM_PFX(CpuSmmDebugExit)\r
54\r
; gPatch* / mPatch* labels mark instruction operands that the SMM setup code\r
; patches in place before the template is copied to each CPU's SMBASE.\r
55global ASM_PFX(gcSmiHandlerTemplate)\r
56global ASM_PFX(gcSmiHandlerSize)\r
c455687f 57global ASM_PFX(gPatchSmiCr3)\r
fc504fde 58global ASM_PFX(gPatchSmiStack)\r
5a1bfda4 59global ASM_PFX(gPatchSmbase)\r
3c5ce64f
LE
60extern ASM_PFX(mXdSupported)\r
61global ASM_PFX(gPatchXdSupported)\r
63a4f460
LG
62extern ASM_PFX(gSmiHandlerIdtr)\r
63\r
3eb69b08
JY
64extern ASM_PFX(mCetSupported)\r
65global ASM_PFX(mPatchCetSupported)\r
66global ASM_PFX(mPatchCetPl0Ssp)\r
67global ASM_PFX(mPatchCetInterruptSsp)\r
68\r
63a4f460
LG
69 SECTION .text\r
70\r
;\r
; 16-bit real-mode entry: on SMI each processor begins execution here, at\r
; SMBASE + 0x8000, in SMM's real-address-like mode.  This stub builds a GDT\r
; pseudo-descriptor at _GdtDesc, loads it, then far-jumps into 32-bit\r
; protected mode.  The far jump's operand bytes (offset + selector) sit\r
; immediately before _GdtDesc and are written at run time via [cs:bx-0x6]\r
; and [cs:bx-0x2].\r
;\r
71BITS 16\r
72ASM_PFX(gcSmiHandlerTemplate):\r
73_SmiEntryPoint:\r
; bx = offset of _GdtDesc within the SMBASE tile (template starts at 0x8000).\r
74 mov bx, _GdtDesc - _SmiEntryPoint + 0x8000\r
; Fill the GDT pseudo-descriptor: limit = size-1, then 32-bit base, both\r
; read from the PROCESSOR_SMM_DESCRIPTOR at SMBASE+DSC_OFFSET.\r
75 mov ax,[cs:DSC_OFFSET + DSC_GDTSIZ]\r
76 dec ax\r
77 mov [cs:bx], ax\r
78 mov eax, [cs:DSC_OFFSET + DSC_GDTPTR]\r
79 mov [cs:bx + 2], eax\r
80 mov ebp, eax ; ebp = GDT base\r
81o32 lgdt [cs:bx] ; lgdt fword ptr cs:[bx]\r
; Patch the far jump's selector (at bx-2) with the protected-mode CS.\r
82 mov ax, PROTECT_MODE_CS\r
717fb604 83 mov [cs:bx-0x2],ax\r
5a1bfda4
LE
; edi is patched to this CPU's SMBASE before the template is installed.\r
84 mov edi, strict dword 0 ; source operand will be patched\r
85ASM_PFX(gPatchSmbase):\r
63a4f460
LG
; Patch the far jump's 32-bit target offset (at bx-6) with the flat\r
; address of @32bit inside this CPU's SMBASE tile.\r
86 lea eax, [edi + (@32bit - _SmiEntryPoint) + 0x8000]\r
87 mov [cs:bx-0x6],eax\r
; CR0: clear PG/CD/NW/TS/EM etc. (mask 0x9ffafff3), set PE|MP|NE (0x23),\r
; then jump through the just-patched far pointer into protected mode.\r
88 mov ebx, cr0\r
89 and ebx, 0x9ffafff3\r
90 or ebx, 0x23\r
91 mov cr0, ebx\r
92 jmp dword 0x0:0x0\r
717fb604 93_GdtDesc:\r
63a4f460
LG
; GDT pseudo-descriptor storage (limit word + base dword), filled above.\r
94 DW 0\r
95 DD 0\r
96\r
;\r
; 32-bit protected-mode continuation: load flat data selectors, switch to\r
; the per-CPU SMM stack (patched operand), and load the SMI handler IDT.\r
;\r
97BITS 32\r
98@32bit:\r
99 mov ax, PROTECT_MODE_DS\r
100o16 mov ds, ax\r
101o16 mov es, ax\r
102o16 mov fs, ax\r
103o16 mov gs, ax\r
104o16 mov ss, ax\r
fc504fde
LE
; esp is patched to this CPU's SMM stack top before installation.\r
105 mov esp, strict dword 0 ; source operand will be patched\r
106ASM_PFX(gPatchSmiStack):\r
63a4f460
LG
107 mov eax, ASM_PFX(gSmiHandlerIdtr)\r
108 lidt [eax]\r
109 jmp ProtFlatMode\r
110\r
;\r
; Flat protected mode: install the SMM page table (patched CR3), probe\r
; CPUID.1:EDX and enable the matching CR4 feature bits, then optionally\r
; load the TSS when the SMM stack guard is enabled.\r
;\r
111ProtFlatMode:\r
c455687f
LE
; eax is patched with the SMM page-table (CR3) value before installation.\r
112 mov eax, strict dword 0 ; source operand will be patched\r
113ASM_PFX(gPatchSmiCr3):\r
63a4f460
LG
114 mov cr3, eax\r
115;\r
116; Need to test for CR4 specific bit support\r
117;\r
118 mov eax, 1\r
119 cpuid ; use CPUID to determine if specific CR4 bits are supported\r
; Translate CPUID.1:EDX feature flags into CR4 bits:\r
; DE->CR4.DE(3), PAE->CR4.PAE(5), MCE->CR4.MCE(6),\r
; FXSR->CR4.OSFXSR(9), SSE->CR4.OSXMMEXCPT(10).\r
120 xor eax, eax ; Clear EAX\r
121 test edx, BIT2 ; Check for DE capabilities\r
122 jz .0\r
123 or eax, BIT3\r
124.0:\r
125 test edx, BIT6 ; Check for PAE capabilities\r
126 jz .1\r
127 or eax, BIT5\r
128.1:\r
129 test edx, BIT7 ; Check for MCE capabilities\r
130 jz .2\r
131 or eax, BIT6\r
132.2:\r
133 test edx, BIT24 ; Check for FXSR capabilities\r
134 jz .3\r
135 or eax, BIT9\r
136.3:\r
137 test edx, BIT25 ; Check for SSE capabilities\r
138 jz .4\r
139 or eax, BIT10\r
140.4: ; as cr4.PGE is not set here, refresh cr3\r
141 mov cr4, eax ; in PreModifyMtrrs() to flush TLB.\r
717fb604
JY
142\r
; When PcdCpuSmmStackGuard is set, load the TSS so stack-fault handling\r
; works.  ebp still holds the GDT base (set in the 16-bit phase); byte 5\r
; of the TSS descriptor is its access byte -- writing 0x89 marks the TSS\r
; "available" (clears the busy bit) so ltr will accept it.\r
143 cmp byte [dword ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))], 0\r
144 jz .6\r
145; Load TSS\r
146 mov byte [ebp + TSS_SEGMENT + 5], 0x89 ; clear busy flag\r
147 mov eax, TSS_SEGMENT\r
148 ltr ax\r
149.6:\r
150\r
151; enable NXE if supported\r
3c5ce64f
LE
; al is patched to mXdSupported's value; when XD is unsupported the whole\r
; section is skipped (but still reserves one stack slot, see @SkipXd).\r
152 mov al, strict byte 1 ; source operand may be patched\r
153ASM_PFX(gPatchXdSupported):\r
717fb604
JY
154 cmp al, 0\r
155 jz @SkipXd\r
156;\r
157; Check XD disable bit\r
158;\r
159 mov ecx, MSR_IA32_MISC_ENABLE\r
160 rdmsr\r
161 push edx ; save MSR_IA32_MISC_ENABLE[63-32]\r
162 test edx, BIT2 ; MSR_IA32_MISC_ENABLE[34]\r
163 jz .5\r
164 and dx, 0xFFFB ; clear XD Disable bit if it is set\r
165 wrmsr\r
166.5:\r
167 mov ecx, MSR_EFER\r
168 rdmsr\r
169 or ax, MSR_EFER_XD ; enable NXE\r
170 wrmsr\r
171 jmp @XdDone\r
; Both paths must leave exactly one dword on the stack so the exit path's\r
; unconditional stack layout (and the later [esp + 4] CpuIndex fetch)\r
; stays balanced; the skip path just reserves the slot.\r
172@SkipXd:\r
173 sub esp, 4\r
174@XdDone:\r
175\r
63a4f460 176 mov ebx, cr0\r
717fb604 177 or ebx, 0x80010023 ; enable paging + WP + NE + MP + PE\r
63a4f460
LG
178 mov cr0, ebx\r
; Reload the final segment selectors from the PROCESSOR_SMM_DESCRIPTOR\r
; (edi still holds this CPU's SMBASE from the 16-bit phase).\r
179 lea ebx, [edi + DSC_OFFSET]\r
180 mov ax, [ebx + DSC_DS]\r
181 mov ds, eax\r
182 mov ax, [ebx + DSC_OTHERSEG]\r
183 mov es, eax\r
184 mov fs, eax\r
185 mov gs, eax\r
186 mov ax, [ebx + DSC_SS]\r
187 mov ss, eax\r
188\r
3eb69b08
JY
; CpuIndex was placed at the top of this CPU's SMM stack by the setup code\r
; (TODO confirm against SmmInit); [esp + 4] skips the one XD save slot\r
; pushed/reserved in the section above.\r
189 mov ebx, [esp + 4] ; ebx <- CpuIndex\r
190\r
191; enable CET if supported\r
; al is patched to mCetSupported's value.\r
192 mov al, strict byte 1 ; source operand may be patched\r
193ASM_PFX(mPatchCetSupported):\r
194 cmp al, 0\r
195 jz CetDone\r
196\r
; Save the incoming IA32_S_CET and IA32_PL0_SSP values (popped in reverse\r
; order by the CetDone2 restore path before rsm).\r
197 mov ecx, MSR_IA32_S_CET\r
198 rdmsr\r
199 push edx\r
200 push eax\r
201\r
202 mov ecx, MSR_IA32_PL0_SSP\r
203 rdmsr\r
204 push edx\r
205 push eax\r
206\r
; Enable shadow stacks in supervisor mode.\r
207 mov ecx, MSR_IA32_S_CET\r
208 mov eax, MSR_IA32_CET_SH_STK_EN\r
209 xor edx, edx\r
210 wrmsr\r
211\r
; Point IA32_PL0_SSP at the SMM shadow stack (patched operand).\r
212 mov ecx, MSR_IA32_PL0_SSP\r
213 mov eax, strict dword 0 ; source operand will be patched\r
214ASM_PFX(mPatchCetPl0Ssp):\r
215 xor edx, edx\r
216 wrmsr\r
; The shadow-stack token lives in write-protected pages, so temporarily\r
; drop CR0.WP while writing the restore token + clearing its busy flag.\r
217 mov ecx, cr0\r
218 btr ecx, 16 ; clear WP\r
219 mov cr0, ecx\r
220 mov [eax], eax ; reload SSP, and clear busyflag.\r
221 xor ecx, ecx\r
222 mov [eax + 4], ecx\r
223\r
; Same token initialization for the interrupt shadow stack, if one was\r
; configured (patched operand; zero means "none").\r
224 mov eax, strict dword 0 ; source operand will be patched\r
225ASM_PFX(mPatchCetInterruptSsp):\r
226 cmp eax, 0\r
227 jz CetInterruptDone\r
228 mov [eax], eax ; reload SSP, and clear busyflag.\r
229 xor ecx, ecx\r
230 mov [eax + 4], ecx\r
231CetInterruptDone:\r
232\r
233 mov ecx, cr0\r
234 bts ecx, 16 ; set WP\r
235 mov cr0, ecx\r
236\r
; Turn on CR4.CET alongside the feature bits established earlier, then\r
; mark the supervisor shadow stack busy.\r
237 mov eax, 0x668 | CR4_CET\r
238 mov cr4, eax\r
239\r
240 SETSSBSY\r
241\r
242CetDone:\r
63a4f460 243\r
63a4f460
LG
; Call CpuSmmDebugEntry(CpuIndex), SmiRendezvous(CpuIndex), and\r
; CpuSmmDebugExit(CpuIndex) with cdecl conventions: the single argument\r
; is pushed and the caller pops it (add esp, 4) after each call.\r
244 push ebx\r
245 mov eax, ASM_PFX(CpuSmmDebugEntry)\r
246 call eax\r
717fb604 247 add esp, 4\r
63a4f460
LG
248\r
249 push ebx\r
250 mov eax, ASM_PFX(SmiRendezvous)\r
251 call eax\r
717fb604
JY
252 add esp, 4\r
253\r
63a4f460
LG
254 push ebx\r
255 mov eax, ASM_PFX(CpuSmmDebugExit)\r
256 call eax\r
717fb604
JY
257 add esp, 4\r
258\r
3eb69b08
JY
; If CET was enabled above, disable it and restore the saved\r
; IA32_PL0_SSP and IA32_S_CET values in LIFO order (PL0_SSP was pushed\r
; last, so it is popped first).\r
259 mov eax, ASM_PFX(mCetSupported)\r
260 mov al, [eax]\r
261 cmp al, 0\r
262 jz CetDone2\r
263\r
264 mov eax, 0x668\r
265 mov cr4, eax ; disable CET\r
266\r
267 mov ecx, MSR_IA32_PL0_SSP\r
268 pop eax\r
269 pop edx\r
270 wrmsr\r
271\r
272 mov ecx, MSR_IA32_S_CET\r
273 pop eax\r
274 pop edx\r
275 wrmsr\r
276CetDone2:\r
277\r
717fb604
JY
; Restore the pre-SMM XD Disable state: if MISC_ENABLE[34] was set on\r
; entry (saved high dword popped here), set it again before leaving SMM.\r
278 mov eax, ASM_PFX(mXdSupported)\r
279 mov al, [eax]\r
280 cmp al, 0\r
281 jz .7\r
282 pop edx ; get saved MSR_IA32_MISC_ENABLE[63-32]\r
283 test edx, BIT2\r
284 jz .7\r
285 mov ecx, MSR_IA32_MISC_ENABLE\r
286 rdmsr\r
287 or dx, BIT2 ; set XD Disable bit if it was set before entering into SMM\r
288 wrmsr\r
289\r
290.7:\r
3eb69b08 291\r
; StuffRsb32 (from StuffRsbNasm.inc) overwrites the Return Stack Buffer\r
; before rsm -- speculative-execution (RSB underflow) mitigation.\r
02f7fd15 292 StuffRsb32\r
63a4f460
LG
293 rsm\r
294\r
; Byte length of the whole template, copied per-CPU to SMBASE + 0x8000.\r
295ASM_PFX(gcSmiHandlerSize): DW $ - _SmiEntryPoint\r
296\r
e21e355e
LG
; No-op on IA32: there is no absolute address in the template that needs\r
; fixup here (presumably the X64 variant patches a 64-bit address --\r
; TODO confirm against the X64 SmiEntry.nasm).  Kept so both architectures\r
; expose the same entry point.\r
297global ASM_PFX(PiSmmCpuSmiEntryFixupAddress)\r
298ASM_PFX(PiSmmCpuSmiEntryFixupAddress):\r
299 ret\r