;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2019, Intel Corporation. All rights reserved.<BR>
; SPDX-License-Identifier: BSD-2-Clause-Patent
;
; Module Name:
;
;   SmiEntry.nasm
;
; Abstract:
;
;   Code template of the SMI handler for a particular processor
;
;-------------------------------------------------------------------------------
14 | \r | |
15 | %include "StuffRsbNasm.inc"\r | |
16 | %include "Nasm.inc"\r | |
17 | \r | |
18 | ;\r | |
19 | ; Variables referrenced by C code\r | |
20 | ;\r | |
21 | \r | |
22 | %define MSR_IA32_S_CET 0x6A2\r | |
23 | %define MSR_IA32_CET_SH_STK_EN 0x1\r | |
24 | %define MSR_IA32_CET_WR_SHSTK_EN 0x2\r | |
25 | %define MSR_IA32_CET_ENDBR_EN 0x4\r | |
26 | %define MSR_IA32_CET_LEG_IW_EN 0x8\r | |
27 | %define MSR_IA32_CET_NO_TRACK_EN 0x10\r | |
28 | %define MSR_IA32_CET_SUPPRESS_DIS 0x20\r | |
29 | %define MSR_IA32_CET_SUPPRESS 0x400\r | |
30 | %define MSR_IA32_CET_TRACKER 0x800\r | |
31 | %define MSR_IA32_PL0_SSP 0x6A4\r | |
32 | %define MSR_IA32_INTERRUPT_SSP_TABLE_ADDR 0x6A8\r | |
33 | \r | |
34 | %define CR4_CET 0x800000\r | |
35 | \r | |
36 | %define MSR_IA32_MISC_ENABLE 0x1A0\r | |
37 | %define MSR_EFER 0xc0000080\r | |
38 | %define MSR_EFER_XD 0x800\r | |
39 | \r | |
40 | ;\r | |
41 | ; Constants relating to PROCESSOR_SMM_DESCRIPTOR\r | |
42 | ;\r | |
43 | %define DSC_OFFSET 0xfb00\r | |
44 | %define DSC_GDTPTR 0x30\r | |
45 | %define DSC_GDTSIZ 0x38\r | |
46 | %define DSC_CS 14\r | |
47 | %define DSC_DS 16\r | |
48 | %define DSC_SS 18\r | |
49 | %define DSC_OTHERSEG 20\r | |
50 | ;\r | |
51 | ; Constants relating to CPU State Save Area\r | |
52 | ;\r | |
53 | %define SSM_DR6 0xffd0\r | |
54 | %define SSM_DR7 0xffc8\r | |
55 | \r | |
56 | %define PROTECT_MODE_CS 0x8\r | |
57 | %define PROTECT_MODE_DS 0x20\r | |
58 | %define LONG_MODE_CS 0x38\r | |
59 | %define TSS_SEGMENT 0x40\r | |
60 | %define GDT_SIZE 0x50\r | |
61 | \r | |
62 | extern ASM_PFX(SmiRendezvous)\r | |
63 | extern ASM_PFX(gSmiHandlerIdtr)\r | |
64 | extern ASM_PFX(CpuSmmDebugEntry)\r | |
65 | extern ASM_PFX(CpuSmmDebugExit)\r | |
66 | \r | |
67 | global ASM_PFX(gPatchSmbase)\r | |
68 | extern ASM_PFX(mXdSupported)\r | |
69 | global ASM_PFX(gPatchXdSupported)\r | |
70 | global ASM_PFX(gPatchSmiStack)\r | |
71 | global ASM_PFX(gPatchSmiCr3)\r | |
72 | global ASM_PFX(gcSmiHandlerTemplate)\r | |
73 | global ASM_PFX(gcSmiHandlerSize)\r | |
74 | \r | |
75 | extern ASM_PFX(mCetSupported)\r | |
76 | global ASM_PFX(mPatchCetSupported)\r | |
77 | global ASM_PFX(mPatchCetPl0Ssp)\r | |
78 | global ASM_PFX(mPatchCetInterruptSsp)\r | |
79 | global ASM_PFX(mPatchCetInterruptSspTable)\r | |
80 | \r | |
81 | DEFAULT REL\r | |
82 | SECTION .text\r | |
83 | \r | |
84 | BITS 16\r | |
85 | ASM_PFX(gcSmiHandlerTemplate):\r | |
86 | _SmiEntryPoint:\r | |
87 | mov bx, _GdtDesc - _SmiEntryPoint + 0x8000\r | |
88 | mov ax,[cs:DSC_OFFSET + DSC_GDTSIZ]\r | |
89 | dec ax\r | |
90 | mov [cs:bx], ax\r | |
91 | mov eax, [cs:DSC_OFFSET + DSC_GDTPTR]\r | |
92 | mov [cs:bx + 2], eax\r | |
93 | o32 lgdt [cs:bx] ; lgdt fword ptr cs:[bx]\r | |
94 | mov ax, PROTECT_MODE_CS\r | |
95 | mov [cs:bx-0x2],ax\r | |
96 | mov edi, strict dword 0 ; source operand will be patched\r | |
97 | ASM_PFX(gPatchSmbase):\r | |
98 | lea eax, [edi + (@ProtectedMode - _SmiEntryPoint) + 0x8000]\r | |
99 | mov [cs:bx-0x6],eax\r | |
100 | mov ebx, cr0\r | |
101 | and ebx, 0x9ffafff3\r | |
102 | or ebx, 0x23\r | |
103 | mov cr0, ebx\r | |
104 | jmp dword 0x0:0x0\r | |
105 | _GdtDesc:\r | |
106 | DW 0\r | |
107 | DD 0\r | |
108 | \r | |
109 | BITS 32\r | |
110 | @ProtectedMode:\r | |
111 | mov ax, PROTECT_MODE_DS\r | |
112 | o16 mov ds, ax\r | |
113 | o16 mov es, ax\r | |
114 | o16 mov fs, ax\r | |
115 | o16 mov gs, ax\r | |
116 | o16 mov ss, ax\r | |
117 | mov esp, strict dword 0 ; source operand will be patched\r | |
118 | ASM_PFX(gPatchSmiStack):\r | |
119 | jmp ProtFlatMode\r | |
120 | \r | |
121 | BITS 64\r | |
122 | ProtFlatMode:\r | |
123 | mov eax, strict dword 0 ; source operand will be patched\r | |
124 | ASM_PFX(gPatchSmiCr3):\r | |
125 | mov cr3, rax\r | |
126 | mov eax, 0x668 ; as cr4.PGE is not set here, refresh cr3\r | |
127 | mov cr4, rax ; in PreModifyMtrrs() to flush TLB.\r | |
128 | ; Load TSS\r | |
129 | sub esp, 8 ; reserve room in stack\r | |
130 | sgdt [rsp]\r | |
131 | mov eax, [rsp + 2] ; eax = GDT base\r | |
132 | add esp, 8\r | |
133 | mov dl, 0x89\r | |
134 | mov [rax + TSS_SEGMENT + 5], dl ; clear busy flag\r | |
135 | mov eax, TSS_SEGMENT\r | |
136 | ltr ax\r | |
137 | \r | |
138 | ; enable NXE if supported\r | |
139 | mov al, strict byte 1 ; source operand may be patched\r | |
140 | ASM_PFX(gPatchXdSupported):\r | |
141 | cmp al, 0\r | |
142 | jz @SkipXd\r | |
143 | ;\r | |
144 | ; Check XD disable bit\r | |
145 | ;\r | |
146 | mov ecx, MSR_IA32_MISC_ENABLE\r | |
147 | rdmsr\r | |
148 | sub esp, 4\r | |
149 | push rdx ; save MSR_IA32_MISC_ENABLE[63-32]\r | |
150 | test edx, BIT2 ; MSR_IA32_MISC_ENABLE[34]\r | |
151 | jz .0\r | |
152 | and dx, 0xFFFB ; clear XD Disable bit if it is set\r | |
153 | wrmsr\r | |
154 | .0:\r | |
155 | mov ecx, MSR_EFER\r | |
156 | rdmsr\r | |
157 | or ax, MSR_EFER_XD ; enable NXE\r | |
158 | wrmsr\r | |
159 | jmp @XdDone\r | |
160 | @SkipXd:\r | |
161 | sub esp, 8\r | |
162 | @XdDone:\r | |
163 | \r | |
164 | ; Switch into @LongMode\r | |
165 | push LONG_MODE_CS ; push cs hardcore here\r | |
166 | call Base ; push return address for retf later\r | |
167 | Base:\r | |
168 | add dword [rsp], @LongMode - Base; offset for far retf, seg is the 1st arg\r | |
169 | \r | |
170 | mov ecx, MSR_EFER\r | |
171 | rdmsr\r | |
172 | or ah, 1 ; enable LME\r | |
173 | wrmsr\r | |
174 | mov rbx, cr0\r | |
175 | or ebx, 0x80010023 ; enable paging + WP + NE + MP + PE\r | |
176 | mov cr0, rbx\r | |
177 | retf\r | |
178 | @LongMode: ; long mode (64-bit code) starts here\r | |
179 | mov rax, strict qword 0 ; mov rax, ASM_PFX(gSmiHandlerIdtr)\r | |
180 | SmiHandlerIdtrAbsAddr:\r | |
181 | lidt [rax]\r | |
182 | lea ebx, [rdi + DSC_OFFSET]\r | |
183 | mov ax, [rbx + DSC_DS]\r | |
184 | mov ds, eax\r | |
185 | mov ax, [rbx + DSC_OTHERSEG]\r | |
186 | mov es, eax\r | |
187 | mov fs, eax\r | |
188 | mov gs, eax\r | |
189 | mov ax, [rbx + DSC_SS]\r | |
190 | mov ss, eax\r | |
191 | \r | |
192 | mov rbx, [rsp + 0x8] ; rbx <- CpuIndex\r | |
193 | \r | |
194 | ; enable CET if supported\r | |
195 | mov al, strict byte 1 ; source operand may be patched\r | |
196 | ASM_PFX(mPatchCetSupported):\r | |
197 | cmp al, 0\r | |
198 | jz CetDone\r | |
199 | \r | |
200 | mov ecx, MSR_IA32_S_CET\r | |
201 | rdmsr\r | |
202 | push rdx\r | |
203 | push rax\r | |
204 | \r | |
205 | mov ecx, MSR_IA32_PL0_SSP\r | |
206 | rdmsr\r | |
207 | push rdx\r | |
208 | push rax\r | |
209 | \r | |
210 | mov ecx, MSR_IA32_INTERRUPT_SSP_TABLE_ADDR\r | |
211 | rdmsr\r | |
212 | push rdx\r | |
213 | push rax\r | |
214 | \r | |
215 | mov ecx, MSR_IA32_S_CET\r | |
216 | mov eax, MSR_IA32_CET_SH_STK_EN\r | |
217 | xor edx, edx\r | |
218 | wrmsr\r | |
219 | \r | |
220 | mov ecx, MSR_IA32_PL0_SSP\r | |
221 | mov eax, strict dword 0 ; source operand will be patched\r | |
222 | ASM_PFX(mPatchCetPl0Ssp):\r | |
223 | xor edx, edx\r | |
224 | wrmsr\r | |
225 | mov rcx, cr0\r | |
226 | btr ecx, 16 ; clear WP\r | |
227 | mov cr0, rcx\r | |
228 | mov [eax], eax ; reload SSP, and clear busyflag.\r | |
229 | xor ecx, ecx\r | |
230 | mov [eax + 4], ecx\r | |
231 | \r | |
232 | mov ecx, MSR_IA32_INTERRUPT_SSP_TABLE_ADDR\r | |
233 | mov eax, strict dword 0 ; source operand will be patched\r | |
234 | ASM_PFX(mPatchCetInterruptSspTable):\r | |
235 | xor edx, edx\r | |
236 | wrmsr\r | |
237 | \r | |
238 | mov eax, strict dword 0 ; source operand will be patched\r | |
239 | ASM_PFX(mPatchCetInterruptSsp):\r | |
240 | cmp eax, 0\r | |
241 | jz CetInterruptDone\r | |
242 | mov [eax], eax ; reload SSP, and clear busyflag.\r | |
243 | xor ecx, ecx\r | |
244 | mov [eax + 4], ecx\r | |
245 | CetInterruptDone:\r | |
246 | \r | |
247 | mov rcx, cr0\r | |
248 | bts ecx, 16 ; set WP\r | |
249 | mov cr0, rcx\r | |
250 | \r | |
251 | mov eax, 0x668 | CR4_CET\r | |
252 | mov cr4, rax\r | |
253 | \r | |
254 | SETSSBSY\r | |
255 | \r | |
256 | CetDone:\r | |
257 | \r | |
258 | ;\r | |
259 | ; Save FP registers\r | |
260 | ;\r | |
261 | sub rsp, 0x200\r | |
262 | fxsave64 [rsp]\r | |
263 | \r | |
264 | add rsp, -0x20\r | |
265 | \r | |
266 | mov rcx, rbx\r | |
267 | mov rax, strict qword 0 ; call ASM_PFX(CpuSmmDebugEntry)\r | |
268 | CpuSmmDebugEntryAbsAddr:\r | |
269 | call rax\r | |
270 | \r | |
271 | mov rcx, rbx\r | |
272 | mov rax, strict qword 0 ; call ASM_PFX(SmiRendezvous)\r | |
273 | SmiRendezvousAbsAddr:\r | |
274 | call rax\r | |
275 | \r | |
276 | mov rcx, rbx\r | |
277 | mov rax, strict qword 0 ; call ASM_PFX(CpuSmmDebugExit)\r | |
278 | CpuSmmDebugExitAbsAddr:\r | |
279 | call rax\r | |
280 | \r | |
281 | add rsp, 0x20\r | |
282 | \r | |
283 | ;\r | |
284 | ; Restore FP registers\r | |
285 | ;\r | |
286 | fxrstor64 [rsp]\r | |
287 | \r | |
288 | add rsp, 0x200\r | |
289 | \r | |
290 | mov rax, strict qword 0 ; mov rax, ASM_PFX(mCetSupported)\r | |
291 | mCetSupportedAbsAddr:\r | |
292 | mov al, [rax]\r | |
293 | cmp al, 0\r | |
294 | jz CetDone2\r | |
295 | \r | |
296 | mov eax, 0x668\r | |
297 | mov cr4, rax ; disable CET\r | |
298 | \r | |
299 | mov ecx, MSR_IA32_INTERRUPT_SSP_TABLE_ADDR\r | |
300 | pop rax\r | |
301 | pop rdx\r | |
302 | wrmsr\r | |
303 | \r | |
304 | mov ecx, MSR_IA32_PL0_SSP\r | |
305 | pop rax\r | |
306 | pop rdx\r | |
307 | wrmsr\r | |
308 | \r | |
309 | mov ecx, MSR_IA32_S_CET\r | |
310 | pop rax\r | |
311 | pop rdx\r | |
312 | wrmsr\r | |
313 | CetDone2:\r | |
314 | \r | |
315 | mov rax, strict qword 0 ; lea rax, [ASM_PFX(mXdSupported)]\r | |
316 | mXdSupportedAbsAddr:\r | |
317 | mov al, [rax]\r | |
318 | cmp al, 0\r | |
319 | jz .1\r | |
320 | pop rdx ; get saved MSR_IA32_MISC_ENABLE[63-32]\r | |
321 | test edx, BIT2\r | |
322 | jz .1\r | |
323 | mov ecx, MSR_IA32_MISC_ENABLE\r | |
324 | rdmsr\r | |
325 | or dx, BIT2 ; set XD Disable bit if it was set before entering into SMM\r | |
326 | wrmsr\r | |
327 | \r | |
328 | .1:\r | |
329 | \r | |
330 | StuffRsb64\r | |
331 | rsm\r | |
332 | \r | |
333 | ASM_PFX(gcSmiHandlerSize) DW $ - _SmiEntryPoint\r | |
334 | \r | |
335 | ;\r | |
336 | ; Retrieve the address and fill it into mov opcode.\r | |
337 | ;\r | |
338 | ; It is called in the driver entry point first.\r | |
339 | ; It is used to fix up the real address in mov opcode.\r | |
340 | ; Then, after the code logic is copied to the different location,\r | |
341 | ; the code can also run.\r | |
342 | ;\r | |
343 | global ASM_PFX(PiSmmCpuSmiEntryFixupAddress)\r | |
344 | ASM_PFX(PiSmmCpuSmiEntryFixupAddress):\r | |
345 | lea rax, [ASM_PFX(gSmiHandlerIdtr)]\r | |
346 | lea rcx, [SmiHandlerIdtrAbsAddr]\r | |
347 | mov qword [rcx - 8], rax\r | |
348 | \r | |
349 | lea rax, [ASM_PFX(CpuSmmDebugEntry)]\r | |
350 | lea rcx, [CpuSmmDebugEntryAbsAddr]\r | |
351 | mov qword [rcx - 8], rax\r | |
352 | \r | |
353 | lea rax, [ASM_PFX(SmiRendezvous)]\r | |
354 | lea rcx, [SmiRendezvousAbsAddr]\r | |
355 | mov qword [rcx - 8], rax\r | |
356 | \r | |
357 | lea rax, [ASM_PFX(CpuSmmDebugExit)]\r | |
358 | lea rcx, [CpuSmmDebugExitAbsAddr]\r | |
359 | mov qword [rcx - 8], rax\r | |
360 | \r | |
361 | lea rax, [ASM_PFX(mXdSupported)]\r | |
362 | lea rcx, [mXdSupportedAbsAddr]\r | |
363 | mov qword [rcx - 8], rax\r | |
364 | \r | |
365 | lea rax, [ASM_PFX(mCetSupported)]\r | |
366 | lea rcx, [mCetSupportedAbsAddr]\r | |
367 | mov qword [rcx - 8], rax\r | |
368 | ret\r |