;------------------------------------------------------------------------------ ;
; Copyright (c) 2012 - 2022, Intel Corporation. All rights reserved.<BR>
; SPDX-License-Identifier: BSD-2-Clause-Patent
;
; Module Name:
;
;   ExceptionHandlerAsm.Asm
;
; Abstract:
;
;   x64 CPU Exception Handler
;
; Notes:
;
;------------------------------------------------------------------------------
%include "Nasm.inc"

;
; Equivalent NASM structure of IA32_DESCRIPTOR
;
struc IA32_DESCRIPTOR
  .Limit        CTYPE_UINT16 1
  .Base         CTYPE_UINTN  1
endstruc

;
; Equivalent NASM structure of IA32_IDT_GATE_DESCRIPTOR
;
struc IA32_IDT_GATE_DESCRIPTOR
  .OffsetLow    CTYPE_UINT16 1
  .Selector     CTYPE_UINT16 1
  .Reserved_0   CTYPE_UINT8  1
  .GateType     CTYPE_UINT8  1
  .OffsetHigh   CTYPE_UINT16 1
  .OffsetUpper  CTYPE_UINT32 1
  .Reserved_1   CTYPE_UINT32 1
endstruc
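;
; Note: IA32_IDT_GATE_DESCRIPTOR.Reserved_0 corresponds to byte 4 of the x64
; IDT gate descriptor, whose low bits hold the IST index; the CET cleanup code
; below reads this field to tell whether the vector runs on an IST stack.
;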

;
; CommonExceptionHandler()
;

%define VC_EXCEPTION    29

extern ASM_PFX(mErrorCodeFlag)    ; Error code flags for exceptions
extern ASM_PFX(mDoFarReturnFlag)  ; Do far return flag
extern ASM_PFX(CommonExceptionHandler)

SECTION .data

DEFAULT REL
SECTION .text

ALIGN   8

; Generate 256 IDT vectors.
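; Each vector stub below pushes its vector number, saves RAX, and jumps to
; CommonInterruptEntry. The "mov rax, strict qword 0" is a placeholder that is
; patched at runtime by AsmGetTemplateAddressMap() with the absolute address of
; CommonInterruptEntry (see the fixup loop there), so no absolute relocation is
; emitted in this template.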
AsmIdtVectorBegin:
%assign Vector 0
%rep  256
    push    strict dword %[Vector] ; This instruction pushes a sign-extended 8-byte value onto the stack
    push    rax
    mov     rax, strict qword 0    ; mov rax, ASM_PFX(CommonInterruptEntry)
    jmp     rax
%assign Vector Vector+1
%endrep
AsmIdtVectorEnd:

HookAfterStubHeaderBegin:
    push    strict dword 0      ; 0 will be fixed
VectorNum:
    push    rax
    mov     rax, strict qword 0 ; mov rax, HookAfterStubHeaderEnd
JmpAbsoluteAddress:
    jmp     rax
HookAfterStubHeaderEnd:
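    ; On entry here, [rsp] holds the interrupted context's RAX (pushed by the
    ; stub above) and [rsp + 8] holds the vector number; RAX below is used as
    ; a pointer to this original frame while RSP is re-aligned.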
    mov     rax, rsp
    and     sp, 0xfff0          ; make sure 16-byte aligned for exception context
    sub     rsp, 0x18           ; reserve room for filling exception data later
    push    rcx
    mov     rcx, [rax + 8]
    bt      [ASM_PFX(mErrorCodeFlag)], ecx
    jnc     .0
    push    qword [rsp]         ; push an additional copy of rcx to keep the stack aligned
.0:
    xchg    rcx, [rsp]          ; restore rcx, save the exception number on the stack
    push    qword [rax]         ; push rax onto the stack to keep the code consistent

;---------------------------------------;
; CommonInterruptEntry                  ;
;---------------------------------------;
; The following algorithm is used for the common interrupt routine.
; Each vector stub enters here after pushing its vector number and RAX.
; The stack frame is laid out as follows, as specified in the IA32 manuals:
;
; +---------------------+ <-- 16-byte aligned ensured by processor
; +    Old SS           +
; +---------------------+
; +    Old RSP          +
; +---------------------+
; +    RFlags           +
; +---------------------+
; +    CS               +
; +---------------------+
; +    RIP              +
; +---------------------+
; +    Error Code       +
; +---------------------+
; +    Vector Number    +
; +---------------------+
; +    RBP              +
; +---------------------+ <-- RBP, 16-byte aligned
global ASM_PFX(CommonInterruptEntry)
ASM_PFX(CommonInterruptEntry):
    cli
    pop     rax
    ;
    ; All interrupt handlers are invoked through interrupt gates, so
    ; the IF flag is automatically cleared at the entry point
    ;
    xchg    rcx, [rsp]      ; Save rcx into stack and save vector number into rcx
    and     rcx, 0xFF
    cmp     ecx, 32         ; Intel reserved vector for exceptions?
    jae     NoErrorCode
    bt      [ASM_PFX(mErrorCodeFlag)], ecx
    jc      HasErrorCode

NoErrorCode:

    ;
    ; Push a dummy error code on the stack
    ; to maintain a coherent stack map
    ;
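    ; The push below duplicates the saved RCX slot one position down; the freed
    ; slot at [rsp + 8] is then overwritten with 0 so the frame matches the
    ; HasErrorCode layout, where the CPU pushed a real error code.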
    push    qword [rsp]
    mov     qword [rsp + 8], 0
HasErrorCode:
    push    rbp
    mov     rbp, rsp
    push    0               ; clear EXCEPTION_HANDLER_CONTEXT.OldIdtHandler
    push    0               ; clear EXCEPTION_HANDLER_CONTEXT.ExceptionDataFlag

    ;
    ; Stack:
    ; +---------------------+ <-- 16-byte aligned ensured by processor
    ; +    Old SS           +
    ; +---------------------+
    ; +    Old RSP          +
    ; +---------------------+
    ; +    RFlags           +
    ; +---------------------+
    ; +    CS               +
    ; +---------------------+
    ; +    RIP              +
    ; +---------------------+
    ; +    Error Code       +
    ; +---------------------+
    ; + RCX / Vector Number +
    ; +---------------------+
    ; +    RBP              +
    ; +---------------------+ <-- RBP, 16-byte aligned
    ;

    ;
    ; Since the stack pointer is 16-byte aligned here, the
    ; EFI_FX_SAVE_STATE_X64 member of EFI_SYSTEM_CONTEXT_X64
    ; is also 16-byte aligned
    ;

;; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
;; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    push    r15
    push    r14
    push    r13
    push    r12
    push    r11
    push    r10
    push    r9
    push    r8
    push    rax
    push    qword [rbp + 8]   ; RCX
    push    rdx
    push    rbx
    push    qword [rbp + 48]  ; RSP
    push    qword [rbp]       ; RBP
    push    rsi
    push    rdi

;; UINT64  Gs, Fs, Es, Ds, Cs, Ss;  ensure the high 16 bits of each are zero
    movzx   rax, word [rbp + 56]
    push    rax                     ; for ss
    movzx   rax, word [rbp + 32]
    push    rax                     ; for cs
    mov     rax, ds
    push    rax
    mov     rax, es
    push    rax
    mov     rax, fs
    push    rax
    mov     rax, gs
    push    rax

    mov     [rbp + 8], rcx          ; save vector number

;; UINT64  Rip;
    push    qword [rbp + 24]

;; UINT64  Gdtr[2], Idtr[2];
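    ; SIDT/SGDT store a 10-byte pseudo-descriptor: Limit in the first 2 bytes
    ; and Base in the following 8. The shuffles below repack each one into two
    ; UINT64 slots, with Base in the first qword and Limit in the low word of
    ; the second.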
    xor     rax, rax
    push    rax
    push    rax
    sidt    [rsp]
    mov     bx, word [rsp]
    mov     rax, qword [rsp + 2]
    mov     qword [rsp], rax
    mov     word [rsp + 8], bx

    xor     rax, rax
    push    rax
    push    rax
    sgdt    [rsp]
    mov     bx, word [rsp]
    mov     rax, qword [rsp + 2]
    mov     qword [rsp], rax
    mov     word [rsp + 8], bx

;; UINT64  Ldtr, Tr;
    xor     rax, rax
    str     ax
    push    rax
    sldt    ax
    push    rax

;; UINT64  RFlags;
    push    qword [rbp + 40]

;; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    mov     rax, cr8
    push    rax
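    ; 0x208 below sets CR4.DE (bit 3) and CR4.OSFXSR (bit 9); with OSFXSR set,
    ; fxsave/fxrstor further down are guaranteed to cover the XMM and MXCSR
    ; state as well.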
    mov     rax, cr4
    or      rax, 0x208
    mov     cr4, rax
    push    rax
    mov     rax, cr3
    push    rax
    mov     rax, cr2
    push    rax
    xor     rax, rax
    push    rax
    mov     rax, cr0
    push    rax

;; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
    cmp     qword [rbp + 8], VC_EXCEPTION
    je      VcDebugRegs          ; For SEV-ES (#VC) Debug registers ignored

    mov     rax, dr7
    push    rax
    mov     rax, dr6
    push    rax
    mov     rax, dr3
    push    rax
    mov     rax, dr2
    push    rax
    mov     rax, dr1
    push    rax
    mov     rax, dr0
    push    rax
    jmp     DrFinish

VcDebugRegs:
;; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7 are skipped for #VC to avoid exception recursion
    xor     rax, rax
    push    rax
    push    rax
    push    rax
    push    rax
    push    rax
    push    rax

DrFinish:
;; FX_SAVE_STATE_X64 FxSaveState;
    sub     rsp, 512
    mov     rdi, rsp
    fxsave  [rdi]

;; UEFI calling convention for x64 requires that Direction flag in EFLAGs is clear
    cld

;; UINT32  ExceptionData;
    push    qword [rbp + 16]

;; Prepare parameter and call
    mov     rcx, [rbp + 8]
    mov     rdx, rsp
    ;
    ; Per X64 calling convention, allocate maximum parameter stack space
    ; and make sure RSP is 16-byte aligned
    ;
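    ; 4 * 8 bytes of home space for the four register parameters, plus 8 bytes
    ; of padding so RSP stays 16-byte aligned across the call.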
    sub     rsp, 4 * 8 + 8
    call    ASM_PFX(CommonExceptionHandler)
    add     rsp, 4 * 8 + 8

    ; The following algorithm is used to clear the shadow stack token busy bit.
    ; The comments are based on a sample shadow stack.
    ; The shadow stack is 32-byte aligned.
    ; The sample shadow stack layout:
    ; Address | Context
    ; +-------------------------+
    ; 0xFB8   | FREE             | It is 0xFC0|0x02|(LMA & CS.L), after SAVEPREVSSP.
    ; +-------------------------+
    ; 0xFC0   | Prev SSP         |
    ; +-------------------------+
    ; 0xFC8   | RIP              |
    ; +-------------------------+
    ; 0xFD0   | CS               |
    ; +-------------------------+
    ; 0xFD8   | 0xFD8 | BUSY     | BUSY flag cleared after CLRSSBSY
    ; +-------------------------+
    ; 0xFE0   | 0xFC0|0x02|(LMA & CS.L) |
    ; +-------------------------+
    ; Instructions for Intel Control Flow Enforcement Technology (CET) are supported since NASM version 2.15.01.
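    ; Note: this cleanup only runs when a far return (mDoFarReturnFlag) will be
    ; used instead of IRET and the vector has an IST entry; in that case the
    ; busy token left on the interrupt shadow stack is not cleared by hardware,
    ; so it is cleared and the previous SSP restored manually below.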
    cmp     qword [ASM_PFX(mDoFarReturnFlag)], 0
    jz      CetDone
    mov     rax, cr4
    and     rax, 0x800000           ; Check if CET is enabled
    jz      CetDone
    sub     rsp, 0x10
    sidt    [rsp]
    mov     rcx, qword [rsp + IA32_DESCRIPTOR.Base] ; Get IDT base address
    add     rsp, 0x10
    mov     rax, qword [rbp + 8]    ; Get exception number
    sal     rax, 0x04               ; Get IDT offset
    add     rax, rcx                ; Get IDT gate descriptor address
    mov     al, byte [rax + IA32_IDT_GATE_DESCRIPTOR.Reserved_0]
    and     rax, 0x01               ; Check IST field
    jz      CetDone
    ; SSP should be 0xFC0 at this point
    mov     rax, 0x04               ; advance past cs:lip:prevssp;supervisor shadow stack token
    incsspq rax                     ; After this SSP should be 0xFE0
    saveprevssp                     ; now the shadow stack restore token will be created at 0xFB8
    rdsspq  rax                     ; Read new SSP, SSP should be 0xFE8
    sub     rax, 0x10
    clrssbsy [rax]                  ; Clear token at 0xFD8, SSP should be 0 after this
    sub     rax, 0x20
    rstorssp [rax]                  ; Restore to token at 0xFB8, new SSP will be 0xFB8
    mov     rax, 0x01               ; Pop off the new save token created
    incsspq rax                     ; SSP should be 0xFC0 now
CetDone:

    cli
;; UINT64  ExceptionData;
    add     rsp, 8

;; FX_SAVE_STATE_X64 FxSaveState;

    mov     rsi, rsp
    fxrstor [rsi]
    add     rsp, 512

;; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
;; Skip restoration of DRx registers to support in-circuit emulators
;; or debuggers that set breakpoints in the interrupt/exception context
    add     rsp, 8 * 6

;; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    pop     rax
    mov     cr0, rax
    add     rsp, 8            ; not for Cr1
    pop     rax
    mov     cr2, rax
    pop     rax
    mov     cr3, rax
    pop     rax
    mov     cr4, rax
    pop     rax
    mov     cr8, rax

;; UINT64  RFlags;
    pop     qword [rbp + 40]

;; UINT64  Ldtr, Tr;
;; UINT64  Gdtr[2], Idtr[2];
;; Best not let anyone mess with these particular registers...
    add     rsp, 48

;; UINT64  Rip;
    pop     qword [rbp + 24]

;; UINT64  Gs, Fs, Es, Ds, Cs, Ss;
    pop     rax
    ; mov   gs, rax ; not for gs
    pop     rax
    ; mov   fs, rax ; not for fs
    ; (X64 will not use fs and gs, so we do not restore them)
    pop     rax
    mov     es, rax
    pop     rax
    mov     ds, rax
    pop     qword [rbp + 32]  ; for cs
    pop     qword [rbp + 56]  ; for ss

;; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
;; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    pop     rdi
    pop     rsi
    add     rsp, 8            ; not for rbp
    pop     qword [rbp + 48]  ; for rsp
    pop     rbx
    pop     rdx
    pop     rcx
    pop     rax
    pop     r8
    pop     r9
    pop     r10
    pop     r11
    pop     r12
    pop     r13
    pop     r14
    pop     r15

    mov     rsp, rbp
    pop     rbp
    add     rsp, 16
    cmp     qword [rsp - 32], 0  ; check EXCEPTION_HANDLER_CONTEXT.OldIdtHandler
    jz      DoReturn
    cmp     qword [rsp - 40], 1  ; check EXCEPTION_HANDLER_CONTEXT.ExceptionDataFlag
    jz      ErrorCode
    jmp     qword [rsp - 32]
ErrorCode:
    sub     rsp, 8
    jmp     qword [rsp - 24]

DoReturn:
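    ; The far-return path re-creates RFLAGS, CS and RIP on the interrupted
    ; context's own stack (Old RSP from the frame above), then returns with
    ; popfq + retfq instead of iretq.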
    cmp     qword [ASM_PFX(mDoFarReturnFlag)], 0  ; Check whether to do a far return instead of IRET
    jz      DoIret
    push    rax
    mov     rax, rsp            ; save old RSP to rax
    mov     rsp, [rsp + 0x20]
    push    qword [rax + 0x10]  ; save CS in new location
    push    qword [rax + 0x8]   ; save RIP in new location
    push    qword [rax + 0x18]  ; save RFLAGS in new location
    mov     rax, [rax]          ; restore rax
    popfq                       ; restore RFLAGS
    retfq
DoIret:
    iretq

;-------------------------------------------------------------------------------------
;  GetTemplateAddressMap (&AddressMap);
;-------------------------------------------------------------------------------------
; AddressMap receives the base address of the vector stub template, the size of
; each per-vector stub, and the address of HookAfterStubHeaderBegin.
global ASM_PFX(AsmGetTemplateAddressMap)
ASM_PFX(AsmGetTemplateAddressMap):
    lea     rax, [AsmIdtVectorBegin]
    mov     qword [rcx], rax
    mov     qword [rcx + 0x8], (AsmIdtVectorEnd - AsmIdtVectorBegin) / 256
    lea     rax, [HookAfterStubHeaderBegin]
    mov     qword [rcx + 0x10], rax

; Fix up CommonInterruptEntry address
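; JmpAbsoluteAddress - 8 is the start of the 8-byte immediate in the
; "mov rax, strict qword 0" placeholder; since every vector stub shares the
; layout of the hook-after header, the same offset is used to patch the
; address of CommonInterruptEntry into each of the 256 stubs below.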
    lea     rax, [ASM_PFX(CommonInterruptEntry)]
    lea     rcx, [AsmIdtVectorBegin]
%rep  256
    mov     qword [rcx + (JmpAbsoluteAddress - 8 - HookAfterStubHeaderBegin)], rax
    add     rcx, (AsmIdtVectorEnd - AsmIdtVectorBegin) / 256
%endrep
; Fix up HookAfterStubHeaderEnd
    lea     rax, [HookAfterStubHeaderEnd]
    lea     rcx, [JmpAbsoluteAddress]
    mov     qword [rcx - 8], rax

    ret

;-------------------------------------------------------------------------------------
;  AsmVectorNumFixup (*NewVectorAddr, VectorNum, *OldVectorAddr);
;-------------------------------------------------------------------------------------
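; Copies the vector number (passed in RDX) into the low byte of the
; "push strict dword 0" immediate in the copied stub at NewVectorAddr (RCX);
; VectorNum - 4 is where that 32-bit immediate begins. OldVectorAddr (R8) is
; not referenced here.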
global ASM_PFX(AsmVectorNumFixup)
ASM_PFX(AsmVectorNumFixup):
    mov     rax, rdx
    mov     [rcx + (VectorNum - 4 - HookAfterStubHeaderBegin)], al
    ret