1 #------------------------------------------------------------------------------ ;
2 # Copyright (c) 2012 - 2014, Intel Corporation. All rights reserved.<BR>
3 # This program and the accompanying materials
4 # are licensed and made available under the terms and conditions of the BSD License
5 # which accompanies this distribution. The full text of the license may be found at
6 # http://opensource.org/licenses/bsd-license.php.
8 # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
13 # ExceptionHandlerAsm.S
17 # x64 CPU Exception Handler
21 #------------------------------------------------------------------------------
# C-level dispatcher implemented elsewhere; the common entry path below
# calls it with the vector number and a pointer to the saved context.
ASM_GLOBAL ASM_PFX(CommonExceptionHandler)
# External data referenced via RIP-relative addressing below (declarations
# kept as comments because GNU as does not need/understand MASM EXTRN):
#EXTRN ASM_PFX(mErrorCodeFlag):DWORD # Error code flags for exceptions
#EXTRN ASM_PFX(mDoFarReturnFlag):QWORD # Do far return flag
# macros are different between GNU and Xcode as.
# Tail of a per-vector IDT entry stub: after pushing/identifying its vector
# number, every stub funnels into the shared dispatch routine.
# NOTE(review): the stub's preceding instructions (and the AsmIdtVectorBegin
# label this jmp belongs to) are not visible in this chunk.
jmp ASM_PFX(CommonInterruptEntry)
#---------------------------------------;
# Template stub used when a "hook after" handler is registered for a vector.
# A copy of this template is patched at runtime by AsmVectorNumFixup (below):
# the vector-number byte and the rel32 jmp operand are both rewritten.
# NOTE(review): interior lines are missing from this chunk; the
# PatchVectorNum / PatchFuncAddress labels referenced by AsmVectorNumFixup
# presumably sit among these .byte directives — confirm against full source.
HookAfterStubHeaderBegin:
.byte 0 # placeholder vector number, fixed up at runtime
.byte 0xe9 # jmp ASM_PFX(HookAfterStubHeaderEnd)
# rel32 operand for the jmp above, relative to the next instruction (. + 4).
.set HOOK_ADDRESS, ASM_PFX(HookAfterStubHeaderEnd) - . - 4
.long HOOK_ADDRESS # will be fixed (rebased when the template is copied)
ASM_GLOBAL ASM_PFX(HookAfterStubHeaderEnd)
ASM_PFX(HookAfterStubHeaderEnd):
# NOTE(review): writing %esp zeroes the upper 32 bits of %rsp, so this
# alignment mask is only safe if the stack lives below 4 GiB — verify
# (later code bases use a 64-bit andq here).
andl $0x0fffffff0, %esp # make sure 16-byte aligned for exception context
subq $0x18, %rsp # reserve room for filling exception data later
# Test this vector's bit in mErrorCodeFlag: CF=1 when the CPU pushed a
# hardware error code for this vector (the branch consuming CF is not
# visible in this chunk).
bt %ecx, ASM_PFX(mErrorCodeFlag)(%rip)
pushq (%rsp) # push additional rcx to make stack alignment
xchgq (%rsp), %rcx # restore rcx, save Exception Number in stack
movq (%rax), %rax # restore rax
#---------------------------------------;
# CommonInterruptEntry ;
#---------------------------------------;
# The following algorithm is used for the common interrupt routine:
# build an EFI_SYSTEM_CONTEXT_X64 on the stack, call the C handler
# CommonExceptionHandler(Vector, Context), then restore state and return.
# NOTE(review): this chunk shows only a sampled subset of the routine —
# the bulk of the GP/segment/CRx/DRx save & restore sequences and several
# branches are not visible here; comments below hedge accordingly.
ASM_GLOBAL ASM_PFX(CommonInterruptEntry)
ASM_PFX(CommonInterruptEntry):
# All interrupt handlers are invoked through interrupt gates, so
# IF flag automatically cleared at the entry point
# Calculate vector number
# Each vector stub reached this point via a near call whose "return address"
# is really the address where the vector number is stored; swap it with rcx.
xchgq (%rsp), %rcx # get the return address of call, actually, it is the address of vector number.
cmp $32, %ecx # Intel reserved vector for exceptions?
movl ASM_PFX(mErrorCodeFlag)(%rip), %eax
jc CommonInterruptEntry_al_0000 # vector < 32: exception path
# Push a dummy error code on the stack
# to maintain coherent stack map
CommonInterruptEntry_al_0000:
pushq $0 # EXCEPTION_HANDLER_CONTEXT.OldIdtHandler slot, cleared
pushq $0 # EXCEPTION_HANDLER_CONTEXT.ExceptionDataFlag slot, cleared
# Stack layout at this point (diagram sampled — interior row labels for the
# CPU-pushed SS/RSP/RFLAGS/CS/RIP/ErrorCode frame are missing in this chunk):
# +---------------------+ <-- 16-byte aligned ensured by processor
# +---------------------+
# +---------------------+
# +---------------------+
# +---------------------+
# +---------------------+
# +---------------------+
# + RCX / Vector Number +
# +---------------------+
# +---------------------+ <-- RBP, 16-byte aligned
# Since here the stack pointer is 16-byte aligned, so
# EFI_FX_SAVE_STATE_X64 of EFI_SYSTEM_CONTEXT_x64
# is 16-byte aligned as required by FXSAVE.
#; UINT64 Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64 R8, R9, R10, R11, R12, R13, R14, R15;
#; UINT64 Gs, Fs, Es, Ds, Cs, Ss; ensure high 16 bits of each is zero
# movzwq reads the 16-bit selector saved in the frame and zero-extends it
# (pushes storing these into the context record are not visible here).
movzwq 56(%rbp), %rax # SS selector from interrupt frame
movzwq 32(%rbp), %rax # CS selector from interrupt frame
movq %rcx, 8(%rbp) # save vector number
#; UINT64 Gdtr[2], Idtr[2];
#; UINT64 Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
#; UINT64 Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
#; FX_SAVE_STATE_X64 FxSaveState;
# Hand-encoded because older assemblers lack the mnemonic:
.byte 0x0f, 0x0ae, 0x07 # fxsave (%rdi) — save x87/SSE state into context
#; UEFI calling convention for x64 requires that Direction flag in EFLAGs is clear
#; UINT32 ExceptionData;
#; Prepare parameter and call
# Per X64 calling convention, allocate maximum parameter stack space
# and make sure RSP is 16-byte aligned
# MS x64 ABI: RCX = vector, RDX = &SystemContext (argument setup and the
# 32-byte shadow-space allocation are not visible in this chunk).
call ASM_PFX(CommonExceptionHandler)
# --- restore path: unwind the context record in reverse order ---
#; UINT64 ExceptionData;
#; FX_SAVE_STATE_X64 FxSaveState;
.byte 0x0f, 0x0ae, 0x0E # fxrstor (%rsi) — restore x87/SSE state
#; UINT64 Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
#; Skip restoration of DRx registers to support in-circuit emulators
#; or debuggers set breakpoint in interrupt/exception context
#; UINT64 Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
addq $8, %rsp # not for Cr1 (Cr1 is not a real register; skip its slot)
#; UINT64 Gdtr[2], Idtr[2];
#; Best not let anyone mess with these particular registers...
#; UINT64 Gs, Fs, Es, Ds, Cs, Ss;
# mov %rax, %gs ; not for gs
# mov %rax, %fs ; not for fs
# (X64 will not use fs and gs, so we do not restore it)
# CS/SS cannot be written directly; store them back into the IRET frame
# so iretq reloads them on return.
popq 32(%rbp) # for cs
popq 56(%rbp) # for ss
#; UINT64 Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64 R8, R9, R10, R11, R12, R13, R14, R15;
addq $8, %rsp # not for rbp (rbp is the frame base; restored later)
popq 48(%rbp) # for rsp — route through the IRET frame, not %rsp directly
cmpq $0, -32(%rsp) # check EXCEPTION_HANDLER_CONTEXT.OldIdtHandler
jz DoReturn # no chained (old IDT) handler installed — plain return path
# (DoReturn label itself is not visible in this chunk)
movq ASM_PFX(mDoFarReturnFlag)(%rip), %rax
cmpq $0, %rax # Check if need to do far return instead of IRET
# Far-return path: rebuild CS:RIP + RFLAGS on the target stack taken from
# the saved frame, then retf to the chained handler.
movq %rsp, %rax # save old RSP to rax
movq 0x20(%rsp), %rsp # switch to the stack recorded in the frame
pushq 0x10(%rax) # save CS in new location
pushq 0x8(%rax) # save RIP (return address) in new location
pushq 0x18(%rax) # save EFLAGS in new location
movq (%rax), %rax # restore rax
popfq # restore EFLAGS
# REX.W prefix turns the following retf into a 64-bit far return so it
# pops an 8-byte RIP and 8-byte CS slot (the retf itself is not visible
# in this chunk).
.byte 0x48 # prefix to composite "retq" with next "retf"
#-------------------------------------------------------------------------------------
# AsmGetTemplateAddressMap (&AddressMap);
#-------------------------------------------------------------------------------------
# Fills the caller-provided address-map structure (pointer in RCX per the
# Microsoft x64 calling convention) with the addresses/sizes the C code
# needs to copy and patch the vector stub templates:
#   +0x00  start of the IDT vector stub template (AsmIdtVectorBegin)
#   +0x08  size of one stub entry (ENTRY_SIZE)
#   +0x10  start of the hook-after stub template (HookAfterStubHeaderBegin)
# NOTE(review): the store of %rax into (%rcx) for the first field and the
# final ret are not visible in this chunk — confirm against full source.
ASM_GLOBAL ASM_PFX(AsmGetTemplateAddressMap)
ASM_PFX(AsmGetTemplateAddressMap):
leaq AsmIdtVectorBegin(%rip), %rax # RIP-relative: PIC-safe template base
.set ENTRY_SIZE, ASM_PFX(HookAfterStubHeaderEnd) - HookAfterStubHeaderBegin
movq $(ENTRY_SIZE), 0x08(%rcx)
leaq HookAfterStubHeaderBegin(%rip), %rax
movq %rax, 0x10(%rcx)
#-------------------------------------------------------------------------------------
# AsmVectorNumFixup (
#   IN VOID *NewVectorAddr,    // RCX — runtime copy of the stub template
#   IN UINT8 VectorNum,        // RDX — vector number to stamp into the copy
#   IN VOID *OldVectorAddr     // R8  — original template address
#   );
#-------------------------------------------------------------------------------------
# Patches a copied hook-after stub: writes the vector number into the copy
# and rebases the stub's rel32 jmp operand for the new location.  Offsets
# are computed from labels inside the template (PatchVectorNum /
# PatchFuncAddress, defined with the template, not visible in this chunk).
ASM_GLOBAL ASM_PFX(AsmVectorNumFixup)
ASM_PFX(AsmVectorNumFixup):
# Patch vector number byte at the same offset it occupies in the template.
movb %dl, (PatchVectorNum - HookAfterStubHeaderBegin)(%rcx)
# Patch Function address
subq %rcx, %r8 # Calculate the offset value (old base - new base)
movl (PatchFuncAddress - HookAfterStubHeaderBegin)(%rcx), %eax
# NOTE(review): the instruction adjusting %eax by the %r8d delta between
# this load and the store below is not visible in this chunk — confirm
# (as shown, the load/store pair would be a no-op).
movl %eax, (PatchFuncAddress - HookAfterStubHeaderBegin)(%rcx)