\r
#------------------------------------------------------------------------------\r
#*\r
#* Copyright (c) 2008 - 2013, Intel Corporation. All rights reserved.<BR>
#* This program and the accompanying materials\r
#* are licensed and made available under the terms and conditions of the BSD License\r
#* which accompanies this distribution. The full text of the license may be found at\r
#text SEGMENT\r
\r
\r
-#EXTRN ASM_PFX(mErrorCodeFlag):DWORD # Error code flags for exceptions\r
-\r
-\r
-#\r
-# point to the external interrupt vector table\r
-#\r
#
# Pointer to the external interrupt vector table.
# Holds one 64-bit pointer (presumably to an array of handler
# pointers, one per vector, as indexed by CommonInterruptEntry —
# see `movq (%rax,%rcx,8), %rax` there).
#
ExternalVectorTablePtr:
    .byte 0, 0, 0, 0, 0, 0, 0, 0

#------------------------------------------------------------------------------
# VOID
# EFIAPI
# InitializeExternalVectorTablePtr (
#   IN VOID  *VectorTable       // RCX (UEFI / Microsoft x64 ABI first arg)
#   );
#
# Records the caller-supplied vector table pointer for later use by the
# common interrupt dispatcher.  Clobbers: RAX, flags.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(InitializeExternalVectorTablePtr)
ASM_PFX(InitializeExternalVectorTablePtr):
    lea     ExternalVectorTablePtr(%rip), %rax   # rax = &ExternalVectorTablePtr
    mov     %rcx, (%rax)                         # store the table pointer
                                                 # (original comment said "save
                                                 # vector number" — copy-paste
                                                 # error; a pointer is saved)
    ret
-\r
-\r
#------------------------------------------------------------------------------\r
# VOID\r
# SetCodeSelector (\r
movw %cx, %gs\r
ret\r
\r
#---------------------------------------;
# CommonInterruptEntry                  ;
#---------------------------------------;
# Common entry point shared by every interrupt/exception vector stub.
# Each per-vector stub CALLs here, so the "return address" on the stack
# is actually the address of the 16-bit vector number stored after the
# call instruction.  This routine:
#   1. Normalizes the stack so every vector has an error-code slot.
#   2. Saves a full EFI_SYSTEM_CONTEXT_X64 (GPRs, segments, descriptor
#      tables, control/debug registers, FX save area).
#   3. Dispatches to the handler registered in ExternalVectorTablePtr,
#      if one exists for this vector.
#   4. Restores context and returns with iretq.

ASM_GLOBAL ASM_PFX(CommonInterruptEntry)
ASM_PFX(CommonInterruptEntry):
    cli
    #
    # All interrupt handlers are invoked through interrupt gates, so
    # the IF flag is automatically cleared at the entry point.
    #

    #
    # Calculate vector number
    #
    xchgq   (%rsp), %rcx            # rcx = address of the vector number word
                                    # (the "return address" of the stub's call);
                                    # old rcx is parked in that stack slot
    movzwl  (%rcx), %ecx            # ecx = vector number
    cmp     $32, %ecx               # Intel-reserved exception vector (0-31)?
    jae     NoErrorCode             # hardware never pushes an error code for
                                    # external interrupts
    pushq   %rax
    leaq    ASM_PFX(mErrorCodeFlag)(%rip), %rax
    bt      %ecx, (%rax)            # CF = 1 if this exception pushes an error code
    popq    %rax
    jc      CommonInterruptEntry_al_0000

NoErrorCode:

    #
    # Push a dummy error code on the stack
    # to maintain a coherent stack map: duplicate the saved-RCX slot
    # downward, then zero the vacated slot as the fake error code.
    #
    pushq   (%rsp)
    movq    $0, 8(%rsp)
CommonInterruptEntry_al_0000:
    pushq   %rbp
    movq    %rsp, %rbp

    #
    # Stack:
    # +---------------------+ <-- 16-byte aligned ensured by processor
    # + Old SS              +
    # +---------------------+
    # + Old RSP             +
    # +---------------------+
    # + RFlags              +
    # +---------------------+
    # + CS                  +
    # +---------------------+
    # + RIP                 +
    # +---------------------+
    # + Error Code          +
    # +---------------------+
    # + RCX / Vector Number +
    # +---------------------+
    # + RBP                 +
    # +---------------------+ <-- RBP, 16-byte aligned
    #

    #
    # Since the stack pointer is 16-byte aligned here, the
    # EFI_FX_SAVE_STATE_X64 member of EFI_SYSTEM_CONTEXT_X64
    # ends up 16-byte aligned too (required by fxsave).
    #

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    pushq   %r15
    pushq   %r14
    pushq   %r13
    pushq   %r12
    pushq   %r11
    pushq   %r10
    pushq   %r9
    pushq   %r8
    pushq   %rax
    pushq   8(%rbp)                 # RCX (parked by the xchg above)
    pushq   %rdx
    pushq   %rbx
    pushq   48(%rbp)                # RSP (from the interrupt frame)
    pushq   (%rbp)                  # RBP (saved in the prologue)
    pushq   %rsi
    pushq   %rdi

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;  ensure high bits of each are zero
    movzwq  56(%rbp), %rax
    pushq   %rax                    # for ss
    movzwq  32(%rbp), %rax
    pushq   %rax                    # for cs
    movl    %ds, %eax
    pushq   %rax
    movl    %es, %eax               # FIX: was "erax" (missing %) — did not assemble
    pushq   %rax
    movl    %fs, %eax
    pushq   %rax
    movl    %gs, %eax
    pushq   %rax

    movq    %rcx, 8(%rbp)           # save vector number into the RCX slot

#; UINT64  Rip;
    pushq   24(%rbp)

#; UINT64  Gdtr[2], Idtr[2];
    xorq    %rax, %rax
    pushq   %rax
    pushq   %rax
    sidt    (%rsp)                  # 10-byte IDTR into the 16-byte slot...
    xchgq   2(%rsp), %rax           # ...then rotate to { base, limit } layout
    xchgq   (%rsp), %rax
    xchgq   8(%rsp), %rax

    xorq    %rax, %rax
    pushq   %rax
    pushq   %rax
    sgdt    (%rsp)
    xchgq   2(%rsp), %rax
    xchgq   (%rsp), %rax
    xchgq   8(%rsp), %rax

#; UINT64  Ldtr, Tr;
    xorq    %rax, %rax
    str     %ax
    pushq   %rax
    sldt    %ax
    pushq   %rax

#; UINT64  RFlags;
    pushq   40(%rbp)

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    movq    %cr8, %rax
    pushq   %rax
    movq    %cr4, %rax
    orq     $0x208, %rax            # set CR4.OSFXSR (bit 9) and CR4.DE (bit 3)
                                    # so fxsave/fxrstor below are usable
    movq    %rax, %cr4
    pushq   %rax
    mov     %cr3, %rax
    pushq   %rax
    mov     %cr2, %rax
    pushq   %rax
    xorq    %rax, %rax              # CR1 does not exist; store zero
    pushq   %rax
    mov     %cr0, %rax
    pushq   %rax

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
    movq    %dr7, %rax
    pushq   %rax
    movq    %dr6, %rax
    pushq   %rax
    movq    %dr3, %rax
    pushq   %rax
    movq    %dr2, %rax
    pushq   %rax
    movq    %dr1, %rax
    pushq   %rax
    movq    %dr0, %rax
    pushq   %rax

#; FX_SAVE_STATE_X64 FxSaveState;
    subq    $512, %rsp
    movq    %rsp, %rdi
    .byte   0x0f, 0x0ae, 0x07       # fxsave [rdi] (encoded raw for old assemblers)

#; UEFI calling convention for x64 requires the Direction flag to be clear
    cld

#; UINT32  ExceptionData;
    pushq   16(%rbp)

#; Look up and call the registered handler for this vector
    movq    8(%rbp), %rcx           # rcx = vector number
    leaq    ExternalVectorTablePtr(%rip), %rax
    movq    (%rax), %rax            # FIX: was "movl (%eax), %eax" — 32-bit
                                    # address-size deref truncated the pointer
                                    # and loaded only its low 32 bits
    movq    (%rax,%rcx,8), %rax     # rax = handler[vector]
    orq     %rax, %rax              # NULL?

    je      nonNullValue            # no handler registered: skip the dispatch
                                    # (FIX: stray '#' glued to the label removed)

#; Prepare parameters and call: rcx already = vector number,
#; rdx = pointer to the saved EFI_SYSTEM_CONTEXT_X64
    mov     %rsp, %rdx
    #
    # Per x64 calling convention, allocate maximum parameter stack space
    # (32-byte shadow space + 8) which also keeps RSP 16-byte aligned.
    #
    subq    $40, %rsp
    call    *%rax
    addq    $40, %rsp

nonNullValue:
    cli                             # handler may have re-enabled interrupts
#; UINT64  ExceptionData;
    addq    $8, %rsp

#; FX_SAVE_STATE_X64 FxSaveState;

    movq    %rsp, %rsi
    .byte   0x0f, 0x0ae, 0x0E       # fxrstor [rsi]
    addq    $512, %rsp

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
#; Skip restoration of DRx registers to support in-circuit emulators
#; or debuggers setting breakpoints in interrupt/exception context
    addq    $48, %rsp

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    popq    %rax
    movq    %rax, %cr0
    addq    $8, %rsp                # not for Cr1
    popq    %rax
    movq    %rax, %cr2
    popq    %rax
    movq    %rax, %cr3
    popq    %rax
    movq    %rax, %cr4
    popq    %rax
    movq    %rax, %cr8

#; UINT64  RFlags;
    popq    40(%rbp)                # restore into the iret frame

#; UINT64  Ldtr, Tr;
#; UINT64  Gdtr[2], Idtr[2];
#; Best not let anyone mess with these particular registers...
    addq    $48, %rsp

#; UINT64  Rip;
    popq    24(%rbp)

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;
    popq    %rax
    # mov %rax, %gs ; not for gs
    popq    %rax
    # mov %rax, %fs ; not for fs
    # (X64 will not use fs and gs, so we do not restore them)
    popq    %rax
    movl    %eax, %es
    popq    %rax
    movl    %eax, %ds
    popq    32(%rbp)                # for cs
    popq    56(%rbp)                # for ss

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    popq    %rdi
    popq    %rsi
    addq    $8, %rsp                # not for rbp
    popq    48(%rbp)                # for rsp
    popq    %rbx
    popq    %rdx
    popq    %rcx
    popq    %rax
    popq    %r8
    popq    %r9
    popq    %r10
    popq    %r11
    popq    %r12
    popq    %r13
    popq    %r14
    popq    %r15

    movq    %rbp, %rsp
    popq    %rbp
    addq    $16, %rsp               # discard vector number + error code
    iretq
-\r
-\r
#text ENDS\r
\r
#END\r