UefiCpuPkg PiSmmCpuDxeSmm: Convert X64/SmiException.asm to NASM
author Liming Gao <liming.gao@intel.com>
Tue, 14 Jun 2016 08:36:20 +0000 (16:36 +0800)
committer Liming Gao <liming.gao@intel.com>
Tue, 28 Jun 2016 01:52:18 +0000 (09:52 +0800)
Manually convert X64/SmiException.asm to X64/SmiException.nasm

Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Liming Gao <liming.gao@intel.com>
UefiCpuPkg/PiSmmCpuDxeSmm/X64/SmiException.nasm [new file with mode: 0644]

diff --git a/UefiCpuPkg/PiSmmCpuDxeSmm/X64/SmiException.nasm b/UefiCpuPkg/PiSmmCpuDxeSmm/X64/SmiException.nasm
new file mode 100644 (file)
index 0000000..821ee18
--- /dev/null
@@ -0,0 +1,412 @@
+;------------------------------------------------------------------------------ ;\r
+; Copyright (c) 2016, Intel Corporation. All rights reserved.<BR>\r
+; This program and the accompanying materials\r
+; are licensed and made available under the terms and conditions of the BSD License\r
+; which accompanies this distribution.  The full text of the license may be found at\r
+; http://opensource.org/licenses/bsd-license.php.\r
+;\r
+; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+;\r
+; Module Name:\r
+;\r
+;   SmiException.nasm\r
+;\r
+; Abstract:\r
+;\r
+;   Exception handlers used in SM mode\r
+;\r
+;-------------------------------------------------------------------------------\r
+\r
+extern  ASM_PFX(SmiPFHandler)\r
+extern  ASM_PFX(gSmiMtrrs)\r
+\r
+global  ASM_PFX(gcSmiIdtr)\r
+global  ASM_PFX(gcSmiGdtr)\r
+global  ASM_PFX(gcPsd)\r
+\r
+    SECTION .data\r
+\r
+NullSeg: DQ 0                   ; reserved by architecture\r
+CodeSeg32:\r
+            DW      -1                  ; LimitLow\r
+            DW      0                   ; BaseLow\r
+            DB      0                   ; BaseMid\r
+            DB      0x9b\r
+            DB      0xcf                ; LimitHigh\r
+            DB      0                   ; BaseHigh\r
+ProtModeCodeSeg32:\r
+            DW      -1                  ; LimitLow\r
+            DW      0                   ; BaseLow\r
+            DB      0                   ; BaseMid\r
+            DB      0x9b\r
+            DB      0xcf                ; LimitHigh\r
+            DB      0                   ; BaseHigh\r
+ProtModeSsSeg32:\r
+            DW      -1                  ; LimitLow\r
+            DW      0                   ; BaseLow\r
+            DB      0                   ; BaseMid\r
+            DB      0x93\r
+            DB      0xcf                ; LimitHigh\r
+            DB      0                   ; BaseHigh\r
+DataSeg32:\r
+            DW      -1                  ; LimitLow\r
+            DW      0                   ; BaseLow\r
+            DB      0                   ; BaseMid\r
+            DB      0x93\r
+            DB      0xcf                ; LimitHigh\r
+            DB      0                   ; BaseHigh\r
+CodeSeg16:\r
+            DW      -1\r
+            DW      0\r
+            DB      0\r
+            DB      0x9b\r
+            DB      0x8f\r
+            DB      0\r
+DataSeg16:\r
+            DW      -1\r
+            DW      0\r
+            DB      0\r
+            DB      0x93\r
+            DB      0x8f\r
+            DB      0\r
+CodeSeg64:\r
+            DW      -1                  ; LimitLow\r
+            DW      0                   ; BaseLow\r
+            DB      0                   ; BaseMid\r
+            DB      0x9b\r
+            DB      0xaf                ; LimitHigh\r
+            DB      0                   ; BaseHigh\r
+; TSS Segment for X64 specially\r
+TssSeg:\r
+            DW      TSS_DESC_SIZE       ; LimitLow\r
+            DW      0                   ; BaseLow\r
+            DB      0                   ; BaseMid\r
+            DB      0x89\r
+            DB      0x80                ; LimitHigh\r
+            DB      0                   ; BaseHigh\r
+            DD      0                   ; BaseUpper\r
+            DD      0                   ; Reserved\r
+GDT_SIZE equ $ -   NullSeg\r
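; Each 8-byte code/data entry above follows the standard IA-32/X64 segment
; descriptor format. A minimal sketch of that field split (the struc and its
; field names are illustrative only; they are not used by this file):

        struc SEGMENT_DESCRIPTOR_SKETCH
            .LimitLow:   resw 1          ; Limit[15:0]
            .BaseLow:    resw 1          ; Base[15:0]
            .BaseMid:    resb 1          ; Base[23:16]
            .Attr:       resb 1          ; P/DPL/S/Type: 0x9b = ring-0 code, 0x93 = ring-0 data
            .LimitHigh:  resb 1          ; flags + Limit[19:16]: 0xcf = 4KB gran., 32-bit;
                                         ;   0xaf = 4KB gran., long mode (L=1); 0x8f = 16-bit
            .BaseHigh:   resb 1          ; Base[31:24]
        endstruc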
+\r
+; Create TSS Descriptor just after GDT\r
+TssDescriptor:\r
+            DD      0                   ; Reserved\r
+            DQ      0                   ; RSP0\r
+            DQ      0                   ; RSP1\r
+            DQ      0                   ; RSP2\r
+            DD      0                   ; Reserved\r
+            DD      0                   ; Reserved\r
+            DQ      0                   ; IST1\r
+            DQ      0                   ; IST2\r
+            DQ      0                   ; IST3\r
+            DQ      0                   ; IST4\r
+            DQ      0                   ; IST5\r
+            DQ      0                   ; IST6\r
+            DQ      0                   ; IST7\r
+            DD      0                   ; Reserved\r
+            DD      0                   ; Reserved\r
+            DW      0                   ; Reserved\r
+            DW      0                   ; I/O Map Base Address\r
+TSS_DESC_SIZE equ $ -   TssDescriptor\r
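; Naming note: TssSeg above is the 16-byte TSS descriptor that lives in the
; GDT, while the TssDescriptor label here is the 64-bit TSS itself, and its
; size (TSS_DESC_SIZE = 104 bytes) is what TssSeg carries as its limit. The
; IST1 slot of this TSS, once populated at runtime, supplies the stack the
; processor switches to when InitializeIDTSmmStackGuard (end of this file)
; sets the page-fault gate's IST index to 1.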
+\r
+;\r
+; This structure serves as a template for all processors.\r
+;\r
+ASM_PFX(gcPsd):\r
+            DB      'PSDSIG  '\r
+            DW      PSD_SIZE\r
+            DW      2\r
+            DW      1 << 2\r
+            DW      CODE_SEL\r
+            DW      DATA_SEL\r
+            DW      DATA_SEL\r
+            DW      DATA_SEL\r
+            DW      0\r
+            DQ      0\r
+            DQ      0\r
+            DQ      0                   ; fixed in InitializeMpServiceData()\r
+            DQ        NullSeg\r
+            DD      GDT_SIZE\r
+            DD      0\r
+            times   24 DB 0\r
+            DQ      ASM_PFX(gSmiMtrrs)\r
+PSD_SIZE  equ $ -   ASM_PFX(gcPsd)\r
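; Reading the PSD template together with its comments: the 'PSDSIG  '
; signature and PSD_SIZE identify the structure, the selector words refer
; back to the GDT defined above (CODE_SEL/DATA_SEL, see below), the
; NullSeg/GDT_SIZE pair gives the GDT base and size, and the final pointer
; locates gSmiMtrrs; the DQ marked "fixed in InitializeMpServiceData()" is
; patched per processor at runtime. For example, a consumer that already has
; the PSD address could fetch the MTRR save area with something like
; (illustrative only):
;
;       mov     rax, [rbx + PSD_SIZE - 8]   ; rbx = PSD base; last field = &gSmiMtrrs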
+\r
+;\r
+; CODE & DATA segments for SMM runtime\r
+;\r
+CODE_SEL    equ   CodeSeg64 -   NullSeg\r
+DATA_SEL    equ   DataSeg32 -   NullSeg\r
+CODE32_SEL  equ   CodeSeg32 -   NullSeg\r
+\r
+ASM_PFX(gcSmiGdtr):\r
+    DW      GDT_SIZE - 1\r
+    DQ        NullSeg\r
+\r
+ASM_PFX(gcSmiIdtr):\r
+    DW      IDT_SIZE - 1\r
+    DQ        _SmiIDT\r
+\r
+;\r
+; Here is the IDT. It has 32 entries (not the full 256), since only\r
+; processor-generated exceptions are handled here.\r
+;\r
+_SmiIDT:\r
+%rep 32\r
+    DW      0                           ;   0:15\r
+    DW      CODE_SEL                    ; Segment selector\r
+    DB      0                           ; Unused\r
+    DB      0x8e                         ; Interrupt Gate, Present\r
+    DW      0                           ;   16:31\r
+    DQ      0                           ;   32:63\r
+%endrep\r
+_SmiIDTEnd:\r
+\r
+IDT_SIZE equ  _SmiIDTEnd -   _SmiIDT\r
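; Each %rep iteration above emits one 16-byte X64 interrupt-gate descriptor.
; A sketch of that layout (struc and field names are illustrative only),
; which also shows where InitializeIDTSmmStackGuard later writes the IST
; index:

        struc INTERRUPT_GATE64_SKETCH
            .OffsetLow:   resw 1         ; handler offset [15:0], zero in this template
            .Selector:    resw 1         ; code segment selector (CODE_SEL)
            .Ist:         resb 1         ; bits [2:0] = IST index, 0 = stay on current stack
            .TypeAttr:    resb 1         ; 0x8e = present, DPL 0, 64-bit interrupt gate
            .OffsetMid:   resw 1         ; handler offset [31:16]
            .OffsetHigh:  resd 1         ; handler offset [63:32]
            .Reserved:    resd 1
        endstruc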
+\r
+    DEFAULT REL\r
+    SECTION .text\r
+\r
+;------------------------------------------------------------------------------\r
+; PageFaultIdtHandlerSmmProfile is the page-fault exception entry point; it\r
+; pushes the vector number and falls through into a common exception handler.\r
+;\r
+; The stack frame at handler entry, as specified in the IA-32/Intel 64 manuals, is:\r
+;\r
+; +---------------------+ <-- 16-byte aligned ensured by processor\r
+; +    Old SS           +\r
+; +---------------------+\r
+; +    Old RSP          +\r
+; +---------------------+\r
+; +    RFlags           +\r
+; +---------------------+\r
+; +    CS               +\r
+; +---------------------+\r
+; +    RIP              +\r
+; +---------------------+\r
+; +    Error Code       +\r
+; +---------------------+\r
+; +   Vector Number     +\r
+; +---------------------+\r
+; +    RBP              +\r
+; +---------------------+ <-- RBP, 16-byte aligned\r
+;\r
+; RSP being an odd multiple of 8 after the vector number is pushed means an\r
+; error code is PRESENT\r
+;------------------------------------------------------------------------------\r
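; A worked example of the error-code test used at the entry point below,
; assuming (as the diagram notes) the processor aligned RSP to 16 before
; building the frame:
;
;   no error code:  5 qwords pushed (SS, RSP, RFlags, CS, RIP) -> RSP % 16 == 8
;                   push of the vector number                  -> RSP % 16 == 0
;   error code:     6 qwords pushed                            -> RSP % 16 == 0
;                   push of the vector number                  -> RSP % 16 == 8
;
; so after "push 0xe" below, (spl & 8) != 0 exactly when the CPU pushed an
; error code, which is what "test spl, 8" checks.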
+global ASM_PFX(PageFaultIdtHandlerSmmProfile)\r
+ASM_PFX(PageFaultIdtHandlerSmmProfile):\r
+    push    0xe                         ; Page Fault\r
+    test    spl, 8                      ; odd multiple of 8 => ErrCode present\r
+    jnz     .0\r
+    push    qword [rsp]                       ; duplicate INT# if no ErrCode\r
+    mov     qword [rsp + 8], 0\r
+.0:\r
+    push    rbp\r
+    mov     rbp, rsp\r
+\r
+    ;\r
+    ; The stack pointer is 16-byte aligned at this point, so the\r
+    ; EFI_FX_SAVE_STATE_X64 field of EFI_SYSTEM_CONTEXT_X64 saved below\r
+    ; is also 16-byte aligned\r
+    ;\r
+\r
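; The pushes that follow lay out an EFI_SYSTEM_CONTEXT_X64 on the stack in
; reverse field order (R15 first, ExceptionData last), so that when the
; sequence completes RSP points at the start of the structure; that address
; is what is later handed to SmiPFHandler in RDX.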
+;; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;\r
+;; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;\r
+    push    r15\r
+    push    r14\r
+    push    r13\r
+    push    r12\r
+    push    r11\r
+    push    r10\r
+    push    r9\r
+    push    r8\r
+    push    rax\r
+    push    rcx\r
+    push    rdx\r
+    push    rbx\r
+    push    qword [rbp + 48]  ; RSP\r
+    push    qword [rbp]       ; RBP\r
+    push    rsi\r
+    push    rdi\r
+\r
+;; UINT64  Gs, Fs, Es, Ds, Cs, Ss;  ensure the upper bits of each are zero\r
+    movzx   rax, word [rbp + 56]\r
+    push    rax                      ; for ss\r
+    movzx   rax, word [rbp + 32]\r
+    push    rax                      ; for cs\r
+    mov     rax, ds\r
+    push    rax\r
+    mov     rax, es\r
+    push    rax\r
+    mov     rax, fs\r
+    push    rax\r
+    mov     rax, gs\r
+    push    rax\r
+\r
+;; UINT64  Rip;\r
+    push    qword [rbp + 24]\r
+\r
+;; UINT64  Gdtr[2], Idtr[2];\r
+    sub     rsp, 16\r
+    sidt    [rsp]\r
+    sub     rsp, 16\r
+    sgdt    [rsp]\r
+\r
+;; UINT64  Ldtr, Tr;\r
+    xor     rax, rax\r
+    str     ax\r
+    push    rax\r
+    sldt    ax\r
+    push    rax\r
+\r
+;; UINT64  RFlags;\r
+    push    qword [rbp + 40]\r
+\r
+;; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;\r
+    mov     rax, cr8\r
+    push    rax\r
+    mov     rax, cr4\r
+    or      rax, 0x208\r
+    mov     cr4, rax\r
+    push    rax\r
+    mov     rax, cr3\r
+    push    rax\r
+    mov     rax, cr2\r
+    push    rax\r
+    xor     rax, rax\r
+    push    rax\r
+    mov     rax, cr0\r
+    push    rax\r
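; Note on the CR4 update above: 0x208 sets bit 3 (CR4.DE) and bit 9
; (CR4.OSFXSR), presumably so the fxsave/fxrstor pair later in this handler
; captures the full FPU/SSE state; the modified CR4 value, not the original,
; is what ends up in the saved context. CR1 does not exist, so a zero
; placeholder is pushed in its slot.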
+\r
+;; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;\r
+    mov     rax, dr7\r
+    push    rax\r
+    mov     rax, dr6\r
+    push    rax\r
+    mov     rax, dr3\r
+    push    rax\r
+    mov     rax, dr2\r
+    push    rax\r
+    mov     rax, dr1\r
+    push    rax\r
+    mov     rax, dr0\r
+    push    rax\r
+\r
+;; FX_SAVE_STATE_X64 FxSaveState;\r
+\r
+    sub rsp, 512\r
+    mov rdi, rsp\r
+    db 0xf, 0xae, 00000111y ;fxsave [rdi]\r
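; The three bytes above are the hand-assembled encoding of "fxsave [rdi]":
; 0x0f 0xae is the FXSAVE/FXRSTOR opcode group, and ModRM 00000111b selects
; /0 (FXSAVE) with [rdi] as the memory operand. The matching 00001110b form
; further down is "fxrstor [rsi]" (/1, [rsi]). Raw bytes are presumably used
; to stay independent of assembler support for these mnemonics.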
+\r
+; The UEFI x64 calling convention requires that the Direction flag in EFLAGS is clear\r
+    cld\r
+\r
+;; UINT32  ExceptionData;\r
+    push    qword [rbp + 16]\r
+\r
+;; call into exception handler\r
+    mov     rcx, [rbp + 8]\r
+    mov     rax, ASM_PFX(SmiPFHandler)\r
+\r
+;; Prepare parameter and call\r
+    mov     rdx, rsp\r
+    ;\r
+    ; Per X64 calling convention, allocate maximum parameter stack space\r
+    ; and make sure RSP is 16-byte aligned\r
+    ;\r
+    sub     rsp, 4 * 8 + 8\r
+    call    rax\r
+    add     rsp, 4 * 8 + 8\r
+    jmp     .1\r
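; A quick accounting of "sub rsp, 4 * 8 + 8" above: the Microsoft x64 calling
; convention used by UEFI requires 32 bytes of register-parameter shadow
; space for the callee, and RSP is 8 mod 16 after the ExceptionData push, so
; the extra 8 bytes restore 16-byte alignment at the call. RCX carries the
; vector number from [rbp + 8] and RDX points at the context saved above,
; matching a two-parameter (vector, context) handler on the C side.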
+\r
+.1:\r
+;; UINT64  ExceptionData;\r
+    add     rsp, 8\r
+\r
+;; FX_SAVE_STATE_X64 FxSaveState;\r
+\r
+    mov rsi, rsp\r
+    db 0xf, 0xae, 00001110y ; fxrstor [rsi]\r
+    add rsp, 512\r
+\r
+;; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;\r
+;; Skip restoration of DRx registers to support debuggers\r
+;; that set breakpoints in interrupt/exception context\r
+  add     rsp, 8 * 6\r
+\r
+;; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;\r
+    pop     rax\r
+    mov     cr0, rax\r
+    add     rsp, 8   ; not for Cr1\r
+    pop     rax\r
+    mov     cr2, rax\r
+    pop     rax\r
+    mov     cr3, rax\r
+    pop     rax\r
+    mov     cr4, rax\r
+    pop     rax\r
+    mov     cr8, rax\r
+\r
+;; UINT64  RFlags;\r
+    pop     qword [rbp + 40]\r
+\r
+;; UINT64  Ldtr, Tr;\r
+;; UINT64  Gdtr[2], Idtr[2];\r
+;; Best not let anyone mess with these particular registers...\r
+    add     rsp, 48\r
+\r
+;; UINT64  Rip;\r
+    pop     qword [rbp + 24]\r
+\r
+;; UINT64  Gs, Fs, Es, Ds, Cs, Ss;\r
+    pop     rax\r
+    ; mov     gs, rax ; not for gs\r
+    pop     rax\r
+    ; mov     fs, rax ; not for fs\r
+    ; (X64 will not use fs and gs, so we do not restore them)\r
+    pop     rax\r
+    mov     es, rax\r
+    pop     rax\r
+    mov     ds, rax\r
+    pop     qword [rbp + 32]  ; for cs\r
+    pop     qword [rbp + 56]  ; for ss\r
+\r
+;; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;\r
+;; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;\r
+    pop     rdi\r
+    pop     rsi\r
+    add     rsp, 8               ; not for rbp\r
+    pop     qword [rbp + 48] ; for rsp\r
+    pop     rbx\r
+    pop     rdx\r
+    pop     rcx\r
+    pop     rax\r
+    pop     r8\r
+    pop     r9\r
+    pop     r10\r
+    pop     r11\r
+    pop     r12\r
+    pop     r13\r
+    pop     r14\r
+    pop     r15\r
+\r
+    mov     rsp, rbp\r
+\r
+; Enable TF bit after page fault handler runs\r
+    bts     dword [rsp + 40], 8  ;RFLAGS\r
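; Setting bit 8 (TF) of the saved RFLAGS at [rsp + 40] means the iretq below
; returns with single-stepping enabled, so the instruction that originally
; faulted runs once and then raises a debug exception; the SmmProfile flow
; uses that follow-up exception to get control back after the access has been
; allowed through.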
+\r
+    pop     rbp\r
+    add     rsp, 16           ; skip INT# & ErrCode\r
+    iretq\r
+\r
+global ASM_PFX(InitializeIDTSmmStackGuard)\r
+ASM_PFX(InitializeIDTSmmStackGuard):\r
+;\r
+; If SMM Stack Guard feature is enabled, set the IST field of\r
+; the interrupt gate for Page Fault Exception to be 1\r
+;\r
+    lea     rax, [_SmiIDT + 14 * 16]\r
+    mov     byte [rax + 4], 1\r
+    ret\r
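; The arithmetic above: X64 IDT entries are 16 bytes, so _SmiIDT + 14 * 16 is
; the gate for vector 14 (#PF), and byte offset 4 within a gate holds the
; 3-bit IST index (see the gate layout sketched earlier). Writing 1 there
; makes the processor switch to the IST1 stack from the TSS on every page
; fault, so the stack-guard fault can be handled even after the normal SMM
; stack has been exhausted.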
+\r