;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution.  The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Module Name:
;
;   SmmInit.nasm
;
; Abstract:
;
;   Functions for relocating SMBASE's for all processors
;
;-------------------------------------------------------------------------------
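
;
; Layout note: gcSmmInitTemplate (below) is the 16-bit stub that the CPU
; driver is expected to copy to SMBASE + 0x8000, the SMI entry point, on
; each processor.  The stub jumps to SmmStartup, which switches the
; processor into long mode, calls the C routine SmmInitHandler, and leaves
; SMM via RSM.
;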

extern ASM_PFX(SmmInitHandler)
extern ASM_PFX(mRebasedFlag)
extern ASM_PFX(mSmmRelocationOriginalAddress)

global ASM_PFX(gPatchSmmCr3)
global ASM_PFX(gPatchSmmCr4)
global ASM_PFX(gPatchSmmCr0)
global ASM_PFX(gSmmInitStack)
global ASM_PFX(gcSmiInitGdtr)
global ASM_PFX(gcSmmInitSize)
global ASM_PFX(gcSmmInitTemplate)
global ASM_PFX(mRebasedFlagAddr32)
global ASM_PFX(mSmmRelocationOriginalAddressPtr32)

%define LONG_MODE_CS 0x38

    DEFAULT REL
    SECTION .text
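
;
; Zeroed GDTR image (limit, then base).  The real limit and base are
; expected to be filled in by the C side before the first SMI is taken;
; LONG_MODE_CS (0x38) must select a 64-bit code segment in that GDT.
;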
ASM_PFX(gcSmiInitGdtr):
            DW      0
            DQ      0
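
;
; SmmStartup is entered from gcSmmInitTemplate in the real-mode-like state
; that follows an SMI, so it is assembled as 16-bit code.  EBP carries
; SmmStartup's offset relative to the CS base (see gcSmmInitTemplate below).
;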
global ASM_PFX(SmmStartup)

BITS 16
ASM_PFX(SmmStartup):
    mov     eax, 0x80000001             ; read capability
    cpuid
    mov     ebx, edx                    ; rdmsr will change edx; keep it in ebx
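    ;
    ; "strict dword 0" forces a 4-byte immediate; the label that follows
    ; marks the end of the instruction so the C side can rewrite the
    ; immediate with the real CR3 value at runtime (presumably via
    ; PatchInstructionX86(); the helper name is an assumption, only the
    ; patch site is defined here).  gPatchSmmCr4 and gPatchSmmCr0 below
    ; follow the same pattern.
    ;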
    mov     eax, strict dword 0         ; source operand will be patched
ASM_PFX(gPatchSmmCr3):
    mov     cr3, eax
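    ; EBP holds SmmStartup's offset from the CS base, so this expression
    ; reaches gcSmiInitGdtr through CS no matter where the driver was
    ; loaded; o32 selects the form of LGDT that loads a full 32-bit base.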
    o32 lgdt    [cs:ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]
    mov     eax, strict dword 0         ; source operand will be patched
ASM_PFX(gPatchSmmCr4):
    or      ah, 2                       ; enable XMM registers access (CR4.OSFXSR)
    mov     cr4, eax
    mov     ecx, 0xc0000080             ; IA32_EFER MSR
    rdmsr
    or      ah, BIT0                    ; set LME bit
    test    ebx, BIT20                  ; check NXE capability
    jz      .1
    or      ah, BIT3                    ; set NXE bit
.1:
    wrmsr
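    ;
    ; "or ah, BITn" touches bits 8..15 of EAX: BIT0 of AH is EFER.LME
    ; (bit 8) and BIT3 of AH is EFER.NXE (bit 11), the latter set only when
    ; CPUID.80000001h:EDX[20] (saved in EBX above) advertised XD/NX support.
    ;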
    mov     eax, strict dword 0         ; source operand will be patched
ASM_PFX(gPatchSmmCr0):
    mov     cr0, eax                    ; enable protected mode & paging
    jmp     LONG_MODE_CS : dword 0      ; offset will be patched to @LongMode
@PatchLongModeOffset:
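
;
; Because CR0.PG is set while EFER.LME is already 1, the far jump above
; completes the switch into long mode.  The last six bytes of the jump are
; the dword offset followed by the word selector, which is why
; PiSmmCpuSmmInitFixupAddress patches the offset at @PatchLongModeOffset - 6.
;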
BITS 64
@LongMode:                              ; long-mode starts here
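    ; NASM cannot place a label inside an instruction, so MOV RSP, imm64 is
    ; hand-encoded (0x48 0xBC) and gSmmInitStack labels its 8-byte
    ; immediate, which the C side is expected to patch with the real stack
    ; top before this code runs.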
    DB      0x48, 0xbc                  ; mov rsp, imm64
ASM_PFX(gSmmInitStack): DQ 0
    and     sp, 0xfff0                  ; make sure RSP is 16-byte aligned
    ;
    ; According to the X64 calling convention, XMM0~5 are volatile; we need
    ; to save them before calling the C function.
    ;
    sub     rsp, 0x60
    movdqa  [rsp], xmm0
    movdqa  [rsp + 0x10], xmm1
    movdqa  [rsp + 0x20], xmm2
    movdqa  [rsp + 0x30], xmm3
    movdqa  [rsp + 0x40], xmm4
    movdqa  [rsp + 0x50], xmm5
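
    ; The Microsoft x64 calling convention also requires the caller to
    ; reserve a 32-byte shadow area for the callee; both adjustments (0x60
    ; and 0x20) are multiples of 16, so RSP stays 16-byte aligned at the CALL.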
    sub     rsp, 0x20
    call    ASM_PFX(SmmInitHandler)
    add     rsp, 0x20

    ;
    ; Restore XMM0~5 after calling the C function.
    ;
    movdqa  xmm0, [rsp]
    movdqa  xmm1, [rsp + 0x10]
    movdqa  xmm2, [rsp + 0x20]
    movdqa  xmm3, [rsp + 0x30]
    movdqa  xmm4, [rsp + 0x40]
    movdqa  xmm5, [rsp + 0x50]
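
    ; RSM reloads the full processor context from the SMRAM save state, so
    ; no stack cleanup is needed before leaving SMM.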
    rsm

BITS 16
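
;
; 16-bit SMI entry stub: gcSmmInitSize bytes starting at gcSmmInitTemplate
; are copied to SMBASE + 0x8000, the SMI entry point.  On entry the CS base
; is the default SMBASE (0x30000, implied by the SUB below), so the flat
; address of SmmStartup stored at @L1 is converted to a CS-relative offset
; before the jump.
;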
ASM_PFX(gcSmmInitTemplate):
    mov ebp, [cs:@L1 - ASM_PFX(gcSmmInitTemplate) + 0x8000]
    sub ebp, 0x30000
    jmp ebp
@L1:
    DQ      0                           ; patched to ASM_PFX(SmmStartup) by PiSmmCpuSmmInitFixupAddress

ASM_PFX(gcSmmInitSize): DW $ - ASM_PFX(gcSmmInitTemplate)

BITS 64
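;
; Signals that this processor's SMBASE has been relocated: writes 1 to
; *mRebasedFlag, then jumps to the original code at the address saved in
; mSmmRelocationOriginalAddress.
;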
global ASM_PFX(SmmRelocationSemaphoreComplete)
ASM_PFX(SmmRelocationSemaphoreComplete):
    push    rax
    mov     rax, [ASM_PFX(mRebasedFlag)]
    mov     byte [rax], 1
    pop     rax
    jmp     [ASM_PFX(mSmmRelocationOriginalAddress)]

;
; Semaphore code running in 32-bit mode
;
global ASM_PFX(SmmRelocationSemaphoreComplete32)
ASM_PFX(SmmRelocationSemaphoreComplete32):

    ;
    ; mov byte ptr [], 1
    ;
    db      0xc6, 0x5
ASM_PFX(mRebasedFlagAddr32): dd 0
    db      1

    ;
    ; jmp dword ptr []
    ;
    db      0xff, 0x25
ASM_PFX(mSmmRelocationOriginalAddressPtr32): dd 0
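
;
; The dd cells above are the disp32 fields of the hand-encoded 32-bit
; instructions; the C side is expected to patch them with the addresses of
; mRebasedFlag and mSmmRelocationOriginalAddress.
;

;
; PiSmmCpuSmmInitFixupAddress is intended to be called once by the C entry
; point (an assumption; only the fixups are defined here): it stores
; @LongMode's address into the offset field of the patched far jump (the
; six bytes before @PatchLongModeOffset) and SmmStartup's address into @L1
; inside gcSmmInitTemplate.
;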
global ASM_PFX(PiSmmCpuSmmInitFixupAddress)
ASM_PFX(PiSmmCpuSmmInitFixupAddress):
    lea    rax, [@LongMode]
    lea    rcx, [@PatchLongModeOffset - 6]
    mov    dword [rcx], eax

    lea    rax, [ASM_PFX(SmmStartup)]
    lea    rcx, [@L1]
    mov    qword [rcx], rax
    ret