;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
; SPDX-License-Identifier: BSD-2-Clause-Patent
;
; Functions for relocating SMBASE's for all processors
;
;-------------------------------------------------------------------------------
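;
; Overview (as implied by the code below): gcSmmInitTemplate is the stub placed
; at a processor's SMI entry point. It jumps to SmmStartup, which switches the
; processor from the 16-bit SMM startup state into long mode and calls the C
; routine SmmInitHandler. The gPatch* labels mark instruction immediates that
; are patched at runtime; each label sits immediately after the immediate it
; names.
;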
%include "StuffRsbNasm.inc"

extern ASM_PFX(SmmInitHandler)
extern ASM_PFX(mRebasedFlag)
extern ASM_PFX(mSmmRelocationOriginalAddress)

global ASM_PFX(gPatchSmmCr3)
global ASM_PFX(gPatchSmmCr4)
global ASM_PFX(gPatchSmmCr0)
global ASM_PFX(gPatchSmmInitStack)
global ASM_PFX(gcSmiInitGdtr)
global ASM_PFX(gcSmmInitSize)
global ASM_PFX(gcSmmInitTemplate)
global ASM_PFX(gPatchRebasedFlagAddr32)
global ASM_PFX(gPatchSmmRelocationOriginalAddressPtr32)

%define LONG_MODE_CS 0x38
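; LONG_MODE_CS is the selector used by the far jump into long mode below; it
; must match the offset of the 64-bit code-segment descriptor in the GDT that
; is loaded through gcSmiInitGdtr.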

    DEFAULT REL
    SECTION .text

ASM_PFX(gcSmiInitGdtr):
            DW      0                   ; GDT limit
            DQ      0                   ; GDT base

global ASM_PFX(SmmStartup)

BITS 16
ASM_PFX(SmmStartup):
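    ; Reached from gcSmmInitTemplate in 16-bit mode; EBP is set up by the
    ; template so that [cs:ebp] addresses SmmStartup, which the lgdt below
    ; relies on to locate gcSmiInitGdtr.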
    mov     eax, 0x80000001             ; read capability
    cpuid
    mov     ebx, edx                    ; rdmsr will change edx. keep it in ebx.
    mov     eax, strict dword 0         ; source operand will be patched
ASM_PFX(gPatchSmmCr3):
    mov     cr3, eax
o32 lgdt    [cs:ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]
    mov     eax, strict dword 0         ; source operand will be patched
ASM_PFX(gPatchSmmCr4):
    or      ah, 2                       ; enable XMM registers access
    mov     cr4, eax
    mov     ecx, 0xc0000080             ; IA32_EFER MSR
    rdmsr
    or      ah, BIT0                    ; set LME bit
    test    ebx, BIT20                  ; check NXE capability
    jz      .1
    or      ah, BIT3                    ; set NXE bit
.1:
    wrmsr
    mov     eax, strict dword 0         ; source operand will be patched
ASM_PFX(gPatchSmmCr0):
    mov     cr0, eax                    ; enable protected mode & paging
    jmp     LONG_MODE_CS : dword 0      ; offset will be patched to @LongMode
@PatchLongModeOffset:

BITS 64
@LongMode:                              ; long-mode starts here
    mov     rsp, strict qword 0         ; source operand will be patched
ASM_PFX(gPatchSmmInitStack):
    and     sp, 0xfff0                  ; make sure RSP is 16-byte aligned
    ;
    ; According to the X64 calling convention, XMM0~5 are volatile; save them
    ; before calling the C function.
    ;
    sub     rsp, 0x60                   ; make room for XMM0 - XMM5
    movdqa  [rsp], xmm0
    movdqa  [rsp + 0x10], xmm1
    movdqa  [rsp + 0x20], xmm2
    movdqa  [rsp + 0x30], xmm3
    movdqa  [rsp + 0x40], xmm4
    movdqa  [rsp + 0x50], xmm5
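
    ;
    ; The x64 calling convention also requires the caller to reserve 32 bytes
    ; of shadow space for the callee before a call, hence the stack adjustment
    ; around SmmInitHandler below.
    ;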
    sub     rsp, 0x20
    call    ASM_PFX(SmmInitHandler)
    add     rsp, 0x20
    ;
    ; Restore XMM0~5 after calling C-function.
    ;
    movdqa  xmm0, [rsp]
    movdqa  xmm1, [rsp + 0x10]
    movdqa  xmm2, [rsp + 0x20]
    movdqa  xmm3, [rsp + 0x30]
    movdqa  xmm4, [rsp + 0x40]
    movdqa  xmm5, [rsp + 0x50]

    StuffRsb64
    rsm
BITS 16
ASM_PFX(gcSmmInitTemplate):
    mov ebp, [cs:@L1 - ASM_PFX(gcSmmInitTemplate) + 0x8000]
    sub ebp, 0x30000                    ; adjust for the CS base (initial SMBASE is 0x30000)
    jmp ebp
@L1:
    DQ      0                           ; patched to the address of ASM_PFX(SmmStartup)

ASM_PFX(gcSmmInitSize): DW $ - ASM_PFX(gcSmmInitTemplate)
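;
; gcSmmInitTemplate/gcSmmInitSize describe the stub copied to each processor's
; SMI entry point (SMBASE + 0x8000); the @L1 slot and the far-jump offset above
; are fixed up at runtime by PiSmmCpuSmmInitFixupAddress below.
;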
BITS 64
global ASM_PFX(SmmRelocationSemaphoreComplete)
ASM_PFX(SmmRelocationSemaphoreComplete):
    push    rax
    mov     rax, [ASM_PFX(mRebasedFlag)]
    mov     byte [rax], 1               ; signal that SMBASE relocation is done
    pop     rax
    jmp     [ASM_PFX(mSmmRelocationOriginalAddress)]
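
; The 32-bit variant below does the same thing for processors still executing
; 32-bit code; its flag address and jump target are patched into the code
; rather than referenced through the 64-bit globals.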
;
; Semaphore code running in 32-bit mode
;
BITS 32
global ASM_PFX(SmmRelocationSemaphoreComplete32)
ASM_PFX(SmmRelocationSemaphoreComplete32):
    push    eax
    mov     eax, strict dword 0         ; source operand will be patched
ASM_PFX(gPatchRebasedFlagAddr32):
    mov     byte [eax], 1
    pop     eax
    jmp     dword [dword 0]             ; destination will be patched
ASM_PFX(gPatchSmmRelocationOriginalAddressPtr32):
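
;
; PiSmmCpuSmmInitFixupAddress stores the flat address of @LongMode into the
; far-jump offset at @PatchLongModeOffset and the address of SmmStartup into
; the @L1 slot of gcSmmInitTemplate.
;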
BITS 64
global ASM_PFX(PiSmmCpuSmmInitFixupAddress)
ASM_PFX(PiSmmCpuSmmInitFixupAddress):
    lea    rax, [@LongMode]
    lea    rcx, [@PatchLongModeOffset - 6]
    mov    dword [rcx], eax             ; patch the 32-bit offset of the far jump

    lea    rax, [ASM_PFX(SmmStartup)]
    lea    rcx, [@L1]
    mov    qword [rcx], rax             ; patch the SmmStartup address in the template
    ret