global ASM_PFX(gPatchSmmCr3)\r
global ASM_PFX(gPatchSmmCr4)\r
global ASM_PFX(gPatchSmmCr0)\r
-global ASM_PFX(gSmmInitStack)\r
+global ASM_PFX(gPatchSmmInitStack)\r
global ASM_PFX(gcSmiInitGdtr)\r
global ASM_PFX(gcSmmInitSize)\r
global ASM_PFX(gcSmmInitTemplate)\r
mov fs, edi\r
mov gs, edi\r
mov ss, edi\r
- DB 0xbc ; mov esp, imm32\r
-ASM_PFX(gSmmInitStack): DD 0\r
+ mov esp, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmmInitStack):\r
call ASM_PFX(SmmInitHandler)\r
rsm\r
\r
//\r
// Set SMI stack for SMM base relocation\r
//\r
- gSmmInitStack = (UINTN) (Stacks + mSmmStackSize - sizeof (UINTN));\r
+ PatchInstructionX86 (\r
+ gPatchSmmInitStack,\r
+ (UINTN) (Stacks + mSmmStackSize - sizeof (UINTN)),\r
+ sizeof (UINTN)\r
+ );\r
\r
//\r
// Initialize IDT\r
X86_ASSEMBLY_PATCH_LABEL gPatchSmmCr3;\r
extern UINT32 mSmmCr4;\r
X86_ASSEMBLY_PATCH_LABEL gPatchSmmCr4;\r
-extern UINTN gSmmInitStack;\r
+X86_ASSEMBLY_PATCH_LABEL gPatchSmmInitStack;\r
\r
/**\r
Semaphore operation for all processor relocate SMMBase.\r
global ASM_PFX(gPatchSmmCr3)\r
global ASM_PFX(gPatchSmmCr4)\r
global ASM_PFX(gPatchSmmCr0)\r
-global ASM_PFX(gSmmInitStack)\r
+global ASM_PFX(gPatchSmmInitStack)\r
global ASM_PFX(gcSmiInitGdtr)\r
global ASM_PFX(gcSmmInitSize)\r
global ASM_PFX(gcSmmInitTemplate)\r
\r
BITS 64\r
@LongMode: ; long-mode starts here\r
- DB 0x48, 0xbc ; mov rsp, imm64\r
-ASM_PFX(gSmmInitStack): DQ 0\r
+ mov rsp, strict qword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmmInitStack):\r
and sp, 0xfff0 ; make sure RSP is 16-byte aligned\r
;\r
; According to the x64 calling convention, XMM0~5 are volatile, we need to save