extern ASM_PFX(mRebasedFlag)
extern ASM_PFX(mSmmRelocationOriginalAddress)

-global ASM_PFX(gSmmCr3)
+global ASM_PFX(gPatchSmmCr3)
global ASM_PFX(gSmmCr4)
global ASM_PFX(gSmmCr0)
global ASM_PFX(gSmmJmpAddr)
DQ 0

global ASM_PFX(SmmStartup)
+
+; SMM entry begins in 16-bit mode; tell the assembler to emit 16-bit
+; encodings until the far jump below transfers control to long mode.
+BITS 16
ASM_PFX(SmmStartup):
- DB 0x66, 0xb8 ; mov eax, imm32
-ASM_PFX(gSmmCr3): DD 0
- mov cr3, rax
- DB 0x66, 0x2e
- lgdt [ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]
+; CPUID leaf 80000001h: EDX bit 20 reports XD/NX capability. Cache EDX in
+; EBX now, because the RDMSR below clobbers EDX; EBX is tested at .1 below.
+ mov eax, 0x80000001 ; read capability
+ cpuid
+ mov ebx, edx ; rdmsr will change edx. keep it in ebx.
+; The zero dword immediate is overwritten at run time with the CR3 value to
+; load; the gPatchSmmCr3 label marks the end of the imm32 for the patcher.
+ mov eax, strict dword 0 ; source operand will be patched
+ASM_PFX(gPatchSmmCr3):
+ mov cr3, eax
+; o32 + cs override: load the GDTR from a CS-relative address while still in
+; 16-bit mode (ebp presumably holds the SmmStartup base — carried over from
+; the relocation setup outside this chunk).
+o32 lgdt [cs:ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]
DB 0x66, 0xb8 ; mov eax, imm32
ASM_PFX(gSmmCr4): DD 0
or ah, 2 ; enable XMM registers access
- mov cr4, rax
- DB 0x66
+ mov cr4, eax
mov ecx, 0xc0000080 ; IA32_EFER MSR
rdmsr
- or ah, 1 ; set LME bit
+ or ah, BIT0 ; set LME bit
+; Set IA32_EFER.NXE only when CPUID reported NX support (EBX bit 20, saved
+; above); writing NXE on a CPU without NX would #GP on WRMSR.
+ test ebx, BIT20 ; check NXE capability
+ jz .1
+ or ah, BIT3 ; set NXE bit
+.1:
wrmsr
DB 0x66, 0xb8 ; mov eax, imm32
ASM_PFX(gSmmCr0): DD 0
- mov cr0, rax ; enable protected mode & paging
+ mov cr0, eax ; enable protected mode & paging
DB 0x66, 0xea ; far jmp to long mode
ASM_PFX(gSmmJmpAddr): DQ 0;@LongMode
+
+; Switch the assembler to 64-bit encodings for the long-mode continuation.
+BITS 64
@LongMode: ; long-mode starts here
DB 0x48, 0xbc ; mov rsp, imm64
ASM_PFX(gSmmInitStack): DQ 0