;------------------------------------------------------------------------------ ;\r
-; Copyright (c) 2016, Intel Corporation. All rights reserved.<BR>\r
+; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>\r
; This program and the accompanying materials\r
; are licensed and made available under the terms and conditions of the BSD License\r
; which accompanies this distribution. The full text of the license may be found at\r
; Variables referenced by C code\r
;\r
\r
+%define MSR_IA32_MISC_ENABLE 0x1A0\r
+%define MSR_EFER 0xc0000080\r
+%define MSR_EFER_XD 0x800\r
+\r
;\r
; Constants relating to PROCESSOR_SMM_DESCRIPTOR\r
;\r
extern ASM_PFX(CpuSmmDebugEntry)\r
extern ASM_PFX(CpuSmmDebugExit)\r
\r
-global ASM_PFX(gSmbase)\r
-global ASM_PFX(gSmiStack)\r
-global ASM_PFX(gSmiCr3)\r
+global ASM_PFX(gPatchSmbase)\r
+extern ASM_PFX(mXdSupported)\r
+global ASM_PFX(gPatchXdSupported)\r
+global ASM_PFX(gPatchSmiStack)\r
+global ASM_PFX(gPatchSmiCr3)\r
global ASM_PFX(gcSmiHandlerTemplate)\r
global ASM_PFX(gcSmiHandlerSize)\r
\r
mov [cs:bx + 2], eax\r
o32 lgdt [cs:bx] ; lgdt fword ptr cs:[bx]\r
mov ax, PROTECT_MODE_CS\r
- mov [cs:bx-0x2],ax \r
- DB 0x66, 0xbf ; mov edi, SMBASE\r
-ASM_PFX(gSmbase): DD 0\r
+ mov [cs:bx-0x2],ax\r
+ mov edi, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmbase):\r
lea eax, [edi + (@ProtectedMode - _SmiEntryPoint) + 0x8000]\r
mov [cs:bx-0x6],eax\r
mov ebx, cr0\r
or ebx, 0x23\r
mov cr0, ebx\r
jmp dword 0x0:0x0\r
-_GdtDesc: \r
+_GdtDesc:\r
DW 0\r
DD 0\r
\r
o16 mov fs, ax\r
o16 mov gs, ax\r
o16 mov ss, ax\r
- DB 0xbc ; mov esp, imm32\r
-ASM_PFX(gSmiStack): DD 0\r
+ mov esp, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmiStack):\r
jmp ProtFlatMode\r
\r
BITS 64\r
ProtFlatMode:\r
- DB 0xb8 ; mov eax, offset gSmiCr3\r
-ASM_PFX(gSmiCr3): DD 0\r
+ mov eax, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmiCr3):\r
mov cr3, rax\r
mov eax, 0x668 ; as cr4.PGE is not set here, refresh cr3\r
mov cr4, rax ; in PreModifyMtrrs() to flush TLB.\r
mov eax, TSS_SEGMENT\r
ltr ax\r
\r
+; enable NXE if supported\r
+ mov al, strict byte 1 ; source operand may be patched\r
+ASM_PFX(gPatchXdSupported):\r
+ cmp al, 0\r
+ jz @SkipXd\r
+;\r
+; Check XD disable bit\r
+;\r
+ mov ecx, MSR_IA32_MISC_ENABLE\r
+ rdmsr\r
+ sub esp, 4\r
+ push rdx ; save MSR_IA32_MISC_ENABLE[63-32]\r
+ test edx, BIT2 ; MSR_IA32_MISC_ENABLE[34]\r
+ jz .0\r
+ and dx, 0xFFFB ; clear XD Disable bit if it is set\r
+ wrmsr\r
+.0:\r
+ mov ecx, MSR_EFER\r
+ rdmsr\r
+ or ax, MSR_EFER_XD ; enable NXE\r
+ wrmsr\r
+ jmp @XdDone\r
+@SkipXd:\r
+ sub esp, 8\r
+@XdDone:\r
+\r
; Switch into @LongMode\r
push LONG_MODE_CS ; push cs hardcore here\r
- call Base ; push reture address for retf later\r
+ call Base ; push return address for retf later\r
Base:\r
add dword [rsp], @LongMode - Base; offset for far retf, seg is the 1st arg\r
- mov ecx, 0xc0000080\r
+\r
+ mov ecx, MSR_EFER\r
rdmsr\r
- or ah, 1\r
+ or ah, 1 ; enable LME\r
wrmsr\r
mov rbx, cr0\r
- or ebx, 080010000h ; enable paging + WP\r
+ or ebx, 0x80010023 ; enable paging + WP + NE + MP + PE\r
mov cr0, rbx\r
retf\r
@LongMode: ; long mode (64-bit code) starts here\r
- mov rax, ASM_PFX(gSmiHandlerIdtr)\r
+ mov rax, strict qword 0 ; mov rax, ASM_PFX(gSmiHandlerIdtr)\r
+SmiHandlerIdtrAbsAddr:\r
lidt [rax]\r
lea ebx, [rdi + DSC_OFFSET]\r
mov ax, [rbx + DSC_DS]\r
mov gs, eax\r
mov ax, [rbx + DSC_SS]\r
mov ss, eax\r
-; jmp _SmiHandler ; instruction is not needed\r
+ mov rax, strict qword 0 ; mov rax, _SmiHandler\r
+_SmiHandlerAbsAddr:\r
+ jmp rax\r
\r
_SmiHandler:\r
- mov rbx, [rsp] ; rbx <- CpuIndex\r
+    mov     rbx, [rsp + 0x8]             ; rbx <- CpuIndex\r
\r
;\r
; Save FP registers\r
;\r
- sub rsp, 0x208\r
+ sub rsp, 0x200\r
DB 0x48 ; FXSAVE64\r
fxsave [rsp]\r
\r
add rsp, -0x20\r
\r
mov rcx, rbx\r
- mov rax, CpuSmmDebugEntry\r
- call rax\r
- \r
+ call ASM_PFX(CpuSmmDebugEntry)\r
+\r
mov rcx, rbx\r
- mov rax, SmiRendezvous ; rax <- absolute addr of SmiRedezvous\r
- call rax\r
- \r
+ call ASM_PFX(SmiRendezvous)\r
+\r
mov rcx, rbx\r
- mov rax, CpuSmmDebugExit\r
- call rax\r
- \r
+ call ASM_PFX(CpuSmmDebugExit)\r
+\r
add rsp, 0x20\r
\r
;\r
DB 0x48 ; FXRSTOR64\r
fxrstor [rsp]\r
\r
+ add rsp, 0x200\r
+\r
+ lea rax, [ASM_PFX(mXdSupported)]\r
+ mov al, [rax]\r
+ cmp al, 0\r
+ jz .1\r
+ pop rdx ; get saved MSR_IA32_MISC_ENABLE[63-32]\r
+ test edx, BIT2\r
+ jz .1\r
+ mov ecx, MSR_IA32_MISC_ENABLE\r
+ rdmsr\r
+ or dx, BIT2 ; set XD Disable bit if it was set before entering into SMM\r
+ wrmsr\r
+\r
+.1:\r
rsm\r
\r
-gcSmiHandlerSize DW $ - _SmiEntryPoint\r
+ASM_PFX(gcSmiHandlerSize) DW $ - _SmiEntryPoint\r
+\r
+global ASM_PFX(PiSmmCpuSmiEntryFixupAddress)\r
+ASM_PFX(PiSmmCpuSmiEntryFixupAddress):\r
+ lea rax, [ASM_PFX(gSmiHandlerIdtr)]\r
+ lea rcx, [SmiHandlerIdtrAbsAddr]\r
+ mov qword [rcx - 8], rax\r
\r
+ lea rax, [_SmiHandler]\r
+ lea rcx, [_SmiHandlerAbsAddr]\r
+ mov qword [rcx - 8], rax\r
+ ret\r