;------------------------------------------------------------------------------ ;\r
-; Copyright (c) 2016, Intel Corporation. All rights reserved.<BR>\r
+; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>\r
; This program and the accompanying materials\r
; are licensed and made available under the terms and conditions of the BSD License\r
; which accompanies this distribution. The full text of the license may be found at\r
; http://opensource.org/licenses/bsd-license.php.\r
;\r
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
;\r
;-------------------------------------------------------------------------------\r
\r
+%define MSR_IA32_MISC_ENABLE 0x1A0\r
+%define MSR_EFER 0xc0000080\r
+%define MSR_EFER_XD 0x800\r
+\r
+;\r
+; Constants relating to PROCESSOR_SMM_DESCRIPTOR\r
+;\r
%define DSC_OFFSET 0xfb00\r
%define DSC_GDTPTR 0x30\r
%define DSC_GDTSIZ 0x38\r
\r
global ASM_PFX(gcSmiHandlerTemplate)\r
global ASM_PFX(gcSmiHandlerSize)\r
-global ASM_PFX(gSmiCr3)\r
-global ASM_PFX(gSmiStack)\r
-global ASM_PFX(gSmbase)\r
+global ASM_PFX(gPatchSmiCr3)\r
+global ASM_PFX(gPatchSmiStack)\r
+global ASM_PFX(gPatchSmbase)\r
+extern ASM_PFX(mXdSupported)\r
+global ASM_PFX(gPatchXdSupported)\r
extern ASM_PFX(gSmiHandlerIdtr)\r
\r
SECTION .text\r
mov ebp, eax ; ebp = GDT base\r
o32 lgdt [cs:bx] ; lgdt fword ptr cs:[bx]\r
mov ax, PROTECT_MODE_CS\r
- mov [cs:bx-0x2],ax \r
- DB 0x66, 0xbf ; mov edi, SMBASE\r
-ASM_PFX(gSmbase): DD 0\r
+ mov [cs:bx-0x2],ax\r
+ mov edi, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmbase):\r
lea eax, [edi + (@32bit - _SmiEntryPoint) + 0x8000]\r
mov [cs:bx-0x6],eax\r
mov ebx, cr0\r
or ebx, 0x23\r
mov cr0, ebx\r
jmp dword 0x0:0x0\r
-_GdtDesc: \r
+_GdtDesc:\r
DW 0\r
DD 0\r
\r
o16 mov fs, ax\r
o16 mov gs, ax\r
o16 mov ss, ax\r
- DB 0xbc ; mov esp, imm32\r
-ASM_PFX(gSmiStack): DD 0\r
+ mov esp, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmiStack):\r
mov eax, ASM_PFX(gSmiHandlerIdtr)\r
lidt [eax]\r
jmp ProtFlatMode\r
\r
ProtFlatMode:\r
- DB 0xb8 ; mov eax, imm32\r
-ASM_PFX(gSmiCr3): DD 0\r
+ mov eax, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmiCr3):\r
mov cr3, eax\r
;\r
; Need to test for CR4 specific bit support\r
or eax, BIT10\r
.4: ; as cr4.PGE is not set here, refresh cr3\r
mov cr4, eax ; in PreModifyMtrrs() to flush TLB.\r
+\r
+ cmp byte [dword ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))], 0\r
+ jz .6\r
+; Load TSS\r
+ mov byte [ebp + TSS_SEGMENT + 5], 0x89 ; clear busy flag\r
+ mov eax, TSS_SEGMENT\r
+ ltr ax\r
+.6:\r
+\r
+; enable NXE if supported\r
+ mov al, strict byte 1 ; source operand may be patched\r
+; the "strict byte" immediate above is a placeholder: it is overwritten at\r
+; boot through the gPatchXdSupported label with the real XD-support flag\r
+ASM_PFX(gPatchXdSupported):\r
+ cmp al, 0\r
+ jz @SkipXd\r
+;\r
+; Check XD disable bit\r
+;\r
+ mov ecx, MSR_IA32_MISC_ENABLE\r
+ rdmsr\r
+ push edx ; save MSR_IA32_MISC_ENABLE[63-32] (popped again in SmiHandler before rsm)\r
+ test edx, BIT2 ; MSR_IA32_MISC_ENABLE[34]\r
+ jz .5\r
+ and dx, 0xFFFB ; clear XD Disable bit if it is set\r
+ wrmsr\r
+.5:\r
+ mov ecx, MSR_EFER\r
+ rdmsr\r
+ or ax, MSR_EFER_XD ; enable NXE\r
+ wrmsr\r
+ jmp @XdDone\r
+@SkipXd:\r
+ sub esp, 4 ; no edx was pushed on this path; keep stack depth identical to the XD path\r
+@XdDone:\r
+\r
mov ebx, cr0\r
- or ebx, 0x080010000 ; enable paging + WP\r
+ or ebx, 0x80010023 ; enable paging + WP + NE + MP + PE\r
mov cr0, ebx\r
lea ebx, [edi + DSC_OFFSET]\r
mov ax, [ebx + DSC_DS]\r
mov ax, [ebx + DSC_SS]\r
mov ss, eax\r
\r
- cmp byte [dword ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))], 0\r
- jz .5\r
-\r
-; Load TSS\r
- mov byte [ebp + TSS_SEGMENT + 5], 0x89 ; clear busy flag\r
- mov eax, TSS_SEGMENT\r
- ltr ax\r
-.5:\r
; jmp _SmiHandler ; instruction is not needed\r
\r
global ASM_PFX(SmiHandler)\r
ASM_PFX(SmiHandler):\r
- mov ebx, [esp] ; CPU Index\r
-\r
+ mov ebx, [esp + 4] ; CPU Index\r
+; NOTE: the index moved from [esp] to [esp+4] because the XD-enable code in\r
+; the entry template now leaves one dword on top of the stack (either the\r
+; saved MSR_IA32_MISC_ENABLE[63-32] or the 4-byte "sub esp, 4" padding).\r
push ebx\r
mov eax, ASM_PFX(CpuSmmDebugEntry)\r
call eax\r
- pop ecx\r
+ add esp, 4 ; discard CPU Index arg (was "pop ecx"; ecx is no longer clobbered)\r
\r
push ebx\r
mov eax, ASM_PFX(SmiRendezvous)\r
call eax\r
- pop ecx\r
- \r
+ add esp, 4\r
+\r
push ebx\r
mov eax, ASM_PFX(CpuSmmDebugExit)\r
call eax\r
- pop ecx\r
-\r
+ add esp, 4\r
+\r
+; If XD is supported, restore the XD-Disable state that was in effect before\r
+; SMM entry: the entry template saved MISC_ENABLE[63-32] on the stack after\r
+; clearing bit 34, so re-set it here if it was set, then resume.\r
+ mov eax, ASM_PFX(mXdSupported)\r
+ mov al, [eax]\r
+ cmp al, 0\r
+ jz .7\r
+ pop edx ; get saved MSR_IA32_MISC_ENABLE[63-32]\r
+ test edx, BIT2\r
+ jz .7\r
+ mov ecx, MSR_IA32_MISC_ENABLE\r
+ rdmsr\r
+ or dx, BIT2 ; set XD Disable bit if it was set before entering into SMM\r
+ wrmsr\r
+\r
+.7:\r
rsm\r
\r
ASM_PFX(gcSmiHandlerSize): DW $ - _SmiEntryPoint\r
\r
+global ASM_PFX(PiSmmCpuSmiEntryFixupAddress)\r
+ASM_PFX(PiSmmCpuSmiEntryFixupAddress):\r
+; Intentional no-op on this (32-bit) build: nothing in the entry template\r
+; needs a post-relocation address fixup here.\r
+; NOTE(review): presumably exported so common code can call it on every\r
+; architecture — confirm against the 64-bit counterpart and the caller.\r
+ ret\r