;------------------------------------------------------------------------------ ;\r
-; Copyright (c) 2016, Intel Corporation. All rights reserved.<BR>\r
-; This program and the accompanying materials\r
-; are licensed and made available under the terms and conditions of the BSD License\r
-; which accompanies this distribution. The full text of the license may be found at\r
-; http://opensource.org/licenses/bsd-license.php.\r
-;\r
-; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
-; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+; Copyright (c) 2016 - 2019, Intel Corporation. All rights reserved.<BR>\r
+; SPDX-License-Identifier: BSD-2-Clause-Patent\r
;\r
; Module Name:\r
;\r
;\r
;-------------------------------------------------------------------------------\r
\r
+%include "StuffRsbNasm.inc"\r
+%include "Nasm.inc"\r
+\r
;\r
-; Variables referrenced by C code\r
+; Variables referenced by C code\r
;\r
\r
+; MSR indexes and bit masks used below to save, enable, and restore the
+; CET supervisor shadow-stack state (IA32_S_CET / IA32_PL0_SSP /
+; IA32_INTERRUPT_SSP_TABLE_ADDR) -- values per Intel SDM Vol. 4.
+%define MSR_IA32_S_CET 0x6A2\r
+%define MSR_IA32_CET_SH_STK_EN 0x1\r
+%define MSR_IA32_CET_WR_SHSTK_EN 0x2\r
+%define MSR_IA32_CET_ENDBR_EN 0x4\r
+%define MSR_IA32_CET_LEG_IW_EN 0x8\r
+%define MSR_IA32_CET_NO_TRACK_EN 0x10\r
+%define MSR_IA32_CET_SUPPRESS_DIS 0x20\r
+%define MSR_IA32_CET_SUPPRESS 0x400\r
+%define MSR_IA32_CET_TRACKER 0x800\r
+%define MSR_IA32_PL0_SSP 0x6A4\r
+%define MSR_IA32_INTERRUPT_SSP_TABLE_ADDR 0x6A8\r
+\r
+; CR4.CET master enable (bit 23)\r
+%define CR4_CET 0x800000\r
+\r
+; MSRs and bits used by the XD (execute-disable / EFER.NXE) enable path\r
+%define MSR_IA32_MISC_ENABLE 0x1A0\r
+%define MSR_EFER 0xc0000080\r
+%define MSR_EFER_XD 0x800\r
+\r
;\r
; Constants relating to PROCESSOR_SMM_DESCRIPTOR\r
;\r
extern ASM_PFX(CpuSmmDebugEntry)\r
extern ASM_PFX(CpuSmmDebugExit)\r
\r
-global ASM_PFX(gSmbase)\r
-global ASM_PFX(gSmiStack)\r
-global ASM_PFX(gSmiCr3)\r
+; Each gPatch*/mPatch* label below marks the byte just past a\r
+; "mov ..., strict dword 0" (or strict byte 0) instruction whose immediate\r
+; operand is patched by C code at runtime (see the "source operand will be\r
+; patched" sites in the template) -- there is no data stored at the label.\r
+global ASM_PFX(gPatchSmbase)\r
+extern ASM_PFX(mXdSupported)\r
+global ASM_PFX(gPatchXdSupported)\r
+global ASM_PFX(gPatchSmiStack)\r
+global ASM_PFX(gPatchSmiCr3)\r
+global ASM_PFX(gPatch5LevelPagingNeeded)\r
global ASM_PFX(gcSmiHandlerTemplate)\r
global ASM_PFX(gcSmiHandlerSize)\r
\r
+extern ASM_PFX(mCetSupported)\r
+global ASM_PFX(mPatchCetSupported)\r
+global ASM_PFX(mPatchCetPl0Ssp)\r
+global ASM_PFX(mPatchCetInterruptSsp)\r
+global ASM_PFX(mPatchCetInterruptSspTable)\r
DEFAULT REL\r
SECTION .text\r
\r
mov [cs:bx + 2], eax\r
o32 lgdt [cs:bx] ; lgdt fword ptr cs:[bx]\r
mov ax, PROTECT_MODE_CS\r
- mov [cs:bx-0x2],ax \r
- DB 0x66, 0xbf ; mov edi, SMBASE\r
-ASM_PFX(gSmbase): DD 0\r
+ mov [cs:bx-0x2],ax\r
+ mov edi, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmbase):\r
lea eax, [edi + (@ProtectedMode - _SmiEntryPoint) + 0x8000]\r
mov [cs:bx-0x6],eax\r
mov ebx, cr0\r
or ebx, 0x23\r
mov cr0, ebx\r
jmp dword 0x0:0x0\r
-_GdtDesc: \r
+_GdtDesc:\r
DW 0\r
DD 0\r
\r
o16 mov fs, ax\r
o16 mov gs, ax\r
o16 mov ss, ax\r
- DB 0xbc ; mov esp, imm32\r
-ASM_PFX(gSmiStack): DD 0\r
+ mov esp, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmiStack):\r
jmp ProtFlatMode\r
\r
BITS 64\r
ProtFlatMode:\r
- DB 0xb8 ; mov eax, offset gSmiCr3\r
-ASM_PFX(gSmiCr3): DD 0\r
+ mov eax, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmiCr3):\r
mov cr3, rax\r
mov eax, 0x668 ; as cr4.PGE is not set here, refresh cr3\r
+\r
+ mov cl, strict byte 0 ; source operand will be patched\r
+ASM_PFX(gPatch5LevelPagingNeeded):\r
+ cmp cl, 0\r
+ je SkipEnable5LevelPaging\r
+ ;\r
+ ; Enable 5-Level Paging bit\r
+ ;\r
+ bts eax, 12 ; Set LA57 bit (bit #12)\r
+SkipEnable5LevelPaging:\r
+\r
mov cr4, rax ; in PreModifyMtrrs() to flush TLB.\r
; Load TSS\r
sub esp, 8 ; reserve room in stack\r
mov eax, TSS_SEGMENT\r
ltr ax\r
\r
+; enable NXE if supported\r
+ mov al, strict byte 1 ; source operand may be patched\r
+ASM_PFX(gPatchXdSupported):\r
+ cmp al, 0\r
+ jz @SkipXd\r
+;\r
+; Check XD disable bit\r
+;\r
+ mov ecx, MSR_IA32_MISC_ENABLE\r
+ rdmsr\r
+ sub esp, 4\r
+ push rdx ; save MSR_IA32_MISC_ENABLE[63-32]\r
+ test edx, BIT2 ; MSR_IA32_MISC_ENABLE[34]\r
+ jz .0\r
+ and dx, 0xFFFB ; clear XD Disable bit if it is set\r
+ wrmsr\r
+.0:\r
+ mov ecx, MSR_EFER\r
+ rdmsr\r
+ or ax, MSR_EFER_XD ; enable NXE\r
+ wrmsr\r
+ jmp @XdDone\r
+@SkipXd:\r
+ sub esp, 8\r
+@XdDone:\r
+\r
; Switch into @LongMode\r
push LONG_MODE_CS ; push cs hardcore here\r
- call Base ; push reture address for retf later\r
+ call Base ; push return address for retf later\r
Base:\r
add dword [rsp], @LongMode - Base; offset for far retf, seg is the 1st arg\r
- mov ecx, 0xc0000080\r
+\r
+ mov ecx, MSR_EFER\r
rdmsr\r
- or ah, 1\r
+ or ah, 1 ; enable LME\r
wrmsr\r
mov rbx, cr0\r
- or ebx, 080010000h ; enable paging + WP\r
+ or ebx, 0x80010023 ; enable paging + WP + NE + MP + PE\r
mov cr0, rbx\r
retf\r
@LongMode: ; long mode (64-bit code) starts here\r
- mov rax, ASM_PFX(gSmiHandlerIdtr)\r
+ mov rax, strict qword 0 ; mov rax, ASM_PFX(gSmiHandlerIdtr)\r
+SmiHandlerIdtrAbsAddr:\r
lidt [rax]\r
lea ebx, [rdi + DSC_OFFSET]\r
mov ax, [rbx + DSC_DS]\r
mov gs, eax\r
mov ax, [rbx + DSC_SS]\r
mov ss, eax\r
-; jmp _SmiHandler ; instruction is not needed\r
\r
-_SmiHandler:\r
- mov rbx, [rsp] ; rbx <- CpuIndex\r
+ mov rbx, [rsp + 0x8] ; rbx <- CpuIndex\r
+\r
+; enable CET if supported\r
+ mov al, strict byte 1 ; source operand may be patched\r
+ASM_PFX(mPatchCetSupported):\r
+ cmp al, 0\r
+ jz CetDone\r
+\r
+ mov ecx, MSR_IA32_S_CET\r
+ rdmsr\r
+ push rdx\r
+ push rax\r
+\r
+ mov ecx, MSR_IA32_PL0_SSP\r
+ rdmsr\r
+ push rdx\r
+ push rax\r
+\r
+ mov ecx, MSR_IA32_INTERRUPT_SSP_TABLE_ADDR\r
+ rdmsr\r
+ push rdx\r
+ push rax\r
+\r
+ mov ecx, MSR_IA32_S_CET\r
+ mov eax, MSR_IA32_CET_SH_STK_EN\r
+ xor edx, edx\r
+ wrmsr\r
+\r
+ mov ecx, MSR_IA32_PL0_SSP\r
+ mov eax, strict dword 0 ; source operand will be patched\r
+ASM_PFX(mPatchCetPl0Ssp):\r
+ xor edx, edx\r
+ wrmsr\r
+ mov rcx, cr0\r
+ btr ecx, 16 ; clear WP\r
+ mov cr0, rcx\r
+ mov [eax], eax ; reload SSP, and clear busyflag.\r
+ xor ecx, ecx\r
+ mov [eax + 4], ecx\r
+\r
+ mov ecx, MSR_IA32_INTERRUPT_SSP_TABLE_ADDR\r
+ mov eax, strict dword 0 ; source operand will be patched\r
+ASM_PFX(mPatchCetInterruptSspTable):\r
+ xor edx, edx\r
+ wrmsr\r
+\r
+ mov eax, strict dword 0 ; source operand will be patched\r
+ASM_PFX(mPatchCetInterruptSsp):\r
+ cmp eax, 0\r
+ jz CetInterruptDone\r
+ mov [eax], eax ; reload SSP, and clear busyflag.\r
+ xor ecx, ecx\r
+ mov [eax + 4], ecx\r
+CetInterruptDone:\r
+\r
+ mov rcx, cr0\r
+ bts ecx, 16 ; set WP\r
+ mov cr0, rcx\r
+\r
+ mov eax, 0x668 | CR4_CET\r
+ mov cr4, rax\r
+\r
+ SETSSBSY\r
+\r
+CetDone:\r
\r
;\r
; Save FP registers\r
;\r
- sub rsp, 0x208\r
- DB 0x48 ; FXSAVE64\r
- fxsave [rsp]\r
+ sub rsp, 0x200\r
+ fxsave64 [rsp]\r
\r
add rsp, -0x20\r
\r
mov rcx, rbx\r
- mov rax, CpuSmmDebugEntry\r
+ mov rax, strict qword 0 ; call ASM_PFX(CpuSmmDebugEntry)\r
+CpuSmmDebugEntryAbsAddr:\r
call rax\r
- \r
+\r
mov rcx, rbx\r
- mov rax, SmiRendezvous ; rax <- absolute addr of SmiRedezvous\r
+ mov rax, strict qword 0 ; call ASM_PFX(SmiRendezvous)\r
+SmiRendezvousAbsAddr:\r
call rax\r
- \r
+\r
mov rcx, rbx\r
- mov rax, CpuSmmDebugExit\r
+ mov rax, strict qword 0 ; call ASM_PFX(CpuSmmDebugExit)\r
+CpuSmmDebugExitAbsAddr:\r
call rax\r
- \r
+\r
add rsp, 0x20\r
\r
;\r
; Restore FP registers\r
;\r
- DB 0x48 ; FXRSTOR64\r
- fxrstor [rsp]\r
+ fxrstor64 [rsp]\r
+\r
+ add rsp, 0x200\r
+\r
+ mov rax, strict qword 0 ; mov rax, ASM_PFX(mCetSupported)\r
+mCetSupportedAbsAddr:\r
+ mov al, [rax]\r
+ cmp al, 0\r
+ jz CetDone2\r
+\r
+ mov eax, 0x668\r
+ mov cr4, rax ; disable CET\r
+\r
+ mov ecx, MSR_IA32_INTERRUPT_SSP_TABLE_ADDR\r
+ pop rax\r
+ pop rdx\r
+ wrmsr\r
+\r
+ mov ecx, MSR_IA32_PL0_SSP\r
+ pop rax\r
+ pop rdx\r
+ wrmsr\r
+\r
+ mov ecx, MSR_IA32_S_CET\r
+ pop rax\r
+ pop rdx\r
+ wrmsr\r
+CetDone2:\r
+\r
+ mov rax, strict qword 0 ; lea rax, [ASM_PFX(mXdSupported)]\r
+mXdSupportedAbsAddr:\r
+ mov al, [rax]\r
+ cmp al, 0\r
+ jz .1\r
+ pop rdx ; get saved MSR_IA32_MISC_ENABLE[63-32]\r
+ test edx, BIT2\r
+ jz .1\r
+ mov ecx, MSR_IA32_MISC_ENABLE\r
+ rdmsr\r
+ or dx, BIT2 ; set XD Disable bit if it was set before entering into SMM\r
+ wrmsr\r
\r
+.1:\r
+\r
+ StuffRsb64\r
rsm\r
\r
-gcSmiHandlerSize DW $ - _SmiEntryPoint\r
+; Size in bytes of the SMI handler template, measured from _SmiEntryPoint\r
+; to here; consumed by the C code that copies the template to SMBASE.\r
+ASM_PFX(gcSmiHandlerSize) DW $ - _SmiEntryPoint\r
+\r
+;\r
+; PiSmmCpuSmiEntryFixupAddress(): patch absolute symbol addresses into the\r
+; SMI entry template above.\r
+;\r
+; Each *AbsAddr label is placed immediately after a 10-byte\r
+; "mov rax, strict qword 0" instruction, so [label - 8] addresses that\r
+; instruction's 64-bit immediate field.  This routine stores the real\r
+; runtime address of each referenced symbol into that field.  It is called\r
+; once from the driver entry point; because the patched operands are\r
+; absolute addresses, the template keeps working after it is copied to a\r
+; different location (SMBASE).\r
+;\r
+global ASM_PFX(PiSmmCpuSmiEntryFixupAddress)\r
+ASM_PFX(PiSmmCpuSmiEntryFixupAddress):\r
+    ; IDT descriptor loaded via "lidt [rax]" in the template\r
+    lea rax, [ASM_PFX(gSmiHandlerIdtr)]\r
+    lea rcx, [SmiHandlerIdtrAbsAddr]\r
+    mov qword [rcx - 8], rax\r
+\r
+    ; targets of the three indirect "call rax" sites in the template\r
+    lea rax, [ASM_PFX(CpuSmmDebugEntry)]\r
+    lea rcx, [CpuSmmDebugEntryAbsAddr]\r
+    mov qword [rcx - 8], rax\r
+\r
+    lea rax, [ASM_PFX(SmiRendezvous)]\r
+    lea rcx, [SmiRendezvousAbsAddr]\r
+    mov qword [rcx - 8], rax\r
+\r
+    lea rax, [ASM_PFX(CpuSmmDebugExit)]\r
+    lea rcx, [CpuSmmDebugExitAbsAddr]\r
+    mov qword [rcx - 8], rax\r
+\r
+    ; byte flags read on the SMM exit path (XD restore / CET disable)\r
+    lea rax, [ASM_PFX(mXdSupported)]\r
+    lea rcx, [mXdSupportedAbsAddr]\r
+    mov qword [rcx - 8], rax\r
\r
+    lea rax, [ASM_PFX(mCetSupported)]\r
+    lea rcx, [mCetSupportedAbsAddr]\r
+    mov qword [rcx - 8], rax\r
+    ret\r