#------------------------------------------------------------------------------
#
# Copyright (c) 2009 - 2017, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
ASM_GLOBAL ASM_PFX(gSmiCr3)
ASM_GLOBAL ASM_PFX(gSmiStack)
ASM_GLOBAL ASM_PFX(gSmbase)
ASM_GLOBAL ASM_PFX(mXdSupported)
ASM_GLOBAL ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))
ASM_GLOBAL ASM_PFX(gSmiHandlerIdtr)

#
# MSRs and bits used to manage the XD (execute-disable / NXE) feature
#
.equ MSR_IA32_MISC_ENABLE, 0x1A0
.equ MSR_EFER, 0xc0000080
.equ MSR_EFER_XD, 0x800

#
# Constants relating to PROCESSOR_SMM_DESCRIPTOR
#
.equ DSC_OFFSET, 0xfb00
.equ DSC_GDTPTR, 0x30
.equ DSC_GDTSIZ, 0x38
orl $BIT10, %eax\r
L12: # as cr4.PGE is not set here, refresh cr3\r
movl %eax, %cr4 # in PreModifyMtrrs() to flush TLB.\r
+\r
+ cmpb $0, ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))\r
+ jz L5\r
+# Load TSS\r
+ movb $0x89, (TSS_SEGMENT + 5)(%ebp) # clear busy flag\r
+ movl $TSS_SEGMENT, %eax\r
+ ltrw %ax\r
+L5:\r
+\r
+# enable NXE if supported\r
+ .byte 0xb0 # mov al, imm8\r
+ASM_PFX(mXdSupported): .byte 1\r
+ cmpb $0, %al\r
+ jz SkipNxe\r
+#\r
+# Check XD disable bit\r
+#\r
+ movl $MSR_IA32_MISC_ENABLE, %ecx\r
+ rdmsr\r
+ pushl %edx # save MSR_IA32_MISC_ENABLE[63-32]\r
+ testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]\r
+ jz L13\r
+ andw $0x0FFFB, %dx # clear XD Disable bit if it is set\r
+ wrmsr\r
+L13:\r
+ movl $MSR_EFER, %ecx\r
+ rdmsr\r
+ orw $MSR_EFER_XD,%ax # enable NXE\r
+ wrmsr\r
+ jmp NxeDone\r
+SkipNxe:\r
+ subl $4, %esp\r
+NxeDone:\r
+\r
movl %cr0, %ebx\r
- orl $0x080000000, %ebx # enable paging\r
+ orl $0x080010023, %ebx # enable paging + WP + NE + MP + PE\r
movl %ebx, %cr0\r
leal DSC_OFFSET(%edi),%ebx\r
movw DSC_DS(%ebx),%ax\r
movw DSC_SS(%ebx),%ax\r
movl %eax, %ss\r
\r
- cmpb $0, ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))\r
- jz L5\r
-\r
-# Load TSS\r
- movb $0x89, (TSS_SEGMENT + 5)(%ebp) # clear busy flag\r
- movl $TSS_SEGMENT, %eax\r
- ltrw %ax\r
-L5:\r
-\r
# jmp _SmiHandler # instruction is not needed\r
\r
_SmiHandler:
    # 4(%esp) skips the dword pushed (or reserved) by the NXE setup path;
    # presumably this argument is the CpuIndex — confirm against the caller.
    movl    4(%esp), %ebx

    pushl   %ebx
    movl    $ASM_PFX(CpuSmmDebugEntry), %eax
    call    *%eax
    addl    $4, %esp                        # discard argument; %ecx stays intact

    pushl   %ebx
    movl    $ASM_PFX(SmiRendezvous), %eax
    call    *%eax
    addl    $4, %esp

    pushl   %ebx
    movl    $ASM_PFX(CpuSmmDebugExit), %eax
    call    *%eax
    addl    $4, %esp

# Restore the XD Disable state that was saved on entry, if XD is supported
    movl    $ASM_PFX(mXdSupported), %eax
    movb    (%eax), %al
    cmpb    $0, %al
    jz      L16
    popl    %edx                            # get saved MSR_IA32_MISC_ENABLE[63-32]
    testl   $BIT2, %edx
    jz      L16
    movl    $MSR_IA32_MISC_ENABLE, %ecx
    rdmsr
    orw     $BIT2, %dx                      # set XD Disable bit if it was set before entering into SMM
    wrmsr

L16:
    rsm
\r
# Byte count of the SMI handler code above, measured from _SmiEntryPoint to here.
ASM_PFX(gcSmiHandlerSize): .word . - _SmiEntryPoint