--- /dev/null
+#------------------------------------------------------------------------------\r
+#\r
+# Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>\r
+# This program and the accompanying materials\r
+# are licensed and made available under the terms and conditions of the BSD License\r
+# which accompanies this distribution. The full text of the license may be found at\r
+# http://opensource.org/licenses/bsd-license.php.\r
+#\r
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+#\r
+# Module Name:\r
+#\r
+# SmiEntry.S\r
+#\r
+# Abstract:\r
+#\r
+# Code template of the SMI handler for a particular processor\r
+#\r
+#------------------------------------------------------------------------------\r
+\r
+# Exported symbols.  The gcStm* labels are assembly-time constants describing\r
+# the handler template; the gStm* labels mark .space/.byte placeholders inside\r
+# the template body that C code patches with per-CPU values (SMBASE, SMI stack,\r
+# CR3, XD support) -- presumably before the template is copied into SMRAM;\r
+# TODO(review): confirm against the consuming C code, which is outside this file.\r
+ASM_GLOBAL ASM_PFX(gcStmSmiHandlerTemplate)\r
+ASM_GLOBAL ASM_PFX(gcStmSmiHandlerSize)\r
+ASM_GLOBAL ASM_PFX(gcStmSmiHandlerOffset)\r
+ASM_GLOBAL ASM_PFX(gStmSmiCr3)\r
+ASM_GLOBAL ASM_PFX(gStmSmiStack)\r
+ASM_GLOBAL ASM_PFX(gStmSmbase)\r
+ASM_GLOBAL ASM_PFX(gStmXdSupported)\r
+ASM_GLOBAL ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))\r
+ASM_GLOBAL ASM_PFX(gStmSmiHandlerIdtr)\r
+\r
+.equ MSR_IA32_MISC_ENABLE, 0x1A0         # bit 34 = XD Disable (tested below as %edx bit 2)\r
+.equ MSR_EFER, 0xc0000080\r
+.equ MSR_EFER_XD, 0x800                  # IA32_EFER.NXE (bit 11)\r
+\r
+#\r
+# Constants relating to TXT_PROCESSOR_SMM_DESCRIPTOR\r
+#\r
+.equ DSC_OFFSET, 0xfb00                  # descriptor offset from SMBASE (added to %edi below)\r
+.equ DSC_GDTPTR, 0x48                    # field: 32-bit GDT base pointer\r
+.equ DSC_GDTSIZ, 0x50                    # field: GDT size (limit + 1)\r
+.equ DSC_CS, 0x14                        # field: code segment selector\r
+.equ DSC_DS, 0x16                        # field: data segment selector (loaded into %ds)\r
+.equ DSC_SS, 0x18                        # field: stack segment selector (loaded into %ss)\r
+.equ DSC_OTHERSEG, 0x1A                  # field: selector loaded into %es/%fs/%gs\r
+\r
+# Selectors in the GDT built by this template.\r
+.equ PROTECT_MODE_CS, 0x08\r
+.equ PROTECT_MODE_DS, 0x20\r
+.equ TSS_SEGMENT, 0x40\r
+\r
+ .text\r
+ASM_PFX(gcStmSmiHandlerTemplate):\r
+\r
+# SMI entry point.  The CPU begins execution here in 16-bit mode (at offset\r
+# 0x8000 in the SMBASE segment, per the .word computations below), while the\r
+# assembler emits 32-bit code: 16-bit instructions are therefore hand-encoded\r
+# with .byte/.word, and the 32-bit mnemonics below decode as the 16-bit forms\r
+# named in their comments (e.g. an %edi memory operand decodes as [bx], and a\r
+# 0x66 prefix flips the operand size the other way).  Do not re-encode: the\r
+# cs:[bx-2]/cs:[bx-6] patch offsets and gcStmSmiHandlerSize depend on these\r
+# exact byte sequences.\r
+_StmSmiEntryPoint:\r
+ .byte 0xbb # mov bx, imm16\r
+ .word _StmGdtDesc - _StmSmiEntryPoint + 0x8000\r
+# Build the GDT pseudo-descriptor in place at cs:[bx] (= _StmGdtDesc):\r
+# 16-bit limit = GDT size - 1, then the 32-bit GDT base, both read from the\r
+# TXT_PROCESSOR_SMM_DESCRIPTOR at DSC_OFFSET.\r
+ .byte 0x2e,0xa1 # mov ax, cs:[offset16]\r
+ .word DSC_OFFSET + DSC_GDTSIZ\r
+ decl %eax # decodes as dec ax: limit = size - 1\r
+ movl %eax, %cs:(%edi) # mov cs:[bx], ax\r
+ .byte 0x66,0x2e,0xa1 # mov eax, cs:[offset16]\r
+ .word DSC_OFFSET + DSC_GDTPTR\r
+ movw %ax, %cs:2(%edi) # decodes as mov cs:[bx+2], eax (GDT base into descriptor)\r
+ movw %ax, %bp # ebp = GDT base\r
+ .byte 0x66\r
+ lgdt %cs:(%edi) # load the descriptor just assembled at cs:[bx]\r
+# Patch ProtectedMode Segment\r
+# The far jump emitted at the end of this block ends right before _StmGdtDesc,\r
+# so cs:[bx-2] is its 16-bit selector field and cs:[bx-6] its 32-bit offset.\r
+ .byte 0xb8 # mov ax, imm16\r
+ .word PROTECT_MODE_CS # set AX for segment directly\r
+ movl %eax, %cs:-2(%edi) # mov cs:[bx - 2], ax\r
+# Patch ProtectedMode entry\r
+ .byte 0x66, 0xbf # mov edi, SMBASE\r
+ASM_PFX(gStmSmbase): .space 4 # placeholder patched with this CPU's SMBASE\r
+ .byte 0x67\r
+ lea ((Start32bit - _StmSmiEntryPoint) + 0x8000)(%edi), %ax # eax = flat address of Start32bit\r
+ movw %ax, %cs:-6(%edi) # decodes as mov cs:[bx-6], eax: patch far-jump offset\r
+# Enter protected mode: mask 0x9ffafff3 clears CD/NW and TS/EM/WP/AM;\r
+# 0x23 sets PE | MP | NE.\r
+ movl %cr0, %ebx\r
+ .byte 0x66\r
+ andl $0x9ffafff3, %ebx\r
+ .byte 0x66\r
+ orl $0x23, %ebx\r
+ movl %ebx, %cr0\r
+# Far jump (jmp ptr16:32) through the operands patched above.\r
+ .byte 0x66,0xea\r
+ .space 4 # offset field: patched with Start32bit\r
+ .space 2 # selector field: patched with PROTECT_MODE_CS\r
+_StmGdtDesc: .space 4 # GDT pseudo-descriptor, filled in at run time (limit + base)\r
+ .space 2\r
+\r
+# 32-bit protected-mode entry (reached via the patched far jump above).\r
+# Load flat data segments, switch to the per-CPU SMI stack, and install\r
+# the SMM IDT.\r
+Start32bit:\r
+ movw $PROTECT_MODE_DS, %ax\r
+ movl %eax,%ds\r
+ movl %eax,%es\r
+ movl %eax,%fs\r
+ movl %eax,%gs\r
+ movl %eax,%ss\r
+ .byte 0xbc # mov esp, imm32\r
+ASM_PFX(gStmSmiStack): .space 4 # placeholder patched with this CPU's SMI stack pointer\r
+ movl $ASM_PFX(gStmSmiHandlerIdtr), %eax\r
+ lidt (%eax) # load the SMM IDT\r
+ jmp ProtFlatMode\r
+\r
+# Flat protected mode: install the SMM page-table root, then enable only the\r
+# CR4 feature bits the processor actually reports via CPUID leaf 1 (%edx).\r
+ProtFlatMode:\r
+ .byte 0xb8 # mov eax, imm32\r
+ASM_PFX(gStmSmiCr3): .space 4 # placeholder patched with the SMM CR3 value\r
+ movl %eax, %cr3\r
+#\r
+# Need to test for CR4 specific bit support\r
+#\r
+ movl $1, %eax\r
+ cpuid # use CPUID to determine if specific CR4 bits are supported\r
+ xorl %eax, %eax # Clear EAX\r
+ testl $BIT2, %edx # Check for DE capabilities\r
+ jz L8\r
+ orl $BIT3, %eax # CR4.DE\r
+L8:\r
+ testl $BIT6, %edx # Check for PAE capabilities\r
+ jz L9\r
+ orl $BIT5, %eax # CR4.PAE\r
+L9:\r
+ testl $BIT7, %edx # Check for MCE capabilities\r
+ jz L10\r
+ orl $BIT6, %eax # CR4.MCE\r
+L10:\r
+ testl $BIT24, %edx # Check for FXSR capabilities\r
+ jz L11\r
+ orl $BIT9, %eax # CR4.OSFXSR\r
+L11:\r
+ testl $BIT25, %edx # Check for SSE capabilities\r
+ jz L12\r
+ orl $BIT10, %eax # CR4.OSXMMEXCPT\r
+L12: # as cr4.PGE is not set here, refresh cr3\r
+ movl %eax, %cr4 # in PreModifyMtrrs() to flush TLB.\r
+\r
+# If the SMM stack guard feature is enabled, load the task register.\r
+# %ebp still holds the GDT base from the real-mode code above; writing 0x89\r
+# (present, available 32-bit TSS) to type byte GDT[TSS_SEGMENT]+5 clears any\r
+# stale busy flag so ltr does not fault.\r
+ cmpb $0, ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))\r
+ jz L5\r
+# Load TSS\r
+ movb $0x89, (TSS_SEGMENT + 5)(%ebp) # clear busy flag\r
+ movl $TSS_SEGMENT, %eax\r
+ ltrw %ax\r
+L5:\r
+\r
+# enable NXE if supported\r
+# gStmXdSupported is a one-byte flag patched by C code (0 = XD unsupported).\r
+ .byte 0xb0 # mov al, imm8\r
+ASM_PFX(gStmXdSupported): .byte 1\r
+ cmpb $0, %al\r
+ jz SkipXd\r
+#\r
+# Check XD disable bit\r
+#\r
+ movl $MSR_IA32_MISC_ENABLE, %ecx\r
+ rdmsr\r
+ pushl %edx # save MSR_IA32_MISC_ENABLE[63-32]; popped in CommonHandler\r
+ testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]\r
+ jz L13\r
+ andw $0x0FFFB, %dx # clear XD Disable bit if it is set\r
+ wrmsr\r
+L13:\r
+ movl $MSR_EFER, %ecx\r
+ rdmsr\r
+ orw $MSR_EFER_XD,%ax # enable NXE\r
+ wrmsr\r
+ jmp XdDone\r
+SkipXd:\r
+ subl $4, %esp # dummy slot so the stack layout matches the XD path above\r
+XdDone:\r
+\r
+# Enable paging, then load the SMM data segments from the\r
+# TXT_PROCESSOR_SMM_DESCRIPTOR at SMBASE + DSC_OFFSET (%edi = SMBASE,\r
+# patched in the real-mode code above).\r
+ movl %cr0, %ebx\r
+ orl $0x080010023, %ebx # enable paging + WP + NE + MP + PE\r
+ movl %ebx, %cr0\r
+ leal DSC_OFFSET(%edi),%ebx\r
+ movw DSC_DS(%ebx),%ax\r
+ movl %eax, %ds\r
+ movw DSC_OTHERSEG(%ebx),%ax\r
+ movl %eax, %es\r
+ movl %eax, %fs\r
+ movl %eax, %gs\r
+ movw DSC_SS(%ebx),%ax\r
+ movl %eax, %ss\r
+\r
+\r
+# Common SMI dispatch, shared by both entry paths.  On entry (%esp) holds the\r
+# saved MISC_ENABLE[63-32] (or the dummy slot from the non-XD path) and\r
+# 4(%esp) holds the argument passed to the three C handlers -- presumably\r
+# this CPU's index; TODO(review): confirm against the stack-initialization code.\r
+CommonHandler:\r
+ movl 4(%esp), %ebx\r
+\r
+# Calls go through a register-held absolute address so they work regardless\r
+# of where this template copy executes (no relative relocation in the copy).\r
+ pushl %ebx\r
+ movl $ASM_PFX(CpuSmmDebugEntry), %eax\r
+ call *%eax\r
+ addl $4, %esp\r
+\r
+ pushl %ebx\r
+ movl $ASM_PFX(SmiRendezvous), %eax\r
+ call *%eax\r
+ addl $4, %esp\r
+\r
+ pushl %ebx\r
+ movl $ASM_PFX(CpuSmmDebugExit), %eax\r
+ call *%eax\r
+ addl $4, %esp\r
+\r
+# Restore the XD Disable bit if it was set before SMM entry.  When XD is\r
+# unsupported the saved/dummy slot is simply left on the stack; RSM discards it.\r
+ movl $ASM_PFX(gStmXdSupported), %eax\r
+ movb (%eax), %al\r
+ cmpb $0, %al\r
+ jz L16\r
+ popl %edx # get saved MSR_IA32_MISC_ENABLE[63-32]\r
+ testl $BIT2, %edx\r
+ jz L16\r
+ movl $MSR_IA32_MISC_ENABLE, %ecx\r
+ rdmsr\r
+ orw $BIT2, %dx # set XD Disable bit if it was set before entering into SMM\r
+ wrmsr\r
+\r
+L16:\r
+ rsm # resume from SMM\r
+\r
+# Alternate entry used when an STM is active (see gcStmSmiHandlerOffset):\r
+# control arrives here directly, skipping the real-mode setup above, so the\r
+# IDT/CR0/CR4 state must be re-established before joining CommonHandler.\r
+# The saved MISC_ENABLE high dword is kept in %esi (zeroed when XD is\r
+# unsupported) and pushed afterwards to match CommonHandler's stack layout.\r
+_StmSmiHandler:\r
+#\r
+# Check XD disable bit\r
+#\r
+ xorl %esi, %esi\r
+ movl $ASM_PFX(gStmXdSupported), %eax\r
+ movb (%eax), %al\r
+ cmpb $0, %al\r
+ jz StmXdDone\r
+ movl $MSR_IA32_MISC_ENABLE, %ecx\r
+ rdmsr\r
+ movl %edx, %esi # save MSR_IA32_MISC_ENABLE[63-32]\r
+ testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]\r
+ jz L14\r
+ andw $0x0FFFB, %dx # clear XD Disable bit if it is set\r
+ wrmsr\r
+L14:\r
+ movl $MSR_EFER, %ecx\r
+ rdmsr\r
+ orw $MSR_EFER_XD,%ax # enable NXE\r
+ wrmsr\r
+StmXdDone:\r
+ push %esi # saved MISC_ENABLE[63-32] (or 0): consumed by CommonHandler\r
+\r
+ # below step is needed, because STM does not run above code.\r
+ # we have to run below code to set IDT/CR0/CR4\r
+ movl $ASM_PFX(gStmSmiHandlerIdtr), %eax\r
+ lidt (%eax)\r
+\r
+ movl %cr0, %eax\r
+ orl $0x80010023, %eax # enable paging + WP + NE + MP + PE\r
+ movl %eax, %cr0\r
+#\r
+# Need to test for CR4 specific bit support\r
+#\r
+# Unlike ProtFlatMode, start from the current CR4 so existing bits are kept.\r
+ movl $1, %eax\r
+ cpuid # use CPUID to determine if specific CR4 bits are supported\r
+ movl %cr4, %eax # init EAX\r
+ testl $BIT2, %edx # Check for DE capabilities\r
+ jz L28\r
+ orl $BIT3, %eax # CR4.DE\r
+L28:\r
+ testl $BIT6, %edx # Check for PAE capabilities\r
+ jz L29\r
+ orl $BIT5, %eax # CR4.PAE\r
+L29:\r
+ testl $BIT7, %edx # Check for MCE capabilities\r
+ jz L30\r
+ orl $BIT6, %eax # CR4.MCE\r
+L30:\r
+ testl $BIT24, %edx # Check for FXSR capabilities\r
+ jz L31\r
+ orl $BIT9, %eax # CR4.OSFXSR\r
+L31:\r
+ testl $BIT25, %edx # Check for SSE capabilities\r
+ jz L32\r
+ orl $BIT10, %eax # CR4.OSXMMEXCPT\r
+L32: # as cr4.PGE is not set here, refresh cr3\r
+ movl %eax, %cr4 # in PreModifyMtrrs() to flush TLB.\r
+ # STM init finish\r
+ jmp CommonHandler\r
+\r
+\r
+# Assembly-time constants consumed by the C code that copies and patches the\r
+# template: total template size, and the offset of the STM alternate entry.\r
+ASM_PFX(gcStmSmiHandlerSize) : .word . - _StmSmiEntryPoint\r
+ASM_PFX(gcStmSmiHandlerOffset): .word _StmSmiHandler - _StmSmiEntryPoint\r
+\r