--- /dev/null
+;------------------------------------------------------------------------------\r
+; Copyright (c) 2016, Intel Corporation. All rights reserved.<BR>\r
+; This program and the accompanying materials\r
+; are licensed and made available under the terms and conditions of the BSD License\r
+; which accompanies this distribution. The full text of the license may be found at\r
+; http://opensource.org/licenses/bsd-license.php.\r
+;\r
+; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+;\r
+; Module Name:\r
+;\r
+; SmmInit.nasm\r
+;\r
+; Abstract:\r
+;\r
+; Functions for relocating SMBASE's for all processors\r
+;\r
+;-------------------------------------------------------------------------------\r
+\r
+extern ASM_PFX(SmmInitHandler)\r
+extern ASM_PFX(mRebasedFlag)\r
+extern ASM_PFX(mSmmRelocationOriginalAddress)\r
+\r
+global ASM_PFX(gSmmCr3)\r
+global ASM_PFX(gSmmCr4)\r
+global ASM_PFX(gSmmCr0)\r
+global ASM_PFX(gSmmJmpAddr)\r
+global ASM_PFX(gSmmInitStack)\r
+global ASM_PFX(gcSmiInitGdtr)\r
+global ASM_PFX(gcSmmInitSize)\r
+global ASM_PFX(gcSmmInitTemplate)\r
+global ASM_PFX(mRebasedFlagAddr32)\r
+global ASM_PFX(mSmmRelocationOriginalAddressPtr32)\r
+\r
+ DEFAULT REL\r
+ SECTION .text\r
+\r
+; Pseudo GDT register image loaded by the lgdt in SmmStartup below.
+; Both fields are 0 here; presumably filled in by setup code outside this
+; file before the first SMI -- TODO confirm against the C relocation code.
+ASM_PFX(gcSmiInitGdtr):\r
+ DW 0 ; GDT limit\r
+ DQ 0 ; GDT base address\r
+\r
+;
+; SmmStartup -- SMI entry code used while SMBASE relocation is in progress.
+;
+; NOTE(review): the portion up to @LongMode appears to run before long mode
+; is active; the DB-emitted prefix bytes (0x66, 0x2e, 0x48) force the
+; operand sizes intended for that execution mode even though this section is
+; assembled as 64-bit -- confirm against the SMM initialization flow.
+; The DD/DQ cells following the gSmm* labels are instruction immediates;
+; they are 0/placeholder here and are presumably patched at runtime by code
+; not visible in this file -- TODO confirm.
+;
+global ASM_PFX(SmmStartup)\r
+ASM_PFX(SmmStartup):\r
+ DB 0x66, 0xb8 ; mov eax, imm32 -- imm32 is the gSmmCr3 cell below\r
+ASM_PFX(gSmmCr3): DD 0 ; patched: CR3 value (page-table base)\r
+ mov cr3, rax\r
+ DB 0x66, 0x2e ; operand-size + CS segment override for the lgdt below\r
+ lgdt [ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]\r
+ DB 0x66, 0xb8 ; mov eax, imm32 -- imm32 is the gSmmCr4 cell below\r
+ASM_PFX(gSmmCr4): DD 0 ; patched: CR4 value\r
+ or ah, 2 ; set CR4 bit 9 (OSFXSR) to enable XMM registers access\r
+ mov cr4, rax\r
+ DB 0x66 ; operand-size override for the mov below\r
+ mov ecx, 0xc0000080 ; IA32_EFER MSR\r
+ rdmsr\r
+ or ah, 1 ; set LME bit (EFER bit 8) to request long mode\r
+ wrmsr\r
+ DB 0x66, 0xb8 ; mov eax, imm32 -- imm32 is the gSmmCr0 cell below\r
+ASM_PFX(gSmmCr0): DD 0 ; patched: CR0 value\r
+ mov cr0, rax ; enable protected mode & paging (activates long mode)\r
+ DB 0x66, 0xea ; far jmp to long mode\r
+ASM_PFX(gSmmJmpAddr): DQ @LongMode ; far-pointer operand; selector presumably patched into the high dword -- TODO confirm\r
+@LongMode: ; long-mode starts here\r
+ DB 0x48, 0xbc ; mov rsp, imm64 -- imm64 is the gSmmInitStack cell below\r
+ASM_PFX(gSmmInitStack): DQ 0 ; patched: initial stack for the C handler\r
+ and sp, 0xfff0 ; make sure RSP is 16-byte aligned\r
+ ;\r
+ ; According to X64 calling convention, XMM0~5 are volatile, we need to save\r
+ ; them before calling C-function.\r
+ ;\r
+ sub rsp, 0x60 ; 6 x 16 bytes for xmm0-xmm5\r
+ movdqa [rsp], xmm0\r
+ movdqa [rsp + 0x10], xmm1\r
+ movdqa [rsp + 0x20], xmm2\r
+ movdqa [rsp + 0x30], xmm3\r
+ movdqa [rsp + 0x40], xmm4\r
+ movdqa [rsp + 0x50], xmm5\r
+\r
+ add rsp, -0x20 ; reserve 0x20 bytes -- presumably MS-x64/EFIAPI shadow space; TODO confirm ABI\r
+ call ASM_PFX(SmmInitHandler)\r
+ add rsp, 0x20\r
+\r
+ ;\r
+ ; Restore XMM0~5 after calling C-function.\r
+ ;\r
+ movdqa xmm0, [rsp]\r
+ movdqa xmm1, [rsp + 0x10]\r
+ movdqa xmm2, [rsp + 0x20]\r
+ movdqa xmm3, [rsp + 0x30]\r
+ movdqa xmm4, [rsp + 0x40]\r
+ movdqa xmm5, [rsp + 0x50]\r
+\r
+ rsm ; resume from SMM back to the interrupted context\r
+\r
+BITS 16\r
+;
+; gcSmmInitTemplate -- 16-bit stub that loads the flat address of
+; SmmStartup from the @L1 data cell and jumps to it.
+; NOTE(review): the 0x8000 / 0x30000 constants appear to correspond to the
+; default SMI entry offset (0x8000) within the default SMBASE (0x30000);
+; confirm against the code that copies this template into place.
+;
+ASM_PFX(gcSmmInitTemplate):\r
+ mov ebp, [cs:@L1 - ASM_PFX(gcSmmInitTemplate) + 0x8000] ; ebp = SmmStartup address (from @L1 below)\r
+ sub ebp, 0x30000 ; adjust for the CS-relative view -- TODO confirm\r
+ jmp ebp\r
+@L1:\r
+ DQ ASM_PFX(SmmStartup) ; absolute address consumed by the mov above\r
+\r
+ASM_PFX(gcSmmInitSize): DW $ - ASM_PFX(gcSmmInitTemplate) ; byte size of the template, for the copier\r
+\r
+BITS 64\r
+;
+; SmmRelocationSemaphoreComplete -- 64-bit return thunk: stores 1 to the
+; byte that mRebasedFlag points at (signalling this CPU's SMBASE relocation
+; is done), then tail-jumps to the original SMI handler address.
+;
+global ASM_PFX(SmmRelocationSemaphoreComplete)\r
+ASM_PFX(SmmRelocationSemaphoreComplete):\r
+ push rax ; preserve rax across the flag update\r
+ mov rax, [ASM_PFX(mRebasedFlag)]\r
+ mov byte [rax], 1 ; *mRebasedFlag = 1\r
+ pop rax\r
+ jmp [ASM_PFX(mSmmRelocationOriginalAddress)] ; continue in original handler\r
+\r
+;\r
+; Semaphore code running in 32-bit mode\r
+;\r
+global ASM_PFX(SmmRelocationSemaphoreComplete32)\r
+ASM_PFX(SmmRelocationSemaphoreComplete32):\r
+ ;\r
+ ; mov byte ptr [], 1\r
+ ;\r
+ db 0xc6, 0x5\r
+ASM_PFX(mRebasedFlagAddr32): dd 0\r
+ db 1\r
+ ;\r
+ ; jmp dword ptr []\r
+ ;\r
+ db 0xff, 0x25\r
+ASM_PFX(mSmmRelocationOriginalAddressPtr32): dd 0\r