;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
; SPDX-License-Identifier: BSD-2-Clause-Patent
;
; Module Name:
;
;
;-------------------------------------------------------------------------------
\r
%include "StuffRsbNasm.inc"

extern ASM_PFX(SmmInitHandler)
extern ASM_PFX(mRebasedFlag)
extern ASM_PFX(mSmmRelocationOriginalAddress)

;
; The gPatch* labels mark instruction operand fields that PiSmmCpuDxeSmm
; patches at runtime (see PatchInstructionX86()), replacing the former
; data-in-code globals (gSmmCr4, gSmmCr0, gSmmJmpAddr, gSmmInitStack, ...).
;
global ASM_PFX(gPatchSmmCr3)
global ASM_PFX(gPatchSmmCr4)
global ASM_PFX(gPatchSmmCr0)
global ASM_PFX(gPatchSmmInitStack)
global ASM_PFX(gcSmiInitGdtr)
global ASM_PFX(gcSmmInitSize)
global ASM_PFX(gcSmmInitTemplate)
global ASM_PFX(gPatchRebasedFlagAddr32)
global ASM_PFX(gPatchSmmRelocationOriginalAddressPtr32)

; GDT selector of the 64-bit long-mode code segment targeted by the far jump
; below -- NOTE(review): must match the GDT built elsewhere in this driver.
%define LONG_MODE_CS 0x38
\r
DEFAULT REL\r
SECTION .text\r
ASM_PFX(gPatchSmmCr3):\r
mov cr3, eax\r
o32 lgdt [cs:ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]\r
- DB 0x66, 0xb8 ; mov eax, imm32\r
-ASM_PFX(gSmmCr4): DD 0\r
+ mov eax, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmmCr4):\r
or ah, 2 ; enable XMM registers access\r
mov cr4, eax\r
mov ecx, 0xc0000080 ; IA32_EFER MSR\r
or ah, BIT3 ; set NXE bit\r
.1:\r
wrmsr\r
- DB 0x66, 0xb8 ; mov eax, imm32\r
-ASM_PFX(gSmmCr0): DD 0\r
+ mov eax, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmmCr0):\r
mov cr0, eax ; enable protected mode & paging\r
- DB 0x66, 0xea ; far jmp to long mode\r
-ASM_PFX(gSmmJmpAddr): DQ 0;@LongMode\r
+ jmp LONG_MODE_CS : dword 0 ; offset will be patched to @LongMode\r
+@PatchLongModeOffset:\r
\r
BITS 64\r
@LongMode: ; long-mode starts here\r
- DB 0x48, 0xbc ; mov rsp, imm64\r
-ASM_PFX(gSmmInitStack): DQ 0\r
+ mov rsp, strict qword 0 ; source operand will be patched\r
+ASM_PFX(gPatchSmmInitStack):\r
and sp, 0xfff0 ; make sure RSP is 16-byte aligned\r
;\r
; Accoring to X64 calling convention, XMM0~5 are volatile, we need to save\r
movdqa xmm4, [rsp + 0x40]\r
movdqa xmm5, [rsp + 0x50]\r
\r
+ StuffRsb64\r
rsm\r
\r
BITS 16\r
;\r
; Semaphore code running in 32-bit mode\r
;\r
+BITS 32\r
global ASM_PFX(SmmRelocationSemaphoreComplete32)\r
ASM_PFX(SmmRelocationSemaphoreComplete32):\r
- ;\r
- ; mov byte ptr [], 1\r
- ;\r
- db 0xc6, 0x5\r
-ASM_PFX(mRebasedFlagAddr32): dd 0\r
- db 1\r
- ;\r
- ; jmp dword ptr []\r
- ;\r
- db 0xff, 0x25\r
-ASM_PFX(mSmmRelocationOriginalAddressPtr32): dd 0\r
+ push eax\r
+ mov eax, strict dword 0 ; source operand will be patched\r
+ASM_PFX(gPatchRebasedFlagAddr32):\r
+ mov byte [eax], 1\r
+ pop eax\r
+ jmp dword [dword 0] ; destination will be patched\r
+ASM_PFX(gPatchSmmRelocationOriginalAddressPtr32):\r
\r
+BITS 64\r
global ASM_PFX(PiSmmCpuSmmInitFixupAddress)\r
ASM_PFX(PiSmmCpuSmmInitFixupAddress):\r
lea rax, [@LongMode]\r
- lea rcx, [ASM_PFX(gSmmJmpAddr)]\r
- mov qword [rcx], rax\r
+ lea rcx, [@PatchLongModeOffset - 6]\r
+ mov dword [rcx], eax\r
\r
lea rax, [ASM_PFX(SmmStartup)]\r
lea rcx, [@L1]\r