;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution. The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Module Name:
;
;   SmmInit.nasm
;
; Abstract:
;
;   Functions for relocating SMBASE's for all processors
;
;-------------------------------------------------------------------------------

extern ASM_PFX(SmmInitHandler)
extern ASM_PFX(mRebasedFlag)
extern ASM_PFX(mSmmRelocationOriginalAddress)

global ASM_PFX(gPatchSmmCr3)
global ASM_PFX(gSmmCr4)
global ASM_PFX(gSmmCr0)
global ASM_PFX(gSmmJmpAddr)
global ASM_PFX(gSmmInitStack)
global ASM_PFX(gcSmiInitGdtr)
global ASM_PFX(gcSmmInitSize)
global ASM_PFX(gcSmmInitTemplate)
global ASM_PFX(mRebasedFlagAddr32)
global ASM_PFX(mSmmRelocationOriginalAddressPtr32)

    DEFAULT REL
    SECTION .text

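;
; 10-byte GDTR image (2-byte limit, 8-byte base) consumed by the o32 lgdt in
; SmmStartup below (which reads only the low 6 bytes); zero here, presumably
; filled in with the real GDT limit/base by the C code at runtime (assumption).
;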
ASM_PFX(gcSmiInitGdtr):
            DW      0
            DQ      0

global ASM_PFX(SmmStartup)

BITS 16
ASM_PFX(SmmStartup):
    mov     eax, 0x80000001             ; read capability
    cpuid
    mov     ebx, edx                    ; rdmsr will change edx. keep it in ebx.
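    ;
    ; The immediate of the next instruction is patched at runtime with the SMM
    ; page-table address (gPatchSmmCr3 marks the patch point, presumably
    ; written by the C code via PatchInstructionX86()), so a CR3 value that is
    ; only known at runtime can be loaded from this 16-bit code.
    ;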
    mov     eax, strict dword 0         ; source operand will be patched
ASM_PFX(gPatchSmmCr3):
    mov     cr3, eax
o32 lgdt    [cs:ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]
    DB      0x66, 0xb8                  ; mov eax, imm32
ASM_PFX(gSmmCr4): DD 0
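    ; AH covers EAX bits 8-15, so "or ah, 2" sets bit 9 of the patched CR4
    ; image, i.e. CR4.OSFXSR, which is what allows the XMM/SSE instructions
    ; used after the switch to long mode.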
    or      ah, 2                       ; enable XMM registers access
    mov     cr4, eax
    mov     ecx, 0xc0000080             ; IA32_EFER MSR
    rdmsr
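    ; AH holds EFER bits 8-15: BIT0 of AH is EFER.LME (bit 8) and BIT3 of AH is
    ; EFER.NXE (bit 11); NXE is only set if CPUID.80000001h:EDX[20] (saved in
    ; EBX above) reported XD support.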
    or      ah, BIT0                    ; set LME bit
    test    ebx, BIT20                  ; check NXE capability
    jz      .1
    or      ah, BIT3                    ; set NXE bit
.1:
    wrmsr
    DB      0x66, 0xb8                  ; mov eax, imm32
ASM_PFX(gSmmCr0): DD 0
    mov     cr0, eax                    ; enable protected mode & paging
    DB      0x66, 0xea                  ; far jmp to long mode
ASM_PFX(gSmmJmpAddr): DQ 0 ; @LongMode
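    ;
    ; The 0x66, 0xEA opcode above takes a 6-byte ptr16:32 operand stored here.
    ; PiSmmCpuSmmInitFixupAddress() at the end of this file writes the runtime
    ; flat address of @LongMode into this slot; the long-mode code segment
    ; selector part of the far pointer is presumably set up by the C code
    ; (assumption).
    ;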

BITS 64
@LongMode:                              ; long-mode starts here
    DB      0x48, 0xbc                  ; mov rsp, imm64
ASM_PFX(gSmmInitStack): DQ 0
    and     sp, 0xfff0                  ; make sure RSP is 16-byte aligned
    ;
    ; According to the X64 calling convention, XMM0~5 are volatile, so we need
    ; to save them before calling the C function.
    ;
    sub     rsp, 0x60
    movdqa  [rsp], xmm0
    movdqa  [rsp + 0x10], xmm1
    movdqa  [rsp + 0x20], xmm2
    movdqa  [rsp + 0x30], xmm3
    movdqa  [rsp + 0x40], xmm4
    movdqa  [rsp + 0x50], xmm5

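    ;
    ; The Microsoft x64 calling convention also requires the caller to reserve
    ; 32 bytes of shadow space for the callee, hence the rsp adjustment around
    ; the call below.
    ;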
    add     rsp, -0x20
    call    ASM_PFX(SmmInitHandler)
    add     rsp, 0x20

    ;
    ; Restore XMM0~5 after calling the C function.
    ;
    movdqa  xmm0, [rsp]
    movdqa  xmm1, [rsp + 0x10]
    movdqa  xmm2, [rsp + 0x20]
    movdqa  xmm3, [rsp + 0x30]
    movdqa  xmm4, [rsp + 0x40]
    movdqa  xmm5, [rsp + 0x50]

    rsm

BITS 16
ASM_PFX(gcSmmInitTemplate):
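    ;
    ; This 16-bit stub is presumably copied to the SMI entry point at
    ; SMBASE + 0x8000. @L1 below holds the flat address of SmmStartup, stored
    ; at boot by PiSmmCpuSmmInitFixupAddress at the end of this file.
    ; Subtracting 0x30000 (the power-on default SMBASE, which is also the CS
    ; base when this stub runs) turns that flat address into a CS-relative
    ; offset for the jmp.
    ;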
    mov     ebp, [cs:@L1 - ASM_PFX(gcSmmInitTemplate) + 0x8000]
    sub     ebp, 0x30000
    jmp     ebp
@L1:
    DQ      0 ; ASM_PFX(SmmStartup)

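;
; Number of bytes in gcSmmInitTemplate; presumably used by the C code to know
; how much of the template to copy to the SMI entry point (assumption).
;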
ASM_PFX(gcSmmInitSize): DW $ - ASM_PFX(gcSmmInitTemplate)

BITS 64
global ASM_PFX(SmmRelocationSemaphoreComplete)
ASM_PFX(SmmRelocationSemaphoreComplete):
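    ;
    ; Runs at the end of a CPU's SMBASE relocation (judging by the symbol
    ; names): set the byte that mRebasedFlag points to, signaling completion,
    ; then resume the original SMI handler saved in
    ; mSmmRelocationOriginalAddress.
    ;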
    push    rax
    mov     rax, [ASM_PFX(mRebasedFlag)]
    mov     byte [rax], 1
    pop     rax
    jmp     [ASM_PFX(mSmmRelocationOriginalAddress)]

;
; Semaphore code running in 32-bit mode
;
global ASM_PFX(SmmRelocationSemaphoreComplete32)
ASM_PFX(SmmRelocationSemaphoreComplete32):
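    ;
    ; Same semaphore sequence, but executed while the CPU is still in 32-bit
    ; mode, so the instructions are emitted as raw bytes and the two 32-bit
    ; absolute address slots (mRebasedFlagAddr32 and
    ; mSmmRelocationOriginalAddressPtr32) are presumably patched by the C code
    ; at runtime (assumption).
    ;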
    ;
    ; mov byte ptr [], 1
    ;
    db      0xc6, 0x5
ASM_PFX(mRebasedFlagAddr32): dd 0
    db      1
    ;
    ; jmp dword ptr []
    ;
    db      0xff, 0x25
ASM_PFX(mSmmRelocationOriginalAddressPtr32): dd 0

global ASM_PFX(PiSmmCpuSmmInitFixupAddress)
ASM_PFX(PiSmmCpuSmmInitFixupAddress):
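    ;
    ; Stores the runtime addresses of @LongMode and SmmStartup into the
    ; patchable slots above (gSmmJmpAddr and @L1), so the 16-bit template
    ; needs no absolute relocations; presumably called once by the C code
    ; before the template is copied into place (assumption).
    ;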
    lea     rax, [@LongMode]
    lea     rcx, [ASM_PFX(gSmmJmpAddr)]
    mov     qword [rcx], rax

    lea     rax, [ASM_PFX(SmmStartup)]
    lea     rcx, [@L1]
    mov     qword [rcx], rax
    ret