;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
; SPDX-License-Identifier: BSD-2-Clause-Patent
;
; Module Name:
;
;   SmmInit.nasm
;
; Abstract:
;
;   Functions for relocating SMBASE's for all processors
;
;-------------------------------------------------------------------------------

%include "StuffRsbNasm.inc"

extern ASM_PFX(SmmInitHandler)
extern ASM_PFX(mRebasedFlag)
extern ASM_PFX(mSmmRelocationOriginalAddress)

global ASM_PFX(gPatchSmmCr3)
global ASM_PFX(gPatchSmmCr4)
global ASM_PFX(gPatchSmmCr0)
global ASM_PFX(gPatchSmmInitStack)
global ASM_PFX(gcSmiInitGdtr)
global ASM_PFX(gcSmmInitSize)
global ASM_PFX(gcSmmInitTemplate)
global ASM_PFX(gPatchRebasedFlagAddr32)
global ASM_PFX(gPatchSmmRelocationOriginalAddressPtr32)

%define LONG_MODE_CS 0x38

    DEFAULT REL
    SECTION .text

ASM_PFX(gcSmiInitGdtr):
            DW      0
            DQ      0

global ASM_PFX(SmmStartup)
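
;
; SmmStartup is the 16-bit code that gcSmmInitTemplate jumps to during SMBASE
; relocation: it loads the patched CR3/CR4/CR0 values, sets LME (and NXE when
; CPUID 0x80000001 reports it) in IA32_EFER, and far-jumps directly into long
; mode to call SmmInitHandler.
;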

BITS 16
ASM_PFX(SmmStartup):
    mov     eax, 0x80000001             ; read capability
    cpuid
    mov     ebx, edx                    ; rdmsr will change edx. keep it in ebx.
    mov     eax, strict dword 0         ; source operand will be patched
ASM_PFX(gPatchSmmCr3):
    mov     cr3, eax
o32 lgdt    [cs:ebp + (ASM_PFX(gcSmiInitGdtr) - ASM_PFX(SmmStartup))]
    mov     eax, strict dword 0         ; source operand will be patched
ASM_PFX(gPatchSmmCr4):
    or      ah, 2                       ; enable XMM registers access
    mov     cr4, eax
    mov     ecx, 0xc0000080             ; IA32_EFER MSR
    rdmsr
    or      ah, BIT0                    ; set LME bit
    test    ebx, BIT20                  ; check NXE capability
    jz      .1
    or      ah, BIT3                    ; set NXE bit
.1:
    wrmsr
    mov     eax, strict dword 0         ; source operand will be patched
ASM_PFX(gPatchSmmCr0):
    mov     cr0, eax                    ; enable protected mode & paging
    jmp     LONG_MODE_CS : dword 0      ; offset will be patched to @LongMode
@PatchLongModeOffset:

BITS 64
@LongMode:                              ; long-mode starts here
    mov     rsp, strict qword 0         ; source operand will be patched
ASM_PFX(gPatchSmmInitStack):
    and     sp, 0xfff0                  ; make sure RSP is 16-byte aligned
    ;
    ; According to X64 calling convention, XMM0~5 are volatile, we need to save
    ; them before calling C-function.
    ;
    sub     rsp, 0x60
    movdqa  [rsp], xmm0
    movdqa  [rsp + 0x10], xmm1
    movdqa  [rsp + 0x20], xmm2
    movdqa  [rsp + 0x30], xmm3
    movdqa  [rsp + 0x40], xmm4
    movdqa  [rsp + 0x50], xmm5

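    ;
    ; Reserve 32 bytes of shadow space for the call into C code, as required
    ; by the X64 calling convention, and release it afterwards.
    ;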
    add     rsp, -0x20
    call    ASM_PFX(SmmInitHandler)
    add     rsp, 0x20

    ;
    ; Restore XMM0~5 after calling C-function.
    ;
    movdqa  xmm0, [rsp]
    movdqa  xmm1, [rsp + 0x10]
    movdqa  xmm2, [rsp + 0x20]
    movdqa  xmm3, [rsp + 0x30]
    movdqa  xmm4, [rsp + 0x40]
    movdqa  xmm5, [rsp + 0x50]

    StuffRsb64
    rsm

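;
; gcSmmInitTemplate is the real-mode stub placed at the default SMI entry
; point (SMBASE 0x30000 + offset 0x8000) during SMBASE relocation. It loads
; the absolute address of SmmStartup stored at @L1 (fixed up at runtime by
; PiSmmCpuSmmInitFixupAddress), rebases it against the 0x30000 segment base,
; and jumps to it.
;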
BITS 16
ASM_PFX(gcSmmInitTemplate):
    mov ebp, [cs:@L1 - ASM_PFX(gcSmmInitTemplate) + 0x8000]
    sub ebp, 0x30000
    jmp ebp
@L1:
    DQ 0 ; ASM_PFX(SmmStartup)

ASM_PFX(gcSmmInitSize): DW $ - ASM_PFX(gcSmmInitTemplate)

BITS 64
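
;
; SmmRelocationSemaphoreComplete sets the byte pointed to by mRebasedFlag to
; signal that this processor's SMBASE relocation is done, then jumps to the
; original SMI handler saved in mSmmRelocationOriginalAddress.
;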
global ASM_PFX(SmmRelocationSemaphoreComplete)
ASM_PFX(SmmRelocationSemaphoreComplete):
    push rax
    mov rax, [ASM_PFX(mRebasedFlag)]
    mov byte [rax], 1
    pop rax
    jmp [ASM_PFX(mSmmRelocationOriginalAddress)]

;
; Semaphore code running in 32-bit mode
;
BITS 32
global ASM_PFX(SmmRelocationSemaphoreComplete32)
ASM_PFX(SmmRelocationSemaphoreComplete32):
    push eax
    mov eax, strict dword 0 ; source operand will be patched
ASM_PFX(gPatchRebasedFlagAddr32):
    mov byte [eax], 1
    pop eax
    jmp dword [dword 0] ; destination will be patched
ASM_PFX(gPatchSmmRelocationOriginalAddressPtr32):

BITS 64
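
;
; PiSmmCpuSmmInitFixupAddress patches the startup code at runtime: it writes
; the 32-bit offset of @LongMode into the far jump just before
; @PatchLongModeOffset, and stores the 64-bit address of SmmStartup at @L1
; for gcSmmInitTemplate to use.
;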
global ASM_PFX(PiSmmCpuSmmInitFixupAddress)
ASM_PFX(PiSmmCpuSmmInitFixupAddress):
    lea rax, [@LongMode]
    lea rcx, [@PatchLongModeOffset - 6]
    mov dword [rcx], eax

    lea rax, [ASM_PFX(SmmStartup)]
    lea rcx, [@L1]
    mov qword [rcx], rax
    ret