;
; Source: UefiCpuPkg/Library/SmmCpuFeaturesLib/X64/SmiEntry.nasm (mirror_edk2.git)
; Last commit shown: "UefiCpuPkg: Update SmmCpuFeatureLib pass XCODE5 tool chain"
;
;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution.  The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Module Name:
;
;   SmiEntry.nasm
;
; Abstract:
;
;   Code template of the SMI handler for a particular processor
;
;-------------------------------------------------------------------------------
;
; Variables referenced by C code
;

%define MSR_IA32_MISC_ENABLE  0x1A0
%define MSR_EFER              0xc0000080
%define MSR_EFER_XD           0x800        ; EFER.NXE (No-eXecute Enable) bit mask

;
; Constants relating to TXT_PROCESSOR_SMM_DESCRIPTOR
;
%define DSC_OFFSET            0xfb00       ; descriptor offset from SMBASE (read via cs: in 16-bit entry)
%define DSC_GDTPTR            0x48
%define DSC_GDTSIZ            0x50
%define DSC_CS                0x14
%define DSC_DS                0x16
%define DSC_SS                0x18
%define DSC_OTHERSEG          0x1a
;
; Constants relating to CPU State Save Area
;
%define SSM_DR6               0xffd0
%define SSM_DR7               0xffc8

; GDT selectors used by the mode-switch code below
%define PROTECT_MODE_CS       0x8
%define PROTECT_MODE_DS       0x20
%define LONG_MODE_CS          0x38
%define TSS_SEGMENT           0x40
%define GDT_SIZE              0x50

extern  ASM_PFX(SmiRendezvous)
extern  ASM_PFX(gStmSmiHandlerIdtr)
extern  ASM_PFX(CpuSmmDebugEntry)
extern  ASM_PFX(CpuSmmDebugExit)

global  ASM_PFX(gStmSmbase)
global  ASM_PFX(gStmXdSupported)
global  ASM_PFX(gStmSmiStack)
global  ASM_PFX(gStmSmiCr3)
global  ASM_PFX(gcStmSmiHandlerTemplate)
global  ASM_PFX(gcStmSmiHandlerSize)
global  ASM_PFX(gcStmSmiHandlerOffset)

;
; C code patches the template through these symbols.  Each EQU resolves to the
; immediate operand of the 'mov reg, strict ...' instruction that directly
; precedes the corresponding *Patch label: a 4-byte immediate lives at
; label - 4, a 1-byte immediate at label - 1.  Do not reorder or resize those
; instructions without updating these offsets.
;
ASM_PFX(gStmSmbase)       EQU StmSmbasePatch - 4
ASM_PFX(gStmSmiStack)     EQU StmSmiStackPatch - 4
ASM_PFX(gStmSmiCr3)       EQU StmSmiCr3Patch - 4
ASM_PFX(gStmXdSupported)  EQU StmXdSupportedPatch - 1

    DEFAULT REL
    SECTION .text
;
; SMI handler template, copied to each CPU's SMBASE + 0x8000 by C code.
; Flow: 16-bit real mode -> flat protected mode -> long mode -> CommonHandler
; -> RSM.  _StmSmiHandler is an alternate entry used when an STM is active.
;
BITS 16
ASM_PFX(gcStmSmiHandlerTemplate):
_StmSmiEntryPoint:
    ; bx = offset of _StmGdtDesc within this CPU's SMI entry segment
    mov     bx, _StmGdtDesc - _StmSmiEntryPoint + 0x8000
    mov     ax, [cs:DSC_OFFSET + DSC_GDTSIZ]
    dec     ax
    mov     [cs:bx], ax                     ; GDT limit = size - 1
    mov     eax, [cs:DSC_OFFSET + DSC_GDTPTR]
    mov     [cs:bx + 2], eax                ; GDT base
o32 lgdt    [cs:bx]                         ; lgdt fword ptr cs:[bx]
    mov     ax, PROTECT_MODE_CS
    mov     [cs:bx-0x2],ax                  ; selector part of the far-jump operand below
;
; The dword immediate is patched with this CPU's SMBASE via gStmSmbase
; (= StmSmbasePatch - 4).
;
o32 mov     edi, strict dword 0
StmSmbasePatch:
    lea     eax, [edi + (@ProtectedMode - _StmSmiEntryPoint) + 0x8000]
    mov     [cs:bx-0x6],eax                 ; offset part of the far-jump operand below
    mov     ebx, cr0
    and     ebx, 0x9ffafff3                 ; clear CD/NW/PG and reserved state
    or      ebx, 0x23                       ; set PE, NE, MP
    mov     cr0, ebx
    jmp     dword 0x0:0x0                   ; operand bytes live at [cs:bx-0x6]/[cs:bx-0x2]
_StmGdtDesc:
    DW 0
    DD 0

BITS 32
@ProtectedMode:
    mov     ax, PROTECT_MODE_DS
o16 mov     ds, ax
o16 mov     es, ax
o16 mov     fs, ax
o16 mov     gs, ax
o16 mov     ss, ax
;
; The dword immediate is patched with this CPU's SMI stack via gStmSmiStack.
;
    mov     esp, strict dword 0
StmSmiStackPatch:
    jmp     ProtFlatMode

BITS 64
ProtFlatMode:
;
; The dword immediate is patched with the SMM page-table root via gStmSmiCr3.
;
    mov     eax, strict dword 0
StmSmiCr3Patch:
    mov     cr3, rax
    mov     eax, 0x668                      ; as cr4.PGE is not set here, refresh cr3
    mov     cr4, rax                        ; in PreModifyMtrrs() to flush TLB.
; Load TSS
    sub     esp, 8                          ; reserve room in stack
    sgdt    [rsp]
    mov     eax, [rsp + 2]                  ; eax = GDT base
    add     esp, 8
    mov     dl, 0x89
    mov     [rax + TSS_SEGMENT + 5], dl     ; clear busy flag
    mov     eax, TSS_SEGMENT
    ltr     ax

; enable NXE if supported
;
; The byte immediate (0/1) is patched via gStmXdSupported.
;
    mov     al, strict byte 1
StmXdSupportedPatch:
    cmp     al, 0
    jz      @SkipXd
;
; Check XD disable bit
;
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    sub     esp, 4
    push    rdx                             ; save MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2                       ; MSR_IA32_MISC_ENABLE[34]
    jz      .0
    and     dx, 0xFFFB                      ; clear XD Disable bit if it is set
    wrmsr
.0:
    mov     ecx, MSR_EFER
    rdmsr
    or      ax, MSR_EFER_XD                 ; enable NXE
    wrmsr
    jmp     @XdDone
@SkipXd:
    sub     esp, 8                          ; XD unsupported: adjust stack in place of the save above
@XdDone:

; Switch into @LongMode
    push    LONG_MODE_CS                    ; push cs hardcore here
    call    Base                            ; push return address for retf later
Base:
    add     dword [rsp], @LongMode - Base; offset for far retf, seg is the 1st arg

    mov     ecx, MSR_EFER
    rdmsr
    or      ah, 1                           ; enable LME
    wrmsr
    mov     rbx, cr0
    or      ebx, 0x80010023                 ; enable paging + WP + NE + MP + PE
    mov     cr0, rbx
    retf
@LongMode:                                  ; long mode (64-bit code) starts here
;
; The qword immediate is fixed up at load time by
; SmmCpuFeaturesLibStmSmiEntryFixupAddress (XCODE5 cannot emit the needed
; absolute relocation at build time).
;
    mov     rax, strict qword 0             ; mov rax, ASM_PFX(gStmSmiHandlerIdtr)
StmSmiEntrySmiHandlerIdtrAbsAddr:
    lidt    [rax]
    lea     ebx, [rdi + DSC_OFFSET]         ; rdi still holds the patched SMBASE
    mov     ax, [rbx + DSC_DS]
    mov     ds, eax
    mov     ax, [rbx + DSC_OTHERSEG]
    mov     es, eax
    mov     fs, eax
    mov     gs, eax
    mov     ax, [rbx + DSC_SS]
    mov     ss, eax
;
; The qword immediate is fixed up at load time with &CommonHandler.
;
    mov     rax, strict qword 0             ; mov rax, CommonHandler
StmSmiEntryCommonHandlerAbsAddr:
    jmp     rax
CommonHandler:
    mov     rbx, [rsp + 0x08]               ; rbx <- CpuIndex

    ;
    ; Save FP registers
    ;
    sub     rsp, 0x200
    fxsave64 [rsp]

    add     rsp, -0x20                      ; 32-byte shadow space for the calls below

    mov     rcx, rbx                        ; arg1 = CpuIndex
    call    ASM_PFX(CpuSmmDebugEntry)

    mov     rcx, rbx
    call    ASM_PFX(SmiRendezvous)

    mov     rcx, rbx
    call    ASM_PFX(CpuSmmDebugExit)

    add     rsp, 0x20

    ;
    ; Restore FP registers
    ;
    fxrstor64 [rsp]

    add     rsp, 0x200

    lea     rax, [ASM_PFX(gStmXdSupported)]
    mov     al, [rax]
    cmp     al, 0
    jz      .1
    pop     rdx                             ; get saved MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2
    jz      .1
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    or      dx, BIT2                        ; set XD Disable bit if it was set before entering into SMM
    wrmsr

.1:
    rsm

;
; Alternate entry when an STM owns the early transition: only the IDT, CR0,
; CR4 and XD state still need to be set up before joining CommonHandler.
;
_StmSmiHandler:
;
; Check XD disable bit
;
    xor     r8, r8
    lea     rax, [ASM_PFX(gStmXdSupported)]
    mov     al, [rax]
    cmp     al, 0
    jz      @StmXdDone
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    mov     r8, rdx                         ; save MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2                       ; MSR_IA32_MISC_ENABLE[34]
    jz      .0
    and     dx, 0xFFFB                      ; clear XD Disable bit if it is set
    wrmsr
.0:
    mov     ecx, MSR_EFER
    rdmsr
    or      ax, MSR_EFER_XD                 ; enable NXE
    wrmsr
@StmXdDone:
    push    r8

    ; below step is needed, because STM does not run above code.
    ; we have to run below code to set IDT/CR0/CR4
    mov     rax, strict qword 0             ; mov rax, ASM_PFX(gStmSmiHandlerIdtr)
StmSmiHandlerIdtrAbsAddr:
    lidt    [rax]

    mov     rax, cr0
    or      eax, 0x80010023                 ; enable paging + WP + NE + MP + PE
    mov     cr0, rax
    mov     rax, cr4
    mov     eax, 0x668                      ; as cr4.PGE is not set here, refresh cr3
    mov     cr4, rax                        ; in PreModifyMtrrs() to flush TLB.
    ; STM init finish
    jmp     CommonHandler

; Size of the whole template and offset of the STM entry, consumed by C code
; when installing the handler at each SMBASE.
ASM_PFX(gcStmSmiHandlerSize)    : DW $ - _StmSmiEntryPoint
ASM_PFX(gcStmSmiHandlerOffset)  : DW _StmSmiHandler - _StmSmiEntryPoint
1c7a65eb
LG
267\r
;------------------------------------------------------------------------------
; VOID SmmCpuFeaturesLibStmSmiEntryFixupAddress (VOID)
;
; Patches the three 'mov rax, strict qword 0' placeholders in the SMI handler
; template with run-time absolute addresses, because the XCODE5 tool chain
; cannot emit 64-bit absolute relocations at build time.  Each *AbsAddr label
; immediately follows its 10-byte 'mov rax, imm64', so the imm64 operand is
; the 8 bytes at label - 8.
;
; Clobbers: rax, rcx (both volatile in the MS x64 ABI used by EFI).
;------------------------------------------------------------------------------
global ASM_PFX(SmmCpuFeaturesLibStmSmiEntryFixupAddress)
ASM_PFX(SmmCpuFeaturesLibStmSmiEntryFixupAddress):
    ; Both IDTR placeholders receive &gStmSmiHandlerIdtr.
    lea     rax, [ASM_PFX(gStmSmiHandlerIdtr)]
    lea     rcx, [StmSmiEntrySmiHandlerIdtrAbsAddr]
    mov     qword [rcx - 8], rax
    lea     rcx, [StmSmiHandlerIdtrAbsAddr]
    mov     qword [rcx - 8], rax

    ; The jump target placeholder receives &CommonHandler.
    lea     rax, [CommonHandler]
    lea     rcx, [StmSmiEntryCommonHandlerAbsAddr]
    mov     qword [rcx - 8], rax
    ret