; UefiCpuPkg: Update SmmCpuFeaturesLib to pass the XCODE5 tool chain
; Source: mirror_edk2.git / UefiCpuPkg / Library / SmmCpuFeaturesLib / Ia32 / SmiEntry.nasm
;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution.  The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Module Name:
;
;   SmiEntry.nasm
;
; Abstract:
;
;   Code template of the SMI handler for a particular processor
;
;-------------------------------------------------------------------------------
21%define MSR_IA32_MISC_ENABLE 0x1A0\r
22%define MSR_EFER 0xc0000080\r
23%define MSR_EFER_XD 0x800\r
24\r
25;\r
26; Constants relating to TXT_PROCESSOR_SMM_DESCRIPTOR\r
27;\r
28%define DSC_OFFSET 0xfb00\r
29%define DSC_GDTPTR 0x48\r
30%define DSC_GDTSIZ 0x50\r
31%define DSC_CS 0x14\r
32%define DSC_DS 0x16\r
33%define DSC_SS 0x18\r
34%define DSC_OTHERSEG 0x1a\r
35\r
36%define PROTECT_MODE_CS 0x8\r
37%define PROTECT_MODE_DS 0x20\r
38%define TSS_SEGMENT 0x40\r
39\r
40extern ASM_PFX(SmiRendezvous)\r
41extern ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))\r
42extern ASM_PFX(CpuSmmDebugEntry)\r
43extern ASM_PFX(CpuSmmDebugExit)\r
44\r
45global ASM_PFX(gcStmSmiHandlerTemplate)\r
46global ASM_PFX(gcStmSmiHandlerSize)\r
47global ASM_PFX(gcStmSmiHandlerOffset)\r
48global ASM_PFX(gStmSmiCr3)\r
49global ASM_PFX(gStmSmiStack)\r
50global ASM_PFX(gStmSmbase)\r
51global ASM_PFX(gStmXdSupported)\r
52extern ASM_PFX(gStmSmiHandlerIdtr)\r
53\r
4c34a8ea
CR
54ASM_PFX(gStmSmiCr3) EQU StmSmiCr3Patch - 4\r
55ASM_PFX(gStmSmiStack) EQU StmSmiStackPatch - 4\r
56ASM_PFX(gStmSmbase) EQU StmSmbasePatch - 4\r
57ASM_PFX(gStmXdSupported) EQU StmXdSupportedPatch - 1\r
58\r
09119a00
MK
59 SECTION .text\r
60\r
61BITS 16\r
62ASM_PFX(gcStmSmiHandlerTemplate):\r
63_StmSmiEntryPoint:\r
64 mov bx, _StmGdtDesc - _StmSmiEntryPoint + 0x8000\r
65 mov ax,[cs:DSC_OFFSET + DSC_GDTSIZ]\r
66 dec ax\r
67 mov [cs:bx], ax\r
68 mov eax, [cs:DSC_OFFSET + DSC_GDTPTR]\r
69 mov [cs:bx + 2], eax\r
70 mov ebp, eax ; ebp = GDT base\r
71o32 lgdt [cs:bx] ; lgdt fword ptr cs:[bx]\r
72 mov ax, PROTECT_MODE_CS\r
73 mov [cs:bx-0x2],ax\r
4c34a8ea
CR
74o32 mov edi, strict dword 0\r
75StmSmbasePatch:\r
09119a00
MK
76 lea eax, [edi + (@32bit - _StmSmiEntryPoint) + 0x8000]\r
77 mov [cs:bx-0x6],eax\r
78 mov ebx, cr0\r
79 and ebx, 0x9ffafff3\r
80 or ebx, 0x23\r
81 mov cr0, ebx\r
82 jmp dword 0x0:0x0\r
83_StmGdtDesc:\r
84 DW 0\r
85 DD 0\r
86\r
87BITS 32\r
88@32bit:\r
89 mov ax, PROTECT_MODE_DS\r
90o16 mov ds, ax\r
91o16 mov es, ax\r
92o16 mov fs, ax\r
93o16 mov gs, ax\r
94o16 mov ss, ax\r
4c34a8ea
CR
95 mov esp, strict dword 0\r
96StmSmiStackPatch:\r
09119a00
MK
97 mov eax, ASM_PFX(gStmSmiHandlerIdtr)\r
98 lidt [eax]\r
99 jmp ProtFlatMode\r
100\r
101ProtFlatMode:\r
4c34a8ea
CR
102 mov eax, strict dword 0\r
103StmSmiCr3Patch:\r
09119a00
MK
104 mov cr3, eax\r
105;\r
106; Need to test for CR4 specific bit support\r
107;\r
108 mov eax, 1\r
109 cpuid ; use CPUID to determine if specific CR4 bits are supported\r
110 xor eax, eax ; Clear EAX\r
111 test edx, BIT2 ; Check for DE capabilities\r
112 jz .0\r
113 or eax, BIT3\r
114.0:\r
115 test edx, BIT6 ; Check for PAE capabilities\r
116 jz .1\r
117 or eax, BIT5\r
118.1:\r
119 test edx, BIT7 ; Check for MCE capabilities\r
120 jz .2\r
121 or eax, BIT6\r
122.2:\r
123 test edx, BIT24 ; Check for FXSR capabilities\r
124 jz .3\r
125 or eax, BIT9\r
126.3:\r
127 test edx, BIT25 ; Check for SSE capabilities\r
128 jz .4\r
129 or eax, BIT10\r
130.4: ; as cr4.PGE is not set here, refresh cr3\r
131 mov cr4, eax ; in PreModifyMtrrs() to flush TLB.\r
132\r
133 cmp byte [dword ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))], 0\r
134 jz .6\r
135; Load TSS\r
136 mov byte [ebp + TSS_SEGMENT + 5], 0x89 ; clear busy flag\r
137 mov eax, TSS_SEGMENT\r
138 ltr ax\r
139.6:\r
140\r
141; enable NXE if supported\r
4c34a8ea
CR
142 mov al, strict byte 1\r
143StmXdSupportedPatch:\r
09119a00
MK
144 cmp al, 0\r
145 jz @SkipXd\r
146;\r
147; Check XD disable bit\r
148;\r
149 mov ecx, MSR_IA32_MISC_ENABLE\r
150 rdmsr\r
151 push edx ; save MSR_IA32_MISC_ENABLE[63-32]\r
152 test edx, BIT2 ; MSR_IA32_MISC_ENABLE[34]\r
153 jz .5\r
154 and dx, 0xFFFB ; clear XD Disable bit if it is set\r
155 wrmsr\r
156.5:\r
157 mov ecx, MSR_EFER\r
158 rdmsr\r
159 or ax, MSR_EFER_XD ; enable NXE\r
160 wrmsr\r
161 jmp @XdDone\r
162@SkipXd:\r
163 sub esp, 4\r
164@XdDone:\r
165\r
166 mov ebx, cr0\r
167 or ebx, 0x80010023 ; enable paging + WP + NE + MP + PE\r
168 mov cr0, ebx\r
169 lea ebx, [edi + DSC_OFFSET]\r
170 mov ax, [ebx + DSC_DS]\r
171 mov ds, eax\r
172 mov ax, [ebx + DSC_OTHERSEG]\r
173 mov es, eax\r
174 mov fs, eax\r
175 mov gs, eax\r
176 mov ax, [ebx + DSC_SS]\r
177 mov ss, eax\r
178\r
179CommonHandler:\r
180 mov ebx, [esp + 4] ; CPU Index\r
181 push ebx\r
182 mov eax, ASM_PFX(CpuSmmDebugEntry)\r
183 call eax\r
184 add esp, 4\r
185\r
186 push ebx\r
187 mov eax, ASM_PFX(SmiRendezvous)\r
188 call eax\r
189 add esp, 4\r
190\r
191 push ebx\r
192 mov eax, ASM_PFX(CpuSmmDebugExit)\r
193 call eax\r
194 add esp, 4\r
195\r
196 mov eax, ASM_PFX(gStmXdSupported)\r
197 mov al, [eax]\r
198 cmp al, 0\r
199 jz .7\r
200 pop edx ; get saved MSR_IA32_MISC_ENABLE[63-32]\r
201 test edx, BIT2\r
202 jz .7\r
203 mov ecx, MSR_IA32_MISC_ENABLE\r
204 rdmsr\r
205 or dx, BIT2 ; set XD Disable bit if it was set before entering into SMM\r
206 wrmsr\r
207\r
208.7:\r
209 rsm\r
210\r
211\r
212_StmSmiHandler:\r
213;\r
214; Check XD disable bit\r
215;\r
216 xor esi, esi\r
217 mov eax, ASM_PFX(gStmXdSupported)\r
218 mov al, [eax]\r
219 cmp al, 0\r
220 jz @StmXdDone\r
221 mov ecx, MSR_IA32_MISC_ENABLE\r
222 rdmsr\r
223 mov esi, edx ; save MSR_IA32_MISC_ENABLE[63-32]\r
224 test edx, BIT2 ; MSR_IA32_MISC_ENABLE[34]\r
225 jz .5\r
226 and dx, 0xFFFB ; clear XD Disable bit if it is set\r
227 wrmsr\r
228.5:\r
229 mov ecx, MSR_EFER\r
230 rdmsr\r
231 or ax, MSR_EFER_XD ; enable NXE\r
232 wrmsr\r
233@StmXdDone:\r
234 push esi\r
235\r
236 ; below step is needed, because STM does not run above code.\r
237 ; we have to run below code to set IDT/CR0/CR4\r
238 mov eax, ASM_PFX(gStmSmiHandlerIdtr)\r
239 lidt [eax]\r
240\r
241 mov eax, cr0\r
242 or eax, 0x80010023 ; enable paging + WP + NE + MP + PE\r
243 mov cr0, eax\r
244;\r
245; Need to test for CR4 specific bit support\r
246;\r
247 mov eax, 1\r
248 cpuid ; use CPUID to determine if specific CR4 bits are supported\r
249 mov eax, cr4 ; init EAX\r
250 test edx, BIT2 ; Check for DE capabilities\r
251 jz .0\r
252 or eax, BIT3\r
253.0:\r
254 test edx, BIT6 ; Check for PAE capabilities\r
255 jz .1\r
256 or eax, BIT5\r
257.1:\r
258 test edx, BIT7 ; Check for MCE capabilities\r
259 jz .2\r
260 or eax, BIT6\r
261.2:\r
262 test edx, BIT24 ; Check for FXSR capabilities\r
263 jz .3\r
264 or eax, BIT9\r
265.3:\r
266 test edx, BIT25 ; Check for SSE capabilities\r
267 jz .4\r
268 or eax, BIT10\r
269.4: ; as cr4.PGE is not set here, refresh cr3\r
270 mov cr4, eax ; in PreModifyMtrrs() to flush TLB.\r
271 ; STM init finish\r
272 jmp CommonHandler\r
273\r
274ASM_PFX(gcStmSmiHandlerSize) : DW $ - _StmSmiEntryPoint\r
275ASM_PFX(gcStmSmiHandlerOffset) : DW _StmSmiHandler - _StmSmiEntryPoint\r
1c7a65eb
LG
276\r
277global ASM_PFX(SmmCpuFeaturesLibStmSmiEntryFixupAddress)\r
278ASM_PFX(SmmCpuFeaturesLibStmSmiEntryFixupAddress):\r
279 ret\r