;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2018, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution. The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Module Name:
;
;   SmiEntry.nasm
;
; Abstract:
;
;   Code template of the SMI handler for a particular processor
;
;-------------------------------------------------------------------------------

%include "StuffRsb.inc"

%define MSR_IA32_MISC_ENABLE  0x1A0
%define MSR_EFER              0xc0000080
%define MSR_EFER_XD           0x800

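;
; Note: MSR_IA32_MISC_ENABLE bit 34 is the XD Disable bit.  RDMSR returns
; bits 63:32 of an MSR in EDX, so that bit is tested below as BIT2 of EDX.
;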
;
; Constants relating to TXT_PROCESSOR_SMM_DESCRIPTOR
;
%define DSC_OFFSET 0xfb00
%define DSC_GDTPTR 0x48
%define DSC_GDTSIZ 0x50
%define DSC_CS 0x14
%define DSC_DS 0x16
%define DSC_SS 0x18
%define DSC_OTHERSEG 0x1a

%define PROTECT_MODE_CS 0x8
%define PROTECT_MODE_DS 0x20
%define TSS_SEGMENT 0x40

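; PROTECT_MODE_CS/DS and TSS_SEGMENT are selectors into the GDT that the
; TXT_PROCESSOR_SMM_DESCRIPTOR at DSC_OFFSET points to (layout assumed to be
; prepared by the platform SMM setup code; see the GDT load below).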
extern ASM_PFX(SmiRendezvous)
extern ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))
extern ASM_PFX(CpuSmmDebugEntry)
extern ASM_PFX(CpuSmmDebugExit)

global ASM_PFX(gcStmSmiHandlerTemplate)
global ASM_PFX(gcStmSmiHandlerSize)
global ASM_PFX(gcStmSmiHandlerOffset)
global ASM_PFX(gStmSmiCr3)
global ASM_PFX(gStmSmiStack)
global ASM_PFX(gStmSmbase)
global ASM_PFX(gStmXdSupported)
extern ASM_PFX(gStmSmiHandlerIdtr)

ASM_PFX(gStmSmiCr3)      EQU StmSmiCr3Patch - 4
ASM_PFX(gStmSmiStack)    EQU StmSmiStackPatch - 4
ASM_PFX(gStmSmbase)      EQU StmSmbasePatch - 4
ASM_PFX(gStmXdSupported) EQU StmXdSupportedPatch - 1

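;
; The gStm* symbols above point at the immediate operands of the "strict"
; placeholder instructions in the template below (4 bytes back from each dword
; patch label, 1 byte back from the byte patch label).  The library's C code is
; expected to patch the per-CPU SMBASE, SMI stack, CR3 and XD support flag into
; those operands before the template is copied into place.
;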
    SECTION .text

BITS 16
ASM_PFX(gcStmSmiHandlerTemplate):
_StmSmiEntryPoint:
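    ; The SMI entry starts in real (16-bit) mode at SMBASE + 0x8000.  The code
    ; below builds a GDT pseudo-descriptor in place at _StmGdtDesc from the
    ; base/limit published in the TXT descriptor, patches the far-jump target
    ; with the SMBASE-relative address of @32bit, sets CR0.PE and jumps to
    ; 32-bit protected mode.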
    mov     bx, _StmGdtDesc - _StmSmiEntryPoint + 0x8000
    mov     ax, [cs:DSC_OFFSET + DSC_GDTSIZ]
    dec     ax
    mov     [cs:bx], ax
    mov     eax, [cs:DSC_OFFSET + DSC_GDTPTR]
    mov     [cs:bx + 2], eax
    mov     ebp, eax                      ; ebp = GDT base
o32 lgdt    [cs:bx]                       ; lgdt fword ptr cs:[bx]
    mov     ax, PROTECT_MODE_CS
    mov     [cs:bx-0x2],ax
o32 mov     edi, strict dword 0
StmSmbasePatch:
    lea     eax, [edi + (@32bit - _StmSmiEntryPoint) + 0x8000]
    mov     [cs:bx-0x6],eax
    mov     ebx, cr0
    and     ebx, 0x9ffafff3
    or      ebx, 0x23
    mov     cr0, ebx
    jmp     dword 0x0:0x0
_StmGdtDesc:
    DW 0
    DD 0

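; The 32-bit code below reloads the data segment registers with the flat
; PROTECT_MODE_DS selector, switches ESP to the per-CPU SMI stack patched in
; at StmSmiStackPatch, and loads the SMI IDT before dropping into flat
; protected mode.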
BITS 32
@32bit:
    mov     ax, PROTECT_MODE_DS
o16 mov     ds, ax
o16 mov     es, ax
o16 mov     fs, ax
o16 mov     gs, ax
o16 mov     ss, ax
    mov     esp, strict dword 0
StmSmiStackPatch:
    mov     eax, ASM_PFX(gStmSmiHandlerIdtr)
    lidt    [eax]
    jmp     ProtFlatMode

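; ProtFlatMode loads the patched CR3, uses CPUID leaf 1 to decide which CR4
; feature bits to set, optionally loads the TSS when the SMM stack guard is
; enabled, enables XD/NXE when supported, and finally turns on paging before
; reloading the data segments from the TXT descriptor.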
ProtFlatMode:
    mov     eax, strict dword 0
StmSmiCr3Patch:
    mov     cr3, eax
;
; Need to test for CR4 specific bit support
;
    mov     eax, 1
    cpuid                                 ; use CPUID to determine if specific CR4 bits are supported
    xor     eax, eax                      ; Clear EAX
    test    edx, BIT2                     ; Check for DE capabilities
    jz      .0
    or      eax, BIT3
.0:
    test    edx, BIT6                     ; Check for PAE capabilities
    jz      .1
    or      eax, BIT5
.1:
    test    edx, BIT7                     ; Check for MCE capabilities
    jz      .2
    or      eax, BIT6
.2:
    test    edx, BIT24                    ; Check for FXSR capabilities
    jz      .3
    or      eax, BIT9
.3:
    test    edx, BIT25                    ; Check for SSE capabilities
    jz      .4
    or      eax, BIT10
.4:                                       ; as cr4.PGE is not set here, refresh cr3
    mov     cr4, eax                      ; in PreModifyMtrrs() to flush TLB.

    cmp     byte [dword ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))], 0
    jz      .6
; Load TSS
    mov     byte [ebp + TSS_SEGMENT + 5], 0x89 ; clear busy flag
    mov     eax, TSS_SEGMENT
    ltr     ax
.6:
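    ; Note: the busy flag is cleared above because LTR faults if the TSS
    ; descriptor is already marked busy (e.g. from a previous SMI on this CPU).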

; enable NXE if supported
    mov     al, strict byte 1
StmXdSupportedPatch:
    cmp     al, 0
    jz      @SkipXd
;
; Check XD disable bit
;
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    push    edx                           ; save MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2                     ; MSR_IA32_MISC_ENABLE[34]
    jz      .5
    and     dx, 0xFFFB                    ; clear XD Disable bit if it is set
    wrmsr
.5:
    mov     ecx, MSR_EFER
    rdmsr
    or      ax, MSR_EFER_XD               ; enable NXE
    wrmsr
    jmp     @XdDone
@SkipXd:
    sub     esp, 4
@XdDone:

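; Both paths above leave one 4-byte slot on the stack (the saved high dword of
; MSR_IA32_MISC_ENABLE, or a dummy from "sub esp, 4"), so the CPU index that
; the setup code is assumed to have placed at the top of the SMI stack is now
; found at [esp + 4].  The slot is popped again after CpuSmmDebugExit below.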
    mov     ebx, cr0
    or      ebx, 0x80010023               ; enable paging + WP + NE + MP + PE
    mov     cr0, ebx
    lea     ebx, [edi + DSC_OFFSET]
    mov     ax, [ebx + DSC_DS]
    mov     ds, eax
    mov     ax, [ebx + DSC_OTHERSEG]
    mov     es, eax
    mov     fs, eax
    mov     gs, eax
    mov     ax, [ebx + DSC_SS]
    mov     ss, eax

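; CommonHandler performs the per-CPU SMI work: it calls CpuSmmDebugEntry,
; SmiRendezvous and CpuSmmDebugExit with the CPU index as the argument,
; restores the XD Disable state saved on entry, stuffs the RSB and finally
; executes RSM to leave SMM.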
CommonHandler:
    mov     ebx, [esp + 4]                ; CPU Index
    push    ebx
    mov     eax, ASM_PFX(CpuSmmDebugEntry)
    call    eax
    add     esp, 4

    push    ebx
    mov     eax, ASM_PFX(SmiRendezvous)
    call    eax
    add     esp, 4

    push    ebx
    mov     eax, ASM_PFX(CpuSmmDebugExit)
    call    eax
    add     esp, 4

    mov     eax, ASM_PFX(gStmXdSupported)
    mov     al, [eax]
    cmp     al, 0
    jz      .7
    pop     edx                           ; get saved MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2
    jz      .7
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    or      dx, BIT2                      ; set XD Disable bit if it was set before entering into SMM
    wrmsr

.7:
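    ; Stuff the Return Stack Buffer before RSM to avoid consuming attacker-
    ; controlled return predictions after leaving SMM (CVE-2017-5715
    ; mitigation); the StuffRsb32 macro comes from StuffRsb.inc.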
    StuffRsb32
    rsm


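; _StmSmiHandler is the alternate entry point (exported via
; gcStmSmiHandlerOffset) used when an SMI Transfer Monitor (STM) forwards the
; SMI already in protected mode.  Because the real-mode setup above is skipped
; in that case, the IDT/CR0/CR4 and XD setup is repeated here before joining
; CommonHandler.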
_StmSmiHandler:
;
; Check XD disable bit
;
    xor     esi, esi
    mov     eax, ASM_PFX(gStmXdSupported)
    mov     al, [eax]
    cmp     al, 0
    jz      @StmXdDone
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    mov     esi, edx                      ; save MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2                     ; MSR_IA32_MISC_ENABLE[34]
    jz      .5
    and     dx, 0xFFFB                    ; clear XD Disable bit if it is set
    wrmsr
.5:
    mov     ecx, MSR_EFER
    rdmsr
    or      ax, MSR_EFER_XD               ; enable NXE
    wrmsr
@StmXdDone:
    push    esi

    ; The following setup is needed because the STM does not run the code
    ; above; the IDT, CR0 and CR4 must be configured here.
    mov     eax, ASM_PFX(gStmSmiHandlerIdtr)
    lidt    [eax]

    mov     eax, cr0
    or      eax, 0x80010023               ; enable paging + WP + NE + MP + PE
    mov     cr0, eax
;
; Need to test for CR4 specific bit support
;
    mov     eax, 1
    cpuid                                 ; use CPUID to determine if specific CR4 bits are supported
    mov     eax, cr4                      ; init EAX
    test    edx, BIT2                     ; Check for DE capabilities
    jz      .0
    or      eax, BIT3
.0:
    test    edx, BIT6                     ; Check for PAE capabilities
    jz      .1
    or      eax, BIT5
.1:
    test    edx, BIT7                     ; Check for MCE capabilities
    jz      .2
    or      eax, BIT6
.2:
    test    edx, BIT24                    ; Check for FXSR capabilities
    jz      .3
    or      eax, BIT9
.3:
    test    edx, BIT25                    ; Check for SSE capabilities
    jz      .4
    or      eax, BIT10
.4:                                       ; as cr4.PGE is not set here, refresh cr3
    mov     cr4, eax                      ; in PreModifyMtrrs() to flush TLB.
    ; STM init finish
    jmp     CommonHandler

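; Exported so the consuming C code can tell how many bytes of template to copy
; for each CPU and where _StmSmiHandler lies within the copy (assumed usage).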
ASM_PFX(gcStmSmiHandlerSize)   : DW $ - _StmSmiEntryPoint
ASM_PFX(gcStmSmiHandlerOffset) : DW _StmSmiHandler - _StmSmiEntryPoint

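; SmmCpuFeaturesLibStmSmiEntryFixupAddress is called (presumably by the
; library's C code) to fix up addresses in the template; on IA32 no fixup is
; required, so it simply returns.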
global ASM_PFX(SmmCpuFeaturesLibStmSmiEntryFixupAddress)
ASM_PFX(SmmCpuFeaturesLibStmSmiEntryFixupAddress):
    ret