; SmiEntry.nasm (IA32) -- SMI handler code template for PiSmmCpuDxeSmm
;------------------------------------------------------------------------------ ;
; Copyright (c) 2016 - 2019, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution.  The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Module Name:
;
;   SmiEntry.nasm
;
; Abstract:
;
;   Code template of the SMI handler for a particular processor
;
;-------------------------------------------------------------------------------

%include "StuffRsbNasm.inc"
%include "Nasm.inc"

;
; CET (Control-flow Enforcement Technology) MSR numbers and flag bits.
;
%define MSR_IA32_S_CET                     0x6A2   ; supervisor CET configuration MSR
%define MSR_IA32_CET_SH_STK_EN             0x1     ; enable supervisor shadow stack
%define MSR_IA32_CET_WR_SHSTK_EN           0x2
%define MSR_IA32_CET_ENDBR_EN              0x4
%define MSR_IA32_CET_LEG_IW_EN             0x8
%define MSR_IA32_CET_NO_TRACK_EN           0x10
%define MSR_IA32_CET_SUPPRESS_DIS          0x20
%define MSR_IA32_CET_SUPPRESS              0x400
%define MSR_IA32_CET_TRACKER               0x800
%define MSR_IA32_PL0_SSP                   0x6A4   ; ring-0 shadow-stack pointer MSR

%define CR4_CET                            0x800000 ; CR4.CET (bit 23)

%define MSR_IA32_MISC_ENABLE 0x1A0   ; bit 34 (BIT2 of EDX half) = XD Disable
%define MSR_EFER      0xc0000080
%define MSR_EFER_XD   0x800          ; EFER.NXE: no-execute paging enable

;
; Constants relating to PROCESSOR_SMM_DESCRIPTOR
; (field offsets within the descriptor located at SMBASE + DSC_OFFSET)
;
%define DSC_OFFSET 0xfb00            ; descriptor offset from SMBASE
%define DSC_GDTPTR 0x30              ; GDT base address field
%define DSC_GDTSIZ 0x38              ; GDT size (limit + 1) field
%define DSC_CS 14
%define DSC_DS 16
%define DSC_SS 18
%define DSC_OTHERSEG 20

; Selectors in the GDT referenced above.
%define PROTECT_MODE_CS 0x8
%define PROTECT_MODE_DS 0x20
%define TSS_SEGMENT 0x40

extern ASM_PFX(SmiRendezvous)
extern ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))
extern ASM_PFX(CpuSmmDebugEntry)
extern ASM_PFX(CpuSmmDebugExit)

global ASM_PFX(gcSmiHandlerTemplate)
global ASM_PFX(gcSmiHandlerSize)
; gPatch*/mPatch* labels below mark 32-bit (or 8-bit) immediates inside the
; template that are patched with per-CPU values before the template is used
; (see the "source operand will be patched" notes in the code).
global ASM_PFX(gPatchSmiCr3)
global ASM_PFX(gPatchSmiStack)
global ASM_PFX(gPatchSmbase)
extern ASM_PFX(mXdSupported)
global ASM_PFX(gPatchXdSupported)
extern ASM_PFX(gSmiHandlerIdtr)

extern ASM_PFX(mCetSupported)
global ASM_PFX(mPatchCetSupported)
global ASM_PFX(mPatchCetPl0Ssp)
global ASM_PFX(mPatchCetInterruptSsp)

    SECTION .text
;
; SMI handler template. Execution starts at _SmiEntryPoint in 16-bit mode at
; SMBASE + 0x8000 (all real-mode address computations below add 0x8000 and are
; relative to _SmiEntryPoint). The patched SMBASE value lands in EDI at
; gPatchSmbase and is reused later to locate the PROCESSOR_SMM_DESCRIPTOR.
;
    BITS 16
ASM_PFX(gcSmiHandlerTemplate):
_SmiEntryPoint:
    ; bx = 16-bit offset of _GdtDesc within the SMBASE segment; used both as
    ; the GDT pseudo-descriptor and (bx-6/bx-2) to patch the far-jump below.
    mov     bx, _GdtDesc - _SmiEntryPoint + 0x8000
    mov     ax,[cs:DSC_OFFSET + DSC_GDTSIZ]
    dec     ax                             ; GDT limit = size - 1
    mov     [cs:bx], ax
    mov     eax, [cs:DSC_OFFSET + DSC_GDTPTR]
    mov     [cs:bx + 2], eax
    mov     ebp, eax                       ; ebp = GDT base (used later for TSS descriptor)
o32 lgdt    [cs:bx]                        ; lgdt fword ptr cs:[bx]
    ; Patch the selector (at bx-2) and offset (at bx-6) immediates of the
    ; "jmp dword 0x0:0x0" below, which sits directly before _GdtDesc.
    mov     ax, PROTECT_MODE_CS
    mov     [cs:bx-0x2],ax
    mov     edi, strict dword 0            ; source operand will be patched (SMBASE)
ASM_PFX(gPatchSmbase):
    lea     eax, [edi + (@32bit - _SmiEntryPoint) + 0x8000]
    mov     [cs:bx-0x6],eax
    ; Enter protected mode: clear CD/NW/AM/WP/TS/EM, set PE|MP|NE (0x23).
    mov     ebx, cr0
    and     ebx, 0x9ffafff3
    or      ebx, 0x23
    mov     cr0, ebx
    jmp     dword 0x0:0x0                  ; immediates patched above -> @32bit in PROTECT_MODE_CS
_GdtDesc:
    DW 0                                   ; GDT limit (filled in above)
    DD 0                                   ; GDT base  (filled in above)

    BITS 32
@32bit:
    ; Load flat protected-mode data selectors.
    mov     ax, PROTECT_MODE_DS
o16 mov     ds, ax
o16 mov     es, ax
o16 mov     fs, ax
o16 mov     gs, ax
o16 mov     ss, ax
    mov esp, strict dword 0                ; source operand will be patched (per-CPU SMM stack)
ASM_PFX(gPatchSmiStack):
    mov     eax, ASM_PFX(gSmiHandlerIdtr)
    lidt    [eax]
    jmp     ProtFlatMode

ProtFlatMode:
    mov eax, strict dword 0                ; source operand will be patched (SMM page-table root)
ASM_PFX(gPatchSmiCr3):
    mov     cr3, eax
;
; Need to test for CR4 specific bit support
;
    mov     eax, 1
    cpuid                                  ; use CPUID to determine if specific CR4 bits are supported
    xor     eax, eax                       ; Clear EAX (accumulates CR4 feature bits)
    test    edx, BIT2                      ; Check for DE capabilities
    jz      .0
    or      eax, BIT3                      ; CR4.DE
.0:
    test    edx, BIT6                      ; Check for PAE capabilities
    jz      .1
    or      eax, BIT5                      ; CR4.PAE
.1:
    test    edx, BIT7                      ; Check for MCE capabilities
    jz      .2
    or      eax, BIT6                      ; CR4.MCE
.2:
    test    edx, BIT24                     ; Check for FXSR capabilities
    jz      .3
    or      eax, BIT9                      ; CR4.OSFXSR
.3:
    test    edx, BIT25                     ; Check for SSE capabilities
    jz      .4
    or      eax, BIT10                     ; CR4.OSXMMEXCPT
.4:                                        ; as cr4.PGE is not set here, refresh cr3
    mov     cr4, eax                       ; in PreModifyMtrrs() to flush TLB.

    ; If the SMM stack guard feature is enabled, load the TSS so stack-fault
    ; handling can switch stacks.
    cmp     byte [dword ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))], 0
    jz      .6
; Load TSS
    mov     byte [ebp + TSS_SEGMENT + 5], 0x89 ; clear busy flag in the TSS descriptor
    mov     eax, TSS_SEGMENT
    ltr     ax
.6:

; enable NXE if supported
    mov     al, strict byte 1              ; source operand may be patched
ASM_PFX(gPatchXdSupported):
    cmp     al, 0
    jz      @SkipXd
;
; Check XD disable bit
;
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    push    edx                            ; save MSR_IA32_MISC_ENABLE[63-32] (restored at .7 on exit)
    test    edx, BIT2                      ; MSR_IA32_MISC_ENABLE[34]
    jz      .5
    and     dx, 0xFFFB                     ; clear XD Disable bit if it is set
    wrmsr
.5:
    mov     ecx, MSR_EFER
    rdmsr
    or      ax, MSR_EFER_XD                ; enable NXE
    wrmsr
    jmp     @XdDone
@SkipXd:
    sub     esp, 4                         ; balance the "push edx" of the XD path so the
                                           ; stack layout is identical on both paths
@XdDone:

    ; Turn paging on; NX paging attributes (if enabled above) take effect now.
    mov     ebx, cr0
    or      ebx, 0x80010023                ; enable paging + WP + NE + MP + PE
    mov     cr0, ebx
    ; Load the final data segments from the PROCESSOR_SMM_DESCRIPTOR
    ; (edi still holds the patched SMBASE from gPatchSmbase).
    lea     ebx, [edi + DSC_OFFSET]
    mov     ax, [ebx + DSC_DS]
    mov     ds, eax
    mov     ax, [ebx + DSC_OTHERSEG]
    mov     es, eax
    mov     fs, eax
    mov     gs, eax
    mov     ax, [ebx + DSC_SS]
    mov     ss, eax

    mov     ebx, [esp + 4]                 ; ebx <- CpuIndex (presumably stored on the
                                           ; patched per-CPU stack by stack setup -- confirm)

; enable CET if supported
    mov     al, strict byte 1              ; source operand may be patched
ASM_PFX(mPatchCetSupported):
    cmp     al, 0
    jz      CetDone

    ; Save the incoming MSR_IA32_S_CET and MSR_IA32_PL0_SSP values on the
    ; stack; they are popped back in reverse order at CetDone2 below.
    mov     ecx, MSR_IA32_S_CET
    rdmsr
    push    edx
    push    eax

    mov     ecx, MSR_IA32_PL0_SSP
    rdmsr
    push    edx
    push    eax

    ; Enable the supervisor shadow stack.
    mov     ecx, MSR_IA32_S_CET
    mov     eax, MSR_IA32_CET_SH_STK_EN
    xor     edx, edx
    wrmsr

    mov     ecx, MSR_IA32_PL0_SSP
    mov     eax, strict dword 0            ; source operand will be patched (SMM PL0 SSP)
ASM_PFX(mPatchCetPl0Ssp):
    xor     edx, edx
    wrmsr
    ; WP is temporarily cleared so the stores below can write the shadow-stack
    ; token (the page is presumably mapped read-only -- confirm).
    mov     ecx, cr0
    btr     ecx, 16                        ; clear WP
    mov     cr0, ecx
    mov     [eax], eax                     ; reload SSP, and clear busyflag.
    xor     ecx, ecx
    mov     [eax + 4], ecx

    mov     eax, strict dword 0            ; source operand will be patched (interrupt SSP, 0 = none)
ASM_PFX(mPatchCetInterruptSsp):
    cmp     eax, 0
    jz      CetInterruptDone
    mov     [eax], eax                     ; reload SSP, and clear busyflag.
    xor     ecx, ecx
    mov     [eax + 4], ecx
CetInterruptDone:

    mov     ecx, cr0
    bts     ecx, 16                        ; set WP
    mov     cr0, ecx

    ; 0x668 = DE|PAE|MCE|OSFXSR|OSXMMEXCPT. NOTE(review): this overwrites the
    ; feature-probed CR4 value from .4 with a fixed mask plus CET.
    mov     eax, 0x668 | CR4_CET
    mov     cr4, eax

    SETSSBSY                               ; mark the supervisor shadow stack busy

CetDone:

    ; C-side handlers, each called as fn(CpuIndex) via cdecl.
    push    ebx
    mov     eax, ASM_PFX(CpuSmmDebugEntry)
    call    eax
    add     esp, 4

    push    ebx
    mov     eax, ASM_PFX(SmiRendezvous)
    call    eax
    add     esp, 4

    push    ebx
    mov     eax, ASM_PFX(CpuSmmDebugExit)
    call    eax
    add     esp, 4

    ; Restore CET state saved before CetDone (pops mirror the pushes above).
    mov     eax, ASM_PFX(mCetSupported)
    mov     al, [eax]
    cmp     al, 0
    jz      CetDone2

    mov     eax, 0x668
    mov     cr4, eax                       ; disable CET

    mov     ecx, MSR_IA32_PL0_SSP
    pop     eax
    pop     edx
    wrmsr

    mov     ecx, MSR_IA32_S_CET
    pop     eax
    pop     edx
    wrmsr
CetDone2:

    ; Restore the XD Disable bit if it was set when the SMI arrived.
    mov     eax, ASM_PFX(mXdSupported)
    mov     al, [eax]
    cmp     al, 0
    jz      .7
    pop     edx                            ; get saved MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2
    jz      .7
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    or      dx, BIT2                       ; set XD Disable bit if it was set before entering into SMM
    wrmsr

.7:

    StuffRsb32                             ; stuff the RSB before RSM (speculative-execution mitigation)
    rsm

; Byte size of the template; used when copying it to each CPU's SMBASE area.
ASM_PFX(gcSmiHandlerSize): DW $ - _SmiEntryPoint
302 | \r | |
;
; PiSmmCpuSmiEntryFixupAddress -- intentionally a no-op here: this flavor of
; the template needs no address fixup (presumably the 64-bit variant patches
; addresses in its copy -- confirm against the X64 SmiEntry.nasm).
;
global ASM_PFX(PiSmmCpuSmiEntryFixupAddress)
ASM_PFX(PiSmmCpuSmiEntryFixupAddress):
    ret