;------------------------------------------------------------------------------ ;
; Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution. The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Module Name:
;
;   SmiEntry.asm
;
; Abstract:
;
;   Code template of the SMI handler for a particular processor
;
;-------------------------------------------------------------------------------

    .686p
    .model  flat,C
    .xmm

MSR_IA32_MISC_ENABLE  EQU     1A0h
MSR_EFER              EQU     0c0000080h
MSR_EFER_XD           EQU     0800h

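;
; Byte offsets into the per-processor SMM descriptor that the SMI handler
; installation code publishes at SMBASE + DSC_OFFSET; the GDT pointer/size and
; the segment selectors used after the mode switch below are read from it.
;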
DSC_OFFSET            EQU     0fb00h
DSC_GDTPTR            EQU     30h
DSC_GDTSIZ            EQU     38h
DSC_CS                EQU     14
DSC_DS                EQU     16
DSC_SS                EQU     18
DSC_OTHERSEG          EQU     20

PROTECT_MODE_CS       EQU     08h
PROTECT_MODE_DS       EQU     20h
TSS_SEGMENT           EQU     40h

SmiRendezvous         PROTO   C
CpuSmmDebugEntry      PROTO   C
CpuSmmDebugExit       PROTO   C

EXTERNDEF   gcSmiHandlerTemplate:BYTE
EXTERNDEF   gcSmiHandlerSize:WORD
EXTERNDEF   gSmiCr3:DWORD
EXTERNDEF   gSmiStack:DWORD
EXTERNDEF   gSmbase:DWORD
EXTERNDEF   mXdSupported:BYTE
EXTERNDEF   FeaturePcdGet (PcdCpuSmmStackGuard):BYTE
EXTERNDEF   gSmiHandlerIdtr:FWORD

    .code

gcSmiHandlerTemplate    LABEL   BYTE

_SmiEntryPoint:
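;
; The processor enters here on an SMI, executing 16-bit code at SMBASE + 8000h.
; The template is assembled as 32-bit code, so 16-bit forms are emitted as raw
; DB/DW bytes; gSmbase, gSmiStack and gSmiCr3 are immediates that the C code
; patches before the template is copied into place for each CPU.
;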
    DB      0bbh                        ; mov bx, imm16
    DW      offset _GdtDesc - _SmiEntryPoint + 8000h
    DB      2eh, 0a1h                   ; mov ax, cs:[offset16]
    DW      DSC_OFFSET + DSC_GDTSIZ
    dec     eax
    mov     cs:[edi], eax               ; mov cs:[bx], ax
    DB      66h, 2eh, 0a1h              ; mov eax, cs:[offset16]
    DW      DSC_OFFSET + DSC_GDTPTR
    mov     cs:[edi + 2], ax            ; mov cs:[bx + 2], eax
    mov     bp, ax                      ; ebp = GDT base
    DB      66h
    lgdt    fword ptr cs:[edi]          ; lgdt fword ptr cs:[bx]
; Patch ProtectedMode Segment
    DB      0b8h                        ; mov ax, imm16
    DW      PROTECT_MODE_CS             ; set AX for segment directly
    mov     cs:[edi - 2], eax           ; mov cs:[bx - 2], ax
; Patch ProtectedMode entry
    DB      66h, 0bfh                   ; mov edi, SMBASE
gSmbase     DD      ?
    DB      67h
    lea     ax, [edi + (@32bit - _SmiEntryPoint) + 8000h]
    mov     cs:[edi - 6], ax            ; mov cs:[bx - 6], eax
    mov     ebx, cr0
    DB      66h
    and     ebx, 9ffafff3h
    DB      66h
    or      ebx, 23h
    mov     cr0, ebx
    DB      66h, 0eah
    DD      ?
    DW      ?
_GdtDesc    FWORD   ?

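;
; The far jump patched above lands here in 32-bit protected mode: load flat
; data selectors, switch to the per-CPU SMM stack patched into gSmiStack, and
; install the SMM IDT.
;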
@32bit:
    mov     ax, PROTECT_MODE_DS
    mov     ds, ax
    mov     es, ax
    mov     fs, ax
    mov     gs, ax
    mov     ss, ax
    DB      0bch                        ; mov esp, imm32
gSmiStack   DD      ?
    mov     eax, offset gSmiHandlerIdtr
    lidt    fword ptr [eax]
    jmp     ProtFlatMode

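;
; Load the SMM page table root patched into gSmiCr3 by the C code, then enable
; only the CR4 features that CPUID reports as supported.
;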
ProtFlatMode:
    DB      0b8h                        ; mov eax, imm32
gSmiCr3     DD      ?
    mov     cr3, eax
;
; Need to test for CR4 specific bit support
;
    mov     eax, 1
    cpuid                               ; use CPUID to determine if specific CR4 bits are supported
    xor     eax, eax                    ; Clear EAX
    test    edx, BIT2                   ; Check for DE capabilities
    jz      @f
    or      eax, BIT3
@@:
    test    edx, BIT6                   ; Check for PAE capabilities
    jz      @f
    or      eax, BIT5
@@:
    test    edx, BIT7                   ; Check for MCE capabilities
    jz      @f
    or      eax, BIT6
@@:
    test    edx, BIT24                  ; Check for FXSR capabilities
    jz      @f
    or      eax, BIT9
@@:
    test    edx, BIT25                  ; Check for SSE capabilities
    jz      @f
    or      eax, BIT10
@@:                                     ; as cr4.PGE is not set here, refresh cr3
    mov     cr4, eax                    ; in PreModifyMtrrs() to flush TLB.

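;
; When the SMM stack guard feature is enabled a TSS is needed; reset byte 5 of
; its descriptor (the access byte) to 89h, an available 32-bit TSS, since LTR
; faults on a descriptor still marked busy from a previous SMI.
;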
    cmp     FeaturePcdGet (PcdCpuSmmStackGuard), 0
    jz      @F
; Load TSS
    mov     byte ptr [ebp + TSS_SEGMENT + 5], 89h ; clear busy flag
    mov     eax, TSS_SEGMENT
    ltr     ax
@@:

; enable NXE if supported
    DB      0b0h                        ; mov al, imm8
mXdSupported     DB      1
    cmp     al, 0
    jz      @SkipXd
;
; Check XD disable bit
;
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    push    edx                        ; save MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2                  ; MSR_IA32_MISC_ENABLE[34]
    jz      @f
    and     dx, 0FFFBh                 ; clear XD Disable bit if it is set
    wrmsr
@@:
    mov     ecx, MSR_EFER
    rdmsr
    or      ax, MSR_EFER_XD            ; enable NXE
    wrmsr
    jmp     @XdDone
@SkipXd:
    sub     esp, 4
@XdDone:

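;
; Enable paging with write protection (PG + WP, plus NE/MP/PE) so the SMM page
; table protections also apply to supervisor-mode code, then load the data
; segment selectors published in the SMM descriptor.
;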
    mov     ebx, cr0
    or      ebx, 080010023h             ; enable paging + WP + NE + MP + PE
    mov     cr0, ebx
    lea     ebx, [edi + DSC_OFFSET]
    mov     ax, [ebx + DSC_DS]
    mov     ds, eax
    mov     ax, [ebx + DSC_OTHERSEG]
    mov     es, eax
    mov     fs, eax
    mov     gs, eax
    mov     ax, [ebx + DSC_SS]
    mov     ss, eax

;   jmp     _SmiHandler                 ; instruction is not needed

_SmiHandler PROC
    mov     ebx, [esp + 4]              ; CPU Index
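;
; Each of the following C routines takes the CPU index as its only argument:
; CpuSmmDebugEntry/CpuSmmDebugExit bracket the call to SmiRendezvous, which
; performs the SMM rendezvous and handler dispatch.
;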
    push    ebx
    mov     eax, CpuSmmDebugEntry
    call    eax
    add     esp, 4

    push    ebx
    mov     eax, SmiRendezvous
    call    eax
    add     esp, 4

    push    ebx
    mov     eax, CpuSmmDebugExit
    call    eax
    add     esp, 4

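;
; If XD is supported, restore the XD Disable bit in MSR_IA32_MISC_ENABLE when it
; was set on entry (its upper dword was pushed before NXE was enabled above).
;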
    mov     eax, mXdSupported
    mov     al, [eax]
    cmp     al, 0
    jz      @f
    pop     edx                        ; get saved MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2
    jz      @f
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    or      dx, BIT2                   ; set XD Disable bit if it was set before entering into SMM
    wrmsr

@@:
    rsm
_SmiHandler ENDP

gcSmiHandlerSize    DW      $ - _SmiEntryPoint

    END