;------------------------------------------------------------------------------ ;
; Copyright (c) 2016, Intel Corporation. All rights reserved.<BR>
; This program and the accompanying materials
; are licensed and made available under the terms and conditions of the BSD License
; which accompanies this distribution.  The full text of the license may be found at
; http://opensource.org/licenses/bsd-license.php.
;
; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
;
; Module Name:
;
;   SmiEntry.nasm
;
; Abstract:
;
;   Code template of the SMI handler for a particular processor
;
;-------------------------------------------------------------------------------

%define MSR_IA32_MISC_ENABLE  0x1A0
%define MSR_EFER              0xc0000080
%define MSR_EFER_XD           0x800

%define DSC_OFFSET    0xfb00
%define DSC_GDTPTR    0x30
%define DSC_GDTSIZ    0x38
%define DSC_CS        14
%define DSC_DS        16
%define DSC_SS        18
%define DSC_OTHERSEG  20

%define PROTECT_MODE_CS 0x8
%define PROTECT_MODE_DS 0x20
%define TSS_SEGMENT     0x40

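; Note: DSC_OFFSET is presumably the offset (from SMBASE) of the per-processor
; SMM descriptor, and the other DSC_* values above index fields within it (GDT
; pointer and size, segment selectors); PROTECT_MODE_CS/DS and TSS_SEGMENT are
; assumed to be selectors in the GDT that descriptor points to.
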
extern  ASM_PFX(SmiRendezvous)
extern  ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))
extern  ASM_PFX(CpuSmmDebugEntry)
extern  ASM_PFX(CpuSmmDebugExit)

global  ASM_PFX(gcSmiHandlerTemplate)
global  ASM_PFX(gcSmiHandlerSize)
global  ASM_PFX(gSmiCr3)
global  ASM_PFX(gSmiStack)
global  ASM_PFX(gSmbase)
global  ASM_PFX(mXdSupported)
extern  ASM_PFX(gSmiHandlerIdtr)

    SECTION .text

BITS 16
ASM_PFX(gcSmiHandlerTemplate):
_SmiEntryPoint:
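; Note: this template is copied for each CPU to SMBASE + 0x8000, the
; architectural SMI entry point; execution starts here in 16-bit real mode,
; which is why the code below computes addresses relative to _SmiEntryPoint
; plus 0x8000.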
    mov     bx, _GdtDesc - _SmiEntryPoint + 0x8000
    mov     ax, [cs:DSC_OFFSET + DSC_GDTSIZ]
    dec     ax
    mov     [cs:bx], ax
    mov     eax, [cs:DSC_OFFSET + DSC_GDTPTR]
    mov     [cs:bx + 2], eax
    mov     ebp, eax                      ; ebp = GDT base
o32 lgdt    [cs:bx]                       ; lgdt fword ptr cs:[bx]
    mov     ax, PROTECT_MODE_CS
    mov     [cs:bx-0x2],ax
    DB      0x66, 0xbf                    ; mov edi, SMBASE
ASM_PFX(gSmbase): DD 0
    lea     eax, [edi + (@32bit - _SmiEntryPoint) + 0x8000]
    mov     [cs:bx-0x6],eax
    mov     ebx, cr0
    and     ebx, 0x9ffafff3
    or      ebx, 0x23
    mov     cr0, ebx
    jmp     dword 0x0:0x0
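; Note: ASM_PFX(gSmbase) above serves both as the imm32 operand of the
; hand-coded "mov edi, imm32" and as a global patched per CPU with that CPU's
; SMBASE. Since BX still points at _GdtDesc, [cs:bx-0x6] and [cs:bx-0x2] are
; the offset and selector fields of the "jmp dword 0x0:0x0" just above, so the
; far jump actually transfers to @32bit with CS = PROTECT_MODE_CS.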
_GdtDesc:
    DW 0
    DD 0

BITS 32
@32bit:
    mov     ax, PROTECT_MODE_DS
o16 mov     ds, ax
o16 mov     es, ax
o16 mov     fs, ax
o16 mov     gs, ax
o16 mov     ss, ax
    DB      0xbc                          ; mov esp, imm32
ASM_PFX(gSmiStack): DD 0
    mov     eax, ASM_PFX(gSmiHandlerIdtr)
    lidt    [eax]
    jmp     ProtFlatMode

ProtFlatMode:
    DB      0xb8                          ; mov eax, imm32
ASM_PFX(gSmiCr3): DD 0
    mov     cr3, eax
;
; Need to test for CR4 specific bit support
;
    mov     eax, 1
    cpuid                                 ; use CPUID to determine if specific CR4 bits are supported
    xor     eax, eax                      ; Clear EAX
    test    edx, BIT2                     ; Check for DE capabilities
    jz      .0
    or      eax, BIT3
.0:
    test    edx, BIT6                     ; Check for PAE capabilities
    jz      .1
    or      eax, BIT5
.1:
    test    edx, BIT7                     ; Check for MCE capabilities
    jz      .2
    or      eax, BIT6
.2:
    test    edx, BIT24                    ; Check for FXSR capabilities
    jz      .3
    or      eax, BIT9
.3:
    test    edx, BIT25                    ; Check for SSE capabilities
    jz      .4
    or      eax, BIT10
.4:                                       ; as cr4.PGE is not set here, refresh cr3
    mov     cr4, eax                      ; in PreModifyMtrrs() to flush TLB.
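; Note: the CPUID.01h:EDX feature bits tested above map to CR4 bits as follows:
; DE -> CR4.DE (BIT3), PAE -> CR4.PAE (BIT5), MCE -> CR4.MCE (BIT6),
; FXSR -> CR4.OSFXSR (BIT9), SSE -> CR4.OSXMMEXCPT (BIT10).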

    cmp     byte [dword ASM_PFX(FeaturePcdGet (PcdCpuSmmStackGuard))], 0
    jz      .6
; Load TSS
    mov     byte [ebp + TSS_SEGMENT + 5], 0x89 ; clear busy flag
    mov     eax, TSS_SEGMENT
    ltr     ax
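; Note: byte 5 of the TSS descriptor is its access byte; writing 0x89 (present,
; available 32-bit TSS) clears any stale busy flag so the LTR above succeeds.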
.6:

; enable NXE if supported
    DB      0b0h                          ; mov al, imm8
ASM_PFX(mXdSupported): DB 1
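; Note: like gSmbase and gSmiStack, mXdSupported doubles as the imm8 operand of
; the hand-coded "mov al, imm8" above, so AL now holds its current value.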
    cmp     al, 0
    jz      @SkipXd
;
; Check XD disable bit
;
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    push    edx                           ; save MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2                     ; MSR_IA32_MISC_ENABLE[34]
    jz      .5
    and     dx, 0xFFFB                    ; clear XD Disable bit if it is set
    wrmsr
.5:
    mov     ecx, MSR_EFER
    rdmsr
    or      ax, MSR_EFER_XD               ; enable NXE
    wrmsr
    jmp     @XdDone
@SkipXd:
    sub     esp, 4
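; Note: the dummy dword reserved here keeps the stack layout the same as in the
; XD path (one dword above the CPU index), so SmiHandler's "[esp + 4]" below
; reads the CPU index in both cases.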
@XdDone:

    mov     ebx, cr0
    or      ebx, 0x80010023               ; enable paging + WP + NE + MP + PE
    mov     cr0, ebx
    lea     ebx, [edi + DSC_OFFSET]
    mov     ax, [ebx + DSC_DS]
    mov     ds, eax
    mov     ax, [ebx + DSC_OTHERSEG]
    mov     es, eax
    mov     fs, eax
    mov     gs, eax
    mov     ax, [ebx + DSC_SS]
    mov     ss, eax

; jmp     _SmiHandler                     ; instruction is not needed

global ASM_PFX(SmiHandler)
ASM_PFX(SmiHandler):
    mov     ebx, [esp + 4]                ; CPU Index
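; Note: gSmiStack is expected to be initialized so that the CPU index sits at
; the top of the stack; one dword (the saved MSR_IA32_MISC_ENABLE[63-32] or the
; @SkipXd dummy) was placed above it, hence the [esp + 4] access.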
    push    ebx
    mov     eax, ASM_PFX(CpuSmmDebugEntry)
    call    eax
    add     esp, 4

    push    ebx
    mov     eax, ASM_PFX(SmiRendezvous)
    call    eax
    add     esp, 4

    push    ebx
    mov     eax, ASM_PFX(CpuSmmDebugExit)
    call    eax
    add     esp, 4

    mov     eax, ASM_PFX(mXdSupported)
    mov     al, [eax]
    cmp     al, 0
    jz      .7
    pop     edx                           ; get saved MSR_IA32_MISC_ENABLE[63-32]
    test    edx, BIT2
    jz      .7
    mov     ecx, MSR_IA32_MISC_ENABLE
    rdmsr
    or      dx, BIT2                      ; set XD Disable bit if it was set before entering into SMM
    wrmsr

.7:
    rsm

ASM_PFX(gcSmiHandlerSize): DW $ - _SmiEntryPoint

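; Note: gcSmiHandlerSize records the length of the template above; presumably
; the SMM CPU driver uses it when copying the handler to each CPU's SMI entry
; point.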