]> git.proxmox.com Git - mirror_edk2.git/blame - UefiCpuPkg/Library/SmmCpuFeaturesLib/X64/SmiEntry.S
UefiCpuPkg/SmmCpuFeatureLib: Add more CPU ID for SmmFeatureControl.
[mirror_edk2.git] / UefiCpuPkg / Library / SmmCpuFeaturesLib / X64 / SmiEntry.S
CommitLineData
09119a00
MK
1#------------------------------------------------------------------------------\r
2#\r
3# Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>\r
4# This program and the accompanying materials\r
5# are licensed and made available under the terms and conditions of the BSD License\r
6# which accompanies this distribution. The full text of the license may be found at\r
7# http://opensource.org/licenses/bsd-license.php.\r
8#\r
9# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
10# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
11#\r
12# Module Name:\r
13#\r
14# SmiEntry.S\r
15#\r
16# Abstract:\r
17#\r
18# Code template of the SMI handler for a particular processor\r
19#\r
20#------------------------------------------------------------------------------\r
21\r
# Symbols exported to the C side of SmmCpuFeaturesLib.  The gStm* data
# labels are 4-byte placeholder slots embedded in the handler template
# below; presumably the C code patches them before copying the template
# to each CPU's SMBASE region -- TODO confirm against SmmCpuFeaturesLib.c.
22ASM_GLOBAL ASM_PFX(gcStmSmiHandlerTemplate)\r
23ASM_GLOBAL ASM_PFX(gcStmSmiHandlerSize)\r
24ASM_GLOBAL ASM_PFX(gcStmSmiHandlerOffset)\r
25ASM_GLOBAL ASM_PFX(gStmSmiCr3)\r
26ASM_GLOBAL ASM_PFX(gStmSmiStack)\r
27ASM_GLOBAL ASM_PFX(gStmSmbase)\r
28ASM_GLOBAL ASM_PFX(gStmXdSupported)\r
29ASM_GLOBAL ASM_PFX(gStmSmiHandlerIdtr)\r
30\r
# MSR numbers and the EFER.NXE bit mask used by the XD (execute-disable)
# enable/restore logic in the template.
31.equ MSR_IA32_MISC_ENABLE, 0x1A0\r
32.equ MSR_EFER, 0xc0000080\r
33.equ MSR_EFER_XD, 0x800\r
34\r
35#\r
36# Constants relating to TXT_PROCESSOR_SMM_DESCRIPTOR\r
37#\r
# DSC_OFFSET is the descriptor's offset from SMBASE; the remaining values
# are field offsets inside the descriptor (GDT pointer/size and the code,
# data, stack and other segment selectors) -- assumed to match the
# TXT_PROCESSOR_SMM_DESCRIPTOR layout; verify against its C definition.
38.equ DSC_OFFSET, 0xfb00\r
39.equ DSC_GDTPTR, 0x48\r
40.equ DSC_GDTSIZ, 0x50\r
41.equ DSC_CS, 0x14\r
42.equ DSC_DS, 0x16\r
43.equ DSC_SS, 0x18\r
44.equ DSC_OTHERSEG, 0x1a\r
45#\r
46# Constants relating to CPU State Save Area\r
47#\r
48.equ SSM_DR6, 0xffd0\r
49.equ SSM_DR7, 0xffc8\r
50\r
# Hard-coded GDT selector values used during the mode switches below.
# They must agree with the GDT the platform SMM init code builds --
# TODO confirm against the GDT construction elsewhere in this library.
51.equ PROTECT_MODE_CS, 0x08\r
52.equ PROTECT_MODE_DS, 0x20\r
53.equ LONG_MODE_CS, 0x38\r
54.equ TSS_SEGMENT, 0x40\r
55.equ GDT_SIZE, 0x50\r
56\r
57 .text\r
58\r
#------------------------------------------------------------------------------
# gcStmSmiHandlerTemplate: per-CPU SMI handler code template.
#
# Execution begins at _StmSmiEntryPoint in SMM's 16-bit mode at
# SMBASE + 0x8000 (note the "+ 0x8000" in the label arithmetic below).
# The code fixes up a GDT descriptor from the TXT_PROCESSOR_SMM_DESCRIPTOR,
# far-jumps into 32-bit protected mode, optionally enables EFER.NXE, then
# switches to long mode and falls into CommonHandler, which calls the C
# handlers and executes RSM.  The gStm* labels are data slots inside the
# template that are patched externally before use.
#
# NOTE: raw .byte/.word sequences encode 16-bit instructions that the
# assembler cannot emit directly; offsets and encodings are position
# sensitive -- do not reorder or re-encode anything in this template.
#------------------------------------------------------------------------------
59ASM_PFX(gcStmSmiHandlerTemplate):\r
60\r
61_StmSmiEntryPoint:\r
62 #\r
63 # The encoding of BX in 16-bit addressing mode is the same as of RDI in 64-\r
64 # bit addressing mode. And that coincidence has been used in the following\r
65 # "64-bit like" 16-bit code. Be aware that once RDI is referenced as a\r
66 # base address register, it is actually BX that is referenced.\r
67 #\r
68 .byte 0xbb # mov bx, imm16\r
69 .word _StmGdtDesc - _StmSmiEntryPoint + 0x8000\r
70 #\r
71 # fix GDT descriptor\r
72 #\r
73 .byte 0x2e,0xa1 # mov ax, cs:[offset16]\r
74 .word DSC_OFFSET + DSC_GDTSIZ\r
75 .byte 0x48 # dec ax\r
76 .byte 0x2e\r
77 movl %eax, (%rdi) # mov cs:[bx], ax\r
78 .byte 0x66,0x2e,0xa1 # mov eax, cs:[offset16]\r
79 .word DSC_OFFSET + DSC_GDTPTR\r
80 .byte 0x2e\r
81 movw %ax, 2(%rdi)\r
82 .byte 0x66,0x2e\r
83 lgdt (%rdi)\r
84 #\r
85 # Patch ProtectedMode Segment\r
86 #\r
87 .byte 0xb8\r
88 .word PROTECT_MODE_CS\r
89 .byte 0x2e\r
90 movl %eax, -2(%rdi)\r
91 #\r
92 # Patch ProtectedMode entry\r
93 #\r
94 .byte 0x66, 0xbf # mov edi, SMBASE\r
95ASM_PFX(gStmSmbase): .space 4\r
96 lea ((ProtectedMode - _StmSmiEntryPoint) + 0x8000)(%edi), %ax\r
97 .byte 0x2e\r
98 movw %ax, -6(%rdi)\r
99 #\r
100 # Switch into ProtectedMode\r
101 #\r
102 movq %cr0, %rbx\r
103 .byte 0x66\r
104 andl $0x9ffafff3, %ebx\r
105 .byte 0x66\r
106 orl $0x00000023, %ebx\r
107\r
108 movq %rbx, %cr0\r
# Far jump (0xea) through the 6-byte slot patched above (offset at -6,
# selector at -2 relative to _StmGdtDesc) into 32-bit protected mode.
109 .byte 0x66, 0xea\r
110 .space 6\r
111\r
112_StmGdtDesc: .space 6\r
113\r
# 32-bit protected mode: load flat data selectors and the externally
# patched SMI stack pointer, then continue at ProtFlatMode.
114ProtectedMode:\r
115 movw $PROTECT_MODE_DS, %ax\r
116 movl %eax, %ds\r
117 movl %eax, %es\r
118 movl %eax, %fs\r
119 movl %eax, %gs\r
120 movl %eax, %ss\r
121 .byte 0xbc # mov esp, imm32\r
122ASM_PFX(gStmSmiStack): .space 4\r
123 jmp ProtFlatMode\r
124\r
# Load the patched CR3, then set CR4 = 0x668
# (DE | PAE | MCE | OSFXSR | OSXMMEXCPT; PGE deliberately clear -- see
# the comment about PreModifyMtrrs() below).
125ProtFlatMode:\r
126 .byte 0xb8\r
127ASM_PFX(gStmSmiCr3): .space 4\r
128 movq %rax, %cr3\r
129 movl $0x668,%eax # as cr4.PGE is not set here, refresh cr3\r
130 movq %rax, %cr4 # in PreModifyMtrrs() to flush TLB.\r
131# Load TSS\r
132 subl $8, %esp # reserve room in stack\r
133 sgdt (%rsp)\r
134 movl 2(%rsp), %eax # eax = GDT base\r
135 addl $8, %esp\r
136 movb $0x89, %dl\r
137 movb %dl, (TSS_SEGMENT + 5)(%rax) # clear busy flag\r
138 movl $TSS_SEGMENT, %eax\r
139 ltr %ax\r
140\r
141# enable NXE if supported\r
# gStmXdSupported is a 1-byte flag patched externally; it becomes the
# imm8 operand of the "mov al, imm8" encoded just above it.
142 .byte 0xb0 # mov al, imm8\r
143ASM_PFX(gStmXdSupported): .byte 1\r
144 cmpb $0, %al\r
145 jz SkipXd\r
146#\r
147# Check XD disable bit\r
148#\r
149 movl $MSR_IA32_MISC_ENABLE, %ecx\r
150 rdmsr\r
# Both the XD and SkipXd paths must grow the stack by the same amount so
# that CommonHandler's later "popq %rdx" finds the saved high dword at a
# fixed offset -- presumably 8 bytes total on each path (sub 4 + 4-byte
# push here, sub 8 at SkipXd); confirm against the 32-bit encoding used.
151 subl $4, %esp\r
152 pushq %rdx # save MSR_IA32_MISC_ENABLE[63-32]\r
153 testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]\r
154 jz L13\r
155 andw $0x0FFFB, %dx # clear XD Disable bit if it is set\r
156 wrmsr\r
157L13:\r
158 movl $MSR_EFER, %ecx\r
159 rdmsr\r
160 orw $MSR_EFER_XD,%ax # enable NXE\r
161 wrmsr\r
162 jmp XdDone\r
163SkipXd:\r
164 subl $8, %esp\r
165XdDone:\r
166\r
167 #\r
168 # Switch to LongMode\r
169 #\r
170 pushq $LONG_MODE_CS # push cs hardcore here\r
171 call Base # push return address for retf later\r
172Base:\r
173 addl $(LongMode - Base), (%rsp) # offset for far retf, seg is the 1st arg\r
174\r
175 movl $MSR_EFER, %ecx\r
176 rdmsr\r
177 orb $1,%ah # enable LME\r
178 wrmsr\r
179 movq %cr0, %rbx\r
180 orl $0x080010023, %ebx # enable paging + WP + NE + MP + PE\r
181 movq %rbx, %cr0\r
# Far return consumes the LONG_MODE_CS:LongMode pair built above,
# completing the transition into 64-bit mode.
182 retf\r
183LongMode: # long mode (64-bit code) starts here\r
184 movabsq $ASM_PFX(gStmSmiHandlerIdtr), %rax\r
185 lidt (%rax)\r
# Reload data/stack selectors from the TXT_PROCESSOR_SMM_DESCRIPTOR
# fields (rdi still holds SMBASE, patched at gStmSmbase above).
186 lea (DSC_OFFSET)(%rdi), %ebx\r
187 movw DSC_DS(%rbx), %ax\r
188 movl %eax,%ds\r
189 movw DSC_OTHERSEG(%rbx), %ax\r
190 movl %eax,%es\r
191 movl %eax,%fs\r
192 movl %eax,%gs\r
193 movw DSC_SS(%rbx), %ax\r
194 movl %eax,%ss\r
195\r
# Common 64-bit body: rbx <- value at 8(%rsp) (kept live across the three
# calls and passed as the first argument in %rcx each time, per the
# Microsoft x64 calling convention EDK2 uses for X64).
196CommonHandler:\r
197 movq 8(%rsp), %rbx\r
198 # Save FP registers\r
199\r
200 subq $0x200, %rsp\r
201 .byte 0x48 # FXSAVE64\r
202 fxsave (%rsp)\r
203\r
# 0x20 bytes = the 32-byte shadow space the MS x64 ABI requires the
# caller to reserve before each call.
204 addq $-0x20, %rsp\r
205\r
206 movq %rbx, %rcx\r
207 movabsq $ASM_PFX(CpuSmmDebugEntry), %rax\r
208 call *%rax\r
209\r
210 movq %rbx, %rcx\r
211 movabsq $ASM_PFX(SmiRendezvous), %rax\r
212 call *%rax\r
213\r
214 movq %rbx, %rcx\r
215 movabsq $ASM_PFX(CpuSmmDebugExit), %rax\r
216 call *%rax\r
217\r
218 addq $0x20, %rsp\r
219\r
220 #\r
221 # Restore FP registers\r
222 #\r
223 .byte 0x48 # FXRSTOR64\r
224 fxrstor (%rsp)\r
225\r
226 addq $0x200, %rsp\r
227\r
# If XD was supported, restore the XD Disable bit to its pre-SMM state
# using the high dword saved on the stack before the long-mode switch.
228 movabsq $ASM_PFX(gStmXdSupported), %rax\r
229 movb (%rax), %al\r
230 cmpb $0, %al\r
231 jz L16\r
232 popq %rdx # get saved MSR_IA32_MISC_ENABLE[63-32]\r
233 testl $BIT2, %edx\r
234 jz L16\r
235 movl $MSR_IA32_MISC_ENABLE, %ecx\r
236 rdmsr\r
237 orw $BIT2, %dx # set XD Disable bit if it was set before entering into SMM\r
238 wrmsr\r
239\r
240L16:\r
241 rsm\r
242\r
# Alternate 64-bit entry used when an STM is active (see gcStmSmiHandlerOffset
# export below): redo the XD setup, IDT/CR0/CR4 init that the STM skipped,
# then join CommonHandler.  r8 carries the saved MISC_ENABLE high dword
# (zero when XD is unsupported) and is pushed to mirror the stack layout
# that CommonHandler's "popq %rdx" expects.
243_StmSmiHandler:\r
244#\r
245# Check XD disable bit\r
246#\r
247 xorq %r8, %r8\r
248 movabsq $ASM_PFX(gStmXdSupported), %rax\r
249 movb (%rax), %al\r
250 cmpb $0, %al\r
251 jz StmXdDone\r
252 movl $MSR_IA32_MISC_ENABLE, %ecx\r
253 rdmsr\r
254 movq %rdx, %r8 # save MSR_IA32_MISC_ENABLE[63-32]\r
255 testl $BIT2, %edx # MSR_IA32_MISC_ENABLE[34]\r
256 jz L14\r
257 andw $0x0FFFB, %dx # clear XD Disable bit if it is set\r
258 wrmsr\r
259L14:\r
260 movl $MSR_EFER, %ecx\r
261 rdmsr\r
262 orw $MSR_EFER_XD,%ax # enable NXE\r
263 wrmsr\r
264StmXdDone:\r
265 pushq %r8\r
266\r
267 # below step is needed, because STM does not run above code.\r
268 # we have to run below code to set IDT/CR0/CR4\r
269 movabsq $ASM_PFX(gStmSmiHandlerIdtr), %rax\r
270 lidt (%rax)\r
271\r
272 movq %cr0, %rax\r
273 orl $0x80010023, %eax\r
274 movq %rax, %cr0\r
# NOTE(review): the CR4 value read here is immediately overwritten by the
# movl below (which also zeroes the upper bits), so this read is dead --
# harmless but worth confirming upstream intent.
275 movq %cr4, %rax\r
276 movl $0x668, %eax # as cr4.PGE is not set here, refresh cr3\r
277 movq %rax, %cr4 # in PreModifyMtrrs() to flush TLB.\r
278 # STM init finish\r
279 jmp CommonHandler\r
280\r
# Exported constants consumed by C code: total template size, and the
# offset of the STM entry point within the template.
281ASM_PFX(gcStmSmiHandlerSize) : .word . - _StmSmiEntryPoint\r
282ASM_PFX(gcStmSmiHandlerOffset): .word _StmSmiHandler - _StmSmiEntryPoint\r