#------------------------------------------------------------------------------
#
# Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
#   SmiEntry.S
#
# Abstract:
#
#   Code template of the SMI handler for a particular processor
#
#------------------------------------------------------------------------------

ASM_GLOBAL ASM_PFX(gcSmiHandlerTemplate)
ASM_GLOBAL ASM_PFX(gcSmiHandlerSize)
ASM_GLOBAL ASM_PFX(gSmiCr3)
ASM_GLOBAL ASM_PFX(gSmiStack)
ASM_GLOBAL ASM_PFX(gSmbase)
ASM_GLOBAL ASM_PFX(mXdSupported)
ASM_GLOBAL ASM_PFX(gSmiHandlerIdtr)
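#
# Note: gSmbase, gSmiStack, gSmiCr3 and mXdSupported are data placeholders that
# live inside the handler template below; the SMM CPU driver patches them at
# runtime before the template is copied to each processor's SMBASE.
#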

.equ MSR_IA32_MISC_ENABLE, 0x1A0
.equ MSR_EFER, 0xc0000080
.equ MSR_EFER_XD, 0x800

#
# Constants relating to PROCESSOR_SMM_DESCRIPTOR
#
.equ DSC_OFFSET, 0xfb00
.equ DSC_GDTPTR, 0x30
.equ DSC_GDTSIZ, 0x38
.equ DSC_CS, 14
.equ DSC_DS, 16
.equ DSC_SS, 18
.equ DSC_OTHERSEG, 20
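#
# DSC_OFFSET is the offset of the PROCESSOR_SMM_DESCRIPTOR from SMBASE; the
# remaining DSC_* values are byte offsets of fields within that structure.
#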
#
# Constants relating to CPU State Save Area
#
.equ SSM_DR6, 0xffd0
.equ SSM_DR7, 0xffc8

.equ PROTECT_MODE_CS, 0x08
.equ PROTECT_MODE_DS, 0x20
.equ LONG_MODE_CS, 0x38
.equ TSS_SEGMENT, 0x40
.equ GDT_SIZE, 0x50

    .text

ASM_PFX(gcSmiHandlerTemplate):

_SmiEntryPoint:
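    #
    # On an SMI the processor starts executing here, at SMBASE + 0x8000, in a
    # 16-bit real-mode-like state; the template below walks it up through
    # protected mode into long mode before any C code is reached.
    #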
    #
    # The encoding of BX in 16-bit addressing mode is the same as that of RDI in
    # 64-bit addressing mode. That coincidence is used in the following
    # "64-bit like" 16-bit code. Be aware that once RDI is referenced as a
    # base address register, it is actually BX that is referenced.
    #
    .byte   0xbb                        # mov bx, imm16
    .word   _GdtDesc - _SmiEntryPoint + 0x8000
    #
    # fix GDT descriptor
    #
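    # The byte-encoded sequence below copies the GDT limit (size - 1) and the
    # 32-bit GDT base out of the PROCESSOR_SMM_DESCRIPTOR at SMBASE + DSC_OFFSET
    # into _GdtDesc, then loads it with lgdt.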
    .byte   0x2e,0xa1                   # mov ax, cs:[offset16]
    .word   DSC_OFFSET + DSC_GDTSIZ
    .byte   0x48                        # dec ax
    .byte   0x2e
    movl    %eax, (%rdi)                # mov cs:[bx], ax
    .byte   0x66,0x2e,0xa1              # mov eax, cs:[offset16]
    .word   DSC_OFFSET + DSC_GDTPTR
    .byte   0x2e
    movw    %ax, 2(%rdi)
    .byte   0x66,0x2e
    lgdt    (%rdi)
    #
    # Patch ProtectedMode Segment
    #
    .byte   0xb8
    .word   PROTECT_MODE_CS
    .byte   0x2e
    movl    %eax, -2(%rdi)
    #
    # Patch ProtectedMode entry
    #
    .byte   0x66, 0xbf                  # mov edi, SMBASE
ASM_PFX(gSmbase): .space 4
    lea     ((ProtectedMode - _SmiEntryPoint) + 0x8000)(%edi), %ax
    .byte   0x2e
    movw    %ax, -6(%rdi)
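    #
    # The two patches above fill in the 32-bit offset and the code selector of
    # the far jump encoded below (0x66, 0xEA plus a 6-byte operand), so that it
    # lands on ProtectedMode at its flat physical address.
    #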
    #
    # Switch into ProtectedMode
    #
    movq    %cr0, %rbx
    .byte   0x66
    andl    $0x9ffafff3, %ebx
    .byte   0x66
    orl     $0x00000023, %ebx

    movq    %rbx, %cr0
    .byte   0x66, 0xea
    .space  6

_GdtDesc: .space 6

ProtectedMode:
    movw    $PROTECT_MODE_DS, %ax
    movl    %eax, %ds
    movl    %eax, %es
    movl    %eax, %fs
    movl    %eax, %gs
    movl    %eax, %ss
    .byte   0xbc                        # mov esp, imm32
ASM_PFX(gSmiStack): .space 4
    jmp     ProtFlatMode

ProtFlatMode:
    .byte   0xb8
ASM_PFX(gSmiCr3): .space 4
    movq    %rax, %cr3
    movl    $0x668, %eax                # as cr4.PGE is not set here, refresh cr3
    movq    %rax, %cr4                  # in PreModifyMtrrs() to flush TLB.
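    # CR4 = 0x668 sets DE, PAE, MCE, OSFXSR and OSXMMEXCPT; PAE must be set
    # before long mode is enabled below.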
# Load TSS
    subl    $8, %esp                    # reserve room in stack
    sgdt    (%rsp)
    movl    2(%rsp), %eax               # eax = GDT base
    addl    $8, %esp
    movb    $0x89, %dl
    movb    %dl, (TSS_SEGMENT + 5)(%rax) # clear busy flag
    movl    $TSS_SEGMENT, %eax
    ltr     %ax
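    # 0x89 = present, DPL 0, available 64-bit TSS; ltr requires the descriptor
    # to be marked available (not busy) and sets the busy bit itself.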

# enable NXE if supported
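# The immediate byte of the "mov al, imm8" below doubles as the mXdSupported
# flag, which the C code updates in place; AL therefore ends up holding the
# current value of mXdSupported.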
    .byte   0xb0                        # mov al, imm8
ASM_PFX(mXdSupported): .byte 1
    cmpb    $0, %al
    jz      SkipNxe
#
# Check XD disable bit
#
    movl    $MSR_IA32_MISC_ENABLE, %ecx
    rdmsr
    subl    $4, %esp
    pushq   %rdx                        # save MSR_IA32_MISC_ENABLE[63-32]
    testl   $BIT2, %edx                 # MSR_IA32_MISC_ENABLE[34]
    jz      L13
    andw    $0x0FFFB, %dx               # clear XD Disable bit if it is set
    wrmsr
L13:
    movl    $MSR_EFER, %ecx
    rdmsr
    orw     $MSR_EFER_XD, %ax           # enable NXE
    wrmsr
    jmp     NxeDone
SkipNxe:
    subl    $8, %esp
NxeDone:

    #
    # Switch to LongMode
    #
    pushq   $LONG_MODE_CS               # push hardcoded cs here
    call    Base                        # push return address for retf later
Base:
    addl    $(LongMode - Base), (%rsp)  # offset for far retf, seg is the 1st arg

    movl    $MSR_EFER, %ecx
    rdmsr
    orb     $1, %ah                     # enable LME
    wrmsr
    movq    %cr0, %rbx
    orl     $0x080010023, %ebx          # enable paging + WP + NE + MP + PE
    movq    %rbx, %cr0
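    # With EFER.LME already set, setting CR0.PG above activates IA-32e mode;
    # the retf below then loads LONG_MODE_CS to begin executing 64-bit code.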
    retf
LongMode:                               # long mode (64-bit code) starts here
    movabsq $ASM_PFX(gSmiHandlerIdtr), %rax
    lidt    (%rax)
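    # rdi still holds the SMBASE value loaded by the 16-bit code above, so rbx
    # below points at this CPU's PROCESSOR_SMM_DESCRIPTOR; reload the data
    # segment selectors from its fields.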
    lea     (DSC_OFFSET)(%rdi), %ebx
    movw    DSC_DS(%rbx), %ax
    movl    %eax, %ds
    movw    DSC_OTHERSEG(%rbx), %ax
    movl    %eax, %es
    movl    %eax, %fs
    movl    %eax, %gs
    movw    DSC_SS(%rbx), %ax
    movl    %eax, %ss
#   jmp     _SmiHandler                 # instruction is not needed

_SmiHandler:
    movq    8(%rsp), %rbx
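    # rbx picks up the CpuIndex value that the SMM CPU driver placed on this
    # CPU's SMI stack; it is passed in rcx (the first MS x64 argument register)
    # to each of the C entry points called below.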
    # Save FP registers

    subq    $0x200, %rsp
    .byte   0x48                        # FXSAVE64
    fxsave  (%rsp)
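    # The 0x48 byte is a REX.W prefix that widens fxsave to FXSAVE64, saving
    # the full 512-byte FPU/SSE state into the area reserved above; fxrstor at
    # the end of the handler is widened the same way.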

    addq    $-0x20, %rsp
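    # Reserve 32 bytes of shadow space required by the Microsoft x64 calling
    # convention before calling into C code.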

    movq    %rbx, %rcx
    movabsq $ASM_PFX(CpuSmmDebugEntry), %rax
    call    *%rax

    movq    %rbx, %rcx
    movabsq $ASM_PFX(SmiRendezvous), %rax
    call    *%rax

    movq    %rbx, %rcx
    movabsq $ASM_PFX(CpuSmmDebugExit), %rax
    call    *%rax

    addq    $0x20, %rsp

    #
    # Restore FP registers
    #
    .byte   0x48                        # FXRSTOR64
    fxrstor (%rsp)

    addq    $0x200, %rsp

    movabsq $ASM_PFX(mXdSupported), %rax
    movb    (%rax), %al
    cmpb    $0, %al
    jz      L16
    popq    %rdx                        # get saved MSR_IA32_MISC_ENABLE[63-32]
    testl   $BIT2, %edx
    jz      L16
    movl    $MSR_IA32_MISC_ENABLE, %ecx
    rdmsr
    orw     $BIT2, %dx                  # set XD Disable bit if it was set before entering SMM
    wrmsr

L16:
    rsm

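#
# gcSmiHandlerSize lets the SMM CPU driver know how many bytes of this template
# to copy to each processor's SMI entry point at SMBASE + 0x8000.
#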
ASM_PFX(gcSmiHandlerSize): .word . - _SmiEntryPoint