Scraped gitweb blame view of UefiCpuPkg/PiSmmCpuDxeSmm/X64/SmiException.S (mirror_edk2.git).
Page-header commit subject: "MdePkg/BaseLib: add PatchInstructionX86()".
CommitLineData blame annotations follow; e.g. commit 427e3573, author initials "MK".
1#------------------------------------------------------------------------------\r
2#\r
717fb604 3# Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>\r
427e3573
MK
4# This program and the accompanying materials\r
5# are licensed and made available under the terms and conditions of the BSD License\r
6# which accompanies this distribution. The full text of the license may be found at\r
7# http://opensource.org/licenses/bsd-license.php.\r
8#\r
9# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
10# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
11#\r
12# Module Name:\r
13#\r
14# SmiException.S\r
15#\r
16# Abstract:\r
17#\r
18# Exception handlers used in SM mode\r
19#\r
20#------------------------------------------------------------------------------\r
21\r
# Symbols exported to C code in PiSmmCpuDxeSmm:
#   SmiPFHandler         - C page-fault handler invoked by the stub below
#   gcSmiIdtr / gcSmiGdtr - IDT/GDT pseudo-descriptors defined in .data below
#   gcPsd                - Processor SMM Descriptor template defined in .data below
# NOTE(review): the decimal digits fused onto lines, the stray commit-hash /
# initials lines (e.g. "427e3573", "MK"), and the literal "\r" suffixes are
# artifacts of the gitweb scrape this chunk came from; they are not part of
# the original SmiException.S and would not assemble as-is.
22ASM_GLOBAL ASM_PFX(SmiPFHandler)\r
427e3573
MK
23ASM_GLOBAL ASM_PFX(gcSmiIdtr)\r
24ASM_GLOBAL ASM_PFX(gcSmiGdtr)\r
25ASM_GLOBAL ASM_PFX(gcPsd)\r
26\r
27 .data\r
28\r
# --- SMM GDT -----------------------------------------------------------------
# Flat descriptors (base 0, limit 0xFFFFF).  Access-byte values, per the
# Intel SDM Vol. 3A segment-descriptor format:
#   0x9b = present, ring 0, execute/read code (accessed)
#   0x93 = present, ring 0, read/write data (accessed)
#   0x89 = available 64-bit TSS
# Flags/LimitHigh byte:
#   0xcf = 4 KB granularity, 32-bit default (D=1), limit[19:16]=0xf
#   0x8f = 4 KB granularity, 16-bit segment,      limit[19:16]=0xf
#   0xaf = 4 KB granularity, long mode (L=1),     limit[19:16]=0xf
# Selector values for these entries are derived below via .equ (CODE_SEL etc.).
29NullSeg: .quad 0 # reserved by architecture\r
30CodeSeg32:\r
31 .word -1 # LimitLow\r
32 .word 0 # BaseLow\r
33 .byte 0 # BaseMid\r
34 .byte 0x9b\r
35 .byte 0xcf # LimitHigh\r
36 .byte 0 # BaseHigh\r
37ProtModeCodeSeg32:\r
38 .word -1 # LimitLow\r
39 .word 0 # BaseLow\r
40 .byte 0 # BaseMid\r
41 .byte 0x9b\r
42 .byte 0xcf # LimitHigh\r
43 .byte 0 # BaseHigh\r
44ProtModeSsSeg32:\r
45 .word -1 # LimitLow\r
46 .word 0 # BaseLow\r
47 .byte 0 # BaseMid\r
48 .byte 0x93\r
49 .byte 0xcf # LimitHigh\r
50 .byte 0 # BaseHigh\r
51DataSeg32:\r
52 .word -1 # LimitLow\r
53 .word 0 # BaseLow\r
54 .byte 0 # BaseMid\r
55 .byte 0x93\r
56 .byte 0xcf # LimitHigh\r
57 .byte 0 # BaseHigh\r
# 16-bit code/data segments (0x8f flags byte = 16-bit default size).
58CodeSeg16:\r
59 .word -1\r
60 .word 0\r
61 .byte 0\r
62 .byte 0x9b\r
63 .byte 0x8f\r
64 .byte 0\r
65DataSeg16:\r
66 .word -1\r
67 .word 0\r
68 .byte 0\r
69 .byte 0x93\r
70 .byte 0x8f\r
71 .byte 0\r
# 64-bit (long mode) code segment: 0xaf sets the L bit.
72CodeSeg64:\r
73 .word -1 # LimitLow\r
74 .word 0 # BaseLow\r
75 .byte 0 # BaseMid\r
76 .byte 0x9b\r
77 .byte 0xaf # LimitHigh\r
78 .byte 0 # BaseHigh\r
79# TSS Segment for X64 specially\r
# 16-byte x64 TSS descriptor.  Limit = size of the TSS image below minus 1;
# NOTE: TSS_DESC_SIZE is defined after TssDescriptor (forward reference).
# Base fields are zero here; presumably fixed up at runtime -- confirm
# against the C setup code.
80TssSeg:\r
20ab3269 81 .word TSS_DESC_SIZE - 1 # LimitLow\r
427e3573
MK
82 .word 0 # BaseLow\r
83 .byte 0 # BaseMid\r
84 .byte 0x89\r
20ab3269 85 .byte 0x00 # LimitHigh\r
427e3573
MK
86 .byte 0 # BaseHigh\r
87 .long 0 # BaseUpper\r
88 .long 0 # Reserved\r
# Total GDT size in bytes (NullSeg through end of TssSeg); used for the
# gcSmiGdtr limit and exported to C via the PSD below.
89.equ GDT_SIZE, .- NullSeg\r
90\r
# 104-byte x64 TSS image (zero-filled); its size feeds TssSeg's limit above.
91TssDescriptor:\r
92 .space 104, 0\r
93.equ TSS_DESC_SIZE, .- TssDescriptor\r
94\r
95#\r
96# This structure serves as a template for all processors.\r
97#\r
# Processor SMM Descriptor (PSD) template, identified by the "PSDSIG "
# signature.  Field meanings presumably match the PROCESSOR_SMM_DESCRIPTOR
# C structure used by PiSmmCpuDxeSmm -- verify against the C header.
# One .quad is explicitly noted as patched later by InitializeMpServiceData().
98ASM_PFX(gcPsd):\r
99 .ascii "PSDSIG "\r
100 .word PSD_SIZE\r
101 .word 2\r
102 .word 1 << 2\r
103 .word CODE_SEL\r
104 .word DATA_SEL\r
105 .word DATA_SEL\r
106 .word DATA_SEL\r
107 .word 0\r
108 .quad 0\r
109 .quad 0\r
110 .quad 0 # fixed in InitializeMpServiceData()\r
111 .quad NullSeg\r
112 .long GDT_SIZE\r
113 .long 0\r
114 .space 24, 0\r
854c6b80 115 .quad 0\r
427e3573
MK
# Size of the PSD structure; stored in the .word field right after "PSDSIG ".
116.equ PSD_SIZE, . - ASM_PFX(gcPsd)\r
117\r
118#\r
119# CODE & DATA segments for SMM runtime\r
120#\r
# Selector values = byte offset of each descriptor from the GDT base.
121.equ CODE_SEL, CodeSeg64 - NullSeg\r
122.equ DATA_SEL, DataSeg32 - NullSeg\r
123.equ CODE32_SEL, CodeSeg32 - NullSeg\r
124\r
# GDT pseudo-descriptor (16-bit limit + 64-bit base), suitable for lgdt.
125ASM_PFX(gcSmiGdtr):\r
126 .word GDT_SIZE - 1\r
127 .quad NullSeg\r
128\r
# IDT pseudo-descriptor; limit/base are zero here and presumably filled in
# at runtime before use -- confirm against the C initialization code.
129ASM_PFX(gcSmiIdtr):\r
717fb604
JY
130 .word 0\r
131 .quad 0\r
427e3573
MK
132\r
133 .text\r
134\r
135#------------------------------------------------------------------------------\r
136# _SmiExceptionEntryPoints is the collection of exception entry points followed\r
137# by a common exception handler.\r
138#\r
139# Stack frame would be as follows as specified in IA32 manuals:\r
140# +---------------------+ <-- 16-byte aligned ensured by processor\r
141# + Old SS +\r
142# +---------------------+\r
143# + Old RSP +\r
144# +---------------------+\r
145# + RFlags +\r
146# +---------------------+\r
147# + CS +\r
148# +---------------------+\r
149# + RIP +\r
150# +---------------------+\r
151# + Error Code +\r
152# +---------------------+\r
153# + Vector Number +\r
154# +---------------------+\r
155# + RBP +\r
156# +---------------------+ <-- RBP, 16-byte aligned\r
157#\r
158# RSP set to odd multiple of 8 at @CommonEntryPoint means ErrCode PRESENT\r
159#------------------------------------------------------------------------------\r
# IDT stub for #PF (vector 0x0e) in SmmProfile mode.  Builds a full
# EFI_SYSTEM_CONTEXT_X64 on the stack, calls the C SmiPFHandler, restores
# context, sets RFLAGS.TF in the return frame, and iretq's back.
160ASM_GLOBAL ASM_PFX(PageFaultIdtHandlerSmmProfile)\r
161ASM_PFX(PageFaultIdtHandlerSmmProfile):\r
162 pushq $0x0e # Page Fault\r
# Hand-encoded "test spl, 8": the CPU 16-byte-aligns the frame before
# pushing it, so after the vector push RSP%16 == 8 only when an error code
# was also pushed (see the header comment above).
163 .byte 0x40, 0xf6, 0xc4, 0x08 #test spl, 8\r
164 jnz L1\r
# No error code on the frame: duplicate the vector slot downward and write
# a dummy 0 into the error-code position so both paths share one layout.
165 pushq (%rsp)\r
166 movq $0, 8(%rsp)\r
167L1:\r
# From here RBP anchors the frame: 8(%rbp)=vector, 16(%rbp)=errcode,
# 24(%rbp)=RIP, 32(%rbp)=CS, 40(%rbp)=RFLAGS, 48(%rbp)=RSP, 56(%rbp)=SS.
168 pushq %rbp\r
169 movq %rsp, %rbp\r
170\r
171 #\r
172 # Since here the stack pointer is 16-byte aligned, so\r
173 # EFI_FX_SAVE_STATE_X64 of EFI_SYSTEM_CONTEXT_x64\r
174 # is 16-byte aligned\r
175 #\r
176\r
177## UINT64 Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;\r
178## UINT64 R8, R9, R10, R11, R12, R13, R14, R15;\r
179 pushq %r15\r
180 pushq %r14\r
181 pushq %r13\r
182 pushq %r12\r
183 pushq %r11\r
184 pushq %r10\r
185 pushq %r9\r
186 pushq %r8\r
187 pushq %rax\r
188 pushq %rcx\r
189 pushq %rdx\r
190 pushq %rbx\r
191 pushq 48(%rbp) # RSP\r
192 pushq (%rbp) # RBP\r
193 pushq %rsi\r
194 pushq %rdi\r
195\r
196## UINT64 Gs, Fs, Es, Ds, Cs, Ss; insure high 16 bits of each is zero\r
197 movzwq 56(%rbp), %rax\r
198 pushq %rax # for ss\r
199 movzwq 32(%rbp), %rax\r
200 pushq %rax # for cs\r
201 movq %ds, %rax\r
202 pushq %rax\r
203 movq %es, %rax\r
204 pushq %rax\r
205 movq %fs, %rax\r
206 pushq %rax\r
207 movq %gs, %rax\r
208 pushq %rax\r
209\r
210## UINT64 Rip;\r
211 pushq 24(%rbp)\r
212\r
213## UINT64 Gdtr[2], Idtr[2];\r
214 subq $16, %rsp\r
215 sidt (%rsp)\r
216 subq $16, %rsp\r
217 sgdt (%rsp)\r
218\r
219## UINT64 Ldtr, Tr;\r
# Zero-extend the 16-bit task register / LDT selector into 64-bit slots.
220 xorq %rax, %rax\r
221 strw %ax\r
222 pushq %rax\r
223 sldtw %ax\r
224 pushq %rax\r
225\r
226## UINT64 RFlags;\r
227 pushq 40(%rbp)\r
228\r
229## UINT64 Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;\r
230 movq %cr8, %rax\r
231 pushq %rax\r
# CR4 |= 0x208 = DE (bit 3) | OSFXSR (bit 9); OSFXSR must be set for the
# fxsave/fxrstor used below (Intel SDM).  Note the *modified* CR4 value is
# what gets saved into the context.
232 movq %cr4, %rax\r
233 orq $0x208, %rax\r
234 movq %rax, %cr4\r
235 pushq %rax\r
236 movq %cr3, %rax\r
237 pushq %rax\r
238 movq %cr2, %rax\r
239 pushq %rax\r
240 xorq %rax, %rax\r
241 pushq %rax\r
242 movq %cr0, %rax\r
243 pushq %rax\r
244\r
245## UINT64 Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;\r
246 movq %dr7, %rax\r
247 pushq %rax\r
248 movq %dr6, %rax\r
249 pushq %rax\r
250 movq %dr3, %rax\r
251 pushq %rax\r
252 movq %dr2, %rax\r
253 pushq %rax\r
254 movq %dr1, %rax\r
255 pushq %rax\r
256 movq %dr0, %rax\r
257 pushq %rax\r
258\r
259## FX_SAVE_STATE_X64 FxSaveState;\r
260\r
261 subq $512, %rsp\r
262 movq %rsp, %rdi\r
263 .byte 0xf, 0xae, 0x7 # fxsave [rdi]\r
264\r
265# UEFI calling convention for x64 requires that Direction flag in EFLAGs is clear\r
266 cld\r
267\r
268## UINT32 ExceptionData;\r
269 pushq 16(%rbp)\r
270\r
271## call into exception handler\r
# 1st arg (rcx, Microsoft x64 convention as used by EDK2) = vector number
# saved at 8(%rbp); handler address loaded with movabs so the 64-bit
# absolute address works regardless of where the module is loaded.
272 movq 8(%rbp), %rcx\r
273 movabsq $ASM_PFX(SmiPFHandler), %rax\r
274\r
## 2nd arg (rdx) = pointer to the context built above.
275## Prepare parameter and call\r
276 movq %rsp, %rdx\r
277 #\r
278 # Per X64 calling convention, allocate maximum parameter stack space\r
279 # and make sure RSP is 16-byte aligned\r
280 #\r
281 subq $4 * 8 + 8, %rsp\r
282 call *%rax\r
283 addq $4 * 8 + 8, %rsp\r
# NOTE(review): jump to the very next instruction -- presumably to flush
# prefetched instructions after the handler may have changed page tables;
# confirm the intent.
284 jmp L5\r
285\r
286L5:\r
287## UINT64 ExceptionData;\r
288 addq $8, %rsp\r
289\r
290## FX_SAVE_STATE_X64 FxSaveState;\r
291\r
292 movq %rsp, %rsi\r
293 .byte 0xf, 0xae, 0xe # fxrstor [rsi]\r
294 addq $512, %rsp\r
295\r
296## UINT64 Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;\r
297## Skip restoration of DRx registers to support debuggers\r
298## that set breakpoints in interrupt/exception context\r
299 addq $8 * 6, %rsp\r
300\r
301## UINT64 Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;\r
302 popq %rax\r
303 movq %rax, %cr0\r
304 addq $8, %rsp # not for Cr1\r
305 popq %rax\r
306 movq %rax, %cr2\r
307 popq %rax\r
308 movq %rax, %cr3\r
309 popq %rax\r
310 movq %rax, %cr4\r
311 popq %rax\r
312 movq %rax, %cr8\r
313\r
## Write the (possibly handler-modified) RFLAGS back into the iretq frame.
314## UINT64 RFlags;\r
315 popq 40(%rbp)\r
316\r
317## UINT64 Ldtr, Tr;\r
318## UINT64 Gdtr[2], Idtr[2];\r
319## Best not let anyone mess with these particular registers...\r
320 addq $48, %rsp\r
321\r
322## UINT64 Rip;\r
323 popq 24(%rbp)\r
324\r
325## UINT64 Gs, Fs, Es, Ds, Cs, Ss;\r
326 popq %rax\r
327 # mov gs, rax ; not for gs\r
328 popq %rax\r
329 # mov fs, rax ; not for fs\r
330 # (X64 will not use fs and gs, so we do not restore it)\r
331 popq %rax\r
332 movq %rax, %es\r
333 popq %rax\r
334 movq %rax, %ds\r
335 popq 32(%rbp) # for cs\r
336 popq 56(%rbp) # for ss\r
337\r
338## UINT64 Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;\r
339## UINT64 R8, R9, R10, R11, R12, R13, R14, R15;\r
340 popq %rdi\r
341 popq %rsi\r
342 addq $8, %rsp # not for rbp\r
343 popq 48(%rbp) # for rsp\r
344 popq %rbx\r
345 popq %rdx\r
346 popq %rcx\r
347 popq %rax\r
348 popq %r8\r
349 popq %r9\r
350 popq %r10\r
351 popq %r11\r
352 popq %r12\r
353 popq %r13\r
354 popq %r14\r
355 popq %r15\r
356\r
357 movq %rbp, %rsp\r
358\r
359# Enable TF bit after page fault handler runs\r
# Bit 8 of RFLAGS is TF; 40(%rsp) is the saved-RFLAGS slot of the iretq
# frame, so the CPU single-steps the re-executed faulting instruction --
# presumably so SmmProfile regains control afterwards; confirm.
360 btsl $8, 40(%rsp) #RFLAGS\r
361\r
362 popq %rbp\r
363 addq $16, %rsp # skip INT# & ErrCode\r
364 iretq\r
365\r