#------------------------------------------------------------------------------
#
# Copyright (c) 2009 - 2016, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php.
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
#   SmiException.S
#
# Abstract:
#
#   Exception handlers used in SM mode
#
#------------------------------------------------------------------------------

ASM_GLOBAL ASM_PFX(SmiPFHandler)
ASM_GLOBAL ASM_PFX(gSmiMtrrs)
ASM_GLOBAL ASM_PFX(gcSmiIdtr)
ASM_GLOBAL ASM_PFX(gcSmiGdtr)
ASM_GLOBAL ASM_PFX(gcPsd)

    .data

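#
# GDT template for SMM: a null descriptor, flat 32-bit and 16-bit code/data
# descriptors, a 64-bit code descriptor, and a 16-byte x64 TSS descriptor.
# TssDescriptor below only reserves space for the 104-byte TSS itself.
#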
NullSeg:    .quad  0                    # reserved by architecture
CodeSeg32:
            .word  -1                   # LimitLow
            .word  0                    # BaseLow
            .byte  0                    # BaseMid
            .byte  0x9b
            .byte  0xcf                 # LimitHigh
            .byte  0                    # BaseHigh
ProtModeCodeSeg32:
            .word  -1                   # LimitLow
            .word  0                    # BaseLow
            .byte  0                    # BaseMid
            .byte  0x9b
            .byte  0xcf                 # LimitHigh
            .byte  0                    # BaseHigh
ProtModeSsSeg32:
            .word  -1                   # LimitLow
            .word  0                    # BaseLow
            .byte  0                    # BaseMid
            .byte  0x93
            .byte  0xcf                 # LimitHigh
            .byte  0                    # BaseHigh
DataSeg32:
            .word  -1                   # LimitLow
            .word  0                    # BaseLow
            .byte  0                    # BaseMid
            .byte  0x93
            .byte  0xcf                 # LimitHigh
            .byte  0                    # BaseHigh
CodeSeg16:
            .word  -1
            .word  0
            .byte  0
            .byte  0x9b
            .byte  0x8f
            .byte  0
DataSeg16:
            .word  -1
            .word  0
            .byte  0
            .byte  0x93
            .byte  0x8f
            .byte  0
CodeSeg64:
            .word  -1                   # LimitLow
            .word  0                    # BaseLow
            .byte  0                    # BaseMid
            .byte  0x9b
            .byte  0xaf                 # LimitHigh
            .byte  0                    # BaseHigh
# TSS segment descriptor (16 bytes on X64)
TssSeg:
            .word  TSS_DESC_SIZE - 1    # LimitLow
            .word  0                    # BaseLow
            .byte  0                    # BaseMid
            .byte  0x89
            .byte  0x00                 # LimitHigh
            .byte  0                    # BaseHigh
            .long  0                    # BaseUpper
            .long  0                    # Reserved
.equ  GDT_SIZE, . - NullSeg

TssDescriptor:
            .space 104, 0
.equ  TSS_DESC_SIZE, . - TssDescriptor

#
# This structure serves as a template for all processors.
#
ASM_PFX(gcPsd):
            .ascii "PSDSIG  "
            .word  PSD_SIZE
            .word  2
            .word  1 << 2
            .word  CODE_SEL
            .word  DATA_SEL
            .word  DATA_SEL
            .word  DATA_SEL
            .word  0
            .quad  0
            .quad  0
            .quad  0                    # fixed in InitializeMpServiceData()
            .quad  NullSeg
            .long  GDT_SIZE
            .long  0
            .space 24, 0
            .quad  ASM_PFX(gSmiMtrrs)
.equ  PSD_SIZE, . - ASM_PFX(gcPsd)

#
# CODE & DATA segments for SMM runtime
#
.equ  CODE_SEL, CodeSeg64 - NullSeg
.equ  DATA_SEL, DataSeg32 - NullSeg
.equ  CODE32_SEL, CodeSeg32 - NullSeg
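# (With the 8-byte descriptors laid out above, CODE_SEL = 0x38, DATA_SEL = 0x20
# and CODE32_SEL = 0x08.)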

ASM_PFX(gcSmiGdtr):
    .word  GDT_SIZE - 1
    .quad  NullSeg

ASM_PFX(gcSmiIdtr):
    .word  0
    .quad  0

    .text

#------------------------------------------------------------------------------
# PageFaultIdtHandlerSmmProfile is the page fault exception entry point,
# followed by a common exception handler body.
#
# The stack frame is as follows, as specified in the Intel IA-32/Intel 64
# manuals:
#
# +---------------------+ <-- 16-byte aligned, ensured by processor
# +    Old SS           +
# +---------------------+
# +    Old RSP          +
# +---------------------+
# +    RFlags           +
# +---------------------+
# +    CS               +
# +---------------------+
# +    RIP              +
# +---------------------+
# +    Error Code       +
# +---------------------+
# +    Vector Number    +
# +---------------------+
# +    RBP              +
# +---------------------+ <-- RBP, 16-byte aligned
#
# RSP being an odd multiple of 8 after the vector number push below means the
# CPU pushed an error code (ErrCode is PRESENT in the frame).
#------------------------------------------------------------------------------
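#
# Offsets from RBP used by the code below, derived from the frame above:
#   0(%rbp)  saved RBP       8(%rbp)  Vector Number   16(%rbp) Error Code
#   24(%rbp) RIP             32(%rbp) CS              40(%rbp) RFlags
#   48(%rbp) Old RSP         56(%rbp) Old SS
#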
ASM_GLOBAL ASM_PFX(PageFaultIdtHandlerSmmProfile)
ASM_PFX(PageFaultIdtHandlerSmmProfile):
    pushq   $0x0e                       # Page Fault
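    #
    # Bit 3 of RSP tells the two possible frames apart: if RSP is an odd
    # multiple of 8 at this point, the CPU pushed an error code and the frame
    # already matches the layout above; otherwise duplicate the vector number
    # and store a zero error code in its slot.
    #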
    .byte   0x40, 0xf6, 0xc4, 0x08      # test spl, 8
    jnz     L1
    pushq   (%rsp)
    movq    $0, 8(%rsp)
L1:
    pushq   %rbp
    movq    %rsp, %rbp

    #
    # Since the stack pointer is 16-byte aligned here, the
    # EFI_FX_SAVE_STATE_X64 member of EFI_SYSTEM_CONTEXT_X64
    # is 16-byte aligned as well.
    #

## UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
## UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    pushq   %r15
    pushq   %r14
    pushq   %r13
    pushq   %r12
    pushq   %r11
    pushq   %r10
    pushq   %r9
    pushq   %r8
    pushq   %rax
    pushq   %rcx
    pushq   %rdx
    pushq   %rbx
    pushq   48(%rbp)                    # RSP
    pushq   (%rbp)                      # RBP
    pushq   %rsi
    pushq   %rdi

## UINT64  Gs, Fs, Es, Ds, Cs, Ss;  ensure the high 16 bits of each are zero
    movzwq  56(%rbp), %rax
    pushq   %rax                        # for ss
    movzwq  32(%rbp), %rax
    pushq   %rax                        # for cs
    movq    %ds, %rax
    pushq   %rax
    movq    %es, %rax
    pushq   %rax
    movq    %fs, %rax
    pushq   %rax
    movq    %gs, %rax
    pushq   %rax

## UINT64  Rip;
    pushq   24(%rbp)

## UINT64  Gdtr[2], Idtr[2];
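    # SIDT/SGDT each store a 10-byte pseudo-descriptor (16-bit limit plus
    # 64-bit base) into the low bytes of the 16-byte Idtr[2]/Gdtr[2] slots;
    # the remaining bytes are not initialized here.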
    subq    $16, %rsp
    sidt    (%rsp)
    subq    $16, %rsp
    sgdt    (%rsp)

## UINT64  Ldtr, Tr;
    xorq    %rax, %rax
    strw    %ax
    pushq   %rax
    sldtw   %ax
    pushq   %rax

## UINT64  RFlags;
    pushq   40(%rbp)

## UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    movq    %cr8, %rax
    pushq   %rax
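    # 0x208 sets CR4.OSFXSR (bit 9) and CR4.DE (bit 3); OSFXSR is presumably
    # set here so that the FXSAVE/FXRSTOR below cover the XMM/MXCSR state.
    # The updated CR4 value is what gets recorded in the saved context.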
    movq    %cr4, %rax
    orq     $0x208, %rax
    movq    %rax, %cr4
    pushq   %rax
    movq    %cr3, %rax
    pushq   %rax
    movq    %cr2, %rax
    pushq   %rax
    xorq    %rax, %rax
    pushq   %rax                        # there is no CR1; store 0
    movq    %cr0, %rax
    pushq   %rax

## UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
    movq    %dr7, %rax
    pushq   %rax
    movq    %dr6, %rax
    pushq   %rax
    movq    %dr3, %rax
    pushq   %rax
    movq    %dr2, %rax
    pushq   %rax
    movq    %dr1, %rax
    pushq   %rax
    movq    %dr0, %rax
    pushq   %rax

## FX_SAVE_STATE_X64  FxSaveState;

    subq    $512, %rsp
    movq    %rsp, %rdi
    .byte   0xf, 0xae, 0x7              # fxsave [rdi]

# UEFI calling convention for x64 requires that the Direction flag in EFLAGS is clear
    cld

## UINT32  ExceptionData;
    pushq   16(%rbp)

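    #
    # SmiPFHandler is invoked with the vector number in RCX and a pointer to
    # the context just built on the stack in RDX, the first two integer
    # arguments of the Microsoft x64 calling convention used by UEFI.
    #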
## call into exception handler
    movq    8(%rbp), %rcx
    movabsq $ASM_PFX(SmiPFHandler), %rax

## Prepare parameter and call
    movq    %rsp, %rdx
    #
    # Per X64 calling convention, allocate maximum parameter stack space
    # and make sure RSP is 16-byte aligned
    #
    subq    $4 * 8 + 8, %rsp
    call    *%rax
    addq    $4 * 8 + 8, %rsp
    jmp     L5

L5:
## UINT64  ExceptionData;
    addq    $8, %rsp

## FX_SAVE_STATE_X64  FxSaveState;

    movq    %rsp, %rsi
    .byte   0xf, 0xae, 0xe              # fxrstor [rsi]
    addq    $512, %rsp

## UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
## Skip restoration of DRx registers to support debuggers
## that set breakpoints in interrupt/exception context
    addq    $8 * 6, %rsp

## UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    popq    %rax
    movq    %rax, %cr0
    addq    $8, %rsp                    # not for Cr1
    popq    %rax
    movq    %rax, %cr2
    popq    %rax
    movq    %rax, %cr3
    popq    %rax
    movq    %rax, %cr4
    popq    %rax
    movq    %rax, %cr8

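    #
    # From here on, values that belong to the hardware interrupt frame
    # (RFlags, RIP, CS, SS and the old RSP) are written back through RBP,
    # so IRETQ below returns with whatever the handler left in the context.
    #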
## UINT64  RFlags;
    popq    40(%rbp)

## UINT64  Ldtr, Tr;
## UINT64  Gdtr[2], Idtr[2];
## Best not let anyone mess with these particular registers...
    addq    $48, %rsp

## UINT64  Rip;
    popq    24(%rbp)

## UINT64  Gs, Fs, Es, Ds, Cs, Ss;
    popq    %rax
    # mov     gs, rax ; not for gs
    popq    %rax
    # mov     fs, rax ; not for fs
    # (X64 will not use fs and gs, so we do not restore them)
    popq    %rax
    movq    %rax, %es
    popq    %rax
    movq    %rax, %ds
    popq    32(%rbp)                    # for cs
    popq    56(%rbp)                    # for ss

## UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
## UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    popq    %rdi
    popq    %rsi
    addq    $8, %rsp                    # not for rbp
    popq    48(%rbp)                    # for rsp
    popq    %rbx
    popq    %rdx
    popq    %rcx
    popq    %rax
    popq    %r8
    popq    %r9
    popq    %r10
    popq    %r11
    popq    %r12
    popq    %r13
    popq    %r14
    popq    %r15

    movq    %rbp, %rsp

# Enable TF bit after page fault handler runs
    btsl    $8, 40(%rsp)                # RFLAGS
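    # (Bit 8 of RFLAGS is TF, so the processor will raise a single-step debug
    # exception right after the faulting instruction is re-executed on return.)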

    popq    %rbp
    addq    $16, %rsp                   # skip INT# & ErrCode
    iretq
