# TITLE   CpuAsm.S:

#------------------------------------------------------------------------------
#*
#*   Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
#*   This program and the accompanying materials
#*   are licensed and made available under the terms and conditions of the BSD License
#*   which accompanies this distribution.  The full text of the license may be found at
#*   http://opensource.org/licenses/bsd-license.php
#*
#*   THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
#*   WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#*
#*   CpuAsm.S
#*
#*   Abstract:
#*
#------------------------------------------------------------------------------


#text  SEGMENT


#EXTRN ASM_PFX(mErrorCodeFlag):DWORD # Error code flags for exceptions


#
# Pointer to the external interrupt vector table
#
ExternalVectorTablePtr:
    .byte   0, 0, 0, 0, 0, 0, 0, 0

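#
# UEFI x64 code follows the Microsoft x64 calling convention, so the table
# address passed to the routine below arrives in %rcx and is stored into the
# 8-byte slot above for later use by CommonInterruptEntry.
#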
ASM_GLOBAL ASM_PFX(InitializeExternalVectorTablePtr)
ASM_PFX(InitializeExternalVectorTablePtr):
    lea     ExternalVectorTablePtr(%rip), %rax  # save the vector table pointer
    mov     %rcx, (%rax)
    ret


#------------------------------------------------------------------------------
# VOID
# SetCodeSelector (
#   UINT16 Selector
#   );
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(SetCodeSelector)
ASM_PFX(SetCodeSelector):
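    #
    # CS cannot be loaded with a mov, so build an m16:32 far pointer on the
    # stack (32-bit offset at (%rsp), 16-bit selector at 4(%rsp); the movw
    # below intentionally overwrites the upper half of the stored address)
    # and far-jump through it to reload CS with the new selector.
    #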
    subq    $0x10, %rsp
    leaq    setCodeSelectorLongJump(%rip), %rax
    movq    %rax, (%rsp)
    movw    %cx, 4(%rsp)
    .byte   0xFF, 0x2C, 0x24    # ljmp *(%rsp), hand-encoded far jump
setCodeSelectorLongJump:
    addq    $0x10, %rsp
    ret

#------------------------------------------------------------------------------
# VOID
# SetDataSelectors (
#   UINT16 Selector
#   );
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(SetDataSelectors)
ASM_PFX(SetDataSelectors):
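    #
    # In long mode the processor treats the DS/ES/SS bases as flat, but the
    # selectors are still reloaded here so the visible segment state stays
    # consistent with the newly installed GDT.
    #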
    movw    %cx, %ss
    movw    %cx, %ds
    movw    %cx, %es
    movw    %cx, %fs
    movw    %cx, %gs
    ret

#---------------------------------------;
# CommonInterruptEntry                  ;
#---------------------------------------;
# The following algorithm is used for the common interrupt routine.

ASM_GLOBAL ASM_PFX(CommonInterruptEntry)
ASM_PFX(CommonInterruptEntry):
    cli
    #
    # All interrupt handlers are invoked through interrupt gates, so the
    # IF flag is automatically cleared at the entry point
    #
    #
    # Calculate vector number
    #
    xchgq   (%rsp), %rcx    # the return address of the call is really the address of the vector number
    movzwl  (%rcx), %ecx
    cmp     $32, %ecx       # Intel reserved vector for exceptions?
    jae     NoErrorCode
    pushq   %rax
    leaq    ASM_PFX(mErrorCodeFlag)(%rip), %rax
    bt      %ecx, (%rax)
    popq    %rax
    jc      CommonInterruptEntry_al_0000
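    #
    # mErrorCodeFlag has bit N set when vector N pushes a hardware error
    # code; for those vectors the dummy-error-code push below is skipped.
    #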

NoErrorCode:

    #
    # Push a dummy error code on the stack
    # to maintain a coherent stack map
    #
    pushq   (%rsp)
    movq    $0, 8(%rsp)
CommonInterruptEntry_al_0000:
    pushq   %rbp
    movq    %rsp, %rbp

    #
    # Stack:
    # +---------------------+ <-- 16-byte aligned ensured by processor
    # +    Old SS           +
    # +---------------------+
    # +    Old RSP          +
    # +---------------------+
    # +    RFlags           +
    # +---------------------+
    # +    CS               +
    # +---------------------+
    # +    RIP              +
    # +---------------------+
    # +    Error Code       +
    # +---------------------+
    # + RCX / Vector Number +
    # +---------------------+
    # +    RBP              +
    # +---------------------+ <-- RBP, 16-byte aligned
    #

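    #
    # With this frame, 8(%rbp) is the RCX/vector-number slot, 16(%rbp) the
    # error code, 24(%rbp) RIP, 32(%rbp) CS, 40(%rbp) RFlags, 48(%rbp) the
    # old RSP and 56(%rbp) the old SS; the code below addresses the frame
    # through these offsets.
    #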

    #
    # The stack pointer is 16-byte aligned here, so the
    # EFI_FX_SAVE_STATE_X64 member of EFI_SYSTEM_CONTEXT_X64
    # is 16-byte aligned as well
    #

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    pushq   %r15
    pushq   %r14
    pushq   %r13
    pushq   %r12
    pushq   %r11
    pushq   %r10
    pushq   %r9
    pushq   %r8
    pushq   %rax
    pushq   8(%rbp)     # RCX
    pushq   %rdx
    pushq   %rbx
    pushq   48(%rbp)    # RSP
    pushq   (%rbp)      # RBP
    pushq   %rsi
    pushq   %rdi

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;  ensure the upper bits of each are zero
    movzwq  56(%rbp), %rax
    pushq   %rax            # for ss
    movzwq  32(%rbp), %rax
    pushq   %rax            # for cs
    movq    %ds, %rax
    pushq   %rax
    movq    %es, %rax
    pushq   %rax
    movq    %fs, %rax
    pushq   %rax
    movq    %gs, %rax
    pushq   %rax

    movq    %rcx, 8(%rbp)   # save vector number

#; UINT64  Rip;
    pushq   24(%rbp)

#; UINT64  Gdtr[2], Idtr[2];
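    #
    # sidt/sgdt each store a 10-byte pseudo-descriptor (16-bit limit followed
    # by a 64-bit base); the xchgq sequences below spread those fields across
    # the two zeroed UINT64 slots reserved for each register in the context.
    #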
    xorq    %rax, %rax
    pushq   %rax
    pushq   %rax
    sidt    (%rsp)
    xchgq   2(%rsp), %rax
    xchgq   (%rsp), %rax
    xchgq   8(%rsp), %rax

    xorq    %rax, %rax
    pushq   %rax
    pushq   %rax
    sgdt    (%rsp)
    xchgq   2(%rsp), %rax
    xchgq   (%rsp), %rax
    xchgq   8(%rsp), %rax

#; UINT64  Ldtr, Tr;
    xorq    %rax, %rax
    str     %ax
    pushq   %rax
    sldt    %ax
    pushq   %rax

#; UINT64  RFlags;
    pushq   40(%rbp)

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    movq    %cr8, %rax
    pushq   %rax
    movq    %cr4, %rax
    orq     $0x208, %rax
    movq    %rax, %cr4
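    #
    # 0x208 sets CR4.DE (bit 3, debugging extensions) and CR4.OSFXSR (bit 9);
    # OSFXSR ensures the fxsave/fxrstor pair below saves and restores the
    # full XMM and MXCSR state.
    #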
    pushq   %rax
    mov     %cr3, %rax
    pushq   %rax
    mov     %cr2, %rax
    pushq   %rax
    xorq    %rax, %rax
    pushq   %rax            # CR1 is reserved; save a zero placeholder
    mov     %cr0, %rax
    pushq   %rax

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
    movq    %dr7, %rax
    pushq   %rax
    movq    %dr6, %rax
    pushq   %rax
    movq    %dr3, %rax
    pushq   %rax
    movq    %dr2, %rax
    pushq   %rax
    movq    %dr1, %rax
    pushq   %rax
    movq    %dr0, %rax
    pushq   %rax

#; FX_SAVE_STATE_X64 FxSaveState;
    subq    $512, %rsp
    movq    %rsp, %rdi
    .byte   0x0f, 0x0ae, 0x07   # fxsave (%rdi)
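    #
    # 0x0F 0xAE /0 is fxsave, hand-encoded for assemblers that lack the
    # mnemonic; the 512-byte save area must be 16-byte aligned, which the
    # stack layout above guarantees.
    #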

#; UINT64  ExceptionData;
    pushq   16(%rbp)

#; call into exception handler
    movq    8(%rbp), %rcx
    leaq    ExternalVectorTablePtr(%rip), %rax
    movq    (%rax), %rax
    movq    (%rax,%rcx,8), %rax
    orq     %rax, %rax      # NULL?

    je      nonNullValue    # skip the call if no handler is registered

#; Prepare parameter and call
#   mov     8(%rbp), %rcx
    mov     %rsp, %rdx
    #
    # Per the X64 calling convention, allocate the maximum parameter stack
    # space (32 bytes of shadow space for four register parameters plus 8
    # bytes of padding) and make sure RSP is 16-byte aligned
    #
    subq    $40, %rsp
    call    *%rax
    addq    $40, %rsp

nonNullValue:
    cli
#; UINT64  ExceptionData;
    addq    $8, %rsp

#; FX_SAVE_STATE_X64 FxSaveState;

    movq    %rsp, %rsi
    .byte   0x0f, 0x0ae, 0x0E   # fxrstor (%rsi)
    addq    $512, %rsp

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
#; Skip restoring the DRx registers, so that in-circuit emulators or
#; debuggers can keep breakpoints set in interrupt/exception context
    addq    $48, %rsp

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
    popq    %rax
    movq    %rax, %cr0
    addq    $8, %rsp    # not for Cr1
    popq    %rax
    movq    %rax, %cr2
    popq    %rax
    movq    %rax, %cr3
    popq    %rax
    movq    %rax, %cr4
    popq    %rax
    movq    %rax, %cr8

#; UINT64  RFlags;
    popq    40(%rbp)

#; UINT64  Ldtr, Tr;
#; UINT64  Gdtr[2], Idtr[2];
#; Best not let anyone mess with these particular registers...
    addq    $48, %rsp

#; UINT64  Rip;
    popq    24(%rbp)

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;
    popq    %rax
    # movq  %rax, %gs     # not for gs
    popq    %rax
    # movq  %rax, %fs     # not for fs
    # (X64 does not use fs and gs, so we do not restore them)
    popq    %rax
    movq    %rax, %es
    popq    %rax
    movq    %rax, %ds
    popq    32(%rbp)    # for cs
    popq    56(%rbp)    # for ss

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
    popq    %rdi
    popq    %rsi
    addq    $8, %rsp    # not for rbp
    popq    48(%rbp)    # for rsp
    popq    %rbx
    popq    %rdx
    popq    %rcx
    popq    %rax
    popq    %r8
    popq    %r9
    popq    %r10
    popq    %r11
    popq    %r12
    popq    %r13
    popq    %r14
    popq    %r15

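    #
    # Restore RSP from RBP, pop the saved RBP, then discard the 16 bytes
    # holding the RCX/vector-number slot and the error code; iretq pops
    # RIP, CS, RFlags, RSP and SS to resume the interrupted context.
    #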
    movq    %rbp, %rsp
    popq    %rbp
    addq    $16, %rsp
    iretq


#text  ENDS

#END