]> git.proxmox.com Git - mirror_edk2.git/blob - UefiCpuPkg/CpuDxe/X64/CpuAsm.S
A complement fix for revision 11664 to update GCC assembly files: clear the direction flag in EFLAGS before calling the exception handler.
[mirror_edk2.git] / UefiCpuPkg / CpuDxe / X64 / CpuAsm.S
1 # TITLE CpuAsm.S:
2
3 #------------------------------------------------------------------------------
4 #*
5 #* Copyright (c) 2008 - 2011, Intel Corporation. All rights reserved.<BR>
6 #* This program and the accompanying materials
7 #* are licensed and made available under the terms and conditions of the BSD License
8 #* which accompanies this distribution. The full text of the license may be found at
9 #* http://opensource.org/licenses/bsd-license.php
10 #*
11 #* THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
12 #* WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
13 #*
14 #* CpuAsm.S
15 #*
16 #* Abstract:
17 #*
18 #------------------------------------------------------------------------------
19
20
21 #text SEGMENT
22
23
24 #EXTRN ASM_PFX(mErrorCodeFlag):DWORD # Error code flags for exceptions
25
26
27 #
28 # point to the external interrupt vector table
29 #
30 ExternalVectorTablePtr:
31 .byte 0, 0, 0, 0, 0, 0, 0, 0
32
ASM_GLOBAL ASM_PFX(InitializeExternalVectorTablePtr)
ASM_PFX(InitializeExternalVectorTablePtr):
    # Record the caller-supplied vector-table address (first argument,
    # %rcx per the Microsoft x64 / UEFI ABI) into ExternalVectorTablePtr
    # with a single RIP-relative store.
    movq    %rcx, ExternalVectorTablePtr(%rip)
    ret
38
39
40 #------------------------------------------------------------------------------
41 # VOID
42 # SetCodeSelector (
43 # UINT16 Selector
44 # );
45 #------------------------------------------------------------------------------
46 ASM_GLOBAL ASM_PFX(SetCodeSelector)
47 ASM_PFX(SetCodeSelector):
48 subq $0x10, %rsp
49 leaq setCodeSelectorLongJump(%rip), %rax
50 movq %rax, (%rsp)
51 movw %cx, 4(%rsp)
52 .byte 0xFF, 0x2C, 0x24 # jmp (%rsp) note:fword jmp
53 setCodeSelectorLongJump:
54 addq $0x10, %rsp
55 ret
56
57 #------------------------------------------------------------------------------
58 # VOID
59 # SetDataSelectors (
60 # UINT16 Selector
61 # );
62 #------------------------------------------------------------------------------
63 ASM_GLOBAL ASM_PFX(SetDataSelectors)
64 ASM_PFX(SetDataSelectors):
65 movw %cx, %ss
66 movw %cx, %ds
67 movw %cx, %es
68 movw %cx, %fs
69 movw %cx, %gs
70 ret
71
72 #---------------------------------------;
73 # CommonInterruptEntry ;
74 #---------------------------------------;
75 # The follow algorithm is used for the common interrupt routine.
76
77 ASM_GLOBAL ASM_PFX(CommonInterruptEntry)
78 ASM_PFX(CommonInterruptEntry):
79 cli
80 #
81 # All interrupt handlers are invoked through interrupt gates, so
82 # IF flag automatically cleared at the entry point
83 #
84 #
85 # Calculate vector number
86 #
87 xchgq (%rsp), %rcx # get the return address of call, actually, it is the address of vector number.
88 movzwl (%rcx), %ecx
89 cmp $32, %ecx # Intel reserved vector for exceptions?
90 jae NoErrorCode
91 pushq %rax
92 leaq ASM_PFX(mErrorCodeFlag)(%rip), %rax
93 bt %ecx, (%rax)
94 popq %rax
95 jc CommonInterruptEntry_al_0000
96
97 NoErrorCode:
98
99 #
100 # Push a dummy error code on the stack
101 # to maintain coherent stack map
102 #
103 pushq (%rsp)
104 movq $0, 8(%rsp)
105 CommonInterruptEntry_al_0000:
106 pushq %rbp
107 movq %rsp, %rbp
108
109 #
110 # Stack:
111 # +---------------------+ <-- 16-byte aligned ensured by processor
112 # + Old SS +
113 # +---------------------+
114 # + Old RSP +
115 # +---------------------+
116 # + RFlags +
117 # +---------------------+
118 # + CS +
119 # +---------------------+
120 # + RIP +
121 # +---------------------+
122 # + Error Code +
123 # +---------------------+
124 # + RCX / Vector Number +
125 # +---------------------+
126 # + RBP +
127 # +---------------------+ <-- RBP, 16-byte aligned
128 #
129
130
131 #
132 # Since here the stack pointer is 16-byte aligned, so
133 # EFI_FX_SAVE_STATE_X64 of EFI_SYSTEM_CONTEXT_x64
134 # is 16-byte aligned
135 #
136
137 #; UINT64 Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
138 #; UINT64 R8, R9, R10, R11, R12, R13, R14, R15;
139 pushq %r15
140 pushq %r14
141 pushq %r13
142 pushq %r12
143 pushq %r11
144 pushq %r10
145 pushq %r9
146 pushq %r8
147 pushq %rax
148 pushq 8(%rbp) # RCX
149 pushq %rdx
150 pushq %rbx
151 pushq 48(%rbp) # RSP
152 pushq (%rbp) # RBP
153 pushq %rsi
154 pushq %rdi
155
156 #; UINT64 Gs, Fs, Es, Ds, Cs, Ss; insure high 16 bits of each is zero
157 movzwq 56(%rbp), %rax
158 pushq %rax # for ss
159 movzwq 32(%rbp), %rax
160 pushq %rax # for cs
161 movq %ds, %rax
162 pushq %rax
163 movq %es, %rax
164 pushq %rax
165 movq %fs, %rax
166 pushq %rax
167 movq %gs, %rax
168 pushq %rax
169
170 movq %rcx, 8(%rbp) # save vector number
171
172 #; UINT64 Rip;
173 pushq 24(%rbp)
174
175 #; UINT64 Gdtr[2], Idtr[2];
176 xorq %rax, %rax
177 pushq %rax
178 pushq %rax
179 sidt (%rsp)
180 xchgq 2(%rsp), %rax
181 xchgq (%rsp), %rax
182 xchgq 8(%rsp), %rax
183
184 xorq %rax, %rax
185 pushq %rax
186 pushq %rax
187 sgdt (%rsp)
188 xchgq 2(%rsp), %rax
189 xchgq (%rsp), %rax
190 xchgq 8(%rsp), %rax
191
192 #; UINT64 Ldtr, Tr;
193 xorq %rax, %rax
194 str %ax
195 pushq %rax
196 sldt %ax
197 pushq %rax
198
199 #; UINT64 RFlags;
200 pushq 40(%rbp)
201
202 #; UINT64 Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
203 movq %cr8, %rax
204 pushq %rax
205 movq %cr4, %rax
206 orq $0x208, %rax
207 movq %rax, %cr4
208 pushq %rax
209 mov %cr3, %rax
210 pushq %rax
211 mov %cr2, %rax
212 pushq %rax
213 xorq %rax, %rax
214 pushq %rax
215 mov %cr0, %rax
216 pushq %rax
217
218 #; UINT64 Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
219 movq %dr7, %rax
220 pushq %rax
221 movq %dr6, %rax
222 pushq %rax
223 movq %dr3, %rax
224 pushq %rax
225 movq %dr2, %rax
226 pushq %rax
227 movq %dr1, %rax
228 pushq %rax
229 movq %dr0, %rax
230 pushq %rax
231
232 #; FX_SAVE_STATE_X64 FxSaveState;
233 subq $512, %rsp
234 movq %rsp, %rdi
235 .byte 0x0f, 0x0ae, 0x07 #fxsave [rdi]
236
237 #; UEFI calling convention for x64 requires that Direction flag in EFLAGs is clear
238 cld
239
240 #; UINT32 ExceptionData;
241 pushq 16(%rbp)
242
243 #; call into exception handler
244 movq 8(%rbp), %rcx
245 leaq ExternalVectorTablePtr(%rip), %rax
246 movl (%eax), %eax
247 movq (%rax,%rcx,8), %rax
248 orq %rax, %rax # NULL?
249
250 je nonNullValue#
251
252 #; Prepare parameter and call
253 # mov rcx, [rbp + 8]
254 mov %rsp, %rdx
255 #
256 # Per X64 calling convention, allocate maximum parameter stack space
257 # and make sure RSP is 16-byte aligned
258 #
259 subq $40, %rsp
260 call *%rax
261 addq $40, %rsp
262
263 nonNullValue:
264 cli
265 #; UINT64 ExceptionData;
266 addq $8, %rsp
267
268 #; FX_SAVE_STATE_X64 FxSaveState;
269
270 movq %rsp, %rsi
271 .byte 0x0f, 0x0ae, 0x0E # fxrstor [rsi]
272 addq $512, %rsp
273
274 #; UINT64 Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
275 #; Skip restoration of DRx registers to support in-circuit emualators
276 #; or debuggers set breakpoint in interrupt/exception context
277 addq $48, %rsp
278
279 #; UINT64 Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
280 popq %rax
281 movq %rax, %cr0
282 addq $8, %rsp # not for Cr1
283 popq %rax
284 movq %rax, %cr2
285 popq %rax
286 movq %rax, %cr3
287 popq %rax
288 movq %rax, %cr4
289 popq %rax
290 movq %rax, %cr8
291
292 #; UINT64 RFlags;
293 popq 40(%rbp)
294
295 #; UINT64 Ldtr, Tr;
296 #; UINT64 Gdtr[2], Idtr[2];
297 #; Best not let anyone mess with these particular registers...
298 addq $48, %rsp
299
300 #; UINT64 Rip;
301 popq 24(%rbp)
302
303 #; UINT64 Gs, Fs, Es, Ds, Cs, Ss;
304 popq %rax
305 # mov %rax, %gs ; not for gs
306 popq %rax
307 # mov %rax, %fs ; not for fs
308 # (X64 will not use fs and gs, so we do not restore it)
309 popq %rax
310 movq %rax, %es
311 popq %rax
312 movq %rax, %ds
313 popq 32(%rbp) # for cs
314 popq 56(%rbp) # for ss
315
316 #; UINT64 Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
317 #; UINT64 R8, R9, R10, R11, R12, R13, R14, R15;
318 popq %rdi
319 popq %rsi
320 addq $8, %rsp # not for rbp
321 popq 48(%rbp) # for rsp
322 popq %rbx
323 popq %rdx
324 popq %rcx
325 popq %rax
326 popq %r8
327 popq %r9
328 popq %r10
329 popq %r11
330 popq %r12
331 popq %r13
332 popq %r14
333 popq %r15
334
335 movq %rbp, %rsp
336 popq %rbp
337 addq $16, %rsp
338 iretq
339
340
341 #text ENDS
342
343 #END
344
345