# TITLE CpuAsm.S:

#------------------------------------------------------------------------------
#*
#*   Copyright 2008 - 2009, Intel Corporation
#*   All rights reserved. This program and the accompanying materials
#*   are licensed and made available under the terms and conditions of the BSD License
#*   which accompanies this distribution.  The full text of the license may be found at
#*   http://opensource.org/licenses/bsd-license.php
#*
#*   THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
#*   WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#*
#*   CpuAsm.S
#*
#*   Abstract:
#*
#------------------------------------------------------------------------------

#text SEGMENT


#EXTRN ASM_PFX(mErrorCodeFlag):DWORD   # Error code flags for exceptions


#
# 8-byte storage for the address of the external interrupt vector table
# (an array of handler function pointers, indexed by vector number).
# Written by InitializeExternalVectorTablePtr, read by CommonInterruptEntry.
#
ExternalVectorTablePtr:
        .byte   0, 0, 0, 0, 0, 0, 0, 0

#------------------------------------------------------------------------------
# VOID
# InitializeExternalVectorTablePtr (
#   EFI_CPU_INTERRUPT_HANDLER *VectorTable   // %rcx (Microsoft x64 ABI)
#   );
# Records the caller-supplied vector table address into
# ExternalVectorTablePtr for later use by CommonInterruptEntry.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(InitializeExternalVectorTablePtr)
ASM_PFX(InitializeExternalVectorTablePtr):
        leaq    ExternalVectorTablePtr(%rip), %rax
        movq    %rcx, (%rax)            # store the table pointer (not a vector number)
        ret


#------------------------------------------------------------------------------
# VOID
# SetCodeSelector (
#   UINT16 Selector   // %cx (Microsoft x64 ABI)
#   );
# Reloads CS with the given selector.  CS cannot be written with a mov, so a
# far jump is built on the stack (offset at (%rsp), selector at 4(%rsp)) and
# taken with an indirect far jmp; execution resumes below with the new CS.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(SetCodeSelector)
ASM_PFX(SetCodeSelector):
        subq    $0x10, %rsp                       # scratch space for the far pointer
        leaq    .LsetCodeSelectorReturn(%rip), %rax
        movq    %rax, (%rsp)                      # far pointer offset
        movw    %cx, 4(%rsp)                      # far pointer selector
        .byte   0xFF, 0x2C, 0x24                  # jmp (%rsp)  — m16:32 far jump
.LsetCodeSelectorReturn:
        addq    $0x10, %rsp
        ret

#------------------------------------------------------------------------------
# VOID
# SetDataSelectors (
#   UINT16 Selector   // %cx (Microsoft x64 ABI)
#   );
# Loads every data segment register (SS, DS, ES, FS, GS) with the selector.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(SetDataSelectors)
ASM_PFX(SetDataSelectors):
        movw    %cx, %ss
        movw    %cx, %ds
        movw    %cx, %es
        movw    %cx, %fs
        movw    %cx, %gs
        ret

#---------------------------------------;
# CommonInterruptEntry                  ;
#---------------------------------------;
# Common entry reached from every vector stub.  The stub does a near `call`
# here with the 16-bit vector number stored at the return address, builds an
# EFI_SYSTEM_CONTEXT_X64 on the stack, dispatches to the registered external
# handler (if any), then restores context and irets.

ASM_GLOBAL ASM_PFX(CommonInterruptEntry)
ASM_PFX(CommonInterruptEntry):
        cli                             # defensive; interrupt gates already cleared IF
        #
        # All interrupt handlers are invoked through interrupt gates, so
        # the IF flag is automatically cleared at the entry point.
        #
        #
        # Calculate vector number
        #
        xchgq   (%rsp), %rcx            # return address of the stub's call = address
                                        # of the vector number; old %rcx saved in slot
        movzwl  (%rcx), %ecx            # load 16-bit vector number, zero-extended
        cmp     $32, %ecx               # Intel-reserved vector for exceptions?
        jae     NoErrorCode
        pushq   %rax
        leaq    ASM_PFX(mErrorCodeFlag)(%rip), %rax
        bt      %ecx, (%rax)            # CF = 1 if the CPU pushed an error code
        popq    %rax
        jc      HasErrorCode

NoErrorCode:

        #
        # Push a dummy error code on the stack
        # to maintain coherent stack map
        #
        pushq   (%rsp)                  # duplicate RIP slot downward
        movq    $0, 8(%rsp)             # dummy error code
HasErrorCode:
        pushq   %rbp
        movq    %rsp, %rbp

        #
        # Stack:
        # +---------------------+ <-- 16-byte aligned ensured by processor
        # + Old SS              +
        # +---------------------+
        # + Old RSP             +
        # +---------------------+
        # + RFlags              +
        # +---------------------+
        # + CS                  +
        # +---------------------+
        # + RIP                 +
        # +---------------------+
        # + Error Code          +
        # +---------------------+
        # + RCX / Vector Number +
        # +---------------------+
        # + RBP                 +
        # +---------------------+ <-- RBP, 16-byte aligned
        #


        #
        # Since here the stack pointer is 16-byte aligned, so
        # EFI_FX_SAVE_STATE_X64 of EFI_SYSTEM_CONTEXT_x64
        # is 16-byte aligned
        #

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
        pushq   %r15
        pushq   %r14
        pushq   %r13
        pushq   %r12
        pushq   %r11
        pushq   %r10
        pushq   %r9
        pushq   %r8
        pushq   %rax
        pushq   8(%rbp)                 # RCX (saved by the xchg above)
        pushq   %rdx
        pushq   %rbx
        pushq   48(%rbp)                # RSP (from the interrupt frame)
        pushq   (%rbp)                  # RBP (saved in prologue)
        pushq   %rsi
        pushq   %rdi

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;  insure high 16 bits of each is zero
        movzwq  56(%rbp), %rax
        pushq   %rax                    # for ss
        movzwq  32(%rbp), %rax
        pushq   %rax                    # for cs
        movq    %ds, %rax
        pushq   %rax
        movq    %es, %rax
        pushq   %rax
        movq    %fs, %rax
        pushq   %rax
        movq    %gs, %rax
        pushq   %rax

        movq    %rcx, 8(%rbp)           # save vector number in the frame's RCX slot

#; UINT64  Rip;
        pushq   24(%rbp)

#; UINT64  Gdtr[2], Idtr[2];
        xorq    %rax, %rax
        pushq   %rax
        pushq   %rax
        sidt    (%rsp)
        xchgq   2(%rsp), %rax           # repack 10-byte IDTR image into two UINT64s
        xchgq   (%rsp), %rax
        xchgq   8(%rsp), %rax

        xorq    %rax, %rax
        pushq   %rax
        pushq   %rax
        sgdt    (%rsp)
        xchgq   2(%rsp), %rax           # repack 10-byte GDTR image into two UINT64s
        xchgq   (%rsp), %rax
        xchgq   8(%rsp), %rax

#; UINT64  Ldtr, Tr;
        xorq    %rax, %rax
        str     %ax
        pushq   %rax
        sldt    %ax
        pushq   %rax

#; UINT64  RFlags;
        pushq   40(%rbp)

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
        movq    %cr8, %rax
        pushq   %rax
        movq    %cr4, %rax
        orq     $0x208, %rax            # set OSFXSR | OSXMMEXCPT for fxsave/fxrstor
        movq    %rax, %cr4
        pushq   %rax
        mov     %cr3, %rax
        pushq   %rax
        mov     %cr2, %rax
        pushq   %rax
        xorq    %rax, %rax
        pushq   %rax                    # CR1 does not exist; push 0 placeholder
        mov     %cr0, %rax
        pushq   %rax

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
        movq    %dr7, %rax
        pushq   %rax
#; clear Dr7 while executing debugger itself
        xorq    %rax, %rax
        movq    %rax, %dr7

        movq    %dr6, %rax
        pushq   %rax
#; insure all status bits in dr6 are clear...
        xorq    %rax, %rax
        movq    %rax, %dr6

        movq    %dr3, %rax
        pushq   %rax
        movq    %dr2, %rax
        pushq   %rax
        movq    %dr1, %rax
        pushq   %rax
        movq    %dr0, %rax
        pushq   %rax

#; FX_SAVE_STATE_X64 FxSaveState;
        subq    $512, %rsp
        movq    %rsp, %rdi
        .byte   0x0f, 0x0ae, 0x07       # fxsave (%rdi)

#; UINT32  ExceptionData;
        pushq   16(%rbp)

#; call into exception handler
        movq    8(%rbp), %rcx           # vector number
        leaq    ExternalVectorTablePtr(%rip), %rax
        movq    (%rax), %rax            # FIX: was "movl (%eax), %eax" — a 32-bit load
                                        # through a 32-bit address truncated the 64-bit
                                        # table pointer above 4 GiB
        movq    (%rax,%rcx,8), %rax     # handler = table[vector]
        orq     %rax, %rax              # NULL?

        je      SkipHandlerCall         # no handler registered for this vector

#; Prepare parameter and call
#       mov     rcx, [rbp + 8]          # vector number already in %rcx
        mov     %rsp, %rdx              # 2nd arg: pointer to EFI_SYSTEM_CONTEXT
        #
        # Per X64 calling convention, allocate maximum parameter stack space
        # (32-byte shadow space + 8) and make sure RSP is 16-byte aligned
        #
        subq    $40, %rsp
        call    *%rax
        addq    $40, %rsp

SkipHandlerCall:
        cli                             # handler may have enabled interrupts
#; UINT64  ExceptionData;
        addq    $8, %rsp

#; FX_SAVE_STATE_X64 FxSaveState;

        movq    %rsp, %rsi
        .byte   0x0f, 0x0ae, 0x0E       # fxrstor (%rsi)
        addq    $512, %rsp

#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
        popq    %rax
        movq    %rax, %dr0
        popq    %rax
        movq    %rax, %dr1
        popq    %rax
        movq    %rax, %dr2
        popq    %rax
        movq    %rax, %dr3
#; skip restore of dr6.  We cleared dr6 during the context save.
        addq    $8, %rsp
        popq    %rax
        movq    %rax, %dr7

#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
        popq    %rax
        movq    %rax, %cr0
        addq    $8, %rsp                # not for Cr1
        popq    %rax
        movq    %rax, %cr2
        popq    %rax
        movq    %rax, %cr3
        popq    %rax
        movq    %rax, %cr4
        popq    %rax
        movq    %rax, %cr8

#; UINT64  RFlags;
        popq    40(%rbp)                # restore into iret frame

#; UINT64  Ldtr, Tr;
#; UINT64  Gdtr[2], Idtr[2];
#; Best not let anyone mess with these particular registers...
        addq    $48, %rsp

#; UINT64  Rip;
        popq    24(%rbp)                # restore into iret frame

#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;
        popq    %rax
#       mov     %rax, %gs               # not for gs
        popq    %rax
#       mov     %rax, %fs               # not for fs
# (X64 will not use fs and gs, so we do not restore it)
        popq    %rax
        movq    %rax, %es
        popq    %rax
        movq    %rax, %ds
        popq    32(%rbp)                # for cs
        popq    56(%rbp)                # for ss

#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
        popq    %rdi
        popq    %rsi
        addq    $8, %rsp                # not for rbp
        popq    48(%rbp)                # for rsp
        popq    %rbx
        popq    %rdx
        popq    %rcx
        popq    %rax
        popq    %r8
        popq    %r9
        popq    %r10
        popq    %r11
        popq    %r12
        popq    %r13
        popq    %r14
        popq    %r15

        movq    %rbp, %rsp
        popq    %rbp
        addq    $16, %rsp               # discard vector-number slot and error code
        iretq

#text ENDS

#END