#------------------------------------------------------------------------------
# Exception/interrupt entry fragment (x86-64, AT&T syntax).
# NOTE(review): this chunk contained unresolved unified-diff markers; it has
# been resolved in favor of the "+" (new) side, which replaces RIP-relative /
# memory-operand tests with absolute movabs loads through the accumulator.
#------------------------------------------------------------------------------
    # Align stack down to a 16-byte boundary before pushing state.
    # Was "andl $0x0fffffff0, %esp": writing a 32-bit register zero-extends
    # into %rsp and would truncate a stack pointer above 4 GiB.  andq with a
    # sign-extended imm8 (-0x10 == ...FFF0) applies the same low-4-bit mask
    # while preserving the upper 32 bits.
    andq    $-0x10, %rsp
    pushq   %rcx
    movq    8(%rax), %rcx              # rcx = vector number, presumably read
                                       # from a context block at rax — confirm
                                       # against the caller outside this view
    # Does this vector push an error code?  mErrorCodeFlag is a per-vector
    # bitmask.  movabsl loads via a 64-bit absolute moffs — only encodable
    # with the accumulator, and not position-independent (assumes an
    # absolutely-relocated image, typical for this firmware code).
    pushq   %rax
    movabsl ASM_PFX(mErrorCodeFlag), %eax
    bt      %ecx, %eax                 # CF = error-code bit for this vector.
                                       # NOTE(review): reg,reg form indexes
                                       # mod 32; the removed memory-operand
                                       # form could index past bit 31 — OK if
                                       # error-code vectors are always < 32
    popq    %rax
    jc      NoErrorData
    pushq   (%rsp)                     # push additional rcx to make stack alignment
NoErrorData:
    cmp     $32, %ecx                  # Intel reserved vector for exceptions?
    jae     NoErrorCode
    pushq   %rax
    movabsl ASM_PFX(mErrorCodeFlag), %eax
    bt      %ecx, %eax                 # same mod-32 caveat as above
    popq    %rax
    jc      CommonInterruptEntry_al_0000

    jmp     *-24(%rsp)                 # tail-jump through a target stashed
                                       # below rsp by earlier code (not in view)

DoReturn:
    # Select far return vs IRET: nonzero mDoFarReturnFlag means far return.
    # rax is preserved around the flag load; popq does not affect flags, so
    # the jz below still tests the cmpq result.
    pushq   %rax
    movabsq ASM_PFX(mDoFarReturnFlag), %rax
    cmpq    $0, %rax                   # Check if need to do far return instead of IRET
    popq    %rax
    jz      DoIret
    pushq   %rax
    movq    %rsp, %rax                 # save old RSP to rax