<Filename>SwapBytes32.c</Filename>\r
<Filename>SwapBytes64.c</Filename>\r
<Filename>SwitchStack.c</Filename>\r
- <Filename SupArchList="IA32">x86LowLevel.c</Filename>\r
+ <Filename SupArchList="IA32">x86DisablePaging32.c</Filename>\r
+ <Filename SupArchList="IA32">x86DisablePaging64.c</Filename>\r
+ <Filename SupArchList="IA32">x86EnablePaging32.c</Filename>\r
+ <Filename SupArchList="IA32">x86EnablePaging64.c</Filename>\r
+ <Filename SupArchList="IA32">x86FxRestore.c</Filename>\r
+ <Filename SupArchList="IA32">x86FxSave.c</Filename>\r
+ <Filename SupArchList="IA32">x86GetInterruptState.c</Filename>\r
+ <Filename SupArchList="IA32">x86MemoryFence.c</Filename>\r
+ <Filename SupArchList="IA32">x86Msr.c</Filename>\r
+ <Filename SupArchList="IA32">x86ReadGdtr.c</Filename>\r
+ <Filename SupArchList="IA32">x86ReadIdtr.c</Filename>\r
<Filename SupArchList="IA32">x86Thunk.c</Filename>\r
+ <Filename SupArchList="IA32">x86WriteGdtr.c</Filename>\r
+ <Filename SupArchList="IA32">x86WriteIdtr.c</Filename>\r
<Filename SupArchList="IA32">Unaligned.c</Filename>\r
<Filename SupArchList="IA32">Ia32/Non-existing.c</Filename>\r
<Filename SupArchList="IA32">Ia32/InternalSwitchStack.c</Filename>\r
<Filename SupArchList="IA32">Ia32/CpuIdEx.asm</Filename>\r
<Filename SupArchList="IA32">Ia32/ReadEflags.asm</Filename>\r
<Filename SupArchList="IA32">Ia32/ReadMsr64.asm</Filename>\r
- <Filename SupArchList="IA32">Ia32/WriteMsr32.asm</Filename>\r
<Filename SupArchList="IA32">Ia32/WriteMsr64.asm</Filename>\r
<Filename SupArchList="IA32">Ia32/ReadCr0.asm</Filename>\r
<Filename SupArchList="IA32">Ia32/ReadCr2.asm</Filename>\r
<Filename SupArchList="IA32">Ia32/CpuIdEx.S</Filename>\r
<Filename SupArchList="IA32">Ia32/ReadEflags.S</Filename>\r
<Filename SupArchList="IA32">Ia32/ReadMsr64.S</Filename>\r
- <Filename SupArchList="IA32">Ia32/WriteMsr32.S</Filename>\r
<Filename SupArchList="IA32">Ia32/WriteMsr64.S</Filename>\r
<Filename SupArchList="IA32">Ia32/ReadCr0.S</Filename>\r
<Filename SupArchList="IA32">Ia32/ReadCr2.S</Filename>\r
<Filename SupArchList="IA32">Ia32/CpuBreakpoint.S</Filename>\r
<Filename SupArchList="IA32">Ia32/CpuFlushTlb.S</Filename>\r
<Filename SupArchList="IA32">Ia32/Thunk16.S</Filename>\r
- <Filename SupArchList="X64">x86LowLevel.c</Filename>\r
+ <Filename SupArchList="X64">x86DisablePaging32.c</Filename>\r
+ <Filename SupArchList="X64">x86DisablePaging64.c</Filename>\r
+ <Filename SupArchList="X64">x86EnablePaging32.c</Filename>\r
+ <Filename SupArchList="X64">x86EnablePaging64.c</Filename>\r
+ <Filename SupArchList="X64">x86FxRestore.c</Filename>\r
+ <Filename SupArchList="X64">x86FxSave.c</Filename>\r
+ <Filename SupArchList="X64">x86GetInterruptState.c</Filename>\r
+ <Filename SupArchList="X64">x86MemoryFence.c</Filename>\r
+ <Filename SupArchList="X64">x86Msr.c</Filename>\r
+ <Filename SupArchList="X64">x86ReadGdtr.c</Filename>\r
+ <Filename SupArchList="X64">x86ReadIdtr.c</Filename>\r
<Filename SupArchList="X64">x86Thunk.c</Filename>\r
+ <Filename SupArchList="X64">x86WriteGdtr.c</Filename>\r
+ <Filename SupArchList="X64">x86WriteIdtr.c</Filename>\r
<Filename SupArchList="X64">Unaligned.c</Filename>\r
<Filename SupArchList="X64">Math64.c</Filename>\r
<Filename SupArchList="X64">X64/Non-existing.c</Filename>\r
<Filename SupArchList="X64">X64/CpuId.asm</Filename>\r
<Filename SupArchList="X64">X64/CpuIdEx.asm</Filename>\r
<Filename SupArchList="X64">X64/ReadEflags.asm</Filename>\r
- <Filename SupArchList="X64">X64/ReadMsr32.asm</Filename>\r
<Filename SupArchList="X64">X64/ReadMsr64.asm</Filename>\r
- <Filename SupArchList="X64">X64/WriteMsr32.asm</Filename>\r
<Filename SupArchList="X64">X64/WriteMsr64.asm</Filename>\r
<Filename SupArchList="X64">X64/ReadCr0.asm</Filename>\r
<Filename SupArchList="X64">X64/ReadCr2.asm</Filename>\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
.global _InternalMathARShiftU64\r
-_InternalMathARShiftU64: \r
- movb 12(%esp),%cl\r
- movl 8(%esp),%eax\r
+\r
+#------------------------------------------------------------------------------\r
+# UINT64\r
+# EFIAPI\r
+# InternalMathARShiftU64 (\r
+# IN UINT64 Operand,\r
+# IN UINTN Count\r
+# );\r
+#------------------------------------------------------------------------------\r
+_InternalMathARShiftU64:\r
+ movb 12(%esp), %cl\r
+ movl 8(%esp), %eax\r
cltd\r
- testb $32,%cl\r
+ testb $32, %cl\r
cmovz %eax, %edx\r
cmovz 4(%esp), %eax\r
- shrdl %cl,%edx,%eax\r
- sar %cl,%edx\r
+ shrdl %cl, %edx, %eax\r
+ sar %cl, %edx\r
ret\r
-\r
-\r
-\r
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; UINT64\r
+; EFIAPI\r
+; InternalMathARShiftU64 (\r
+; IN UINT64 Operand,\r
+; IN UINTN Count\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalMathARShiftU64 PROC\r
mov cl, [esp + 12]\r
mov eax, [esp + 8]\r
ret\r
InternalMathARShiftU64 ENDP\r
\r
- END
\ No newline at end of file
+ END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _CpuBreakpoint\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _CpuBreakpoint\r
-_CpuBreakpoint: \r
+_CpuBreakpoint:\r
int $3\r
ret\r
-\r
-\r
-\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _CpuFlushTlb\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _CpuFlushTlb\r
-_CpuFlushTlb: \r
+_CpuFlushTlb:\r
movl %cr3, %eax\r
movl %eax, %cr3\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_CpuFlushTlb PROC\r
+CpuFlushTlb PROC\r
mov eax, cr3\r
mov cr3, eax\r
ret\r
-_CpuFlushTlb ENDP\r
+CpuFlushTlb ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
+.globl _AsmCpuid\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# OUT UINT32 *RegisterOutEdx OPTIONAL\r
# )\r
#------------------------------------------------------------------------------\r
-.globl _AsmCpuid\r
_AsmCpuid:\r
- pushl %ebx\r
- pushl %ebp\r
+ push %ebx\r
+ push %ebp\r
movl %esp, %ebp\r
movl 12(%ebp), %eax\r
cpuid\r
- pushl %ecx\r
+ push %ecx\r
movl 16(%ebp), %ecx\r
jecxz L1\r
movl %eax, (%ecx)\r
-L1: \r
+L1:\r
movl 20(%ebp), %ecx\r
jecxz L2\r
movl %ebx, (%ecx)\r
-L2: \r
+L2:\r
movl 24(%ebp), %ecx\r
jecxz L3\r
popl (%ecx)\r
-L3: \r
+L3:\r
movl 28(%ebp), %ecx\r
jecxz L4\r
movl %edx, (%ecx)\r
-L4: \r
+L4:\r
movl 12(%ebp), %eax\r
leave\r
- popl %ebx\r
+ pop %ebx\r
ret\r
-\r
-\r
; OUT UINT32 *RegisterOutEbx OPTIONAL,\r
; OUT UINT32 *RegisterOutEcx OPTIONAL,\r
; OUT UINT32 *RegisterOutEdx OPTIONAL\r
-; )\r
+; );\r
;------------------------------------------------------------------------------\r
AsmCpuid PROC USES ebx\r
push ebp\r
#
#------------------------------------------------------------------------------
- .686:
- .code:
+ .686:
+ .code:
#------------------------------------------------------------------------------
# UINT32
.globl _AsmCpuidEx
_AsmCpuidEx:
push %ebx
- pushl %ebp
+ push %ebp
movl %esp, %ebp
movl 12(%ebp), %eax
movl 16(%ebp), %ecx
cpuid
- pushl %ecx
+ push %ecx
movl 20(%ebp), %ecx
jecxz L1
movl %eax, (%ecx)
-L1:
+L1:
movl 24(%ebp), %ecx
jecxz L2
movl %ebx, (%ecx)
-L2:
+L2:
movl 28(%ebp), %ecx
jecxz L3
popl (%ecx)
-L3:
+L3:
movl 32(%ebp), %edx
jecxz L4
movl %edx, (%ecx)
-L4:
+L4:
movl 12(%ebp), %eax
leave
pop %ebx
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _CpuPause\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _CpuPause\r
-_CpuPause: \r
+_CpuPause:\r
pause\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.686\r
- .model flat\r
+ .model flat,C\r
.xmm\r
.code\r
\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_CpuPause PROC\r
+CpuPause PROC\r
pause\r
ret\r
-_CpuPause ENDP\r
+CpuPause ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _CpuSleep\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _CpuSleep\r
-_CpuSleep: \r
+_CpuSleep:\r
hlt\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_CpuSleep PROC\r
+CpuSleep PROC\r
hlt\r
ret\r
-_CpuSleep ENDP\r
+CpuSleep ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _DisableInterrupts\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _DisableInterrupts\r
-_DisableInterrupts: \r
+_DisableInterrupts:\r
cli\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_DisableInterrupts PROC\r
+DisableInterrupts PROC\r
cli\r
ret\r
-_DisableInterrupts ENDP\r
+DisableInterrupts ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _InternalX86DisablePaging32\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# IN VOID *NewStack\r
# );\r
#------------------------------------------------------------------------------\r
-.global _InternalX86DisablePaging32\r
-_InternalX86DisablePaging32: \r
- movl 4(%esp),%ebx\r
- movl 8(%esp),%ecx\r
- movl 12(%esp),%edx\r
+_InternalX86DisablePaging32:\r
+ movl 4(%esp), %ebx\r
+ movl 8(%esp), %ecx\r
+ movl 12(%esp), %edx\r
pushfl\r
- popl %edi\r
+ pop %edi\r
cli\r
movl %cr0, %eax\r
- btrl $31,%eax\r
- movl 16(%esp),%esp\r
+ btrl $31, %eax\r
+ movl 16(%esp), %esp\r
movl %eax, %cr0\r
- pushl %edi\r
+ push %edi\r
popfl\r
- pushl %edx\r
- pushl %ecx\r
+ push %edx\r
+ push %ecx\r
call *%ebx\r
jmp .\r
-\r
-\r
-\r
mov ecx, [esp + 8]\r
mov edx, [esp + 12]\r
pushfd\r
- pop edi\r
+ pop edi ; save EFLAGS to edi\r
cli\r
mov eax, cr0\r
btr eax, 31\r
mov esp, [esp + 16]\r
mov cr0, eax\r
push edi\r
- popfd\r
+ popfd ; restore EFLAGS from edi\r
push edx\r
push ecx\r
call ebx\r
- jmp $\r
+ jmp $ ; EntryPoint() should not return\r
InternalX86DisablePaging32 ENDP\r
\r
END\r
#
#------------------------------------------------------------------------------
- .386:
- .code:
-
.global _InternalMathDivU64x32
+
+#------------------------------------------------------------------------------
+# UINT64
+# EFIAPI
+# InternalMathDivU64x32 (
+# IN UINT64 Dividend,
+# IN UINT32 Divisor
+# );
+#------------------------------------------------------------------------------
_InternalMathDivU64x32:
- movl 8(%esp),%eax
- movl 12(%esp),%ecx
- xorl %edx,%edx
+ movl 8(%esp), %eax
+ movl 12(%esp), %ecx
+ xorl %edx, %edx
divl %ecx
- pushl %eax
- movl 8(%esp),%eax
+ push %eax
+ movl 8(%esp), %eax
divl %ecx
- popl %edx
+ pop %edx
ret
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; UINT64\r
+; EFIAPI\r
+; InternalMathDivU64x32 (\r
+; IN UINT64 Dividend,\r
+; IN UINT32 Divisor\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalMathDivU64x32 PROC\r
mov eax, [esp + 8]\r
mov ecx, [esp + 12]\r
xor edx, edx\r
div ecx\r
- push eax\r
+ push eax ; save quotient on stack\r
mov eax, [esp + 8]\r
div ecx\r
- pop edx\r
+ pop edx ; restore high-order dword of the quotient\r
ret\r
InternalMathDivU64x32 ENDP\r
\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
.global _InternalMathDivRemU64x32\r
-_InternalMathDivRemU64x32: \r
- movl 12(%esp),%ecx\r
- movl 8(%esp),%eax\r
- xorl %edx,%edx\r
+\r
+#------------------------------------------------------------------------------\r
+# UINT64\r
+# EFIAPI\r
+# InternalMathDivRemU64x32 (\r
+# IN UINT64 Dividend,\r
+# IN UINT32 Divisor,\r
+# OUT UINT32 *Remainder\r
+# );\r
+#------------------------------------------------------------------------------\r
+_InternalMathDivRemU64x32:\r
+ movl 12(%esp), %ecx\r
+ movl 8(%esp), %eax\r
+ xorl %edx, %edx\r
divl %ecx\r
- pushl %eax\r
- movl 8(%esp),%eax\r
+ push %eax\r
+ movl 8(%esp), %eax\r
divl %ecx\r
- movl 20(%esp),%ecx\r
+ movl 20(%esp), %ecx\r
jecxz L1\r
- movl %edx,(%ecx)\r
-L1: \r
- popl %edx\r
+ movl %edx, (%ecx)\r
+L1:\r
+ pop %edx\r
ret\r
-\r
-\r
-\r
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; UINT64\r
+; EFIAPI\r
+; InternalMathDivRemU64x32 (\r
+; IN UINT64 Dividend,\r
+; IN UINT32 Divisor,\r
+; OUT UINT32 *Remainder\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalMathDivRemU64x32 PROC\r
mov ecx, [esp + 12]\r
mov eax, [esp + 8]\r
mov eax, [esp + 8]\r
div ecx\r
mov ecx, [esp + 20]\r
- jecxz @F\r
+ jecxz @F ; abandon remainder if Remainder == NULL\r
mov [ecx], edx\r
@@:\r
pop edx\r
#\r
#------------------------------------------------------------------------------\r
\r
+.global _InternalMathDivRemU64x32, _InternalMathDivRemU64x64\r
\r
-\r
- \r
-\r
-.extern _InternalMathDivRemU64x32\r
-\r
-.global _InternalMathDivRemU64x64\r
-_InternalMathDivRemU64x64: \r
- movl 16(%esp),%ecx\r
- testl %ecx,%ecx\r
- jnz _DivRemU64x64\r
- movl 20(%esp),%ecx\r
+#------------------------------------------------------------------------------\r
+# UINT64\r
+# EFIAPI\r
+# InternalMathDivRemU64x64 (\r
+# IN UINT64 Dividend,\r
+# IN UINT64 Divisor,\r
+# OUT UINT64 *Remainder OPTIONAL\r
+# );\r
+#------------------------------------------------------------------------------\r
+_InternalMathDivRemU64x64:\r
+ movl 16(%esp), %ecx\r
+ testl %ecx, %ecx\r
+ jnz Hard\r
+ movl 20(%esp), %ecx\r
jecxz L1\r
- and $0,4(%ecx)\r
- movl %ecx,16(%esp)\r
-L1: \r
+ and $0, 4(%ecx)\r
+ movl %ecx, 16(%esp)\r
+L1:\r
jmp _InternalMathDivRemU64x32\r
-\r
-\r
-.global _DivRemU64x64\r
-_DivRemU64x64:\r
- push %ebx \r
- push %esi \r
- push %edi \r
+Hard:\r
+ push %ebx\r
+ push %esi\r
+ push %edi\r
mov 20(%esp), %edx\r
mov 16(%esp), %eax\r
- movl %edx,%edi\r
- movl %eax,%esi\r
+ movl %edx, %edi\r
+ movl %eax, %esi\r
mov 24(%esp), %ebx\r
-L2: \r
+L2:\r
shrl %edx\r
- rcrl $1,%eax\r
- shrdl $1,%ecx,%ebx\r
+ rcrl $1, %eax\r
+ shrdl $1, %ecx, %ebx\r
shrl %ecx\r
jnz L2\r
divl %ebx\r
- movl %eax,%ebx\r
- movl 28(%esp),%ecx\r
+ movl %eax, %ebx\r
+ movl 28(%esp), %ecx\r
mull 24(%esp)\r
- imull %ebx,%ecx\r
- addl %ecx,%edx\r
+ imull %ebx, %ecx\r
+ addl %ecx, %edx\r
mov 32(%esp), %ecx\r
jc TooLarge\r
- cmpl %edx,%edi\r
+ cmpl %edx, %edi\r
ja Correct\r
jb TooLarge\r
- cmpl %eax,%esi\r
+ cmpl %eax, %esi\r
jae Correct\r
-TooLarge: \r
+TooLarge:\r
decl %ebx\r
jecxz Return\r
sub 24(%esp), %eax\r
sbb 28(%esp), %edx\r
-Correct: \r
+Correct:\r
jecxz Return\r
- subl %eax,%esi\r
- sbbl %edx,%edi\r
- movl %esi,(%ecx)\r
- movl %edi,4(%ecx)\r
-Return: \r
- movl %ebx,%eax\r
- xorl %edx,%edx\r
- push %edi \r
- push %esi \r
- push %ebx \r
+ subl %eax, %esi\r
+ sbbl %edx, %edi\r
+ movl %esi, (%ecx)\r
+ movl %edi, 4(%ecx)\r
+Return:\r
+ movl %ebx, %eax\r
+ xorl %edx, %edx\r
+ pop %edi\r
+ pop %esi\r
+ pop %ebx\r
ret\r
\r
EXTERN InternalMathDivRemU64x32:PROC\r
\r
+;------------------------------------------------------------------------------\r
+; UINT64\r
+; EFIAPI\r
+; InternalMathDivRemU64x64 (\r
+; IN UINT64 Dividend,\r
+; IN UINT64 Divisor,\r
+; OUT UINT64 *Remainder OPTIONAL\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalMathDivRemU64x64 PROC\r
mov ecx, [esp + 16]\r
test ecx, ecx\r
- jnz _@DivRemU64x64\r
+ jnz _@DivRemU64x64 ; call _@DivRemU64x64 if Divisor > 2^32\r
mov ecx, [esp + 20]\r
jecxz @F\r
and dword ptr [ecx + 4], 0\r
\r
_@DivRemU64x64 PROC USES ebx esi edi\r
mov edx, dword ptr [esp + 20]\r
- mov eax, dword ptr [esp + 16]\r
+ mov eax, dword ptr [esp + 16] ; edx:eax <- dividend\r
mov edi, edx\r
- mov esi, eax\r
- mov ebx, dword ptr [esp + 24]\r
+ mov esi, eax ; edi:esi <- dividend\r
+ mov ebx, dword ptr [esp + 24] ; ecx:ebx <- divisor\r
@@:\r
shr edx, 1\r
rcr eax, 1\r
shr ecx, 1\r
jnz @B\r
div ebx\r
- mov ebx, eax\r
+ mov ebx, eax ; ebx <- quotient\r
mov ecx, [esp + 28]\r
mul dword ptr [esp + 24]\r
imul ecx, ebx\r
add edx, ecx\r
mov ecx, dword ptr [esp + 32]\r
- jc @TooLarge\r
- cmp edi, edx\r
+ jc @TooLarge ; product > 2^64\r
+ cmp edi, edx ; compare high 32 bits\r
ja @Correct\r
- jb @TooLarge\r
+ jb @TooLarge ; product > dividend\r
cmp esi, eax\r
- jae @Correct\r
+ jae @Correct ; product <= dividend\r
@TooLarge:\r
- dec ebx\r
- jecxz @Return\r
+ dec ebx ; adjust quotient by -1\r
+ jecxz @Return ; return if Remainder == NULL\r
sub eax, dword ptr [esp + 24]\r
sbb edx, dword ptr [esp + 28]\r
@Correct:\r
jecxz @Return\r
sub esi, eax\r
- sbb edi, edx\r
+ sbb edi, edx ; edi:esi <- remainder\r
mov [ecx], esi\r
mov [ecx + 4], edi\r
@Return:\r
- mov eax, ebx\r
+ mov eax, ebx ; eax <- quotient\r
xor edx, edx\r
ret\r
_@DivRemU64x64 ENDP\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _EnableDisableInterrupts\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _EnableDisableInterrupts\r
-_EnableDisableInterrupts: \r
+_EnableDisableInterrupts:\r
sti\r
cli\r
ret\r
-\r
-\r
-\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _EnableInterrupts\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _EnableInterrupts\r
-_EnableInterrupts: \r
+_EnableInterrupts:\r
sti\r
ret\r
-\r
-\r
-\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _InternalX86EnablePaging32\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# IN VOID *NewStack\r
# );\r
#------------------------------------------------------------------------------\r
-.global _InternalX86EnablePaging32\r
-_InternalX86EnablePaging32: \r
- movl 4(%esp),%ebx\r
- movl 8(%esp),%ecx\r
- movl 12(%esp),%edx\r
+_InternalX86EnablePaging32:\r
+ movl 4(%esp), %ebx\r
+ movl 8(%esp), %ecx\r
+ movl 12(%esp), %edx\r
pushfl\r
- popl %edi\r
+ pop %edi\r
cli\r
movl %cr0, %eax\r
- btsl $31,%eax\r
- movl 16(%esp),%esp\r
+ btsl $31, %eax\r
+ movl 16(%esp), %esp\r
movl %eax, %cr0\r
- pushl %edi\r
+ push %edi\r
popfl\r
- pushl %edx\r
- pushl %ecx\r
+ push %edx\r
+ push %ecx\r
call *%ebx\r
jmp .\r
-\r
-\r
-\r
#\r
#------------------------------------------------------------------------------\r
\r
- \r
- #.MODEL flat\r
- \r
+.global _InternalX86EnablePaging64\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# IN UINT64 NewStack\r
# );\r
#------------------------------------------------------------------------------\r
-.global _InternalX86EnablePaging64\r
_InternalX86EnablePaging64:\r
cli\r
movl $LongStart, (%esp)\r
movl %eax, %cr0 # enable paging\r
lret\r
LongStart: # long mode starts here\r
- .byte 0x67,0x48\r
+ .byte 0x67, 0x48\r
movl (%esp), %ebx # mov rbx, [esp]\r
- .byte 0x67,0x48\r
+ .byte 0x67, 0x48\r
movl 8(%esp), %ecx # mov rcx, [esp + 8]\r
- .byte 0x67,0x48\r
+ .byte 0x67, 0x48\r
movl 0x10(%esp), %edx # mov rdx, [esp + 10h]\r
- .byte 0x67,0x48\r
+ .byte 0x67, 0x48\r
movl 0x18(%esp), %esp # mov rsp, [esp + 18h]\r
.byte 0x48\r
addl $0x-20, %esp # add rsp, -20h\r
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalX86EnablePaging64 (\r
+; IN UINT16 Cs,\r
+; IN UINT64 EntryPoint,\r
+; IN UINT64 Context1, OPTIONAL\r
+; IN UINT64 Context2, OPTIONAL\r
+; IN UINT64 NewStack\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalX86EnablePaging64 PROC\r
cli\r
- mov [esp], @F ; offset for far retf\r
+ mov [esp], @F ; offset for far retf, seg is the 1st arg\r
mov eax, cr4\r
or al, (1 SHL 5)\r
mov cr4, eax ; enable PAE\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _AsmFlushCacheLine\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# IN VOID *LinearAddress\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmFlushCacheLine\r
-_AsmFlushCacheLine: \r
- movl 4(%esp),%eax\r
+_AsmFlushCacheLine:\r
+ movl 4(%esp), %eax\r
clflush (%eax)\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586P\r
- .model flat\r
+ .model flat,C\r
.xmm\r
.code\r
\r
; IN VOID *LinearAddress\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmFlushCacheLine PROC\r
+AsmFlushCacheLine PROC\r
mov eax, [esp + 4]\r
clflush [eax]\r
ret\r
-_AsmFlushCacheLine ENDP\r
+AsmFlushCacheLine ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _InternalX86FxRestore\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# IN CONST IA32_FX_BUFFER *Buffer\r
# );\r
#------------------------------------------------------------------------------\r
-.global _InternalX86FxRestore\r
-_InternalX86FxRestore: \r
- movl 4(%esp),%eax\r
+_InternalX86FxRestore:\r
+ movl 4(%esp), %eax\r
fxrstor (%eax)\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
+ .586\r
.model flat,C\r
.xmm\r
.code\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _InternalX86FxSave\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# OUT IA32_FX_BUFFER *Buffer\r
# );\r
#------------------------------------------------------------------------------\r
-.global _InternalX86FxSave\r
-_InternalX86FxSave: \r
- movl 4(%esp),%eax\r
+_InternalX86FxSave:\r
+ movl 4(%esp), %eax\r
fxsave (%eax)\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
+ .586\r
.model flat,C\r
.xmm\r
.code\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _InternalSyncCompareExchange32\r
\r
#------------------------------------------------------------------------------\r
# UINT32\r
# IN UINT32 ExchangeValue\r
# );\r
#------------------------------------------------------------------------------\r
-.global _InternalSyncCompareExchange32\r
-_InternalSyncCompareExchange32: \r
- movl 4(%esp),%ecx\r
- movl 8(%esp),%eax\r
- movl 12(%esp),%edx\r
- lock cmpxchgl %edx,(%ecx)\r
+_InternalSyncCompareExchange32:\r
+ movl 4(%esp), %ecx\r
+ movl 8(%esp), %eax\r
+ movl 12(%esp), %edx\r
+ lock\r
+ cmpxchgl %edx, (%ecx)\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
; UINT32\r
; EFIAPI\r
-; InterlockedCompareExchange32 (\r
+; InternalSyncCompareExchange32 (\r
; IN UINT32 *Value,\r
; IN UINT32 CompareValue,\r
; IN UINT32 ExchangeValue\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _InternalSyncCompareExchange64\r
\r
#------------------------------------------------------------------------------\r
# UINT64\r
# IN UINT64 ExchangeValue\r
# );\r
#------------------------------------------------------------------------------\r
-.global _InternalSyncCompareExchange64\r
_InternalSyncCompareExchange64:\r
push %esi\r
push %ebx\r
- movl 12(%esp),%esi\r
- movl 16(%esp),%eax\r
- movl 20(%esp),%edx\r
- movl 24(%esp),%ebx\r
- movl 28(%esp),%ecx\r
- lock \r
+ movl 12(%esp), %esi\r
+ movl 16(%esp), %eax\r
+ movl 20(%esp), %edx\r
+ movl 24(%esp), %ebx\r
+ movl 28(%esp), %ecx\r
+ lock\r
cmpxchg8b (%esi)\r
pop %ebx\r
pop %esi\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
; UINT64\r
; EFIAPI\r
-; InterlockedCompareExchange64 (\r
+; InternalSyncCompareExchange64 (\r
; IN UINT64 *Value,\r
; IN UINT64 CompareValue,\r
; IN UINT64 ExchangeValue\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _InternalSyncDecrement\r
\r
#------------------------------------------------------------------------------\r
# UINT32\r
# IN UINT32 *Value\r
# );\r
#------------------------------------------------------------------------------\r
-.global _InternalSyncDecrement\r
-_InternalSyncDecrement: \r
- movl 4(%esp),%eax\r
- lock \r
- decl (%eax)\r
- movl (%eax),%eax\r
+_InternalSyncDecrement:\r
+ movl 4(%esp), %eax\r
+ lock\r
+ decl (%eax)\r
+ movl (%eax), %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
; UINT32\r
; EFIAPI\r
-; InterlockedDecrement (\r
+; InternalSyncDecrement (\r
; IN UINT32 *Value\r
; );\r
;------------------------------------------------------------------------------\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _InternalSyncIncrement\r
\r
#------------------------------------------------------------------------------\r
# UINT32\r
# IN UINT32 *Value\r
# );\r
#------------------------------------------------------------------------------\r
-.global _InternalSyncIncrement\r
-_InternalSyncIncrement: \r
- movl 4(%esp),%eax\r
- lock \r
- incl (%eax)\r
- movl (%eax),%eax\r
+_InternalSyncIncrement:\r
+ movl 4(%esp), %eax\r
+ lock\r
+ incl (%eax)\r
+ movl (%eax), %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
; UINT32\r
; EFIAPI\r
-; InterlockedIncrement (\r
+; InternalSyncIncrement (\r
; IN UINT32 *Value\r
; );\r
;------------------------------------------------------------------------------\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmInvd\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmInvd\r
-_AsmInvd: \r
+_AsmInvd:\r
invd\r
ret\r
-\r
-\r
-\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
.global _InternalMathLRotU64\r
+\r
+#------------------------------------------------------------------------------\r
+# UINT64\r
+# EFIAPI\r
+# InternalMathLRotU64 (\r
+# IN UINT64 Operand,\r
+# IN UINTN Count\r
+# );\r
+#------------------------------------------------------------------------------\r
_InternalMathLRotU64:\r
push %ebx\r
- movb 16(%esp),%cl\r
- movl 12(%esp),%edx\r
- movl 8(%esp),%eax\r
- shldl %cl,%edx,%ebx\r
- shldl %cl,%eax,%edx\r
- rorl %cl,%ebx\r
- shldl %cl,%ebx,%eax\r
- testb $32,%cl\r
+ movb 16(%esp), %cl\r
+ movl 12(%esp), %edx\r
+ movl 8(%esp), %eax\r
+ shldl %cl, %edx, %ebx\r
+ shldl %cl, %eax, %edx\r
+ rorl %cl, %ebx\r
+ shldl %cl, %ebx, %eax\r
+ testb $32, %cl\r
cmovnz %eax, %ecx\r
- cmovnz %edx, %eax \r
+ cmovnz %edx, %eax\r
cmovnz %ecx, %edx\r
pop %ebx\r
ret\r
-\r
-\r
-\r
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; UINT64\r
+; EFIAPI\r
+; InternalMathLRotU64 (\r
+; IN UINT64 Operand,\r
+; IN UINTN Count\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalMathLRotU64 PROC USES ebx\r
mov cl, [esp + 16]\r
mov edx, [esp + 12]\r
shld edx, eax, cl\r
ror ebx, cl\r
shld eax, ebx, cl\r
- test cl, 32\r
+ test cl, 32 ; Count >= 32?\r
cmovnz ecx, eax\r
cmovnz eax, edx\r
cmovnz edx, ecx\r
#
#------------------------------------------------------------------------------
- .686:
- #.MODEL flat,C
- .code:
-
.global _InternalMathLShiftU64
+
+#------------------------------------------------------------------------------
+# UINT64
+# EFIAPI
+# InternalMathLShiftU64 (
+# IN UINT64 Operand,
+# IN UINTN Count
+# );
+#------------------------------------------------------------------------------
_InternalMathLShiftU64:
movb 12(%esp), %cl
xorl %eax, %eax
testb $32, %cl
cmovz %edx, %eax
cmovz 0x8(%esp), %edx
- shld %cl,%eax,%edx
+ shld %cl, %eax, %edx
shl %cl, %eax
ret
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; UINT64\r
+; EFIAPI\r
+; InternalMathLShiftU64 (\r
+; IN UINT64 Operand,\r
+; IN UINTN Count\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalMathLShiftU64 PROC\r
mov cl, [esp + 12]\r
xor eax, eax\r
mov edx, [esp + 4]\r
- test cl, 32\r
+ test cl, 32 ; Count >= 32?\r
cmovz eax, edx\r
cmovz edx, [esp + 8]\r
shld edx, eax, cl\r
#
#------------------------------------------------------------------------------
- .386:
- .code:
+.global _InternalLongJump
-.globl _InternalLongJump
+#------------------------------------------------------------------------------
+# VOID
+# EFIAPI
+# InternalLongJump (
+# IN BASE_LIBRARY_JUMP_BUFFER *JumpBuffer,
+# IN UINTN Value
+# );
+#------------------------------------------------------------------------------
_InternalLongJump:
- popl %eax
- popl %edx
- popl %eax
+ pop %eax
+ pop %edx
+ pop %eax
movl (%edx), %ebx
movl 4(%edx), %esi
movl 8(%edx), %edi
movl 12(%edx), %ebp
movl 16(%edx), %esp
jmp *20(%edx)
-#InternalLongJump ENDP
-
-
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalLongJump (\r
+; IN BASE_LIBRARY_JUMP_BUFFER *JumpBuffer,\r
+; IN UINTN Value\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalLongJump PROC\r
- pop eax\r
- pop edx\r
- pop eax\r
+ pop eax ; skip return address\r
+ pop edx ; edx <- JumpBuffer\r
+ pop eax ; eax <- Value\r
mov ebx, [edx]\r
mov esi, [edx + 4]\r
mov edi, [edx + 8]\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
.global _InternalMathModU64x32\r
-_InternalMathModU64x32: \r
- movl 8(%esp),%eax\r
- movl 12(%esp),%ecx\r
- xorl %edx,%edx\r
+\r
+#------------------------------------------------------------------------------\r
+# UINT32\r
+# EFIAPI\r
+# InternalMathModU64x32 (\r
+# IN UINT64 Dividend,\r
+# IN UINT32 Divisor\r
+# );\r
+#------------------------------------------------------------------------------\r
+_InternalMathModU64x32:\r
+ movl 8(%esp), %eax\r
+ movl 12(%esp), %ecx\r
+ xorl %edx, %edx\r
divl %ecx\r
- movl 4(%esp),%eax\r
+ movl 4(%esp), %eax\r
divl %ecx\r
- movl %edx,%eax\r
+ movl %edx, %eax\r
ret\r
-\r
-\r
-\r
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; UINT32\r
+; EFIAPI\r
+; InternalMathModU64x32 (\r
+; IN UINT64 Dividend,\r
+; IN UINT32 Divisor\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalMathModU64x32 PROC\r
mov eax, [esp + 8]\r
mov ecx, [esp + 12]\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmMonitor\r
\r
#------------------------------------------------------------------------------\r
# UINT64\r
# IN UINTN Edx\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmMonitor\r
-_AsmMonitor: \r
- movl 4(%esp),%eax\r
- movl 8(%esp),%ecx\r
- movl 12(%esp),%edx\r
- monitor %eax,%ecx,%edx\r
+_AsmMonitor:\r
+ movl 4(%esp), %eax\r
+ movl 8(%esp), %ecx\r
+ movl 12(%esp), %edx\r
+ monitor %eax, %ecx, %edx\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.686\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINT64\r
+; UINTN\r
; EFIAPI\r
; AsmMonitor (\r
; IN UINTN Eax,\r
; IN UINTN Edx\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmMonitor PROC\r
+AsmMonitor PROC\r
mov eax, [esp + 4]\r
mov ecx, [esp + 8]\r
mov edx, [esp + 12]\r
- DB 0fh, 1, 0c8h\r
+ DB 0fh, 1, 0c8h ; monitor\r
ret\r
-_AsmMonitor ENDP\r
+AsmMonitor ENDP\r
\r
END\r
#
#------------------------------------------------------------------------------
- .386:
- .code:
.global _InternalMathMultU64x32
+
+#------------------------------------------------------------------------------
+# UINT64
+# EFIAPI
+# InternalMathMultU64x32 (
+# IN UINT64 Multiplicand,
+# IN UINT32 Multiplier
+# );
+#------------------------------------------------------------------------------
_InternalMathMultU64x32:
- movl 12(%esp),%ecx
- movl %ecx,%eax
- imull 8(%esp),%ecx
- mull 0x4(%esp)
- addl %ecx,%edx
+ movl 12(%esp), %ecx
+ movl %ecx, %eax
+ imull 8(%esp), %ecx
+ mull 0x4(%esp)
+ addl %ecx, %edx
ret
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; UINT64\r
+; EFIAPI\r
+; InternalMathMultU64x32 (\r
+; IN UINT64 Multiplicand,\r
+; IN UINT32 Multiplier\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalMathMultU64x32 PROC\r
mov ecx, [esp + 12]\r
mov eax, ecx\r
- imul ecx, [esp + 8]\r
+ imul ecx, [esp + 8] ; overflow not detectable\r
mul dword ptr [esp + 4]\r
add edx, ecx\r
ret\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
.global _InternalMathMultU64x64\r
-_InternalMathMultU64x64: \r
+\r
+#------------------------------------------------------------------------------\r
+# UINT64\r
+# EFIAPI\r
+# InternalMathMultU64x64 (\r
+# IN UINT64 Multiplicand,\r
+# IN UINT64 Multiplier\r
+# );\r
+#------------------------------------------------------------------------------\r
+_InternalMathMultU64x64:\r
push %ebx\r
- movl 8(%esp),%ebx\r
- movl 16(%esp),%edx\r
- movl %ebx,%ecx\r
- movl %edx,%eax\r
- imull 20(%esp),%ebx\r
- imull 12(%esp),%edx\r
- addl %edx,%ebx\r
+ movl 8(%esp), %ebx\r
+ movl 16(%esp), %edx\r
+ movl %ebx, %ecx\r
+ movl %edx, %eax\r
+ imull 20(%esp), %ebx\r
+ imull 12(%esp), %edx\r
+ addl %edx, %ebx\r
mull %ecx\r
- addl %ebx,%edx\r
+ addl %ebx, %edx\r
pop %ebx\r
ret\r
-\r
-\r
-\r
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; UINT64\r
+; EFIAPI\r
+; InternalMathMultU64x64 (\r
+; IN UINT64 Multiplicand,\r
+; IN UINT64 Multiplier\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalMathMultU64x64 PROC USES ebx\r
mov ebx, [esp + 8]\r
mov edx, [esp + 16]\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmMwait\r
\r
#------------------------------------------------------------------------------\r
# UINT64\r
# IN UINTN Ecx\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmMwait\r
-_AsmMwait: \r
- movl 4(%esp),%eax\r
- movl 8(%esp),%ecx\r
- mwait %eax,%ecx\r
+_AsmMwait:\r
+ movl 4(%esp), %eax\r
+ movl 8(%esp), %ecx\r
+ mwait %eax, %ecx\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.686\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINT64\r
+; UINTN\r
; EFIAPI\r
; AsmMwait (\r
; IN UINTN Eax,\r
; IN UINTN Ecx\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmMwait PROC\r
+AsmMwait PROC\r
mov eax, [esp + 4]\r
mov ecx, [esp + 8]\r
- DB 0fh, 1, 0c9h\r
+ DB 0fh, 1, 0c9h ; mwait\r
ret\r
-_AsmMwait ENDP\r
+AsmMwait ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
.global _InternalMathRRotU64\r
+\r
+#------------------------------------------------------------------------------\r
+# UINT64\r
+# EFIAPI\r
+# InternalMathRRotU64 (\r
+# IN UINT64 Operand,\r
+# IN UINTN Count\r
+# );\r
+#------------------------------------------------------------------------------\r
_InternalMathRRotU64:\r
push %ebx\r
- movb 16(%esp),%cl\r
- movl 8(%esp),%eax\r
- movl 12(%esp),%edx\r
- shrdl %cl,%eax,%ebx\r
- shrdl %cl,%edx,%eax\r
- roll %cl,%ebx\r
- shrdl %cl,%ebx,%edx\r
- testb $32,%cl\r
+ movb 16(%esp), %cl\r
+ movl 8(%esp), %eax\r
+ movl 12(%esp), %edx\r
+ shrdl %cl, %eax, %ebx\r
+ shrdl %cl, %edx, %eax\r
+ roll %cl, %ebx\r
+ shrdl %cl, %ebx, %edx\r
+ testb $32, %cl\r
cmovnz %eax, %ecx\r
cmovnz %edx, %eax\r
cmovnz %ecx, %edx\r
pop %ebx\r
ret\r
-\r
-\r
-\r
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; UINT64\r
+; EFIAPI\r
+; InternalMathRRotU64 (\r
+; IN UINT64 Operand,\r
+; IN UINTN Count\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalMathRRotU64 PROC USES ebx\r
mov cl, [esp + 16]\r
mov eax, [esp + 8]\r
shrd eax, edx, cl\r
rol ebx, cl\r
shrd edx, ebx, cl\r
- test cl, 32\r
+ test cl, 32 ; Count >= 32?\r
cmovnz ecx, eax\r
cmovnz eax, edx\r
cmovnz edx, ecx\r
#
#------------------------------------------------------------------------------
- .686:
- .code:
.global _InternalMathRShiftU64
+
+#------------------------------------------------------------------------------
+# UINT64
+# EFIAPI
+# InternalMathRShiftU64 (
+# IN UINT64 Operand,
+# IN UINTN Count
+# );
+#------------------------------------------------------------------------------
_InternalMathRShiftU64:
- movb 12(%esp),%cl
- xorl %edx,%edx
- movl 8(%esp),%eax
- testb $32,%cl
+ movb 12(%esp), %cl
+ xorl %edx, %edx
+ movl 8(%esp), %eax
+ testb $32, %cl
cmovz %eax, %edx
cmovz 0x4(%esp), %eax
- shrdl %cl,%edx,%eax
- shr %cl,%edx
+ shrdl %cl, %edx, %eax
+ shr %cl, %edx
ret
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; UINT64\r
+; EFIAPI\r
+; InternalMathRShiftU64 (\r
+; IN UINT64 Operand,\r
+; IN UINTN Count\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalMathRShiftU64 PROC\r
mov cl, [esp + 12]\r
xor edx, edx\r
ret\r
InternalMathRShiftU64 ENDP\r
\r
- END
\ No newline at end of file
+ END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadCr0\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadCr0\r
-_AsmReadCr0: \r
+_AsmReadCr0:\r
movl %cr0, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadCr0 PROC\r
+AsmReadCr0 PROC\r
mov eax, cr0\r
ret\r
-_AsmReadCr0 ENDP\r
+AsmReadCr0 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadCr2\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadCr2\r
-_AsmReadCr2: \r
+_AsmReadCr2:\r
movl %cr2, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadCr2 PROC\r
+AsmReadCr2 PROC\r
mov eax, cr2\r
ret\r
-_AsmReadCr2 ENDP\r
+AsmReadCr2 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadCr3\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadCr3\r
-_AsmReadCr3: \r
+_AsmReadCr3:\r
movl %cr3, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadCr3 PROC\r
+AsmReadCr3 PROC\r
mov eax, cr3\r
ret\r
-_AsmReadCr3 ENDP\r
+AsmReadCr3 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadCr4\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadCr4\r
-_AsmReadCr4: \r
+_AsmReadCr4:\r
movl %cr4, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadCr4 PROC\r
+AsmReadCr4 PROC\r
mov eax, cr4\r
ret\r
-_AsmReadCr4 ENDP\r
+AsmReadCr4 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadCs\r
\r
#------------------------------------------------------------------------------\r
# UINT16\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadCs\r
-_AsmReadCs: \r
- movw %cs,%ax\r
+_AsmReadCs:\r
+ movl %cs, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadCs PROC\r
- mov ax, cs\r
+AsmReadCs PROC\r
+ mov eax, cs\r
ret\r
-_AsmReadCs ENDP\r
+AsmReadCs ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
- \r
+.global _AsmReadDr0\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadDr0\r
-_AsmReadDr0: \r
+_AsmReadDr0:\r
movl %dr0, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadDr0 PROC\r
+AsmReadDr0 PROC\r
mov eax, dr0\r
ret\r
-_AsmReadDr0 ENDP\r
+AsmReadDr0 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadDr1\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadDr1\r
-_AsmReadDr1: \r
+_AsmReadDr1:\r
movl %dr1, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadDr1 PROC\r
+AsmReadDr1 PROC\r
mov eax, dr1\r
ret\r
-_AsmReadDr1 ENDP\r
+AsmReadDr1 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadDr2\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadDr2\r
-_AsmReadDr2: \r
+_AsmReadDr2:\r
movl %dr2, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadDr2 PROC\r
+AsmReadDr2 PROC\r
mov eax, dr2\r
ret\r
-_AsmReadDr2 ENDP\r
+AsmReadDr2 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadDr3\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadDr3\r
-_AsmReadDr3: \r
+_AsmReadDr3:\r
movl %dr3, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadDr3 PROC\r
+AsmReadDr3 PROC\r
mov eax, dr3\r
ret\r
-_AsmReadDr3 ENDP\r
+AsmReadDr3 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadDr4\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadDr4\r
-_AsmReadDr4: \r
+_AsmReadDr4:\r
movl %dr4, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadDr4 PROC\r
+AsmReadDr4 PROC\r
DB 0fh, 21h, 0e0h\r
ret\r
-_AsmReadDr4 ENDP\r
+AsmReadDr4 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadDr5\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadDr5\r
-_AsmReadDr5: \r
+_AsmReadDr5:\r
movl %dr5, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadDr5 PROC\r
+AsmReadDr5 PROC\r
DB 0fh, 21h, 0e8h\r
ret\r
-_AsmReadDr5 ENDP\r
+AsmReadDr5 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadDr6\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadDr6\r
-_AsmReadDr6: \r
+_AsmReadDr6:\r
movl %dr6, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadDr6 PROC\r
+AsmReadDr6 PROC\r
mov eax, dr6\r
ret\r
-_AsmReadDr6 ENDP\r
+AsmReadDr6 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadDr7\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadDr7\r
-_AsmReadDr7: \r
+_AsmReadDr7:\r
movl %dr7, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadDr7 PROC\r
+AsmReadDr7 PROC\r
mov eax, dr7\r
ret\r
-_AsmReadDr7 ENDP\r
+AsmReadDr7 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadDs\r
\r
#------------------------------------------------------------------------------\r
# UINT16\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadDs\r
-_AsmReadDs: \r
- movw %ds,%ax\r
+_AsmReadDs:\r
+ movl %ds, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadDs PROC\r
- mov ax, ds\r
+AsmReadDs PROC\r
+ mov eax, ds\r
ret\r
-_AsmReadDs ENDP\r
+AsmReadDs ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadEflags\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadEflags\r
-_AsmReadEflags: \r
+_AsmReadEflags:\r
pushfl\r
- popl %eax\r
+ pop %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadEflags PROC\r
+AsmReadEflags PROC\r
pushfd\r
pop eax\r
ret\r
-_AsmReadEflags ENDP\r
+AsmReadEflags ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadEs\r
\r
#------------------------------------------------------------------------------\r
# UINT16\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadEs\r
-_AsmReadEs: \r
- movw %es,%ax\r
+_AsmReadEs:\r
+ movl %es, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadEs PROC\r
- mov ax, es\r
+AsmReadEs PROC\r
+ mov eax, es\r
ret\r
-_AsmReadEs ENDP\r
+AsmReadEs ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadFs\r
\r
#------------------------------------------------------------------------------\r
# UINT16\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadFs\r
-_AsmReadFs: \r
- movw %fs,%ax\r
+_AsmReadFs:\r
+ movl %fs, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadFs PROC\r
- mov ax, fs\r
+AsmReadFs PROC\r
+ mov eax, fs\r
ret\r
-_AsmReadFs ENDP\r
+AsmReadFs ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _InternalX86ReadGdtr\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# OUT IA32_DESCRIPTOR *Gdtr\r
# );\r
#------------------------------------------------------------------------------\r
-.global _InternalX86ReadGdtr\r
-_InternalX86ReadGdtr: \r
- movl 4(%esp),%eax\r
+_InternalX86ReadGdtr:\r
+ movl 4(%esp), %eax\r
sgdt (%eax)\r
ret\r
-\r
-\r
-\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadGs\r
\r
#------------------------------------------------------------------------------\r
# UINT16\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadGs\r
-_AsmReadGs: \r
- movw %gs,%ax\r
+_AsmReadGs:\r
+ movl %gs, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadGs PROC\r
- mov ax, gs\r
+AsmReadGs PROC\r
+ mov eax, gs\r
ret\r
-_AsmReadGs ENDP\r
+AsmReadGs ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _InternalX86ReadIdtr\r
\r
#------------------------------------------------------------------------------\r
# VOID\r
# OUT IA32_DESCRIPTOR *Idtr\r
# );\r
#------------------------------------------------------------------------------\r
-.global _InternalX86ReadIdtr\r
-_InternalX86ReadIdtr: \r
- movl 4(%esp),%eax\r
+_InternalX86ReadIdtr:\r
+ movl 4(%esp), %eax\r
sidt (%eax)\r
ret\r
-\r
-\r
-\r
.model flat,C\r
.code\r
\r
-InternalX86ReadIdtr PROC\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalX86ReadIdtr (\r
+; OUT IA32_DESCRIPTOR *Idtr\r
+; );\r
+;------------------------------------------------------------------------------\r
+InternalX86ReadIdtr PROC\r
mov eax, [esp + 4]\r
sidt fword ptr [eax]\r
ret\r
-InternalX86ReadIdtr ENDP\r
+InternalX86ReadIdtr ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadLdtr\r
\r
#------------------------------------------------------------------------------\r
# UINT16\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadLdtr\r
-_AsmReadLdtr: \r
+_AsmReadLdtr:\r
sldt %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadLdtr PROC\r
+AsmReadLdtr PROC\r
sldt ax\r
ret\r
-_AsmReadLdtr ENDP\r
+AsmReadLdtr ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _AsmReadMm0\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadMm0\r
-_AsmReadMm0: \r
- pushl %eax\r
- pushl %eax\r
- movq %mm0,(%esp)\r
- popl %eax\r
- popl %edx\r
+_AsmReadMm0:\r
+ push %eax\r
+ push %eax\r
+ movq %mm0, (%esp)\r
+ pop %eax\r
+ pop %edx\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm0 (\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadMm0 PROC\r
+AsmReadMm0 PROC\r
push eax\r
push eax\r
movq [esp], mm0\r
pop eax\r
pop edx\r
ret\r
-_AsmReadMm0 ENDP\r
+AsmReadMm0 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _AsmReadMm1\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadMm1\r
-_AsmReadMm1: \r
- pushl %eax\r
- pushl %eax\r
- movq %mm1,(%esp)\r
- popl %eax\r
- popl %edx\r
+_AsmReadMm1:\r
+ push %eax\r
+ push %eax\r
+ movq %mm1, (%esp)\r
+ pop %eax\r
+ pop %edx\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm1 (\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadMm1 PROC\r
+AsmReadMm1 PROC\r
push eax\r
push eax\r
movq [esp], mm1\r
pop eax\r
pop edx\r
ret\r
-_AsmReadMm1 ENDP\r
+AsmReadMm1 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _AsmReadMm2\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadMm2\r
-_AsmReadMm2: \r
- pushl %eax\r
- pushl %eax\r
- movq %mm2,(%esp)\r
- popl %eax\r
- popl %edx\r
+_AsmReadMm2:\r
+ push %eax\r
+ push %eax\r
+ movq %mm2, (%esp)\r
+ pop %eax\r
+ pop %edx\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm2 (\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadMm2 PROC\r
+AsmReadMm2 PROC\r
push eax\r
push eax\r
movq [esp], mm2\r
pop eax\r
pop edx\r
ret\r
-_AsmReadMm2 ENDP\r
+AsmReadMm2 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _AsmReadMm3\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadMm3\r
-_AsmReadMm3: \r
- pushl %eax\r
- pushl %eax\r
- movq %mm3,(%esp)\r
- popl %eax\r
- popl %edx\r
+_AsmReadMm3:\r
+ push %eax\r
+ push %eax\r
+ movq %mm3, (%esp)\r
+ pop %eax\r
+ pop %edx\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm3 (\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadMm3 PROC\r
+AsmReadMm3 PROC\r
push eax\r
push eax\r
movq [esp], mm3\r
pop eax\r
pop edx\r
ret\r
-_AsmReadMm3 ENDP\r
+AsmReadMm3 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _AsmReadMm4\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadMm4\r
-_AsmReadMm4: \r
- pushl %eax\r
- pushl %eax\r
- movq %mm4,(%esp)\r
- popl %eax\r
- popl %edx\r
+_AsmReadMm4:\r
+ push %eax\r
+ push %eax\r
+ movq %mm4, (%esp)\r
+ pop %eax\r
+ pop %edx\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm4 (\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadMm4 PROC\r
+AsmReadMm4 PROC\r
push eax\r
push eax\r
movq [esp], mm4\r
pop eax\r
pop edx\r
ret\r
-_AsmReadMm4 ENDP\r
+AsmReadMm4 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _AsmReadMm5\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadMm5\r
-_AsmReadMm5: \r
- pushl %eax\r
- pushl %eax\r
- movq %mm5,(%esp)\r
- popl %eax\r
- popl %edx\r
+_AsmReadMm5:\r
+ push %eax\r
+ push %eax\r
+ movq %mm5, (%esp)\r
+ pop %eax\r
+ pop %edx\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm5 (\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadMm5 PROC\r
+AsmReadMm5 PROC\r
push eax\r
push eax\r
movq [esp], mm5\r
pop eax\r
pop edx\r
ret\r
-_AsmReadMm5 ENDP\r
+AsmReadMm5 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _AsmReadMm6\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadMm6\r
-_AsmReadMm6: \r
- pushl %eax\r
- pushl %eax\r
- movq %mm6,(%esp)\r
- popl %eax\r
- popl %edx\r
+_AsmReadMm6:\r
+ push %eax\r
+ push %eax\r
+ movq %mm6, (%esp)\r
+ pop %eax\r
+ pop %edx\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm6 (\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadMm6 PROC\r
+AsmReadMm6 PROC\r
push eax\r
push eax\r
movq [esp], mm6\r
pop eax\r
pop edx\r
ret\r
-_AsmReadMm6 ENDP\r
+AsmReadMm6 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
+.global _AsmReadMm7\r
\r
#------------------------------------------------------------------------------\r
# UINTN\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadMm7\r
-_AsmReadMm7: \r
- pushl %eax\r
- pushl %eax\r
- movq %mm7,(%esp)\r
- popl %eax\r
- popl %edx\r
+_AsmReadMm7:\r
+ push %eax\r
+ push %eax\r
+ movq %mm7, (%esp)\r
+ pop %eax\r
+ pop %edx\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm7 (\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadMm7 PROC\r
+AsmReadMm7 PROC\r
push eax\r
push eax\r
movq [esp], mm7\r
pop eax\r
pop edx\r
ret\r
-_AsmReadMm7 ENDP\r
+AsmReadMm7 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
-#------------------------------------------------------------------------------\r
-# UINT32\r
-# EFIAPI\r
-# AsmReadMsr32 (\r
-# IN UINT32 Index\r
-# );\r
-#------------------------------------------------------------------------------\r
-.global _AsmReadMsr32\r
-_AsmReadMsr32: \r
- #\r
- # AsmReadMsr32 shares the same implementation with AsmReadMsr64 and thus no\r
- # code inside this function\r
- #\r
-\r
+.global _AsmReadMsr64\r
\r
#------------------------------------------------------------------------------\r
# UINT64\r
# EFIAPI\r
# AsmReadMsr64 (\r
-# IN UINT64 Index\r
+# IN UINT32 Index\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadMsr64\r
-_AsmReadMsr64: \r
- movl 4(%esp),%ecx\r
+_AsmReadMsr64:\r
+ movl 4(%esp), %ecx\r
rdmsr\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
+ .586p\r
+ .model flat,C\r
.code\r
\r
-;------------------------------------------------------------------------------\r
-; UINT32\r
-; EFIAPI\r
-; AsmReadMsr32 (\r
-; IN UINT32 Index\r
-; );\r
-;------------------------------------------------------------------------------\r
-_AsmReadMsr32 PROC\r
- ;\r
- ; AsmReadMsr32 shares the same implementation with AsmReadMsr64 and thus no\r
- ; code inside this function\r
- ;\r
-_AsmReadMsr32 ENDP\r
-\r
;------------------------------------------------------------------------------\r
; UINT64\r
; EFIAPI\r
; IN UINT64 Index\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadMsr64 PROC\r
+AsmReadMsr64 PROC\r
mov ecx, [esp + 4]\r
rdmsr\r
ret\r
-_AsmReadMsr64 ENDP\r
+AsmReadMsr64 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadPmc\r
\r
#------------------------------------------------------------------------------\r
# UINT64\r
# IN UINT32 PmcIndex\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadPmc\r
-_AsmReadPmc: \r
- movl 4(%esp),%ecx\r
+_AsmReadPmc:\r
+ movl 4(%esp), %ecx\r
rdpmc\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
+ .586p\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; IN UINT32 PmcIndex\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadPmc PROC\r
+AsmReadPmc PROC\r
mov ecx, [esp + 4]\r
rdpmc\r
ret\r
-_AsmReadPmc ENDP\r
+AsmReadPmc ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadSs\r
\r
#------------------------------------------------------------------------------\r
# UINT16\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadSs\r
-_AsmReadSs: \r
- movw %ss,%ax\r
+_AsmReadSs:\r
+ movl %ss, %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadSs PROC\r
- mov ax, ss\r
+AsmReadSs PROC\r
+ mov eax, ss\r
ret\r
-_AsmReadSs ENDP\r
+AsmReadSs ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadTr\r
\r
#------------------------------------------------------------------------------\r
# UINT16\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadTr\r
-_AsmReadTr: \r
+_AsmReadTr:\r
str %eax\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadTr PROC\r
+AsmReadTr PROC\r
str ax\r
ret\r
-_AsmReadTr ENDP\r
+AsmReadTr ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
+.global _AsmReadTsc\r
\r
#------------------------------------------------------------------------------\r
# UINT64\r
# VOID\r
# );\r
#------------------------------------------------------------------------------\r
-.global _AsmReadTsc\r
-_AsmReadTsc: \r
+_AsmReadTsc:\r
rdtsc\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
+ .586p\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmReadTsc PROC\r
+AsmReadTsc PROC\r
rdtsc\r
ret\r
-_AsmReadTsc ENDP\r
+AsmReadTsc ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
.global _SetJump, _InternalAssertJumpBuffer\r
-_SetJump: \r
+\r
+#------------------------------------------------------------------------------\r
+# UINTN\r
+# EFIAPI\r
+# SetJump (\r
+# OUT BASE_LIBRARY_JUMP_BUFFER *JumpBuffer\r
+# );\r
+#------------------------------------------------------------------------------\r
+_SetJump:\r
pushl 0x4(%esp)\r
call _InternalAssertJumpBuffer\r
- popl %ecx\r
- popl %ecx\r
- movl (%esp),%edx\r
- movl %ebx,(%edx)\r
- movl %esi,4(%edx)\r
- movl %edi,8(%edx)\r
- movl %ebp,12(%edx)\r
- movl %esp,16(%edx)\r
- movl %ecx,20(%edx)\r
- xorl %eax,%eax\r
+ pop %ecx\r
+ pop %ecx\r
+ movl (%esp), %edx\r
+ movl %ebx, (%edx)\r
+ movl %esi, 4(%edx)\r
+ movl %edi, 8(%edx)\r
+ movl %ebp, 12(%edx)\r
+ movl %esp, 16(%edx)\r
+ movl %ecx, 20(%edx)\r
+ xorl %eax, %eax\r
jmp *%ecx\r
-\r
-\r
-\r
\r
InternalAssertJumpBuffer PROTO C\r
\r
+;------------------------------------------------------------------------------\r
+; UINTN\r
+; EFIAPI\r
+; SetJump (\r
+; OUT BASE_LIBRARY_JUMP_BUFFER *JumpBuffer\r
+; );\r
+;------------------------------------------------------------------------------\r
SetJump PROC\r
push [esp + 4]\r
call InternalAssertJumpBuffer\r
#------------------------------------------------------------------------------
+#------------------------------------------------------------------------------
+# UINT64
+# EFIAPI
+# InternalMathSwapBytes64 (
+# IN UINT64 Operand
+# );
+#------------------------------------------------------------------------------
.global _InternalMathSwapBytes64
_InternalMathSwapBytes64:
-
movl 8(%esp), %eax
movl 4(%esp), %edx
bswapl %eax
bswapl %edx
ret
-
-
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
+ .586\r
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; UINT64\r
+; EFIAPI\r
+; InternalMathSwapBytes64 (\r
+; IN UINT64 Operand\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalMathSwapBytes64 PROC\r
mov eax, [esp + 8]\r
mov edx, [esp + 4]\r
#\r
#------------------------------------------------------------------------------\r
\r
-.global _m16Start, _m16Size, _mThunk16Attr, _m16GdtrBase, _m16Gdt, _m16GdtrBase, _mTransition\r
+.global _m16Start, _m16Size, _mThunk16Attr, _m16Gdt, _m16GdtrBase, _mTransition\r
.global _InternalAsmThunk16\r
\r
-#THUNK_ATTRIBUTE_BIG_REAL_MODE EQU 1\r
-#THUNK_ATTRIBUTE_DISABLE_A20_MASK_INT_15 EQU 2\r
-#THUNK_ATTRIBUTE_DISABLE_A20_MASK_KBD_CTRL EQU 4\r
-\r
- .code: \r
-\r
-_m16Start: \r
+_m16Start:\r
\r
SavedGdt: .space 6\r
\r
push %cs\r
.byte 0x66\r
call @Base1 # push eip\r
-@Base1: \r
+@Base1:\r
pushfw # pushfd actually\r
cli # disable interrupts\r
push %gs\r
push %es\r
push %ds\r
pushaw # pushad actually\r
- .byte 0x66,0xba # mov edx, imm32\r
+ .byte 0x66, 0xba # mov edx, imm32\r
_ThunkAttr: .space 4\r
testb $THUNK_ATTRIBUTE_DISABLE_A20_MASK_INT_15, %dl\r
jz @1\r
movl $0x15cd2401, %eax # mov ax, 2401h & int 15h\r
cli # disable interrupts\r
jnc @2\r
-@1: \r
+@1:\r
testb $THUNK_ATTRIBUTE_DISABLE_A20_MASK_KBD_CTRL, %dl\r
jz @2\r
inb $0x92, %al\r
orb $2, %al\r
outb %al, $0x92 # deactivate A20M#\r
-@2: \r
+@2:\r
movl %ss, %eax\r
.byte 0x67, 0x66, 0x8d, 0x6c, 0x24, 0x34, 0x66\r
- mov %ebp,0xffffffd8(%esi)\r
- mov 0xfffffff8(%esi),%ebx\r
+ mov %ebp, 0xffffffd8(%esi)\r
+ mov 0xfffffff8(%esi), %ebx\r
shlw $4, %ax # shl eax, 4\r
addw %ax, %bp # add ebp, eax\r
- .byte 0x66,0xb8 # mov eax, imm32\r
+ .byte 0x66, 0xb8 # mov eax, imm32\r
SavedCr4: .space 4\r
movl %eax, %cr4\r
- lgdtw %cs:0xfffffff2(%edi)\r
- .byte 0x66,0xb8 # mov eax, imm32\r
+ lgdtw %cs:0xfffffff2(%edi)\r
+ .byte 0x66, 0xb8 # mov eax, imm32\r
SavedCr0: .space 4\r
movl %eax, %cr0\r
.byte 0xb8 # mov ax, imm16\r
SavedSs: .space 2\r
movl %eax, %ss\r
- .byte 0x66,0xbc # mov esp, imm32\r
+ .byte 0x66, 0xbc # mov esp, imm32\r
SavedEsp: .space 4\r
.byte 0x66\r
lret # return to protected mode\r
xchgw %bx, %sp # set up 16-bit stack pointer\r
.byte 0x66\r
call @Base # push eip\r
-@Base: \r
+@Base:\r
popw %bp # ebp <- offset @Base\r
addr16 pushl 36(%si)\r
.byte 0x36\r
- lea 0xc(%esi),%eax\r
- pushl %eax\r
+ lea 0xc(%esi), %eax\r
+ push %eax\r
lret\r
\r
-@RealMode: \r
- mov %edx,%cs:0xffffffc5(%esi)\r
- mov %bx,%cs:0xffffffcb(%esi)\r
+@RealMode:\r
+ mov %edx, %cs:0xffffffc5(%esi)\r
+ mov %bx, %cs:0xffffffcb(%esi)\r
lidtw %cs:0xffffffd7(%esi)\r
popaw # popad actually\r
- popl %ds\r
- popl %es\r
- popl %fs\r
- popl %gs\r
+ pop %ds\r
+ pop %es\r
+ pop %fs\r
+ pop %gs\r
popfw # popfd\r
lretw # transfer control to user code\r
\r
push %fs\r
push %gs\r
movl 36(%esp), %esi # esi <- RegSet\r
- movzwl 0x32(%esi),%edx\r
- mov 0xc(%esi),%edi\r
- add $0xffffffc8,%edi\r
+ movzwl 0x32(%esi), %edx\r
+ mov 0xc(%esi), %edi\r
+ add $0xffffffc8, %edi\r
movl %edi, %ebx # ebx <- stack offset\r
- imul $0x10,%edx,%eax\r
+ imul $0x10, %edx, %eax\r
push $0xd\r
addl %eax, %edi # edi <- linear address of 16-bit stack\r
- popl %ecx\r
+ pop %ecx\r
rep\r
movsl # copy RegSet\r
movl 40(%esp), %eax # eax <- address of transition code\r
movl %edx, %esi # esi <- 16-bit stack segment\r
- lea 0x5e(%eax),%edx\r
+ lea 0x5e(%eax), %edx\r
movl %eax, %ecx\r
andl $0xf, %ecx\r
shll $12, %eax\r
- lea 0x6(%ecx),%ecx\r
+ lea 0x6(%ecx), %ecx\r
movw %cx, %ax\r
stosl # [edi] <- return address of user code\r
- sgdtl 0xffffffa2(%edx)\r
+ sgdtl 0xffffffa2(%edx)\r
sidtl 0x24(%esp)\r
movl %cr0, %eax\r
movl %eax, (%edx) # save CR0 in SavedCr0\r
andl $0x7ffffffe, %eax # clear PE, PG bits\r
movl %cr4, %ebp\r
- mov %ebp,0xfffffff1(%edx)\r
+ mov %ebp, 0xfffffff1(%edx)\r
andl $0x300, %ebp # clear all but PCE and OSFXSR bits\r
pushl $0x10\r
- popl %ecx # ecx <- selector for data segments\r
+ pop %ecx # ecx <- selector for data segments\r
lgdtl 0x20(%edx)\r
pushfl\r
lcall *0x14(%edx)\r
popfl\r
lidtl 0x24(%esp)\r
- lea 0xffffffcc(%ebp),%eax\r
+ lea 0xffffffcc(%ebp), %eax\r
pop %gs\r
pop %fs\r
pop %es\r
pop %ebp\r
ret\r
\r
- .const: \r
+ .const:\r
\r
_m16Size: .word _InternalAsmThunk16 - _m16Start\r
_mThunk16Attr: .word _ThunkAttr - _m16Start\r
_m16Gdt: .word _NullSegDesc - _m16Start\r
_m16GdtrBase: .word _16GdtrBase - _m16Start\r
_mTransition: .word _EntryPoint - _m16Start\r
-\r
EXTERNDEF C m16GdtrBase:WORD\r
EXTERNDEF C mTransition:WORD\r
\r
+;\r
+; Here is the layout of the real mode stack. _ToUserCode() is responsible for\r
+; loading all these registers from real mode stack.\r
+;\r
IA32_REGS STRUC 4t\r
_EDI DD ?\r
_ESI DD ?\r
\r
.const\r
\r
+;\r
+; These are global constant to convey information to C code.\r
+;\r
m16Size DW InternalAsmThunk16 - m16Start\r
mThunk16Attr DW _ThunkAttr - m16Start\r
m16Gdt DW _NullSegDesc - m16Start\r
SavedGdt LABEL FWORD\r
DW ?\r
DD ?\r
-\r
+;------------------------------------------------------------------------------\r
+; _BackFromUserCode() takes control in real mode after 'retf' has been executed\r
+; by user code. It will be shadowed to somewhere in memory below 1MB.\r
+;------------------------------------------------------------------------------\r
_BackFromUserCode PROC\r
push ss\r
push cs\r
DW GdtEnd - _NullSegDesc - 1\r
_16GdtrBase DD _NullSegDesc\r
\r
+;------------------------------------------------------------------------------\r
+; _ToUserCode() takes control in real mode before passing control to user code.\r
+; It will be shadowed to somewhere in memory below 1MB.\r
+;------------------------------------------------------------------------------\r
_ToUserCode PROC\r
mov edx, ss\r
mov ss, ecx ; set new segment selectors\r
DB 0\r
GdtEnd LABEL QWORD\r
\r
-;\r
-; @param RegSet Pointer to a IA32_DWORD_REGS structure\r
-; @param Transition Pointer to the transition code\r
-; @return The address of the 16-bit stack after returning from user code\r
-;\r
+;------------------------------------------------------------------------------\r
+; IA32_REGISTER_SET *\r
+; EFIAPI\r
+; InternalAsmThunk16 (\r
+; IN IA32_REGISTER_SET *RegisterSet,\r
+; IN OUT VOID *Transition\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalAsmThunk16 PROC USES ebp ebx esi edi ds es fs gs\r
mov esi, [esp + 36] ; esi <- RegSet\r
movzx edx, (IA32_REGS ptr [esi])._SS\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
# VOID\r
# EFIAPI\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWbinvd\r
-_AsmWbinvd: \r
+_AsmWbinvd:\r
wbinvd\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.486p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; VOID\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWbinvd PROC\r
+AsmWbinvd PROC\r
wbinvd\r
ret\r
-_AsmWbinvd ENDP\r
+AsmWbinvd ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
# UINTN\r
# EFIAPI\r
# AsmWriteCr0 (\r
-# VOID\r
+# IN UINTN Cr0\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteCr0\r
-_AsmWriteCr0: \r
- movl 4(%esp),%eax\r
+_AsmWriteCr0:\r
+ movl 4(%esp), %eax\r
movl %eax, %cr0\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; UINTN\r
; EFIAPI\r
; AsmWriteCr0 (\r
-; VOID\r
+; UINTN Cr0\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteCr0 PROC\r
+AsmWriteCr0 PROC\r
mov eax, [esp + 4]\r
mov cr0, eax\r
ret\r
-_AsmWriteCr0 ENDP\r
+AsmWriteCr0 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
# UINTN\r
# EFIAPI\r
# AsmWriteCr2 (\r
-# VOID\r
+# IN UINTN Cr2\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteCr2\r
-_AsmWriteCr2: \r
- movl 4(%esp),%eax\r
+_AsmWriteCr2:\r
+ movl 4(%esp), %eax\r
movl %eax, %cr2\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; UINTN\r
; EFIAPI\r
; AsmWriteCr2 (\r
-; VOID\r
+; UINTN Cr2\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteCr2 PROC\r
+AsmWriteCr2 PROC\r
mov eax, [esp + 4]\r
mov cr2, eax\r
ret\r
-_AsmWriteCr2 ENDP\r
+AsmWriteCr2 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
# UINTN\r
# EFIAPI\r
# AsmWriteCr3 (\r
-# VOID\r
+# IN UINTN Cr3\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteCr3\r
-_AsmWriteCr3: \r
- movl 4(%esp),%eax\r
+_AsmWriteCr3:\r
+ movl 4(%esp), %eax\r
movl %eax, %cr3\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.386p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; UINTN\r
; EFIAPI\r
; AsmWriteCr3 (\r
-; VOID\r
+; UINTN Cr3\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteCr3 PROC\r
+AsmWriteCr3 PROC\r
mov eax, [esp + 4]\r
mov cr3, eax\r
ret\r
-_AsmWriteCr3 ENDP\r
+AsmWriteCr3 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
# UINTN\r
# EFIAPI\r
# AsmWriteCr4 (\r
-# VOID\r
+# IN UINTN Cr4\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteCr4\r
-_AsmWriteCr4: \r
- movl 4(%esp),%eax\r
+_AsmWriteCr4:\r
+ movl 4(%esp), %eax\r
movl %eax, %cr4\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; UINTN\r
; EFIAPI\r
; AsmWriteCr4 (\r
-; VOID\r
+; UINTN Cr4\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteCr4 PROC\r
+AsmWriteCr4 PROC\r
mov eax, [esp + 4]\r
mov cr4, eax\r
ret\r
-_AsmWriteCr4 ENDP\r
+AsmWriteCr4 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
-# VOID\r
+# UINTN\r
# EFIAPI\r
# AsmWriteDr0 (\r
# IN UINTN Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteDr0\r
-_AsmWriteDr0: \r
- movl 4(%esp),%eax\r
+_AsmWriteDr0:\r
+ movl 4(%esp), %eax\r
movl %eax, %dr0\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; VOID\r
+; UINTN\r
; EFIAPI\r
; AsmWriteDr0 (\r
; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteDr0 PROC\r
+AsmWriteDr0 PROC\r
mov eax, [esp + 4]\r
mov dr0, eax\r
ret\r
-_AsmWriteDr0 ENDP\r
+AsmWriteDr0 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
-# VOID\r
+# UINTN\r
# EFIAPI\r
# AsmWriteDr1 (\r
# IN UINTN Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteDr1\r
-_AsmWriteDr1: \r
- movl 4(%esp),%eax\r
+_AsmWriteDr1:\r
+ movl 4(%esp), %eax\r
movl %eax, %dr1\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; VOID\r
+; UINTN\r
; EFIAPI\r
; AsmWriteDr1 (\r
; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteDr1 PROC\r
+AsmWriteDr1 PROC\r
mov eax, [esp + 4]\r
mov dr1, eax\r
ret\r
-_AsmWriteDr1 ENDP\r
+AsmWriteDr1 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
-# VOID\r
+# UINTN\r
# EFIAPI\r
# AsmWriteDr2 (\r
# IN UINTN Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteDr2\r
-_AsmWriteDr2: \r
- movl 4(%esp),%eax\r
+_AsmWriteDr2:\r
+ movl 4(%esp), %eax\r
movl %eax, %dr2\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; VOID\r
+; UINTN\r
; EFIAPI\r
; AsmWriteDr2 (\r
; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteDr2 PROC\r
+AsmWriteDr2 PROC\r
mov eax, [esp + 4]\r
mov dr2, eax\r
ret\r
-_AsmWriteDr2 ENDP\r
+AsmWriteDr2 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
-# VOID\r
+# UINTN\r
# EFIAPI\r
# AsmWriteDr3 (\r
# IN UINTN Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteDr3\r
-_AsmWriteDr3: \r
- movl 4(%esp),%eax\r
+_AsmWriteDr3:\r
+ movl 4(%esp), %eax\r
movl %eax, %dr3\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; VOID\r
+; UINTN\r
; EFIAPI\r
; AsmWriteDr3 (\r
; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteDr3 PROC\r
+AsmWriteDr3 PROC\r
mov eax, [esp + 4]\r
mov dr3, eax\r
ret\r
-_AsmWriteDr3 ENDP\r
+AsmWriteDr3 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
-# VOID\r
+# UINTN\r
# EFIAPI\r
# AsmWriteDr4 (\r
# IN UINTN Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteDr4\r
-_AsmWriteDr4: \r
- movl 4(%esp),%eax\r
+_AsmWriteDr4:\r
+ movl 4(%esp), %eax\r
movl %eax, %dr4\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; VOID\r
+; UINTN\r
; EFIAPI\r
; AsmWriteDr4 (\r
; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteDr4 PROC\r
+AsmWriteDr4 PROC\r
mov eax, [esp + 4]\r
DB 0fh, 23h, 0e0h\r
ret\r
-_AsmWriteDr4 ENDP\r
+AsmWriteDr4 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
-# VOID\r
+# UINTN\r
# EFIAPI\r
# AsmWriteDr5 (\r
# IN UINTN Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteDr5\r
-_AsmWriteDr5: \r
- movl 4(%esp),%eax\r
+_AsmWriteDr5:\r
+ movl 4(%esp), %eax\r
movl %eax, %dr5\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; VOID\r
+; UINTN\r
; EFIAPI\r
; AsmWriteDr5 (\r
; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteDr5 PROC\r
+AsmWriteDr5 PROC\r
mov eax, [esp + 4]\r
DB 0fh, 23h, 0e8h\r
ret\r
-_AsmWriteDr5 ENDP\r
+AsmWriteDr5 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
-# VOID\r
+# UINTN\r
# EFIAPI\r
# AsmWriteDr6 (\r
# IN UINTN Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteDr6\r
-_AsmWriteDr6: \r
- movl 4(%esp),%eax\r
+_AsmWriteDr6:\r
+ movl 4(%esp), %eax\r
movl %eax, %dr6\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; VOID\r
+; UINTN\r
; EFIAPI\r
; AsmWriteDr6 (\r
; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteDr6 PROC\r
+AsmWriteDr6 PROC\r
mov eax, [esp + 4]\r
mov dr6, eax\r
ret\r
-_AsmWriteDr6 ENDP\r
+AsmWriteDr6 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
-# VOID\r
+# UINTN\r
# EFIAPI\r
# AsmWriteDr7 (\r
# IN UINTN Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteDr7\r
-_AsmWriteDr7: \r
- movl 4(%esp),%eax\r
+_AsmWriteDr7:\r
+ movl 4(%esp), %eax\r
movl %eax, %dr7\r
ret\r
-\r
-\r
-\r
;------------------------------------------------------------------------------\r
\r
.586p\r
- .model flat\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; VOID\r
+; UINTN\r
; EFIAPI\r
; AsmWriteDr7 (\r
; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteDr7 PROC\r
+AsmWriteDr7 PROC\r
mov eax, [esp + 4]\r
mov dr7, eax\r
ret\r
-_AsmWriteDr7 ENDP\r
+AsmWriteDr7 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
# VOID\r
# EFIAPI\r
# );\r
#------------------------------------------------------------------------------\r
.global _InternalX86WriteGdtr\r
-_InternalX86WriteGdtr: \r
- movl 4(%esp),%eax\r
+_InternalX86WriteGdtr:\r
+ movl 4(%esp), %eax\r
lgdt (%eax)\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .686P\r
+ .386p\r
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalX86WriteGdtr (\r
+; IN CONST IA32_DESCRIPTOR *Idtr\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalX86WriteGdtr PROC\r
mov eax, [esp + 4]\r
lgdt fword ptr [eax]\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
# VOID\r
# EFIAPI\r
# );\r
#------------------------------------------------------------------------------\r
.global _InternalX86WriteIdtr\r
-_InternalX86WriteIdtr: \r
- movl 4(%esp),%eax\r
+_InternalX86WriteIdtr:\r
+ movl 4(%esp), %eax\r
lidt (%eax)\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .686P\r
+ .386p\r
.model flat,C\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalX86WriteIdtr (\r
+; IN CONST IA32_DESCRIPTOR *Idtr\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalX86WriteIdtr PROC\r
mov eax, [esp + 4]\r
lidt fword ptr [eax]\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
# VOID\r
# EFIAPI\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteLdtr\r
-_AsmWriteLdtr: \r
- movl 4(%esp),%eax\r
+_AsmWriteLdtr:\r
+ movl 4(%esp), %eax\r
lldtw %ax\r
ret\r
-\r
-\r
-\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
-\r
#------------------------------------------------------------------------------\r
-# UINT64\r
+# VOID\r
# EFIAPI\r
# AsmWriteMm0 (\r
# IN UINT64 Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteMm0\r
-_AsmWriteMm0: \r
- movq 4(%esp),%mm0\r
+_AsmWriteMm0:\r
+ movq 4(%esp), %mm0\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINT64\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm0 (\r
; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteMm0 PROC\r
+AsmWriteMm0 PROC\r
movq mm0, [esp + 4]\r
ret\r
-_AsmWriteMm0 ENDP\r
+AsmWriteMm0 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
-\r
#------------------------------------------------------------------------------\r
-# UINT64\r
+# VOID\r
# EFIAPI\r
# AsmWriteMm1 (\r
# IN UINT64 Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteMm1\r
-_AsmWriteMm1: \r
- movq 4(%esp),%mm1\r
+_AsmWriteMm1:\r
+ movq 4(%esp), %mm1\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINT64\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm1 (\r
; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteMm1 PROC\r
+AsmWriteMm1 PROC\r
movq mm1, [esp + 4]\r
ret\r
-_AsmWriteMm1 ENDP\r
+AsmWriteMm1 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
-\r
#------------------------------------------------------------------------------\r
-# UINT64\r
+# VOID\r
# EFIAPI\r
# AsmWriteMm2 (\r
# IN UINT64 Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteMm2\r
-_AsmWriteMm2: \r
- movq 4(%esp),%mm2\r
+_AsmWriteMm2:\r
+ movq 4(%esp), %mm2\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINT64\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm2 (\r
; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteMm2 PROC\r
+AsmWriteMm2 PROC\r
movq mm2, [esp + 4]\r
ret\r
-_AsmWriteMm2 ENDP\r
+AsmWriteMm2 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
-\r
#------------------------------------------------------------------------------\r
-# UINT64\r
+# VOID\r
# EFIAPI\r
# AsmWriteMm3 (\r
# IN UINT64 Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteMm3\r
-_AsmWriteMm3: \r
- movq 4(%esp),%mm3\r
+_AsmWriteMm3:\r
+ movq 4(%esp), %mm3\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINT64\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm3 (\r
; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteMm3 PROC\r
+AsmWriteMm3 PROC\r
movq mm3, [esp + 4]\r
ret\r
-_AsmWriteMm3 ENDP\r
+AsmWriteMm3 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
-\r
#------------------------------------------------------------------------------\r
-# UINT64\r
+# VOID\r
# EFIAPI\r
# AsmWriteMm4 (\r
# IN UINT64 Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteMm4\r
-_AsmWriteMm4: \r
- movq 4(%esp),%mm4\r
+_AsmWriteMm4:\r
+ movq 4(%esp), %mm4\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINT64\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm4 (\r
; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteMm4 PROC\r
+AsmWriteMm4 PROC\r
movq mm4, [esp + 4]\r
ret\r
-_AsmWriteMm4 ENDP\r
+AsmWriteMm4 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
-\r
#------------------------------------------------------------------------------\r
-# UINT64\r
+# VOID\r
# EFIAPI\r
# AsmWriteMm5 (\r
# IN UINT64 Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteMm5\r
-_AsmWriteMm5: \r
- movq 4(%esp),%mm5\r
+_AsmWriteMm5:\r
+ movq 4(%esp), %mm5\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINT64\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm5 (\r
; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteMm5 PROC\r
+AsmWriteMm5 PROC\r
movq mm5, [esp + 4]\r
ret\r
-_AsmWriteMm5 ENDP\r
+AsmWriteMm5 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
-\r
#------------------------------------------------------------------------------\r
-# UINT64\r
+# VOID\r
# EFIAPI\r
# AsmWriteMm6 (\r
# IN UINT64 Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteMm6\r
-_AsmWriteMm6: \r
- movq 4(%esp),%mm6\r
+_AsmWriteMm6:\r
+ movq 4(%esp), %mm6\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINT64\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm6 (\r
; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteMm6 PROC\r
+AsmWriteMm6 PROC\r
movq mm6, [esp + 4]\r
ret\r
-_AsmWriteMm6 ENDP\r
+AsmWriteMm6 ENDP\r
\r
END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
- \r
-\r
#------------------------------------------------------------------------------\r
-# UINT64\r
+# VOID\r
# EFIAPI\r
# AsmWriteMm7 (\r
# IN UINT64 Value\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteMm7\r
-_AsmWriteMm7: \r
- movq 4(%esp),%mm7\r
+_AsmWriteMm7:\r
+ movq 4(%esp), %mm7\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
- .xmm\r
+ .586\r
+ .model flat,C\r
+ .mmx\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINT64\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm7 (\r
; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteMm7 PROC\r
+AsmWriteMm7 PROC\r
movq mm7, [esp + 4]\r
ret\r
-_AsmWriteMm7 ENDP\r
+AsmWriteMm7 ENDP\r
\r
END\r
+++ /dev/null
-#------------------------------------------------------------------------------\r
-#\r
-# Copyright (c) 2006, Intel Corporation\r
-# All rights reserved. This program and the accompanying materials\r
-# are licensed and made available under the terms and conditions of the BSD License\r
-# which accompanies this distribution. The full text of the license may be found at\r
-# http://opensource.org/licenses/bsd-license.php\r
-#\r
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
-#\r
-# Module Name:\r
-#\r
-# WriteMsr32.Asm\r
-#\r
-# Abstract:\r
-#\r
-# AsmWriteMsr32 function\r
-#\r
-# Notes:\r
-#\r
-#------------------------------------------------------------------------------\r
-\r
-\r
-\r
- \r
-\r
-#------------------------------------------------------------------------------\r
-# UINT32\r
-# EFIAPI\r
-# AsmWriteMsr32 (\r
-# IN UINT32 Index,\r
-# IN UINT32 Value\r
-# );\r
-#------------------------------------------------------------------------------\r
-.global _AsmWriteMsr32\r
-_AsmWriteMsr32: \r
- movl 8(%esp),%eax\r
- xorl %edx,%edx\r
- movl 4(%esp),%ecx\r
- wrmsr\r
- ret\r
-\r
-\r
-\r
+++ /dev/null
-;------------------------------------------------------------------------------\r
-;\r
-; Copyright (c) 2006, Intel Corporation\r
-; All rights reserved. This program and the accompanying materials\r
-; are licensed and made available under the terms and conditions of the BSD License\r
-; which accompanies this distribution. The full text of the license may be found at\r
-; http://opensource.org/licenses/bsd-license.php\r
-;\r
-; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
-; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
-;\r
-; Module Name:\r
-;\r
-; WriteMsr32.Asm\r
-;\r
-; Abstract:\r
-;\r
-; AsmWriteMsr32 function\r
-;\r
-; Notes:\r
-;\r
-;------------------------------------------------------------------------------\r
-\r
- .586P\r
- .model flat\r
- .code\r
-\r
-;------------------------------------------------------------------------------\r
-; UINT32\r
-; EFIAPI\r
-; AsmWriteMsr32 (\r
-; IN UINT32 Index,\r
-; IN UINT32 Value\r
-; );\r
-;------------------------------------------------------------------------------\r
-_AsmWriteMsr32 PROC\r
- mov eax, [esp + 8]\r
- xor edx, edx\r
- mov ecx, [esp + 4]\r
- wrmsr\r
- ret\r
-_AsmWriteMsr32 ENDP\r
-\r
- END\r
#\r
#------------------------------------------------------------------------------\r
\r
-\r
-\r
- \r
-\r
#------------------------------------------------------------------------------\r
# UINT64\r
# EFIAPI\r
# );\r
#------------------------------------------------------------------------------\r
.global _AsmWriteMsr64\r
-_AsmWriteMsr64: \r
- movl 12(%esp),%edx\r
- movl 8(%esp),%eax\r
- movl 4(%esp),%ecx\r
+_AsmWriteMsr64:\r
+ movl 12(%esp), %edx\r
+ movl 8(%esp), %eax\r
+ movl 4(%esp), %ecx\r
wrmsr\r
ret\r
-\r
-\r
-\r
;\r
;------------------------------------------------------------------------------\r
\r
- .586P\r
- .model flat\r
+ .586p\r
+ .model flat,C\r
.code\r
\r
;------------------------------------------------------------------------------\r
; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
-_AsmWriteMsr64 PROC\r
+AsmWriteMsr64 PROC\r
mov edx, [esp + 12]\r
mov eax, [esp + 8]\r
mov ecx, [esp + 4]\r
wrmsr\r
ret\r
-_AsmWriteMsr64 ENDP\r
+AsmWriteMsr64 ENDP\r
\r
END\r
\r
**/\r
VOID\r
+EFIAPI\r
InternalAssertJumpBuffer (\r
IN BASE_LIBRARY_JUMP_BUFFER *JumpBuffer\r
)\r
\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalX86DisablePaging64 (\r
+; IN UINT16 Cs,\r
+; IN UINT32 EntryPoint,\r
+; IN UINT32 Context1, OPTIONAL\r
+; IN UINT32 Context2, OPTIONAL\r
+; IN UINT32 NewStack\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalX86DisablePaging64 PROC\r
cli\r
shl rcx, 32\r
\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalX86FxRestore (\r
+; IN CONST IA32_FX_BUFFER *Buffer\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalX86FxRestore PROC\r
fxrstor [rcx]\r
ret\r
\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalX86FxSave (\r
+; OUT IA32_FX_BUFFER *Buffer\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalX86FxSave PROC\r
fxsave [rcx]\r
ret\r
\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalLongJump (\r
+; IN BASE_LIBRARY_JUMP_BUFFER *JumpBuffer,\r
+; IN UINTN Value\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalLongJump PROC\r
mov rbx, [rcx]\r
mov rsp, [rcx + 8]\r
; );\r
;------------------------------------------------------------------------------\r
AsmReadCs PROC\r
- mov ax, cs\r
+ mov eax, cs\r
ret\r
AsmReadCs ENDP\r
\r
; );\r
;------------------------------------------------------------------------------\r
AsmReadDs PROC\r
- mov ax, ds\r
+ mov eax, ds\r
ret\r
AsmReadDs ENDP\r
\r
END\r
-\r
; );\r
;------------------------------------------------------------------------------\r
AsmReadEs PROC\r
- mov ax, es\r
+ mov eax, es\r
ret\r
AsmReadEs ENDP\r
\r
; );\r
;------------------------------------------------------------------------------\r
AsmReadFs PROC\r
- mov ax, fs\r
+ mov eax, fs\r
ret\r
AsmReadFs ENDP\r
\r
\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalX86ReadGdtr (\r
+; OUT IA32_DESCRIPTOR *Gdtr\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalX86ReadGdtr PROC\r
sgdt fword ptr [rcx]\r
ret\r
; );\r
;------------------------------------------------------------------------------\r
AsmReadGs PROC\r
- mov ax, gs\r
+ mov eax, gs\r
ret\r
AsmReadGs ENDP\r
\r
\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalX86ReadIdtr (\r
+; OUT IA32_DESCRIPTOR *Idtr\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalX86ReadIdtr PROC\r
sidt fword ptr [rcx]\r
ret\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm0 (\r
; VOID\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm1 (\r
; VOID\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm2 (\r
; VOID\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm3 (\r
; VOID\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm4 (\r
; VOID\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm5 (\r
; VOID\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm6 (\r
; VOID\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; UINT64\r
; EFIAPI\r
; AsmReadMm7 (\r
; VOID\r
+++ /dev/null
-;------------------------------------------------------------------------------\r
-;\r
-; Copyright (c) 2006, Intel Corporation\r
-; All rights reserved. This program and the accompanying materials\r
-; are licensed and made available under the terms and conditions of the BSD License\r
-; which accompanies this distribution. The full text of the license may be found at\r
-; http://opensource.org/licenses/bsd-license.php\r
-;\r
-; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
-; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
-;\r
-; Module Name:\r
-;\r
-; ReadMsr32.Asm\r
-;\r
-; Abstract:\r
-;\r
-; AsmReadMsr32 function\r
-;\r
-; Notes:\r
-;\r
-;------------------------------------------------------------------------------\r
-\r
- .code\r
-\r
-;------------------------------------------------------------------------------\r
-; UINT32\r
-; EFIAPI\r
-; AsmReadMsr32 (\r
-; IN UINT32 Index\r
-; );\r
-;------------------------------------------------------------------------------\r
-AsmReadMsr32 PROC\r
- rdmsr\r
- ret\r
-AsmReadMsr32 ENDP\r
-\r
- END\r
; );\r
;------------------------------------------------------------------------------\r
AsmReadMsr64 PROC\r
- rdmsr\r
- shl rax, 20h\r
- shrd rax, rdx, 20h\r
+ rdmsr ; edx & eax are zero extended\r
+ shl rdx, 20h\r
+ or rax, rdx\r
ret\r
AsmReadMsr64 ENDP\r
\r
;------------------------------------------------------------------------------\r
AsmReadPmc PROC\r
rdpmc\r
- shl ecx, 1\r
- jnc @F\r
- shl rax, 20h\r
- shrd rax, rdx, 20h\r
-@@:\r
+ shl rdx, 20h\r
+ or rax, rdx\r
ret\r
AsmReadPmc ENDP\r
\r
; );\r
;------------------------------------------------------------------------------\r
AsmReadSs PROC\r
- mov ax, ss\r
+ mov eax, ss\r
ret\r
AsmReadSs ENDP\r
\r
;------------------------------------------------------------------------------\r
AsmReadTsc PROC\r
rdtsc\r
- shl rax, 20h\r
- shrd rax, rdx, 20h\r
+ shl rdx, 20h\r
+ or rax, rdx\r
ret\r
AsmReadTsc ENDP\r
\r
\r
EXTERNDEF InternalAssertJumpBuffer:PROC\r
\r
+;------------------------------------------------------------------------------\r
+; UINTN\r
+; EFIAPI\r
+; SetJump (\r
+; OUT BASE_LIBRARY_JUMP_BUFFER *JumpBuffer\r
+; );\r
+;------------------------------------------------------------------------------\r
SetJump PROC\r
push rcx\r
add rsp, -20h\r
DW ?\r
DQ ?\r
\r
+;------------------------------------------------------------------------------\r
+; _BackFromUserCode() takes control in real mode after 'retf' has been executed\r
+; by user code. It will be shadowed to somewhere in memory below 1MB.\r
+;------------------------------------------------------------------------------\r
_BackFromUserCode PROC\r
DB 16h ; push ss\r
DB 0eh ; push cs\r
_16GdtrBase DQ _NullSegDesc\r
_16Idtr FWORD (1 SHL 10) - 1\r
\r
+;------------------------------------------------------------------------------\r
+; _ToUserCode() takes control in real mode before passing control to user code.\r
+; It will be shadowed to somewhere in memory below 1MB.\r
+;------------------------------------------------------------------------------\r
_ToUserCode PROC\r
mov edi, ss\r
mov ss, edx ; set new segment selectors\r
DB 0\r
GdtEnd LABEL QWORD\r
\r
-;\r
-; @param RegSet Pointer to a IA32_DWORD_REGS structure\r
-; @param Transition Pointer to the transition code\r
-; @return The address of the 16-bit stack after returning from user code\r
-;\r
+;------------------------------------------------------------------------------\r
+; IA32_REGISTER_SET *\r
+; EFIAPI\r
+; InternalAsmThunk16 (\r
+; IN IA32_REGISTER_SET *RegisterSet,\r
+; IN OUT VOID *Transition\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalAsmThunk16 PROC USES rbp rbx rsi rdi\r
mov r10d, ds\r
mov r11d, es\r
; UINTN\r
; EFIAPI\r
; AsmWriteCr0 (\r
-; VOID\r
+; UINTN Cr0\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteCr0 PROC\r
mov cr0, rcx\r
+ mov rax, rcx\r
ret\r
AsmWriteCr0 ENDP\r
\r
; UINTN\r
; EFIAPI\r
; AsmWriteCr2 (\r
-; VOID\r
+; UINTN Cr2\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteCr2 PROC\r
mov cr2, rcx\r
+ mov rax, rcx\r
ret\r
AsmWriteCr2 ENDP\r
\r
; UINTN\r
; EFIAPI\r
; AsmWriteCr3 (\r
-; VOID\r
+; UINTN Cr3\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteCr3 PROC\r
mov cr3, rcx\r
+ mov rax, rcx\r
ret\r
AsmWriteCr3 ENDP\r
\r
; UINTN\r
; EFIAPI\r
; AsmWriteCr4 (\r
-; VOID\r
+; UINTN Cr4\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteCr4 PROC\r
mov cr4, rcx\r
+ mov rax, rcx\r
ret\r
AsmWriteCr4 ENDP\r
\r
; UINTN\r
; EFIAPI\r
; AsmWriteDr0 (\r
-; VOID\r
+; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteDr0 PROC\r
mov dr0, rcx\r
+ mov rax, rcx\r
ret\r
AsmWriteDr0 ENDP\r
\r
; UINTN\r
; EFIAPI\r
; AsmWriteDr1 (\r
-; VOID\r
+; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteDr1 PROC\r
mov dr1, rcx\r
+ mov rax, rcx\r
ret\r
AsmWriteDr1 ENDP\r
\r
; UINTN\r
; EFIAPI\r
; AsmWriteDr2 (\r
-; VOID\r
+; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteDr2 PROC\r
mov dr2, rcx\r
+ mov rax, rcx\r
ret\r
AsmWriteDr2 ENDP\r
\r
; UINTN\r
; EFIAPI\r
; AsmWriteDr3 (\r
-; VOID\r
+; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteDr3 PROC\r
mov dr3, rcx\r
+ mov rax, rcx\r
ret\r
AsmWriteDr3 ENDP\r
\r
; UINTN\r
; EFIAPI\r
; AsmWriteDr4 (\r
-; VOID\r
+; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteDr4 PROC\r
DB 0fh, 23h, 0e1h\r
+ mov rax, rcx\r
ret\r
AsmWriteDr4 ENDP\r
\r
;------------------------------------------------------------------------------\r
; UINTN\r
; EFIAPI\r
-; AsmWriteDr5 (\r
-; VOID\r
+; AsmWriteDr5 (
+; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteDr5 PROC\r
DB 0fh, 23h, 0e9h\r
+ mov rax, rcx\r
ret\r
AsmWriteDr5 ENDP\r
\r
; UINTN\r
; EFIAPI\r
; AsmWriteDr6 (\r
-; VOID\r
+; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteDr6 PROC\r
mov dr6, rcx\r
+ mov rax, rcx\r
ret\r
AsmWriteDr6 ENDP\r
\r
; UINTN\r
; EFIAPI\r
; AsmWriteDr7 (\r
-; VOID\r
+; IN UINTN Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteDr7 PROC\r
mov dr7, rcx\r
+ mov rax, rcx\r
ret\r
AsmWriteDr7 ENDP\r
\r
\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalX86WriteGdtr (\r
+; IN CONST IA32_DESCRIPTOR *Gdtr
+; );\r
+;------------------------------------------------------------------------------\r
InternalX86WriteGdtr PROC\r
lgdt fword ptr [rcx]\r
ret\r
\r
.code\r
\r
+;------------------------------------------------------------------------------\r
+; VOID\r
+; EFIAPI\r
+; InternalX86WriteIdtr (\r
+; IN CONST IA32_DESCRIPTOR *Idtr\r
+; );\r
+;------------------------------------------------------------------------------\r
InternalX86WriteIdtr PROC\r
lidt fword ptr [rcx]\r
ret\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm0 (\r
-; VOID\r
+; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteMm0 PROC\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm1 (\r
-; VOID\r
+; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteMm1 PROC\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm2 (\r
-; VOID\r
+; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteMm2 PROC\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm3 (\r
-; VOID\r
+; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteMm3 PROC\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm4 (\r
-; VOID\r
+; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteMm4 PROC\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm5 (\r
-; VOID\r
+; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteMm5 PROC\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm6 (\r
-; VOID\r
+; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteMm6 PROC\r
.code\r
\r
;------------------------------------------------------------------------------\r
-; UINTN\r
+; VOID\r
; EFIAPI\r
; AsmWriteMm7 (\r
-; VOID\r
+; IN UINT64 Value\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteMm7 PROC\r
+++ /dev/null
-;------------------------------------------------------------------------------\r
-;\r
-; Copyright (c) 2006, Intel Corporation\r
-; All rights reserved. This program and the accompanying materials\r
-; are licensed and made available under the terms and conditions of the BSD License\r
-; which accompanies this distribution. The full text of the license may be found at\r
-; http://opensource.org/licenses/bsd-license.php\r
-;\r
-; THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
-; WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
-;\r
-; Module Name:\r
-;\r
-; WriteMsr32.Asm\r
-;\r
-; Abstract:\r
-;\r
-; AsmWriteMsr32 function\r
-;\r
-; Notes:\r
-;\r
-;------------------------------------------------------------------------------\r
-\r
- .code\r
-\r
-;------------------------------------------------------------------------------\r
-; UINT32\r
-; EFIAPI\r
-; AsmWriteMsr32 (\r
-; IN UINT32 Index,\r
-; IN UINT32 Value\r
-; );\r
-;------------------------------------------------------------------------------\r
-AsmWriteMsr32 PROC\r
- mov eax, edx\r
- xor edx, edx\r
- wrmsr\r
- ret\r
-AsmWriteMsr32 ENDP\r
-\r
- END\r
; );\r
;------------------------------------------------------------------------------\r
AsmWriteMsr64 PROC\r
- push rdx\r
- mov eax, edx\r
+ mov rax, rdx\r
shr rdx, 20h\r
wrmsr\r
- pop rax\r
ret\r
AsmWriteMsr64 ENDP\r
\r
--- /dev/null
+/** @file\r
+ IA-32/x64 AsmDisablePaging32()\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86DisablePaging32.c\r
+\r
+**/\r
+\r
+#include "BaseLibInternals.h"\r
+\r
+/**\r
+ Disables the 32-bit paging mode on the CPU.\r
+\r
+ Disables the 32-bit paging mode on the CPU and returns to 32-bit protected\r
+ mode. This function assumes the current execution mode is 32-bit paged protected
+ mode. This function is only available on IA-32. After the 32-bit paging mode\r
+ is disabled, control is transferred to the function specified by EntryPoint\r
+ using the new stack specified by NewStack and passing in the parameters\r
+ specified by Context1 and Context2. Context1 and Context2 are optional and\r
+ may be NULL. The function EntryPoint must never return.\r
+\r
+ If the current execution mode is not 32-bit paged mode, then ASSERT().\r
+ If EntryPoint is NULL, then ASSERT().\r
+ If NewStack is NULL, then ASSERT().\r
+\r
+ There are a number of constraints that must be followed before calling this\r
+ function:\r
+ 1) Interrupts must be disabled.\r
+ 2) The caller must be in 32-bit paged mode.\r
+ 3) CR0, CR3, and CR4 must be compatible with 32-bit paged mode.\r
+ 4) CR3 must point to valid page tables that guarantee that the pages for\r
+ this function and the stack are identity mapped.\r
+\r
+ @param EntryPoint A pointer to function to call with the new stack after\r
+ paging is disabled.\r
+ @param Context1 A pointer to the context to pass into the EntryPoint\r
+ function as the first parameter after paging is disabled.\r
+ @param Context2 A pointer to the context to pass into the EntryPoint\r
+ function as the second parameter after paging is\r
+ disabled.\r
+ @param NewStack A pointer to the new stack to use for the EntryPoint\r
+ function after paging is disabled.\r
+\r
+**/\r
+VOID\r
+EFIAPI\r
+AsmDisablePaging32 (\r
+ IN SWITCH_STACK_ENTRY_POINT EntryPoint,\r
+ IN VOID *Context1, OPTIONAL\r
+ IN VOID *Context2, OPTIONAL\r
+ IN VOID *NewStack\r
+ )\r
+{\r
+ ASSERT (EntryPoint != NULL);\r
+ ASSERT (NewStack != NULL);\r
+ InternalX86DisablePaging32 (EntryPoint, Context1, Context2, NewStack);\r
+}\r
--- /dev/null
+/** @file\r
+ IA-32/x64 AsmDisablePaging64()\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86DisablePaging64.c\r
+\r
+**/\r
+\r
+#include "BaseLibInternals.h"\r
+\r
+/**\r
+ Disables the 64-bit paging mode on the CPU.\r
+\r
+ Disables the 64-bit paging mode on the CPU and returns to 32-bit protected\r
+ mode. This function assumes the current execution mode is 64-bit paging mode.
+ This function is only available on X64. After the 64-bit paging mode is\r
+ disabled, control is transferred to the function specified by EntryPoint\r
+ using the new stack specified by NewStack and passing in the parameters\r
+ specified by Context1 and Context2. Context1 and Context2 are optional and\r
+ may be 0. The function EntryPoint must never return.\r
+\r
+ If the current execution mode is not 64-bit paged mode, then ASSERT().\r
+ If EntryPoint is 0, then ASSERT().\r
+ If NewStack is 0, then ASSERT().\r
+\r
+ @param Cs The 16-bit selector to load in the CS before EntryPoint\r
+ is called. The descriptor in the GDT that this selector\r
+ references must be setup for 32-bit protected mode.\r
+ @param EntryPoint The 32-bit virtual address of the function to call with\r
+ the new stack after paging is disabled.\r
+ @param Context1 The 32-bit virtual address of the context to pass into\r
+ the EntryPoint function as the first parameter after\r
+ paging is disabled.\r
+ @param Context2 The 32-bit virtual address of the context to pass into\r
+ the EntryPoint function as the second parameter after\r
+ paging is disabled.\r
+ @param NewStack The 32-bit virtual address of the new stack to use for\r
+ the EntryPoint function after paging is disabled.\r
+\r
+**/\r
+VOID\r
+EFIAPI\r
+AsmDisablePaging64 (\r
+ IN UINT16 Cs,\r
+ IN UINT32 EntryPoint,\r
+ IN UINT32 Context1, OPTIONAL\r
+ IN UINT32 Context2, OPTIONAL\r
+ IN UINT32 NewStack\r
+ )\r
+{\r
+ ASSERT (EntryPoint != 0);\r
+ ASSERT (NewStack != 0);\r
+ InternalX86DisablePaging64 (Cs, EntryPoint, Context1, Context2, NewStack);\r
+}\r
--- /dev/null
+/** @file\r
+ IA-32/x64 AsmEnablePaging32()\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86EnablePaging32.c\r
+\r
+**/\r
+\r
+#include "BaseLibInternals.h"\r
+\r
+/**\r
+ Enables the 32-bit paging mode on the CPU.\r
+\r
+ Enables the 32-bit paging mode on the CPU. CR0, CR3, CR4, and the page tables\r
+ must be properly initialized prior to calling this service. This function\r
+ assumes the current execution mode is 32-bit protected mode. This function is\r
+ only available on IA-32. After the 32-bit paging mode is enabled, control is\r
+ transferred to the function specified by EntryPoint using the new stack\r
+ specified by NewStack and passing in the parameters specified by Context1 and\r
+ Context2. Context1 and Context2 are optional and may be NULL. The function\r
+ EntryPoint must never return.\r
+\r
+ If the current execution mode is not 32-bit protected mode, then ASSERT().\r
+ If EntryPoint is NULL, then ASSERT().\r
+ If NewStack is NULL, then ASSERT().\r
+\r
+ There are a number of constraints that must be followed before calling this\r
+ function:\r
+ 1) Interrupts must be disabled.\r
+ 2) The caller must be in 32-bit protected mode with flat descriptors. This\r
+ means all descriptors must have a base of 0 and a limit of 4GB.\r
+ 3) CR0 and CR4 must be compatible with 32-bit protected mode with flat\r
+ descriptors.\r
+ 4) CR3 must point to valid page tables that will be used once the transition\r
+ is complete, and those page tables must guarantee that the pages for this\r
+ function and the stack are identity mapped.\r
+\r
+ @param EntryPoint A pointer to function to call with the new stack after\r
+ paging is enabled.\r
+ @param Context1 A pointer to the context to pass into the EntryPoint\r
+ function as the first parameter after paging is enabled.\r
+ @param Context2 A pointer to the context to pass into the EntryPoint\r
+ function as the second parameter after paging is enabled.\r
+ @param NewStack A pointer to the new stack to use for the EntryPoint\r
+ function after paging is enabled.\r
+\r
+**/\r
+VOID\r
+EFIAPI\r
+AsmEnablePaging32 (\r
+ IN SWITCH_STACK_ENTRY_POINT EntryPoint,\r
+ IN VOID *Context1, OPTIONAL\r
+ IN VOID *Context2, OPTIONAL\r
+ IN VOID *NewStack\r
+ )\r
+{\r
+ //\r
+ // EntryPoint and NewStack are required; Context1 and Context2 may be NULL.\r
+ //\r
+ ASSERT (EntryPoint != NULL);\r
+ ASSERT (NewStack != NULL);\r
+ //\r
+ // Paging is enabled and control is transferred to EntryPoint by the\r
+ // architecture-specific internal implementation; per the contract above,\r
+ // EntryPoint must never return.\r
+ //\r
+ InternalX86EnablePaging32 (EntryPoint, Context1, Context2, NewStack);\r
+}\r
--- /dev/null
+/** @file\r
+ IA-32/x64 AsmEnablePaging64()\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86EnablePaging64.c\r
+\r
+**/\r
+\r
+#include "BaseLibInternals.h"\r
+\r
+/**\r
+ Enables the 64-bit paging mode on the CPU.\r
+\r
+ Enables the 64-bit paging mode on the CPU. CR0, CR3, CR4, and the page tables\r
+ must be properly initialized prior to calling this service. This function\r
+ assumes the current execution mode is 32-bit protected mode with flat\r
+ descriptors. This function is only available on IA-32. After the 64-bit\r
+ paging mode is enabled, control is transferred to the function specified by\r
+ EntryPoint using the new stack specified by NewStack and passing in the\r
+ parameters specified by Context1 and Context2. Context1 and Context2 are\r
+ optional and may be 0. The function EntryPoint must never return.\r
+\r
+ If the current execution mode is not 32-bit protected mode with flat\r
+ descriptors, then ASSERT().\r
+ If EntryPoint is 0, then ASSERT().\r
+ If NewStack is 0, then ASSERT().\r
+\r
+ @param Cs The 16-bit selector to load in the CS before EntryPoint\r
+ is called. The descriptor in the GDT that this selector\r
+ references must be setup for long mode.\r
+ @param EntryPoint The 64-bit virtual address of the function to call with\r
+ the new stack after paging is enabled.\r
+ @param Context1 The 64-bit virtual address of the context to pass into\r
+ the EntryPoint function as the first parameter after\r
+ paging is enabled.\r
+ @param Context2 The 64-bit virtual address of the context to pass into\r
+ the EntryPoint function as the second parameter after\r
+ paging is enabled.\r
+ @param NewStack The 64-bit virtual address of the new stack to use for\r
+ the EntryPoint function after paging is enabled.\r
+\r
+**/\r
+VOID\r
+EFIAPI\r
+AsmEnablePaging64 (\r
+ IN UINT16 Cs,\r
+ IN UINT64 EntryPoint,\r
+ IN UINT64 Context1, OPTIONAL\r
+ IN UINT64 Context2, OPTIONAL\r
+ IN UINT64 NewStack\r
+ )\r
+{\r
+ //\r
+ // EntryPoint and NewStack are required; Context1 and Context2 may be 0.\r
+ //\r
+ ASSERT (EntryPoint != 0);\r
+ ASSERT (NewStack != 0);\r
+ //\r
+ // The switch to long mode and transfer of control to EntryPoint are\r
+ // performed by the architecture-specific internal implementation; per the\r
+ // contract above, EntryPoint must never return.\r
+ //\r
+ InternalX86EnablePaging64 (Cs, EntryPoint, Context1, Context2, NewStack);\r
+}\r
--- /dev/null
+/** @file\r
+ IA-32/x64 AsmFxRestore()\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86FxRestore.c\r
+\r
+**/\r
+\r
+#include "BaseLibInternals.h"\r
+\r
+/**\r
+ Restores the current floating point/SSE/SSE2 context from a buffer.\r
+\r
+ Restores the current floating point/SSE/SSE2 state from the buffer specified\r
+ by Buffer. Buffer must be aligned on a 16-byte boundary. This function is\r
+ only available on IA-32 and X64.\r
+\r
+ If Buffer is NULL, then ASSERT().\r
+ If Buffer is not aligned on a 16-byte boundary, then ASSERT().\r
+ If Buffer was not saved with AsmFxSave(), then ASSERT().\r
+\r
+ @param Buffer Pointer to a buffer to save the floating point/SSE/SSE2 context.\r
+\r
+**/\r
+VOID\r
+EFIAPI\r
+AsmFxRestore (\r
+ IN CONST IA32_FX_BUFFER *Buffer\r
+ )\r
+{\r
+ ASSERT (Buffer != NULL);\r
+ ASSERT (((UINTN)Buffer & 0xf) == 0);\r
+\r
+ //\r
+ // Check the flag recorded by AsmFxSave() in the last 4 bytes of Buffer.\r
+ // The address must be computed as a byte offset; indexing Buffer directly\r
+ // (Buffer[sizeof (IA32_FX_BUFFER) - 4]) would scale the offset by\r
+ // sizeof (IA32_FX_BUFFER) and read far outside the buffer.\r
+ //\r
+ ASSERT (*(UINT32 *) ((UINTN)Buffer + sizeof (IA32_FX_BUFFER) - 4) == 0xAA5555AA);\r
+\r
+ InternalX86FxRestore (Buffer);\r
+}\r
--- /dev/null
+/** @file\r
+ IA-32/x64 AsmFxSave()\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86FxSave.c\r
+\r
+**/\r
+\r
+#include "BaseLibInternals.h"\r
+\r
+/**\r
+ Save the current floating point/SSE/SSE2 context to a buffer.\r
+\r
+ Saves the current floating point/SSE/SSE2 state to the buffer specified by\r
+ Buffer. Buffer must be aligned on a 16-byte boundary. This function is only\r
+ available on IA-32 and X64.\r
+\r
+ If Buffer is NULL, then ASSERT().\r
+ If Buffer is not aligned on a 16-byte boundary, then ASSERT().\r
+\r
+ @param Buffer Pointer to a buffer to save the floating point/SSE/SSE2 context.\r
+\r
+**/\r
+VOID\r
+EFIAPI\r
+AsmFxSave (\r
+ OUT IA32_FX_BUFFER *Buffer\r
+ )\r
+{\r
+ ASSERT (Buffer != NULL);\r
+ ASSERT (((UINTN)Buffer & 0xf) == 0);\r
+\r
+ InternalX86FxSave (Buffer);\r
+\r
+ //\r
+ // Mark a flag in the last 4 bytes of Buffer; it will be checked by\r
+ // AsmFxRestore(). The address must be computed as a byte offset; indexing\r
+ // Buffer directly (Buffer[sizeof (IA32_FX_BUFFER) - 4]) would scale the\r
+ // offset by sizeof (IA32_FX_BUFFER) and write far outside the buffer.\r
+ //\r
+ *(UINT32 *) ((UINTN)Buffer + sizeof (IA32_FX_BUFFER) - 4) = 0xAA5555AA;\r
+}\r
--- /dev/null
+/** @file\r
+ IA-32/x64 GetInterruptState()\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86GetInterruptState.c\r
+\r
+**/\r
+\r
+//\r
+// Required for IA32_EFLAGS32 and AsmReadEflags(); every other split-out\r
+// x86*.c file in this patch includes it, and this file cannot compile\r
+// without it.\r
+//\r
+#include "BaseLibInternals.h"\r
+\r
+/**\r
+ Retrieves the current CPU interrupt state.\r
+\r
+ Retrieves the current CPU interrupt state. Returns TRUE is interrupts are\r
+ currently enabled. Otherwise returns FALSE.\r
+\r
+ @retval TRUE CPU interrupts are enabled.\r
+ @retval FALSE CPU interrupts are disabled.\r
+\r
+**/\r
+BOOLEAN\r
+EFIAPI\r
+GetInterruptState (\r
+ VOID\r
+ )\r
+{\r
+ IA32_EFLAGS32 EFlags;\r
+\r
+ //\r
+ // Interrupts are enabled exactly when the IF bit of EFLAGS is set.\r
+ //\r
+ EFlags.UintN = AsmReadEflags ();\r
+ return (BOOLEAN)(EFlags.Bits.IF == 1);\r
+}\r
+\r
+\r
+++ /dev/null
-/** @file\r
- IA-32/x64 specific functions.\r
-\r
- Copyright (c) 2006, Intel Corporation<BR>\r
- All rights reserved. This program and the accompanying materials\r
- are licensed and made available under the terms and conditions of the BSD License\r
- which accompanies this distribution. The full text of the license may be found at\r
- http://opensource.org/licenses/bsd-license.php\r
-\r
- THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
- WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
-\r
- Module Name: x86LowLevel.c\r
-\r
-**/\r
-\r
-#include "BaseLibInternals.h"\r
-\r
-//\r
-// Bit-wise MSR operations\r
-//\r
-\r
-/**\r
- Reads a 64-bit MSR, performs a bitwise inclusive OR on the lower 32-bits, and\r
- writes the result back to the 64-bit MSR.\r
-\r
- Reads the 64-bit MSR specified by Index, performs a bitwise inclusive OR\r
- between the lower 32-bits of the read result and the value specified by\r
- OrData, and writes the result to the 64-bit MSR specified by Index. The lower\r
- 32-bits of the value written to the MSR is returned. No parameter checking is\r
- performed on Index or OrData, and some of these may cause CPU exceptions. The\r
- caller must either guarantee that Index and OrData are valid, or the caller\r
- must establish proper exception handlers. This function is only available on\r
- IA-32 and X64.\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param OrData The value to OR with the read value from the MSR.\r
-\r
- @return The lower 32-bit value written to the MSR.\r
-\r
-**/\r
-UINT32\r
-EFIAPI\r
-AsmMsrOr32 (\r
- IN UINT32 Index,\r
- IN UINT32 OrData\r
- )\r
-{\r
- return (UINT32)AsmMsrOr64 (Index, OrData);\r
-}\r
-\r
-/**\r
- Reads a 64-bit MSR, performs a bitwise AND on the lower 32-bits, and writes\r
- the result back to the 64-bit MSR.\r
-\r
- Reads the 64-bit MSR specified by Index, performs a bitwise AND between the\r
- lower 32-bits of the read result and the value specified by AndData, and\r
- writes the result to the 64-bit MSR specified by Index. The lower 32-bits of\r
- the value written to the MSR is returned. No parameter checking is performed\r
- on Index or AndData, and some of these may cause CPU exceptions. The caller\r
- must either guarantee that Index and AndData are valid, or the caller must\r
- establish proper exception handlers. This function is only available on IA-32\r
- and X64.\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param AndData The value to AND with the read value from the MSR.\r
-\r
- @return The lower 32-bit value written to the MSR.\r
-\r
-**/\r
-UINT32\r
-EFIAPI\r
-AsmMsrAnd32 (\r
- IN UINT32 Index,\r
- IN UINT32 AndData\r
- )\r
-{\r
- return (UINT32)AsmMsrAnd64 (Index, AndData);\r
-}\r
-\r
-/**\r
- Reads a 64-bit MSR, performs a bitwise AND followed by a bitwise inclusive OR\r
- on the lower 32-bits, and writes the result back to the 64-bit MSR.\r
-\r
- Reads the 64-bit MSR specified by Index, performs a bitwise AND between the\r
- lower 32-bits of the read result and the value specified by AndData\r
- preserving the upper 32-bits, performs a bitwise inclusive OR between the\r
- result of the AND operation and the value specified by OrData, and writes the\r
- result to the 64-bit MSR specified by Address. The lower 32-bits of the value\r
- written to the MSR is returned. No parameter checking is performed on Index,\r
- AndData, or OrData, and some of these may cause CPU exceptions. The caller\r
- must either guarantee that Index, AndData, and OrData are valid, or the\r
- caller must establish proper exception handlers. This function is only\r
- available on IA-32 and X64.\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param AndData The value to AND with the read value from the MSR.\r
- @param OrData The value to OR with the result of the AND operation.\r
-\r
- @return The lower 32-bit value written to the MSR.\r
-\r
-**/\r
-UINT32\r
-EFIAPI\r
-AsmMsrAndThenOr32 (\r
- IN UINT32 Index,\r
- IN UINT32 AndData,\r
- IN UINT32 OrData\r
- )\r
-{\r
- return (UINT32)AsmMsrAndThenOr64 (Index, AndData, OrData);\r
-}\r
-\r
-/**\r
- Reads a bit field of an MSR.\r
-\r
- Reads the bit field in the lower 32-bits of a 64-bit MSR. The bit field is\r
- specified by the StartBit and the EndBit. The value of the bit field is\r
- returned. The caller must either guarantee that Index is valid, or the caller\r
- must set up exception handlers to catch the exceptions. This function is only\r
- available on IA-32 and X64.\r
-\r
- If StartBit is greater than 31, then ASSERT().\r
- If EndBit is greater than 31, then ASSERT().\r
- If EndBit is less than StartBit, then ASSERT().\r
-\r
- @param Index The 32-bit MSR index to read.\r
- @param StartBit The ordinal of the least significant bit in the bit field.\r
- Range 0..31.\r
- @param EndBit The ordinal of the most significant bit in the bit field.\r
- Range 0..31.\r
-\r
- @return The bit field read from the MSR.\r
-\r
-**/\r
-UINT32\r
-EFIAPI\r
-AsmMsrBitFieldRead32 (\r
- IN UINT32 Index,\r
- IN UINTN StartBit,\r
- IN UINTN EndBit\r
- )\r
-{\r
- return BitFieldRead32 (AsmReadMsr32 (Index), StartBit, EndBit);\r
-}\r
-\r
-/**\r
- Writes a bit field to an MSR.\r
-\r
- Writes Value to a bit field in the lower 32-bits of a 64-bit MSR. The bit\r
- field is specified by the StartBit and the EndBit. All other bits in the\r
- destination MSR are preserved. The lower 32-bits of the MSR written is\r
- returned. Extra left bits in Value are stripped. The caller must either\r
- guarantee that Index and the data written is valid, or the caller must set up\r
- exception handlers to catch the exceptions. This function is only available\r
- on IA-32 and X64.\r
-\r
- If StartBit is greater than 31, then ASSERT().\r
- If EndBit is greater than 31, then ASSERT().\r
- If EndBit is less than StartBit, then ASSERT().\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param StartBit The ordinal of the least significant bit in the bit field.\r
- Range 0..31.\r
- @param EndBit The ordinal of the most significant bit in the bit field.\r
- Range 0..31.\r
- @param Value New value of the bit field.\r
-\r
- @return The lower 32-bit of the value written to the MSR.\r
-\r
-**/\r
-UINT32\r
-EFIAPI\r
-AsmMsrBitFieldWrite32 (\r
- IN UINT32 Index,\r
- IN UINTN StartBit,\r
- IN UINTN EndBit,\r
- IN UINT32 Value\r
- )\r
-{\r
- ASSERT (EndBit < sizeof (Value) * 8);\r
- ASSERT (StartBit <= EndBit);\r
- return (UINT32)AsmMsrBitFieldWrite64 (Index, StartBit, EndBit, Value);\r
-}\r
-\r
-/**\r
- Reads a bit field in a 64-bit MSR, performs a bitwise OR, and writes the\r
- result back to the bit field in the 64-bit MSR.\r
-\r
- Reads the 64-bit MSR specified by Index, performs a bitwise inclusive OR\r
- between the read result and the value specified by OrData, and writes the\r
- result to the 64-bit MSR specified by Index. The lower 32-bits of the value\r
- written to the MSR are returned. Extra left bits in OrData are stripped. The\r
- caller must either guarantee that Index and the data written is valid, or\r
- the caller must set up exception handlers to catch the exceptions. This\r
- function is only available on IA-32 and X64.\r
-\r
- If StartBit is greater than 31, then ASSERT().\r
- If EndBit is greater than 31, then ASSERT().\r
- If EndBit is less than StartBit, then ASSERT().\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param StartBit The ordinal of the least significant bit in the bit field.\r
- Range 0..31.\r
- @param EndBit The ordinal of the most significant bit in the bit field.\r
- Range 0..31.\r
- @param OrData The value to OR with the read value from the MSR.\r
-\r
- @return The lower 32-bit of the value written to the MSR.\r
-\r
-**/\r
-UINT32\r
-EFIAPI\r
-AsmMsrBitFieldOr32 (\r
- IN UINT32 Index,\r
- IN UINTN StartBit,\r
- IN UINTN EndBit,\r
- IN UINT32 OrData\r
- )\r
-{\r
- ASSERT (EndBit < sizeof (OrData) * 8);\r
- ASSERT (StartBit <= EndBit);\r
- return (UINT32)AsmMsrBitFieldOr64 (Index, StartBit, EndBit, OrData);\r
-}\r
-\r
-/**\r
- Reads a bit field in a 64-bit MSR, performs a bitwise AND, and writes the\r
- result back to the bit field in the 64-bit MSR.\r
-\r
- Reads the 64-bit MSR specified by Index, performs a bitwise AND between the\r
- read result and the value specified by AndData, and writes the result to the\r
- 64-bit MSR specified by Index. The lower 32-bits of the value written to the\r
- MSR are returned. Extra left bits in AndData are stripped. The caller must\r
- either guarantee that Index and the data written is valid, or the caller must\r
- set up exception handlers to catch the exceptions. This function is only\r
- available on IA-32 and X64.\r
-\r
- If StartBit is greater than 31, then ASSERT().\r
- If EndBit is greater than 31, then ASSERT().\r
- If EndBit is less than StartBit, then ASSERT().\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param StartBit The ordinal of the least significant bit in the bit field.\r
- Range 0..31.\r
- @param EndBit The ordinal of the most significant bit in the bit field.\r
- Range 0..31.\r
- @param AndData The value to AND with the read value from the MSR.\r
-\r
- @return The lower 32-bit of the value written to the MSR.\r
-\r
-**/\r
-UINT32\r
-EFIAPI\r
-AsmMsrBitFieldAnd32 (\r
- IN UINT32 Index,\r
- IN UINTN StartBit,\r
- IN UINTN EndBit,\r
- IN UINT32 AndData\r
- )\r
-{\r
- ASSERT (EndBit < sizeof (AndData) * 8);\r
- ASSERT (StartBit <= EndBit);\r
- return (UINT32)AsmMsrBitFieldAnd64 (Index, StartBit, EndBit, AndData);\r
-}\r
-\r
-/**\r
- Reads a bit field in a 64-bit MSR, performs a bitwise AND followed by a\r
- bitwise inclusive OR, and writes the result back to the bit field in the\r
- 64-bit MSR.\r
-\r
- Reads the 64-bit MSR specified by Index, performs a bitwise AND followed by a\r
- bitwise inclusive OR between the read result and the value specified by\r
- AndData, and writes the result to the 64-bit MSR specified by Index. The\r
- lower 32-bits of the value written to the MSR are returned. Extra left bits\r
- in both AndData and OrData are stripped. The caller must either guarantee\r
- that Index and the data written is valid, or the caller must set up exception\r
- handlers to catch the exceptions. This function is only available on IA-32\r
- and X64.\r
-\r
- If StartBit is greater than 31, then ASSERT().\r
- If EndBit is greater than 31, then ASSERT().\r
- If EndBit is less than StartBit, then ASSERT().\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param StartBit The ordinal of the least significant bit in the bit field.\r
- Range 0..31.\r
- @param EndBit The ordinal of the most significant bit in the bit field.\r
- Range 0..31.\r
- @param AndData The value to AND with the read value from the MSR.\r
- @param OrData The value to OR with the result of the AND operation.\r
-\r
- @return The lower 32-bit of the value written to the MSR.\r
-\r
-**/\r
-UINT32\r
-EFIAPI\r
-AsmMsrBitFieldAndThenOr32 (\r
- IN UINT32 Index,\r
- IN UINTN StartBit,\r
- IN UINTN EndBit,\r
- IN UINT32 AndData,\r
- IN UINT32 OrData\r
- )\r
-{\r
- ASSERT (EndBit < sizeof (AndData) * 8);\r
- ASSERT (StartBit <= EndBit);\r
- return (UINT32)AsmMsrBitFieldAndThenOr64 (\r
- Index,\r
- StartBit,\r
- EndBit,\r
- AndData,\r
- OrData\r
- );\r
-}\r
-\r
-/**\r
- Reads a 64-bit MSR, performs a bitwise inclusive OR, and writes the result\r
- back to the 64-bit MSR.\r
-\r
- Reads the 64-bit MSR specified by Index, performs a bitwise inclusive OR\r
- between the read result and the value specified by OrData, and writes the\r
- result to the 64-bit MSR specified by Index. The value written to the MSR is\r
- returned. No parameter checking is performed on Index or OrData, and some of\r
- these may cause CPU exceptions. The caller must either guarantee that Index\r
- and OrData are valid, or the caller must establish proper exception handlers.\r
- This function is only available on IA-32 and X64.\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param OrData The value to OR with the read value from the MSR.\r
-\r
- @return The value written back to the MSR.\r
-\r
-**/\r
-UINT64\r
-EFIAPI\r
-AsmMsrOr64 (\r
- IN UINT32 Index,\r
- IN UINT64 OrData\r
- )\r
-{\r
- return AsmWriteMsr64 (Index, AsmReadMsr64 (Index) | OrData);\r
-}\r
-\r
-/**\r
- Reads a 64-bit MSR, performs a bitwise AND, and writes the result back to the\r
- 64-bit MSR.\r
-\r
- Reads the 64-bit MSR specified by Index, performs a bitwise AND between the\r
- read result and the value specified by OrData, and writes the result to the\r
- 64-bit MSR specified by Index. The value written to the MSR is returned. No\r
- parameter checking is performed on Index or OrData, and some of these may\r
- cause CPU exceptions. The caller must either guarantee that Index and OrData\r
- are valid, or the caller must establish proper exception handlers. This\r
- function is only available on IA-32 and X64.\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param AndData The value to AND with the read value from the MSR.\r
-\r
- @return The value written back to the MSR.\r
-\r
-**/\r
-UINT64\r
-EFIAPI\r
-AsmMsrAnd64 (\r
- IN UINT32 Index,\r
- IN UINT64 AndData\r
- )\r
-{\r
- return AsmWriteMsr64 (Index, AsmReadMsr64 (Index) & AndData);\r
-}\r
-\r
-/**\r
- Reads a 64-bit MSR, performs a bitwise AND followed by a bitwise inclusive\r
- OR, and writes the result back to the 64-bit MSR.\r
-\r
- Reads the 64-bit MSR specified by Index, performs a bitwise AND between read\r
- result and the value specified by AndData, performs a bitwise inclusive OR\r
- between the result of the AND operation and the value specified by OrData,\r
- and writes the result to the 64-bit MSR specified by Index. The value written\r
- to the MSR is returned. No parameter checking is performed on Index, AndData,\r
- or OrData, and some of these may cause CPU exceptions. The caller must either\r
- guarantee that Index, AndData, and OrData are valid, or the caller must\r
- establish proper exception handlers. This function is only available on IA-32\r
- and X64.\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param AndData The value to AND with the read value from the MSR.\r
- @param OrData The value to OR with the result of the AND operation.\r
-\r
- @return The value written back to the MSR.\r
-\r
-**/\r
-UINT64\r
-EFIAPI\r
-AsmMsrAndThenOr64 (\r
- IN UINT32 Index,\r
- IN UINT64 AndData,\r
- IN UINT64 OrData\r
- )\r
-{\r
- return AsmWriteMsr64 (Index, (AsmReadMsr64 (Index) & AndData) | OrData);\r
-}\r
-\r
-/**\r
- Reads a bit field of an MSR.\r
-\r
- Reads the bit field in the 64-bit MSR. The bit field is specified by the\r
- StartBit and the EndBit. The value of the bit field is returned. The caller\r
- must either guarantee that Index is valid, or the caller must set up\r
- exception handlers to catch the exceptions. This function is only available\r
- on IA-32 and X64.\r
-\r
- If StartBit is greater than 63, then ASSERT().\r
- If EndBit is greater than 63, then ASSERT().\r
- If EndBit is less than StartBit, then ASSERT().\r
-\r
- @param Index The 32-bit MSR index to read.\r
- @param StartBit The ordinal of the least significant bit in the bit field.\r
- Range 0..63.\r
- @param EndBit The ordinal of the most significant bit in the bit field.\r
- Range 0..63.\r
-\r
- @return The value written back to the MSR.\r
-\r
-**/\r
-UINT64\r
-EFIAPI\r
-AsmMsrBitFieldRead64 (\r
- IN UINT32 Index,\r
- IN UINTN StartBit,\r
- IN UINTN EndBit\r
- )\r
-{\r
- return BitFieldRead64 (AsmReadMsr64 (Index), StartBit, EndBit);\r
-}\r
-\r
-/**\r
- Writes a bit field to an MSR.\r
-\r
- Writes Value to a bit field in a 64-bit MSR. The bit field is specified by\r
- the StartBit and the EndBit. All other bits in the destination MSR are\r
- preserved. The MSR written is returned. Extra left bits in Value are\r
- stripped. The caller must either guarantee that Index and the data written is\r
- valid, or the caller must set up exception handlers to catch the exceptions.\r
- This function is only available on IA-32 and X64.\r
-\r
- If StartBit is greater than 63, then ASSERT().\r
- If EndBit is greater than 63, then ASSERT().\r
- If EndBit is less than StartBit, then ASSERT().\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param StartBit The ordinal of the least significant bit in the bit field.\r
- Range 0..63.\r
- @param EndBit The ordinal of the most significant bit in the bit field.\r
- Range 0..63.\r
- @param Value New value of the bit field.\r
-\r
- @return The value written back to the MSR.\r
-\r
-**/\r
-UINT64\r
-EFIAPI\r
-AsmMsrBitFieldWrite64 (\r
- IN UINT32 Index,\r
- IN UINTN StartBit,\r
- IN UINTN EndBit,\r
- IN UINT64 Value\r
- )\r
-{\r
- return AsmWriteMsr64 (\r
- Index,\r
- BitFieldWrite64 (AsmReadMsr64 (Index), StartBit, EndBit, Value)\r
- );\r
-}\r
-\r
-/**\r
- Reads a bit field in a 64-bit MSR, performs a bitwise inclusive OR, and\r
- writes the result back to the bit field in the 64-bit MSR.\r
-\r
- Reads the 64-bit MSR specified by Index, performs a bitwise inclusive OR\r
- between the read result and the value specified by OrData, and writes the\r
- result to the 64-bit MSR specified by Index. The value written to the MSR is\r
- returned. Extra left bits in OrData are stripped. The caller must either\r
- guarantee that Index and the data written is valid, or the caller must set up\r
- exception handlers to catch the exceptions. This function is only available\r
- on IA-32 and X64.\r
-\r
- If StartBit is greater than 63, then ASSERT().\r
- If EndBit is greater than 63, then ASSERT().\r
- If EndBit is less than StartBit, then ASSERT().\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param StartBit The ordinal of the least significant bit in the bit field.\r
- Range 0..63.\r
- @param EndBit The ordinal of the most significant bit in the bit field.\r
- Range 0..63.\r
- @param OrData The value to OR with the read value from the bit field.\r
-\r
- @return The value written back to the MSR.\r
-\r
-**/\r
-UINT64\r
-EFIAPI\r
-AsmMsrBitFieldOr64 (\r
- IN UINT32 Index,\r
- IN UINTN StartBit,\r
- IN UINTN EndBit,\r
- IN UINT64 OrData\r
- )\r
-{\r
- return AsmWriteMsr64 (\r
- Index,\r
- BitFieldOr64 (AsmReadMsr64 (Index), StartBit, EndBit, OrData)\r
- );\r
-}\r
-\r
-/**\r
- Reads a bit field in a 64-bit MSR, performs a bitwise AND, and writes the\r
- result back to the bit field in the 64-bit MSR.\r
-\r
- Reads the 64-bit MSR specified by Index, performs a bitwise AND between the\r
- read result and the value specified by AndData, and writes the result to the\r
- 64-bit MSR specified by Index. The value written to the MSR is returned.\r
- Extra left bits in AndData are stripped. The caller must either guarantee\r
- that Index and the data written is valid, or the caller must set up exception\r
- handlers to catch the exceptions. This function is only available on IA-32\r
- and X64.\r
-\r
- If StartBit is greater than 63, then ASSERT().\r
- If EndBit is greater than 63, then ASSERT().\r
- If EndBit is less than StartBit, then ASSERT().\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param StartBit The ordinal of the least significant bit in the bit field.\r
- Range 0..63.\r
- @param EndBit The ordinal of the most significant bit in the bit field.\r
- Range 0..63.\r
- @param AndData The value to AND with the read value from the bit field.\r
-\r
- @return The value written back to the MSR.\r
-\r
-**/\r
-UINT64\r
-EFIAPI\r
-AsmMsrBitFieldAnd64 (\r
- IN UINT32 Index,\r
- IN UINTN StartBit,\r
- IN UINTN EndBit,\r
- IN UINT64 AndData\r
- )\r
-{\r
- return AsmWriteMsr64 (\r
- Index,\r
- BitFieldAnd64 (AsmReadMsr64 (Index), StartBit, EndBit, AndData)\r
- );\r
-}\r
-\r
-/**\r
- Reads a bit field in a 64-bit MSR, performs a bitwise AND followed by a\r
- bitwise inclusive OR, and writes the result back to the bit field in the\r
- 64-bit MSR.\r
-\r
- Reads the 64-bit MSR specified by Index, performs a bitwise AND followed by\r
- a bitwise inclusive OR between the read result and the value specified by\r
- AndData, and writes the result to the 64-bit MSR specified by Index. The\r
- value written to the MSR is returned. Extra left bits in both AndData and\r
- OrData are stripped. The caller must either guarantee that Index and the data\r
- written is valid, or the caller must set up exception handlers to catch the\r
- exceptions. This function is only available on IA-32 and X64.\r
-\r
- If StartBit is greater than 63, then ASSERT().\r
- If EndBit is greater than 63, then ASSERT().\r
- If EndBit is less than StartBit, then ASSERT().\r
-\r
- @param Index The 32-bit MSR index to write.\r
- @param StartBit The ordinal of the least significant bit in the bit field.\r
- Range 0..63.\r
- @param EndBit The ordinal of the most significant bit in the bit field.\r
- Range 0..63.\r
- @param AndData The value to AND with the read value from the bit field.\r
- @param OrData The value to OR with the result of the AND operation.\r
-\r
- @return The value written back to the MSR.\r
-\r
-**/\r
-UINT64\r
-EFIAPI\r
-AsmMsrBitFieldAndThenOr64 (\r
- IN UINT32 Index,\r
- IN UINTN StartBit,\r
- IN UINTN EndBit,\r
- IN UINT64 AndData,\r
- IN UINT64 OrData\r
- )\r
-{\r
- return AsmWriteMsr64 (\r
- Index,\r
- BitFieldAndThenOr64 (\r
- AsmReadMsr64 (Index),\r
- StartBit,\r
- EndBit,\r
- AndData,\r
- OrData\r
- )\r
- );\r
-}\r
-\r
-//\r
-// Base Library CPU Functions\r
-//\r
-\r
-/**\r
- Retrieves the current CPU interrupt state.\r
-\r
- Retrieves the current CPU interrupt state. Returns TRUE is interrupts are\r
- currently enabled. Otherwise returns FALSE.\r
-\r
- @retval TRUE CPU interrupts are enabled.\r
- @retval FALSE CPU interrupts are disabled.\r
-\r
-**/\r
-BOOLEAN\r
-EFIAPI\r
-GetInterruptState (\r
- VOID\r
- )\r
-{\r
- IA32_EFLAGS32 EFlags;\r
-\r
- EFlags.UintN = AsmReadEflags ();\r
- return (BOOLEAN)(EFlags.Bits.IF == 1);\r
-}\r
-\r
-//\r
-// Ia32 and x64 specific functions\r
-//\r
-\r
-/**\r
- Reads the current Global Descriptor Table Register(GDTR) descriptor.\r
-\r
- Reads and returns the current GDTR descriptor and returns it in Gdtr. This\r
- function is only available on IA-32 and X64.\r
-\r
- If Gdtr is NULL, then ASSERT().\r
-\r
- @param Gdtr Pointer to a GDTR descriptor.\r
-\r
-**/\r
-VOID\r
-EFIAPI\r
-AsmReadGdtr (\r
- OUT IA32_DESCRIPTOR *Gdtr\r
- )\r
-{\r
- ASSERT (Gdtr != NULL);\r
- InternalX86ReadGdtr (Gdtr);\r
-}\r
-\r
-/**\r
- Writes the current Global Descriptor Table Register (GDTR) descriptor.\r
-\r
- Writes and the current GDTR descriptor specified by Gdtr. This function is\r
- only available on IA-32 and X64.\r
-\r
- If Gdtr is NULL, then ASSERT().\r
-\r
- @param Gdtr Pointer to a GDTR descriptor.\r
-\r
-**/\r
-VOID\r
-EFIAPI\r
-AsmWriteGdtr (\r
- IN CONST IA32_DESCRIPTOR *Gdtr\r
- )\r
-{\r
- ASSERT (Gdtr != NULL);\r
- InternalX86WriteGdtr (Gdtr);\r
-}\r
-\r
-/**\r
- Reads the current Interrupt Descriptor Table Register(GDTR) descriptor.\r
-\r
- Reads and returns the current IDTR descriptor and returns it in Idtr. This\r
- function is only available on IA-32 and X64.\r
-\r
- If Idtr is NULL, then ASSERT().\r
-\r
- @param Idtr Pointer to a IDTR descriptor.\r
-\r
-**/\r
-VOID\r
-EFIAPI\r
-AsmReadIdtr (\r
- OUT IA32_DESCRIPTOR *Idtr\r
- )\r
-{\r
- ASSERT (Idtr != NULL);\r
- InternalX86ReadIdtr (Idtr);\r
-}\r
-\r
-/**\r
- Writes the current Interrupt Descriptor Table Register(GDTR) descriptor.\r
-\r
- Writes the current IDTR descriptor and returns it in Idtr. This function is\r
- only available on IA-32 and X64.\r
-\r
- If Idtr is NULL, then ASSERT().\r
-\r
- @param Idtr Pointer to a IDTR descriptor.\r
-\r
-**/\r
-VOID\r
-EFIAPI\r
-AsmWriteIdtr (\r
- IN CONST IA32_DESCRIPTOR *Idtr\r
- )\r
-{\r
- ASSERT (Idtr != NULL);\r
- InternalX86WriteIdtr (Idtr);\r
-}\r
-\r
-/**\r
- Save the current floating point/SSE/SSE2 context to a buffer.\r
-\r
- Saves the current floating point/SSE/SSE2 state to the buffer specified by\r
- Buffer. Buffer must be aligned on a 16-byte boundary. This function is only\r
- available on IA-32 and X64.\r
-\r
- If Buffer is NULL, then ASSERT().\r
- If Buffer is not aligned on a 16-byte boundary, then ASSERT().\r
-\r
- @param Buffer Pointer to a buffer to save the floating point/SSE/SSE2 context.\r
-\r
-**/\r
-VOID\r
-EFIAPI\r
-AsmFxSave (\r
- OUT IA32_FX_BUFFER *Buffer\r
- )\r
-{\r
- ASSERT (Buffer != NULL);\r
- ASSERT (((UINTN)Buffer & 0xf) == 0);\r
-\r
- InternalX86FxSave (Buffer);\r
- \r
- //\r
- // Mark one flag at end of Buffer, it will be check by AsmFxRestor()\r
- //\r
- *(UINT32 *) (&Buffer[sizeof (IA32_FX_BUFFER) - 4]) = 0xAA5555AA; \r
-}\r
-\r
-/**\r
- Restores the current floating point/SSE/SSE2 context from a buffer.\r
-\r
- Restores the current floating point/SSE/SSE2 state from the buffer specified\r
- by Buffer. Buffer must be aligned on a 16-byte boundary. This function is\r
- only available on IA-32 and X64.\r
-\r
- If Buffer is NULL, then ASSERT().\r
- If Buffer is not aligned on a 16-byte boundary, then ASSERT().\r
- If Buffer was not saved with AsmFxSave(), then ASSERT().\r
-\r
- @param Buffer Pointer to a buffer to save the floating point/SSE/SSE2 context.\r
-\r
-**/\r
-VOID\r
-EFIAPI\r
-AsmFxRestore (\r
- IN CONST IA32_FX_BUFFER *Buffer\r
- )\r
-{\r
- ASSERT (Buffer != NULL);\r
- ASSERT (((UINTN)Buffer & 0xf) == 0);\r
-\r
- //\r
- // Check the flag recorded by AsmFxSave()\r
- //\r
- ASSERT (*(UINT32 *) (&Buffer[sizeof (IA32_FX_BUFFER) - 4]) == 0xAA5555AA);\r
-\r
- InternalX86FxRestore (Buffer);\r
-}\r
-\r
-/**\r
- Enables the 32-bit paging mode on the CPU.\r
-\r
- Enables the 32-bit paging mode on the CPU. CR0, CR3, CR4, and the page tables\r
- must be properly initialized prior to calling this service. This function\r
- assumes the current execution mode is 32-bit protected mode. This function is\r
- only available on IA-32. After the 32-bit paging mode is enabled, control is\r
- transferred to the function specified by EntryPoint using the new stack\r
- specified by NewStack and passing in the parameters specified by Context1 and\r
- Context2. Context1 and Context2 are optional and may be NULL. The function\r
- EntryPoint must never return.\r
-\r
- If the current execution mode is not 32-bit protected mode, then ASSERT().\r
- If EntryPoint is NULL, then ASSERT().\r
- If NewStack is NULL, then ASSERT().\r
-\r
- There are a number of constraints that must be followed before calling this\r
- function:\r
- 1) Interrupts must be disabled.\r
- 2) The caller must be in 32-bit protected mode with flat descriptors. This\r
- means all descriptors must have a base of 0 and a limit of 4GB.\r
- 3) CR0 and CR4 must be compatible with 32-bit protected mode with flat\r
- descriptors.\r
- 4) CR3 must point to valid page tables that will be used once the transition\r
- is complete, and those page tables must guarantee that the pages for this\r
- function and the stack are identity mapped.\r
-\r
- @param EntryPoint A pointer to function to call with the new stack after\r
- paging is enabled.\r
- @param Context1 A pointer to the context to pass into the EntryPoint\r
- function as the first parameter after paging is enabled.\r
- @param Context2 A pointer to the context to pass into the EntryPoint\r
- function as the second parameter after paging is enabled.\r
- @param NewStack A pointer to the new stack to use for the EntryPoint\r
- function after paging is enabled.\r
-\r
-**/\r
-VOID\r
-EFIAPI\r
-AsmEnablePaging32 (\r
- IN SWITCH_STACK_ENTRY_POINT EntryPoint,\r
- IN VOID *Context1, OPTIONAL\r
- IN VOID *Context2, OPTIONAL\r
- IN VOID *NewStack\r
- )\r
-{\r
- ASSERT (EntryPoint != NULL);\r
- ASSERT (NewStack != NULL);\r
- InternalX86EnablePaging32 (EntryPoint, Context1, Context2, NewStack);\r
-}\r
-\r
-/**\r
- Disables the 32-bit paging mode on the CPU.\r
-\r
- Disables the 32-bit paging mode on the CPU and returns to 32-bit protected\r
- mode. This function assumes the current execution mode is 32-paged protected\r
- mode. This function is only available on IA-32. After the 32-bit paging mode\r
- is disabled, control is transferred to the function specified by EntryPoint\r
- using the new stack specified by NewStack and passing in the parameters\r
- specified by Context1 and Context2. Context1 and Context2 are optional and\r
- may be NULL. The function EntryPoint must never return.\r
-\r
- If the current execution mode is not 32-bit paged mode, then ASSERT().\r
- If EntryPoint is NULL, then ASSERT().\r
- If NewStack is NULL, then ASSERT().\r
-\r
- There are a number of constraints that must be followed before calling this\r
- function:\r
- 1) Interrupts must be disabled.\r
- 2) The caller must be in 32-bit paged mode.\r
- 3) CR0, CR3, and CR4 must be compatible with 32-bit paged mode.\r
- 4) CR3 must point to valid page tables that guarantee that the pages for\r
- this function and the stack are identity mapped.\r
-\r
- @param EntryPoint A pointer to function to call with the new stack after\r
- paging is disabled.\r
- @param Context1 A pointer to the context to pass into the EntryPoint\r
- function as the first parameter after paging is disabled.\r
- @param Context2 A pointer to the context to pass into the EntryPoint\r
- function as the second parameter after paging is\r
- disabled.\r
- @param NewStack A pointer to the new stack to use for the EntryPoint\r
- function after paging is disabled.\r
-\r
-**/\r
-VOID\r
-EFIAPI\r
-AsmDisablePaging32 (\r
- IN SWITCH_STACK_ENTRY_POINT EntryPoint,\r
- IN VOID *Context1, OPTIONAL\r
- IN VOID *Context2, OPTIONAL\r
- IN VOID *NewStack\r
- )\r
-{\r
- ASSERT (EntryPoint != NULL);\r
- ASSERT (NewStack != NULL);\r
- InternalX86DisablePaging32 (EntryPoint, Context1, Context2, NewStack);\r
-}\r
-\r
-/**\r
- Enables the 64-bit paging mode on the CPU.\r
-\r
- Enables the 64-bit paging mode on the CPU. CR0, CR3, CR4, and the page tables\r
- must be properly initialized prior to calling this service. This function\r
- assumes the current execution mode is 32-bit protected mode with flat\r
- descriptors. This function is only available on IA-32. After the 64-bit\r
- paging mode is enabled, control is transferred to the function specified by\r
- EntryPoint using the new stack specified by NewStack and passing in the\r
- parameters specified by Context1 and Context2. Context1 and Context2 are\r
- optional and may be 0. The function EntryPoint must never return.\r
-\r
- If the current execution mode is not 32-bit protected mode with flat\r
- descriptors, then ASSERT().\r
- If EntryPoint is 0, then ASSERT().\r
- If NewStack is 0, then ASSERT().\r
-\r
- @param Cs The 16-bit selector to load in the CS before EntryPoint\r
- is called. The descriptor in the GDT that this selector\r
- references must be setup for long mode.\r
- @param EntryPoint The 64-bit virtual address of the function to call with\r
- the new stack after paging is enabled.\r
- @param Context1 The 64-bit virtual address of the context to pass into\r
- the EntryPoint function as the first parameter after\r
- paging is enabled.\r
- @param Context2 The 64-bit virtual address of the context to pass into\r
- the EntryPoint function as the second parameter after\r
- paging is enabled.\r
- @param NewStack The 64-bit virtual address of the new stack to use for\r
- the EntryPoint function after paging is enabled.\r
-\r
-**/\r
-VOID\r
-EFIAPI\r
-AsmEnablePaging64 (\r
- IN UINT16 Cs,\r
- IN UINT64 EntryPoint,\r
- IN UINT64 Context1, OPTIONAL\r
- IN UINT64 Context2, OPTIONAL\r
- IN UINT64 NewStack\r
- )\r
-{\r
- ASSERT (EntryPoint != 0);\r
- ASSERT (NewStack != 0);\r
- InternalX86EnablePaging64 (Cs, EntryPoint, Context1, Context2, NewStack);\r
-}\r
-\r
-/**\r
- Disables the 64-bit paging mode on the CPU.\r
-\r
- Disables the 64-bit paging mode on the CPU and returns to 32-bit protected\r
- mode. This function assumes the current execution mode is 64-paging mode.\r
- This function is only available on X64. After the 64-bit paging mode is\r
- disabled, control is transferred to the function specified by EntryPoint\r
- using the new stack specified by NewStack and passing in the parameters\r
- specified by Context1 and Context2. Context1 and Context2 are optional and\r
- may be 0. The function EntryPoint must never return.\r
-\r
- If the current execution mode is not 64-bit paged mode, then ASSERT().\r
- If EntryPoint is 0, then ASSERT().\r
- If NewStack is 0, then ASSERT().\r
-\r
- @param Cs The 16-bit selector to load in the CS before EntryPoint\r
- is called. The descriptor in the GDT that this selector\r
- references must be setup for 32-bit protected mode.\r
- @param EntryPoint The 64-bit virtual address of the function to call with\r
- the new stack after paging is disabled.\r
- @param Context1 The 64-bit virtual address of the context to pass into\r
- the EntryPoint function as the first parameter after\r
- paging is disabled.\r
- @param Context2 The 64-bit virtual address of the context to pass into\r
- the EntryPoint function as the second parameter after\r
- paging is disabled.\r
- @param NewStack The 64-bit virtual address of the new stack to use for\r
- the EntryPoint function after paging is disabled.\r
-\r
-**/\r
-VOID\r
-EFIAPI\r
-AsmDisablePaging64 (\r
- IN UINT16 Cs,\r
- IN UINT32 EntryPoint,\r
- IN UINT32 Context1, OPTIONAL\r
- IN UINT32 Context2, OPTIONAL\r
- IN UINT32 NewStack\r
- )\r
-{\r
- ASSERT (EntryPoint != 0);\r
- ASSERT (NewStack != 0);\r
- InternalX86DisablePaging64 (Cs, EntryPoint, Context1, Context2, NewStack);\r
-}\r
-\r
-//\r
-// x86 version of MemoryFence()\r
-//\r
-\r
-/**\r
- Used to serialize load and store operations.\r
-\r
- All loads and stores that proceed calls to this function are guaranteed to be\r
- globally visible when this function returns.\r
-\r
-**/\r
-VOID\r
-EFIAPI\r
-MemoryFence (\r
- VOID\r
- )\r
-{\r
- return;\r
-}\r
--- /dev/null
+/** @file\r
+ IA-32/x64 MemoryFence().\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86MemoryFence.c\r
+\r
+**/\r
+\r
+/**\r
+ Used to serialize load and store operations.\r
+\r
+ All loads and stores that precede calls to this function are guaranteed to be\r
+ globally visible when this function returns.\r
+\r
+**/\r
+VOID\r
+EFIAPI\r
+MemoryFence (\r
+ VOID\r
+ )\r
+{\r
+ return;\r
+}\r
--- /dev/null
+/** @file\r
+ IA-32/x64 MSR functions.\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86Msr.c\r
+\r
+**/\r
+\r
+/**\r
+ Returns the lower 32-bits of a Machine Specific Register(MSR).\r
+\r
+ Reads and returns the lower 32-bits of the MSR specified by Index.\r
+ No parameter checking is performed on Index, and some Index values may cause\r
+ CPU exceptions. The caller must either guarantee that Index is valid, or the\r
+ caller must set up exception handlers to catch the exceptions. This function\r
+ is only available on IA-32 and X64.\r
+\r
+ @param Index The 32-bit MSR index to read.\r
+\r
+ @return The lower 32 bits of the MSR identified by Index.\r
+\r
+**/\r
+UINT32\r
+EFIAPI\r
+AsmReadMsr32 (\r
+ IN UINT32 Index\r
+ )\r
+{\r
+ return (UINT32)AsmReadMsr64 (Index);\r
+}\r
+\r
+/**\r
+ Zero-extend a 32-bit value and writes it to a Machine Specific Register(MSR).\r
+\r
+ Writes the 32-bit value specified by Value to the MSR specified by Index. The\r
+ upper 32-bits of the MSR write are set to zero. The 32-bit value written to\r
+ the MSR is returned. No parameter checking is performed on Index or Value,\r
+ and some of these may cause CPU exceptions. The caller must either guarantee\r
+ that Index and Value are valid, or the caller must establish proper exception\r
+ handlers. This function is only available on IA-32 and X64.\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param Value The 32-bit value to write to the MSR.\r
+\r
+ @return Value\r
+\r
+**/\r
+UINT32\r
+EFIAPI\r
+AsmWriteMsr32 (\r
+ IN UINT32 Index,\r
+ IN UINT32 Value\r
+ )\r
+{\r
+ return (UINT32)AsmWriteMsr64 (Index, Value);\r
+}\r
+\r
+/**\r
+ Reads a 64-bit MSR, performs a bitwise inclusive OR on the lower 32-bits, and\r
+ writes the result back to the 64-bit MSR.\r
+\r
+ Reads the 64-bit MSR specified by Index, performs a bitwise inclusive OR\r
+ between the lower 32-bits of the read result and the value specified by\r
+ OrData, and writes the result to the 64-bit MSR specified by Index. The lower\r
+ 32-bits of the value written to the MSR is returned. No parameter checking is\r
+ performed on Index or OrData, and some of these may cause CPU exceptions. The\r
+ caller must either guarantee that Index and OrData are valid, or the caller\r
+ must establish proper exception handlers. This function is only available on\r
+ IA-32 and X64.\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param OrData The value to OR with the read value from the MSR.\r
+\r
+ @return The lower 32-bit value written to the MSR.\r
+\r
+**/\r
+UINT32\r
+EFIAPI\r
+AsmMsrOr32 (\r
+ IN UINT32 Index,\r
+ IN UINT32 OrData\r
+ )\r
+{\r
+ return (UINT32)AsmMsrOr64 (Index, OrData);\r
+}\r
+\r
+/**\r
+ Reads a 64-bit MSR, performs a bitwise AND on the lower 32-bits, and writes\r
+ the result back to the 64-bit MSR.\r
+\r
+ Reads the 64-bit MSR specified by Index, performs a bitwise AND between the\r
+ lower 32-bits of the read result and the value specified by AndData, and\r
+ writes the result to the 64-bit MSR, preserving the upper 32-bits. The lower\r
+ 32-bits of the value written is returned. No parameter checking is performed\r
+ on Index or AndData, and some of these may cause CPU exceptions. The caller\r
+ must either guarantee that Index and AndData are valid, or the caller must\r
+ establish proper exception handlers. This function is only available on IA-32\r
+ and X64.\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param AndData The value to AND with the read value from the MSR.\r
+\r
+ @return The lower 32-bit value written to the MSR.\r
+\r
+**/\r
+UINT32\r
+EFIAPI\r
+AsmMsrAnd32 (\r
+ IN UINT32 Index,\r
+ IN UINT32 AndData\r
+ )\r
+{\r
+ return (UINT32)AsmMsrAnd64 (Index, AndData | LShiftU64 (0xFFFFFFFF, 32));\r
+}\r
+\r
+/**\r
+ Reads a 64-bit MSR, performs a bitwise AND followed by a bitwise inclusive OR\r
+ on the lower 32-bits, and writes the result back to the 64-bit MSR.\r
+\r
+ Reads the 64-bit MSR specified by Index, performs a bitwise AND between the\r
+ lower 32-bits of the read result and the value specified by AndData\r
+ preserving the upper 32-bits, performs a bitwise inclusive OR between the\r
+ result of the AND operation and the value specified by OrData, and writes the\r
+ result to the 64-bit MSR specified by Index. The lower 32-bits of the value\r
+ written to the MSR is returned. No parameter checking is performed on Index,\r
+ AndData, or OrData, and some of these may cause CPU exceptions. The caller\r
+ must either guarantee that Index, AndData, and OrData are valid, or the\r
+ caller must establish proper exception handlers. This function is only\r
+ available on IA-32 and X64.\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param AndData The value to AND with the read value from the MSR.\r
+ @param OrData The value to OR with the result of the AND operation.\r
+\r
+ @return The lower 32-bit value written to the MSR.\r
+\r
+**/\r
+UINT32\r
+EFIAPI\r
+AsmMsrAndThenOr32 (\r
+ IN UINT32 Index,\r
+ IN UINT32 AndData,\r
+ IN UINT32 OrData\r
+ )\r
+{\r
+ return (UINT32)AsmMsrAndThenOr64 (Index, AndData | LShiftU64 (0xFFFFFFFF, 32), OrData);\r
+}\r
+\r
+/**\r
+ Reads a bit field of an MSR.\r
+\r
+ Reads the bit field in the lower 32-bits of a 64-bit MSR. The bit field is\r
+ specified by the StartBit and the EndBit. The value of the bit field is\r
+ returned. The caller must either guarantee that Index is valid, or the caller\r
+ must set up exception handlers to catch the exceptions. This function is only\r
+ available on IA-32 and X64.\r
+\r
+ If StartBit is greater than 31, then ASSERT().\r
+ If EndBit is greater than 31, then ASSERT().\r
+ If EndBit is less than StartBit, then ASSERT().\r
+\r
+ @param Index The 32-bit MSR index to read.\r
+ @param StartBit The ordinal of the least significant bit in the bit field.\r
+ Range 0..31.\r
+ @param EndBit The ordinal of the most significant bit in the bit field.\r
+ Range 0..31.\r
+\r
+ @return The bit field read from the MSR.\r
+\r
+**/\r
+UINT32\r
+EFIAPI\r
+AsmMsrBitFieldRead32 (\r
+ IN UINT32 Index,\r
+ IN UINTN StartBit,\r
+ IN UINTN EndBit\r
+ )\r
+{\r
+ return BitFieldRead32 (AsmReadMsr32 (Index), StartBit, EndBit);\r
+}\r
+\r
+/**\r
+ Writes a bit field to an MSR.\r
+\r
+ Writes Value to a bit field in the lower 32-bits of a 64-bit MSR. The bit\r
+ field is specified by the StartBit and the EndBit. All other bits in the\r
+ destination MSR are preserved. The lower 32-bits of the MSR written is\r
+ returned. Extra left bits in Value are stripped. The caller must either\r
+ guarantee that Index and the data written is valid, or the caller must set up\r
+ exception handlers to catch the exceptions. This function is only available\r
+ on IA-32 and X64.\r
+\r
+ If StartBit is greater than 31, then ASSERT().\r
+ If EndBit is greater than 31, then ASSERT().\r
+ If EndBit is less than StartBit, then ASSERT().\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param StartBit The ordinal of the least significant bit in the bit field.\r
+ Range 0..31.\r
+ @param EndBit The ordinal of the most significant bit in the bit field.\r
+ Range 0..31.\r
+ @param Value New value of the bit field.\r
+\r
+ @return The lower 32-bit of the value written to the MSR.\r
+\r
+**/\r
+UINT32\r
+EFIAPI\r
+AsmMsrBitFieldWrite32 (\r
+ IN UINT32 Index,\r
+ IN UINTN StartBit,\r
+ IN UINTN EndBit,\r
+ IN UINT32 Value\r
+ )\r
+{\r
+ ASSERT (EndBit < sizeof (Value) * 8);\r
+ ASSERT (StartBit <= EndBit);\r
+ return (UINT32)AsmMsrBitFieldWrite64 (Index, StartBit, EndBit, Value);\r
+}\r
+\r
+/**\r
+ Reads a bit field in a 64-bit MSR, performs a bitwise OR, and writes the\r
+ result back to the bit field in the 64-bit MSR.\r
+\r
+ Reads the 64-bit MSR specified by Index, performs a bitwise inclusive OR\r
+ between the read result and the value specified by OrData, and writes the\r
+ result to the 64-bit MSR specified by Index. The lower 32-bits of the value\r
+ written to the MSR are returned. Extra left bits in OrData are stripped. The\r
+ caller must either guarantee that Index and the data written is valid, or\r
+ the caller must set up exception handlers to catch the exceptions. This\r
+ function is only available on IA-32 and X64.\r
+\r
+ If StartBit is greater than 31, then ASSERT().\r
+ If EndBit is greater than 31, then ASSERT().\r
+ If EndBit is less than StartBit, then ASSERT().\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param StartBit The ordinal of the least significant bit in the bit field.\r
+ Range 0..31.\r
+ @param EndBit The ordinal of the most significant bit in the bit field.\r
+ Range 0..31.\r
+ @param OrData The value to OR with the read value from the MSR.\r
+\r
+ @return The lower 32-bit of the value written to the MSR.\r
+\r
+**/\r
+UINT32\r
+EFIAPI\r
+AsmMsrBitFieldOr32 (\r
+ IN UINT32 Index,\r
+ IN UINTN StartBit,\r
+ IN UINTN EndBit,\r
+ IN UINT32 OrData\r
+ )\r
+{\r
+ ASSERT (EndBit < sizeof (OrData) * 8);\r
+ ASSERT (StartBit <= EndBit);\r
+ return (UINT32)AsmMsrBitFieldOr64 (Index, StartBit, EndBit, OrData);\r
+}\r
+\r
+/**\r
+ Reads a bit field in a 64-bit MSR, performs a bitwise AND, and writes the\r
+ result back to the bit field in the 64-bit MSR.\r
+\r
+ Reads the 64-bit MSR specified by Index, performs a bitwise AND between the\r
+ read result and the value specified by AndData, and writes the result to the\r
+ 64-bit MSR specified by Index. The lower 32-bits of the value written to the\r
+ MSR are returned. Extra left bits in AndData are stripped. The caller must\r
+ either guarantee that Index and the data written is valid, or the caller must\r
+ set up exception handlers to catch the exceptions. This function is only\r
+ available on IA-32 and X64.\r
+\r
+ If StartBit is greater than 31, then ASSERT().\r
+ If EndBit is greater than 31, then ASSERT().\r
+ If EndBit is less than StartBit, then ASSERT().\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param StartBit The ordinal of the least significant bit in the bit field.\r
+ Range 0..31.\r
+ @param EndBit The ordinal of the most significant bit in the bit field.\r
+ Range 0..31.\r
+ @param AndData The value to AND with the read value from the MSR.\r
+\r
+ @return The lower 32-bit of the value written to the MSR.\r
+\r
+**/\r
+UINT32\r
+EFIAPI\r
+AsmMsrBitFieldAnd32 (\r
+ IN UINT32 Index,\r
+ IN UINTN StartBit,\r
+ IN UINTN EndBit,\r
+ IN UINT32 AndData\r
+ )\r
+{\r
+ ASSERT (EndBit < sizeof (AndData) * 8);\r
+ ASSERT (StartBit <= EndBit);\r
+ return (UINT32)AsmMsrBitFieldAnd64 (Index, StartBit, EndBit, AndData);\r
+}\r
+\r
+/**\r
+ Reads a bit field in a 64-bit MSR, performs a bitwise AND followed by a\r
+ bitwise inclusive OR, and writes the result back to the bit field in the\r
+ 64-bit MSR.\r
+\r
+ Reads the 64-bit MSR specified by Index, performs a bitwise AND followed by a\r
+ bitwise inclusive OR between the read result and the value specified by\r
+ AndData, and writes the result to the 64-bit MSR specified by Index. The\r
+ lower 32-bits of the value written to the MSR are returned. Extra left bits\r
+ in both AndData and OrData are stripped. The caller must either guarantee\r
+ that Index and the data written is valid, or the caller must set up exception\r
+ handlers to catch the exceptions. This function is only available on IA-32\r
+ and X64.\r
+\r
+ If StartBit is greater than 31, then ASSERT().\r
+ If EndBit is greater than 31, then ASSERT().\r
+ If EndBit is less than StartBit, then ASSERT().\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param StartBit The ordinal of the least significant bit in the bit field.\r
+ Range 0..31.\r
+ @param EndBit The ordinal of the most significant bit in the bit field.\r
+ Range 0..31.\r
+ @param AndData The value to AND with the read value from the MSR.\r
+ @param OrData The value to OR with the result of the AND operation.\r
+\r
+ @return The lower 32-bit of the value written to the MSR.\r
+\r
+**/\r
+UINT32\r
+EFIAPI\r
+AsmMsrBitFieldAndThenOr32 (\r
+ IN UINT32 Index,\r
+ IN UINTN StartBit,\r
+ IN UINTN EndBit,\r
+ IN UINT32 AndData,\r
+ IN UINT32 OrData\r
+ )\r
+{\r
+ ASSERT (EndBit < sizeof (AndData) * 8);\r
+ ASSERT (StartBit <= EndBit);\r
+ return (UINT32)AsmMsrBitFieldAndThenOr64 (\r
+ Index,\r
+ StartBit,\r
+ EndBit,\r
+ AndData,\r
+ OrData\r
+ );\r
+}\r
+\r
+/**\r
+ Reads a 64-bit MSR, performs a bitwise inclusive OR, and writes the result\r
+ back to the 64-bit MSR.\r
+\r
+ Reads the 64-bit MSR specified by Index, performs a bitwise inclusive OR\r
+ between the read result and the value specified by OrData, and writes the\r
+ result to the 64-bit MSR specified by Index. The value written to the MSR is\r
+ returned. No parameter checking is performed on Index or OrData, and some of\r
+ these may cause CPU exceptions. The caller must either guarantee that Index\r
+ and OrData are valid, or the caller must establish proper exception handlers.\r
+ This function is only available on IA-32 and X64.\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param OrData The value to OR with the read value from the MSR.\r
+\r
+ @return The value written back to the MSR.\r
+\r
+**/\r
+UINT64\r
+EFIAPI\r
+AsmMsrOr64 (\r
+ IN UINT32 Index,\r
+ IN UINT64 OrData\r
+ )\r
+{\r
+ return AsmWriteMsr64 (Index, AsmReadMsr64 (Index) | OrData);\r
+}\r
+\r
+/**\r
+ Reads a 64-bit MSR, performs a bitwise AND, and writes the result back to the\r
+ 64-bit MSR.\r
+\r
+ Reads the 64-bit MSR specified by Index, performs a bitwise AND between the\r
+ read result and the value specified by AndData, and writes the result to the\r
+ 64-bit MSR specified by Index. The value written to the MSR is returned. No\r
+ parameter checking is performed on Index or AndData, and some of these may\r
+ cause CPU exceptions. The caller must either guarantee that Index and AndData\r
+ are valid, or the caller must establish proper exception handlers. This\r
+ function is only available on IA-32 and X64.\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param AndData The value to AND with the read value from the MSR.\r
+\r
+ @return The value written back to the MSR.\r
+\r
+**/\r
+UINT64\r
+EFIAPI\r
+AsmMsrAnd64 (\r
+ IN UINT32 Index,\r
+ IN UINT64 AndData\r
+ )\r
+{\r
+ return AsmWriteMsr64 (Index, AsmReadMsr64 (Index) & AndData);\r
+}\r
+\r
+/**\r
+ Reads a 64-bit MSR, performs a bitwise AND followed by a bitwise inclusive\r
+ OR, and writes the result back to the 64-bit MSR.\r
+\r
+ Reads the 64-bit MSR specified by Index, performs a bitwise AND between read\r
+ result and the value specified by AndData, performs a bitwise inclusive OR\r
+ between the result of the AND operation and the value specified by OrData,\r
+ and writes the result to the 64-bit MSR specified by Index. The value written\r
+ to the MSR is returned. No parameter checking is performed on Index, AndData,\r
+ or OrData, and some of these may cause CPU exceptions. The caller must either\r
+ guarantee that Index, AndData, and OrData are valid, or the caller must\r
+ establish proper exception handlers. This function is only available on IA-32\r
+ and X64.\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param AndData The value to AND with the read value from the MSR.\r
+ @param OrData The value to OR with the result of the AND operation.\r
+\r
+ @return The value written back to the MSR.\r
+\r
+**/\r
+UINT64\r
+EFIAPI\r
+AsmMsrAndThenOr64 (\r
+ IN UINT32 Index,\r
+ IN UINT64 AndData,\r
+ IN UINT64 OrData\r
+ )\r
+{\r
+ return AsmWriteMsr64 (Index, (AsmReadMsr64 (Index) & AndData) | OrData);\r
+}\r
+\r
+/**\r
+ Reads a bit field of an MSR.\r
+\r
+ Reads the bit field in the 64-bit MSR. The bit field is specified by the\r
+ StartBit and the EndBit. The value of the bit field is returned. The caller\r
+ must either guarantee that Index is valid, or the caller must set up\r
+ exception handlers to catch the exceptions. This function is only available\r
+ on IA-32 and X64.\r
+\r
+ If StartBit is greater than 63, then ASSERT().\r
+ If EndBit is greater than 63, then ASSERT().\r
+ If EndBit is less than StartBit, then ASSERT().\r
+\r
+ @param Index The 32-bit MSR index to read.\r
+ @param StartBit The ordinal of the least significant bit in the bit field.\r
+ Range 0..63.\r
+ @param EndBit The ordinal of the most significant bit in the bit field.\r
+ Range 0..63.\r
+\r
+ @return The bit field read from the MSR.\r
+\r
+**/\r
+UINT64\r
+EFIAPI\r
+AsmMsrBitFieldRead64 (\r
+ IN UINT32 Index,\r
+ IN UINTN StartBit,\r
+ IN UINTN EndBit\r
+ )\r
+{\r
+ return BitFieldRead64 (AsmReadMsr64 (Index), StartBit, EndBit);\r
+}\r
+\r
+/**\r
+ Writes a bit field to an MSR.\r
+\r
+ Writes Value to a bit field in a 64-bit MSR. The bit field is specified by\r
+ the StartBit and the EndBit. All other bits in the destination MSR are\r
+ preserved. The MSR written is returned. Extra left bits in Value are\r
+ stripped. The caller must either guarantee that Index and the data written is\r
+ valid, or the caller must set up exception handlers to catch the exceptions.\r
+ This function is only available on IA-32 and X64.\r
+\r
+ If StartBit is greater than 63, then ASSERT().\r
+ If EndBit is greater than 63, then ASSERT().\r
+ If EndBit is less than StartBit, then ASSERT().\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param StartBit The ordinal of the least significant bit in the bit field.\r
+ Range 0..63.\r
+ @param EndBit The ordinal of the most significant bit in the bit field.\r
+ Range 0..63.\r
+ @param Value New value of the bit field.\r
+\r
+ @return The value written back to the MSR.\r
+\r
+**/\r
+UINT64\r
+EFIAPI\r
+AsmMsrBitFieldWrite64 (\r
+ IN UINT32 Index,\r
+ IN UINTN StartBit,\r
+ IN UINTN EndBit,\r
+ IN UINT64 Value\r
+ )\r
+{\r
+ return AsmWriteMsr64 (\r
+ Index,\r
+ BitFieldWrite64 (AsmReadMsr64 (Index), StartBit, EndBit, Value)\r
+ );\r
+}\r
+\r
+/**\r
+ Reads a bit field in a 64-bit MSR, performs a bitwise inclusive OR, and\r
+ writes the result back to the bit field in the 64-bit MSR.\r
+\r
+ Reads the 64-bit MSR specified by Index, performs a bitwise inclusive OR\r
+ between the read result and the value specified by OrData, and writes the\r
+ result to the 64-bit MSR specified by Index. The value written to the MSR is\r
+ returned. Extra left bits in OrData are stripped. The caller must either\r
+ guarantee that Index and the data written is valid, or the caller must set up\r
+ exception handlers to catch the exceptions. This function is only available\r
+ on IA-32 and X64.\r
+\r
+ If StartBit is greater than 63, then ASSERT().\r
+ If EndBit is greater than 63, then ASSERT().\r
+ If EndBit is less than StartBit, then ASSERT().\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param StartBit The ordinal of the least significant bit in the bit field.\r
+ Range 0..63.\r
+ @param EndBit The ordinal of the most significant bit in the bit field.\r
+ Range 0..63.\r
+ @param OrData The value to OR with the read value from the bit field.\r
+\r
+ @return The value written back to the MSR.\r
+\r
+**/\r
+UINT64\r
+EFIAPI\r
+AsmMsrBitFieldOr64 (\r
+ IN UINT32 Index,\r
+ IN UINTN StartBit,\r
+ IN UINTN EndBit,\r
+ IN UINT64 OrData\r
+ )\r
+{\r
+ return AsmWriteMsr64 (\r
+ Index,\r
+ BitFieldOr64 (AsmReadMsr64 (Index), StartBit, EndBit, OrData)\r
+ );\r
+}\r
+\r
+/**\r
+ Reads a bit field in a 64-bit MSR, performs a bitwise AND, and writes the\r
+ result back to the bit field in the 64-bit MSR.\r
+\r
+ Reads the 64-bit MSR specified by Index, performs a bitwise AND between the\r
+ read result and the value specified by AndData, and writes the result to the\r
+ 64-bit MSR specified by Index. The value written to the MSR is returned.\r
+ Extra left bits in AndData are stripped. The caller must either guarantee\r
+ that Index and the data written is valid, or the caller must set up exception\r
+ handlers to catch the exceptions. This function is only available on IA-32\r
+ and X64.\r
+\r
+ If StartBit is greater than 63, then ASSERT().\r
+ If EndBit is greater than 63, then ASSERT().\r
+ If EndBit is less than StartBit, then ASSERT().\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param StartBit The ordinal of the least significant bit in the bit field.\r
+ Range 0..63.\r
+ @param EndBit The ordinal of the most significant bit in the bit field.\r
+ Range 0..63.\r
+ @param AndData The value to AND with the read value from the bit field.\r
+\r
+ @return The value written back to the MSR.\r
+\r
+**/\r
+UINT64\r
+EFIAPI\r
+AsmMsrBitFieldAnd64 (\r
+ IN UINT32 Index,\r
+ IN UINTN StartBit,\r
+ IN UINTN EndBit,\r
+ IN UINT64 AndData\r
+ )\r
+{\r
+ return AsmWriteMsr64 (\r
+ Index,\r
+ BitFieldAnd64 (AsmReadMsr64 (Index), StartBit, EndBit, AndData)\r
+ );\r
+}\r
+\r
+/**\r
+ Reads a bit field in a 64-bit MSR, performs a bitwise AND followed by a\r
+ bitwise inclusive OR, and writes the result back to the bit field in the\r
+ 64-bit MSR.\r
+\r
+ Reads the 64-bit MSR specified by Index, performs a bitwise AND followed by\r
+ a bitwise inclusive OR between the read result and the value specified by\r
+ AndData, and writes the result to the 64-bit MSR specified by Index. The\r
+ value written to the MSR is returned. Extra left bits in both AndData and\r
+ OrData are stripped. The caller must either guarantee that Index and the data\r
+ written is valid, or the caller must set up exception handlers to catch the\r
+ exceptions. This function is only available on IA-32 and X64.\r
+\r
+ If StartBit is greater than 63, then ASSERT().\r
+ If EndBit is greater than 63, then ASSERT().\r
+ If EndBit is less than StartBit, then ASSERT().\r
+\r
+ @param Index The 32-bit MSR index to write.\r
+ @param StartBit The ordinal of the least significant bit in the bit field.\r
+ Range 0..63.\r
+ @param EndBit The ordinal of the most significant bit in the bit field.\r
+ Range 0..63.\r
+ @param AndData The value to AND with the read value from the bit field.\r
+ @param OrData The value to OR with the result of the AND operation.\r
+\r
+ @return The value written back to the MSR.\r
+\r
+**/\r
+UINT64\r
+EFIAPI\r
+AsmMsrBitFieldAndThenOr64 (\r
+ IN UINT32 Index,\r
+ IN UINTN StartBit,\r
+ IN UINTN EndBit,\r
+ IN UINT64 AndData,\r
+ IN UINT64 OrData\r
+ )\r
+{\r
+ return AsmWriteMsr64 (\r
+ Index,\r
+ BitFieldAndThenOr64 (\r
+ AsmReadMsr64 (Index),\r
+ StartBit,\r
+ EndBit,\r
+ AndData,\r
+ OrData\r
+ )\r
+ );\r
+}\r
--- /dev/null
+/** @file\r
+ IA-32/x64 AsmReadGdtr()\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86ReadGdtr.c\r
+\r
+**/\r
+\r
+#include "BaseLibInternals.h"\r
+\r
+/**\r
+ Reads the current Global Descriptor Table Register(GDTR) descriptor.\r
+\r
+ Reads the current GDTR descriptor and returns it in Gdtr. This\r
+ function is only available on IA-32 and X64.\r
+\r
+ If Gdtr is NULL, then ASSERT().\r
+\r
+ @param Gdtr Pointer to a GDTR descriptor.\r
+\r
+**/\r
+VOID\r
+EFIAPI\r
+AsmReadGdtr (\r
+ OUT IA32_DESCRIPTOR *Gdtr\r
+ )\r
+{\r
+ ASSERT (Gdtr != NULL);\r
+ InternalX86ReadGdtr (Gdtr);\r
+}\r
--- /dev/null
+/** @file\r
+ IA-32/x64 AsmReadIdtr()\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86ReadIdtr.c\r
+\r
+**/\r
+\r
+#include "BaseLibInternals.h"\r
+\r
+/**\r
+ Reads the current Interrupt Descriptor Table Register(IDTR) descriptor.\r
+\r
+ Reads the current IDTR descriptor and returns it in Idtr. This\r
+ function is only available on IA-32 and X64.\r
+\r
+ If Idtr is NULL, then ASSERT().\r
+\r
+ @param Idtr Pointer to an IDTR descriptor.\r
+\r
+**/\r
+VOID\r
+EFIAPI\r
+AsmReadIdtr (\r
+ OUT IA32_DESCRIPTOR *Idtr\r
+ )\r
+{\r
+ ASSERT (Idtr != NULL);\r
+ InternalX86ReadIdtr (Idtr);\r
+}\r
\r
**/\r
IA32_REGISTER_SET *\r
+EFIAPI\r
InternalAsmThunk16 (\r
IN IA32_REGISTER_SET *RegisterSet,\r
IN OUT VOID *Transition\r
--- /dev/null
+/** @file\r
+ IA-32/x64 AsmWriteGdtr()\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86WriteGdtr.c\r
+\r
+**/\r
+\r
+#include "BaseLibInternals.h"\r
+\r
+/**\r
+ Writes the current Global Descriptor Table Register (GDTR) descriptor.\r
+\r
+ Writes the current GDTR descriptor specified by Gdtr. This function is\r
+ only available on IA-32 and X64.\r
+\r
+ If Gdtr is NULL, then ASSERT().\r
+\r
+ @param Gdtr Pointer to a GDTR descriptor.\r
+\r
+**/\r
+VOID\r
+EFIAPI\r
+AsmWriteGdtr (\r
+ IN CONST IA32_DESCRIPTOR *Gdtr\r
+ )\r
+{\r
+ ASSERT (Gdtr != NULL);\r
+ InternalX86WriteGdtr (Gdtr);\r
+}\r
--- /dev/null
+/** @file\r
+ IA-32/x64 AsmWriteIdtr()\r
+\r
+ Copyright (c) 2006, Intel Corporation<BR>\r
+ All rights reserved. This program and the accompanying materials\r
+ are licensed and made available under the terms and conditions of the BSD License\r
+ which accompanies this distribution. The full text of the license may be found at\r
+ http://opensource.org/licenses/bsd-license.php\r
+\r
+ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,\r
+ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.\r
+\r
+ Module Name: x86WriteIdtr.c\r
+\r
+**/\r
+\r
+#include "BaseLibInternals.h"\r
+\r
+/**\r
+ Writes the current Interrupt Descriptor Table Register(IDTR) descriptor.\r
+\r
+ Writes the current IDTR descriptor specified by Idtr. This function is\r
+ only available on IA-32 and X64.\r
+\r
+ If Idtr is NULL, then ASSERT().\r
+\r
+ @param Idtr Pointer to an IDTR descriptor.\r
+\r
+**/\r
+VOID\r
+EFIAPI\r
+AsmWriteIdtr (\r
+ IN CONST IA32_DESCRIPTOR *Idtr\r
+ )\r
+{\r
+ ASSERT (Idtr != NULL);\r
+ InternalX86WriteIdtr (Idtr);\r
+}\r