#
#------------------------------------------------------------------------------

.global _InternalMathDivRemU64x32, _InternalMathDivRemU64x64

#------------------------------------------------------------------------------
# UINT64
# EFIAPI
# InternalMathDivRemU64x64 (
#   IN      UINT64                    Dividend,
#   IN      UINT64                    Divisor,
#   OUT     UINT64                    *Remainder    OPTIONAL
#   );
#
# Divides a 64-bit unsigned Dividend by a 64-bit unsigned Divisor.
# Returns the 64-bit quotient in %edx:%eax; when Remainder is non-NULL the
# 64-bit remainder is stored through it.
#
# Stack layout on entry (cdecl; 0(%esp) = return address):
#    4(%esp)  Dividend, low  32 bits
#    8(%esp)  Dividend, high 32 bits
#   12(%esp)  Divisor,  low  32 bits
#   16(%esp)  Divisor,  high 32 bits
#   20(%esp)  Remainder pointer, may be NULL
#------------------------------------------------------------------------------
_InternalMathDivRemU64x64:
    movl    16(%esp), %ecx              # ecx = Divisor[63:32]
    testl   %ecx, %ecx
    jnz     Hard                        # Divisor >= 2^32: take the long path
    #
    # Divisor fits in 32 bits: delegate to InternalMathDivRemU64x32, whose
    # argument layout matches ours except that its Remainder slot (a UINT32*)
    # lives at 16(%esp).  The 32-bit routine writes only the low dword of the
    # remainder, so pre-clear the high dword here.
    #
    movl    20(%esp), %ecx              # ecx = Remainder pointer
    jecxz   L1                          # NULL: nothing to pre-clear or move
    andl    $0, 4(%ecx)                 # Remainder[63:32] = 0
    movl    %ecx, 16(%esp)              # relocate pointer to the U64x32 slot
L1:
    jmp     _InternalMathDivRemU64x32   # tail call; it returns to our caller

#
# Hard path: Divisor does not fit in 32 bits.  Shift Dividend and Divisor
# right in lockstep until Divisor fits in 32 bits, form a quotient estimate
# with a single DIV, then correct the estimate downward by at most one.
# Since Divisor >= 2^32, the true quotient always fits in 32 bits.
#
# Register roles:
#   edi:esi  original Dividend (kept for the remainder computation)
#   ecx:ebx  Divisor being shifted down
#   edx:eax  shifted Dividend, then products
#   ebx      (after DIV) quotient estimate
#
Hard:
    push    %ebx                        # save callee-saved registers
    push    %esi
    push    %edi
                                        # 3 pushes: args now start at 16(%esp)
    movl    20(%esp), %edx              # edx:eax = Dividend
    movl    16(%esp), %eax
    movl    %edx, %edi                  # edi:esi = saved copy of Dividend
    movl    %eax, %esi
    movl    24(%esp), %ebx              # ecx:ebx = Divisor (ecx set at entry)
L2:
    shrl    %edx                        # shift edx:eax (Dividend) right 1 ...
    rcrl    $1, %eax
    shrdl   $1, %ecx, %ebx              # ... and ecx:ebx (Divisor) in lockstep
    shrl    %ecx
    jnz     L2                          # until Divisor's high dword is zero
    divl    %ebx                        # eax = estimate (high by at most one)
    movl    %eax, %ebx                  # ebx = quotient estimate
    movl    28(%esp), %ecx              # ecx = Divisor[63:32]
    mull    24(%esp)                    # edx:eax = estimate * Divisor[31:0]
    imull   %ebx, %ecx                  # ecx = estimate * Divisor[63:32]
    addl    %ecx, %edx                  # edx:eax = estimate * Divisor
    movl    32(%esp), %ecx              # ecx = Remainder pointer
    jc      TooLarge                    # 64-bit product overflow: estimate high
    cmpl    %edx, %edi                  # compare Dividend with the product
    ja      Correct
    jb      TooLarge
    cmpl    %eax, %esi
    jae     Correct                     # product <= Dividend: estimate exact
TooLarge:
    decl    %ebx                        # estimate was one too large: fix it
    jecxz   Return                      # Remainder == NULL: the corrected
                                        # product is only needed for it below
    subl    24(%esp), %eax              # product -= Divisor
    sbbl    28(%esp), %edx
Correct:
    jecxz   Return                      # Remainder == NULL: skip the store
    subl    %eax, %esi                  # edi:esi = Dividend - quot * Divisor
    sbbl    %edx, %edi
    movl    %esi, (%ecx)                # *Remainder = edi:esi
    movl    %edi, 4(%ecx)
Return:
    movl    %ebx, %eax                  # quotient < 2^32 on this path, so
    xorl    %edx, %edx                  # edx:eax = zero-extended ebx
    pop     %edi                        # restore callee-saved registers
    pop     %esi
    pop     %ebx
    ret