#------------------------------------------------------------------------------
#
# Copyright (c) 2006, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
# Module Name:
#
#   DivU64x64Remainder.S
#
# Abstract:
#
#   Calculate the quotient of a 64-bit integer by a 64-bit integer and returns
#   both the quotient and the remainder
#
#------------------------------------------------------------------------------

.globl  ASM_PFX(InternalMathDivRemU64x32), ASM_PFX(InternalMathDivRemU64x64)

#------------------------------------------------------------------------------
# UINT64
# EFIAPI
# InternalMathDivRemU64x64 (
#   IN  UINT64  Dividend,
#   IN  UINT64  Divisor,
#   OUT UINT64  *Remainder    OPTIONAL
#   );
#
# IA-32 cdecl: Dividend at 4(%esp)/8(%esp), Divisor at 12(%esp)/16(%esp),
# Remainder pointer at 20(%esp). Quotient is returned in edx:eax.
#------------------------------------------------------------------------------
ASM_PFX(InternalMathDivRemU64x64):
    movl    16(%esp), %ecx              # ecx <- high 32 bits of Divisor
    testl   %ecx, %ecx
    jnz     Hard                        # Divisor doesn't fit in 32 bits
    # Fast path: Divisor fits in 32 bits; delegate to the 64/32 routine.
    movl    20(%esp), %ecx              # ecx <- Remainder pointer
    jecxz   L1                          # skip fix-up if Remainder is NULL
    andl    $0, 4(%ecx)                 # zero high dword of *Remainder
                                        # (64/32 division remainder < 2^32)
    movl    %ecx, 16(%esp)              # rewrite stack so it matches the
                                        # 64/32 routine's argument layout
L1:
    jmp     ASM_PFX(InternalMathDivRemU64x32)   # tail call; it returns for us

Hard:
    # Slow path: Divisor >= 2^32, so the quotient is guaranteed < 2^32.
    pushl   %ebx
    pushl   %esi
    pushl   %edi
    # After three pushes the arguments sit 12 bytes deeper:
    # Dividend at 16/20, Divisor at 24/28, Remainder pointer at 32.
    movl    20(%esp), %edx              # edx:eax <- Dividend
    movl    16(%esp), %eax
    movl    %edx, %edi                  # edi:esi keeps a copy of Dividend
    movl    %eax, %esi                  # for the remainder computation
    movl    24(%esp), %ebx              # ecx:ebx <- Divisor (ecx set above)
L2:
    # Shift Dividend and Divisor right in lockstep until the Divisor
    # fits in 32 bits; the ratio (and thus divl's estimate) is preserved.
    shrl    $1, %edx
    rcrl    $1, %eax
    shrdl   $1, %ecx, %ebx
    shrl    $1, %ecx
    jnz     L2
    divl    %ebx                        # eax <- quotient estimate (exact or
                                        # one too large)
    movl    %eax, %ebx                  # ebx <- quotient candidate
    movl    28(%esp), %ecx
    mull    24(%esp)                    # edx:eax = quotient * Divisor[0..31]
    imull   %ebx, %ecx                  # ecx = quotient * Divisor[32..63]
    addl    %ecx, %edx                  # edx:eax = quotient * Divisor
    movl    32(%esp), %ecx              # ecx <- Remainder pointer
    jc      TooLarge                    # product overflowed 64 bits:
                                        # quotient is too large
    cmpl    %edx, %edi                  # compare Dividend with the product
    ja      Correct
    jb      TooLarge                    # product > Dividend: adjust down
    cmpl    %eax, %esi
    jae     Correct
TooLarge:
    decl    %ebx                        # quotient estimate was one too big
    jecxz   Return                      # no need to fix the product when
                                        # the remainder isn't wanted
    subl    24(%esp), %eax              # edx:eax -= Divisor, so the product
    sbbl    28(%esp), %edx              # matches the corrected quotient
Correct:
    jecxz   Return                      # skip store if Remainder is NULL
    subl    %eax, %esi                  # edi:esi = Dividend - quotient*Divisor
    sbbl    %edx, %edi
    movl    %esi, (%ecx)                # *Remainder = edi:esi
    movl    %edi, 4(%ecx)
Return:
    movl    %ebx, %eax                  # eax <- quotient
    xorl    %edx, %edx                  # quotient < 2^32 on this path,
                                        # so the high dword is zero
    popl    %edi
    popl    %esi
    popl    %ebx
    ret