#------------------------------------------------------------------------------
#
# Replacement for Math64.c that is coded to use older GCC intrinsics.
# Doing this reduces the number of intrinsics that are required when
# you port to a new version of gcc.
#
# Need to split this into multiple files to size optimize the image.
#
# Copyright (c) 2009 - 2010, Apple Inc. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#------------------------------------------------------------------------------

9095d37b LG |
14 | .text\r |
15 | .align 2\r | |
16 | GCC_ASM_EXPORT(InternalMathLShiftU64)\r | |
8db92ab5 HT |
17 | \r |
18 | ASM_PFX(InternalMathLShiftU64):\r | |
9095d37b LG |
19 | stmfd sp!, {r4, r5, r6}\r |
20 | mov r6, r1\r | |
21 | rsb ip, r2, #32\r | |
22 | mov r4, r6, asl r2\r | |
23 | subs r1, r2, #32\r | |
24 | orr r4, r4, r0, lsr ip\r | |
25 | mov r3, r0, asl r2\r | |
26 | movpl r4, r0, asl r1\r | |
27 | mov r5, r0\r | |
28 | mov r0, r3\r | |
29 | mov r1, r4\r | |
30 | ldmfd sp!, {r4, r5, r6}\r | |
31 | bx lr\r | |
32 | \r | |
33 | .align 2\r | |
34 | GCC_ASM_EXPORT(InternalMathRShiftU64)\r | |
8db92ab5 HT |
35 | \r |
36 | ASM_PFX(InternalMathRShiftU64):\r | |
9095d37b LG |
37 | stmfd sp!, {r4, r5, r6}\r |
38 | mov r5, r0\r | |
39 | rsb ip, r2, #32\r | |
40 | mov r3, r5, lsr r2\r | |
41 | subs r0, r2, #32\r | |
42 | orr r3, r3, r1, asl ip\r | |
43 | mov r4, r1, lsr r2\r | |
44 | movpl r3, r1, lsr r0\r | |
45 | mov r6, r1\r | |
46 | mov r0, r3\r | |
47 | mov r1, r4\r | |
48 | ldmfd sp!, {r4, r5, r6}\r | |
49 | bx lr\r | |
50 | \r | |
51 | .align 2\r | |
52 | GCC_ASM_EXPORT(InternalMathARShiftU64)\r | |
8db92ab5 HT |
53 | \r |
54 | ASM_PFX(InternalMathARShiftU64):\r | |
9095d37b LG |
55 | stmfd sp!, {r4, r5, r6}\r |
56 | mov r5, r0\r | |
57 | rsb ip, r2, #32\r | |
58 | mov r3, r5, lsr r2\r | |
59 | subs r0, r2, #32\r | |
60 | orr r3, r3, r1, asl ip\r | |
61 | mov r4, r1, asr r2\r | |
62 | movpl r3, r1, asr r0\r | |
63 | mov r6, r1\r | |
64 | mov r0, r3\r | |
65 | mov r1, r4\r | |
66 | ldmfd sp!, {r4, r5, r6}\r | |
67 | bx lr\r | |
68 | \r | |
69 | .align 2\r | |
70 | GCC_ASM_EXPORT(InternalMathLRotU64)\r | |
8db92ab5 HT |
71 | \r |
72 | ASM_PFX(InternalMathLRotU64):\r | |
9095d37b LG |
73 | stmfd sp!, {r4, r5, r6, r7, lr}\r |
74 | add r7, sp, #12\r | |
75 | mov r6, r1\r | |
76 | rsb ip, r2, #32\r | |
77 | mov r4, r6, asl r2\r | |
78 | rsb lr, r2, #64\r | |
79 | subs r1, r2, #32\r | |
80 | orr r4, r4, r0, lsr ip\r | |
81 | mov r3, r0, asl r2\r | |
82 | movpl r4, r0, asl r1\r | |
83 | sub ip, r2, #32\r | |
84 | mov r5, r0\r | |
85 | mov r0, r0, lsr lr\r | |
86 | rsbs r2, r2, #32\r | |
87 | orr r0, r0, r6, asl ip\r | |
88 | mov r1, r6, lsr lr\r | |
89 | movpl r0, r6, lsr r2\r | |
90 | orr r1, r1, r4\r | |
91 | orr r0, r0, r3\r | |
92 | ldmfd sp!, {r4, r5, r6, r7, pc}\r | |
93 | \r | |
94 | \r | |
95 | .align 2\r | |
96 | GCC_ASM_EXPORT(InternalMathRRotU64)\r | |
8db92ab5 HT |
97 | \r |
98 | ASM_PFX(InternalMathRRotU64):\r | |
9095d37b LG |
99 | stmfd sp!, {r4, r5, r6, r7, lr}\r |
100 | add r7, sp, #12\r | |
101 | mov r5, r0\r | |
102 | rsb ip, r2, #32\r | |
103 | mov r3, r5, lsr r2\r | |
104 | rsb lr, r2, #64\r | |
105 | subs r0, r2, #32\r | |
106 | orr r3, r3, r1, asl ip\r | |
107 | mov r4, r1, lsr r2\r | |
108 | movpl r3, r1, lsr r0\r | |
109 | sub ip, r2, #32\r | |
110 | mov r6, r1\r | |
111 | mov r1, r1, asl lr\r | |
112 | rsbs r2, r2, #32\r | |
113 | orr r1, r1, r5, lsr ip\r | |
114 | mov r0, r5, asl lr\r | |
115 | movpl r1, r5, asl r2\r | |
116 | orr r0, r0, r3\r | |
117 | orr r1, r1, r4\r | |
118 | ldmfd sp!, {r4, r5, r6, r7, pc}\r | |
119 | \r | |
120 | .align 2\r | |
121 | GCC_ASM_EXPORT(InternalMathMultU64x32)\r | |
8db92ab5 HT |
122 | \r |
123 | ASM_PFX(InternalMathMultU64x32):\r | |
9095d37b LG |
124 | stmfd sp!, {r7, lr}\r |
125 | add r7, sp, #0\r | |
126 | mov r3, #0\r | |
127 | mov ip, r0\r | |
128 | mov lr, r1\r | |
129 | umull r0, r1, ip, r2\r | |
130 | mla r1, lr, r2, r1\r | |
131 | mla r1, ip, r3, r1\r | |
132 | ldmfd sp!, {r7, pc}\r | |
133 | \r | |
134 | .align 2\r | |
135 | GCC_ASM_EXPORT(InternalMathMultU64x64)\r | |
8db92ab5 HT |
136 | \r |
137 | ASM_PFX(InternalMathMultU64x64):\r | |
9095d37b LG |
138 | stmfd sp!, {r7, lr}\r |
139 | add r7, sp, #0\r | |
140 | mov ip, r0\r | |
141 | mov lr, r1\r | |
142 | umull r0, r1, ip, r2\r | |
143 | mla r1, lr, r2, r1\r | |
144 | mla r1, ip, r3, r1\r | |
145 | ldmfd sp!, {r7, pc}\r | |
146 | \r | |
147 | .align 2\r | |
148 | GCC_ASM_EXPORT(InternalMathDivU64x32)\r | |
8db92ab5 HT |
149 | \r |
150 | ASM_PFX(InternalMathDivU64x32):\r | |
9095d37b LG |
151 | stmfd sp!, {r7, lr}\r |
152 | add r7, sp, #0\r | |
153 | mov r3, #0\r | |
154 | bl ASM_PFX(__udivdi3)\r | |
155 | ldmfd sp!, {r7, pc}\r | |
156 | \r | |
157 | \r | |
158 | .align 2\r | |
159 | GCC_ASM_EXPORT(InternalMathModU64x32)\r | |
8db92ab5 HT |
160 | \r |
161 | ASM_PFX(InternalMathModU64x32):\r | |
9095d37b LG |
162 | stmfd sp!, {r7, lr}\r |
163 | add r7, sp, #0\r | |
164 | mov r3, #0\r | |
165 | bl ASM_PFX(__umoddi3)\r | |
166 | ldmfd sp!, {r7, pc}\r | |
167 | \r | |
168 | \r | |
169 | .align 2\r | |
170 | GCC_ASM_EXPORT(InternalMathDivRemU64x32)\r | |
8db92ab5 HT |
171 | \r |
172 | ASM_PFX(InternalMathDivRemU64x32):\r | |
9095d37b LG |
173 | stmfd sp!, {r4, r5, r6, r7, lr}\r |
174 | add r7, sp, #12\r | |
175 | stmfd sp!, {r10, r11}\r | |
176 | subs r6, r3, #0\r | |
177 | mov r10, r0\r | |
178 | mov r11, r1\r | |
179 | moveq r4, r2\r | |
180 | moveq r5, #0\r | |
181 | beq L22\r | |
182 | mov r4, r2\r | |
183 | mov r5, #0\r | |
184 | mov r3, #0\r | |
185 | bl ASM_PFX(__umoddi3)\r | |
186 | str r0, [r6, #0]\r | |
8db92ab5 | 187 | L22:\r |
9095d37b LG |
188 | mov r0, r10\r |
189 | mov r1, r11\r | |
190 | mov r2, r4\r | |
191 | mov r3, r5\r | |
192 | bl ASM_PFX(__udivdi3)\r | |
193 | ldmfd sp!, {r10, r11}\r | |
194 | ldmfd sp!, {r4, r5, r6, r7, pc}\r | |
195 | \r | |
196 | \r | |
197 | .align 2\r | |
198 | GCC_ASM_EXPORT(InternalMathDivRemU64x64)\r | |
8db92ab5 HT |
199 | \r |
200 | ASM_PFX(InternalMathDivRemU64x64):\r | |
9095d37b LG |
201 | stmfd sp!, {r4, r5, r6, r7, lr}\r |
202 | add r7, sp, #12\r | |
203 | stmfd sp!, {r10, r11}\r | |
204 | ldr r6, [sp, #28]\r | |
205 | mov r4, r0\r | |
206 | cmp r6, #0\r | |
207 | mov r5, r1\r | |
208 | mov r10, r2\r | |
209 | mov r11, r3\r | |
210 | beq L26\r | |
211 | bl ASM_PFX(__umoddi3)\r | |
212 | stmia r6, {r0-r1}\r | |
8db92ab5 | 213 | L26:\r |
9095d37b LG |
214 | mov r0, r4\r |
215 | mov r1, r5\r | |
216 | mov r2, r10\r | |
217 | mov r3, r11\r | |
218 | bl ASM_PFX(__udivdi3)\r | |
219 | ldmfd sp!, {r10, r11}\r | |
220 | ldmfd sp!, {r4, r5, r6, r7, pc}\r | |
221 | \r | |
222 | \r | |
223 | .align 2\r | |
224 | GCC_ASM_EXPORT(InternalMathDivRemS64x64)\r | |
8db92ab5 HT |
225 | \r |
226 | ASM_PFX(InternalMathDivRemS64x64):\r | |
9095d37b LG |
227 | stmfd sp!, {r4, r5, r6, r7, lr}\r |
228 | add r7, sp, #12\r | |
229 | stmfd sp!, {r10, r11}\r | |
230 | ldr r6, [sp, #28]\r | |
231 | mov r4, r0\r | |
232 | cmp r6, #0\r | |
233 | mov r5, r1\r | |
234 | mov r10, r2\r | |
235 | mov r11, r3\r | |
236 | beq L30\r | |
237 | bl ASM_PFX(__moddi3)\r | |
238 | stmia r6, {r0-r1}\r | |
8db92ab5 | 239 | L30:\r |
9095d37b LG |
240 | mov r0, r4\r |
241 | mov r1, r5\r | |
242 | mov r2, r10\r | |
243 | mov r3, r11\r | |
244 | bl ASM_PFX(__divdi3)\r | |
245 | ldmfd sp!, {r10, r11}\r | |
246 | ldmfd sp!, {r4, r5, r6, r7, pc}\r | |
247 | \r | |
248 | \r | |
249 | .align 2\r | |
250 | GCC_ASM_EXPORT(InternalMathSwapBytes64)\r | |
8db92ab5 HT |
251 | \r |
252 | ASM_PFX(InternalMathSwapBytes64):\r | |
9095d37b LG |
253 | stmfd sp!, {r4, r5, r7, lr}\r |
254 | mov r5, r1\r | |
255 | bl ASM_PFX(SwapBytes32)\r | |
256 | mov r4, r0\r | |
257 | mov r0, r5\r | |
258 | bl ASM_PFX(SwapBytes32)\r | |
259 | mov r1, r4\r | |
260 | ldmfd sp!, {r4, r5, r7, pc}\r | |
8db92ab5 HT |
261 | \r |
262 | \r | |
9095d37b | 263 | ASM_FUNCTION_REMOVE_IF_UNREFERENCED\r |