/* util/host-utils.c — utility compute operations used by translated code. */
/*
 * Utility compute operations used by translated code.
 *
 * Copyright (c) 2003 Fabrice Bellard
 * Copyright (c) 2007 Aurelien Jarno
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "qemu/osdep.h"
#include "qemu/host-utils.h"
/* Long integer helpers */

/*
 * mul64 - unsigned 64x64 -> 128 multiplication core.
 * @plow:  out - low 64 bits of the product.
 * @phigh: out - high 64 bits of the product.
 * @a, @b: the two 64-bit factors, treated as unsigned.
 *
 * Implements the schoolbook method with four 32x32 -> 64 partial
 * products; the union splits each 64-bit value into 32-bit halves
 * with the layout selected by host endianness.
 */
static inline void mul64(uint64_t *plow, uint64_t *phigh,
                         uint64_t a, uint64_t b)
{
    typedef union {
        uint64_t ll;
        struct {
#ifdef HOST_WORDS_BIGENDIAN
            uint32_t high, low;
#else
            uint32_t low, high;
#endif
        } l;
    } LL;
    LL rl, rm, rn, rh, a0, b0;
    uint64_t c;

    a0.ll = a;
    b0.ll = b;

    /* Four 32x32 -> 64 partial products. */
    rl.ll = (uint64_t)a0.l.low * b0.l.low;
    rm.ll = (uint64_t)a0.l.low * b0.l.high;
    rn.ll = (uint64_t)a0.l.high * b0.l.low;
    rh.ll = (uint64_t)a0.l.high * b0.l.high;

    /* Fold the middle partial products into the result, propagating the
     * carry through c (c can hold sums of up to three 32-bit words). */
    c = (uint64_t)rl.l.high + rm.l.low + rn.l.low;
    rl.l.high = c;
    c >>= 32;
    c = c + rm.l.high + rn.l.high + rh.l.low;
    rh.l.low = c;
    rh.l.high += (uint32_t)(c >> 32);

    *plow = rl.ll;
    *phigh = rh.ll;
}
/* Unsigned 64x64 -> 128 multiplication.
 * Product is returned as *phigh:*plow. */
void mulu64(uint64_t *plow, uint64_t *phigh, uint64_t a, uint64_t b)
{
    mul64(plow, phigh, a, b);
}
/* Signed 64x64 -> 128 multiplication.
 * Computes the unsigned product, then corrects the high word: for each
 * negative factor x, the unsigned interpretation added (2^64) * other
 * relative to the signed product, so subtract the other factor from rh. */
void muls64(uint64_t *plow, uint64_t *phigh, int64_t a, int64_t b)
{
    uint64_t rh;

    mul64(plow, &rh, a, b);

    /* Adjust for signs. */
    if (b < 0) {
        rh -= a;
    }
    if (a < 0) {
        rh -= b;
    }
    *phigh = rh;
}
/* Unsigned 128x64 division.  Returns 1 if overflow (divide by zero or */
/* quotient exceeds 64 bits).  Otherwise returns quotient via plow and */
/* remainder via phigh. */
int divu128(uint64_t *plow, uint64_t *phigh, uint64_t divisor)
{
    uint64_t dhi = *phigh;
    uint64_t dlo = *plow;
    unsigned i;
    uint64_t carry = 0;

    if (divisor == 0) {
        return 1;
    } else if (dhi == 0) {
        /* Fast path: the dividend fits in 64 bits. */
        *plow  = dlo / divisor;
        *phigh = dlo % divisor;
        return 0;
    } else if (dhi >= divisor) {
        /* Quotient cannot fit in 64 bits: even dhi == divisor yields a
         * quotient of at least 2^64 (fix: the previous '>' test let the
         * equal case fall through and return a wrapped quotient). */
        return 1;
    } else {
        /* Restoring shift-and-subtract division, one quotient bit per
         * iteration: dhi accumulates the remainder, dlo is shifted out
         * as the dividend and shifted in as the quotient. */
        for (i = 0; i < 64; i++) {
            carry = dhi >> 63;
            dhi = (dhi << 1) | (dlo >> 63);
            if (carry || (dhi >= divisor)) {
                dhi -= divisor;
                carry = 1;
            } else {
                carry = 0;
            }
            dlo = (dlo << 1) | carry;
        }

        *plow = dlo;
        *phigh = dhi;
        return 0;
    }
}
/* Signed 128x64 division.  Returns 1 on overflow (divide by zero, or
 * quotient does not fit in a signed 64-bit value); otherwise the quotient
 * is returned via plow.
 * NOTE(review): the value left in *phigh is the remainder from the
 * unsigned division and is not sign-corrected here — confirm callers
 * only consume *plow. */
int divs128(int64_t *plow, int64_t *phigh, int64_t divisor)
{
    int sgn_dvdnd = *phigh < 0;
    int sgn_divsr = divisor < 0;
    int overflow = 0;

    /* Negate the 128-bit dividend by hand: complement both words, then
     * add one with carry from the low into the high word. */
    if (sgn_dvdnd) {
        *plow = ~(*plow);
        *phigh = ~(*phigh);
        if (*plow == (int64_t)-1) {
            *plow = 0;
            (*phigh)++;
        } else {
            (*plow)++;
        }
    }

    if (sgn_divsr) {
        divisor = 0 - divisor;
    }

    overflow = divu128((uint64_t *)plow, (uint64_t *)phigh,
                       (uint64_t)divisor);

    /* Restore the quotient's sign. */
    if (sgn_dvdnd ^ sgn_divsr) {
        *plow = 0 - *plow;
    }

    if (!overflow) {
        /* The quotient overflows int64_t if its sign bit disagrees with
         * the expected sign (magnitude >= 2^63). */
        if ((*plow < 0) ^ (sgn_dvdnd ^ sgn_divsr)) {
            overflow = 1;
        }
    }

    return overflow;
}
/**
 * urshift - 128-bit Unsigned Right Shift.
 * @plow: in/out - lower 64-bit integer.
 * @phigh: in/out - higher 64-bit integer.
 * @shift: in - bits to shift, between 0 and 127.
 *
 * Result is zero-extended and stored in plow/phigh, which are
 * input/output variables. Shift values outside the range will
 * be mod to 128. In other words, the caller is responsible to
 * verify/assert both the shift range and plow/phigh pointers.
 */
void urshift(uint64_t *plow, uint64_t *phigh, int32_t shift)
{
    shift &= 127;
    if (shift == 0) {
        return;
    }

    uint64_t h = *phigh >> (shift & 63);
    if (shift >= 64) {
        /* The high word moves entirely into the low word. */
        *plow = h;
        *phigh = 0;
    } else {
        /* shift < 64: 64 - shift is in [1, 63], so no UB in the
         * complementary left shift below. */
        *plow = (*plow >> (shift & 63)) | (*phigh << (64 - (shift & 63)));
        *phigh = h;
    }
}
/**
 * ulshift - 128-bit Unsigned Left Shift.
 * @plow: in/out - lower 64-bit integer.
 * @phigh: in/out - higher 64-bit integer.
 * @shift: in - bits to shift, between 0 and 127.
 * @overflow: out - true if any 1-bit is shifted out.
 *
 * Result is zero-extended and stored in plow/phigh, which are
 * input/output variables. Shift values outside the range will
 * be mod to 128. In other words, the caller is responsible to
 * verify/assert both the shift range and plow/phigh pointers.
 *
 * Note that *overflow is only ever set, never cleared: the caller is
 * expected to initialize it (allows accumulation across calls).
 */
void ulshift(uint64_t *plow, uint64_t *phigh, int32_t shift, bool *overflow)
{
    uint64_t low = *plow;
    uint64_t high = *phigh;

    shift &= 127;
    if (shift == 0) {
        return;
    }

    /* check if any bit will be shifted out */
    urshift(&low, &high, 128 - shift);
    if (low | high) {
        *overflow = true;
    }

    if (shift >= 64) {
        /* The low word moves entirely into the high word. */
        *phigh = *plow << (shift & 63);
        *plow = 0;
    } else {
        /* shift < 64: 64 - shift is in [1, 63], so no UB in the
         * complementary right shift below. */
        *phigh = (*plow >> (64 - (shift & 63))) | (*phigh << (shift & 63));
        *plow = *plow << shift;
    }
}