2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008 Fabrice Bellard
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
25 #include "qemu/osdep.h"
27 #include "tcg/tcg-temp-internal.h"
28 #include "tcg/tcg-op-common.h"
29 #include "exec/translation-block.h"
30 #include "exec/plugin-gen.h"
31 #include "tcg-internal.h"
34 void tcg_gen_op1(TCGOpcode opc
, TCGArg a1
)
36 TCGOp
*op
= tcg_emit_op(opc
, 1);
40 void tcg_gen_op2(TCGOpcode opc
, TCGArg a1
, TCGArg a2
)
42 TCGOp
*op
= tcg_emit_op(opc
, 2);
47 void tcg_gen_op3(TCGOpcode opc
, TCGArg a1
, TCGArg a2
, TCGArg a3
)
49 TCGOp
*op
= tcg_emit_op(opc
, 3);
55 void tcg_gen_op4(TCGOpcode opc
, TCGArg a1
, TCGArg a2
, TCGArg a3
, TCGArg a4
)
57 TCGOp
*op
= tcg_emit_op(opc
, 4);
64 void tcg_gen_op5(TCGOpcode opc
, TCGArg a1
, TCGArg a2
, TCGArg a3
,
67 TCGOp
*op
= tcg_emit_op(opc
, 5);
75 void tcg_gen_op6(TCGOpcode opc
, TCGArg a1
, TCGArg a2
, TCGArg a3
,
76 TCGArg a4
, TCGArg a5
, TCGArg a6
)
78 TCGOp
*op
= tcg_emit_op(opc
, 6);
89 static void add_last_as_label_use(TCGLabel
*l
)
91 TCGLabelUse
*u
= tcg_malloc(sizeof(TCGLabelUse
));
93 u
->op
= tcg_last_op();
94 QSIMPLEQ_INSERT_TAIL(&l
->branches
, u
, next
);
97 void tcg_gen_br(TCGLabel
*l
)
99 tcg_gen_op1(INDEX_op_br
, label_arg(l
));
100 add_last_as_label_use(l
);
103 void tcg_gen_mb(TCGBar mb_type
)
105 #ifdef CONFIG_USER_ONLY
106 bool parallel
= tcg_ctx
->gen_tb
->cflags
& CF_PARALLEL
;
109 * It is tempting to elide the barrier in a uniprocessor context.
110 * However, even with a single cpu we have i/o threads running in
111 * parallel, and lack of memory order can result in e.g. virtio
112 * queue entries being read incorrectly.
114 bool parallel
= true;
118 tcg_gen_op1(INDEX_op_mb
, mb_type
);
124 void tcg_gen_movi_i32(TCGv_i32 ret
, int32_t arg
)
126 tcg_gen_mov_i32(ret
, tcg_constant_i32(arg
));
129 void tcg_gen_addi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
131 /* some cases can be optimized here */
133 tcg_gen_mov_i32(ret
, arg1
);
135 tcg_gen_add_i32(ret
, arg1
, tcg_constant_i32(arg2
));
139 void tcg_gen_subfi_i32(TCGv_i32 ret
, int32_t arg1
, TCGv_i32 arg2
)
141 if (arg1
== 0 && TCG_TARGET_HAS_neg_i32
) {
142 /* Don't recurse with tcg_gen_neg_i32. */
143 tcg_gen_op2_i32(INDEX_op_neg_i32
, ret
, arg2
);
145 tcg_gen_sub_i32(ret
, tcg_constant_i32(arg1
), arg2
);
149 void tcg_gen_subi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
151 /* some cases can be optimized here */
153 tcg_gen_mov_i32(ret
, arg1
);
155 tcg_gen_sub_i32(ret
, arg1
, tcg_constant_i32(arg2
));
159 void tcg_gen_andi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
161 /* Some cases can be optimized here. */
164 tcg_gen_movi_i32(ret
, 0);
167 tcg_gen_mov_i32(ret
, arg1
);
170 /* Don't recurse with tcg_gen_ext8u_i32. */
171 if (TCG_TARGET_HAS_ext8u_i32
) {
172 tcg_gen_op2_i32(INDEX_op_ext8u_i32
, ret
, arg1
);
177 if (TCG_TARGET_HAS_ext16u_i32
) {
178 tcg_gen_op2_i32(INDEX_op_ext16u_i32
, ret
, arg1
);
184 tcg_gen_and_i32(ret
, arg1
, tcg_constant_i32(arg2
));
187 void tcg_gen_ori_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
189 /* Some cases can be optimized here. */
191 tcg_gen_movi_i32(ret
, -1);
192 } else if (arg2
== 0) {
193 tcg_gen_mov_i32(ret
, arg1
);
195 tcg_gen_or_i32(ret
, arg1
, tcg_constant_i32(arg2
));
199 void tcg_gen_xori_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
201 /* Some cases can be optimized here. */
203 tcg_gen_mov_i32(ret
, arg1
);
204 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i32
) {
205 /* Don't recurse with tcg_gen_not_i32. */
206 tcg_gen_op2_i32(INDEX_op_not_i32
, ret
, arg1
);
208 tcg_gen_xor_i32(ret
, arg1
, tcg_constant_i32(arg2
));
212 void tcg_gen_shli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
214 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
216 tcg_gen_mov_i32(ret
, arg1
);
218 tcg_gen_shl_i32(ret
, arg1
, tcg_constant_i32(arg2
));
222 void tcg_gen_shri_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
224 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
226 tcg_gen_mov_i32(ret
, arg1
);
228 tcg_gen_shr_i32(ret
, arg1
, tcg_constant_i32(arg2
));
232 void tcg_gen_sari_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
234 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
236 tcg_gen_mov_i32(ret
, arg1
);
238 tcg_gen_sar_i32(ret
, arg1
, tcg_constant_i32(arg2
));
242 void tcg_gen_brcond_i32(TCGCond cond
, TCGv_i32 arg1
, TCGv_i32 arg2
, TCGLabel
*l
)
244 if (cond
== TCG_COND_ALWAYS
) {
246 } else if (cond
!= TCG_COND_NEVER
) {
247 tcg_gen_op4ii_i32(INDEX_op_brcond_i32
, arg1
, arg2
, cond
, label_arg(l
));
248 add_last_as_label_use(l
);
252 void tcg_gen_brcondi_i32(TCGCond cond
, TCGv_i32 arg1
, int32_t arg2
, TCGLabel
*l
)
254 if (cond
== TCG_COND_ALWAYS
) {
256 } else if (cond
!= TCG_COND_NEVER
) {
257 tcg_gen_brcond_i32(cond
, arg1
, tcg_constant_i32(arg2
), l
);
261 void tcg_gen_setcond_i32(TCGCond cond
, TCGv_i32 ret
,
262 TCGv_i32 arg1
, TCGv_i32 arg2
)
264 if (cond
== TCG_COND_ALWAYS
) {
265 tcg_gen_movi_i32(ret
, 1);
266 } else if (cond
== TCG_COND_NEVER
) {
267 tcg_gen_movi_i32(ret
, 0);
269 tcg_gen_op4i_i32(INDEX_op_setcond_i32
, ret
, arg1
, arg2
, cond
);
273 void tcg_gen_setcondi_i32(TCGCond cond
, TCGv_i32 ret
,
274 TCGv_i32 arg1
, int32_t arg2
)
276 tcg_gen_setcond_i32(cond
, ret
, arg1
, tcg_constant_i32(arg2
));
279 void tcg_gen_negsetcond_i32(TCGCond cond
, TCGv_i32 ret
,
280 TCGv_i32 arg1
, TCGv_i32 arg2
)
282 if (cond
== TCG_COND_ALWAYS
) {
283 tcg_gen_movi_i32(ret
, -1);
284 } else if (cond
== TCG_COND_NEVER
) {
285 tcg_gen_movi_i32(ret
, 0);
286 } else if (TCG_TARGET_HAS_negsetcond_i32
) {
287 tcg_gen_op4i_i32(INDEX_op_negsetcond_i32
, ret
, arg1
, arg2
, cond
);
289 tcg_gen_setcond_i32(cond
, ret
, arg1
, arg2
);
290 tcg_gen_neg_i32(ret
, ret
);
294 void tcg_gen_muli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
297 tcg_gen_movi_i32(ret
, 0);
298 } else if (is_power_of_2(arg2
)) {
299 tcg_gen_shli_i32(ret
, arg1
, ctz32(arg2
));
301 tcg_gen_mul_i32(ret
, arg1
, tcg_constant_i32(arg2
));
305 void tcg_gen_div_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
307 if (TCG_TARGET_HAS_div_i32
) {
308 tcg_gen_op3_i32(INDEX_op_div_i32
, ret
, arg1
, arg2
);
309 } else if (TCG_TARGET_HAS_div2_i32
) {
310 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
311 tcg_gen_sari_i32(t0
, arg1
, 31);
312 tcg_gen_op5_i32(INDEX_op_div2_i32
, ret
, t0
, arg1
, t0
, arg2
);
313 tcg_temp_free_i32(t0
);
315 gen_helper_div_i32(ret
, arg1
, arg2
);
319 void tcg_gen_rem_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
321 if (TCG_TARGET_HAS_rem_i32
) {
322 tcg_gen_op3_i32(INDEX_op_rem_i32
, ret
, arg1
, arg2
);
323 } else if (TCG_TARGET_HAS_div_i32
) {
324 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
325 tcg_gen_op3_i32(INDEX_op_div_i32
, t0
, arg1
, arg2
);
326 tcg_gen_mul_i32(t0
, t0
, arg2
);
327 tcg_gen_sub_i32(ret
, arg1
, t0
);
328 tcg_temp_free_i32(t0
);
329 } else if (TCG_TARGET_HAS_div2_i32
) {
330 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
331 tcg_gen_sari_i32(t0
, arg1
, 31);
332 tcg_gen_op5_i32(INDEX_op_div2_i32
, t0
, ret
, arg1
, t0
, arg2
);
333 tcg_temp_free_i32(t0
);
335 gen_helper_rem_i32(ret
, arg1
, arg2
);
339 void tcg_gen_divu_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
341 if (TCG_TARGET_HAS_div_i32
) {
342 tcg_gen_op3_i32(INDEX_op_divu_i32
, ret
, arg1
, arg2
);
343 } else if (TCG_TARGET_HAS_div2_i32
) {
344 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
345 tcg_gen_movi_i32(t0
, 0);
346 tcg_gen_op5_i32(INDEX_op_divu2_i32
, ret
, t0
, arg1
, t0
, arg2
);
347 tcg_temp_free_i32(t0
);
349 gen_helper_divu_i32(ret
, arg1
, arg2
);
353 void tcg_gen_remu_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
355 if (TCG_TARGET_HAS_rem_i32
) {
356 tcg_gen_op3_i32(INDEX_op_remu_i32
, ret
, arg1
, arg2
);
357 } else if (TCG_TARGET_HAS_div_i32
) {
358 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
359 tcg_gen_op3_i32(INDEX_op_divu_i32
, t0
, arg1
, arg2
);
360 tcg_gen_mul_i32(t0
, t0
, arg2
);
361 tcg_gen_sub_i32(ret
, arg1
, t0
);
362 tcg_temp_free_i32(t0
);
363 } else if (TCG_TARGET_HAS_div2_i32
) {
364 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
365 tcg_gen_movi_i32(t0
, 0);
366 tcg_gen_op5_i32(INDEX_op_divu2_i32
, t0
, ret
, arg1
, t0
, arg2
);
367 tcg_temp_free_i32(t0
);
369 gen_helper_remu_i32(ret
, arg1
, arg2
);
373 void tcg_gen_andc_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
375 if (TCG_TARGET_HAS_andc_i32
) {
376 tcg_gen_op3_i32(INDEX_op_andc_i32
, ret
, arg1
, arg2
);
378 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
379 tcg_gen_not_i32(t0
, arg2
);
380 tcg_gen_and_i32(ret
, arg1
, t0
);
381 tcg_temp_free_i32(t0
);
385 void tcg_gen_eqv_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
387 if (TCG_TARGET_HAS_eqv_i32
) {
388 tcg_gen_op3_i32(INDEX_op_eqv_i32
, ret
, arg1
, arg2
);
390 tcg_gen_xor_i32(ret
, arg1
, arg2
);
391 tcg_gen_not_i32(ret
, ret
);
395 void tcg_gen_nand_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
397 if (TCG_TARGET_HAS_nand_i32
) {
398 tcg_gen_op3_i32(INDEX_op_nand_i32
, ret
, arg1
, arg2
);
400 tcg_gen_and_i32(ret
, arg1
, arg2
);
401 tcg_gen_not_i32(ret
, ret
);
405 void tcg_gen_nor_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
407 if (TCG_TARGET_HAS_nor_i32
) {
408 tcg_gen_op3_i32(INDEX_op_nor_i32
, ret
, arg1
, arg2
);
410 tcg_gen_or_i32(ret
, arg1
, arg2
);
411 tcg_gen_not_i32(ret
, ret
);
415 void tcg_gen_orc_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
417 if (TCG_TARGET_HAS_orc_i32
) {
418 tcg_gen_op3_i32(INDEX_op_orc_i32
, ret
, arg1
, arg2
);
420 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
421 tcg_gen_not_i32(t0
, arg2
);
422 tcg_gen_or_i32(ret
, arg1
, t0
);
423 tcg_temp_free_i32(t0
);
427 void tcg_gen_clz_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
429 if (TCG_TARGET_HAS_clz_i32
) {
430 tcg_gen_op3_i32(INDEX_op_clz_i32
, ret
, arg1
, arg2
);
431 } else if (TCG_TARGET_HAS_clz_i64
) {
432 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
433 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
434 tcg_gen_extu_i32_i64(t1
, arg1
);
435 tcg_gen_extu_i32_i64(t2
, arg2
);
436 tcg_gen_addi_i64(t2
, t2
, 32);
437 tcg_gen_clz_i64(t1
, t1
, t2
);
438 tcg_gen_extrl_i64_i32(ret
, t1
);
439 tcg_temp_free_i64(t1
);
440 tcg_temp_free_i64(t2
);
441 tcg_gen_subi_i32(ret
, ret
, 32);
443 gen_helper_clz_i32(ret
, arg1
, arg2
);
447 void tcg_gen_clzi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, uint32_t arg2
)
449 tcg_gen_clz_i32(ret
, arg1
, tcg_constant_i32(arg2
));
452 void tcg_gen_ctz_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
454 if (TCG_TARGET_HAS_ctz_i32
) {
455 tcg_gen_op3_i32(INDEX_op_ctz_i32
, ret
, arg1
, arg2
);
456 } else if (TCG_TARGET_HAS_ctz_i64
) {
457 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
458 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
459 tcg_gen_extu_i32_i64(t1
, arg1
);
460 tcg_gen_extu_i32_i64(t2
, arg2
);
461 tcg_gen_ctz_i64(t1
, t1
, t2
);
462 tcg_gen_extrl_i64_i32(ret
, t1
);
463 tcg_temp_free_i64(t1
);
464 tcg_temp_free_i64(t2
);
465 } else if (TCG_TARGET_HAS_ctpop_i32
466 || TCG_TARGET_HAS_ctpop_i64
467 || TCG_TARGET_HAS_clz_i32
468 || TCG_TARGET_HAS_clz_i64
) {
469 TCGv_i32 z
, t
= tcg_temp_ebb_new_i32();
471 if (TCG_TARGET_HAS_ctpop_i32
|| TCG_TARGET_HAS_ctpop_i64
) {
472 tcg_gen_subi_i32(t
, arg1
, 1);
473 tcg_gen_andc_i32(t
, t
, arg1
);
474 tcg_gen_ctpop_i32(t
, t
);
476 /* Since all non-x86 hosts have clz(0) == 32, don't fight it. */
477 tcg_gen_neg_i32(t
, arg1
);
478 tcg_gen_and_i32(t
, t
, arg1
);
479 tcg_gen_clzi_i32(t
, t
, 32);
480 tcg_gen_xori_i32(t
, t
, 31);
482 z
= tcg_constant_i32(0);
483 tcg_gen_movcond_i32(TCG_COND_EQ
, ret
, arg1
, z
, arg2
, t
);
484 tcg_temp_free_i32(t
);
486 gen_helper_ctz_i32(ret
, arg1
, arg2
);
490 void tcg_gen_ctzi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, uint32_t arg2
)
492 if (!TCG_TARGET_HAS_ctz_i32
&& TCG_TARGET_HAS_ctpop_i32
&& arg2
== 32) {
493 /* This equivalence has the advantage of not requiring a fixup. */
494 TCGv_i32 t
= tcg_temp_ebb_new_i32();
495 tcg_gen_subi_i32(t
, arg1
, 1);
496 tcg_gen_andc_i32(t
, t
, arg1
);
497 tcg_gen_ctpop_i32(ret
, t
);
498 tcg_temp_free_i32(t
);
500 tcg_gen_ctz_i32(ret
, arg1
, tcg_constant_i32(arg2
));
504 void tcg_gen_clrsb_i32(TCGv_i32 ret
, TCGv_i32 arg
)
506 if (TCG_TARGET_HAS_clz_i32
) {
507 TCGv_i32 t
= tcg_temp_ebb_new_i32();
508 tcg_gen_sari_i32(t
, arg
, 31);
509 tcg_gen_xor_i32(t
, t
, arg
);
510 tcg_gen_clzi_i32(t
, t
, 32);
511 tcg_gen_subi_i32(ret
, t
, 1);
512 tcg_temp_free_i32(t
);
514 gen_helper_clrsb_i32(ret
, arg
);
518 void tcg_gen_ctpop_i32(TCGv_i32 ret
, TCGv_i32 arg1
)
520 if (TCG_TARGET_HAS_ctpop_i32
) {
521 tcg_gen_op2_i32(INDEX_op_ctpop_i32
, ret
, arg1
);
522 } else if (TCG_TARGET_HAS_ctpop_i64
) {
523 TCGv_i64 t
= tcg_temp_ebb_new_i64();
524 tcg_gen_extu_i32_i64(t
, arg1
);
525 tcg_gen_ctpop_i64(t
, t
);
526 tcg_gen_extrl_i64_i32(ret
, t
);
527 tcg_temp_free_i64(t
);
529 gen_helper_ctpop_i32(ret
, arg1
);
533 void tcg_gen_rotl_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
535 if (TCG_TARGET_HAS_rot_i32
) {
536 tcg_gen_op3_i32(INDEX_op_rotl_i32
, ret
, arg1
, arg2
);
540 t0
= tcg_temp_ebb_new_i32();
541 t1
= tcg_temp_ebb_new_i32();
542 tcg_gen_shl_i32(t0
, arg1
, arg2
);
543 tcg_gen_subfi_i32(t1
, 32, arg2
);
544 tcg_gen_shr_i32(t1
, arg1
, t1
);
545 tcg_gen_or_i32(ret
, t0
, t1
);
546 tcg_temp_free_i32(t0
);
547 tcg_temp_free_i32(t1
);
551 void tcg_gen_rotli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
553 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
554 /* some cases can be optimized here */
556 tcg_gen_mov_i32(ret
, arg1
);
557 } else if (TCG_TARGET_HAS_rot_i32
) {
558 tcg_gen_rotl_i32(ret
, arg1
, tcg_constant_i32(arg2
));
561 t0
= tcg_temp_ebb_new_i32();
562 t1
= tcg_temp_ebb_new_i32();
563 tcg_gen_shli_i32(t0
, arg1
, arg2
);
564 tcg_gen_shri_i32(t1
, arg1
, 32 - arg2
);
565 tcg_gen_or_i32(ret
, t0
, t1
);
566 tcg_temp_free_i32(t0
);
567 tcg_temp_free_i32(t1
);
571 void tcg_gen_rotr_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
573 if (TCG_TARGET_HAS_rot_i32
) {
574 tcg_gen_op3_i32(INDEX_op_rotr_i32
, ret
, arg1
, arg2
);
578 t0
= tcg_temp_ebb_new_i32();
579 t1
= tcg_temp_ebb_new_i32();
580 tcg_gen_shr_i32(t0
, arg1
, arg2
);
581 tcg_gen_subfi_i32(t1
, 32, arg2
);
582 tcg_gen_shl_i32(t1
, arg1
, t1
);
583 tcg_gen_or_i32(ret
, t0
, t1
);
584 tcg_temp_free_i32(t0
);
585 tcg_temp_free_i32(t1
);
589 void tcg_gen_rotri_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
591 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
592 /* some cases can be optimized here */
594 tcg_gen_mov_i32(ret
, arg1
);
596 tcg_gen_rotli_i32(ret
, arg1
, 32 - arg2
);
600 void tcg_gen_deposit_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
,
601 unsigned int ofs
, unsigned int len
)
606 tcg_debug_assert(ofs
< 32);
607 tcg_debug_assert(len
> 0);
608 tcg_debug_assert(len
<= 32);
609 tcg_debug_assert(ofs
+ len
<= 32);
612 tcg_gen_mov_i32(ret
, arg2
);
615 if (TCG_TARGET_HAS_deposit_i32
&& TCG_TARGET_deposit_i32_valid(ofs
, len
)) {
616 tcg_gen_op5ii_i32(INDEX_op_deposit_i32
, ret
, arg1
, arg2
, ofs
, len
);
620 t1
= tcg_temp_ebb_new_i32();
622 if (TCG_TARGET_HAS_extract2_i32
) {
623 if (ofs
+ len
== 32) {
624 tcg_gen_shli_i32(t1
, arg1
, len
);
625 tcg_gen_extract2_i32(ret
, t1
, arg2
, len
);
629 tcg_gen_extract2_i32(ret
, arg1
, arg2
, len
);
630 tcg_gen_rotli_i32(ret
, ret
, len
);
635 mask
= (1u << len
) - 1;
636 if (ofs
+ len
< 32) {
637 tcg_gen_andi_i32(t1
, arg2
, mask
);
638 tcg_gen_shli_i32(t1
, t1
, ofs
);
640 tcg_gen_shli_i32(t1
, arg2
, ofs
);
642 tcg_gen_andi_i32(ret
, arg1
, ~(mask
<< ofs
));
643 tcg_gen_or_i32(ret
, ret
, t1
);
645 tcg_temp_free_i32(t1
);
648 void tcg_gen_deposit_z_i32(TCGv_i32 ret
, TCGv_i32 arg
,
649 unsigned int ofs
, unsigned int len
)
651 tcg_debug_assert(ofs
< 32);
652 tcg_debug_assert(len
> 0);
653 tcg_debug_assert(len
<= 32);
654 tcg_debug_assert(ofs
+ len
<= 32);
656 if (ofs
+ len
== 32) {
657 tcg_gen_shli_i32(ret
, arg
, ofs
);
658 } else if (ofs
== 0) {
659 tcg_gen_andi_i32(ret
, arg
, (1u << len
) - 1);
660 } else if (TCG_TARGET_HAS_deposit_i32
661 && TCG_TARGET_deposit_i32_valid(ofs
, len
)) {
662 TCGv_i32 zero
= tcg_constant_i32(0);
663 tcg_gen_op5ii_i32(INDEX_op_deposit_i32
, ret
, zero
, arg
, ofs
, len
);
665 /* To help two-operand hosts we prefer to zero-extend first,
666 which allows ARG to stay live. */
669 if (TCG_TARGET_HAS_ext16u_i32
) {
670 tcg_gen_ext16u_i32(ret
, arg
);
671 tcg_gen_shli_i32(ret
, ret
, ofs
);
676 if (TCG_TARGET_HAS_ext8u_i32
) {
677 tcg_gen_ext8u_i32(ret
, arg
);
678 tcg_gen_shli_i32(ret
, ret
, ofs
);
683 /* Otherwise prefer zero-extension over AND for code size. */
686 if (TCG_TARGET_HAS_ext16u_i32
) {
687 tcg_gen_shli_i32(ret
, arg
, ofs
);
688 tcg_gen_ext16u_i32(ret
, ret
);
693 if (TCG_TARGET_HAS_ext8u_i32
) {
694 tcg_gen_shli_i32(ret
, arg
, ofs
);
695 tcg_gen_ext8u_i32(ret
, ret
);
700 tcg_gen_andi_i32(ret
, arg
, (1u << len
) - 1);
701 tcg_gen_shli_i32(ret
, ret
, ofs
);
705 void tcg_gen_extract_i32(TCGv_i32 ret
, TCGv_i32 arg
,
706 unsigned int ofs
, unsigned int len
)
708 tcg_debug_assert(ofs
< 32);
709 tcg_debug_assert(len
> 0);
710 tcg_debug_assert(len
<= 32);
711 tcg_debug_assert(ofs
+ len
<= 32);
713 /* Canonicalize certain special cases, even if extract is supported. */
714 if (ofs
+ len
== 32) {
715 tcg_gen_shri_i32(ret
, arg
, 32 - len
);
719 tcg_gen_andi_i32(ret
, arg
, (1u << len
) - 1);
723 if (TCG_TARGET_HAS_extract_i32
724 && TCG_TARGET_extract_i32_valid(ofs
, len
)) {
725 tcg_gen_op4ii_i32(INDEX_op_extract_i32
, ret
, arg
, ofs
, len
);
729 /* Assume that zero-extension, if available, is cheaper than a shift. */
732 if (TCG_TARGET_HAS_ext16u_i32
) {
733 tcg_gen_ext16u_i32(ret
, arg
);
734 tcg_gen_shri_i32(ret
, ret
, ofs
);
739 if (TCG_TARGET_HAS_ext8u_i32
) {
740 tcg_gen_ext8u_i32(ret
, arg
);
741 tcg_gen_shri_i32(ret
, ret
, ofs
);
747 /* ??? Ideally we'd know what values are available for immediate AND.
748 Assume that 8 bits are available, plus the special case of 16,
749 so that we get ext8u, ext16u. */
751 case 1 ... 8: case 16:
752 tcg_gen_shri_i32(ret
, arg
, ofs
);
753 tcg_gen_andi_i32(ret
, ret
, (1u << len
) - 1);
756 tcg_gen_shli_i32(ret
, arg
, 32 - len
- ofs
);
757 tcg_gen_shri_i32(ret
, ret
, 32 - len
);
762 void tcg_gen_sextract_i32(TCGv_i32 ret
, TCGv_i32 arg
,
763 unsigned int ofs
, unsigned int len
)
765 tcg_debug_assert(ofs
< 32);
766 tcg_debug_assert(len
> 0);
767 tcg_debug_assert(len
<= 32);
768 tcg_debug_assert(ofs
+ len
<= 32);
770 /* Canonicalize certain special cases, even if extract is supported. */
771 if (ofs
+ len
== 32) {
772 tcg_gen_sari_i32(ret
, arg
, 32 - len
);
778 tcg_gen_ext16s_i32(ret
, arg
);
781 tcg_gen_ext8s_i32(ret
, arg
);
786 if (TCG_TARGET_HAS_sextract_i32
787 && TCG_TARGET_extract_i32_valid(ofs
, len
)) {
788 tcg_gen_op4ii_i32(INDEX_op_sextract_i32
, ret
, arg
, ofs
, len
);
792 /* Assume that sign-extension, if available, is cheaper than a shift. */
795 if (TCG_TARGET_HAS_ext16s_i32
) {
796 tcg_gen_ext16s_i32(ret
, arg
);
797 tcg_gen_sari_i32(ret
, ret
, ofs
);
802 if (TCG_TARGET_HAS_ext8s_i32
) {
803 tcg_gen_ext8s_i32(ret
, arg
);
804 tcg_gen_sari_i32(ret
, ret
, ofs
);
811 if (TCG_TARGET_HAS_ext16s_i32
) {
812 tcg_gen_shri_i32(ret
, arg
, ofs
);
813 tcg_gen_ext16s_i32(ret
, ret
);
818 if (TCG_TARGET_HAS_ext8s_i32
) {
819 tcg_gen_shri_i32(ret
, arg
, ofs
);
820 tcg_gen_ext8s_i32(ret
, ret
);
826 tcg_gen_shli_i32(ret
, arg
, 32 - len
- ofs
);
827 tcg_gen_sari_i32(ret
, ret
, 32 - len
);
831 * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
832 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
834 void tcg_gen_extract2_i32(TCGv_i32 ret
, TCGv_i32 al
, TCGv_i32 ah
,
837 tcg_debug_assert(ofs
<= 32);
839 tcg_gen_mov_i32(ret
, al
);
840 } else if (ofs
== 32) {
841 tcg_gen_mov_i32(ret
, ah
);
842 } else if (al
== ah
) {
843 tcg_gen_rotri_i32(ret
, al
, ofs
);
844 } else if (TCG_TARGET_HAS_extract2_i32
) {
845 tcg_gen_op4i_i32(INDEX_op_extract2_i32
, ret
, al
, ah
, ofs
);
847 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
848 tcg_gen_shri_i32(t0
, al
, ofs
);
849 tcg_gen_deposit_i32(ret
, t0
, ah
, 32 - ofs
, ofs
);
850 tcg_temp_free_i32(t0
);
854 void tcg_gen_movcond_i32(TCGCond cond
, TCGv_i32 ret
, TCGv_i32 c1
,
855 TCGv_i32 c2
, TCGv_i32 v1
, TCGv_i32 v2
)
857 if (cond
== TCG_COND_ALWAYS
) {
858 tcg_gen_mov_i32(ret
, v1
);
859 } else if (cond
== TCG_COND_NEVER
) {
860 tcg_gen_mov_i32(ret
, v2
);
861 } else if (TCG_TARGET_HAS_movcond_i32
) {
862 tcg_gen_op6i_i32(INDEX_op_movcond_i32
, ret
, c1
, c2
, v1
, v2
, cond
);
864 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
865 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
866 tcg_gen_negsetcond_i32(cond
, t0
, c1
, c2
);
867 tcg_gen_and_i32(t1
, v1
, t0
);
868 tcg_gen_andc_i32(ret
, v2
, t0
);
869 tcg_gen_or_i32(ret
, ret
, t1
);
870 tcg_temp_free_i32(t0
);
871 tcg_temp_free_i32(t1
);
875 void tcg_gen_add2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 al
,
876 TCGv_i32 ah
, TCGv_i32 bl
, TCGv_i32 bh
)
878 if (TCG_TARGET_HAS_add2_i32
) {
879 tcg_gen_op6_i32(INDEX_op_add2_i32
, rl
, rh
, al
, ah
, bl
, bh
);
881 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
882 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
883 tcg_gen_concat_i32_i64(t0
, al
, ah
);
884 tcg_gen_concat_i32_i64(t1
, bl
, bh
);
885 tcg_gen_add_i64(t0
, t0
, t1
);
886 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
887 tcg_temp_free_i64(t0
);
888 tcg_temp_free_i64(t1
);
892 void tcg_gen_sub2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 al
,
893 TCGv_i32 ah
, TCGv_i32 bl
, TCGv_i32 bh
)
895 if (TCG_TARGET_HAS_sub2_i32
) {
896 tcg_gen_op6_i32(INDEX_op_sub2_i32
, rl
, rh
, al
, ah
, bl
, bh
);
898 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
899 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
900 tcg_gen_concat_i32_i64(t0
, al
, ah
);
901 tcg_gen_concat_i32_i64(t1
, bl
, bh
);
902 tcg_gen_sub_i64(t0
, t0
, t1
);
903 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
904 tcg_temp_free_i64(t0
);
905 tcg_temp_free_i64(t1
);
909 void tcg_gen_mulu2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 arg1
, TCGv_i32 arg2
)
911 if (TCG_TARGET_HAS_mulu2_i32
) {
912 tcg_gen_op4_i32(INDEX_op_mulu2_i32
, rl
, rh
, arg1
, arg2
);
913 } else if (TCG_TARGET_HAS_muluh_i32
) {
914 TCGv_i32 t
= tcg_temp_ebb_new_i32();
915 tcg_gen_op3_i32(INDEX_op_mul_i32
, t
, arg1
, arg2
);
916 tcg_gen_op3_i32(INDEX_op_muluh_i32
, rh
, arg1
, arg2
);
917 tcg_gen_mov_i32(rl
, t
);
918 tcg_temp_free_i32(t
);
919 } else if (TCG_TARGET_REG_BITS
== 64) {
920 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
921 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
922 tcg_gen_extu_i32_i64(t0
, arg1
);
923 tcg_gen_extu_i32_i64(t1
, arg2
);
924 tcg_gen_mul_i64(t0
, t0
, t1
);
925 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
926 tcg_temp_free_i64(t0
);
927 tcg_temp_free_i64(t1
);
929 qemu_build_not_reached();
933 void tcg_gen_muls2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 arg1
, TCGv_i32 arg2
)
935 if (TCG_TARGET_HAS_muls2_i32
) {
936 tcg_gen_op4_i32(INDEX_op_muls2_i32
, rl
, rh
, arg1
, arg2
);
937 } else if (TCG_TARGET_HAS_mulsh_i32
) {
938 TCGv_i32 t
= tcg_temp_ebb_new_i32();
939 tcg_gen_op3_i32(INDEX_op_mul_i32
, t
, arg1
, arg2
);
940 tcg_gen_op3_i32(INDEX_op_mulsh_i32
, rh
, arg1
, arg2
);
941 tcg_gen_mov_i32(rl
, t
);
942 tcg_temp_free_i32(t
);
943 } else if (TCG_TARGET_REG_BITS
== 32) {
944 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
945 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
946 TCGv_i32 t2
= tcg_temp_ebb_new_i32();
947 TCGv_i32 t3
= tcg_temp_ebb_new_i32();
948 tcg_gen_mulu2_i32(t0
, t1
, arg1
, arg2
);
949 /* Adjust for negative inputs. */
950 tcg_gen_sari_i32(t2
, arg1
, 31);
951 tcg_gen_sari_i32(t3
, arg2
, 31);
952 tcg_gen_and_i32(t2
, t2
, arg2
);
953 tcg_gen_and_i32(t3
, t3
, arg1
);
954 tcg_gen_sub_i32(rh
, t1
, t2
);
955 tcg_gen_sub_i32(rh
, rh
, t3
);
956 tcg_gen_mov_i32(rl
, t0
);
957 tcg_temp_free_i32(t0
);
958 tcg_temp_free_i32(t1
);
959 tcg_temp_free_i32(t2
);
960 tcg_temp_free_i32(t3
);
962 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
963 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
964 tcg_gen_ext_i32_i64(t0
, arg1
);
965 tcg_gen_ext_i32_i64(t1
, arg2
);
966 tcg_gen_mul_i64(t0
, t0
, t1
);
967 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
968 tcg_temp_free_i64(t0
);
969 tcg_temp_free_i64(t1
);
973 void tcg_gen_mulsu2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 arg1
, TCGv_i32 arg2
)
975 if (TCG_TARGET_REG_BITS
== 32) {
976 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
977 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
978 TCGv_i32 t2
= tcg_temp_ebb_new_i32();
979 tcg_gen_mulu2_i32(t0
, t1
, arg1
, arg2
);
980 /* Adjust for negative input for the signed arg1. */
981 tcg_gen_sari_i32(t2
, arg1
, 31);
982 tcg_gen_and_i32(t2
, t2
, arg2
);
983 tcg_gen_sub_i32(rh
, t1
, t2
);
984 tcg_gen_mov_i32(rl
, t0
);
985 tcg_temp_free_i32(t0
);
986 tcg_temp_free_i32(t1
);
987 tcg_temp_free_i32(t2
);
989 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
990 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
991 tcg_gen_ext_i32_i64(t0
, arg1
);
992 tcg_gen_extu_i32_i64(t1
, arg2
);
993 tcg_gen_mul_i64(t0
, t0
, t1
);
994 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
995 tcg_temp_free_i64(t0
);
996 tcg_temp_free_i64(t1
);
1000 void tcg_gen_ext8s_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1002 if (TCG_TARGET_HAS_ext8s_i32
) {
1003 tcg_gen_op2_i32(INDEX_op_ext8s_i32
, ret
, arg
);
1005 tcg_gen_shli_i32(ret
, arg
, 24);
1006 tcg_gen_sari_i32(ret
, ret
, 24);
1010 void tcg_gen_ext16s_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1012 if (TCG_TARGET_HAS_ext16s_i32
) {
1013 tcg_gen_op2_i32(INDEX_op_ext16s_i32
, ret
, arg
);
1015 tcg_gen_shli_i32(ret
, arg
, 16);
1016 tcg_gen_sari_i32(ret
, ret
, 16);
1020 void tcg_gen_ext8u_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1022 if (TCG_TARGET_HAS_ext8u_i32
) {
1023 tcg_gen_op2_i32(INDEX_op_ext8u_i32
, ret
, arg
);
1025 tcg_gen_andi_i32(ret
, arg
, 0xffu
);
1029 void tcg_gen_ext16u_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1031 if (TCG_TARGET_HAS_ext16u_i32
) {
1032 tcg_gen_op2_i32(INDEX_op_ext16u_i32
, ret
, arg
);
1034 tcg_gen_andi_i32(ret
, arg
, 0xffffu
);
1039 * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
1041 * Byte pattern: xxab -> yyba
1043 * With TCG_BSWAP_IZ, x == zero, else undefined.
1044 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1046 void tcg_gen_bswap16_i32(TCGv_i32 ret
, TCGv_i32 arg
, int flags
)
1048 /* Only one extension flag may be present. */
1049 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
1051 if (TCG_TARGET_HAS_bswap16_i32
) {
1052 tcg_gen_op3i_i32(INDEX_op_bswap16_i32
, ret
, arg
, flags
);
1054 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1055 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
1057 /* arg = ..ab (IZ) xxab (!IZ) */
1058 tcg_gen_shri_i32(t0
, arg
, 8); /* t0 = ...a (IZ) .xxa (!IZ) */
1059 if (!(flags
& TCG_BSWAP_IZ
)) {
1060 tcg_gen_ext8u_i32(t0
, t0
); /* t0 = ...a */
1063 if (flags
& TCG_BSWAP_OS
) {
1064 tcg_gen_shli_i32(t1
, arg
, 24); /* t1 = b... */
1065 tcg_gen_sari_i32(t1
, t1
, 16); /* t1 = ssb. */
1066 } else if (flags
& TCG_BSWAP_OZ
) {
1067 tcg_gen_ext8u_i32(t1
, arg
); /* t1 = ...b */
1068 tcg_gen_shli_i32(t1
, t1
, 8); /* t1 = ..b. */
1070 tcg_gen_shli_i32(t1
, arg
, 8); /* t1 = xab. */
1073 tcg_gen_or_i32(ret
, t0
, t1
); /* ret = ..ba (OZ) */
1075 /* = xaba (no flag) */
1076 tcg_temp_free_i32(t0
);
1077 tcg_temp_free_i32(t1
);
1082 * bswap32_i32: 32-bit byte swap on a 32-bit value.
1084 * Byte pattern: abcd -> dcba
1086 void tcg_gen_bswap32_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1088 if (TCG_TARGET_HAS_bswap32_i32
) {
1089 tcg_gen_op3i_i32(INDEX_op_bswap32_i32
, ret
, arg
, 0);
1091 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1092 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
1093 TCGv_i32 t2
= tcg_constant_i32(0x00ff00ff);
1096 tcg_gen_shri_i32(t0
, arg
, 8); /* t0 = .abc */
1097 tcg_gen_and_i32(t1
, arg
, t2
); /* t1 = .b.d */
1098 tcg_gen_and_i32(t0
, t0
, t2
); /* t0 = .a.c */
1099 tcg_gen_shli_i32(t1
, t1
, 8); /* t1 = b.d. */
1100 tcg_gen_or_i32(ret
, t0
, t1
); /* ret = badc */
1102 tcg_gen_shri_i32(t0
, ret
, 16); /* t0 = ..ba */
1103 tcg_gen_shli_i32(t1
, ret
, 16); /* t1 = dc.. */
1104 tcg_gen_or_i32(ret
, t0
, t1
); /* ret = dcba */
1106 tcg_temp_free_i32(t0
);
1107 tcg_temp_free_i32(t1
);
1111 void tcg_gen_hswap_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1113 /* Swapping 2 16-bit elements is a rotate. */
1114 tcg_gen_rotli_i32(ret
, arg
, 16);
1117 void tcg_gen_smin_i32(TCGv_i32 ret
, TCGv_i32 a
, TCGv_i32 b
)
1119 tcg_gen_movcond_i32(TCG_COND_LT
, ret
, a
, b
, a
, b
);
1122 void tcg_gen_umin_i32(TCGv_i32 ret
, TCGv_i32 a
, TCGv_i32 b
)
1124 tcg_gen_movcond_i32(TCG_COND_LTU
, ret
, a
, b
, a
, b
);
1127 void tcg_gen_smax_i32(TCGv_i32 ret
, TCGv_i32 a
, TCGv_i32 b
)
1129 tcg_gen_movcond_i32(TCG_COND_LT
, ret
, a
, b
, b
, a
);
1132 void tcg_gen_umax_i32(TCGv_i32 ret
, TCGv_i32 a
, TCGv_i32 b
)
1134 tcg_gen_movcond_i32(TCG_COND_LTU
, ret
, a
, b
, b
, a
);
1137 void tcg_gen_abs_i32(TCGv_i32 ret
, TCGv_i32 a
)
1139 TCGv_i32 t
= tcg_temp_ebb_new_i32();
1141 tcg_gen_sari_i32(t
, a
, 31);
1142 tcg_gen_xor_i32(ret
, a
, t
);
1143 tcg_gen_sub_i32(ret
, ret
, t
);
1144 tcg_temp_free_i32(t
);
1149 #if TCG_TARGET_REG_BITS == 32
1150 /* These are all inline for TCG_TARGET_REG_BITS == 64. */
1152 void tcg_gen_discard_i64(TCGv_i64 arg
)
1154 tcg_gen_discard_i32(TCGV_LOW(arg
));
1155 tcg_gen_discard_i32(TCGV_HIGH(arg
));
1158 void tcg_gen_mov_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1160 TCGTemp
*ts
= tcgv_i64_temp(arg
);
1162 /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
1163 if (ts
->kind
== TEMP_CONST
) {
1164 tcg_gen_movi_i64(ret
, ts
->val
);
1166 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1167 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1171 void tcg_gen_movi_i64(TCGv_i64 ret
, int64_t arg
)
1173 tcg_gen_movi_i32(TCGV_LOW(ret
), arg
);
1174 tcg_gen_movi_i32(TCGV_HIGH(ret
), arg
>> 32);
1177 void tcg_gen_ld8u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1179 tcg_gen_ld8u_i32(TCGV_LOW(ret
), arg2
, offset
);
1180 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1183 void tcg_gen_ld8s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1185 tcg_gen_ld8s_i32(TCGV_LOW(ret
), arg2
, offset
);
1186 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1189 void tcg_gen_ld16u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1191 tcg_gen_ld16u_i32(TCGV_LOW(ret
), arg2
, offset
);
1192 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1195 void tcg_gen_ld16s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1197 tcg_gen_ld16s_i32(TCGV_LOW(ret
), arg2
, offset
);
1198 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1201 void tcg_gen_ld32u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1203 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
1204 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1207 void tcg_gen_ld32s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1209 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
1210 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1213 void tcg_gen_ld_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1215 /* Since arg2 and ret have different types,
1216 they cannot be the same temporary */
1218 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
);
1219 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
+ 4);
1221 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
1222 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
+ 4);
1226 void tcg_gen_st8_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1228 tcg_gen_st8_i32(TCGV_LOW(arg1
), arg2
, offset
);
1231 void tcg_gen_st16_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1233 tcg_gen_st16_i32(TCGV_LOW(arg1
), arg2
, offset
);
1236 void tcg_gen_st32_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1238 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
1241 void tcg_gen_st_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1244 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
);
1245 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
+ 4);
1247 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
1248 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
+ 4);
1252 void tcg_gen_add_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1254 tcg_gen_add2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
), TCGV_LOW(arg1
),
1255 TCGV_HIGH(arg1
), TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1258 void tcg_gen_sub_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1260 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
), TCGV_LOW(arg1
),
1261 TCGV_HIGH(arg1
), TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1264 void tcg_gen_and_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1266 tcg_gen_and_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1267 tcg_gen_and_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1270 void tcg_gen_or_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1272 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1273 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1276 void tcg_gen_xor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1278 tcg_gen_xor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1279 tcg_gen_xor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1282 void tcg_gen_shl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1284 gen_helper_shl_i64(ret
, arg1
, arg2
);
1287 void tcg_gen_shr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1289 gen_helper_shr_i64(ret
, arg1
, arg2
);
1292 void tcg_gen_sar_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1294 gen_helper_sar_i64(ret
, arg1
, arg2
);
1297 void tcg_gen_mul_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1302 t0
= tcg_temp_ebb_new_i64();
1303 t1
= tcg_temp_ebb_new_i32();
1305 tcg_gen_mulu2_i32(TCGV_LOW(t0
), TCGV_HIGH(t0
),
1306 TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1308 tcg_gen_mul_i32(t1
, TCGV_LOW(arg1
), TCGV_HIGH(arg2
));
1309 tcg_gen_add_i32(TCGV_HIGH(t0
), TCGV_HIGH(t0
), t1
);
1310 tcg_gen_mul_i32(t1
, TCGV_HIGH(arg1
), TCGV_LOW(arg2
));
1311 tcg_gen_add_i32(TCGV_HIGH(t0
), TCGV_HIGH(t0
), t1
);
1313 tcg_gen_mov_i64(ret
, t0
);
1314 tcg_temp_free_i64(t0
);
1315 tcg_temp_free_i32(t1
);
1320 void tcg_gen_movi_i64(TCGv_i64 ret
, int64_t arg
)
1322 tcg_gen_mov_i64(ret
, tcg_constant_i64(arg
));
#endif /* TCG_TARGET_REG_BITS == 32 */
1327 void tcg_gen_addi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1329 /* some cases can be optimized here */
1331 tcg_gen_mov_i64(ret
, arg1
);
1332 } else if (TCG_TARGET_REG_BITS
== 64) {
1333 tcg_gen_add_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1335 tcg_gen_add2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1336 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1337 tcg_constant_i32(arg2
), tcg_constant_i32(arg2
>> 32));
1341 void tcg_gen_subfi_i64(TCGv_i64 ret
, int64_t arg1
, TCGv_i64 arg2
)
1343 if (arg1
== 0 && TCG_TARGET_HAS_neg_i64
) {
1344 /* Don't recurse with tcg_gen_neg_i64. */
1345 tcg_gen_op2_i64(INDEX_op_neg_i64
, ret
, arg2
);
1346 } else if (TCG_TARGET_REG_BITS
== 64) {
1347 tcg_gen_sub_i64(ret
, tcg_constant_i64(arg1
), arg2
);
1349 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1350 tcg_constant_i32(arg1
), tcg_constant_i32(arg1
>> 32),
1351 TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1355 void tcg_gen_subi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1357 /* some cases can be optimized here */
1359 tcg_gen_mov_i64(ret
, arg1
);
1360 } else if (TCG_TARGET_REG_BITS
== 64) {
1361 tcg_gen_sub_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1363 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1364 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1365 tcg_constant_i32(arg2
), tcg_constant_i32(arg2
>> 32));
1369 void tcg_gen_andi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1371 if (TCG_TARGET_REG_BITS
== 32) {
1372 tcg_gen_andi_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1373 tcg_gen_andi_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1377 /* Some cases can be optimized here. */
1380 tcg_gen_movi_i64(ret
, 0);
1383 tcg_gen_mov_i64(ret
, arg1
);
1386 /* Don't recurse with tcg_gen_ext8u_i64. */
1387 if (TCG_TARGET_HAS_ext8u_i64
) {
1388 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg1
);
1393 if (TCG_TARGET_HAS_ext16u_i64
) {
1394 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg1
);
1399 if (TCG_TARGET_HAS_ext32u_i64
) {
1400 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg1
);
1406 tcg_gen_and_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1409 void tcg_gen_ori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1411 if (TCG_TARGET_REG_BITS
== 32) {
1412 tcg_gen_ori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1413 tcg_gen_ori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1416 /* Some cases can be optimized here. */
1418 tcg_gen_movi_i64(ret
, -1);
1419 } else if (arg2
== 0) {
1420 tcg_gen_mov_i64(ret
, arg1
);
1422 tcg_gen_or_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1426 void tcg_gen_xori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1428 if (TCG_TARGET_REG_BITS
== 32) {
1429 tcg_gen_xori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1430 tcg_gen_xori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1433 /* Some cases can be optimized here. */
1435 tcg_gen_mov_i64(ret
, arg1
);
1436 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i64
) {
1437 /* Don't recurse with tcg_gen_not_i64. */
1438 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg1
);
1440 tcg_gen_xor_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1444 static inline void tcg_gen_shifti_i64(TCGv_i64 ret
, TCGv_i64 arg1
,
1445 unsigned c
, bool right
, bool arith
)
1447 tcg_debug_assert(c
< 64);
1449 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
1450 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
1451 } else if (c
>= 32) {
1455 tcg_gen_sari_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), c
);
1456 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), 31);
1458 tcg_gen_shri_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), c
);
1459 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1462 tcg_gen_shli_i32(TCGV_HIGH(ret
), TCGV_LOW(arg1
), c
);
1463 tcg_gen_movi_i32(TCGV_LOW(ret
), 0);
1466 if (TCG_TARGET_HAS_extract2_i32
) {
1467 tcg_gen_extract2_i32(TCGV_LOW(ret
),
1468 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), c
);
1470 tcg_gen_shri_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), c
);
1471 tcg_gen_deposit_i32(TCGV_LOW(ret
), TCGV_LOW(ret
),
1472 TCGV_HIGH(arg1
), 32 - c
, c
);
1475 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), c
);
1477 tcg_gen_shri_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), c
);
1480 if (TCG_TARGET_HAS_extract2_i32
) {
1481 tcg_gen_extract2_i32(TCGV_HIGH(ret
),
1482 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), 32 - c
);
1484 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1485 tcg_gen_shri_i32(t0
, TCGV_LOW(arg1
), 32 - c
);
1486 tcg_gen_deposit_i32(TCGV_HIGH(ret
), t0
,
1487 TCGV_HIGH(arg1
), c
, 32 - c
);
1488 tcg_temp_free_i32(t0
);
1490 tcg_gen_shli_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), c
);
1494 void tcg_gen_shli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1496 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1497 if (TCG_TARGET_REG_BITS
== 32) {
1498 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 0, 0);
1499 } else if (arg2
== 0) {
1500 tcg_gen_mov_i64(ret
, arg1
);
1502 tcg_gen_shl_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1506 void tcg_gen_shri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1508 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1509 if (TCG_TARGET_REG_BITS
== 32) {
1510 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 0);
1511 } else if (arg2
== 0) {
1512 tcg_gen_mov_i64(ret
, arg1
);
1514 tcg_gen_shr_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1518 void tcg_gen_sari_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1520 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1521 if (TCG_TARGET_REG_BITS
== 32) {
1522 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 1);
1523 } else if (arg2
== 0) {
1524 tcg_gen_mov_i64(ret
, arg1
);
1526 tcg_gen_sar_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1530 void tcg_gen_brcond_i64(TCGCond cond
, TCGv_i64 arg1
, TCGv_i64 arg2
, TCGLabel
*l
)
1532 if (cond
== TCG_COND_ALWAYS
) {
1534 } else if (cond
!= TCG_COND_NEVER
) {
1535 if (TCG_TARGET_REG_BITS
== 32) {
1536 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
, TCGV_LOW(arg1
),
1537 TCGV_HIGH(arg1
), TCGV_LOW(arg2
),
1538 TCGV_HIGH(arg2
), cond
, label_arg(l
));
1540 tcg_gen_op4ii_i64(INDEX_op_brcond_i64
, arg1
, arg2
, cond
,
1543 add_last_as_label_use(l
);
1547 void tcg_gen_brcondi_i64(TCGCond cond
, TCGv_i64 arg1
, int64_t arg2
, TCGLabel
*l
)
1549 if (TCG_TARGET_REG_BITS
== 64) {
1550 tcg_gen_brcond_i64(cond
, arg1
, tcg_constant_i64(arg2
), l
);
1551 } else if (cond
== TCG_COND_ALWAYS
) {
1553 } else if (cond
!= TCG_COND_NEVER
) {
1554 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
,
1555 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1556 tcg_constant_i32(arg2
),
1557 tcg_constant_i32(arg2
>> 32),
1558 cond
, label_arg(l
));
1559 add_last_as_label_use(l
);
1563 void tcg_gen_setcond_i64(TCGCond cond
, TCGv_i64 ret
,
1564 TCGv_i64 arg1
, TCGv_i64 arg2
)
1566 if (cond
== TCG_COND_ALWAYS
) {
1567 tcg_gen_movi_i64(ret
, 1);
1568 } else if (cond
== TCG_COND_NEVER
) {
1569 tcg_gen_movi_i64(ret
, 0);
1571 if (TCG_TARGET_REG_BITS
== 32) {
1572 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1573 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1574 TCGV_LOW(arg2
), TCGV_HIGH(arg2
), cond
);
1575 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1577 tcg_gen_op4i_i64(INDEX_op_setcond_i64
, ret
, arg1
, arg2
, cond
);
1582 void tcg_gen_setcondi_i64(TCGCond cond
, TCGv_i64 ret
,
1583 TCGv_i64 arg1
, int64_t arg2
)
1585 if (TCG_TARGET_REG_BITS
== 64) {
1586 tcg_gen_setcond_i64(cond
, ret
, arg1
, tcg_constant_i64(arg2
));
1587 } else if (cond
== TCG_COND_ALWAYS
) {
1588 tcg_gen_movi_i64(ret
, 1);
1589 } else if (cond
== TCG_COND_NEVER
) {
1590 tcg_gen_movi_i64(ret
, 0);
1592 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1593 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1594 tcg_constant_i32(arg2
),
1595 tcg_constant_i32(arg2
>> 32), cond
);
1596 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1600 void tcg_gen_negsetcond_i64(TCGCond cond
, TCGv_i64 ret
,
1601 TCGv_i64 arg1
, TCGv_i64 arg2
)
1603 if (cond
== TCG_COND_ALWAYS
) {
1604 tcg_gen_movi_i64(ret
, -1);
1605 } else if (cond
== TCG_COND_NEVER
) {
1606 tcg_gen_movi_i64(ret
, 0);
1607 } else if (TCG_TARGET_HAS_negsetcond_i64
) {
1608 tcg_gen_op4i_i64(INDEX_op_negsetcond_i64
, ret
, arg1
, arg2
, cond
);
1609 } else if (TCG_TARGET_REG_BITS
== 32) {
1610 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1611 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1612 TCGV_LOW(arg2
), TCGV_HIGH(arg2
), cond
);
1613 tcg_gen_neg_i32(TCGV_LOW(ret
), TCGV_LOW(ret
));
1614 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
));
1616 tcg_gen_setcond_i64(cond
, ret
, arg1
, arg2
);
1617 tcg_gen_neg_i64(ret
, ret
);
1621 void tcg_gen_muli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1624 tcg_gen_movi_i64(ret
, 0);
1625 } else if (is_power_of_2(arg2
)) {
1626 tcg_gen_shli_i64(ret
, arg1
, ctz64(arg2
));
1628 tcg_gen_mul_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1632 void tcg_gen_div_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1634 if (TCG_TARGET_HAS_div_i64
) {
1635 tcg_gen_op3_i64(INDEX_op_div_i64
, ret
, arg1
, arg2
);
1636 } else if (TCG_TARGET_HAS_div2_i64
) {
1637 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1638 tcg_gen_sari_i64(t0
, arg1
, 63);
1639 tcg_gen_op5_i64(INDEX_op_div2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1640 tcg_temp_free_i64(t0
);
1642 gen_helper_div_i64(ret
, arg1
, arg2
);
1646 void tcg_gen_rem_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1648 if (TCG_TARGET_HAS_rem_i64
) {
1649 tcg_gen_op3_i64(INDEX_op_rem_i64
, ret
, arg1
, arg2
);
1650 } else if (TCG_TARGET_HAS_div_i64
) {
1651 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1652 tcg_gen_op3_i64(INDEX_op_div_i64
, t0
, arg1
, arg2
);
1653 tcg_gen_mul_i64(t0
, t0
, arg2
);
1654 tcg_gen_sub_i64(ret
, arg1
, t0
);
1655 tcg_temp_free_i64(t0
);
1656 } else if (TCG_TARGET_HAS_div2_i64
) {
1657 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1658 tcg_gen_sari_i64(t0
, arg1
, 63);
1659 tcg_gen_op5_i64(INDEX_op_div2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1660 tcg_temp_free_i64(t0
);
1662 gen_helper_rem_i64(ret
, arg1
, arg2
);
1666 void tcg_gen_divu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1668 if (TCG_TARGET_HAS_div_i64
) {
1669 tcg_gen_op3_i64(INDEX_op_divu_i64
, ret
, arg1
, arg2
);
1670 } else if (TCG_TARGET_HAS_div2_i64
) {
1671 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1672 tcg_gen_movi_i64(t0
, 0);
1673 tcg_gen_op5_i64(INDEX_op_divu2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1674 tcg_temp_free_i64(t0
);
1676 gen_helper_divu_i64(ret
, arg1
, arg2
);
1680 void tcg_gen_remu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1682 if (TCG_TARGET_HAS_rem_i64
) {
1683 tcg_gen_op3_i64(INDEX_op_remu_i64
, ret
, arg1
, arg2
);
1684 } else if (TCG_TARGET_HAS_div_i64
) {
1685 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1686 tcg_gen_op3_i64(INDEX_op_divu_i64
, t0
, arg1
, arg2
);
1687 tcg_gen_mul_i64(t0
, t0
, arg2
);
1688 tcg_gen_sub_i64(ret
, arg1
, t0
);
1689 tcg_temp_free_i64(t0
);
1690 } else if (TCG_TARGET_HAS_div2_i64
) {
1691 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1692 tcg_gen_movi_i64(t0
, 0);
1693 tcg_gen_op5_i64(INDEX_op_divu2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1694 tcg_temp_free_i64(t0
);
1696 gen_helper_remu_i64(ret
, arg1
, arg2
);
1700 void tcg_gen_ext8s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1702 if (TCG_TARGET_REG_BITS
== 32) {
1703 tcg_gen_ext8s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1704 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1705 } else if (TCG_TARGET_HAS_ext8s_i64
) {
1706 tcg_gen_op2_i64(INDEX_op_ext8s_i64
, ret
, arg
);
1708 tcg_gen_shli_i64(ret
, arg
, 56);
1709 tcg_gen_sari_i64(ret
, ret
, 56);
1713 void tcg_gen_ext16s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1715 if (TCG_TARGET_REG_BITS
== 32) {
1716 tcg_gen_ext16s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1717 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1718 } else if (TCG_TARGET_HAS_ext16s_i64
) {
1719 tcg_gen_op2_i64(INDEX_op_ext16s_i64
, ret
, arg
);
1721 tcg_gen_shli_i64(ret
, arg
, 48);
1722 tcg_gen_sari_i64(ret
, ret
, 48);
1726 void tcg_gen_ext32s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1728 if (TCG_TARGET_REG_BITS
== 32) {
1729 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1730 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1731 } else if (TCG_TARGET_HAS_ext32s_i64
) {
1732 tcg_gen_op2_i64(INDEX_op_ext32s_i64
, ret
, arg
);
1734 tcg_gen_shli_i64(ret
, arg
, 32);
1735 tcg_gen_sari_i64(ret
, ret
, 32);
1739 void tcg_gen_ext8u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1741 if (TCG_TARGET_REG_BITS
== 32) {
1742 tcg_gen_ext8u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1743 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1744 } else if (TCG_TARGET_HAS_ext8u_i64
) {
1745 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg
);
1747 tcg_gen_andi_i64(ret
, arg
, 0xffu
);
1751 void tcg_gen_ext16u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1753 if (TCG_TARGET_REG_BITS
== 32) {
1754 tcg_gen_ext16u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1755 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1756 } else if (TCG_TARGET_HAS_ext16u_i64
) {
1757 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg
);
1759 tcg_gen_andi_i64(ret
, arg
, 0xffffu
);
1763 void tcg_gen_ext32u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1765 if (TCG_TARGET_REG_BITS
== 32) {
1766 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1767 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1768 } else if (TCG_TARGET_HAS_ext32u_i64
) {
1769 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg
);
1771 tcg_gen_andi_i64(ret
, arg
, 0xffffffffu
);
1776 * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
1778 * Byte pattern: xxxxxxxxab -> yyyyyyyyba
1780 * With TCG_BSWAP_IZ, x == zero, else undefined.
1781 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1783 void tcg_gen_bswap16_i64(TCGv_i64 ret
, TCGv_i64 arg
, int flags
)
1785 /* Only one extension flag may be present. */
1786 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
1788 if (TCG_TARGET_REG_BITS
== 32) {
1789 tcg_gen_bswap16_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), flags
);
1790 if (flags
& TCG_BSWAP_OS
) {
1791 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1793 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1795 } else if (TCG_TARGET_HAS_bswap16_i64
) {
1796 tcg_gen_op3i_i64(INDEX_op_bswap16_i64
, ret
, arg
, flags
);
1798 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1799 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1801 /* arg = ......ab or xxxxxxab */
1802 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .......a or .xxxxxxa */
1803 if (!(flags
& TCG_BSWAP_IZ
)) {
1804 tcg_gen_ext8u_i64(t0
, t0
); /* t0 = .......a */
1807 if (flags
& TCG_BSWAP_OS
) {
1808 tcg_gen_shli_i64(t1
, arg
, 56); /* t1 = b....... */
1809 tcg_gen_sari_i64(t1
, t1
, 48); /* t1 = ssssssb. */
1810 } else if (flags
& TCG_BSWAP_OZ
) {
1811 tcg_gen_ext8u_i64(t1
, arg
); /* t1 = .......b */
1812 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = ......b. */
1814 tcg_gen_shli_i64(t1
, arg
, 8); /* t1 = xxxxxab. */
1817 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ......ba (OZ) */
1819 /* xxxxxaba (no flag) */
1820 tcg_temp_free_i64(t0
);
1821 tcg_temp_free_i64(t1
);
1826 * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
1828 * Byte pattern: xxxxabcd -> yyyydcba
1830 * With TCG_BSWAP_IZ, x == zero, else undefined.
1831 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1833 void tcg_gen_bswap32_i64(TCGv_i64 ret
, TCGv_i64 arg
, int flags
)
1835 /* Only one extension flag may be present. */
1836 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
1838 if (TCG_TARGET_REG_BITS
== 32) {
1839 tcg_gen_bswap32_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1840 if (flags
& TCG_BSWAP_OS
) {
1841 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1843 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1845 } else if (TCG_TARGET_HAS_bswap32_i64
) {
1846 tcg_gen_op3i_i64(INDEX_op_bswap32_i64
, ret
, arg
, flags
);
1848 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1849 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1850 TCGv_i64 t2
= tcg_constant_i64(0x00ff00ff);
1852 /* arg = xxxxabcd */
1853 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .xxxxabc */
1854 tcg_gen_and_i64(t1
, arg
, t2
); /* t1 = .....b.d */
1855 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = .....a.c */
1856 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = ....b.d. */
1857 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ....badc */
1859 tcg_gen_shli_i64(t1
, ret
, 48); /* t1 = dc...... */
1860 tcg_gen_shri_i64(t0
, ret
, 16); /* t0 = ......ba */
1861 if (flags
& TCG_BSWAP_OS
) {
1862 tcg_gen_sari_i64(t1
, t1
, 32); /* t1 = ssssdc.. */
1864 tcg_gen_shri_i64(t1
, t1
, 32); /* t1 = ....dc.. */
1866 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ssssdcba (OS) */
1867 /* ....dcba (else) */
1869 tcg_temp_free_i64(t0
);
1870 tcg_temp_free_i64(t1
);
1875 * bswap64_i64: 64-bit byte swap on a 64-bit value.
1877 * Byte pattern: abcdefgh -> hgfedcba
1879 void tcg_gen_bswap64_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1881 if (TCG_TARGET_REG_BITS
== 32) {
1883 t0
= tcg_temp_ebb_new_i32();
1884 t1
= tcg_temp_ebb_new_i32();
1886 tcg_gen_bswap32_i32(t0
, TCGV_LOW(arg
));
1887 tcg_gen_bswap32_i32(t1
, TCGV_HIGH(arg
));
1888 tcg_gen_mov_i32(TCGV_LOW(ret
), t1
);
1889 tcg_gen_mov_i32(TCGV_HIGH(ret
), t0
);
1890 tcg_temp_free_i32(t0
);
1891 tcg_temp_free_i32(t1
);
1892 } else if (TCG_TARGET_HAS_bswap64_i64
) {
1893 tcg_gen_op3i_i64(INDEX_op_bswap64_i64
, ret
, arg
, 0);
1895 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1896 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1897 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
1899 /* arg = abcdefgh */
1900 tcg_gen_movi_i64(t2
, 0x00ff00ff00ff00ffull
);
1901 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .abcdefg */
1902 tcg_gen_and_i64(t1
, arg
, t2
); /* t1 = .b.d.f.h */
1903 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = .a.c.e.g */
1904 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = b.d.f.h. */
1905 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = badcfehg */
1907 tcg_gen_movi_i64(t2
, 0x0000ffff0000ffffull
);
1908 tcg_gen_shri_i64(t0
, ret
, 16); /* t0 = ..badcfe */
1909 tcg_gen_and_i64(t1
, ret
, t2
); /* t1 = ..dc..hg */
1910 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = ..ba..fe */
1911 tcg_gen_shli_i64(t1
, t1
, 16); /* t1 = dc..hg.. */
1912 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = dcbahgfe */
1914 tcg_gen_shri_i64(t0
, ret
, 32); /* t0 = ....dcba */
1915 tcg_gen_shli_i64(t1
, ret
, 32); /* t1 = hgfe.... */
1916 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = hgfedcba */
1918 tcg_temp_free_i64(t0
);
1919 tcg_temp_free_i64(t1
);
1920 tcg_temp_free_i64(t2
);
1924 void tcg_gen_hswap_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1926 uint64_t m
= 0x0000ffff0000ffffull
;
1927 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1928 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1930 /* See include/qemu/bitops.h, hswap64. */
1931 tcg_gen_rotli_i64(t1
, arg
, 32);
1932 tcg_gen_andi_i64(t0
, t1
, m
);
1933 tcg_gen_shli_i64(t0
, t0
, 16);
1934 tcg_gen_shri_i64(t1
, t1
, 16);
1935 tcg_gen_andi_i64(t1
, t1
, m
);
1936 tcg_gen_or_i64(ret
, t0
, t1
);
1938 tcg_temp_free_i64(t0
);
1939 tcg_temp_free_i64(t1
);
1942 void tcg_gen_wswap_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1944 /* Swapping 2 32-bit elements is a rotate. */
1945 tcg_gen_rotli_i64(ret
, arg
, 32);
1948 void tcg_gen_not_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1950 if (TCG_TARGET_REG_BITS
== 32) {
1951 tcg_gen_not_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1952 tcg_gen_not_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1953 } else if (TCG_TARGET_HAS_not_i64
) {
1954 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg
);
1956 tcg_gen_xori_i64(ret
, arg
, -1);
1960 void tcg_gen_andc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1962 if (TCG_TARGET_REG_BITS
== 32) {
1963 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1964 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1965 } else if (TCG_TARGET_HAS_andc_i64
) {
1966 tcg_gen_op3_i64(INDEX_op_andc_i64
, ret
, arg1
, arg2
);
1968 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1969 tcg_gen_not_i64(t0
, arg2
);
1970 tcg_gen_and_i64(ret
, arg1
, t0
);
1971 tcg_temp_free_i64(t0
);
1975 void tcg_gen_eqv_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1977 if (TCG_TARGET_REG_BITS
== 32) {
1978 tcg_gen_eqv_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1979 tcg_gen_eqv_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1980 } else if (TCG_TARGET_HAS_eqv_i64
) {
1981 tcg_gen_op3_i64(INDEX_op_eqv_i64
, ret
, arg1
, arg2
);
1983 tcg_gen_xor_i64(ret
, arg1
, arg2
);
1984 tcg_gen_not_i64(ret
, ret
);
1988 void tcg_gen_nand_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1990 if (TCG_TARGET_REG_BITS
== 32) {
1991 tcg_gen_nand_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1992 tcg_gen_nand_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1993 } else if (TCG_TARGET_HAS_nand_i64
) {
1994 tcg_gen_op3_i64(INDEX_op_nand_i64
, ret
, arg1
, arg2
);
1996 tcg_gen_and_i64(ret
, arg1
, arg2
);
1997 tcg_gen_not_i64(ret
, ret
);
2001 void tcg_gen_nor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2003 if (TCG_TARGET_REG_BITS
== 32) {
2004 tcg_gen_nor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
2005 tcg_gen_nor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
2006 } else if (TCG_TARGET_HAS_nor_i64
) {
2007 tcg_gen_op3_i64(INDEX_op_nor_i64
, ret
, arg1
, arg2
);
2009 tcg_gen_or_i64(ret
, arg1
, arg2
);
2010 tcg_gen_not_i64(ret
, ret
);
2014 void tcg_gen_orc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2016 if (TCG_TARGET_REG_BITS
== 32) {
2017 tcg_gen_orc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
2018 tcg_gen_orc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
2019 } else if (TCG_TARGET_HAS_orc_i64
) {
2020 tcg_gen_op3_i64(INDEX_op_orc_i64
, ret
, arg1
, arg2
);
2022 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2023 tcg_gen_not_i64(t0
, arg2
);
2024 tcg_gen_or_i64(ret
, arg1
, t0
);
2025 tcg_temp_free_i64(t0
);
2029 void tcg_gen_clz_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2031 if (TCG_TARGET_HAS_clz_i64
) {
2032 tcg_gen_op3_i64(INDEX_op_clz_i64
, ret
, arg1
, arg2
);
2034 gen_helper_clz_i64(ret
, arg1
, arg2
);
2038 void tcg_gen_clzi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, uint64_t arg2
)
2040 if (TCG_TARGET_REG_BITS
== 32
2041 && TCG_TARGET_HAS_clz_i32
2042 && arg2
<= 0xffffffffu
) {
2043 TCGv_i32 t
= tcg_temp_ebb_new_i32();
2044 tcg_gen_clzi_i32(t
, TCGV_LOW(arg1
), arg2
- 32);
2045 tcg_gen_addi_i32(t
, t
, 32);
2046 tcg_gen_clz_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), t
);
2047 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2048 tcg_temp_free_i32(t
);
2050 tcg_gen_clz_i64(ret
, arg1
, tcg_constant_i64(arg2
));
2054 void tcg_gen_ctz_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2056 if (TCG_TARGET_HAS_ctz_i64
) {
2057 tcg_gen_op3_i64(INDEX_op_ctz_i64
, ret
, arg1
, arg2
);
2058 } else if (TCG_TARGET_HAS_ctpop_i64
|| TCG_TARGET_HAS_clz_i64
) {
2059 TCGv_i64 z
, t
= tcg_temp_ebb_new_i64();
2061 if (TCG_TARGET_HAS_ctpop_i64
) {
2062 tcg_gen_subi_i64(t
, arg1
, 1);
2063 tcg_gen_andc_i64(t
, t
, arg1
);
2064 tcg_gen_ctpop_i64(t
, t
);
2066 /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
2067 tcg_gen_neg_i64(t
, arg1
);
2068 tcg_gen_and_i64(t
, t
, arg1
);
2069 tcg_gen_clzi_i64(t
, t
, 64);
2070 tcg_gen_xori_i64(t
, t
, 63);
2072 z
= tcg_constant_i64(0);
2073 tcg_gen_movcond_i64(TCG_COND_EQ
, ret
, arg1
, z
, arg2
, t
);
2074 tcg_temp_free_i64(t
);
2075 tcg_temp_free_i64(z
);
2077 gen_helper_ctz_i64(ret
, arg1
, arg2
);
2081 void tcg_gen_ctzi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, uint64_t arg2
)
2083 if (TCG_TARGET_REG_BITS
== 32
2084 && TCG_TARGET_HAS_ctz_i32
2085 && arg2
<= 0xffffffffu
) {
2086 TCGv_i32 t32
= tcg_temp_ebb_new_i32();
2087 tcg_gen_ctzi_i32(t32
, TCGV_HIGH(arg1
), arg2
- 32);
2088 tcg_gen_addi_i32(t32
, t32
, 32);
2089 tcg_gen_ctz_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), t32
);
2090 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2091 tcg_temp_free_i32(t32
);
2092 } else if (!TCG_TARGET_HAS_ctz_i64
2093 && TCG_TARGET_HAS_ctpop_i64
2095 /* This equivalence has the advantage of not requiring a fixup. */
2096 TCGv_i64 t
= tcg_temp_ebb_new_i64();
2097 tcg_gen_subi_i64(t
, arg1
, 1);
2098 tcg_gen_andc_i64(t
, t
, arg1
);
2099 tcg_gen_ctpop_i64(ret
, t
);
2100 tcg_temp_free_i64(t
);
2102 tcg_gen_ctz_i64(ret
, arg1
, tcg_constant_i64(arg2
));
2106 void tcg_gen_clrsb_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2108 if (TCG_TARGET_HAS_clz_i64
|| TCG_TARGET_HAS_clz_i32
) {
2109 TCGv_i64 t
= tcg_temp_ebb_new_i64();
2110 tcg_gen_sari_i64(t
, arg
, 63);
2111 tcg_gen_xor_i64(t
, t
, arg
);
2112 tcg_gen_clzi_i64(t
, t
, 64);
2113 tcg_gen_subi_i64(ret
, t
, 1);
2114 tcg_temp_free_i64(t
);
2116 gen_helper_clrsb_i64(ret
, arg
);
2120 void tcg_gen_ctpop_i64(TCGv_i64 ret
, TCGv_i64 arg1
)
2122 if (TCG_TARGET_HAS_ctpop_i64
) {
2123 tcg_gen_op2_i64(INDEX_op_ctpop_i64
, ret
, arg1
);
2124 } else if (TCG_TARGET_REG_BITS
== 32 && TCG_TARGET_HAS_ctpop_i32
) {
2125 tcg_gen_ctpop_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
2126 tcg_gen_ctpop_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
2127 tcg_gen_add_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), TCGV_HIGH(ret
));
2128 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2130 gen_helper_ctpop_i64(ret
, arg1
);
2134 void tcg_gen_rotl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2136 if (TCG_TARGET_HAS_rot_i64
) {
2137 tcg_gen_op3_i64(INDEX_op_rotl_i64
, ret
, arg1
, arg2
);
2140 t0
= tcg_temp_ebb_new_i64();
2141 t1
= tcg_temp_ebb_new_i64();
2142 tcg_gen_shl_i64(t0
, arg1
, arg2
);
2143 tcg_gen_subfi_i64(t1
, 64, arg2
);
2144 tcg_gen_shr_i64(t1
, arg1
, t1
);
2145 tcg_gen_or_i64(ret
, t0
, t1
);
2146 tcg_temp_free_i64(t0
);
2147 tcg_temp_free_i64(t1
);
2151 void tcg_gen_rotli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
2153 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
2154 /* some cases can be optimized here */
2156 tcg_gen_mov_i64(ret
, arg1
);
2157 } else if (TCG_TARGET_HAS_rot_i64
) {
2158 tcg_gen_rotl_i64(ret
, arg1
, tcg_constant_i64(arg2
));
2161 t0
= tcg_temp_ebb_new_i64();
2162 t1
= tcg_temp_ebb_new_i64();
2163 tcg_gen_shli_i64(t0
, arg1
, arg2
);
2164 tcg_gen_shri_i64(t1
, arg1
, 64 - arg2
);
2165 tcg_gen_or_i64(ret
, t0
, t1
);
2166 tcg_temp_free_i64(t0
);
2167 tcg_temp_free_i64(t1
);
2171 void tcg_gen_rotr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2173 if (TCG_TARGET_HAS_rot_i64
) {
2174 tcg_gen_op3_i64(INDEX_op_rotr_i64
, ret
, arg1
, arg2
);
2177 t0
= tcg_temp_ebb_new_i64();
2178 t1
= tcg_temp_ebb_new_i64();
2179 tcg_gen_shr_i64(t0
, arg1
, arg2
);
2180 tcg_gen_subfi_i64(t1
, 64, arg2
);
2181 tcg_gen_shl_i64(t1
, arg1
, t1
);
2182 tcg_gen_or_i64(ret
, t0
, t1
);
2183 tcg_temp_free_i64(t0
);
2184 tcg_temp_free_i64(t1
);
2188 void tcg_gen_rotri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
2190 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
2191 /* some cases can be optimized here */
2193 tcg_gen_mov_i64(ret
, arg1
);
2195 tcg_gen_rotli_i64(ret
, arg1
, 64 - arg2
);
2199 void tcg_gen_deposit_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
,
2200 unsigned int ofs
, unsigned int len
)
2205 tcg_debug_assert(ofs
< 64);
2206 tcg_debug_assert(len
> 0);
2207 tcg_debug_assert(len
<= 64);
2208 tcg_debug_assert(ofs
+ len
<= 64);
2211 tcg_gen_mov_i64(ret
, arg2
);
2214 if (TCG_TARGET_HAS_deposit_i64
&& TCG_TARGET_deposit_i64_valid(ofs
, len
)) {
2215 tcg_gen_op5ii_i64(INDEX_op_deposit_i64
, ret
, arg1
, arg2
, ofs
, len
);
2219 if (TCG_TARGET_REG_BITS
== 32) {
2221 tcg_gen_deposit_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
),
2222 TCGV_LOW(arg2
), ofs
- 32, len
);
2223 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
2226 if (ofs
+ len
<= 32) {
2227 tcg_gen_deposit_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
),
2228 TCGV_LOW(arg2
), ofs
, len
);
2229 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
2234 t1
= tcg_temp_ebb_new_i64();
2236 if (TCG_TARGET_HAS_extract2_i64
) {
2237 if (ofs
+ len
== 64) {
2238 tcg_gen_shli_i64(t1
, arg1
, len
);
2239 tcg_gen_extract2_i64(ret
, t1
, arg2
, len
);
2243 tcg_gen_extract2_i64(ret
, arg1
, arg2
, len
);
2244 tcg_gen_rotli_i64(ret
, ret
, len
);
2249 mask
= (1ull << len
) - 1;
2250 if (ofs
+ len
< 64) {
2251 tcg_gen_andi_i64(t1
, arg2
, mask
);
2252 tcg_gen_shli_i64(t1
, t1
, ofs
);
2254 tcg_gen_shli_i64(t1
, arg2
, ofs
);
2256 tcg_gen_andi_i64(ret
, arg1
, ~(mask
<< ofs
));
2257 tcg_gen_or_i64(ret
, ret
, t1
);
2259 tcg_temp_free_i64(t1
);
2262 void tcg_gen_deposit_z_i64(TCGv_i64 ret
, TCGv_i64 arg
,
2263 unsigned int ofs
, unsigned int len
)
2265 tcg_debug_assert(ofs
< 64);
2266 tcg_debug_assert(len
> 0);
2267 tcg_debug_assert(len
<= 64);
2268 tcg_debug_assert(ofs
+ len
<= 64);
2270 if (ofs
+ len
== 64) {
2271 tcg_gen_shli_i64(ret
, arg
, ofs
);
2272 } else if (ofs
== 0) {
2273 tcg_gen_andi_i64(ret
, arg
, (1ull << len
) - 1);
2274 } else if (TCG_TARGET_HAS_deposit_i64
2275 && TCG_TARGET_deposit_i64_valid(ofs
, len
)) {
2276 TCGv_i64 zero
= tcg_constant_i64(0);
2277 tcg_gen_op5ii_i64(INDEX_op_deposit_i64
, ret
, zero
, arg
, ofs
, len
);
2279 if (TCG_TARGET_REG_BITS
== 32) {
2281 tcg_gen_deposit_z_i32(TCGV_HIGH(ret
), TCGV_LOW(arg
),
2283 tcg_gen_movi_i32(TCGV_LOW(ret
), 0);
2286 if (ofs
+ len
<= 32) {
2287 tcg_gen_deposit_z_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), ofs
, len
);
2288 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2292 /* To help two-operand hosts we prefer to zero-extend first,
2293 which allows ARG to stay live. */
2296 if (TCG_TARGET_HAS_ext32u_i64
) {
2297 tcg_gen_ext32u_i64(ret
, arg
);
2298 tcg_gen_shli_i64(ret
, ret
, ofs
);
2303 if (TCG_TARGET_HAS_ext16u_i64
) {
2304 tcg_gen_ext16u_i64(ret
, arg
);
2305 tcg_gen_shli_i64(ret
, ret
, ofs
);
2310 if (TCG_TARGET_HAS_ext8u_i64
) {
2311 tcg_gen_ext8u_i64(ret
, arg
);
2312 tcg_gen_shli_i64(ret
, ret
, ofs
);
2317 /* Otherwise prefer zero-extension over AND for code size. */
2318 switch (ofs
+ len
) {
2320 if (TCG_TARGET_HAS_ext32u_i64
) {
2321 tcg_gen_shli_i64(ret
, arg
, ofs
);
2322 tcg_gen_ext32u_i64(ret
, ret
);
2327 if (TCG_TARGET_HAS_ext16u_i64
) {
2328 tcg_gen_shli_i64(ret
, arg
, ofs
);
2329 tcg_gen_ext16u_i64(ret
, ret
);
2334 if (TCG_TARGET_HAS_ext8u_i64
) {
2335 tcg_gen_shli_i64(ret
, arg
, ofs
);
2336 tcg_gen_ext8u_i64(ret
, ret
);
2341 tcg_gen_andi_i64(ret
, arg
, (1ull << len
) - 1);
2342 tcg_gen_shli_i64(ret
, ret
, ofs
);
2346 void tcg_gen_extract_i64(TCGv_i64 ret
, TCGv_i64 arg
,
2347 unsigned int ofs
, unsigned int len
)
2349 tcg_debug_assert(ofs
< 64);
2350 tcg_debug_assert(len
> 0);
2351 tcg_debug_assert(len
<= 64);
2352 tcg_debug_assert(ofs
+ len
<= 64);
2354 /* Canonicalize certain special cases, even if extract is supported. */
2355 if (ofs
+ len
== 64) {
2356 tcg_gen_shri_i64(ret
, arg
, 64 - len
);
2360 tcg_gen_andi_i64(ret
, arg
, (1ull << len
) - 1);
2364 if (TCG_TARGET_REG_BITS
== 32) {
2365 /* Look for a 32-bit extract within one of the two words. */
2367 tcg_gen_extract_i32(TCGV_LOW(ret
), TCGV_HIGH(arg
), ofs
- 32, len
);
2368 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2371 if (ofs
+ len
<= 32) {
2372 tcg_gen_extract_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), ofs
, len
);
2373 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2376 /* The field is split across two words. One double-word
2377 shift is better than two double-word shifts. */
2381 if (TCG_TARGET_HAS_extract_i64
2382 && TCG_TARGET_extract_i64_valid(ofs
, len
)) {
2383 tcg_gen_op4ii_i64(INDEX_op_extract_i64
, ret
, arg
, ofs
, len
);
2387 /* Assume that zero-extension, if available, is cheaper than a shift. */
2388 switch (ofs
+ len
) {
2390 if (TCG_TARGET_HAS_ext32u_i64
) {
2391 tcg_gen_ext32u_i64(ret
, arg
);
2392 tcg_gen_shri_i64(ret
, ret
, ofs
);
2397 if (TCG_TARGET_HAS_ext16u_i64
) {
2398 tcg_gen_ext16u_i64(ret
, arg
);
2399 tcg_gen_shri_i64(ret
, ret
, ofs
);
2404 if (TCG_TARGET_HAS_ext8u_i64
) {
2405 tcg_gen_ext8u_i64(ret
, arg
);
2406 tcg_gen_shri_i64(ret
, ret
, ofs
);
2412 /* ??? Ideally we'd know what values are available for immediate AND.
2413 Assume that 8 bits are available, plus the special cases of 16 and 32,
2414 so that we get ext8u, ext16u, and ext32u. */
2416 case 1 ... 8: case 16: case 32:
2418 tcg_gen_shri_i64(ret
, arg
, ofs
);
2419 tcg_gen_andi_i64(ret
, ret
, (1ull << len
) - 1);
2422 tcg_gen_shli_i64(ret
, arg
, 64 - len
- ofs
);
2423 tcg_gen_shri_i64(ret
, ret
, 64 - len
);
2428 void tcg_gen_sextract_i64(TCGv_i64 ret
, TCGv_i64 arg
,
2429 unsigned int ofs
, unsigned int len
)
2431 tcg_debug_assert(ofs
< 64);
2432 tcg_debug_assert(len
> 0);
2433 tcg_debug_assert(len
<= 64);
2434 tcg_debug_assert(ofs
+ len
<= 64);
2436 /* Canonicalize certain special cases, even if sextract is supported. */
2437 if (ofs
+ len
== 64) {
2438 tcg_gen_sari_i64(ret
, arg
, 64 - len
);
2444 tcg_gen_ext32s_i64(ret
, arg
);
2447 tcg_gen_ext16s_i64(ret
, arg
);
2450 tcg_gen_ext8s_i64(ret
, arg
);
2455 if (TCG_TARGET_REG_BITS
== 32) {
2456 /* Look for a 32-bit extract within one of the two words. */
2458 tcg_gen_sextract_i32(TCGV_LOW(ret
), TCGV_HIGH(arg
), ofs
- 32, len
);
2459 } else if (ofs
+ len
<= 32) {
2460 tcg_gen_sextract_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), ofs
, len
);
2461 } else if (ofs
== 0) {
2462 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
2463 tcg_gen_sextract_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
), 0, len
- 32);
2465 } else if (len
> 32) {
2466 TCGv_i32 t
= tcg_temp_ebb_new_i32();
2467 /* Extract the bits for the high word normally. */
2468 tcg_gen_sextract_i32(t
, TCGV_HIGH(arg
), ofs
+ 32, len
- 32);
2469 /* Shift the field down for the low part. */
2470 tcg_gen_shri_i64(ret
, arg
, ofs
);
2471 /* Overwrite the shift into the high part. */
2472 tcg_gen_mov_i32(TCGV_HIGH(ret
), t
);
2473 tcg_temp_free_i32(t
);
2476 /* Shift the field down for the low part, such that the
2477 field sits at the MSB. */
2478 tcg_gen_shri_i64(ret
, arg
, ofs
+ len
- 32);
2479 /* Shift the field down from the MSB, sign extending. */
2480 tcg_gen_sari_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), 32 - len
);
2482 /* Sign-extend the field from 32 bits. */
2483 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2487 if (TCG_TARGET_HAS_sextract_i64
2488 && TCG_TARGET_extract_i64_valid(ofs
, len
)) {
2489 tcg_gen_op4ii_i64(INDEX_op_sextract_i64
, ret
, arg
, ofs
, len
);
2493 /* Assume that sign-extension, if available, is cheaper than a shift. */
2494 switch (ofs
+ len
) {
2496 if (TCG_TARGET_HAS_ext32s_i64
) {
2497 tcg_gen_ext32s_i64(ret
, arg
);
2498 tcg_gen_sari_i64(ret
, ret
, ofs
);
2503 if (TCG_TARGET_HAS_ext16s_i64
) {
2504 tcg_gen_ext16s_i64(ret
, arg
);
2505 tcg_gen_sari_i64(ret
, ret
, ofs
);
2510 if (TCG_TARGET_HAS_ext8s_i64
) {
2511 tcg_gen_ext8s_i64(ret
, arg
);
2512 tcg_gen_sari_i64(ret
, ret
, ofs
);
2519 if (TCG_TARGET_HAS_ext32s_i64
) {
2520 tcg_gen_shri_i64(ret
, arg
, ofs
);
2521 tcg_gen_ext32s_i64(ret
, ret
);
2526 if (TCG_TARGET_HAS_ext16s_i64
) {
2527 tcg_gen_shri_i64(ret
, arg
, ofs
);
2528 tcg_gen_ext16s_i64(ret
, ret
);
2533 if (TCG_TARGET_HAS_ext8s_i64
) {
2534 tcg_gen_shri_i64(ret
, arg
, ofs
);
2535 tcg_gen_ext8s_i64(ret
, ret
);
2540 tcg_gen_shli_i64(ret
, arg
, 64 - len
- ofs
);
2541 tcg_gen_sari_i64(ret
, ret
, 64 - len
);
2545 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2546 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2548 void tcg_gen_extract2_i64(TCGv_i64 ret
, TCGv_i64 al
, TCGv_i64 ah
,
2551 tcg_debug_assert(ofs
<= 64);
2553 tcg_gen_mov_i64(ret
, al
);
2554 } else if (ofs
== 64) {
2555 tcg_gen_mov_i64(ret
, ah
);
2556 } else if (al
== ah
) {
2557 tcg_gen_rotri_i64(ret
, al
, ofs
);
2558 } else if (TCG_TARGET_HAS_extract2_i64
) {
2559 tcg_gen_op4i_i64(INDEX_op_extract2_i64
, ret
, al
, ah
, ofs
);
2561 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2562 tcg_gen_shri_i64(t0
, al
, ofs
);
2563 tcg_gen_deposit_i64(ret
, t0
, ah
, 64 - ofs
, ofs
);
2564 tcg_temp_free_i64(t0
);
2568 void tcg_gen_movcond_i64(TCGCond cond
, TCGv_i64 ret
, TCGv_i64 c1
,
2569 TCGv_i64 c2
, TCGv_i64 v1
, TCGv_i64 v2
)
2571 if (cond
== TCG_COND_ALWAYS
) {
2572 tcg_gen_mov_i64(ret
, v1
);
2573 } else if (cond
== TCG_COND_NEVER
) {
2574 tcg_gen_mov_i64(ret
, v2
);
2575 } else if (TCG_TARGET_REG_BITS
== 32) {
2576 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
2577 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
2578 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, t0
,
2579 TCGV_LOW(c1
), TCGV_HIGH(c1
),
2580 TCGV_LOW(c2
), TCGV_HIGH(c2
), cond
);
2582 if (TCG_TARGET_HAS_movcond_i32
) {
2583 tcg_gen_movi_i32(t1
, 0);
2584 tcg_gen_movcond_i32(TCG_COND_NE
, TCGV_LOW(ret
), t0
, t1
,
2585 TCGV_LOW(v1
), TCGV_LOW(v2
));
2586 tcg_gen_movcond_i32(TCG_COND_NE
, TCGV_HIGH(ret
), t0
, t1
,
2587 TCGV_HIGH(v1
), TCGV_HIGH(v2
));
2589 tcg_gen_neg_i32(t0
, t0
);
2591 tcg_gen_and_i32(t1
, TCGV_LOW(v1
), t0
);
2592 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(v2
), t0
);
2593 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), t1
);
2595 tcg_gen_and_i32(t1
, TCGV_HIGH(v1
), t0
);
2596 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(v2
), t0
);
2597 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(ret
), t1
);
2599 tcg_temp_free_i32(t0
);
2600 tcg_temp_free_i32(t1
);
2601 } else if (TCG_TARGET_HAS_movcond_i64
) {
2602 tcg_gen_op6i_i64(INDEX_op_movcond_i64
, ret
, c1
, c2
, v1
, v2
, cond
);
2604 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2605 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
2606 tcg_gen_negsetcond_i64(cond
, t0
, c1
, c2
);
2607 tcg_gen_and_i64(t1
, v1
, t0
);
2608 tcg_gen_andc_i64(ret
, v2
, t0
);
2609 tcg_gen_or_i64(ret
, ret
, t1
);
2610 tcg_temp_free_i64(t0
);
2611 tcg_temp_free_i64(t1
);
2615 void tcg_gen_add2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 al
,
2616 TCGv_i64 ah
, TCGv_i64 bl
, TCGv_i64 bh
)
2618 if (TCG_TARGET_HAS_add2_i64
) {
2619 tcg_gen_op6_i64(INDEX_op_add2_i64
, rl
, rh
, al
, ah
, bl
, bh
);
2621 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2622 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
2623 tcg_gen_add_i64(t0
, al
, bl
);
2624 tcg_gen_setcond_i64(TCG_COND_LTU
, t1
, t0
, al
);
2625 tcg_gen_add_i64(rh
, ah
, bh
);
2626 tcg_gen_add_i64(rh
, rh
, t1
);
2627 tcg_gen_mov_i64(rl
, t0
);
2628 tcg_temp_free_i64(t0
);
2629 tcg_temp_free_i64(t1
);
2633 void tcg_gen_sub2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 al
,
2634 TCGv_i64 ah
, TCGv_i64 bl
, TCGv_i64 bh
)
2636 if (TCG_TARGET_HAS_sub2_i64
) {
2637 tcg_gen_op6_i64(INDEX_op_sub2_i64
, rl
, rh
, al
, ah
, bl
, bh
);
2639 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2640 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
2641 tcg_gen_sub_i64(t0
, al
, bl
);
2642 tcg_gen_setcond_i64(TCG_COND_LTU
, t1
, al
, bl
);
2643 tcg_gen_sub_i64(rh
, ah
, bh
);
2644 tcg_gen_sub_i64(rh
, rh
, t1
);
2645 tcg_gen_mov_i64(rl
, t0
);
2646 tcg_temp_free_i64(t0
);
2647 tcg_temp_free_i64(t1
);
2651 void tcg_gen_mulu2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2653 if (TCG_TARGET_HAS_mulu2_i64
) {
2654 tcg_gen_op4_i64(INDEX_op_mulu2_i64
, rl
, rh
, arg1
, arg2
);
2655 } else if (TCG_TARGET_HAS_muluh_i64
) {
2656 TCGv_i64 t
= tcg_temp_ebb_new_i64();
2657 tcg_gen_op3_i64(INDEX_op_mul_i64
, t
, arg1
, arg2
);
2658 tcg_gen_op3_i64(INDEX_op_muluh_i64
, rh
, arg1
, arg2
);
2659 tcg_gen_mov_i64(rl
, t
);
2660 tcg_temp_free_i64(t
);
2662 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2663 tcg_gen_mul_i64(t0
, arg1
, arg2
);
2664 gen_helper_muluh_i64(rh
, arg1
, arg2
);
2665 tcg_gen_mov_i64(rl
, t0
);
2666 tcg_temp_free_i64(t0
);
2670 void tcg_gen_muls2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2672 if (TCG_TARGET_HAS_muls2_i64
) {
2673 tcg_gen_op4_i64(INDEX_op_muls2_i64
, rl
, rh
, arg1
, arg2
);
2674 } else if (TCG_TARGET_HAS_mulsh_i64
) {
2675 TCGv_i64 t
= tcg_temp_ebb_new_i64();
2676 tcg_gen_op3_i64(INDEX_op_mul_i64
, t
, arg1
, arg2
);
2677 tcg_gen_op3_i64(INDEX_op_mulsh_i64
, rh
, arg1
, arg2
);
2678 tcg_gen_mov_i64(rl
, t
);
2679 tcg_temp_free_i64(t
);
2680 } else if (TCG_TARGET_HAS_mulu2_i64
|| TCG_TARGET_HAS_muluh_i64
) {
2681 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2682 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
2683 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
2684 TCGv_i64 t3
= tcg_temp_ebb_new_i64();
2685 tcg_gen_mulu2_i64(t0
, t1
, arg1
, arg2
);
2686 /* Adjust for negative inputs. */
2687 tcg_gen_sari_i64(t2
, arg1
, 63);
2688 tcg_gen_sari_i64(t3
, arg2
, 63);
2689 tcg_gen_and_i64(t2
, t2
, arg2
);
2690 tcg_gen_and_i64(t3
, t3
, arg1
);
2691 tcg_gen_sub_i64(rh
, t1
, t2
);
2692 tcg_gen_sub_i64(rh
, rh
, t3
);
2693 tcg_gen_mov_i64(rl
, t0
);
2694 tcg_temp_free_i64(t0
);
2695 tcg_temp_free_i64(t1
);
2696 tcg_temp_free_i64(t2
);
2697 tcg_temp_free_i64(t3
);
2699 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2700 tcg_gen_mul_i64(t0
, arg1
, arg2
);
2701 gen_helper_mulsh_i64(rh
, arg1
, arg2
);
2702 tcg_gen_mov_i64(rl
, t0
);
2703 tcg_temp_free_i64(t0
);
2707 void tcg_gen_mulsu2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2709 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2710 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
2711 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
2712 tcg_gen_mulu2_i64(t0
, t1
, arg1
, arg2
);
2713 /* Adjust for negative input for the signed arg1. */
2714 tcg_gen_sari_i64(t2
, arg1
, 63);
2715 tcg_gen_and_i64(t2
, t2
, arg2
);
2716 tcg_gen_sub_i64(rh
, t1
, t2
);
2717 tcg_gen_mov_i64(rl
, t0
);
2718 tcg_temp_free_i64(t0
);
2719 tcg_temp_free_i64(t1
);
2720 tcg_temp_free_i64(t2
);
2723 void tcg_gen_smin_i64(TCGv_i64 ret
, TCGv_i64 a
, TCGv_i64 b
)
2725 tcg_gen_movcond_i64(TCG_COND_LT
, ret
, a
, b
, a
, b
);
2728 void tcg_gen_umin_i64(TCGv_i64 ret
, TCGv_i64 a
, TCGv_i64 b
)
2730 tcg_gen_movcond_i64(TCG_COND_LTU
, ret
, a
, b
, a
, b
);
2733 void tcg_gen_smax_i64(TCGv_i64 ret
, TCGv_i64 a
, TCGv_i64 b
)
2735 tcg_gen_movcond_i64(TCG_COND_LT
, ret
, a
, b
, b
, a
);
2738 void tcg_gen_umax_i64(TCGv_i64 ret
, TCGv_i64 a
, TCGv_i64 b
)
2740 tcg_gen_movcond_i64(TCG_COND_LTU
, ret
, a
, b
, b
, a
);
2743 void tcg_gen_abs_i64(TCGv_i64 ret
, TCGv_i64 a
)
2745 TCGv_i64 t
= tcg_temp_ebb_new_i64();
2747 tcg_gen_sari_i64(t
, a
, 63);
2748 tcg_gen_xor_i64(ret
, a
, t
);
2749 tcg_gen_sub_i64(ret
, ret
, t
);
2750 tcg_temp_free_i64(t
);
2753 /* Size changing operations. */
2755 void tcg_gen_extrl_i64_i32(TCGv_i32 ret
, TCGv_i64 arg
)
2757 if (TCG_TARGET_REG_BITS
== 32) {
2758 tcg_gen_mov_i32(ret
, TCGV_LOW(arg
));
2759 } else if (TCG_TARGET_HAS_extr_i64_i32
) {
2760 tcg_gen_op2(INDEX_op_extrl_i64_i32
,
2761 tcgv_i32_arg(ret
), tcgv_i64_arg(arg
));
2763 tcg_gen_mov_i32(ret
, (TCGv_i32
)arg
);
2767 void tcg_gen_extrh_i64_i32(TCGv_i32 ret
, TCGv_i64 arg
)
2769 if (TCG_TARGET_REG_BITS
== 32) {
2770 tcg_gen_mov_i32(ret
, TCGV_HIGH(arg
));
2771 } else if (TCG_TARGET_HAS_extr_i64_i32
) {
2772 tcg_gen_op2(INDEX_op_extrh_i64_i32
,
2773 tcgv_i32_arg(ret
), tcgv_i64_arg(arg
));
2775 TCGv_i64 t
= tcg_temp_ebb_new_i64();
2776 tcg_gen_shri_i64(t
, arg
, 32);
2777 tcg_gen_mov_i32(ret
, (TCGv_i32
)t
);
2778 tcg_temp_free_i64(t
);
2782 void tcg_gen_extu_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
2784 if (TCG_TARGET_REG_BITS
== 32) {
2785 tcg_gen_mov_i32(TCGV_LOW(ret
), arg
);
2786 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2788 tcg_gen_op2(INDEX_op_extu_i32_i64
,
2789 tcgv_i64_arg(ret
), tcgv_i32_arg(arg
));
2793 void tcg_gen_ext_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
2795 if (TCG_TARGET_REG_BITS
== 32) {
2796 tcg_gen_mov_i32(TCGV_LOW(ret
), arg
);
2797 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2799 tcg_gen_op2(INDEX_op_ext_i32_i64
,
2800 tcgv_i64_arg(ret
), tcgv_i32_arg(arg
));
2804 void tcg_gen_concat_i32_i64(TCGv_i64 dest
, TCGv_i32 low
, TCGv_i32 high
)
2808 if (TCG_TARGET_REG_BITS
== 32) {
2809 tcg_gen_mov_i32(TCGV_LOW(dest
), low
);
2810 tcg_gen_mov_i32(TCGV_HIGH(dest
), high
);
2814 tmp
= tcg_temp_ebb_new_i64();
2815 /* These extensions are only needed for type correctness.
2816 We may be able to do better given target specific information. */
2817 tcg_gen_extu_i32_i64(tmp
, high
);
2818 tcg_gen_extu_i32_i64(dest
, low
);
2819 /* If deposit is available, use it. Otherwise use the extra
2820 knowledge that we have of the zero-extensions above. */
2821 if (TCG_TARGET_HAS_deposit_i64
&& TCG_TARGET_deposit_i64_valid(32, 32)) {
2822 tcg_gen_deposit_i64(dest
, dest
, tmp
, 32, 32);
2824 tcg_gen_shli_i64(tmp
, tmp
, 32);
2825 tcg_gen_or_i64(dest
, dest
, tmp
);
2827 tcg_temp_free_i64(tmp
);
2830 void tcg_gen_extr_i64_i32(TCGv_i32 lo
, TCGv_i32 hi
, TCGv_i64 arg
)
2832 if (TCG_TARGET_REG_BITS
== 32) {
2833 tcg_gen_mov_i32(lo
, TCGV_LOW(arg
));
2834 tcg_gen_mov_i32(hi
, TCGV_HIGH(arg
));
2836 tcg_gen_extrl_i64_i32(lo
, arg
);
2837 tcg_gen_extrh_i64_i32(hi
, arg
);
2841 void tcg_gen_extr32_i64(TCGv_i64 lo
, TCGv_i64 hi
, TCGv_i64 arg
)
2843 tcg_gen_ext32u_i64(lo
, arg
);
2844 tcg_gen_shri_i64(hi
, arg
, 32);
2847 void tcg_gen_extr_i128_i64(TCGv_i64 lo
, TCGv_i64 hi
, TCGv_i128 arg
)
2849 tcg_gen_mov_i64(lo
, TCGV128_LOW(arg
));
2850 tcg_gen_mov_i64(hi
, TCGV128_HIGH(arg
));
2853 void tcg_gen_concat_i64_i128(TCGv_i128 ret
, TCGv_i64 lo
, TCGv_i64 hi
)
2855 tcg_gen_mov_i64(TCGV128_LOW(ret
), lo
);
2856 tcg_gen_mov_i64(TCGV128_HIGH(ret
), hi
);
2859 void tcg_gen_mov_i128(TCGv_i128 dst
, TCGv_i128 src
)
2862 tcg_gen_mov_i64(TCGV128_LOW(dst
), TCGV128_LOW(src
));
2863 tcg_gen_mov_i64(TCGV128_HIGH(dst
), TCGV128_HIGH(src
));
2867 /* QEMU specific operations. */
2869 void tcg_gen_exit_tb(const TranslationBlock
*tb
, unsigned idx
)
2872 * Let the jit code return the read-only version of the
2873 * TranslationBlock, so that we minimize the pc-relative
2874 * distance of the address of the exit_tb code to TB.
2875 * This will improve utilization of pc-relative address loads.
2877 * TODO: Move this to translator_loop, so that all const
2878 * TranslationBlock pointers refer to read-only memory.
2879 * This requires coordination with targets that do not use
2880 * the translator_loop.
2882 uintptr_t val
= (uintptr_t)tcg_splitwx_to_rx((void *)tb
) + idx
;
2885 tcg_debug_assert(idx
== 0);
2886 } else if (idx
<= TB_EXIT_IDXMAX
) {
2887 #ifdef CONFIG_DEBUG_TCG
2888 /* This is an exit following a goto_tb. Verify that we have
2889 seen this numbered exit before, via tcg_gen_goto_tb. */
2890 tcg_debug_assert(tcg_ctx
->goto_tb_issue_mask
& (1 << idx
));
2893 /* This is an exit via the exitreq label. */
2894 tcg_debug_assert(idx
== TB_EXIT_REQUESTED
);
2897 tcg_gen_op1i(INDEX_op_exit_tb
, val
);
2900 void tcg_gen_goto_tb(unsigned idx
)
2902 /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
2903 tcg_debug_assert(!(tcg_ctx
->gen_tb
->cflags
& CF_NO_GOTO_TB
));
2904 /* We only support two chained exits. */
2905 tcg_debug_assert(idx
<= TB_EXIT_IDXMAX
);
2906 #ifdef CONFIG_DEBUG_TCG
2907 /* Verify that we haven't seen this numbered exit before. */
2908 tcg_debug_assert((tcg_ctx
->goto_tb_issue_mask
& (1 << idx
)) == 0);
2909 tcg_ctx
->goto_tb_issue_mask
|= 1 << idx
;
2911 plugin_gen_disable_mem_helpers();
2912 tcg_gen_op1i(INDEX_op_goto_tb
, idx
);
2915 void tcg_gen_lookup_and_goto_ptr(void)
2919 if (tcg_ctx
->gen_tb
->cflags
& CF_NO_GOTO_PTR
) {
2920 tcg_gen_exit_tb(NULL
, 0);
2924 plugin_gen_disable_mem_helpers();
2925 ptr
= tcg_temp_ebb_new_ptr();
2926 gen_helper_lookup_tb_ptr(ptr
, cpu_env
);
2927 tcg_gen_op1i(INDEX_op_goto_ptr
, tcgv_ptr_arg(ptr
));
2928 tcg_temp_free_ptr(ptr
);