2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008 Fabrice Bellard
6 * Permission is hereby granted, free of charge, to any person obtaining a copy
7 * of this software and associated documentation files (the "Software"), to deal
8 * in the Software without restriction, including without limitation the rights
9 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 * copies of the Software, and to permit persons to whom the Software is
11 * furnished to do so, subject to the following conditions:
13 * The above copyright notice and this permission notice shall be included in
14 * all copies or substantial portions of the Software.
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 * THE SOFTWARE.
25 #include "qemu/osdep.h"
27 #include "tcg/tcg-temp-internal.h"
28 #include "tcg/tcg-op-common.h"
29 #include "exec/translation-block.h"
30 #include "exec/plugin-gen.h"
31 #include "tcg-internal.h"
34 void tcg_gen_op1(TCGOpcode opc
, TCGArg a1
)
36 TCGOp
*op
= tcg_emit_op(opc
, 1);
40 void tcg_gen_op2(TCGOpcode opc
, TCGArg a1
, TCGArg a2
)
42 TCGOp
*op
= tcg_emit_op(opc
, 2);
47 void tcg_gen_op3(TCGOpcode opc
, TCGArg a1
, TCGArg a2
, TCGArg a3
)
49 TCGOp
*op
= tcg_emit_op(opc
, 3);
55 void tcg_gen_op4(TCGOpcode opc
, TCGArg a1
, TCGArg a2
, TCGArg a3
, TCGArg a4
)
57 TCGOp
*op
= tcg_emit_op(opc
, 4);
64 void tcg_gen_op5(TCGOpcode opc
, TCGArg a1
, TCGArg a2
, TCGArg a3
,
67 TCGOp
*op
= tcg_emit_op(opc
, 5);
75 void tcg_gen_op6(TCGOpcode opc
, TCGArg a1
, TCGArg a2
, TCGArg a3
,
76 TCGArg a4
, TCGArg a5
, TCGArg a6
)
78 TCGOp
*op
= tcg_emit_op(opc
, 6);
89 static void add_last_as_label_use(TCGLabel
*l
)
91 TCGLabelUse
*u
= tcg_malloc(sizeof(TCGLabelUse
));
93 u
->op
= tcg_last_op();
94 QSIMPLEQ_INSERT_TAIL(&l
->branches
, u
, next
);
97 void tcg_gen_br(TCGLabel
*l
)
99 tcg_gen_op1(INDEX_op_br
, label_arg(l
));
100 add_last_as_label_use(l
);
103 void tcg_gen_mb(TCGBar mb_type
)
105 #ifdef CONFIG_USER_ONLY
106 bool parallel
= tcg_ctx
->gen_tb
->cflags
& CF_PARALLEL
;
109 * It is tempting to elide the barrier in a uniprocessor context.
110 * However, even with a single cpu we have i/o threads running in
111 * parallel, and lack of memory order can result in e.g. virtio
112 * queue entries being read incorrectly.
114 bool parallel
= true;
118 tcg_gen_op1(INDEX_op_mb
, mb_type
);
124 void tcg_gen_movi_i32(TCGv_i32 ret
, int32_t arg
)
126 tcg_gen_mov_i32(ret
, tcg_constant_i32(arg
));
129 void tcg_gen_addi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
131 /* some cases can be optimized here */
133 tcg_gen_mov_i32(ret
, arg1
);
135 tcg_gen_add_i32(ret
, arg1
, tcg_constant_i32(arg2
));
139 void tcg_gen_subfi_i32(TCGv_i32 ret
, int32_t arg1
, TCGv_i32 arg2
)
141 if (arg1
== 0 && TCG_TARGET_HAS_neg_i32
) {
142 /* Don't recurse with tcg_gen_neg_i32. */
143 tcg_gen_op2_i32(INDEX_op_neg_i32
, ret
, arg2
);
145 tcg_gen_sub_i32(ret
, tcg_constant_i32(arg1
), arg2
);
149 void tcg_gen_subi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
151 /* some cases can be optimized here */
153 tcg_gen_mov_i32(ret
, arg1
);
155 tcg_gen_sub_i32(ret
, arg1
, tcg_constant_i32(arg2
));
159 void tcg_gen_andi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
161 /* Some cases can be optimized here. */
164 tcg_gen_movi_i32(ret
, 0);
167 tcg_gen_mov_i32(ret
, arg1
);
170 /* Don't recurse with tcg_gen_ext8u_i32. */
171 if (TCG_TARGET_HAS_ext8u_i32
) {
172 tcg_gen_op2_i32(INDEX_op_ext8u_i32
, ret
, arg1
);
177 if (TCG_TARGET_HAS_ext16u_i32
) {
178 tcg_gen_op2_i32(INDEX_op_ext16u_i32
, ret
, arg1
);
184 tcg_gen_and_i32(ret
, arg1
, tcg_constant_i32(arg2
));
187 void tcg_gen_ori_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
189 /* Some cases can be optimized here. */
191 tcg_gen_movi_i32(ret
, -1);
192 } else if (arg2
== 0) {
193 tcg_gen_mov_i32(ret
, arg1
);
195 tcg_gen_or_i32(ret
, arg1
, tcg_constant_i32(arg2
));
199 void tcg_gen_xori_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
201 /* Some cases can be optimized here. */
203 tcg_gen_mov_i32(ret
, arg1
);
204 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i32
) {
205 /* Don't recurse with tcg_gen_not_i32. */
206 tcg_gen_op2_i32(INDEX_op_not_i32
, ret
, arg1
);
208 tcg_gen_xor_i32(ret
, arg1
, tcg_constant_i32(arg2
));
212 void tcg_gen_shli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
214 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
216 tcg_gen_mov_i32(ret
, arg1
);
218 tcg_gen_shl_i32(ret
, arg1
, tcg_constant_i32(arg2
));
222 void tcg_gen_shri_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
224 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
226 tcg_gen_mov_i32(ret
, arg1
);
228 tcg_gen_shr_i32(ret
, arg1
, tcg_constant_i32(arg2
));
232 void tcg_gen_sari_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
234 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
236 tcg_gen_mov_i32(ret
, arg1
);
238 tcg_gen_sar_i32(ret
, arg1
, tcg_constant_i32(arg2
));
242 void tcg_gen_brcond_i32(TCGCond cond
, TCGv_i32 arg1
, TCGv_i32 arg2
, TCGLabel
*l
)
244 if (cond
== TCG_COND_ALWAYS
) {
246 } else if (cond
!= TCG_COND_NEVER
) {
247 tcg_gen_op4ii_i32(INDEX_op_brcond_i32
, arg1
, arg2
, cond
, label_arg(l
));
248 add_last_as_label_use(l
);
252 void tcg_gen_brcondi_i32(TCGCond cond
, TCGv_i32 arg1
, int32_t arg2
, TCGLabel
*l
)
254 if (cond
== TCG_COND_ALWAYS
) {
256 } else if (cond
!= TCG_COND_NEVER
) {
257 tcg_gen_brcond_i32(cond
, arg1
, tcg_constant_i32(arg2
), l
);
261 void tcg_gen_setcond_i32(TCGCond cond
, TCGv_i32 ret
,
262 TCGv_i32 arg1
, TCGv_i32 arg2
)
264 if (cond
== TCG_COND_ALWAYS
) {
265 tcg_gen_movi_i32(ret
, 1);
266 } else if (cond
== TCG_COND_NEVER
) {
267 tcg_gen_movi_i32(ret
, 0);
269 tcg_gen_op4i_i32(INDEX_op_setcond_i32
, ret
, arg1
, arg2
, cond
);
273 void tcg_gen_setcondi_i32(TCGCond cond
, TCGv_i32 ret
,
274 TCGv_i32 arg1
, int32_t arg2
)
276 tcg_gen_setcond_i32(cond
, ret
, arg1
, tcg_constant_i32(arg2
));
279 void tcg_gen_negsetcond_i32(TCGCond cond
, TCGv_i32 ret
,
280 TCGv_i32 arg1
, TCGv_i32 arg2
)
282 if (cond
== TCG_COND_ALWAYS
) {
283 tcg_gen_movi_i32(ret
, -1);
284 } else if (cond
== TCG_COND_NEVER
) {
285 tcg_gen_movi_i32(ret
, 0);
286 } else if (TCG_TARGET_HAS_negsetcond_i32
) {
287 tcg_gen_op4i_i32(INDEX_op_negsetcond_i32
, ret
, arg1
, arg2
, cond
);
289 tcg_gen_setcond_i32(cond
, ret
, arg1
, arg2
);
290 tcg_gen_neg_i32(ret
, ret
);
294 void tcg_gen_muli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
297 tcg_gen_movi_i32(ret
, 0);
298 } else if (is_power_of_2(arg2
)) {
299 tcg_gen_shli_i32(ret
, arg1
, ctz32(arg2
));
301 tcg_gen_mul_i32(ret
, arg1
, tcg_constant_i32(arg2
));
305 void tcg_gen_div_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
307 if (TCG_TARGET_HAS_div_i32
) {
308 tcg_gen_op3_i32(INDEX_op_div_i32
, ret
, arg1
, arg2
);
309 } else if (TCG_TARGET_HAS_div2_i32
) {
310 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
311 tcg_gen_sari_i32(t0
, arg1
, 31);
312 tcg_gen_op5_i32(INDEX_op_div2_i32
, ret
, t0
, arg1
, t0
, arg2
);
313 tcg_temp_free_i32(t0
);
315 gen_helper_div_i32(ret
, arg1
, arg2
);
319 void tcg_gen_rem_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
321 if (TCG_TARGET_HAS_rem_i32
) {
322 tcg_gen_op3_i32(INDEX_op_rem_i32
, ret
, arg1
, arg2
);
323 } else if (TCG_TARGET_HAS_div_i32
) {
324 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
325 tcg_gen_op3_i32(INDEX_op_div_i32
, t0
, arg1
, arg2
);
326 tcg_gen_mul_i32(t0
, t0
, arg2
);
327 tcg_gen_sub_i32(ret
, arg1
, t0
);
328 tcg_temp_free_i32(t0
);
329 } else if (TCG_TARGET_HAS_div2_i32
) {
330 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
331 tcg_gen_sari_i32(t0
, arg1
, 31);
332 tcg_gen_op5_i32(INDEX_op_div2_i32
, t0
, ret
, arg1
, t0
, arg2
);
333 tcg_temp_free_i32(t0
);
335 gen_helper_rem_i32(ret
, arg1
, arg2
);
339 void tcg_gen_divu_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
341 if (TCG_TARGET_HAS_div_i32
) {
342 tcg_gen_op3_i32(INDEX_op_divu_i32
, ret
, arg1
, arg2
);
343 } else if (TCG_TARGET_HAS_div2_i32
) {
344 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
345 tcg_gen_movi_i32(t0
, 0);
346 tcg_gen_op5_i32(INDEX_op_divu2_i32
, ret
, t0
, arg1
, t0
, arg2
);
347 tcg_temp_free_i32(t0
);
349 gen_helper_divu_i32(ret
, arg1
, arg2
);
353 void tcg_gen_remu_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
355 if (TCG_TARGET_HAS_rem_i32
) {
356 tcg_gen_op3_i32(INDEX_op_remu_i32
, ret
, arg1
, arg2
);
357 } else if (TCG_TARGET_HAS_div_i32
) {
358 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
359 tcg_gen_op3_i32(INDEX_op_divu_i32
, t0
, arg1
, arg2
);
360 tcg_gen_mul_i32(t0
, t0
, arg2
);
361 tcg_gen_sub_i32(ret
, arg1
, t0
);
362 tcg_temp_free_i32(t0
);
363 } else if (TCG_TARGET_HAS_div2_i32
) {
364 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
365 tcg_gen_movi_i32(t0
, 0);
366 tcg_gen_op5_i32(INDEX_op_divu2_i32
, t0
, ret
, arg1
, t0
, arg2
);
367 tcg_temp_free_i32(t0
);
369 gen_helper_remu_i32(ret
, arg1
, arg2
);
373 void tcg_gen_andc_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
375 if (TCG_TARGET_HAS_andc_i32
) {
376 tcg_gen_op3_i32(INDEX_op_andc_i32
, ret
, arg1
, arg2
);
378 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
379 tcg_gen_not_i32(t0
, arg2
);
380 tcg_gen_and_i32(ret
, arg1
, t0
);
381 tcg_temp_free_i32(t0
);
385 void tcg_gen_eqv_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
387 if (TCG_TARGET_HAS_eqv_i32
) {
388 tcg_gen_op3_i32(INDEX_op_eqv_i32
, ret
, arg1
, arg2
);
390 tcg_gen_xor_i32(ret
, arg1
, arg2
);
391 tcg_gen_not_i32(ret
, ret
);
395 void tcg_gen_nand_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
397 if (TCG_TARGET_HAS_nand_i32
) {
398 tcg_gen_op3_i32(INDEX_op_nand_i32
, ret
, arg1
, arg2
);
400 tcg_gen_and_i32(ret
, arg1
, arg2
);
401 tcg_gen_not_i32(ret
, ret
);
405 void tcg_gen_nor_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
407 if (TCG_TARGET_HAS_nor_i32
) {
408 tcg_gen_op3_i32(INDEX_op_nor_i32
, ret
, arg1
, arg2
);
410 tcg_gen_or_i32(ret
, arg1
, arg2
);
411 tcg_gen_not_i32(ret
, ret
);
415 void tcg_gen_orc_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
417 if (TCG_TARGET_HAS_orc_i32
) {
418 tcg_gen_op3_i32(INDEX_op_orc_i32
, ret
, arg1
, arg2
);
420 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
421 tcg_gen_not_i32(t0
, arg2
);
422 tcg_gen_or_i32(ret
, arg1
, t0
);
423 tcg_temp_free_i32(t0
);
427 void tcg_gen_clz_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
429 if (TCG_TARGET_HAS_clz_i32
) {
430 tcg_gen_op3_i32(INDEX_op_clz_i32
, ret
, arg1
, arg2
);
431 } else if (TCG_TARGET_HAS_clz_i64
) {
432 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
433 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
434 tcg_gen_extu_i32_i64(t1
, arg1
);
435 tcg_gen_extu_i32_i64(t2
, arg2
);
436 tcg_gen_addi_i64(t2
, t2
, 32);
437 tcg_gen_clz_i64(t1
, t1
, t2
);
438 tcg_gen_extrl_i64_i32(ret
, t1
);
439 tcg_temp_free_i64(t1
);
440 tcg_temp_free_i64(t2
);
441 tcg_gen_subi_i32(ret
, ret
, 32);
443 gen_helper_clz_i32(ret
, arg1
, arg2
);
447 void tcg_gen_clzi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, uint32_t arg2
)
449 tcg_gen_clz_i32(ret
, arg1
, tcg_constant_i32(arg2
));
452 void tcg_gen_ctz_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
454 if (TCG_TARGET_HAS_ctz_i32
) {
455 tcg_gen_op3_i32(INDEX_op_ctz_i32
, ret
, arg1
, arg2
);
456 } else if (TCG_TARGET_HAS_ctz_i64
) {
457 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
458 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
459 tcg_gen_extu_i32_i64(t1
, arg1
);
460 tcg_gen_extu_i32_i64(t2
, arg2
);
461 tcg_gen_ctz_i64(t1
, t1
, t2
);
462 tcg_gen_extrl_i64_i32(ret
, t1
);
463 tcg_temp_free_i64(t1
);
464 tcg_temp_free_i64(t2
);
465 } else if (TCG_TARGET_HAS_ctpop_i32
466 || TCG_TARGET_HAS_ctpop_i64
467 || TCG_TARGET_HAS_clz_i32
468 || TCG_TARGET_HAS_clz_i64
) {
469 TCGv_i32 z
, t
= tcg_temp_ebb_new_i32();
471 if (TCG_TARGET_HAS_ctpop_i32
|| TCG_TARGET_HAS_ctpop_i64
) {
472 tcg_gen_subi_i32(t
, arg1
, 1);
473 tcg_gen_andc_i32(t
, t
, arg1
);
474 tcg_gen_ctpop_i32(t
, t
);
476 /* Since all non-x86 hosts have clz(0) == 32, don't fight it. */
477 tcg_gen_neg_i32(t
, arg1
);
478 tcg_gen_and_i32(t
, t
, arg1
);
479 tcg_gen_clzi_i32(t
, t
, 32);
480 tcg_gen_xori_i32(t
, t
, 31);
482 z
= tcg_constant_i32(0);
483 tcg_gen_movcond_i32(TCG_COND_EQ
, ret
, arg1
, z
, arg2
, t
);
484 tcg_temp_free_i32(t
);
486 gen_helper_ctz_i32(ret
, arg1
, arg2
);
490 void tcg_gen_ctzi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, uint32_t arg2
)
492 if (!TCG_TARGET_HAS_ctz_i32
&& TCG_TARGET_HAS_ctpop_i32
&& arg2
== 32) {
493 /* This equivalence has the advantage of not requiring a fixup. */
494 TCGv_i32 t
= tcg_temp_ebb_new_i32();
495 tcg_gen_subi_i32(t
, arg1
, 1);
496 tcg_gen_andc_i32(t
, t
, arg1
);
497 tcg_gen_ctpop_i32(ret
, t
);
498 tcg_temp_free_i32(t
);
500 tcg_gen_ctz_i32(ret
, arg1
, tcg_constant_i32(arg2
));
504 void tcg_gen_clrsb_i32(TCGv_i32 ret
, TCGv_i32 arg
)
506 if (TCG_TARGET_HAS_clz_i32
) {
507 TCGv_i32 t
= tcg_temp_ebb_new_i32();
508 tcg_gen_sari_i32(t
, arg
, 31);
509 tcg_gen_xor_i32(t
, t
, arg
);
510 tcg_gen_clzi_i32(t
, t
, 32);
511 tcg_gen_subi_i32(ret
, t
, 1);
512 tcg_temp_free_i32(t
);
514 gen_helper_clrsb_i32(ret
, arg
);
518 void tcg_gen_ctpop_i32(TCGv_i32 ret
, TCGv_i32 arg1
)
520 if (TCG_TARGET_HAS_ctpop_i32
) {
521 tcg_gen_op2_i32(INDEX_op_ctpop_i32
, ret
, arg1
);
522 } else if (TCG_TARGET_HAS_ctpop_i64
) {
523 TCGv_i64 t
= tcg_temp_ebb_new_i64();
524 tcg_gen_extu_i32_i64(t
, arg1
);
525 tcg_gen_ctpop_i64(t
, t
);
526 tcg_gen_extrl_i64_i32(ret
, t
);
527 tcg_temp_free_i64(t
);
529 gen_helper_ctpop_i32(ret
, arg1
);
533 void tcg_gen_rotl_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
535 if (TCG_TARGET_HAS_rot_i32
) {
536 tcg_gen_op3_i32(INDEX_op_rotl_i32
, ret
, arg1
, arg2
);
540 t0
= tcg_temp_ebb_new_i32();
541 t1
= tcg_temp_ebb_new_i32();
542 tcg_gen_shl_i32(t0
, arg1
, arg2
);
543 tcg_gen_subfi_i32(t1
, 32, arg2
);
544 tcg_gen_shr_i32(t1
, arg1
, t1
);
545 tcg_gen_or_i32(ret
, t0
, t1
);
546 tcg_temp_free_i32(t0
);
547 tcg_temp_free_i32(t1
);
551 void tcg_gen_rotli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
553 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
554 /* some cases can be optimized here */
556 tcg_gen_mov_i32(ret
, arg1
);
557 } else if (TCG_TARGET_HAS_rot_i32
) {
558 tcg_gen_rotl_i32(ret
, arg1
, tcg_constant_i32(arg2
));
561 t0
= tcg_temp_ebb_new_i32();
562 t1
= tcg_temp_ebb_new_i32();
563 tcg_gen_shli_i32(t0
, arg1
, arg2
);
564 tcg_gen_shri_i32(t1
, arg1
, 32 - arg2
);
565 tcg_gen_or_i32(ret
, t0
, t1
);
566 tcg_temp_free_i32(t0
);
567 tcg_temp_free_i32(t1
);
571 void tcg_gen_rotr_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
573 if (TCG_TARGET_HAS_rot_i32
) {
574 tcg_gen_op3_i32(INDEX_op_rotr_i32
, ret
, arg1
, arg2
);
578 t0
= tcg_temp_ebb_new_i32();
579 t1
= tcg_temp_ebb_new_i32();
580 tcg_gen_shr_i32(t0
, arg1
, arg2
);
581 tcg_gen_subfi_i32(t1
, 32, arg2
);
582 tcg_gen_shl_i32(t1
, arg1
, t1
);
583 tcg_gen_or_i32(ret
, t0
, t1
);
584 tcg_temp_free_i32(t0
);
585 tcg_temp_free_i32(t1
);
589 void tcg_gen_rotri_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
591 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
592 /* some cases can be optimized here */
594 tcg_gen_mov_i32(ret
, arg1
);
596 tcg_gen_rotli_i32(ret
, arg1
, 32 - arg2
);
600 void tcg_gen_deposit_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
,
601 unsigned int ofs
, unsigned int len
)
606 tcg_debug_assert(ofs
< 32);
607 tcg_debug_assert(len
> 0);
608 tcg_debug_assert(len
<= 32);
609 tcg_debug_assert(ofs
+ len
<= 32);
612 tcg_gen_mov_i32(ret
, arg2
);
615 if (TCG_TARGET_HAS_deposit_i32
&& TCG_TARGET_deposit_i32_valid(ofs
, len
)) {
616 tcg_gen_op5ii_i32(INDEX_op_deposit_i32
, ret
, arg1
, arg2
, ofs
, len
);
620 t1
= tcg_temp_ebb_new_i32();
622 if (TCG_TARGET_HAS_extract2_i32
) {
623 if (ofs
+ len
== 32) {
624 tcg_gen_shli_i32(t1
, arg1
, len
);
625 tcg_gen_extract2_i32(ret
, t1
, arg2
, len
);
629 tcg_gen_extract2_i32(ret
, arg1
, arg2
, len
);
630 tcg_gen_rotli_i32(ret
, ret
, len
);
635 mask
= (1u << len
) - 1;
636 if (ofs
+ len
< 32) {
637 tcg_gen_andi_i32(t1
, arg2
, mask
);
638 tcg_gen_shli_i32(t1
, t1
, ofs
);
640 tcg_gen_shli_i32(t1
, arg2
, ofs
);
642 tcg_gen_andi_i32(ret
, arg1
, ~(mask
<< ofs
));
643 tcg_gen_or_i32(ret
, ret
, t1
);
645 tcg_temp_free_i32(t1
);
648 void tcg_gen_deposit_z_i32(TCGv_i32 ret
, TCGv_i32 arg
,
649 unsigned int ofs
, unsigned int len
)
651 tcg_debug_assert(ofs
< 32);
652 tcg_debug_assert(len
> 0);
653 tcg_debug_assert(len
<= 32);
654 tcg_debug_assert(ofs
+ len
<= 32);
656 if (ofs
+ len
== 32) {
657 tcg_gen_shli_i32(ret
, arg
, ofs
);
658 } else if (ofs
== 0) {
659 tcg_gen_andi_i32(ret
, arg
, (1u << len
) - 1);
660 } else if (TCG_TARGET_HAS_deposit_i32
661 && TCG_TARGET_deposit_i32_valid(ofs
, len
)) {
662 TCGv_i32 zero
= tcg_constant_i32(0);
663 tcg_gen_op5ii_i32(INDEX_op_deposit_i32
, ret
, zero
, arg
, ofs
, len
);
665 /* To help two-operand hosts we prefer to zero-extend first,
666 which allows ARG to stay live. */
669 if (TCG_TARGET_HAS_ext16u_i32
) {
670 tcg_gen_ext16u_i32(ret
, arg
);
671 tcg_gen_shli_i32(ret
, ret
, ofs
);
676 if (TCG_TARGET_HAS_ext8u_i32
) {
677 tcg_gen_ext8u_i32(ret
, arg
);
678 tcg_gen_shli_i32(ret
, ret
, ofs
);
683 /* Otherwise prefer zero-extension over AND for code size. */
686 if (TCG_TARGET_HAS_ext16u_i32
) {
687 tcg_gen_shli_i32(ret
, arg
, ofs
);
688 tcg_gen_ext16u_i32(ret
, ret
);
693 if (TCG_TARGET_HAS_ext8u_i32
) {
694 tcg_gen_shli_i32(ret
, arg
, ofs
);
695 tcg_gen_ext8u_i32(ret
, ret
);
700 tcg_gen_andi_i32(ret
, arg
, (1u << len
) - 1);
701 tcg_gen_shli_i32(ret
, ret
, ofs
);
705 void tcg_gen_extract_i32(TCGv_i32 ret
, TCGv_i32 arg
,
706 unsigned int ofs
, unsigned int len
)
708 tcg_debug_assert(ofs
< 32);
709 tcg_debug_assert(len
> 0);
710 tcg_debug_assert(len
<= 32);
711 tcg_debug_assert(ofs
+ len
<= 32);
713 /* Canonicalize certain special cases, even if extract is supported. */
714 if (ofs
+ len
== 32) {
715 tcg_gen_shri_i32(ret
, arg
, 32 - len
);
719 tcg_gen_andi_i32(ret
, arg
, (1u << len
) - 1);
723 if (TCG_TARGET_HAS_extract_i32
724 && TCG_TARGET_extract_i32_valid(ofs
, len
)) {
725 tcg_gen_op4ii_i32(INDEX_op_extract_i32
, ret
, arg
, ofs
, len
);
729 /* Assume that zero-extension, if available, is cheaper than a shift. */
732 if (TCG_TARGET_HAS_ext16u_i32
) {
733 tcg_gen_ext16u_i32(ret
, arg
);
734 tcg_gen_shri_i32(ret
, ret
, ofs
);
739 if (TCG_TARGET_HAS_ext8u_i32
) {
740 tcg_gen_ext8u_i32(ret
, arg
);
741 tcg_gen_shri_i32(ret
, ret
, ofs
);
747 /* ??? Ideally we'd know what values are available for immediate AND.
748 Assume that 8 bits are available, plus the special case of 16,
749 so that we get ext8u, ext16u. */
751 case 1 ... 8: case 16:
752 tcg_gen_shri_i32(ret
, arg
, ofs
);
753 tcg_gen_andi_i32(ret
, ret
, (1u << len
) - 1);
756 tcg_gen_shli_i32(ret
, arg
, 32 - len
- ofs
);
757 tcg_gen_shri_i32(ret
, ret
, 32 - len
);
762 void tcg_gen_sextract_i32(TCGv_i32 ret
, TCGv_i32 arg
,
763 unsigned int ofs
, unsigned int len
)
765 tcg_debug_assert(ofs
< 32);
766 tcg_debug_assert(len
> 0);
767 tcg_debug_assert(len
<= 32);
768 tcg_debug_assert(ofs
+ len
<= 32);
770 /* Canonicalize certain special cases, even if extract is supported. */
771 if (ofs
+ len
== 32) {
772 tcg_gen_sari_i32(ret
, arg
, 32 - len
);
778 tcg_gen_ext16s_i32(ret
, arg
);
781 tcg_gen_ext8s_i32(ret
, arg
);
786 if (TCG_TARGET_HAS_sextract_i32
787 && TCG_TARGET_extract_i32_valid(ofs
, len
)) {
788 tcg_gen_op4ii_i32(INDEX_op_sextract_i32
, ret
, arg
, ofs
, len
);
792 /* Assume that sign-extension, if available, is cheaper than a shift. */
795 if (TCG_TARGET_HAS_ext16s_i32
) {
796 tcg_gen_ext16s_i32(ret
, arg
);
797 tcg_gen_sari_i32(ret
, ret
, ofs
);
802 if (TCG_TARGET_HAS_ext8s_i32
) {
803 tcg_gen_ext8s_i32(ret
, arg
);
804 tcg_gen_sari_i32(ret
, ret
, ofs
);
811 if (TCG_TARGET_HAS_ext16s_i32
) {
812 tcg_gen_shri_i32(ret
, arg
, ofs
);
813 tcg_gen_ext16s_i32(ret
, ret
);
818 if (TCG_TARGET_HAS_ext8s_i32
) {
819 tcg_gen_shri_i32(ret
, arg
, ofs
);
820 tcg_gen_ext8s_i32(ret
, ret
);
826 tcg_gen_shli_i32(ret
, arg
, 32 - len
- ofs
);
827 tcg_gen_sari_i32(ret
, ret
, 32 - len
);
831 * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
832 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
834 void tcg_gen_extract2_i32(TCGv_i32 ret
, TCGv_i32 al
, TCGv_i32 ah
,
837 tcg_debug_assert(ofs
<= 32);
839 tcg_gen_mov_i32(ret
, al
);
840 } else if (ofs
== 32) {
841 tcg_gen_mov_i32(ret
, ah
);
842 } else if (al
== ah
) {
843 tcg_gen_rotri_i32(ret
, al
, ofs
);
844 } else if (TCG_TARGET_HAS_extract2_i32
) {
845 tcg_gen_op4i_i32(INDEX_op_extract2_i32
, ret
, al
, ah
, ofs
);
847 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
848 tcg_gen_shri_i32(t0
, al
, ofs
);
849 tcg_gen_deposit_i32(ret
, t0
, ah
, 32 - ofs
, ofs
);
850 tcg_temp_free_i32(t0
);
854 void tcg_gen_movcond_i32(TCGCond cond
, TCGv_i32 ret
, TCGv_i32 c1
,
855 TCGv_i32 c2
, TCGv_i32 v1
, TCGv_i32 v2
)
857 if (cond
== TCG_COND_ALWAYS
) {
858 tcg_gen_mov_i32(ret
, v1
);
859 } else if (cond
== TCG_COND_NEVER
) {
860 tcg_gen_mov_i32(ret
, v2
);
861 } else if (TCG_TARGET_HAS_movcond_i32
) {
862 tcg_gen_op6i_i32(INDEX_op_movcond_i32
, ret
, c1
, c2
, v1
, v2
, cond
);
864 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
865 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
866 tcg_gen_negsetcond_i32(cond
, t0
, c1
, c2
);
867 tcg_gen_and_i32(t1
, v1
, t0
);
868 tcg_gen_andc_i32(ret
, v2
, t0
);
869 tcg_gen_or_i32(ret
, ret
, t1
);
870 tcg_temp_free_i32(t0
);
871 tcg_temp_free_i32(t1
);
875 void tcg_gen_add2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 al
,
876 TCGv_i32 ah
, TCGv_i32 bl
, TCGv_i32 bh
)
878 if (TCG_TARGET_HAS_add2_i32
) {
879 tcg_gen_op6_i32(INDEX_op_add2_i32
, rl
, rh
, al
, ah
, bl
, bh
);
881 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
882 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
883 tcg_gen_concat_i32_i64(t0
, al
, ah
);
884 tcg_gen_concat_i32_i64(t1
, bl
, bh
);
885 tcg_gen_add_i64(t0
, t0
, t1
);
886 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
887 tcg_temp_free_i64(t0
);
888 tcg_temp_free_i64(t1
);
892 void tcg_gen_sub2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 al
,
893 TCGv_i32 ah
, TCGv_i32 bl
, TCGv_i32 bh
)
895 if (TCG_TARGET_HAS_sub2_i32
) {
896 tcg_gen_op6_i32(INDEX_op_sub2_i32
, rl
, rh
, al
, ah
, bl
, bh
);
898 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
899 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
900 tcg_gen_concat_i32_i64(t0
, al
, ah
);
901 tcg_gen_concat_i32_i64(t1
, bl
, bh
);
902 tcg_gen_sub_i64(t0
, t0
, t1
);
903 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
904 tcg_temp_free_i64(t0
);
905 tcg_temp_free_i64(t1
);
909 void tcg_gen_mulu2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 arg1
, TCGv_i32 arg2
)
911 if (TCG_TARGET_HAS_mulu2_i32
) {
912 tcg_gen_op4_i32(INDEX_op_mulu2_i32
, rl
, rh
, arg1
, arg2
);
913 } else if (TCG_TARGET_HAS_muluh_i32
) {
914 TCGv_i32 t
= tcg_temp_ebb_new_i32();
915 tcg_gen_op3_i32(INDEX_op_mul_i32
, t
, arg1
, arg2
);
916 tcg_gen_op3_i32(INDEX_op_muluh_i32
, rh
, arg1
, arg2
);
917 tcg_gen_mov_i32(rl
, t
);
918 tcg_temp_free_i32(t
);
919 } else if (TCG_TARGET_REG_BITS
== 64) {
920 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
921 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
922 tcg_gen_extu_i32_i64(t0
, arg1
);
923 tcg_gen_extu_i32_i64(t1
, arg2
);
924 tcg_gen_mul_i64(t0
, t0
, t1
);
925 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
926 tcg_temp_free_i64(t0
);
927 tcg_temp_free_i64(t1
);
929 qemu_build_not_reached();
933 void tcg_gen_muls2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 arg1
, TCGv_i32 arg2
)
935 if (TCG_TARGET_HAS_muls2_i32
) {
936 tcg_gen_op4_i32(INDEX_op_muls2_i32
, rl
, rh
, arg1
, arg2
);
937 } else if (TCG_TARGET_HAS_mulsh_i32
) {
938 TCGv_i32 t
= tcg_temp_ebb_new_i32();
939 tcg_gen_op3_i32(INDEX_op_mul_i32
, t
, arg1
, arg2
);
940 tcg_gen_op3_i32(INDEX_op_mulsh_i32
, rh
, arg1
, arg2
);
941 tcg_gen_mov_i32(rl
, t
);
942 tcg_temp_free_i32(t
);
943 } else if (TCG_TARGET_REG_BITS
== 32) {
944 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
945 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
946 TCGv_i32 t2
= tcg_temp_ebb_new_i32();
947 TCGv_i32 t3
= tcg_temp_ebb_new_i32();
948 tcg_gen_mulu2_i32(t0
, t1
, arg1
, arg2
);
949 /* Adjust for negative inputs. */
950 tcg_gen_sari_i32(t2
, arg1
, 31);
951 tcg_gen_sari_i32(t3
, arg2
, 31);
952 tcg_gen_and_i32(t2
, t2
, arg2
);
953 tcg_gen_and_i32(t3
, t3
, arg1
);
954 tcg_gen_sub_i32(rh
, t1
, t2
);
955 tcg_gen_sub_i32(rh
, rh
, t3
);
956 tcg_gen_mov_i32(rl
, t0
);
957 tcg_temp_free_i32(t0
);
958 tcg_temp_free_i32(t1
);
959 tcg_temp_free_i32(t2
);
960 tcg_temp_free_i32(t3
);
962 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
963 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
964 tcg_gen_ext_i32_i64(t0
, arg1
);
965 tcg_gen_ext_i32_i64(t1
, arg2
);
966 tcg_gen_mul_i64(t0
, t0
, t1
);
967 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
968 tcg_temp_free_i64(t0
);
969 tcg_temp_free_i64(t1
);
973 void tcg_gen_mulsu2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 arg1
, TCGv_i32 arg2
)
975 if (TCG_TARGET_REG_BITS
== 32) {
976 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
977 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
978 TCGv_i32 t2
= tcg_temp_ebb_new_i32();
979 tcg_gen_mulu2_i32(t0
, t1
, arg1
, arg2
);
980 /* Adjust for negative input for the signed arg1. */
981 tcg_gen_sari_i32(t2
, arg1
, 31);
982 tcg_gen_and_i32(t2
, t2
, arg2
);
983 tcg_gen_sub_i32(rh
, t1
, t2
);
984 tcg_gen_mov_i32(rl
, t0
);
985 tcg_temp_free_i32(t0
);
986 tcg_temp_free_i32(t1
);
987 tcg_temp_free_i32(t2
);
989 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
990 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
991 tcg_gen_ext_i32_i64(t0
, arg1
);
992 tcg_gen_extu_i32_i64(t1
, arg2
);
993 tcg_gen_mul_i64(t0
, t0
, t1
);
994 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
995 tcg_temp_free_i64(t0
);
996 tcg_temp_free_i64(t1
);
1000 void tcg_gen_ext8s_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1002 if (TCG_TARGET_HAS_ext8s_i32
) {
1003 tcg_gen_op2_i32(INDEX_op_ext8s_i32
, ret
, arg
);
1005 tcg_gen_shli_i32(ret
, arg
, 24);
1006 tcg_gen_sari_i32(ret
, ret
, 24);
1010 void tcg_gen_ext16s_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1012 if (TCG_TARGET_HAS_ext16s_i32
) {
1013 tcg_gen_op2_i32(INDEX_op_ext16s_i32
, ret
, arg
);
1015 tcg_gen_shli_i32(ret
, arg
, 16);
1016 tcg_gen_sari_i32(ret
, ret
, 16);
1020 void tcg_gen_ext8u_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1022 if (TCG_TARGET_HAS_ext8u_i32
) {
1023 tcg_gen_op2_i32(INDEX_op_ext8u_i32
, ret
, arg
);
1025 tcg_gen_andi_i32(ret
, arg
, 0xffu
);
1029 void tcg_gen_ext16u_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1031 if (TCG_TARGET_HAS_ext16u_i32
) {
1032 tcg_gen_op2_i32(INDEX_op_ext16u_i32
, ret
, arg
);
1034 tcg_gen_andi_i32(ret
, arg
, 0xffffu
);
1039 * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
1041 * Byte pattern: xxab -> yyba
1043 * With TCG_BSWAP_IZ, x == zero, else undefined.
1044 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1046 void tcg_gen_bswap16_i32(TCGv_i32 ret
, TCGv_i32 arg
, int flags
)
1048 /* Only one extension flag may be present. */
1049 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
1051 if (TCG_TARGET_HAS_bswap16_i32
) {
1052 tcg_gen_op3i_i32(INDEX_op_bswap16_i32
, ret
, arg
, flags
);
1054 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1055 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
1057 /* arg = ..ab (IZ) xxab (!IZ) */
1058 tcg_gen_shri_i32(t0
, arg
, 8); /* t0 = ...a (IZ) .xxa (!IZ) */
1059 if (!(flags
& TCG_BSWAP_IZ
)) {
1060 tcg_gen_ext8u_i32(t0
, t0
); /* t0 = ...a */
1063 if (flags
& TCG_BSWAP_OS
) {
1064 tcg_gen_shli_i32(t1
, arg
, 24); /* t1 = b... */
1065 tcg_gen_sari_i32(t1
, t1
, 16); /* t1 = ssb. */
1066 } else if (flags
& TCG_BSWAP_OZ
) {
1067 tcg_gen_ext8u_i32(t1
, arg
); /* t1 = ...b */
1068 tcg_gen_shli_i32(t1
, t1
, 8); /* t1 = ..b. */
1070 tcg_gen_shli_i32(t1
, arg
, 8); /* t1 = xab. */
1073 tcg_gen_or_i32(ret
, t0
, t1
); /* ret = ..ba (OZ) */
1075 /* = xaba (no flag) */
1076 tcg_temp_free_i32(t0
);
1077 tcg_temp_free_i32(t1
);
1081 void tcg_gen_bswap32_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1083 if (TCG_TARGET_HAS_bswap32_i32
) {
1084 tcg_gen_op3i_i32(INDEX_op_bswap32_i32
, ret
, arg
, 0);
1086 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1087 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
1088 TCGv_i32 t2
= tcg_constant_i32(0x00ff00ff);
1091 tcg_gen_shri_i32(t0
, arg
, 8); /* t0 = .abc */
1092 tcg_gen_and_i32(t1
, arg
, t2
); /* t1 = .b.d */
1093 tcg_gen_and_i32(t0
, t0
, t2
); /* t0 = .a.c */
1094 tcg_gen_shli_i32(t1
, t1
, 8); /* t1 = b.d. */
1095 tcg_gen_or_i32(ret
, t0
, t1
); /* ret = badc */
1097 tcg_gen_shri_i32(t0
, ret
, 16); /* t0 = ..ba */
1098 tcg_gen_shli_i32(t1
, ret
, 16); /* t1 = dc.. */
1099 tcg_gen_or_i32(ret
, t0
, t1
); /* ret = dcba */
1101 tcg_temp_free_i32(t0
);
1102 tcg_temp_free_i32(t1
);
1106 void tcg_gen_hswap_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1108 /* Swapping 2 16-bit elements is a rotate. */
1109 tcg_gen_rotli_i32(ret
, arg
, 16);
1112 void tcg_gen_smin_i32(TCGv_i32 ret
, TCGv_i32 a
, TCGv_i32 b
)
1114 tcg_gen_movcond_i32(TCG_COND_LT
, ret
, a
, b
, a
, b
);
1117 void tcg_gen_umin_i32(TCGv_i32 ret
, TCGv_i32 a
, TCGv_i32 b
)
1119 tcg_gen_movcond_i32(TCG_COND_LTU
, ret
, a
, b
, a
, b
);
1122 void tcg_gen_smax_i32(TCGv_i32 ret
, TCGv_i32 a
, TCGv_i32 b
)
1124 tcg_gen_movcond_i32(TCG_COND_LT
, ret
, a
, b
, b
, a
);
1127 void tcg_gen_umax_i32(TCGv_i32 ret
, TCGv_i32 a
, TCGv_i32 b
)
1129 tcg_gen_movcond_i32(TCG_COND_LTU
, ret
, a
, b
, b
, a
);
1132 void tcg_gen_abs_i32(TCGv_i32 ret
, TCGv_i32 a
)
1134 TCGv_i32 t
= tcg_temp_ebb_new_i32();
1136 tcg_gen_sari_i32(t
, a
, 31);
1137 tcg_gen_xor_i32(ret
, a
, t
);
1138 tcg_gen_sub_i32(ret
, ret
, t
);
1139 tcg_temp_free_i32(t
);
1144 #if TCG_TARGET_REG_BITS == 32
1145 /* These are all inline for TCG_TARGET_REG_BITS == 64. */
1147 void tcg_gen_discard_i64(TCGv_i64 arg
)
1149 tcg_gen_discard_i32(TCGV_LOW(arg
));
1150 tcg_gen_discard_i32(TCGV_HIGH(arg
));
1153 void tcg_gen_mov_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1155 TCGTemp
*ts
= tcgv_i64_temp(arg
);
1157 /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
1158 if (ts
->kind
== TEMP_CONST
) {
1159 tcg_gen_movi_i64(ret
, ts
->val
);
1161 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1162 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1166 void tcg_gen_movi_i64(TCGv_i64 ret
, int64_t arg
)
1168 tcg_gen_movi_i32(TCGV_LOW(ret
), arg
);
1169 tcg_gen_movi_i32(TCGV_HIGH(ret
), arg
>> 32);
1172 void tcg_gen_ld8u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1174 tcg_gen_ld8u_i32(TCGV_LOW(ret
), arg2
, offset
);
1175 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1178 void tcg_gen_ld8s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1180 tcg_gen_ld8s_i32(TCGV_LOW(ret
), arg2
, offset
);
1181 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1184 void tcg_gen_ld16u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1186 tcg_gen_ld16u_i32(TCGV_LOW(ret
), arg2
, offset
);
1187 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1190 void tcg_gen_ld16s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1192 tcg_gen_ld16s_i32(TCGV_LOW(ret
), arg2
, offset
);
1193 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1196 void tcg_gen_ld32u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1198 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
1199 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1202 void tcg_gen_ld32s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1204 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
1205 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1208 void tcg_gen_ld_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1210 /* Since arg2 and ret have different types,
1211 they cannot be the same temporary */
1213 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
);
1214 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
+ 4);
1216 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
1217 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
+ 4);
1221 void tcg_gen_st8_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1223 tcg_gen_st8_i32(TCGV_LOW(arg1
), arg2
, offset
);
1226 void tcg_gen_st16_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1228 tcg_gen_st16_i32(TCGV_LOW(arg1
), arg2
, offset
);
1231 void tcg_gen_st32_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1233 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
1236 void tcg_gen_st_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1239 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
);
1240 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
+ 4);
1242 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
1243 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
+ 4);
1247 void tcg_gen_add_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1249 tcg_gen_add2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
), TCGV_LOW(arg1
),
1250 TCGV_HIGH(arg1
), TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1253 void tcg_gen_sub_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1255 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
), TCGV_LOW(arg1
),
1256 TCGV_HIGH(arg1
), TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1259 void tcg_gen_and_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1261 tcg_gen_and_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1262 tcg_gen_and_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1265 void tcg_gen_or_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1267 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1268 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1271 void tcg_gen_xor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1273 tcg_gen_xor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1274 tcg_gen_xor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1277 void tcg_gen_shl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1279 gen_helper_shl_i64(ret
, arg1
, arg2
);
1282 void tcg_gen_shr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1284 gen_helper_shr_i64(ret
, arg1
, arg2
);
1287 void tcg_gen_sar_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1289 gen_helper_sar_i64(ret
, arg1
, arg2
);
1292 void tcg_gen_mul_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1297 t0
= tcg_temp_ebb_new_i64();
1298 t1
= tcg_temp_ebb_new_i32();
1300 tcg_gen_mulu2_i32(TCGV_LOW(t0
), TCGV_HIGH(t0
),
1301 TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1303 tcg_gen_mul_i32(t1
, TCGV_LOW(arg1
), TCGV_HIGH(arg2
));
1304 tcg_gen_add_i32(TCGV_HIGH(t0
), TCGV_HIGH(t0
), t1
);
1305 tcg_gen_mul_i32(t1
, TCGV_HIGH(arg1
), TCGV_LOW(arg2
));
1306 tcg_gen_add_i32(TCGV_HIGH(t0
), TCGV_HIGH(t0
), t1
);
1308 tcg_gen_mov_i64(ret
, t0
);
1309 tcg_temp_free_i64(t0
);
1310 tcg_temp_free_i32(t1
);
1315 void tcg_gen_movi_i64(TCGv_i64 ret
, int64_t arg
)
1317 tcg_gen_mov_i64(ret
, tcg_constant_i64(arg
));
#endif /* TCG_TARGET_REG_BITS == 32 */
1322 void tcg_gen_addi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1324 /* some cases can be optimized here */
1326 tcg_gen_mov_i64(ret
, arg1
);
1327 } else if (TCG_TARGET_REG_BITS
== 64) {
1328 tcg_gen_add_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1330 tcg_gen_add2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1331 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1332 tcg_constant_i32(arg2
), tcg_constant_i32(arg2
>> 32));
1336 void tcg_gen_subfi_i64(TCGv_i64 ret
, int64_t arg1
, TCGv_i64 arg2
)
1338 if (arg1
== 0 && TCG_TARGET_HAS_neg_i64
) {
1339 /* Don't recurse with tcg_gen_neg_i64. */
1340 tcg_gen_op2_i64(INDEX_op_neg_i64
, ret
, arg2
);
1341 } else if (TCG_TARGET_REG_BITS
== 64) {
1342 tcg_gen_sub_i64(ret
, tcg_constant_i64(arg1
), arg2
);
1344 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1345 tcg_constant_i32(arg1
), tcg_constant_i32(arg1
>> 32),
1346 TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1350 void tcg_gen_subi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1352 /* some cases can be optimized here */
1354 tcg_gen_mov_i64(ret
, arg1
);
1355 } else if (TCG_TARGET_REG_BITS
== 64) {
1356 tcg_gen_sub_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1358 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1359 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1360 tcg_constant_i32(arg2
), tcg_constant_i32(arg2
>> 32));
1364 void tcg_gen_andi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1366 if (TCG_TARGET_REG_BITS
== 32) {
1367 tcg_gen_andi_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1368 tcg_gen_andi_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1372 /* Some cases can be optimized here. */
1375 tcg_gen_movi_i64(ret
, 0);
1378 tcg_gen_mov_i64(ret
, arg1
);
1381 /* Don't recurse with tcg_gen_ext8u_i64. */
1382 if (TCG_TARGET_HAS_ext8u_i64
) {
1383 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg1
);
1388 if (TCG_TARGET_HAS_ext16u_i64
) {
1389 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg1
);
1394 if (TCG_TARGET_HAS_ext32u_i64
) {
1395 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg1
);
1401 tcg_gen_and_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1404 void tcg_gen_ori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1406 if (TCG_TARGET_REG_BITS
== 32) {
1407 tcg_gen_ori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1408 tcg_gen_ori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1411 /* Some cases can be optimized here. */
1413 tcg_gen_movi_i64(ret
, -1);
1414 } else if (arg2
== 0) {
1415 tcg_gen_mov_i64(ret
, arg1
);
1417 tcg_gen_or_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1421 void tcg_gen_xori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1423 if (TCG_TARGET_REG_BITS
== 32) {
1424 tcg_gen_xori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1425 tcg_gen_xori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1428 /* Some cases can be optimized here. */
1430 tcg_gen_mov_i64(ret
, arg1
);
1431 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i64
) {
1432 /* Don't recurse with tcg_gen_not_i64. */
1433 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg1
);
1435 tcg_gen_xor_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1439 static inline void tcg_gen_shifti_i64(TCGv_i64 ret
, TCGv_i64 arg1
,
1440 unsigned c
, bool right
, bool arith
)
1442 tcg_debug_assert(c
< 64);
1444 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
1445 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
1446 } else if (c
>= 32) {
1450 tcg_gen_sari_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), c
);
1451 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), 31);
1453 tcg_gen_shri_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), c
);
1454 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1457 tcg_gen_shli_i32(TCGV_HIGH(ret
), TCGV_LOW(arg1
), c
);
1458 tcg_gen_movi_i32(TCGV_LOW(ret
), 0);
1461 if (TCG_TARGET_HAS_extract2_i32
) {
1462 tcg_gen_extract2_i32(TCGV_LOW(ret
),
1463 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), c
);
1465 tcg_gen_shri_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), c
);
1466 tcg_gen_deposit_i32(TCGV_LOW(ret
), TCGV_LOW(ret
),
1467 TCGV_HIGH(arg1
), 32 - c
, c
);
1470 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), c
);
1472 tcg_gen_shri_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), c
);
1475 if (TCG_TARGET_HAS_extract2_i32
) {
1476 tcg_gen_extract2_i32(TCGV_HIGH(ret
),
1477 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), 32 - c
);
1479 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1480 tcg_gen_shri_i32(t0
, TCGV_LOW(arg1
), 32 - c
);
1481 tcg_gen_deposit_i32(TCGV_HIGH(ret
), t0
,
1482 TCGV_HIGH(arg1
), c
, 32 - c
);
1483 tcg_temp_free_i32(t0
);
1485 tcg_gen_shli_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), c
);
1489 void tcg_gen_shli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1491 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1492 if (TCG_TARGET_REG_BITS
== 32) {
1493 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 0, 0);
1494 } else if (arg2
== 0) {
1495 tcg_gen_mov_i64(ret
, arg1
);
1497 tcg_gen_shl_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1501 void tcg_gen_shri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1503 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1504 if (TCG_TARGET_REG_BITS
== 32) {
1505 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 0);
1506 } else if (arg2
== 0) {
1507 tcg_gen_mov_i64(ret
, arg1
);
1509 tcg_gen_shr_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1513 void tcg_gen_sari_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1515 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1516 if (TCG_TARGET_REG_BITS
== 32) {
1517 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 1);
1518 } else if (arg2
== 0) {
1519 tcg_gen_mov_i64(ret
, arg1
);
1521 tcg_gen_sar_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1525 void tcg_gen_brcond_i64(TCGCond cond
, TCGv_i64 arg1
, TCGv_i64 arg2
, TCGLabel
*l
)
1527 if (cond
== TCG_COND_ALWAYS
) {
1529 } else if (cond
!= TCG_COND_NEVER
) {
1530 if (TCG_TARGET_REG_BITS
== 32) {
1531 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
, TCGV_LOW(arg1
),
1532 TCGV_HIGH(arg1
), TCGV_LOW(arg2
),
1533 TCGV_HIGH(arg2
), cond
, label_arg(l
));
1535 tcg_gen_op4ii_i64(INDEX_op_brcond_i64
, arg1
, arg2
, cond
,
1538 add_last_as_label_use(l
);
1542 void tcg_gen_brcondi_i64(TCGCond cond
, TCGv_i64 arg1
, int64_t arg2
, TCGLabel
*l
)
1544 if (TCG_TARGET_REG_BITS
== 64) {
1545 tcg_gen_brcond_i64(cond
, arg1
, tcg_constant_i64(arg2
), l
);
1546 } else if (cond
== TCG_COND_ALWAYS
) {
1548 } else if (cond
!= TCG_COND_NEVER
) {
1549 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
,
1550 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1551 tcg_constant_i32(arg2
),
1552 tcg_constant_i32(arg2
>> 32),
1553 cond
, label_arg(l
));
1554 add_last_as_label_use(l
);
1558 void tcg_gen_setcond_i64(TCGCond cond
, TCGv_i64 ret
,
1559 TCGv_i64 arg1
, TCGv_i64 arg2
)
1561 if (cond
== TCG_COND_ALWAYS
) {
1562 tcg_gen_movi_i64(ret
, 1);
1563 } else if (cond
== TCG_COND_NEVER
) {
1564 tcg_gen_movi_i64(ret
, 0);
1566 if (TCG_TARGET_REG_BITS
== 32) {
1567 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1568 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1569 TCGV_LOW(arg2
), TCGV_HIGH(arg2
), cond
);
1570 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1572 tcg_gen_op4i_i64(INDEX_op_setcond_i64
, ret
, arg1
, arg2
, cond
);
1577 void tcg_gen_setcondi_i64(TCGCond cond
, TCGv_i64 ret
,
1578 TCGv_i64 arg1
, int64_t arg2
)
1580 if (TCG_TARGET_REG_BITS
== 64) {
1581 tcg_gen_setcond_i64(cond
, ret
, arg1
, tcg_constant_i64(arg2
));
1582 } else if (cond
== TCG_COND_ALWAYS
) {
1583 tcg_gen_movi_i64(ret
, 1);
1584 } else if (cond
== TCG_COND_NEVER
) {
1585 tcg_gen_movi_i64(ret
, 0);
1587 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1588 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1589 tcg_constant_i32(arg2
),
1590 tcg_constant_i32(arg2
>> 32), cond
);
1591 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1595 void tcg_gen_negsetcond_i64(TCGCond cond
, TCGv_i64 ret
,
1596 TCGv_i64 arg1
, TCGv_i64 arg2
)
1598 if (cond
== TCG_COND_ALWAYS
) {
1599 tcg_gen_movi_i64(ret
, -1);
1600 } else if (cond
== TCG_COND_NEVER
) {
1601 tcg_gen_movi_i64(ret
, 0);
1602 } else if (TCG_TARGET_HAS_negsetcond_i64
) {
1603 tcg_gen_op4i_i64(INDEX_op_negsetcond_i64
, ret
, arg1
, arg2
, cond
);
1604 } else if (TCG_TARGET_REG_BITS
== 32) {
1605 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1606 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1607 TCGV_LOW(arg2
), TCGV_HIGH(arg2
), cond
);
1608 tcg_gen_neg_i32(TCGV_LOW(ret
), TCGV_LOW(ret
));
1609 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
));
1611 tcg_gen_setcond_i64(cond
, ret
, arg1
, arg2
);
1612 tcg_gen_neg_i64(ret
, ret
);
1616 void tcg_gen_muli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1619 tcg_gen_movi_i64(ret
, 0);
1620 } else if (is_power_of_2(arg2
)) {
1621 tcg_gen_shli_i64(ret
, arg1
, ctz64(arg2
));
1623 tcg_gen_mul_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1627 void tcg_gen_div_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1629 if (TCG_TARGET_HAS_div_i64
) {
1630 tcg_gen_op3_i64(INDEX_op_div_i64
, ret
, arg1
, arg2
);
1631 } else if (TCG_TARGET_HAS_div2_i64
) {
1632 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1633 tcg_gen_sari_i64(t0
, arg1
, 63);
1634 tcg_gen_op5_i64(INDEX_op_div2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1635 tcg_temp_free_i64(t0
);
1637 gen_helper_div_i64(ret
, arg1
, arg2
);
1641 void tcg_gen_rem_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1643 if (TCG_TARGET_HAS_rem_i64
) {
1644 tcg_gen_op3_i64(INDEX_op_rem_i64
, ret
, arg1
, arg2
);
1645 } else if (TCG_TARGET_HAS_div_i64
) {
1646 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1647 tcg_gen_op3_i64(INDEX_op_div_i64
, t0
, arg1
, arg2
);
1648 tcg_gen_mul_i64(t0
, t0
, arg2
);
1649 tcg_gen_sub_i64(ret
, arg1
, t0
);
1650 tcg_temp_free_i64(t0
);
1651 } else if (TCG_TARGET_HAS_div2_i64
) {
1652 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1653 tcg_gen_sari_i64(t0
, arg1
, 63);
1654 tcg_gen_op5_i64(INDEX_op_div2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1655 tcg_temp_free_i64(t0
);
1657 gen_helper_rem_i64(ret
, arg1
, arg2
);
1661 void tcg_gen_divu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1663 if (TCG_TARGET_HAS_div_i64
) {
1664 tcg_gen_op3_i64(INDEX_op_divu_i64
, ret
, arg1
, arg2
);
1665 } else if (TCG_TARGET_HAS_div2_i64
) {
1666 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1667 tcg_gen_movi_i64(t0
, 0);
1668 tcg_gen_op5_i64(INDEX_op_divu2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1669 tcg_temp_free_i64(t0
);
1671 gen_helper_divu_i64(ret
, arg1
, arg2
);
1675 void tcg_gen_remu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1677 if (TCG_TARGET_HAS_rem_i64
) {
1678 tcg_gen_op3_i64(INDEX_op_remu_i64
, ret
, arg1
, arg2
);
1679 } else if (TCG_TARGET_HAS_div_i64
) {
1680 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1681 tcg_gen_op3_i64(INDEX_op_divu_i64
, t0
, arg1
, arg2
);
1682 tcg_gen_mul_i64(t0
, t0
, arg2
);
1683 tcg_gen_sub_i64(ret
, arg1
, t0
);
1684 tcg_temp_free_i64(t0
);
1685 } else if (TCG_TARGET_HAS_div2_i64
) {
1686 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1687 tcg_gen_movi_i64(t0
, 0);
1688 tcg_gen_op5_i64(INDEX_op_divu2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1689 tcg_temp_free_i64(t0
);
1691 gen_helper_remu_i64(ret
, arg1
, arg2
);
1695 void tcg_gen_ext8s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1697 if (TCG_TARGET_REG_BITS
== 32) {
1698 tcg_gen_ext8s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1699 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1700 } else if (TCG_TARGET_HAS_ext8s_i64
) {
1701 tcg_gen_op2_i64(INDEX_op_ext8s_i64
, ret
, arg
);
1703 tcg_gen_shli_i64(ret
, arg
, 56);
1704 tcg_gen_sari_i64(ret
, ret
, 56);
1708 void tcg_gen_ext16s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1710 if (TCG_TARGET_REG_BITS
== 32) {
1711 tcg_gen_ext16s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1712 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1713 } else if (TCG_TARGET_HAS_ext16s_i64
) {
1714 tcg_gen_op2_i64(INDEX_op_ext16s_i64
, ret
, arg
);
1716 tcg_gen_shli_i64(ret
, arg
, 48);
1717 tcg_gen_sari_i64(ret
, ret
, 48);
1721 void tcg_gen_ext32s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1723 if (TCG_TARGET_REG_BITS
== 32) {
1724 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1725 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1726 } else if (TCG_TARGET_HAS_ext32s_i64
) {
1727 tcg_gen_op2_i64(INDEX_op_ext32s_i64
, ret
, arg
);
1729 tcg_gen_shli_i64(ret
, arg
, 32);
1730 tcg_gen_sari_i64(ret
, ret
, 32);
1734 void tcg_gen_ext8u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1736 if (TCG_TARGET_REG_BITS
== 32) {
1737 tcg_gen_ext8u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1738 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1739 } else if (TCG_TARGET_HAS_ext8u_i64
) {
1740 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg
);
1742 tcg_gen_andi_i64(ret
, arg
, 0xffu
);
1746 void tcg_gen_ext16u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1748 if (TCG_TARGET_REG_BITS
== 32) {
1749 tcg_gen_ext16u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1750 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1751 } else if (TCG_TARGET_HAS_ext16u_i64
) {
1752 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg
);
1754 tcg_gen_andi_i64(ret
, arg
, 0xffffu
);
1758 void tcg_gen_ext32u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1760 if (TCG_TARGET_REG_BITS
== 32) {
1761 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1762 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1763 } else if (TCG_TARGET_HAS_ext32u_i64
) {
1764 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg
);
1766 tcg_gen_andi_i64(ret
, arg
, 0xffffffffu
);
1771 * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
1773 * Byte pattern: xxxxxxxxab -> yyyyyyyyba
1775 * With TCG_BSWAP_IZ, x == zero, else undefined.
1776 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1778 void tcg_gen_bswap16_i64(TCGv_i64 ret
, TCGv_i64 arg
, int flags
)
1780 /* Only one extension flag may be present. */
1781 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
1783 if (TCG_TARGET_REG_BITS
== 32) {
1784 tcg_gen_bswap16_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), flags
);
1785 if (flags
& TCG_BSWAP_OS
) {
1786 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1788 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1790 } else if (TCG_TARGET_HAS_bswap16_i64
) {
1791 tcg_gen_op3i_i64(INDEX_op_bswap16_i64
, ret
, arg
, flags
);
1793 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1794 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1796 /* arg = ......ab or xxxxxxab */
1797 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .......a or .xxxxxxa */
1798 if (!(flags
& TCG_BSWAP_IZ
)) {
1799 tcg_gen_ext8u_i64(t0
, t0
); /* t0 = .......a */
1802 if (flags
& TCG_BSWAP_OS
) {
1803 tcg_gen_shli_i64(t1
, arg
, 56); /* t1 = b....... */
1804 tcg_gen_sari_i64(t1
, t1
, 48); /* t1 = ssssssb. */
1805 } else if (flags
& TCG_BSWAP_OZ
) {
1806 tcg_gen_ext8u_i64(t1
, arg
); /* t1 = .......b */
1807 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = ......b. */
1809 tcg_gen_shli_i64(t1
, arg
, 8); /* t1 = xxxxxab. */
1812 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ......ba (OZ) */
1814 /* xxxxxaba (no flag) */
1815 tcg_temp_free_i64(t0
);
1816 tcg_temp_free_i64(t1
);
1820 void tcg_gen_bswap32_i64(TCGv_i64 ret
, TCGv_i64 arg
, int flags
)
1822 /* Only one extension flag may be present. */
1823 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
1825 if (TCG_TARGET_REG_BITS
== 32) {
1826 tcg_gen_bswap32_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1827 if (flags
& TCG_BSWAP_OS
) {
1828 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1830 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1832 } else if (TCG_TARGET_HAS_bswap32_i64
) {
1833 tcg_gen_op3i_i64(INDEX_op_bswap32_i64
, ret
, arg
, flags
);
1835 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1836 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1837 TCGv_i64 t2
= tcg_constant_i64(0x00ff00ff);
1839 /* arg = xxxxabcd */
1840 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .xxxxabc */
1841 tcg_gen_and_i64(t1
, arg
, t2
); /* t1 = .....b.d */
1842 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = .....a.c */
1843 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = ....b.d. */
1844 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ....badc */
1846 tcg_gen_shli_i64(t1
, ret
, 48); /* t1 = dc...... */
1847 tcg_gen_shri_i64(t0
, ret
, 16); /* t0 = ......ba */
1848 if (flags
& TCG_BSWAP_OS
) {
1849 tcg_gen_sari_i64(t1
, t1
, 32); /* t1 = ssssdc.. */
1851 tcg_gen_shri_i64(t1
, t1
, 32); /* t1 = ....dc.. */
1853 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ssssdcba */
1855 tcg_temp_free_i64(t0
);
1856 tcg_temp_free_i64(t1
);
1860 void tcg_gen_bswap64_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1862 if (TCG_TARGET_REG_BITS
== 32) {
1864 t0
= tcg_temp_ebb_new_i32();
1865 t1
= tcg_temp_ebb_new_i32();
1867 tcg_gen_bswap32_i32(t0
, TCGV_LOW(arg
));
1868 tcg_gen_bswap32_i32(t1
, TCGV_HIGH(arg
));
1869 tcg_gen_mov_i32(TCGV_LOW(ret
), t1
);
1870 tcg_gen_mov_i32(TCGV_HIGH(ret
), t0
);
1871 tcg_temp_free_i32(t0
);
1872 tcg_temp_free_i32(t1
);
1873 } else if (TCG_TARGET_HAS_bswap64_i64
) {
1874 tcg_gen_op3i_i64(INDEX_op_bswap64_i64
, ret
, arg
, 0);
1876 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1877 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1878 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
1880 /* arg = abcdefgh */
1881 tcg_gen_movi_i64(t2
, 0x00ff00ff00ff00ffull
);
1882 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .abcdefg */
1883 tcg_gen_and_i64(t1
, arg
, t2
); /* t1 = .b.d.f.h */
1884 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = .a.c.e.g */
1885 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = b.d.f.h. */
1886 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = badcfehg */
1888 tcg_gen_movi_i64(t2
, 0x0000ffff0000ffffull
);
1889 tcg_gen_shri_i64(t0
, ret
, 16); /* t0 = ..badcfe */
1890 tcg_gen_and_i64(t1
, ret
, t2
); /* t1 = ..dc..hg */
1891 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = ..ba..fe */
1892 tcg_gen_shli_i64(t1
, t1
, 16); /* t1 = dc..hg.. */
1893 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = dcbahgfe */
1895 tcg_gen_shri_i64(t0
, ret
, 32); /* t0 = ....dcba */
1896 tcg_gen_shli_i64(t1
, ret
, 32); /* t1 = hgfe.... */
1897 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = hgfedcba */
1899 tcg_temp_free_i64(t0
);
1900 tcg_temp_free_i64(t1
);
1901 tcg_temp_free_i64(t2
);
1905 void tcg_gen_hswap_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1907 uint64_t m
= 0x0000ffff0000ffffull
;
1908 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1909 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1911 /* See include/qemu/bitops.h, hswap64. */
1912 tcg_gen_rotli_i64(t1
, arg
, 32);
1913 tcg_gen_andi_i64(t0
, t1
, m
);
1914 tcg_gen_shli_i64(t0
, t0
, 16);
1915 tcg_gen_shri_i64(t1
, t1
, 16);
1916 tcg_gen_andi_i64(t1
, t1
, m
);
1917 tcg_gen_or_i64(ret
, t0
, t1
);
1919 tcg_temp_free_i64(t0
);
1920 tcg_temp_free_i64(t1
);
1923 void tcg_gen_wswap_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1925 /* Swapping 2 32-bit elements is a rotate. */
1926 tcg_gen_rotli_i64(ret
, arg
, 32);
1929 void tcg_gen_not_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1931 if (TCG_TARGET_REG_BITS
== 32) {
1932 tcg_gen_not_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1933 tcg_gen_not_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1934 } else if (TCG_TARGET_HAS_not_i64
) {
1935 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg
);
1937 tcg_gen_xori_i64(ret
, arg
, -1);
1941 void tcg_gen_andc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1943 if (TCG_TARGET_REG_BITS
== 32) {
1944 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1945 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1946 } else if (TCG_TARGET_HAS_andc_i64
) {
1947 tcg_gen_op3_i64(INDEX_op_andc_i64
, ret
, arg1
, arg2
);
1949 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1950 tcg_gen_not_i64(t0
, arg2
);
1951 tcg_gen_and_i64(ret
, arg1
, t0
);
1952 tcg_temp_free_i64(t0
);
1956 void tcg_gen_eqv_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1958 if (TCG_TARGET_REG_BITS
== 32) {
1959 tcg_gen_eqv_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1960 tcg_gen_eqv_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1961 } else if (TCG_TARGET_HAS_eqv_i64
) {
1962 tcg_gen_op3_i64(INDEX_op_eqv_i64
, ret
, arg1
, arg2
);
1964 tcg_gen_xor_i64(ret
, arg1
, arg2
);
1965 tcg_gen_not_i64(ret
, ret
);
1969 void tcg_gen_nand_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1971 if (TCG_TARGET_REG_BITS
== 32) {
1972 tcg_gen_nand_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1973 tcg_gen_nand_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1974 } else if (TCG_TARGET_HAS_nand_i64
) {
1975 tcg_gen_op3_i64(INDEX_op_nand_i64
, ret
, arg1
, arg2
);
1977 tcg_gen_and_i64(ret
, arg1
, arg2
);
1978 tcg_gen_not_i64(ret
, ret
);
1982 void tcg_gen_nor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1984 if (TCG_TARGET_REG_BITS
== 32) {
1985 tcg_gen_nor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1986 tcg_gen_nor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1987 } else if (TCG_TARGET_HAS_nor_i64
) {
1988 tcg_gen_op3_i64(INDEX_op_nor_i64
, ret
, arg1
, arg2
);
1990 tcg_gen_or_i64(ret
, arg1
, arg2
);
1991 tcg_gen_not_i64(ret
, ret
);
1995 void tcg_gen_orc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1997 if (TCG_TARGET_REG_BITS
== 32) {
1998 tcg_gen_orc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1999 tcg_gen_orc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
2000 } else if (TCG_TARGET_HAS_orc_i64
) {
2001 tcg_gen_op3_i64(INDEX_op_orc_i64
, ret
, arg1
, arg2
);
2003 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2004 tcg_gen_not_i64(t0
, arg2
);
2005 tcg_gen_or_i64(ret
, arg1
, t0
);
2006 tcg_temp_free_i64(t0
);
2010 void tcg_gen_clz_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2012 if (TCG_TARGET_HAS_clz_i64
) {
2013 tcg_gen_op3_i64(INDEX_op_clz_i64
, ret
, arg1
, arg2
);
2015 gen_helper_clz_i64(ret
, arg1
, arg2
);
2019 void tcg_gen_clzi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, uint64_t arg2
)
2021 if (TCG_TARGET_REG_BITS
== 32
2022 && TCG_TARGET_HAS_clz_i32
2023 && arg2
<= 0xffffffffu
) {
2024 TCGv_i32 t
= tcg_temp_ebb_new_i32();
2025 tcg_gen_clzi_i32(t
, TCGV_LOW(arg1
), arg2
- 32);
2026 tcg_gen_addi_i32(t
, t
, 32);
2027 tcg_gen_clz_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), t
);
2028 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2029 tcg_temp_free_i32(t
);
2031 tcg_gen_clz_i64(ret
, arg1
, tcg_constant_i64(arg2
));
2035 void tcg_gen_ctz_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2037 if (TCG_TARGET_HAS_ctz_i64
) {
2038 tcg_gen_op3_i64(INDEX_op_ctz_i64
, ret
, arg1
, arg2
);
2039 } else if (TCG_TARGET_HAS_ctpop_i64
|| TCG_TARGET_HAS_clz_i64
) {
2040 TCGv_i64 z
, t
= tcg_temp_ebb_new_i64();
2042 if (TCG_TARGET_HAS_ctpop_i64
) {
2043 tcg_gen_subi_i64(t
, arg1
, 1);
2044 tcg_gen_andc_i64(t
, t
, arg1
);
2045 tcg_gen_ctpop_i64(t
, t
);
2047 /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
2048 tcg_gen_neg_i64(t
, arg1
);
2049 tcg_gen_and_i64(t
, t
, arg1
);
2050 tcg_gen_clzi_i64(t
, t
, 64);
2051 tcg_gen_xori_i64(t
, t
, 63);
2053 z
= tcg_constant_i64(0);
2054 tcg_gen_movcond_i64(TCG_COND_EQ
, ret
, arg1
, z
, arg2
, t
);
2055 tcg_temp_free_i64(t
);
2056 tcg_temp_free_i64(z
);
2058 gen_helper_ctz_i64(ret
, arg1
, arg2
);
2062 void tcg_gen_ctzi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, uint64_t arg2
)
2064 if (TCG_TARGET_REG_BITS
== 32
2065 && TCG_TARGET_HAS_ctz_i32
2066 && arg2
<= 0xffffffffu
) {
2067 TCGv_i32 t32
= tcg_temp_ebb_new_i32();
2068 tcg_gen_ctzi_i32(t32
, TCGV_HIGH(arg1
), arg2
- 32);
2069 tcg_gen_addi_i32(t32
, t32
, 32);
2070 tcg_gen_ctz_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), t32
);
2071 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2072 tcg_temp_free_i32(t32
);
2073 } else if (!TCG_TARGET_HAS_ctz_i64
2074 && TCG_TARGET_HAS_ctpop_i64
2076 /* This equivalence has the advantage of not requiring a fixup. */
2077 TCGv_i64 t
= tcg_temp_ebb_new_i64();
2078 tcg_gen_subi_i64(t
, arg1
, 1);
2079 tcg_gen_andc_i64(t
, t
, arg1
);
2080 tcg_gen_ctpop_i64(ret
, t
);
2081 tcg_temp_free_i64(t
);
2083 tcg_gen_ctz_i64(ret
, arg1
, tcg_constant_i64(arg2
));
2087 void tcg_gen_clrsb_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2089 if (TCG_TARGET_HAS_clz_i64
|| TCG_TARGET_HAS_clz_i32
) {
2090 TCGv_i64 t
= tcg_temp_ebb_new_i64();
2091 tcg_gen_sari_i64(t
, arg
, 63);
2092 tcg_gen_xor_i64(t
, t
, arg
);
2093 tcg_gen_clzi_i64(t
, t
, 64);
2094 tcg_gen_subi_i64(ret
, t
, 1);
2095 tcg_temp_free_i64(t
);
2097 gen_helper_clrsb_i64(ret
, arg
);
2101 void tcg_gen_ctpop_i64(TCGv_i64 ret
, TCGv_i64 arg1
)
2103 if (TCG_TARGET_HAS_ctpop_i64
) {
2104 tcg_gen_op2_i64(INDEX_op_ctpop_i64
, ret
, arg1
);
2105 } else if (TCG_TARGET_REG_BITS
== 32 && TCG_TARGET_HAS_ctpop_i32
) {
2106 tcg_gen_ctpop_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
2107 tcg_gen_ctpop_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
2108 tcg_gen_add_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), TCGV_HIGH(ret
));
2109 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2111 gen_helper_ctpop_i64(ret
, arg1
);
2115 void tcg_gen_rotl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2117 if (TCG_TARGET_HAS_rot_i64
) {
2118 tcg_gen_op3_i64(INDEX_op_rotl_i64
, ret
, arg1
, arg2
);
2121 t0
= tcg_temp_ebb_new_i64();
2122 t1
= tcg_temp_ebb_new_i64();
2123 tcg_gen_shl_i64(t0
, arg1
, arg2
);
2124 tcg_gen_subfi_i64(t1
, 64, arg2
);
2125 tcg_gen_shr_i64(t1
, arg1
, t1
);
2126 tcg_gen_or_i64(ret
, t0
, t1
);
2127 tcg_temp_free_i64(t0
);
2128 tcg_temp_free_i64(t1
);
2132 void tcg_gen_rotli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
2134 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
2135 /* some cases can be optimized here */
2137 tcg_gen_mov_i64(ret
, arg1
);
2138 } else if (TCG_TARGET_HAS_rot_i64
) {
2139 tcg_gen_rotl_i64(ret
, arg1
, tcg_constant_i64(arg2
));
2142 t0
= tcg_temp_ebb_new_i64();
2143 t1
= tcg_temp_ebb_new_i64();
2144 tcg_gen_shli_i64(t0
, arg1
, arg2
);
2145 tcg_gen_shri_i64(t1
, arg1
, 64 - arg2
);
2146 tcg_gen_or_i64(ret
, t0
, t1
);
2147 tcg_temp_free_i64(t0
);
2148 tcg_temp_free_i64(t1
);
2152 void tcg_gen_rotr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2154 if (TCG_TARGET_HAS_rot_i64
) {
2155 tcg_gen_op3_i64(INDEX_op_rotr_i64
, ret
, arg1
, arg2
);
2158 t0
= tcg_temp_ebb_new_i64();
2159 t1
= tcg_temp_ebb_new_i64();
2160 tcg_gen_shr_i64(t0
, arg1
, arg2
);
2161 tcg_gen_subfi_i64(t1
, 64, arg2
);
2162 tcg_gen_shl_i64(t1
, arg1
, t1
);
2163 tcg_gen_or_i64(ret
, t0
, t1
);
2164 tcg_temp_free_i64(t0
);
2165 tcg_temp_free_i64(t1
);
2169 void tcg_gen_rotri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
2171 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
2172 /* some cases can be optimized here */
2174 tcg_gen_mov_i64(ret
, arg1
);
2176 tcg_gen_rotli_i64(ret
, arg1
, 64 - arg2
);
2180 void tcg_gen_deposit_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
,
2181 unsigned int ofs
, unsigned int len
)
2186 tcg_debug_assert(ofs
< 64);
2187 tcg_debug_assert(len
> 0);
2188 tcg_debug_assert(len
<= 64);
2189 tcg_debug_assert(ofs
+ len
<= 64);
2192 tcg_gen_mov_i64(ret
, arg2
);
2195 if (TCG_TARGET_HAS_deposit_i64
&& TCG_TARGET_deposit_i64_valid(ofs
, len
)) {
2196 tcg_gen_op5ii_i64(INDEX_op_deposit_i64
, ret
, arg1
, arg2
, ofs
, len
);
2200 if (TCG_TARGET_REG_BITS
== 32) {
2202 tcg_gen_deposit_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
),
2203 TCGV_LOW(arg2
), ofs
- 32, len
);
2204 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
2207 if (ofs
+ len
<= 32) {
2208 tcg_gen_deposit_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
),
2209 TCGV_LOW(arg2
), ofs
, len
);
2210 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
2215 t1
= tcg_temp_ebb_new_i64();
2217 if (TCG_TARGET_HAS_extract2_i64
) {
2218 if (ofs
+ len
== 64) {
2219 tcg_gen_shli_i64(t1
, arg1
, len
);
2220 tcg_gen_extract2_i64(ret
, t1
, arg2
, len
);
2224 tcg_gen_extract2_i64(ret
, arg1
, arg2
, len
);
2225 tcg_gen_rotli_i64(ret
, ret
, len
);
2230 mask
= (1ull << len
) - 1;
2231 if (ofs
+ len
< 64) {
2232 tcg_gen_andi_i64(t1
, arg2
, mask
);
2233 tcg_gen_shli_i64(t1
, t1
, ofs
);
2235 tcg_gen_shli_i64(t1
, arg2
, ofs
);
2237 tcg_gen_andi_i64(ret
, arg1
, ~(mask
<< ofs
));
2238 tcg_gen_or_i64(ret
, ret
, t1
);
2240 tcg_temp_free_i64(t1
);
2243 void tcg_gen_deposit_z_i64(TCGv_i64 ret
, TCGv_i64 arg
,
2244 unsigned int ofs
, unsigned int len
)
2246 tcg_debug_assert(ofs
< 64);
2247 tcg_debug_assert(len
> 0);
2248 tcg_debug_assert(len
<= 64);
2249 tcg_debug_assert(ofs
+ len
<= 64);
2251 if (ofs
+ len
== 64) {
2252 tcg_gen_shli_i64(ret
, arg
, ofs
);
2253 } else if (ofs
== 0) {
2254 tcg_gen_andi_i64(ret
, arg
, (1ull << len
) - 1);
2255 } else if (TCG_TARGET_HAS_deposit_i64
2256 && TCG_TARGET_deposit_i64_valid(ofs
, len
)) {
2257 TCGv_i64 zero
= tcg_constant_i64(0);
2258 tcg_gen_op5ii_i64(INDEX_op_deposit_i64
, ret
, zero
, arg
, ofs
, len
);
2260 if (TCG_TARGET_REG_BITS
== 32) {
2262 tcg_gen_deposit_z_i32(TCGV_HIGH(ret
), TCGV_LOW(arg
),
2264 tcg_gen_movi_i32(TCGV_LOW(ret
), 0);
2267 if (ofs
+ len
<= 32) {
2268 tcg_gen_deposit_z_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), ofs
, len
);
2269 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2273 /* To help two-operand hosts we prefer to zero-extend first,
2274 which allows ARG to stay live. */
2277 if (TCG_TARGET_HAS_ext32u_i64
) {
2278 tcg_gen_ext32u_i64(ret
, arg
);
2279 tcg_gen_shli_i64(ret
, ret
, ofs
);
2284 if (TCG_TARGET_HAS_ext16u_i64
) {
2285 tcg_gen_ext16u_i64(ret
, arg
);
2286 tcg_gen_shli_i64(ret
, ret
, ofs
);
2291 if (TCG_TARGET_HAS_ext8u_i64
) {
2292 tcg_gen_ext8u_i64(ret
, arg
);
2293 tcg_gen_shli_i64(ret
, ret
, ofs
);
2298 /* Otherwise prefer zero-extension over AND for code size. */
2299 switch (ofs
+ len
) {
2301 if (TCG_TARGET_HAS_ext32u_i64
) {
2302 tcg_gen_shli_i64(ret
, arg
, ofs
);
2303 tcg_gen_ext32u_i64(ret
, ret
);
2308 if (TCG_TARGET_HAS_ext16u_i64
) {
2309 tcg_gen_shli_i64(ret
, arg
, ofs
);
2310 tcg_gen_ext16u_i64(ret
, ret
);
2315 if (TCG_TARGET_HAS_ext8u_i64
) {
2316 tcg_gen_shli_i64(ret
, arg
, ofs
);
2317 tcg_gen_ext8u_i64(ret
, ret
);
2322 tcg_gen_andi_i64(ret
, arg
, (1ull << len
) - 1);
2323 tcg_gen_shli_i64(ret
, ret
, ofs
);
/*
 * Unsigned bit-field extract:
 *   ret = (arg >> ofs) & ((1 << len) - 1)
 * (see the shift-and fallback).  OFS/LEN must name a valid field
 * within 64 bits.
 */
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        /* Field ends at the MSB: a logical right shift suffices.  */
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field starts at bit 0: a mask suffices.  */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        /* Position the field at the MSB, then shift it back down.  */
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
/*
 * Signed bit-field extract: place bits [ofs, ofs+len) of ARG into RET,
 * sign-extended from bit len-1 (see the shli/sari fallback).
 * OFS/LEN must name a valid field within 64 bits.
 */
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        /* Field ends at the MSB: an arithmetic right shift suffices.  */
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field at bit 0 of a standard width: use the plain extension op.  */
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally.  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    /* Generic fallback: position the field at the MSB, then
       arithmetic-shift it back down, sign extending.  */
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}
/*
 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
 */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        /* Entire result is the low word.  */
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        /* Entire result is the high word.  */
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        /* Both halves identical: this is a rotate of AL.  */
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i64) {
        tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
    } else {
        /* Fallback: low bits from AL shifted down, high bits
           deposited from AH.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}
/*
 * Conditional move: ret = (c1 cond c2) ? v1 : v2.
 * Degenerate conditions collapse to plain moves; otherwise lower to a
 * host movcond, or to an and/andc/or mask sequence built from a
 * negated setcond.
 */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        /* Compare the 64-bit pair; t0 = (c1 cond c2) as 0/1.  */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half with a 32-bit movcond on t0 != 0.  */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Turn the 0/1 flag into an all-zeros/all-ones mask,
               then blend: ret = (v1 & mask) | (v2 & ~mask).  */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Mask-and-blend fallback; negsetcond yields 0 or -1 directly.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_negsetcond_i64(cond, t0, c1, c2);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
/*
 * Double-word addition: rh:rl = ah:al + bh:bl.
 * Uses the host add2 op when available; otherwise computes the low
 * word and derives the carry from an unsigned-overflow compare.
 */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        /* Carry out of the low addition: (al + bl) < al (unsigned).  */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        /* Write rl last, via t0, so that rl may alias the inputs.  */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
/*
 * Double-word subtraction: rh:rl = ah:al - bh:bl.
 * Uses the host sub2 op when available; otherwise derives the borrow
 * from an unsigned compare of the low inputs.
 */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        /* Borrow out of the low subtraction: al < bl (unsigned).  */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        /* Write rl last, via t0, so that rl may alias the inputs.  */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
/*
 * Unsigned widening multiply: rh:rl = arg1 * arg2 (128-bit result).
 * Prefers the host mulu2 op, then mul + muluh, then an out-of-line
 * helper for the high half.
 */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Low half via t so that rl may alias the inputs of muluh.  */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* High half computed by a helper call.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
/*
 * Signed widening multiply: rh:rl = arg1 * arg2 (signed 128-bit result).
 * Prefers host muls2, then mul + mulsh, then an unsigned mulu2 with a
 * sign correction of the high half, then an out-of-line helper.
 */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Low half via t so that rl may alias the inputs of mulsh.  */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs: subtract argN from the high half
           for each negative argument (t2/t3 are sign masks).  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* High half computed by a helper call.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
/*
 * Signed-by-unsigned widening multiply: rh:rl = arg1 (signed) * arg2
 * (unsigned).  Built on mulu2 with a sign correction for arg1 only.
 */
void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();
    TCGv_i64 t2 = tcg_temp_ebb_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1:
       t2 = (arg1 < 0 ? arg2 : 0), subtracted from the high half.  */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    /* Write rl last, via t0, so that rl may alias the inputs.  */
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
/* Signed minimum: ret = (a < b signed) ? a : b.  */
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}
/* Unsigned minimum: ret = (a < b unsigned) ? a : b.  */
void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}
/* Signed maximum: ret = (a < b signed) ? b : a.  */
void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}
/* Unsigned maximum: ret = (a < b unsigned) ? b : a.  */
void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
/*
 * Absolute value via the branch-free sign-mask idiom:
 *   t = a >> 63 (all ones if negative, else zero)
 *   ret = (a ^ t) - t
 */
void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
{
    TCGv_i64 t = tcg_temp_ebb_new_i64();

    tcg_gen_sari_i64(t, a, 63);
    tcg_gen_xor_i64(ret, a, t);
    tcg_gen_sub_i64(ret, ret, t);
    tcg_temp_free_i64(t);
}
2734 /* Size changing operations. */
2736 void tcg_gen_extrl_i64_i32(TCGv_i32 ret
, TCGv_i64 arg
)
2738 if (TCG_TARGET_REG_BITS
== 32) {
2739 tcg_gen_mov_i32(ret
, TCGV_LOW(arg
));
2740 } else if (TCG_TARGET_HAS_extr_i64_i32
) {
2741 tcg_gen_op2(INDEX_op_extrl_i64_i32
,
2742 tcgv_i32_arg(ret
), tcgv_i64_arg(arg
));
2744 tcg_gen_mov_i32(ret
, (TCGv_i32
)arg
);
/*
 * Extract the high 32 bits of a 64-bit value into a 32-bit destination.
 * On a 32-bit host this is a move of the high half of the register
 * pair; otherwise use the host's extrh op, or shift down by 32 and
 * reinterpret the temp.
 */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        /* Reinterpret the shifted i64 temp as an i32.  */
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}
2763 void tcg_gen_extu_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
2765 if (TCG_TARGET_REG_BITS
== 32) {
2766 tcg_gen_mov_i32(TCGV_LOW(ret
), arg
);
2767 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2769 tcg_gen_op2(INDEX_op_extu_i32_i64
,
2770 tcgv_i64_arg(ret
), tcgv_i32_arg(arg
));
2774 void tcg_gen_ext_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
2776 if (TCG_TARGET_REG_BITS
== 32) {
2777 tcg_gen_mov_i32(TCGV_LOW(ret
), arg
);
2778 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2780 tcg_gen_op2(INDEX_op_ext_i32_i64
,
2781 tcgv_i64_arg(ret
), tcgv_i32_arg(arg
));
/*
 * Build a 64-bit value from two 32-bit halves: dest = high:low.
 * On a 32-bit host this is two moves into the register pair; otherwise
 * zero-extend both halves and combine with deposit or shift+or.
 */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_ebb_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
2811 void tcg_gen_extr_i64_i32(TCGv_i32 lo
, TCGv_i32 hi
, TCGv_i64 arg
)
2813 if (TCG_TARGET_REG_BITS
== 32) {
2814 tcg_gen_mov_i32(lo
, TCGV_LOW(arg
));
2815 tcg_gen_mov_i32(hi
, TCGV_HIGH(arg
));
2817 tcg_gen_extrl_i64_i32(lo
, arg
);
2818 tcg_gen_extrh_i64_i32(hi
, arg
);
/*
 * Split a 64-bit value into two 64-bit outputs holding its halves:
 * lo = zero-extended low 32 bits, hi = high 32 bits shifted down.
 */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
/* Split a 128-bit value into its two 64-bit halves.  */
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}
/* Build a 128-bit value from two 64-bit halves: ret = hi:lo.  */
void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}
/* Copy a 128-bit value; a self-move emits nothing.  */
void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
{
    if (dst != src) {
        tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
        tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
    }
}
2848 /* QEMU specific operations. */
/*
 * Emit the op that exits the current TB, encoding the TB pointer and
 * the exit index IDX into a single value.  TB == NULL requires
 * IDX == 0; indexes up to TB_EXIT_IDXMAX are goto_tb chain slots;
 * anything else must be TB_EXIT_REQUESTED.
 */
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
/*
 * Emit a direct-chaining jump slot for exit IDX of the current TB.
 * Each numbered slot may be emitted at most once per TB (checked in
 * debug builds via goto_tb_issue_mask).
 */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before. */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
/*
 * Look up the TB for the current CPU state via a helper and jump to it
 * indirectly; falls back to a plain exit_tb when indirect goto is
 * disabled for this TB (CF_NO_GOTO_PTR).
 */
void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    gen_helper_lookup_tb_ptr(ptr, cpu_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}