/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
25 #include "qemu/osdep.h"
27 #include "tcg/tcg-temp-internal.h"
28 #include "tcg/tcg-op-common.h"
29 #include "exec/translation-block.h"
30 #include "exec/plugin-gen.h"
31 #include "tcg-internal.h"
35 * Encourage the compiler to tail-call to a function, rather than inlining.
36 * Minimizes code size across 99 bottles of beer on the wall.
38 #define NI __attribute__((noinline))
40 void NI
tcg_gen_op1(TCGOpcode opc
, TCGArg a1
)
42 TCGOp
*op
= tcg_emit_op(opc
, 1);
46 void NI
tcg_gen_op2(TCGOpcode opc
, TCGArg a1
, TCGArg a2
)
48 TCGOp
*op
= tcg_emit_op(opc
, 2);
53 void NI
tcg_gen_op3(TCGOpcode opc
, TCGArg a1
, TCGArg a2
, TCGArg a3
)
55 TCGOp
*op
= tcg_emit_op(opc
, 3);
61 void NI
tcg_gen_op4(TCGOpcode opc
, TCGArg a1
, TCGArg a2
, TCGArg a3
, TCGArg a4
)
63 TCGOp
*op
= tcg_emit_op(opc
, 4);
70 void NI
tcg_gen_op5(TCGOpcode opc
, TCGArg a1
, TCGArg a2
, TCGArg a3
,
73 TCGOp
*op
= tcg_emit_op(opc
, 5);
81 void NI
tcg_gen_op6(TCGOpcode opc
, TCGArg a1
, TCGArg a2
, TCGArg a3
,
82 TCGArg a4
, TCGArg a5
, TCGArg a6
)
84 TCGOp
*op
= tcg_emit_op(opc
, 6);
/*
 * With CONFIG_DEBUG_TCG, tcgv_*_tmp via tcgv_*_arg, is an out-of-line
 * assertion check.  Force tail calls to avoid too much code expansion.
 */
#ifdef CONFIG_DEBUG_TCG
#define DNI NI
#else
#define DNI
#endif
103 static void DNI
tcg_gen_op1_i32(TCGOpcode opc
, TCGv_i32 a1
)
105 tcg_gen_op1(opc
, tcgv_i32_arg(a1
));
108 static void DNI
tcg_gen_op1_i64(TCGOpcode opc
, TCGv_i64 a1
)
110 tcg_gen_op1(opc
, tcgv_i64_arg(a1
));
113 static void DNI
tcg_gen_op1i(TCGOpcode opc
, TCGArg a1
)
115 tcg_gen_op1(opc
, a1
);
118 static void DNI
tcg_gen_op2_i32(TCGOpcode opc
, TCGv_i32 a1
, TCGv_i32 a2
)
120 tcg_gen_op2(opc
, tcgv_i32_arg(a1
), tcgv_i32_arg(a2
));
123 static void DNI
tcg_gen_op2_i64(TCGOpcode opc
, TCGv_i64 a1
, TCGv_i64 a2
)
125 tcg_gen_op2(opc
, tcgv_i64_arg(a1
), tcgv_i64_arg(a2
));
128 static void DNI
tcg_gen_op3_i32(TCGOpcode opc
, TCGv_i32 a1
,
129 TCGv_i32 a2
, TCGv_i32 a3
)
131 tcg_gen_op3(opc
, tcgv_i32_arg(a1
), tcgv_i32_arg(a2
), tcgv_i32_arg(a3
));
134 static void DNI
tcg_gen_op3_i64(TCGOpcode opc
, TCGv_i64 a1
,
135 TCGv_i64 a2
, TCGv_i64 a3
)
137 tcg_gen_op3(opc
, tcgv_i64_arg(a1
), tcgv_i64_arg(a2
), tcgv_i64_arg(a3
));
140 static void DNI
tcg_gen_op3i_i32(TCGOpcode opc
, TCGv_i32 a1
,
141 TCGv_i32 a2
, TCGArg a3
)
143 tcg_gen_op3(opc
, tcgv_i32_arg(a1
), tcgv_i32_arg(a2
), a3
);
146 static void DNI
tcg_gen_op3i_i64(TCGOpcode opc
, TCGv_i64 a1
,
147 TCGv_i64 a2
, TCGArg a3
)
149 tcg_gen_op3(opc
, tcgv_i64_arg(a1
), tcgv_i64_arg(a2
), a3
);
152 static void DNI
tcg_gen_ldst_op_i32(TCGOpcode opc
, TCGv_i32 val
,
153 TCGv_ptr base
, TCGArg offset
)
155 tcg_gen_op3(opc
, tcgv_i32_arg(val
), tcgv_ptr_arg(base
), offset
);
158 static void DNI
tcg_gen_ldst_op_i64(TCGOpcode opc
, TCGv_i64 val
,
159 TCGv_ptr base
, TCGArg offset
)
161 tcg_gen_op3(opc
, tcgv_i64_arg(val
), tcgv_ptr_arg(base
), offset
);
164 static void DNI
tcg_gen_op4_i32(TCGOpcode opc
, TCGv_i32 a1
, TCGv_i32 a2
,
165 TCGv_i32 a3
, TCGv_i32 a4
)
167 tcg_gen_op4(opc
, tcgv_i32_arg(a1
), tcgv_i32_arg(a2
),
168 tcgv_i32_arg(a3
), tcgv_i32_arg(a4
));
171 static void DNI
tcg_gen_op4_i64(TCGOpcode opc
, TCGv_i64 a1
, TCGv_i64 a2
,
172 TCGv_i64 a3
, TCGv_i64 a4
)
174 tcg_gen_op4(opc
, tcgv_i64_arg(a1
), tcgv_i64_arg(a2
),
175 tcgv_i64_arg(a3
), tcgv_i64_arg(a4
));
178 static void DNI
tcg_gen_op4i_i32(TCGOpcode opc
, TCGv_i32 a1
, TCGv_i32 a2
,
179 TCGv_i32 a3
, TCGArg a4
)
181 tcg_gen_op4(opc
, tcgv_i32_arg(a1
), tcgv_i32_arg(a2
),
182 tcgv_i32_arg(a3
), a4
);
185 static void DNI
tcg_gen_op4i_i64(TCGOpcode opc
, TCGv_i64 a1
, TCGv_i64 a2
,
186 TCGv_i64 a3
, TCGArg a4
)
188 tcg_gen_op4(opc
, tcgv_i64_arg(a1
), tcgv_i64_arg(a2
),
189 tcgv_i64_arg(a3
), a4
);
192 static void DNI
tcg_gen_op4ii_i32(TCGOpcode opc
, TCGv_i32 a1
, TCGv_i32 a2
,
193 TCGArg a3
, TCGArg a4
)
195 tcg_gen_op4(opc
, tcgv_i32_arg(a1
), tcgv_i32_arg(a2
), a3
, a4
);
198 static void DNI
tcg_gen_op4ii_i64(TCGOpcode opc
, TCGv_i64 a1
, TCGv_i64 a2
,
199 TCGArg a3
, TCGArg a4
)
201 tcg_gen_op4(opc
, tcgv_i64_arg(a1
), tcgv_i64_arg(a2
), a3
, a4
);
204 static void DNI
tcg_gen_op5_i32(TCGOpcode opc
, TCGv_i32 a1
, TCGv_i32 a2
,
205 TCGv_i32 a3
, TCGv_i32 a4
, TCGv_i32 a5
)
207 tcg_gen_op5(opc
, tcgv_i32_arg(a1
), tcgv_i32_arg(a2
),
208 tcgv_i32_arg(a3
), tcgv_i32_arg(a4
), tcgv_i32_arg(a5
));
211 static void DNI
tcg_gen_op5_i64(TCGOpcode opc
, TCGv_i64 a1
, TCGv_i64 a2
,
212 TCGv_i64 a3
, TCGv_i64 a4
, TCGv_i64 a5
)
214 tcg_gen_op5(opc
, tcgv_i64_arg(a1
), tcgv_i64_arg(a2
),
215 tcgv_i64_arg(a3
), tcgv_i64_arg(a4
), tcgv_i64_arg(a5
));
218 static void DNI
tcg_gen_op5ii_i32(TCGOpcode opc
, TCGv_i32 a1
, TCGv_i32 a2
,
219 TCGv_i32 a3
, TCGArg a4
, TCGArg a5
)
221 tcg_gen_op5(opc
, tcgv_i32_arg(a1
), tcgv_i32_arg(a2
),
222 tcgv_i32_arg(a3
), a4
, a5
);
225 static void DNI
tcg_gen_op5ii_i64(TCGOpcode opc
, TCGv_i64 a1
, TCGv_i64 a2
,
226 TCGv_i64 a3
, TCGArg a4
, TCGArg a5
)
228 tcg_gen_op5(opc
, tcgv_i64_arg(a1
), tcgv_i64_arg(a2
),
229 tcgv_i64_arg(a3
), a4
, a5
);
232 static void DNI
tcg_gen_op6_i32(TCGOpcode opc
, TCGv_i32 a1
, TCGv_i32 a2
,
233 TCGv_i32 a3
, TCGv_i32 a4
,
234 TCGv_i32 a5
, TCGv_i32 a6
)
236 tcg_gen_op6(opc
, tcgv_i32_arg(a1
), tcgv_i32_arg(a2
),
237 tcgv_i32_arg(a3
), tcgv_i32_arg(a4
), tcgv_i32_arg(a5
),
241 static void DNI
tcg_gen_op6_i64(TCGOpcode opc
, TCGv_i64 a1
, TCGv_i64 a2
,
242 TCGv_i64 a3
, TCGv_i64 a4
,
243 TCGv_i64 a5
, TCGv_i64 a6
)
245 tcg_gen_op6(opc
, tcgv_i64_arg(a1
), tcgv_i64_arg(a2
),
246 tcgv_i64_arg(a3
), tcgv_i64_arg(a4
), tcgv_i64_arg(a5
),
250 static void DNI
tcg_gen_op6i_i32(TCGOpcode opc
, TCGv_i32 a1
, TCGv_i32 a2
,
251 TCGv_i32 a3
, TCGv_i32 a4
,
252 TCGv_i32 a5
, TCGArg a6
)
254 tcg_gen_op6(opc
, tcgv_i32_arg(a1
), tcgv_i32_arg(a2
),
255 tcgv_i32_arg(a3
), tcgv_i32_arg(a4
), tcgv_i32_arg(a5
), a6
);
258 static void DNI
tcg_gen_op6i_i64(TCGOpcode opc
, TCGv_i64 a1
, TCGv_i64 a2
,
259 TCGv_i64 a3
, TCGv_i64 a4
,
260 TCGv_i64 a5
, TCGArg a6
)
262 tcg_gen_op6(opc
, tcgv_i64_arg(a1
), tcgv_i64_arg(a2
),
263 tcgv_i64_arg(a3
), tcgv_i64_arg(a4
), tcgv_i64_arg(a5
), a6
);
266 static void DNI
tcg_gen_op6ii_i32(TCGOpcode opc
, TCGv_i32 a1
, TCGv_i32 a2
,
267 TCGv_i32 a3
, TCGv_i32 a4
,
268 TCGArg a5
, TCGArg a6
)
270 tcg_gen_op6(opc
, tcgv_i32_arg(a1
), tcgv_i32_arg(a2
),
271 tcgv_i32_arg(a3
), tcgv_i32_arg(a4
), a5
, a6
);
276 void gen_set_label(TCGLabel
*l
)
279 tcg_gen_op1(INDEX_op_set_label
, label_arg(l
));
282 static void add_last_as_label_use(TCGLabel
*l
)
284 TCGLabelUse
*u
= tcg_malloc(sizeof(TCGLabelUse
));
286 u
->op
= tcg_last_op();
287 QSIMPLEQ_INSERT_TAIL(&l
->branches
, u
, next
);
290 void tcg_gen_br(TCGLabel
*l
)
292 tcg_gen_op1(INDEX_op_br
, label_arg(l
));
293 add_last_as_label_use(l
);
296 void tcg_gen_mb(TCGBar mb_type
)
298 #ifdef CONFIG_USER_ONLY
299 bool parallel
= tcg_ctx
->gen_tb
->cflags
& CF_PARALLEL
;
302 * It is tempting to elide the barrier in a uniprocessor context.
303 * However, even with a single cpu we have i/o threads running in
304 * parallel, and lack of memory order can result in e.g. virtio
305 * queue entries being read incorrectly.
307 bool parallel
= true;
311 tcg_gen_op1(INDEX_op_mb
, mb_type
);
315 void tcg_gen_plugin_cb_start(unsigned from
, unsigned type
, unsigned wr
)
317 tcg_gen_op3(INDEX_op_plugin_cb_start
, from
, type
, wr
);
320 void tcg_gen_plugin_cb_end(void)
322 tcg_emit_op(INDEX_op_plugin_cb_end
, 0);
327 void tcg_gen_discard_i32(TCGv_i32 arg
)
329 tcg_gen_op1_i32(INDEX_op_discard
, arg
);
332 void tcg_gen_mov_i32(TCGv_i32 ret
, TCGv_i32 arg
)
335 tcg_gen_op2_i32(INDEX_op_mov_i32
, ret
, arg
);
339 void tcg_gen_movi_i32(TCGv_i32 ret
, int32_t arg
)
341 tcg_gen_mov_i32(ret
, tcg_constant_i32(arg
));
344 void tcg_gen_add_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
346 tcg_gen_op3_i32(INDEX_op_add_i32
, ret
, arg1
, arg2
);
349 void tcg_gen_addi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
351 /* some cases can be optimized here */
353 tcg_gen_mov_i32(ret
, arg1
);
355 tcg_gen_add_i32(ret
, arg1
, tcg_constant_i32(arg2
));
359 void tcg_gen_sub_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
361 tcg_gen_op3_i32(INDEX_op_sub_i32
, ret
, arg1
, arg2
);
364 void tcg_gen_subfi_i32(TCGv_i32 ret
, int32_t arg1
, TCGv_i32 arg2
)
366 if (arg1
== 0 && TCG_TARGET_HAS_neg_i32
) {
367 /* Don't recurse with tcg_gen_neg_i32. */
368 tcg_gen_op2_i32(INDEX_op_neg_i32
, ret
, arg2
);
370 tcg_gen_sub_i32(ret
, tcg_constant_i32(arg1
), arg2
);
374 void tcg_gen_subi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
376 /* some cases can be optimized here */
378 tcg_gen_mov_i32(ret
, arg1
);
380 tcg_gen_sub_i32(ret
, arg1
, tcg_constant_i32(arg2
));
384 void tcg_gen_neg_i32(TCGv_i32 ret
, TCGv_i32 arg
)
386 if (TCG_TARGET_HAS_neg_i32
) {
387 tcg_gen_op2_i32(INDEX_op_neg_i32
, ret
, arg
);
389 tcg_gen_subfi_i32(ret
, 0, arg
);
393 void tcg_gen_and_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
395 tcg_gen_op3_i32(INDEX_op_and_i32
, ret
, arg1
, arg2
);
398 void tcg_gen_andi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
400 /* Some cases can be optimized here. */
403 tcg_gen_movi_i32(ret
, 0);
406 tcg_gen_mov_i32(ret
, arg1
);
409 /* Don't recurse with tcg_gen_ext8u_i32. */
410 if (TCG_TARGET_HAS_ext8u_i32
) {
411 tcg_gen_op2_i32(INDEX_op_ext8u_i32
, ret
, arg1
);
416 if (TCG_TARGET_HAS_ext16u_i32
) {
417 tcg_gen_op2_i32(INDEX_op_ext16u_i32
, ret
, arg1
);
423 tcg_gen_and_i32(ret
, arg1
, tcg_constant_i32(arg2
));
426 void tcg_gen_or_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
428 tcg_gen_op3_i32(INDEX_op_or_i32
, ret
, arg1
, arg2
);
431 void tcg_gen_ori_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
433 /* Some cases can be optimized here. */
435 tcg_gen_movi_i32(ret
, -1);
436 } else if (arg2
== 0) {
437 tcg_gen_mov_i32(ret
, arg1
);
439 tcg_gen_or_i32(ret
, arg1
, tcg_constant_i32(arg2
));
443 void tcg_gen_xor_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
445 tcg_gen_op3_i32(INDEX_op_xor_i32
, ret
, arg1
, arg2
);
448 void tcg_gen_xori_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
450 /* Some cases can be optimized here. */
452 tcg_gen_mov_i32(ret
, arg1
);
453 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i32
) {
454 /* Don't recurse with tcg_gen_not_i32. */
455 tcg_gen_op2_i32(INDEX_op_not_i32
, ret
, arg1
);
457 tcg_gen_xor_i32(ret
, arg1
, tcg_constant_i32(arg2
));
461 void tcg_gen_not_i32(TCGv_i32 ret
, TCGv_i32 arg
)
463 if (TCG_TARGET_HAS_not_i32
) {
464 tcg_gen_op2_i32(INDEX_op_not_i32
, ret
, arg
);
466 tcg_gen_xori_i32(ret
, arg
, -1);
470 void tcg_gen_shl_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
472 tcg_gen_op3_i32(INDEX_op_shl_i32
, ret
, arg1
, arg2
);
475 void tcg_gen_shli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
477 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
479 tcg_gen_mov_i32(ret
, arg1
);
481 tcg_gen_shl_i32(ret
, arg1
, tcg_constant_i32(arg2
));
485 void tcg_gen_shr_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
487 tcg_gen_op3_i32(INDEX_op_shr_i32
, ret
, arg1
, arg2
);
490 void tcg_gen_shri_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
492 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
494 tcg_gen_mov_i32(ret
, arg1
);
496 tcg_gen_shr_i32(ret
, arg1
, tcg_constant_i32(arg2
));
500 void tcg_gen_sar_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
502 tcg_gen_op3_i32(INDEX_op_sar_i32
, ret
, arg1
, arg2
);
505 void tcg_gen_sari_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
507 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
509 tcg_gen_mov_i32(ret
, arg1
);
511 tcg_gen_sar_i32(ret
, arg1
, tcg_constant_i32(arg2
));
515 void tcg_gen_brcond_i32(TCGCond cond
, TCGv_i32 arg1
, TCGv_i32 arg2
, TCGLabel
*l
)
517 if (cond
== TCG_COND_ALWAYS
) {
519 } else if (cond
!= TCG_COND_NEVER
) {
520 tcg_gen_op4ii_i32(INDEX_op_brcond_i32
, arg1
, arg2
, cond
, label_arg(l
));
521 add_last_as_label_use(l
);
525 void tcg_gen_brcondi_i32(TCGCond cond
, TCGv_i32 arg1
, int32_t arg2
, TCGLabel
*l
)
527 if (cond
== TCG_COND_ALWAYS
) {
529 } else if (cond
!= TCG_COND_NEVER
) {
530 tcg_gen_brcond_i32(cond
, arg1
, tcg_constant_i32(arg2
), l
);
534 void tcg_gen_setcond_i32(TCGCond cond
, TCGv_i32 ret
,
535 TCGv_i32 arg1
, TCGv_i32 arg2
)
537 if (cond
== TCG_COND_ALWAYS
) {
538 tcg_gen_movi_i32(ret
, 1);
539 } else if (cond
== TCG_COND_NEVER
) {
540 tcg_gen_movi_i32(ret
, 0);
542 tcg_gen_op4i_i32(INDEX_op_setcond_i32
, ret
, arg1
, arg2
, cond
);
546 void tcg_gen_setcondi_i32(TCGCond cond
, TCGv_i32 ret
,
547 TCGv_i32 arg1
, int32_t arg2
)
549 tcg_gen_setcond_i32(cond
, ret
, arg1
, tcg_constant_i32(arg2
));
552 void tcg_gen_negsetcond_i32(TCGCond cond
, TCGv_i32 ret
,
553 TCGv_i32 arg1
, TCGv_i32 arg2
)
555 if (cond
== TCG_COND_ALWAYS
) {
556 tcg_gen_movi_i32(ret
, -1);
557 } else if (cond
== TCG_COND_NEVER
) {
558 tcg_gen_movi_i32(ret
, 0);
559 } else if (TCG_TARGET_HAS_negsetcond_i32
) {
560 tcg_gen_op4i_i32(INDEX_op_negsetcond_i32
, ret
, arg1
, arg2
, cond
);
562 tcg_gen_setcond_i32(cond
, ret
, arg1
, arg2
);
563 tcg_gen_neg_i32(ret
, ret
);
567 void tcg_gen_negsetcondi_i32(TCGCond cond
, TCGv_i32 ret
,
568 TCGv_i32 arg1
, int32_t arg2
)
570 tcg_gen_negsetcond_i32(cond
, ret
, arg1
, tcg_constant_i32(arg2
));
573 void tcg_gen_mul_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
575 tcg_gen_op3_i32(INDEX_op_mul_i32
, ret
, arg1
, arg2
);
578 void tcg_gen_muli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
581 tcg_gen_movi_i32(ret
, 0);
582 } else if (is_power_of_2(arg2
)) {
583 tcg_gen_shli_i32(ret
, arg1
, ctz32(arg2
));
585 tcg_gen_mul_i32(ret
, arg1
, tcg_constant_i32(arg2
));
589 void tcg_gen_div_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
591 if (TCG_TARGET_HAS_div_i32
) {
592 tcg_gen_op3_i32(INDEX_op_div_i32
, ret
, arg1
, arg2
);
593 } else if (TCG_TARGET_HAS_div2_i32
) {
594 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
595 tcg_gen_sari_i32(t0
, arg1
, 31);
596 tcg_gen_op5_i32(INDEX_op_div2_i32
, ret
, t0
, arg1
, t0
, arg2
);
597 tcg_temp_free_i32(t0
);
599 gen_helper_div_i32(ret
, arg1
, arg2
);
603 void tcg_gen_rem_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
605 if (TCG_TARGET_HAS_rem_i32
) {
606 tcg_gen_op3_i32(INDEX_op_rem_i32
, ret
, arg1
, arg2
);
607 } else if (TCG_TARGET_HAS_div_i32
) {
608 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
609 tcg_gen_op3_i32(INDEX_op_div_i32
, t0
, arg1
, arg2
);
610 tcg_gen_mul_i32(t0
, t0
, arg2
);
611 tcg_gen_sub_i32(ret
, arg1
, t0
);
612 tcg_temp_free_i32(t0
);
613 } else if (TCG_TARGET_HAS_div2_i32
) {
614 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
615 tcg_gen_sari_i32(t0
, arg1
, 31);
616 tcg_gen_op5_i32(INDEX_op_div2_i32
, t0
, ret
, arg1
, t0
, arg2
);
617 tcg_temp_free_i32(t0
);
619 gen_helper_rem_i32(ret
, arg1
, arg2
);
623 void tcg_gen_divu_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
625 if (TCG_TARGET_HAS_div_i32
) {
626 tcg_gen_op3_i32(INDEX_op_divu_i32
, ret
, arg1
, arg2
);
627 } else if (TCG_TARGET_HAS_div2_i32
) {
628 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
629 TCGv_i32 zero
= tcg_constant_i32(0);
630 tcg_gen_op5_i32(INDEX_op_divu2_i32
, ret
, t0
, arg1
, zero
, arg2
);
631 tcg_temp_free_i32(t0
);
633 gen_helper_divu_i32(ret
, arg1
, arg2
);
637 void tcg_gen_remu_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
639 if (TCG_TARGET_HAS_rem_i32
) {
640 tcg_gen_op3_i32(INDEX_op_remu_i32
, ret
, arg1
, arg2
);
641 } else if (TCG_TARGET_HAS_div_i32
) {
642 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
643 tcg_gen_op3_i32(INDEX_op_divu_i32
, t0
, arg1
, arg2
);
644 tcg_gen_mul_i32(t0
, t0
, arg2
);
645 tcg_gen_sub_i32(ret
, arg1
, t0
);
646 tcg_temp_free_i32(t0
);
647 } else if (TCG_TARGET_HAS_div2_i32
) {
648 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
649 TCGv_i32 zero
= tcg_constant_i32(0);
650 tcg_gen_op5_i32(INDEX_op_divu2_i32
, t0
, ret
, arg1
, zero
, arg2
);
651 tcg_temp_free_i32(t0
);
653 gen_helper_remu_i32(ret
, arg1
, arg2
);
657 void tcg_gen_andc_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
659 if (TCG_TARGET_HAS_andc_i32
) {
660 tcg_gen_op3_i32(INDEX_op_andc_i32
, ret
, arg1
, arg2
);
662 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
663 tcg_gen_not_i32(t0
, arg2
);
664 tcg_gen_and_i32(ret
, arg1
, t0
);
665 tcg_temp_free_i32(t0
);
669 void tcg_gen_eqv_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
671 if (TCG_TARGET_HAS_eqv_i32
) {
672 tcg_gen_op3_i32(INDEX_op_eqv_i32
, ret
, arg1
, arg2
);
674 tcg_gen_xor_i32(ret
, arg1
, arg2
);
675 tcg_gen_not_i32(ret
, ret
);
679 void tcg_gen_nand_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
681 if (TCG_TARGET_HAS_nand_i32
) {
682 tcg_gen_op3_i32(INDEX_op_nand_i32
, ret
, arg1
, arg2
);
684 tcg_gen_and_i32(ret
, arg1
, arg2
);
685 tcg_gen_not_i32(ret
, ret
);
689 void tcg_gen_nor_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
691 if (TCG_TARGET_HAS_nor_i32
) {
692 tcg_gen_op3_i32(INDEX_op_nor_i32
, ret
, arg1
, arg2
);
694 tcg_gen_or_i32(ret
, arg1
, arg2
);
695 tcg_gen_not_i32(ret
, ret
);
699 void tcg_gen_orc_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
701 if (TCG_TARGET_HAS_orc_i32
) {
702 tcg_gen_op3_i32(INDEX_op_orc_i32
, ret
, arg1
, arg2
);
704 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
705 tcg_gen_not_i32(t0
, arg2
);
706 tcg_gen_or_i32(ret
, arg1
, t0
);
707 tcg_temp_free_i32(t0
);
711 void tcg_gen_clz_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
713 if (TCG_TARGET_HAS_clz_i32
) {
714 tcg_gen_op3_i32(INDEX_op_clz_i32
, ret
, arg1
, arg2
);
715 } else if (TCG_TARGET_HAS_clz_i64
) {
716 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
717 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
718 tcg_gen_extu_i32_i64(t1
, arg1
);
719 tcg_gen_extu_i32_i64(t2
, arg2
);
720 tcg_gen_addi_i64(t2
, t2
, 32);
721 tcg_gen_clz_i64(t1
, t1
, t2
);
722 tcg_gen_extrl_i64_i32(ret
, t1
);
723 tcg_temp_free_i64(t1
);
724 tcg_temp_free_i64(t2
);
725 tcg_gen_subi_i32(ret
, ret
, 32);
727 gen_helper_clz_i32(ret
, arg1
, arg2
);
731 void tcg_gen_clzi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, uint32_t arg2
)
733 tcg_gen_clz_i32(ret
, arg1
, tcg_constant_i32(arg2
));
736 void tcg_gen_ctz_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
738 if (TCG_TARGET_HAS_ctz_i32
) {
739 tcg_gen_op3_i32(INDEX_op_ctz_i32
, ret
, arg1
, arg2
);
740 } else if (TCG_TARGET_HAS_ctz_i64
) {
741 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
742 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
743 tcg_gen_extu_i32_i64(t1
, arg1
);
744 tcg_gen_extu_i32_i64(t2
, arg2
);
745 tcg_gen_ctz_i64(t1
, t1
, t2
);
746 tcg_gen_extrl_i64_i32(ret
, t1
);
747 tcg_temp_free_i64(t1
);
748 tcg_temp_free_i64(t2
);
749 } else if (TCG_TARGET_HAS_ctpop_i32
750 || TCG_TARGET_HAS_ctpop_i64
751 || TCG_TARGET_HAS_clz_i32
752 || TCG_TARGET_HAS_clz_i64
) {
753 TCGv_i32 z
, t
= tcg_temp_ebb_new_i32();
755 if (TCG_TARGET_HAS_ctpop_i32
|| TCG_TARGET_HAS_ctpop_i64
) {
756 tcg_gen_subi_i32(t
, arg1
, 1);
757 tcg_gen_andc_i32(t
, t
, arg1
);
758 tcg_gen_ctpop_i32(t
, t
);
760 /* Since all non-x86 hosts have clz(0) == 32, don't fight it. */
761 tcg_gen_neg_i32(t
, arg1
);
762 tcg_gen_and_i32(t
, t
, arg1
);
763 tcg_gen_clzi_i32(t
, t
, 32);
764 tcg_gen_xori_i32(t
, t
, 31);
766 z
= tcg_constant_i32(0);
767 tcg_gen_movcond_i32(TCG_COND_EQ
, ret
, arg1
, z
, arg2
, t
);
768 tcg_temp_free_i32(t
);
770 gen_helper_ctz_i32(ret
, arg1
, arg2
);
774 void tcg_gen_ctzi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, uint32_t arg2
)
776 if (!TCG_TARGET_HAS_ctz_i32
&& TCG_TARGET_HAS_ctpop_i32
&& arg2
== 32) {
777 /* This equivalence has the advantage of not requiring a fixup. */
778 TCGv_i32 t
= tcg_temp_ebb_new_i32();
779 tcg_gen_subi_i32(t
, arg1
, 1);
780 tcg_gen_andc_i32(t
, t
, arg1
);
781 tcg_gen_ctpop_i32(ret
, t
);
782 tcg_temp_free_i32(t
);
784 tcg_gen_ctz_i32(ret
, arg1
, tcg_constant_i32(arg2
));
788 void tcg_gen_clrsb_i32(TCGv_i32 ret
, TCGv_i32 arg
)
790 if (TCG_TARGET_HAS_clz_i32
) {
791 TCGv_i32 t
= tcg_temp_ebb_new_i32();
792 tcg_gen_sari_i32(t
, arg
, 31);
793 tcg_gen_xor_i32(t
, t
, arg
);
794 tcg_gen_clzi_i32(t
, t
, 32);
795 tcg_gen_subi_i32(ret
, t
, 1);
796 tcg_temp_free_i32(t
);
798 gen_helper_clrsb_i32(ret
, arg
);
802 void tcg_gen_ctpop_i32(TCGv_i32 ret
, TCGv_i32 arg1
)
804 if (TCG_TARGET_HAS_ctpop_i32
) {
805 tcg_gen_op2_i32(INDEX_op_ctpop_i32
, ret
, arg1
);
806 } else if (TCG_TARGET_HAS_ctpop_i64
) {
807 TCGv_i64 t
= tcg_temp_ebb_new_i64();
808 tcg_gen_extu_i32_i64(t
, arg1
);
809 tcg_gen_ctpop_i64(t
, t
);
810 tcg_gen_extrl_i64_i32(ret
, t
);
811 tcg_temp_free_i64(t
);
813 gen_helper_ctpop_i32(ret
, arg1
);
817 void tcg_gen_rotl_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
819 if (TCG_TARGET_HAS_rot_i32
) {
820 tcg_gen_op3_i32(INDEX_op_rotl_i32
, ret
, arg1
, arg2
);
824 t0
= tcg_temp_ebb_new_i32();
825 t1
= tcg_temp_ebb_new_i32();
826 tcg_gen_shl_i32(t0
, arg1
, arg2
);
827 tcg_gen_subfi_i32(t1
, 32, arg2
);
828 tcg_gen_shr_i32(t1
, arg1
, t1
);
829 tcg_gen_or_i32(ret
, t0
, t1
);
830 tcg_temp_free_i32(t0
);
831 tcg_temp_free_i32(t1
);
835 void tcg_gen_rotli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
837 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
838 /* some cases can be optimized here */
840 tcg_gen_mov_i32(ret
, arg1
);
841 } else if (TCG_TARGET_HAS_rot_i32
) {
842 tcg_gen_rotl_i32(ret
, arg1
, tcg_constant_i32(arg2
));
845 t0
= tcg_temp_ebb_new_i32();
846 t1
= tcg_temp_ebb_new_i32();
847 tcg_gen_shli_i32(t0
, arg1
, arg2
);
848 tcg_gen_shri_i32(t1
, arg1
, 32 - arg2
);
849 tcg_gen_or_i32(ret
, t0
, t1
);
850 tcg_temp_free_i32(t0
);
851 tcg_temp_free_i32(t1
);
855 void tcg_gen_rotr_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
857 if (TCG_TARGET_HAS_rot_i32
) {
858 tcg_gen_op3_i32(INDEX_op_rotr_i32
, ret
, arg1
, arg2
);
862 t0
= tcg_temp_ebb_new_i32();
863 t1
= tcg_temp_ebb_new_i32();
864 tcg_gen_shr_i32(t0
, arg1
, arg2
);
865 tcg_gen_subfi_i32(t1
, 32, arg2
);
866 tcg_gen_shl_i32(t1
, arg1
, t1
);
867 tcg_gen_or_i32(ret
, t0
, t1
);
868 tcg_temp_free_i32(t0
);
869 tcg_temp_free_i32(t1
);
873 void tcg_gen_rotri_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
875 tcg_debug_assert(arg2
>= 0 && arg2
< 32);
876 /* some cases can be optimized here */
878 tcg_gen_mov_i32(ret
, arg1
);
880 tcg_gen_rotli_i32(ret
, arg1
, 32 - arg2
);
884 void tcg_gen_deposit_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
,
885 unsigned int ofs
, unsigned int len
)
890 tcg_debug_assert(ofs
< 32);
891 tcg_debug_assert(len
> 0);
892 tcg_debug_assert(len
<= 32);
893 tcg_debug_assert(ofs
+ len
<= 32);
896 tcg_gen_mov_i32(ret
, arg2
);
899 if (TCG_TARGET_HAS_deposit_i32
&& TCG_TARGET_deposit_i32_valid(ofs
, len
)) {
900 tcg_gen_op5ii_i32(INDEX_op_deposit_i32
, ret
, arg1
, arg2
, ofs
, len
);
904 t1
= tcg_temp_ebb_new_i32();
906 if (TCG_TARGET_HAS_extract2_i32
) {
907 if (ofs
+ len
== 32) {
908 tcg_gen_shli_i32(t1
, arg1
, len
);
909 tcg_gen_extract2_i32(ret
, t1
, arg2
, len
);
913 tcg_gen_extract2_i32(ret
, arg1
, arg2
, len
);
914 tcg_gen_rotli_i32(ret
, ret
, len
);
919 mask
= (1u << len
) - 1;
920 if (ofs
+ len
< 32) {
921 tcg_gen_andi_i32(t1
, arg2
, mask
);
922 tcg_gen_shli_i32(t1
, t1
, ofs
);
924 tcg_gen_shli_i32(t1
, arg2
, ofs
);
926 tcg_gen_andi_i32(ret
, arg1
, ~(mask
<< ofs
));
927 tcg_gen_or_i32(ret
, ret
, t1
);
929 tcg_temp_free_i32(t1
);
932 void tcg_gen_deposit_z_i32(TCGv_i32 ret
, TCGv_i32 arg
,
933 unsigned int ofs
, unsigned int len
)
935 tcg_debug_assert(ofs
< 32);
936 tcg_debug_assert(len
> 0);
937 tcg_debug_assert(len
<= 32);
938 tcg_debug_assert(ofs
+ len
<= 32);
940 if (ofs
+ len
== 32) {
941 tcg_gen_shli_i32(ret
, arg
, ofs
);
942 } else if (ofs
== 0) {
943 tcg_gen_andi_i32(ret
, arg
, (1u << len
) - 1);
944 } else if (TCG_TARGET_HAS_deposit_i32
945 && TCG_TARGET_deposit_i32_valid(ofs
, len
)) {
946 TCGv_i32 zero
= tcg_constant_i32(0);
947 tcg_gen_op5ii_i32(INDEX_op_deposit_i32
, ret
, zero
, arg
, ofs
, len
);
949 /* To help two-operand hosts we prefer to zero-extend first,
950 which allows ARG to stay live. */
953 if (TCG_TARGET_HAS_ext16u_i32
) {
954 tcg_gen_ext16u_i32(ret
, arg
);
955 tcg_gen_shli_i32(ret
, ret
, ofs
);
960 if (TCG_TARGET_HAS_ext8u_i32
) {
961 tcg_gen_ext8u_i32(ret
, arg
);
962 tcg_gen_shli_i32(ret
, ret
, ofs
);
967 /* Otherwise prefer zero-extension over AND for code size. */
970 if (TCG_TARGET_HAS_ext16u_i32
) {
971 tcg_gen_shli_i32(ret
, arg
, ofs
);
972 tcg_gen_ext16u_i32(ret
, ret
);
977 if (TCG_TARGET_HAS_ext8u_i32
) {
978 tcg_gen_shli_i32(ret
, arg
, ofs
);
979 tcg_gen_ext8u_i32(ret
, ret
);
984 tcg_gen_andi_i32(ret
, arg
, (1u << len
) - 1);
985 tcg_gen_shli_i32(ret
, ret
, ofs
);
989 void tcg_gen_extract_i32(TCGv_i32 ret
, TCGv_i32 arg
,
990 unsigned int ofs
, unsigned int len
)
992 tcg_debug_assert(ofs
< 32);
993 tcg_debug_assert(len
> 0);
994 tcg_debug_assert(len
<= 32);
995 tcg_debug_assert(ofs
+ len
<= 32);
997 /* Canonicalize certain special cases, even if extract is supported. */
998 if (ofs
+ len
== 32) {
999 tcg_gen_shri_i32(ret
, arg
, 32 - len
);
1003 tcg_gen_andi_i32(ret
, arg
, (1u << len
) - 1);
1007 if (TCG_TARGET_HAS_extract_i32
1008 && TCG_TARGET_extract_i32_valid(ofs
, len
)) {
1009 tcg_gen_op4ii_i32(INDEX_op_extract_i32
, ret
, arg
, ofs
, len
);
1013 /* Assume that zero-extension, if available, is cheaper than a shift. */
1014 switch (ofs
+ len
) {
1016 if (TCG_TARGET_HAS_ext16u_i32
) {
1017 tcg_gen_ext16u_i32(ret
, arg
);
1018 tcg_gen_shri_i32(ret
, ret
, ofs
);
1023 if (TCG_TARGET_HAS_ext8u_i32
) {
1024 tcg_gen_ext8u_i32(ret
, arg
);
1025 tcg_gen_shri_i32(ret
, ret
, ofs
);
1031 /* ??? Ideally we'd know what values are available for immediate AND.
1032 Assume that 8 bits are available, plus the special case of 16,
1033 so that we get ext8u, ext16u. */
1035 case 1 ... 8: case 16:
1036 tcg_gen_shri_i32(ret
, arg
, ofs
);
1037 tcg_gen_andi_i32(ret
, ret
, (1u << len
) - 1);
1040 tcg_gen_shli_i32(ret
, arg
, 32 - len
- ofs
);
1041 tcg_gen_shri_i32(ret
, ret
, 32 - len
);
1046 void tcg_gen_sextract_i32(TCGv_i32 ret
, TCGv_i32 arg
,
1047 unsigned int ofs
, unsigned int len
)
1049 tcg_debug_assert(ofs
< 32);
1050 tcg_debug_assert(len
> 0);
1051 tcg_debug_assert(len
<= 32);
1052 tcg_debug_assert(ofs
+ len
<= 32);
1054 /* Canonicalize certain special cases, even if extract is supported. */
1055 if (ofs
+ len
== 32) {
1056 tcg_gen_sari_i32(ret
, arg
, 32 - len
);
1062 tcg_gen_ext16s_i32(ret
, arg
);
1065 tcg_gen_ext8s_i32(ret
, arg
);
1070 if (TCG_TARGET_HAS_sextract_i32
1071 && TCG_TARGET_extract_i32_valid(ofs
, len
)) {
1072 tcg_gen_op4ii_i32(INDEX_op_sextract_i32
, ret
, arg
, ofs
, len
);
1076 /* Assume that sign-extension, if available, is cheaper than a shift. */
1077 switch (ofs
+ len
) {
1079 if (TCG_TARGET_HAS_ext16s_i32
) {
1080 tcg_gen_ext16s_i32(ret
, arg
);
1081 tcg_gen_sari_i32(ret
, ret
, ofs
);
1086 if (TCG_TARGET_HAS_ext8s_i32
) {
1087 tcg_gen_ext8s_i32(ret
, arg
);
1088 tcg_gen_sari_i32(ret
, ret
, ofs
);
1095 if (TCG_TARGET_HAS_ext16s_i32
) {
1096 tcg_gen_shri_i32(ret
, arg
, ofs
);
1097 tcg_gen_ext16s_i32(ret
, ret
);
1102 if (TCG_TARGET_HAS_ext8s_i32
) {
1103 tcg_gen_shri_i32(ret
, arg
, ofs
);
1104 tcg_gen_ext8s_i32(ret
, ret
);
1110 tcg_gen_shli_i32(ret
, arg
, 32 - len
- ofs
);
1111 tcg_gen_sari_i32(ret
, ret
, 32 - len
);
1115 * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
1116 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
1118 void tcg_gen_extract2_i32(TCGv_i32 ret
, TCGv_i32 al
, TCGv_i32 ah
,
1121 tcg_debug_assert(ofs
<= 32);
1123 tcg_gen_mov_i32(ret
, al
);
1124 } else if (ofs
== 32) {
1125 tcg_gen_mov_i32(ret
, ah
);
1126 } else if (al
== ah
) {
1127 tcg_gen_rotri_i32(ret
, al
, ofs
);
1128 } else if (TCG_TARGET_HAS_extract2_i32
) {
1129 tcg_gen_op4i_i32(INDEX_op_extract2_i32
, ret
, al
, ah
, ofs
);
1131 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1132 tcg_gen_shri_i32(t0
, al
, ofs
);
1133 tcg_gen_deposit_i32(ret
, t0
, ah
, 32 - ofs
, ofs
);
1134 tcg_temp_free_i32(t0
);
1138 void tcg_gen_movcond_i32(TCGCond cond
, TCGv_i32 ret
, TCGv_i32 c1
,
1139 TCGv_i32 c2
, TCGv_i32 v1
, TCGv_i32 v2
)
1141 if (cond
== TCG_COND_ALWAYS
) {
1142 tcg_gen_mov_i32(ret
, v1
);
1143 } else if (cond
== TCG_COND_NEVER
) {
1144 tcg_gen_mov_i32(ret
, v2
);
1145 } else if (TCG_TARGET_HAS_movcond_i32
) {
1146 tcg_gen_op6i_i32(INDEX_op_movcond_i32
, ret
, c1
, c2
, v1
, v2
, cond
);
1148 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1149 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
1150 tcg_gen_negsetcond_i32(cond
, t0
, c1
, c2
);
1151 tcg_gen_and_i32(t1
, v1
, t0
);
1152 tcg_gen_andc_i32(ret
, v2
, t0
);
1153 tcg_gen_or_i32(ret
, ret
, t1
);
1154 tcg_temp_free_i32(t0
);
1155 tcg_temp_free_i32(t1
);
1159 void tcg_gen_add2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 al
,
1160 TCGv_i32 ah
, TCGv_i32 bl
, TCGv_i32 bh
)
1162 if (TCG_TARGET_HAS_add2_i32
) {
1163 tcg_gen_op6_i32(INDEX_op_add2_i32
, rl
, rh
, al
, ah
, bl
, bh
);
1165 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1166 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1167 tcg_gen_concat_i32_i64(t0
, al
, ah
);
1168 tcg_gen_concat_i32_i64(t1
, bl
, bh
);
1169 tcg_gen_add_i64(t0
, t0
, t1
);
1170 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
1171 tcg_temp_free_i64(t0
);
1172 tcg_temp_free_i64(t1
);
1176 void tcg_gen_sub2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 al
,
1177 TCGv_i32 ah
, TCGv_i32 bl
, TCGv_i32 bh
)
1179 if (TCG_TARGET_HAS_sub2_i32
) {
1180 tcg_gen_op6_i32(INDEX_op_sub2_i32
, rl
, rh
, al
, ah
, bl
, bh
);
1182 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1183 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1184 tcg_gen_concat_i32_i64(t0
, al
, ah
);
1185 tcg_gen_concat_i32_i64(t1
, bl
, bh
);
1186 tcg_gen_sub_i64(t0
, t0
, t1
);
1187 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
1188 tcg_temp_free_i64(t0
);
1189 tcg_temp_free_i64(t1
);
1193 void tcg_gen_mulu2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1195 if (TCG_TARGET_HAS_mulu2_i32
) {
1196 tcg_gen_op4_i32(INDEX_op_mulu2_i32
, rl
, rh
, arg1
, arg2
);
1197 } else if (TCG_TARGET_HAS_muluh_i32
) {
1198 TCGv_i32 t
= tcg_temp_ebb_new_i32();
1199 tcg_gen_op3_i32(INDEX_op_mul_i32
, t
, arg1
, arg2
);
1200 tcg_gen_op3_i32(INDEX_op_muluh_i32
, rh
, arg1
, arg2
);
1201 tcg_gen_mov_i32(rl
, t
);
1202 tcg_temp_free_i32(t
);
1203 } else if (TCG_TARGET_REG_BITS
== 64) {
1204 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1205 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1206 tcg_gen_extu_i32_i64(t0
, arg1
);
1207 tcg_gen_extu_i32_i64(t1
, arg2
);
1208 tcg_gen_mul_i64(t0
, t0
, t1
);
1209 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
1210 tcg_temp_free_i64(t0
);
1211 tcg_temp_free_i64(t1
);
1213 qemu_build_not_reached();
1217 void tcg_gen_muls2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1219 if (TCG_TARGET_HAS_muls2_i32
) {
1220 tcg_gen_op4_i32(INDEX_op_muls2_i32
, rl
, rh
, arg1
, arg2
);
1221 } else if (TCG_TARGET_HAS_mulsh_i32
) {
1222 TCGv_i32 t
= tcg_temp_ebb_new_i32();
1223 tcg_gen_op3_i32(INDEX_op_mul_i32
, t
, arg1
, arg2
);
1224 tcg_gen_op3_i32(INDEX_op_mulsh_i32
, rh
, arg1
, arg2
);
1225 tcg_gen_mov_i32(rl
, t
);
1226 tcg_temp_free_i32(t
);
1227 } else if (TCG_TARGET_REG_BITS
== 32) {
1228 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1229 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
1230 TCGv_i32 t2
= tcg_temp_ebb_new_i32();
1231 TCGv_i32 t3
= tcg_temp_ebb_new_i32();
1232 tcg_gen_mulu2_i32(t0
, t1
, arg1
, arg2
);
1233 /* Adjust for negative inputs. */
1234 tcg_gen_sari_i32(t2
, arg1
, 31);
1235 tcg_gen_sari_i32(t3
, arg2
, 31);
1236 tcg_gen_and_i32(t2
, t2
, arg2
);
1237 tcg_gen_and_i32(t3
, t3
, arg1
);
1238 tcg_gen_sub_i32(rh
, t1
, t2
);
1239 tcg_gen_sub_i32(rh
, rh
, t3
);
1240 tcg_gen_mov_i32(rl
, t0
);
1241 tcg_temp_free_i32(t0
);
1242 tcg_temp_free_i32(t1
);
1243 tcg_temp_free_i32(t2
);
1244 tcg_temp_free_i32(t3
);
1246 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1247 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1248 tcg_gen_ext_i32_i64(t0
, arg1
);
1249 tcg_gen_ext_i32_i64(t1
, arg2
);
1250 tcg_gen_mul_i64(t0
, t0
, t1
);
1251 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
1252 tcg_temp_free_i64(t0
);
1253 tcg_temp_free_i64(t1
);
1257 void tcg_gen_mulsu2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 arg1
, TCGv_i32 arg2
)
1259 if (TCG_TARGET_REG_BITS
== 32) {
1260 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1261 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
1262 TCGv_i32 t2
= tcg_temp_ebb_new_i32();
1263 tcg_gen_mulu2_i32(t0
, t1
, arg1
, arg2
);
1264 /* Adjust for negative input for the signed arg1. */
1265 tcg_gen_sari_i32(t2
, arg1
, 31);
1266 tcg_gen_and_i32(t2
, t2
, arg2
);
1267 tcg_gen_sub_i32(rh
, t1
, t2
);
1268 tcg_gen_mov_i32(rl
, t0
);
1269 tcg_temp_free_i32(t0
);
1270 tcg_temp_free_i32(t1
);
1271 tcg_temp_free_i32(t2
);
1273 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
1274 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
1275 tcg_gen_ext_i32_i64(t0
, arg1
);
1276 tcg_gen_extu_i32_i64(t1
, arg2
);
1277 tcg_gen_mul_i64(t0
, t0
, t1
);
1278 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
1279 tcg_temp_free_i64(t0
);
1280 tcg_temp_free_i64(t1
);
1284 void tcg_gen_ext8s_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1286 if (TCG_TARGET_HAS_ext8s_i32
) {
1287 tcg_gen_op2_i32(INDEX_op_ext8s_i32
, ret
, arg
);
1289 tcg_gen_shli_i32(ret
, arg
, 24);
1290 tcg_gen_sari_i32(ret
, ret
, 24);
1294 void tcg_gen_ext16s_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1296 if (TCG_TARGET_HAS_ext16s_i32
) {
1297 tcg_gen_op2_i32(INDEX_op_ext16s_i32
, ret
, arg
);
1299 tcg_gen_shli_i32(ret
, arg
, 16);
1300 tcg_gen_sari_i32(ret
, ret
, 16);
1304 void tcg_gen_ext8u_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1306 if (TCG_TARGET_HAS_ext8u_i32
) {
1307 tcg_gen_op2_i32(INDEX_op_ext8u_i32
, ret
, arg
);
1309 tcg_gen_andi_i32(ret
, arg
, 0xffu
);
1313 void tcg_gen_ext16u_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1315 if (TCG_TARGET_HAS_ext16u_i32
) {
1316 tcg_gen_op2_i32(INDEX_op_ext16u_i32
, ret
, arg
);
1318 tcg_gen_andi_i32(ret
, arg
, 0xffffu
);
1323 * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
1325 * Byte pattern: xxab -> yyba
1327 * With TCG_BSWAP_IZ, x == zero, else undefined.
1328 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1330 void tcg_gen_bswap16_i32(TCGv_i32 ret
, TCGv_i32 arg
, int flags
)
1332 /* Only one extension flag may be present. */
1333 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
1335 if (TCG_TARGET_HAS_bswap16_i32
) {
1336 tcg_gen_op3i_i32(INDEX_op_bswap16_i32
, ret
, arg
, flags
);
1338 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1339 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
1341 /* arg = ..ab (IZ) xxab (!IZ) */
1342 tcg_gen_shri_i32(t0
, arg
, 8); /* t0 = ...a (IZ) .xxa (!IZ) */
1343 if (!(flags
& TCG_BSWAP_IZ
)) {
1344 tcg_gen_ext8u_i32(t0
, t0
); /* t0 = ...a */
1347 if (flags
& TCG_BSWAP_OS
) {
1348 tcg_gen_shli_i32(t1
, arg
, 24); /* t1 = b... */
1349 tcg_gen_sari_i32(t1
, t1
, 16); /* t1 = ssb. */
1350 } else if (flags
& TCG_BSWAP_OZ
) {
1351 tcg_gen_ext8u_i32(t1
, arg
); /* t1 = ...b */
1352 tcg_gen_shli_i32(t1
, t1
, 8); /* t1 = ..b. */
1354 tcg_gen_shli_i32(t1
, arg
, 8); /* t1 = xab. */
1357 tcg_gen_or_i32(ret
, t0
, t1
); /* ret = ..ba (OZ) */
1359 /* = xaba (no flag) */
1360 tcg_temp_free_i32(t0
);
1361 tcg_temp_free_i32(t1
);
1366 * bswap32_i32: 32-bit byte swap on a 32-bit value.
1368 * Byte pattern: abcd -> dcba
1370 void tcg_gen_bswap32_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1372 if (TCG_TARGET_HAS_bswap32_i32
) {
1373 tcg_gen_op3i_i32(INDEX_op_bswap32_i32
, ret
, arg
, 0);
1375 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1376 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
1377 TCGv_i32 t2
= tcg_constant_i32(0x00ff00ff);
1380 tcg_gen_shri_i32(t0
, arg
, 8); /* t0 = .abc */
1381 tcg_gen_and_i32(t1
, arg
, t2
); /* t1 = .b.d */
1382 tcg_gen_and_i32(t0
, t0
, t2
); /* t0 = .a.c */
1383 tcg_gen_shli_i32(t1
, t1
, 8); /* t1 = b.d. */
1384 tcg_gen_or_i32(ret
, t0
, t1
); /* ret = badc */
1386 tcg_gen_shri_i32(t0
, ret
, 16); /* t0 = ..ba */
1387 tcg_gen_shli_i32(t1
, ret
, 16); /* t1 = dc.. */
1388 tcg_gen_or_i32(ret
, t0
, t1
); /* ret = dcba */
1390 tcg_temp_free_i32(t0
);
1391 tcg_temp_free_i32(t1
);
1396 * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
1398 * Byte pattern: abcd -> cdab
1400 void tcg_gen_hswap_i32(TCGv_i32 ret
, TCGv_i32 arg
)
1402 /* Swapping 2 16-bit elements is a rotate. */
1403 tcg_gen_rotli_i32(ret
, arg
, 16);
1406 void tcg_gen_smin_i32(TCGv_i32 ret
, TCGv_i32 a
, TCGv_i32 b
)
1408 tcg_gen_movcond_i32(TCG_COND_LT
, ret
, a
, b
, a
, b
);
1411 void tcg_gen_umin_i32(TCGv_i32 ret
, TCGv_i32 a
, TCGv_i32 b
)
1413 tcg_gen_movcond_i32(TCG_COND_LTU
, ret
, a
, b
, a
, b
);
1416 void tcg_gen_smax_i32(TCGv_i32 ret
, TCGv_i32 a
, TCGv_i32 b
)
1418 tcg_gen_movcond_i32(TCG_COND_LT
, ret
, a
, b
, b
, a
);
1421 void tcg_gen_umax_i32(TCGv_i32 ret
, TCGv_i32 a
, TCGv_i32 b
)
1423 tcg_gen_movcond_i32(TCG_COND_LTU
, ret
, a
, b
, b
, a
);
1426 void tcg_gen_abs_i32(TCGv_i32 ret
, TCGv_i32 a
)
1428 TCGv_i32 t
= tcg_temp_ebb_new_i32();
1430 tcg_gen_sari_i32(t
, a
, 31);
1431 tcg_gen_xor_i32(ret
, a
, t
);
1432 tcg_gen_sub_i32(ret
, ret
, t
);
1433 tcg_temp_free_i32(t
);
1436 void tcg_gen_ld8u_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1438 tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32
, ret
, arg2
, offset
);
1441 void tcg_gen_ld8s_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1443 tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32
, ret
, arg2
, offset
);
1446 void tcg_gen_ld16u_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1448 tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32
, ret
, arg2
, offset
);
1451 void tcg_gen_ld16s_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1453 tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32
, ret
, arg2
, offset
);
1456 void tcg_gen_ld_i32(TCGv_i32 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1458 tcg_gen_ldst_op_i32(INDEX_op_ld_i32
, ret
, arg2
, offset
);
1461 void tcg_gen_st8_i32(TCGv_i32 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1463 tcg_gen_ldst_op_i32(INDEX_op_st8_i32
, arg1
, arg2
, offset
);
1466 void tcg_gen_st16_i32(TCGv_i32 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1468 tcg_gen_ldst_op_i32(INDEX_op_st16_i32
, arg1
, arg2
, offset
);
1471 void tcg_gen_st_i32(TCGv_i32 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1473 tcg_gen_ldst_op_i32(INDEX_op_st_i32
, arg1
, arg2
, offset
);
1479 void tcg_gen_discard_i64(TCGv_i64 arg
)
1481 if (TCG_TARGET_REG_BITS
== 64) {
1482 tcg_gen_op1_i64(INDEX_op_discard
, arg
);
1484 tcg_gen_discard_i32(TCGV_LOW(arg
));
1485 tcg_gen_discard_i32(TCGV_HIGH(arg
));
1489 void tcg_gen_mov_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1494 if (TCG_TARGET_REG_BITS
== 64) {
1495 tcg_gen_op2_i64(INDEX_op_mov_i64
, ret
, arg
);
1497 TCGTemp
*ts
= tcgv_i64_temp(arg
);
1499 /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
1500 if (ts
->kind
== TEMP_CONST
) {
1501 tcg_gen_movi_i64(ret
, ts
->val
);
1503 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1504 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1509 void tcg_gen_movi_i64(TCGv_i64 ret
, int64_t arg
)
1511 if (TCG_TARGET_REG_BITS
== 64) {
1512 tcg_gen_mov_i64(ret
, tcg_constant_i64(arg
));
1514 tcg_gen_movi_i32(TCGV_LOW(ret
), arg
);
1515 tcg_gen_movi_i32(TCGV_HIGH(ret
), arg
>> 32);
1519 void tcg_gen_ld8u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1521 if (TCG_TARGET_REG_BITS
== 64) {
1522 tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64
, ret
, arg2
, offset
);
1524 tcg_gen_ld8u_i32(TCGV_LOW(ret
), arg2
, offset
);
1525 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1529 void tcg_gen_ld8s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1531 if (TCG_TARGET_REG_BITS
== 64) {
1532 tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64
, ret
, arg2
, offset
);
1534 tcg_gen_ld8s_i32(TCGV_LOW(ret
), arg2
, offset
);
1535 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1539 void tcg_gen_ld16u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1541 if (TCG_TARGET_REG_BITS
== 64) {
1542 tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64
, ret
, arg2
, offset
);
1544 tcg_gen_ld16u_i32(TCGV_LOW(ret
), arg2
, offset
);
1545 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1549 void tcg_gen_ld16s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1551 if (TCG_TARGET_REG_BITS
== 64) {
1552 tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64
, ret
, arg2
, offset
);
1554 tcg_gen_ld16s_i32(TCGV_LOW(ret
), arg2
, offset
);
1555 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1559 void tcg_gen_ld32u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1561 if (TCG_TARGET_REG_BITS
== 64) {
1562 tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64
, ret
, arg2
, offset
);
1564 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
1565 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1569 void tcg_gen_ld32s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1571 if (TCG_TARGET_REG_BITS
== 64) {
1572 tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64
, ret
, arg2
, offset
);
1574 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
1575 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1579 void tcg_gen_ld_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
1582 * For 32-bit host, since arg2 and ret have different types,
1583 * they cannot be the same temporary -- no chance of overlap.
1585 if (TCG_TARGET_REG_BITS
== 64) {
1586 tcg_gen_ldst_op_i64(INDEX_op_ld_i64
, ret
, arg2
, offset
);
1587 } else if (HOST_BIG_ENDIAN
) {
1588 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
);
1589 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
+ 4);
1591 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
1592 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
+ 4);
1596 void tcg_gen_st8_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1598 if (TCG_TARGET_REG_BITS
== 64) {
1599 tcg_gen_ldst_op_i64(INDEX_op_st8_i64
, arg1
, arg2
, offset
);
1601 tcg_gen_st8_i32(TCGV_LOW(arg1
), arg2
, offset
);
1605 void tcg_gen_st16_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1607 if (TCG_TARGET_REG_BITS
== 64) {
1608 tcg_gen_ldst_op_i64(INDEX_op_st16_i64
, arg1
, arg2
, offset
);
1610 tcg_gen_st16_i32(TCGV_LOW(arg1
), arg2
, offset
);
1614 void tcg_gen_st32_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1616 if (TCG_TARGET_REG_BITS
== 64) {
1617 tcg_gen_ldst_op_i64(INDEX_op_st32_i64
, arg1
, arg2
, offset
);
1619 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
1623 void tcg_gen_st_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
1625 if (TCG_TARGET_REG_BITS
== 64) {
1626 tcg_gen_ldst_op_i64(INDEX_op_st_i64
, arg1
, arg2
, offset
);
1627 } else if (HOST_BIG_ENDIAN
) {
1628 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
);
1629 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
+ 4);
1631 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
1632 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
+ 4);
1636 void tcg_gen_add_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1638 if (TCG_TARGET_REG_BITS
== 64) {
1639 tcg_gen_op3_i64(INDEX_op_add_i64
, ret
, arg1
, arg2
);
1641 tcg_gen_add2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
), TCGV_LOW(arg1
),
1642 TCGV_HIGH(arg1
), TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1646 void tcg_gen_sub_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1648 if (TCG_TARGET_REG_BITS
== 64) {
1649 tcg_gen_op3_i64(INDEX_op_sub_i64
, ret
, arg1
, arg2
);
1651 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
), TCGV_LOW(arg1
),
1652 TCGV_HIGH(arg1
), TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1656 void tcg_gen_and_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1658 if (TCG_TARGET_REG_BITS
== 64) {
1659 tcg_gen_op3_i64(INDEX_op_and_i64
, ret
, arg1
, arg2
);
1661 tcg_gen_and_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1662 tcg_gen_and_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1666 void tcg_gen_or_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1668 if (TCG_TARGET_REG_BITS
== 64) {
1669 tcg_gen_op3_i64(INDEX_op_or_i64
, ret
, arg1
, arg2
);
1671 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1672 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1676 void tcg_gen_xor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1678 if (TCG_TARGET_REG_BITS
== 64) {
1679 tcg_gen_op3_i64(INDEX_op_xor_i64
, ret
, arg1
, arg2
);
1681 tcg_gen_xor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1682 tcg_gen_xor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1686 void tcg_gen_shl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1688 if (TCG_TARGET_REG_BITS
== 64) {
1689 tcg_gen_op3_i64(INDEX_op_shl_i64
, ret
, arg1
, arg2
);
1691 gen_helper_shl_i64(ret
, arg1
, arg2
);
1695 void tcg_gen_shr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1697 if (TCG_TARGET_REG_BITS
== 64) {
1698 tcg_gen_op3_i64(INDEX_op_shr_i64
, ret
, arg1
, arg2
);
1700 gen_helper_shr_i64(ret
, arg1
, arg2
);
1704 void tcg_gen_sar_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1706 if (TCG_TARGET_REG_BITS
== 64) {
1707 tcg_gen_op3_i64(INDEX_op_sar_i64
, ret
, arg1
, arg2
);
1709 gen_helper_sar_i64(ret
, arg1
, arg2
);
1713 void tcg_gen_mul_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1718 if (TCG_TARGET_REG_BITS
== 64) {
1719 tcg_gen_op3_i64(INDEX_op_mul_i64
, ret
, arg1
, arg2
);
1724 t0
= tcg_temp_ebb_new_i64();
1725 t1
= tcg_temp_ebb_new_i32();
1727 tcg_gen_mulu2_i32(TCGV_LOW(t0
), TCGV_HIGH(t0
),
1728 TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1730 tcg_gen_mul_i32(t1
, TCGV_LOW(arg1
), TCGV_HIGH(arg2
));
1731 tcg_gen_add_i32(TCGV_HIGH(t0
), TCGV_HIGH(t0
), t1
);
1732 tcg_gen_mul_i32(t1
, TCGV_HIGH(arg1
), TCGV_LOW(arg2
));
1733 tcg_gen_add_i32(TCGV_HIGH(t0
), TCGV_HIGH(t0
), t1
);
1735 tcg_gen_mov_i64(ret
, t0
);
1736 tcg_temp_free_i64(t0
);
1737 tcg_temp_free_i32(t1
);
1740 void tcg_gen_addi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1742 /* some cases can be optimized here */
1744 tcg_gen_mov_i64(ret
, arg1
);
1745 } else if (TCG_TARGET_REG_BITS
== 64) {
1746 tcg_gen_add_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1748 tcg_gen_add2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1749 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1750 tcg_constant_i32(arg2
), tcg_constant_i32(arg2
>> 32));
1754 void tcg_gen_subfi_i64(TCGv_i64 ret
, int64_t arg1
, TCGv_i64 arg2
)
1756 if (arg1
== 0 && TCG_TARGET_HAS_neg_i64
) {
1757 /* Don't recurse with tcg_gen_neg_i64. */
1758 tcg_gen_op2_i64(INDEX_op_neg_i64
, ret
, arg2
);
1759 } else if (TCG_TARGET_REG_BITS
== 64) {
1760 tcg_gen_sub_i64(ret
, tcg_constant_i64(arg1
), arg2
);
1762 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1763 tcg_constant_i32(arg1
), tcg_constant_i32(arg1
>> 32),
1764 TCGV_LOW(arg2
), TCGV_HIGH(arg2
));
1768 void tcg_gen_subi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1770 /* some cases can be optimized here */
1772 tcg_gen_mov_i64(ret
, arg1
);
1773 } else if (TCG_TARGET_REG_BITS
== 64) {
1774 tcg_gen_sub_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1776 tcg_gen_sub2_i32(TCGV_LOW(ret
), TCGV_HIGH(ret
),
1777 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1778 tcg_constant_i32(arg2
), tcg_constant_i32(arg2
>> 32));
1782 void tcg_gen_neg_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1784 if (TCG_TARGET_HAS_neg_i64
) {
1785 tcg_gen_op2_i64(INDEX_op_neg_i64
, ret
, arg
);
1787 tcg_gen_subfi_i64(ret
, 0, arg
);
1791 void tcg_gen_andi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1793 if (TCG_TARGET_REG_BITS
== 32) {
1794 tcg_gen_andi_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1795 tcg_gen_andi_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1799 /* Some cases can be optimized here. */
1802 tcg_gen_movi_i64(ret
, 0);
1805 tcg_gen_mov_i64(ret
, arg1
);
1808 /* Don't recurse with tcg_gen_ext8u_i64. */
1809 if (TCG_TARGET_HAS_ext8u_i64
) {
1810 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg1
);
1815 if (TCG_TARGET_HAS_ext16u_i64
) {
1816 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg1
);
1821 if (TCG_TARGET_HAS_ext32u_i64
) {
1822 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg1
);
1828 tcg_gen_and_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1831 void tcg_gen_ori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1833 if (TCG_TARGET_REG_BITS
== 32) {
1834 tcg_gen_ori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1835 tcg_gen_ori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1838 /* Some cases can be optimized here. */
1840 tcg_gen_movi_i64(ret
, -1);
1841 } else if (arg2
== 0) {
1842 tcg_gen_mov_i64(ret
, arg1
);
1844 tcg_gen_or_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1848 void tcg_gen_xori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1850 if (TCG_TARGET_REG_BITS
== 32) {
1851 tcg_gen_xori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1852 tcg_gen_xori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1855 /* Some cases can be optimized here. */
1857 tcg_gen_mov_i64(ret
, arg1
);
1858 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i64
) {
1859 /* Don't recurse with tcg_gen_not_i64. */
1860 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg1
);
1862 tcg_gen_xor_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1866 static inline void tcg_gen_shifti_i64(TCGv_i64 ret
, TCGv_i64 arg1
,
1867 unsigned c
, bool right
, bool arith
)
1869 tcg_debug_assert(c
< 64);
1871 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
1872 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
1873 } else if (c
>= 32) {
1877 tcg_gen_sari_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), c
);
1878 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), 31);
1880 tcg_gen_shri_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), c
);
1881 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1884 tcg_gen_shli_i32(TCGV_HIGH(ret
), TCGV_LOW(arg1
), c
);
1885 tcg_gen_movi_i32(TCGV_LOW(ret
), 0);
1888 if (TCG_TARGET_HAS_extract2_i32
) {
1889 tcg_gen_extract2_i32(TCGV_LOW(ret
),
1890 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), c
);
1892 tcg_gen_shri_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), c
);
1893 tcg_gen_deposit_i32(TCGV_LOW(ret
), TCGV_LOW(ret
),
1894 TCGV_HIGH(arg1
), 32 - c
, c
);
1897 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), c
);
1899 tcg_gen_shri_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), c
);
1902 if (TCG_TARGET_HAS_extract2_i32
) {
1903 tcg_gen_extract2_i32(TCGV_HIGH(ret
),
1904 TCGV_LOW(arg1
), TCGV_HIGH(arg1
), 32 - c
);
1906 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
1907 tcg_gen_shri_i32(t0
, TCGV_LOW(arg1
), 32 - c
);
1908 tcg_gen_deposit_i32(TCGV_HIGH(ret
), t0
,
1909 TCGV_HIGH(arg1
), c
, 32 - c
);
1910 tcg_temp_free_i32(t0
);
1912 tcg_gen_shli_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), c
);
1916 void tcg_gen_shli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1918 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1919 if (TCG_TARGET_REG_BITS
== 32) {
1920 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 0, 0);
1921 } else if (arg2
== 0) {
1922 tcg_gen_mov_i64(ret
, arg1
);
1924 tcg_gen_shl_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1928 void tcg_gen_shri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1930 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1931 if (TCG_TARGET_REG_BITS
== 32) {
1932 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 0);
1933 } else if (arg2
== 0) {
1934 tcg_gen_mov_i64(ret
, arg1
);
1936 tcg_gen_shr_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1940 void tcg_gen_sari_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1942 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
1943 if (TCG_TARGET_REG_BITS
== 32) {
1944 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 1);
1945 } else if (arg2
== 0) {
1946 tcg_gen_mov_i64(ret
, arg1
);
1948 tcg_gen_sar_i64(ret
, arg1
, tcg_constant_i64(arg2
));
1952 void tcg_gen_brcond_i64(TCGCond cond
, TCGv_i64 arg1
, TCGv_i64 arg2
, TCGLabel
*l
)
1954 if (cond
== TCG_COND_ALWAYS
) {
1956 } else if (cond
!= TCG_COND_NEVER
) {
1957 if (TCG_TARGET_REG_BITS
== 32) {
1958 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
, TCGV_LOW(arg1
),
1959 TCGV_HIGH(arg1
), TCGV_LOW(arg2
),
1960 TCGV_HIGH(arg2
), cond
, label_arg(l
));
1962 tcg_gen_op4ii_i64(INDEX_op_brcond_i64
, arg1
, arg2
, cond
,
1965 add_last_as_label_use(l
);
1969 void tcg_gen_brcondi_i64(TCGCond cond
, TCGv_i64 arg1
, int64_t arg2
, TCGLabel
*l
)
1971 if (TCG_TARGET_REG_BITS
== 64) {
1972 tcg_gen_brcond_i64(cond
, arg1
, tcg_constant_i64(arg2
), l
);
1973 } else if (cond
== TCG_COND_ALWAYS
) {
1975 } else if (cond
!= TCG_COND_NEVER
) {
1976 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
,
1977 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1978 tcg_constant_i32(arg2
),
1979 tcg_constant_i32(arg2
>> 32),
1980 cond
, label_arg(l
));
1981 add_last_as_label_use(l
);
1985 void tcg_gen_setcond_i64(TCGCond cond
, TCGv_i64 ret
,
1986 TCGv_i64 arg1
, TCGv_i64 arg2
)
1988 if (cond
== TCG_COND_ALWAYS
) {
1989 tcg_gen_movi_i64(ret
, 1);
1990 } else if (cond
== TCG_COND_NEVER
) {
1991 tcg_gen_movi_i64(ret
, 0);
1993 if (TCG_TARGET_REG_BITS
== 32) {
1994 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1995 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1996 TCGV_LOW(arg2
), TCGV_HIGH(arg2
), cond
);
1997 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1999 tcg_gen_op4i_i64(INDEX_op_setcond_i64
, ret
, arg1
, arg2
, cond
);
2004 void tcg_gen_setcondi_i64(TCGCond cond
, TCGv_i64 ret
,
2005 TCGv_i64 arg1
, int64_t arg2
)
2007 if (TCG_TARGET_REG_BITS
== 64) {
2008 tcg_gen_setcond_i64(cond
, ret
, arg1
, tcg_constant_i64(arg2
));
2009 } else if (cond
== TCG_COND_ALWAYS
) {
2010 tcg_gen_movi_i64(ret
, 1);
2011 } else if (cond
== TCG_COND_NEVER
) {
2012 tcg_gen_movi_i64(ret
, 0);
2014 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
2015 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
2016 tcg_constant_i32(arg2
),
2017 tcg_constant_i32(arg2
>> 32), cond
);
2018 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2022 void tcg_gen_negsetcondi_i64(TCGCond cond
, TCGv_i64 ret
,
2023 TCGv_i64 arg1
, int64_t arg2
)
2025 tcg_gen_negsetcond_i64(cond
, ret
, arg1
, tcg_constant_i64(arg2
));
2028 void tcg_gen_negsetcond_i64(TCGCond cond
, TCGv_i64 ret
,
2029 TCGv_i64 arg1
, TCGv_i64 arg2
)
2031 if (cond
== TCG_COND_ALWAYS
) {
2032 tcg_gen_movi_i64(ret
, -1);
2033 } else if (cond
== TCG_COND_NEVER
) {
2034 tcg_gen_movi_i64(ret
, 0);
2035 } else if (TCG_TARGET_HAS_negsetcond_i64
) {
2036 tcg_gen_op4i_i64(INDEX_op_negsetcond_i64
, ret
, arg1
, arg2
, cond
);
2037 } else if (TCG_TARGET_REG_BITS
== 32) {
2038 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
2039 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
2040 TCGV_LOW(arg2
), TCGV_HIGH(arg2
), cond
);
2041 tcg_gen_neg_i32(TCGV_LOW(ret
), TCGV_LOW(ret
));
2042 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
));
2044 tcg_gen_setcond_i64(cond
, ret
, arg1
, arg2
);
2045 tcg_gen_neg_i64(ret
, ret
);
2049 void tcg_gen_muli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
2052 tcg_gen_movi_i64(ret
, 0);
2053 } else if (is_power_of_2(arg2
)) {
2054 tcg_gen_shli_i64(ret
, arg1
, ctz64(arg2
));
2056 tcg_gen_mul_i64(ret
, arg1
, tcg_constant_i64(arg2
));
2060 void tcg_gen_div_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2062 if (TCG_TARGET_HAS_div_i64
) {
2063 tcg_gen_op3_i64(INDEX_op_div_i64
, ret
, arg1
, arg2
);
2064 } else if (TCG_TARGET_HAS_div2_i64
) {
2065 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2066 tcg_gen_sari_i64(t0
, arg1
, 63);
2067 tcg_gen_op5_i64(INDEX_op_div2_i64
, ret
, t0
, arg1
, t0
, arg2
);
2068 tcg_temp_free_i64(t0
);
2070 gen_helper_div_i64(ret
, arg1
, arg2
);
2074 void tcg_gen_rem_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2076 if (TCG_TARGET_HAS_rem_i64
) {
2077 tcg_gen_op3_i64(INDEX_op_rem_i64
, ret
, arg1
, arg2
);
2078 } else if (TCG_TARGET_HAS_div_i64
) {
2079 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2080 tcg_gen_op3_i64(INDEX_op_div_i64
, t0
, arg1
, arg2
);
2081 tcg_gen_mul_i64(t0
, t0
, arg2
);
2082 tcg_gen_sub_i64(ret
, arg1
, t0
);
2083 tcg_temp_free_i64(t0
);
2084 } else if (TCG_TARGET_HAS_div2_i64
) {
2085 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2086 tcg_gen_sari_i64(t0
, arg1
, 63);
2087 tcg_gen_op5_i64(INDEX_op_div2_i64
, t0
, ret
, arg1
, t0
, arg2
);
2088 tcg_temp_free_i64(t0
);
2090 gen_helper_rem_i64(ret
, arg1
, arg2
);
2094 void tcg_gen_divu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2096 if (TCG_TARGET_HAS_div_i64
) {
2097 tcg_gen_op3_i64(INDEX_op_divu_i64
, ret
, arg1
, arg2
);
2098 } else if (TCG_TARGET_HAS_div2_i64
) {
2099 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2100 TCGv_i64 zero
= tcg_constant_i64(0);
2101 tcg_gen_op5_i64(INDEX_op_divu2_i64
, ret
, t0
, arg1
, zero
, arg2
);
2102 tcg_temp_free_i64(t0
);
2104 gen_helper_divu_i64(ret
, arg1
, arg2
);
2108 void tcg_gen_remu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2110 if (TCG_TARGET_HAS_rem_i64
) {
2111 tcg_gen_op3_i64(INDEX_op_remu_i64
, ret
, arg1
, arg2
);
2112 } else if (TCG_TARGET_HAS_div_i64
) {
2113 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2114 tcg_gen_op3_i64(INDEX_op_divu_i64
, t0
, arg1
, arg2
);
2115 tcg_gen_mul_i64(t0
, t0
, arg2
);
2116 tcg_gen_sub_i64(ret
, arg1
, t0
);
2117 tcg_temp_free_i64(t0
);
2118 } else if (TCG_TARGET_HAS_div2_i64
) {
2119 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2120 TCGv_i64 zero
= tcg_constant_i64(0);
2121 tcg_gen_op5_i64(INDEX_op_divu2_i64
, t0
, ret
, arg1
, zero
, arg2
);
2122 tcg_temp_free_i64(t0
);
2124 gen_helper_remu_i64(ret
, arg1
, arg2
);
2128 void tcg_gen_ext8s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2130 if (TCG_TARGET_REG_BITS
== 32) {
2131 tcg_gen_ext8s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
2132 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2133 } else if (TCG_TARGET_HAS_ext8s_i64
) {
2134 tcg_gen_op2_i64(INDEX_op_ext8s_i64
, ret
, arg
);
2136 tcg_gen_shli_i64(ret
, arg
, 56);
2137 tcg_gen_sari_i64(ret
, ret
, 56);
2141 void tcg_gen_ext16s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2143 if (TCG_TARGET_REG_BITS
== 32) {
2144 tcg_gen_ext16s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
2145 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2146 } else if (TCG_TARGET_HAS_ext16s_i64
) {
2147 tcg_gen_op2_i64(INDEX_op_ext16s_i64
, ret
, arg
);
2149 tcg_gen_shli_i64(ret
, arg
, 48);
2150 tcg_gen_sari_i64(ret
, ret
, 48);
2154 void tcg_gen_ext32s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2156 if (TCG_TARGET_REG_BITS
== 32) {
2157 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
2158 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2159 } else if (TCG_TARGET_HAS_ext32s_i64
) {
2160 tcg_gen_op2_i64(INDEX_op_ext32s_i64
, ret
, arg
);
2162 tcg_gen_shli_i64(ret
, arg
, 32);
2163 tcg_gen_sari_i64(ret
, ret
, 32);
2167 void tcg_gen_ext8u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2169 if (TCG_TARGET_REG_BITS
== 32) {
2170 tcg_gen_ext8u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
2171 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2172 } else if (TCG_TARGET_HAS_ext8u_i64
) {
2173 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg
);
2175 tcg_gen_andi_i64(ret
, arg
, 0xffu
);
2179 void tcg_gen_ext16u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2181 if (TCG_TARGET_REG_BITS
== 32) {
2182 tcg_gen_ext16u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
2183 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2184 } else if (TCG_TARGET_HAS_ext16u_i64
) {
2185 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg
);
2187 tcg_gen_andi_i64(ret
, arg
, 0xffffu
);
2191 void tcg_gen_ext32u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2193 if (TCG_TARGET_REG_BITS
== 32) {
2194 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
2195 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2196 } else if (TCG_TARGET_HAS_ext32u_i64
) {
2197 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg
);
2199 tcg_gen_andi_i64(ret
, arg
, 0xffffffffu
);
2204 * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
2206 * Byte pattern: xxxxxxxxab -> yyyyyyyyba
2208 * With TCG_BSWAP_IZ, x == zero, else undefined.
2209 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
2211 void tcg_gen_bswap16_i64(TCGv_i64 ret
, TCGv_i64 arg
, int flags
)
2213 /* Only one extension flag may be present. */
2214 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
2216 if (TCG_TARGET_REG_BITS
== 32) {
2217 tcg_gen_bswap16_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), flags
);
2218 if (flags
& TCG_BSWAP_OS
) {
2219 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2221 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2223 } else if (TCG_TARGET_HAS_bswap16_i64
) {
2224 tcg_gen_op3i_i64(INDEX_op_bswap16_i64
, ret
, arg
, flags
);
2226 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2227 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
2229 /* arg = ......ab or xxxxxxab */
2230 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .......a or .xxxxxxa */
2231 if (!(flags
& TCG_BSWAP_IZ
)) {
2232 tcg_gen_ext8u_i64(t0
, t0
); /* t0 = .......a */
2235 if (flags
& TCG_BSWAP_OS
) {
2236 tcg_gen_shli_i64(t1
, arg
, 56); /* t1 = b....... */
2237 tcg_gen_sari_i64(t1
, t1
, 48); /* t1 = ssssssb. */
2238 } else if (flags
& TCG_BSWAP_OZ
) {
2239 tcg_gen_ext8u_i64(t1
, arg
); /* t1 = .......b */
2240 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = ......b. */
2242 tcg_gen_shli_i64(t1
, arg
, 8); /* t1 = xxxxxab. */
2245 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ......ba (OZ) */
2247 /* xxxxxaba (no flag) */
2248 tcg_temp_free_i64(t0
);
2249 tcg_temp_free_i64(t1
);
2254 * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
2256 * Byte pattern: xxxxabcd -> yyyydcba
2258 * With TCG_BSWAP_IZ, x == zero, else undefined.
2259 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
2261 void tcg_gen_bswap32_i64(TCGv_i64 ret
, TCGv_i64 arg
, int flags
)
2263 /* Only one extension flag may be present. */
2264 tcg_debug_assert(!(flags
& TCG_BSWAP_OS
) || !(flags
& TCG_BSWAP_OZ
));
2266 if (TCG_TARGET_REG_BITS
== 32) {
2267 tcg_gen_bswap32_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
2268 if (flags
& TCG_BSWAP_OS
) {
2269 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2271 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2273 } else if (TCG_TARGET_HAS_bswap32_i64
) {
2274 tcg_gen_op3i_i64(INDEX_op_bswap32_i64
, ret
, arg
, flags
);
2276 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2277 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
2278 TCGv_i64 t2
= tcg_constant_i64(0x00ff00ff);
2280 /* arg = xxxxabcd */
2281 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .xxxxabc */
2282 tcg_gen_and_i64(t1
, arg
, t2
); /* t1 = .....b.d */
2283 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = .....a.c */
2284 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = ....b.d. */
2285 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ....badc */
2287 tcg_gen_shli_i64(t1
, ret
, 48); /* t1 = dc...... */
2288 tcg_gen_shri_i64(t0
, ret
, 16); /* t0 = ......ba */
2289 if (flags
& TCG_BSWAP_OS
) {
2290 tcg_gen_sari_i64(t1
, t1
, 32); /* t1 = ssssdc.. */
2292 tcg_gen_shri_i64(t1
, t1
, 32); /* t1 = ....dc.. */
2294 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ssssdcba (OS) */
2295 /* ....dcba (else) */
2297 tcg_temp_free_i64(t0
);
2298 tcg_temp_free_i64(t1
);
2303 * bswap64_i64: 64-bit byte swap on a 64-bit value.
2305 * Byte pattern: abcdefgh -> hgfedcba
2307 void tcg_gen_bswap64_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2309 if (TCG_TARGET_REG_BITS
== 32) {
2311 t0
= tcg_temp_ebb_new_i32();
2312 t1
= tcg_temp_ebb_new_i32();
2314 tcg_gen_bswap32_i32(t0
, TCGV_LOW(arg
));
2315 tcg_gen_bswap32_i32(t1
, TCGV_HIGH(arg
));
2316 tcg_gen_mov_i32(TCGV_LOW(ret
), t1
);
2317 tcg_gen_mov_i32(TCGV_HIGH(ret
), t0
);
2318 tcg_temp_free_i32(t0
);
2319 tcg_temp_free_i32(t1
);
2320 } else if (TCG_TARGET_HAS_bswap64_i64
) {
2321 tcg_gen_op3i_i64(INDEX_op_bswap64_i64
, ret
, arg
, 0);
2323 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2324 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
2325 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
2327 /* arg = abcdefgh */
2328 tcg_gen_movi_i64(t2
, 0x00ff00ff00ff00ffull
);
2329 tcg_gen_shri_i64(t0
, arg
, 8); /* t0 = .abcdefg */
2330 tcg_gen_and_i64(t1
, arg
, t2
); /* t1 = .b.d.f.h */
2331 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = .a.c.e.g */
2332 tcg_gen_shli_i64(t1
, t1
, 8); /* t1 = b.d.f.h. */
2333 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = badcfehg */
2335 tcg_gen_movi_i64(t2
, 0x0000ffff0000ffffull
);
2336 tcg_gen_shri_i64(t0
, ret
, 16); /* t0 = ..badcfe */
2337 tcg_gen_and_i64(t1
, ret
, t2
); /* t1 = ..dc..hg */
2338 tcg_gen_and_i64(t0
, t0
, t2
); /* t0 = ..ba..fe */
2339 tcg_gen_shli_i64(t1
, t1
, 16); /* t1 = dc..hg.. */
2340 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = dcbahgfe */
2342 tcg_gen_shri_i64(t0
, ret
, 32); /* t0 = ....dcba */
2343 tcg_gen_shli_i64(t1
, ret
, 32); /* t1 = hgfe.... */
2344 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = hgfedcba */
2346 tcg_temp_free_i64(t0
);
2347 tcg_temp_free_i64(t1
);
2348 tcg_temp_free_i64(t2
);
2353 * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
2354 * See also include/qemu/bitops.h, hswap64.
2356 * Byte pattern: abcdefgh -> ghefcdab
2358 void tcg_gen_hswap_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2360 uint64_t m
= 0x0000ffff0000ffffull
;
2361 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2362 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
2364 /* arg = abcdefgh */
2365 tcg_gen_rotli_i64(t1
, arg
, 32); /* t1 = efghabcd */
2366 tcg_gen_andi_i64(t0
, t1
, m
); /* t0 = ..gh..cd */
2367 tcg_gen_shli_i64(t0
, t0
, 16); /* t0 = gh..cd.. */
2368 tcg_gen_shri_i64(t1
, t1
, 16); /* t1 = ..efghab */
2369 tcg_gen_andi_i64(t1
, t1
, m
); /* t1 = ..ef..ab */
2370 tcg_gen_or_i64(ret
, t0
, t1
); /* ret = ghefcdab */
2372 tcg_temp_free_i64(t0
);
2373 tcg_temp_free_i64(t1
);
2377 * wswap_i64: Swap 32-bit words within a 64-bit value.
2379 * Byte pattern: abcdefgh -> efghabcd
2381 void tcg_gen_wswap_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2383 /* Swapping 2 32-bit elements is a rotate. */
2384 tcg_gen_rotli_i64(ret
, arg
, 32);
2387 void tcg_gen_not_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2389 if (TCG_TARGET_REG_BITS
== 32) {
2390 tcg_gen_not_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
2391 tcg_gen_not_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
2392 } else if (TCG_TARGET_HAS_not_i64
) {
2393 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg
);
2395 tcg_gen_xori_i64(ret
, arg
, -1);
2399 void tcg_gen_andc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2401 if (TCG_TARGET_REG_BITS
== 32) {
2402 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
2403 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
2404 } else if (TCG_TARGET_HAS_andc_i64
) {
2405 tcg_gen_op3_i64(INDEX_op_andc_i64
, ret
, arg1
, arg2
);
2407 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2408 tcg_gen_not_i64(t0
, arg2
);
2409 tcg_gen_and_i64(ret
, arg1
, t0
);
2410 tcg_temp_free_i64(t0
);
2414 void tcg_gen_eqv_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2416 if (TCG_TARGET_REG_BITS
== 32) {
2417 tcg_gen_eqv_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
2418 tcg_gen_eqv_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
2419 } else if (TCG_TARGET_HAS_eqv_i64
) {
2420 tcg_gen_op3_i64(INDEX_op_eqv_i64
, ret
, arg1
, arg2
);
2422 tcg_gen_xor_i64(ret
, arg1
, arg2
);
2423 tcg_gen_not_i64(ret
, ret
);
2427 void tcg_gen_nand_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2429 if (TCG_TARGET_REG_BITS
== 32) {
2430 tcg_gen_nand_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
2431 tcg_gen_nand_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
2432 } else if (TCG_TARGET_HAS_nand_i64
) {
2433 tcg_gen_op3_i64(INDEX_op_nand_i64
, ret
, arg1
, arg2
);
2435 tcg_gen_and_i64(ret
, arg1
, arg2
);
2436 tcg_gen_not_i64(ret
, ret
);
2440 void tcg_gen_nor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2442 if (TCG_TARGET_REG_BITS
== 32) {
2443 tcg_gen_nor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
2444 tcg_gen_nor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
2445 } else if (TCG_TARGET_HAS_nor_i64
) {
2446 tcg_gen_op3_i64(INDEX_op_nor_i64
, ret
, arg1
, arg2
);
2448 tcg_gen_or_i64(ret
, arg1
, arg2
);
2449 tcg_gen_not_i64(ret
, ret
);
2453 void tcg_gen_orc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2455 if (TCG_TARGET_REG_BITS
== 32) {
2456 tcg_gen_orc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
2457 tcg_gen_orc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
2458 } else if (TCG_TARGET_HAS_orc_i64
) {
2459 tcg_gen_op3_i64(INDEX_op_orc_i64
, ret
, arg1
, arg2
);
2461 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2462 tcg_gen_not_i64(t0
, arg2
);
2463 tcg_gen_or_i64(ret
, arg1
, t0
);
2464 tcg_temp_free_i64(t0
);
2468 void tcg_gen_clz_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2470 if (TCG_TARGET_HAS_clz_i64
) {
2471 tcg_gen_op3_i64(INDEX_op_clz_i64
, ret
, arg1
, arg2
);
2473 gen_helper_clz_i64(ret
, arg1
, arg2
);
2477 void tcg_gen_clzi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, uint64_t arg2
)
2479 if (TCG_TARGET_REG_BITS
== 32
2480 && TCG_TARGET_HAS_clz_i32
2481 && arg2
<= 0xffffffffu
) {
2482 TCGv_i32 t
= tcg_temp_ebb_new_i32();
2483 tcg_gen_clzi_i32(t
, TCGV_LOW(arg1
), arg2
- 32);
2484 tcg_gen_addi_i32(t
, t
, 32);
2485 tcg_gen_clz_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), t
);
2486 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2487 tcg_temp_free_i32(t
);
2489 tcg_gen_clz_i64(ret
, arg1
, tcg_constant_i64(arg2
));
2493 void tcg_gen_ctz_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2495 if (TCG_TARGET_HAS_ctz_i64
) {
2496 tcg_gen_op3_i64(INDEX_op_ctz_i64
, ret
, arg1
, arg2
);
2497 } else if (TCG_TARGET_HAS_ctpop_i64
|| TCG_TARGET_HAS_clz_i64
) {
2498 TCGv_i64 z
, t
= tcg_temp_ebb_new_i64();
2500 if (TCG_TARGET_HAS_ctpop_i64
) {
2501 tcg_gen_subi_i64(t
, arg1
, 1);
2502 tcg_gen_andc_i64(t
, t
, arg1
);
2503 tcg_gen_ctpop_i64(t
, t
);
2505 /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
2506 tcg_gen_neg_i64(t
, arg1
);
2507 tcg_gen_and_i64(t
, t
, arg1
);
2508 tcg_gen_clzi_i64(t
, t
, 64);
2509 tcg_gen_xori_i64(t
, t
, 63);
2511 z
= tcg_constant_i64(0);
2512 tcg_gen_movcond_i64(TCG_COND_EQ
, ret
, arg1
, z
, arg2
, t
);
2513 tcg_temp_free_i64(t
);
2514 tcg_temp_free_i64(z
);
2516 gen_helper_ctz_i64(ret
, arg1
, arg2
);
2520 void tcg_gen_ctzi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, uint64_t arg2
)
2522 if (TCG_TARGET_REG_BITS
== 32
2523 && TCG_TARGET_HAS_ctz_i32
2524 && arg2
<= 0xffffffffu
) {
2525 TCGv_i32 t32
= tcg_temp_ebb_new_i32();
2526 tcg_gen_ctzi_i32(t32
, TCGV_HIGH(arg1
), arg2
- 32);
2527 tcg_gen_addi_i32(t32
, t32
, 32);
2528 tcg_gen_ctz_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), t32
);
2529 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2530 tcg_temp_free_i32(t32
);
2531 } else if (!TCG_TARGET_HAS_ctz_i64
2532 && TCG_TARGET_HAS_ctpop_i64
2534 /* This equivalence has the advantage of not requiring a fixup. */
2535 TCGv_i64 t
= tcg_temp_ebb_new_i64();
2536 tcg_gen_subi_i64(t
, arg1
, 1);
2537 tcg_gen_andc_i64(t
, t
, arg1
);
2538 tcg_gen_ctpop_i64(ret
, t
);
2539 tcg_temp_free_i64(t
);
2541 tcg_gen_ctz_i64(ret
, arg1
, tcg_constant_i64(arg2
));
2545 void tcg_gen_clrsb_i64(TCGv_i64 ret
, TCGv_i64 arg
)
2547 if (TCG_TARGET_HAS_clz_i64
|| TCG_TARGET_HAS_clz_i32
) {
2548 TCGv_i64 t
= tcg_temp_ebb_new_i64();
2549 tcg_gen_sari_i64(t
, arg
, 63);
2550 tcg_gen_xor_i64(t
, t
, arg
);
2551 tcg_gen_clzi_i64(t
, t
, 64);
2552 tcg_gen_subi_i64(ret
, t
, 1);
2553 tcg_temp_free_i64(t
);
2555 gen_helper_clrsb_i64(ret
, arg
);
2559 void tcg_gen_ctpop_i64(TCGv_i64 ret
, TCGv_i64 arg1
)
2561 if (TCG_TARGET_HAS_ctpop_i64
) {
2562 tcg_gen_op2_i64(INDEX_op_ctpop_i64
, ret
, arg1
);
2563 } else if (TCG_TARGET_REG_BITS
== 32 && TCG_TARGET_HAS_ctpop_i32
) {
2564 tcg_gen_ctpop_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
2565 tcg_gen_ctpop_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
2566 tcg_gen_add_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), TCGV_HIGH(ret
));
2567 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2569 gen_helper_ctpop_i64(ret
, arg1
);
2573 void tcg_gen_rotl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2575 if (TCG_TARGET_HAS_rot_i64
) {
2576 tcg_gen_op3_i64(INDEX_op_rotl_i64
, ret
, arg1
, arg2
);
2579 t0
= tcg_temp_ebb_new_i64();
2580 t1
= tcg_temp_ebb_new_i64();
2581 tcg_gen_shl_i64(t0
, arg1
, arg2
);
2582 tcg_gen_subfi_i64(t1
, 64, arg2
);
2583 tcg_gen_shr_i64(t1
, arg1
, t1
);
2584 tcg_gen_or_i64(ret
, t0
, t1
);
2585 tcg_temp_free_i64(t0
);
2586 tcg_temp_free_i64(t1
);
2590 void tcg_gen_rotli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
2592 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
2593 /* some cases can be optimized here */
2595 tcg_gen_mov_i64(ret
, arg1
);
2596 } else if (TCG_TARGET_HAS_rot_i64
) {
2597 tcg_gen_rotl_i64(ret
, arg1
, tcg_constant_i64(arg2
));
2600 t0
= tcg_temp_ebb_new_i64();
2601 t1
= tcg_temp_ebb_new_i64();
2602 tcg_gen_shli_i64(t0
, arg1
, arg2
);
2603 tcg_gen_shri_i64(t1
, arg1
, 64 - arg2
);
2604 tcg_gen_or_i64(ret
, t0
, t1
);
2605 tcg_temp_free_i64(t0
);
2606 tcg_temp_free_i64(t1
);
2610 void tcg_gen_rotr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2612 if (TCG_TARGET_HAS_rot_i64
) {
2613 tcg_gen_op3_i64(INDEX_op_rotr_i64
, ret
, arg1
, arg2
);
2616 t0
= tcg_temp_ebb_new_i64();
2617 t1
= tcg_temp_ebb_new_i64();
2618 tcg_gen_shr_i64(t0
, arg1
, arg2
);
2619 tcg_gen_subfi_i64(t1
, 64, arg2
);
2620 tcg_gen_shl_i64(t1
, arg1
, t1
);
2621 tcg_gen_or_i64(ret
, t0
, t1
);
2622 tcg_temp_free_i64(t0
);
2623 tcg_temp_free_i64(t1
);
2627 void tcg_gen_rotri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
2629 tcg_debug_assert(arg2
>= 0 && arg2
< 64);
2630 /* some cases can be optimized here */
2632 tcg_gen_mov_i64(ret
, arg1
);
2634 tcg_gen_rotli_i64(ret
, arg1
, 64 - arg2
);
2638 void tcg_gen_deposit_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
,
2639 unsigned int ofs
, unsigned int len
)
2644 tcg_debug_assert(ofs
< 64);
2645 tcg_debug_assert(len
> 0);
2646 tcg_debug_assert(len
<= 64);
2647 tcg_debug_assert(ofs
+ len
<= 64);
2650 tcg_gen_mov_i64(ret
, arg2
);
2653 if (TCG_TARGET_HAS_deposit_i64
&& TCG_TARGET_deposit_i64_valid(ofs
, len
)) {
2654 tcg_gen_op5ii_i64(INDEX_op_deposit_i64
, ret
, arg1
, arg2
, ofs
, len
);
2658 if (TCG_TARGET_REG_BITS
== 32) {
2660 tcg_gen_deposit_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
),
2661 TCGV_LOW(arg2
), ofs
- 32, len
);
2662 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
2665 if (ofs
+ len
<= 32) {
2666 tcg_gen_deposit_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
),
2667 TCGV_LOW(arg2
), ofs
, len
);
2668 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
2673 t1
= tcg_temp_ebb_new_i64();
2675 if (TCG_TARGET_HAS_extract2_i64
) {
2676 if (ofs
+ len
== 64) {
2677 tcg_gen_shli_i64(t1
, arg1
, len
);
2678 tcg_gen_extract2_i64(ret
, t1
, arg2
, len
);
2682 tcg_gen_extract2_i64(ret
, arg1
, arg2
, len
);
2683 tcg_gen_rotli_i64(ret
, ret
, len
);
2688 mask
= (1ull << len
) - 1;
2689 if (ofs
+ len
< 64) {
2690 tcg_gen_andi_i64(t1
, arg2
, mask
);
2691 tcg_gen_shli_i64(t1
, t1
, ofs
);
2693 tcg_gen_shli_i64(t1
, arg2
, ofs
);
2695 tcg_gen_andi_i64(ret
, arg1
, ~(mask
<< ofs
));
2696 tcg_gen_or_i64(ret
, ret
, t1
);
2698 tcg_temp_free_i64(t1
);
2701 void tcg_gen_deposit_z_i64(TCGv_i64 ret
, TCGv_i64 arg
,
2702 unsigned int ofs
, unsigned int len
)
2704 tcg_debug_assert(ofs
< 64);
2705 tcg_debug_assert(len
> 0);
2706 tcg_debug_assert(len
<= 64);
2707 tcg_debug_assert(ofs
+ len
<= 64);
2709 if (ofs
+ len
== 64) {
2710 tcg_gen_shli_i64(ret
, arg
, ofs
);
2711 } else if (ofs
== 0) {
2712 tcg_gen_andi_i64(ret
, arg
, (1ull << len
) - 1);
2713 } else if (TCG_TARGET_HAS_deposit_i64
2714 && TCG_TARGET_deposit_i64_valid(ofs
, len
)) {
2715 TCGv_i64 zero
= tcg_constant_i64(0);
2716 tcg_gen_op5ii_i64(INDEX_op_deposit_i64
, ret
, zero
, arg
, ofs
, len
);
2718 if (TCG_TARGET_REG_BITS
== 32) {
2720 tcg_gen_deposit_z_i32(TCGV_HIGH(ret
), TCGV_LOW(arg
),
2722 tcg_gen_movi_i32(TCGV_LOW(ret
), 0);
2725 if (ofs
+ len
<= 32) {
2726 tcg_gen_deposit_z_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), ofs
, len
);
2727 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2731 /* To help two-operand hosts we prefer to zero-extend first,
2732 which allows ARG to stay live. */
2735 if (TCG_TARGET_HAS_ext32u_i64
) {
2736 tcg_gen_ext32u_i64(ret
, arg
);
2737 tcg_gen_shli_i64(ret
, ret
, ofs
);
2742 if (TCG_TARGET_HAS_ext16u_i64
) {
2743 tcg_gen_ext16u_i64(ret
, arg
);
2744 tcg_gen_shli_i64(ret
, ret
, ofs
);
2749 if (TCG_TARGET_HAS_ext8u_i64
) {
2750 tcg_gen_ext8u_i64(ret
, arg
);
2751 tcg_gen_shli_i64(ret
, ret
, ofs
);
2756 /* Otherwise prefer zero-extension over AND for code size. */
2757 switch (ofs
+ len
) {
2759 if (TCG_TARGET_HAS_ext32u_i64
) {
2760 tcg_gen_shli_i64(ret
, arg
, ofs
);
2761 tcg_gen_ext32u_i64(ret
, ret
);
2766 if (TCG_TARGET_HAS_ext16u_i64
) {
2767 tcg_gen_shli_i64(ret
, arg
, ofs
);
2768 tcg_gen_ext16u_i64(ret
, ret
);
2773 if (TCG_TARGET_HAS_ext8u_i64
) {
2774 tcg_gen_shli_i64(ret
, arg
, ofs
);
2775 tcg_gen_ext8u_i64(ret
, ret
);
2780 tcg_gen_andi_i64(ret
, arg
, (1ull << len
) - 1);
2781 tcg_gen_shli_i64(ret
, ret
, ofs
);
2785 void tcg_gen_extract_i64(TCGv_i64 ret
, TCGv_i64 arg
,
2786 unsigned int ofs
, unsigned int len
)
2788 tcg_debug_assert(ofs
< 64);
2789 tcg_debug_assert(len
> 0);
2790 tcg_debug_assert(len
<= 64);
2791 tcg_debug_assert(ofs
+ len
<= 64);
2793 /* Canonicalize certain special cases, even if extract is supported. */
2794 if (ofs
+ len
== 64) {
2795 tcg_gen_shri_i64(ret
, arg
, 64 - len
);
2799 tcg_gen_andi_i64(ret
, arg
, (1ull << len
) - 1);
2803 if (TCG_TARGET_REG_BITS
== 32) {
2804 /* Look for a 32-bit extract within one of the two words. */
2806 tcg_gen_extract_i32(TCGV_LOW(ret
), TCGV_HIGH(arg
), ofs
- 32, len
);
2807 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2810 if (ofs
+ len
<= 32) {
2811 tcg_gen_extract_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), ofs
, len
);
2812 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2815 /* The field is split across two words. One double-word
2816 shift is better than two double-word shifts. */
2820 if (TCG_TARGET_HAS_extract_i64
2821 && TCG_TARGET_extract_i64_valid(ofs
, len
)) {
2822 tcg_gen_op4ii_i64(INDEX_op_extract_i64
, ret
, arg
, ofs
, len
);
2826 /* Assume that zero-extension, if available, is cheaper than a shift. */
2827 switch (ofs
+ len
) {
2829 if (TCG_TARGET_HAS_ext32u_i64
) {
2830 tcg_gen_ext32u_i64(ret
, arg
);
2831 tcg_gen_shri_i64(ret
, ret
, ofs
);
2836 if (TCG_TARGET_HAS_ext16u_i64
) {
2837 tcg_gen_ext16u_i64(ret
, arg
);
2838 tcg_gen_shri_i64(ret
, ret
, ofs
);
2843 if (TCG_TARGET_HAS_ext8u_i64
) {
2844 tcg_gen_ext8u_i64(ret
, arg
);
2845 tcg_gen_shri_i64(ret
, ret
, ofs
);
2851 /* ??? Ideally we'd know what values are available for immediate AND.
2852 Assume that 8 bits are available, plus the special cases of 16 and 32,
2853 so that we get ext8u, ext16u, and ext32u. */
2855 case 1 ... 8: case 16: case 32:
2857 tcg_gen_shri_i64(ret
, arg
, ofs
);
2858 tcg_gen_andi_i64(ret
, ret
, (1ull << len
) - 1);
2861 tcg_gen_shli_i64(ret
, arg
, 64 - len
- ofs
);
2862 tcg_gen_shri_i64(ret
, ret
, 64 - len
);
2867 void tcg_gen_sextract_i64(TCGv_i64 ret
, TCGv_i64 arg
,
2868 unsigned int ofs
, unsigned int len
)
2870 tcg_debug_assert(ofs
< 64);
2871 tcg_debug_assert(len
> 0);
2872 tcg_debug_assert(len
<= 64);
2873 tcg_debug_assert(ofs
+ len
<= 64);
2875 /* Canonicalize certain special cases, even if sextract is supported. */
2876 if (ofs
+ len
== 64) {
2877 tcg_gen_sari_i64(ret
, arg
, 64 - len
);
2883 tcg_gen_ext32s_i64(ret
, arg
);
2886 tcg_gen_ext16s_i64(ret
, arg
);
2889 tcg_gen_ext8s_i64(ret
, arg
);
2894 if (TCG_TARGET_REG_BITS
== 32) {
2895 /* Look for a 32-bit extract within one of the two words. */
2897 tcg_gen_sextract_i32(TCGV_LOW(ret
), TCGV_HIGH(arg
), ofs
- 32, len
);
2898 } else if (ofs
+ len
<= 32) {
2899 tcg_gen_sextract_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), ofs
, len
);
2900 } else if (ofs
== 0) {
2901 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
2902 tcg_gen_sextract_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
), 0, len
- 32);
2904 } else if (len
> 32) {
2905 TCGv_i32 t
= tcg_temp_ebb_new_i32();
2906 /* Extract the bits for the high word normally. */
2907 tcg_gen_sextract_i32(t
, TCGV_HIGH(arg
), ofs
+ 32, len
- 32);
2908 /* Shift the field down for the low part. */
2909 tcg_gen_shri_i64(ret
, arg
, ofs
);
2910 /* Overwrite the shift into the high part. */
2911 tcg_gen_mov_i32(TCGV_HIGH(ret
), t
);
2912 tcg_temp_free_i32(t
);
2915 /* Shift the field down for the low part, such that the
2916 field sits at the MSB. */
2917 tcg_gen_shri_i64(ret
, arg
, ofs
+ len
- 32);
2918 /* Shift the field down from the MSB, sign extending. */
2919 tcg_gen_sari_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), 32 - len
);
2921 /* Sign-extend the field from 32 bits. */
2922 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2926 if (TCG_TARGET_HAS_sextract_i64
2927 && TCG_TARGET_extract_i64_valid(ofs
, len
)) {
2928 tcg_gen_op4ii_i64(INDEX_op_sextract_i64
, ret
, arg
, ofs
, len
);
2932 /* Assume that sign-extension, if available, is cheaper than a shift. */
2933 switch (ofs
+ len
) {
2935 if (TCG_TARGET_HAS_ext32s_i64
) {
2936 tcg_gen_ext32s_i64(ret
, arg
);
2937 tcg_gen_sari_i64(ret
, ret
, ofs
);
2942 if (TCG_TARGET_HAS_ext16s_i64
) {
2943 tcg_gen_ext16s_i64(ret
, arg
);
2944 tcg_gen_sari_i64(ret
, ret
, ofs
);
2949 if (TCG_TARGET_HAS_ext8s_i64
) {
2950 tcg_gen_ext8s_i64(ret
, arg
);
2951 tcg_gen_sari_i64(ret
, ret
, ofs
);
2958 if (TCG_TARGET_HAS_ext32s_i64
) {
2959 tcg_gen_shri_i64(ret
, arg
, ofs
);
2960 tcg_gen_ext32s_i64(ret
, ret
);
2965 if (TCG_TARGET_HAS_ext16s_i64
) {
2966 tcg_gen_shri_i64(ret
, arg
, ofs
);
2967 tcg_gen_ext16s_i64(ret
, ret
);
2972 if (TCG_TARGET_HAS_ext8s_i64
) {
2973 tcg_gen_shri_i64(ret
, arg
, ofs
);
2974 tcg_gen_ext8s_i64(ret
, ret
);
2979 tcg_gen_shli_i64(ret
, arg
, 64 - len
- ofs
);
2980 tcg_gen_sari_i64(ret
, ret
, 64 - len
);
2984 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2985 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2987 void tcg_gen_extract2_i64(TCGv_i64 ret
, TCGv_i64 al
, TCGv_i64 ah
,
2990 tcg_debug_assert(ofs
<= 64);
2992 tcg_gen_mov_i64(ret
, al
);
2993 } else if (ofs
== 64) {
2994 tcg_gen_mov_i64(ret
, ah
);
2995 } else if (al
== ah
) {
2996 tcg_gen_rotri_i64(ret
, al
, ofs
);
2997 } else if (TCG_TARGET_HAS_extract2_i64
) {
2998 tcg_gen_op4i_i64(INDEX_op_extract2_i64
, ret
, al
, ah
, ofs
);
3000 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
3001 tcg_gen_shri_i64(t0
, al
, ofs
);
3002 tcg_gen_deposit_i64(ret
, t0
, ah
, 64 - ofs
, ofs
);
3003 tcg_temp_free_i64(t0
);
3007 void tcg_gen_movcond_i64(TCGCond cond
, TCGv_i64 ret
, TCGv_i64 c1
,
3008 TCGv_i64 c2
, TCGv_i64 v1
, TCGv_i64 v2
)
3010 if (cond
== TCG_COND_ALWAYS
) {
3011 tcg_gen_mov_i64(ret
, v1
);
3012 } else if (cond
== TCG_COND_NEVER
) {
3013 tcg_gen_mov_i64(ret
, v2
);
3014 } else if (TCG_TARGET_REG_BITS
== 32) {
3015 TCGv_i32 t0
= tcg_temp_ebb_new_i32();
3016 TCGv_i32 t1
= tcg_temp_ebb_new_i32();
3017 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, t0
,
3018 TCGV_LOW(c1
), TCGV_HIGH(c1
),
3019 TCGV_LOW(c2
), TCGV_HIGH(c2
), cond
);
3021 if (TCG_TARGET_HAS_movcond_i32
) {
3022 tcg_gen_movi_i32(t1
, 0);
3023 tcg_gen_movcond_i32(TCG_COND_NE
, TCGV_LOW(ret
), t0
, t1
,
3024 TCGV_LOW(v1
), TCGV_LOW(v2
));
3025 tcg_gen_movcond_i32(TCG_COND_NE
, TCGV_HIGH(ret
), t0
, t1
,
3026 TCGV_HIGH(v1
), TCGV_HIGH(v2
));
3028 tcg_gen_neg_i32(t0
, t0
);
3030 tcg_gen_and_i32(t1
, TCGV_LOW(v1
), t0
);
3031 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(v2
), t0
);
3032 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), t1
);
3034 tcg_gen_and_i32(t1
, TCGV_HIGH(v1
), t0
);
3035 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(v2
), t0
);
3036 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(ret
), t1
);
3038 tcg_temp_free_i32(t0
);
3039 tcg_temp_free_i32(t1
);
3040 } else if (TCG_TARGET_HAS_movcond_i64
) {
3041 tcg_gen_op6i_i64(INDEX_op_movcond_i64
, ret
, c1
, c2
, v1
, v2
, cond
);
3043 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
3044 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
3045 tcg_gen_negsetcond_i64(cond
, t0
, c1
, c2
);
3046 tcg_gen_and_i64(t1
, v1
, t0
);
3047 tcg_gen_andc_i64(ret
, v2
, t0
);
3048 tcg_gen_or_i64(ret
, ret
, t1
);
3049 tcg_temp_free_i64(t0
);
3050 tcg_temp_free_i64(t1
);
3054 void tcg_gen_add2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 al
,
3055 TCGv_i64 ah
, TCGv_i64 bl
, TCGv_i64 bh
)
3057 if (TCG_TARGET_HAS_add2_i64
) {
3058 tcg_gen_op6_i64(INDEX_op_add2_i64
, rl
, rh
, al
, ah
, bl
, bh
);
3060 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
3061 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
3062 tcg_gen_add_i64(t0
, al
, bl
);
3063 tcg_gen_setcond_i64(TCG_COND_LTU
, t1
, t0
, al
);
3064 tcg_gen_add_i64(rh
, ah
, bh
);
3065 tcg_gen_add_i64(rh
, rh
, t1
);
3066 tcg_gen_mov_i64(rl
, t0
);
3067 tcg_temp_free_i64(t0
);
3068 tcg_temp_free_i64(t1
);
3072 void tcg_gen_sub2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 al
,
3073 TCGv_i64 ah
, TCGv_i64 bl
, TCGv_i64 bh
)
3075 if (TCG_TARGET_HAS_sub2_i64
) {
3076 tcg_gen_op6_i64(INDEX_op_sub2_i64
, rl
, rh
, al
, ah
, bl
, bh
);
3078 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
3079 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
3080 tcg_gen_sub_i64(t0
, al
, bl
);
3081 tcg_gen_setcond_i64(TCG_COND_LTU
, t1
, al
, bl
);
3082 tcg_gen_sub_i64(rh
, ah
, bh
);
3083 tcg_gen_sub_i64(rh
, rh
, t1
);
3084 tcg_gen_mov_i64(rl
, t0
);
3085 tcg_temp_free_i64(t0
);
3086 tcg_temp_free_i64(t1
);
3090 void tcg_gen_mulu2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 arg1
, TCGv_i64 arg2
)
3092 if (TCG_TARGET_HAS_mulu2_i64
) {
3093 tcg_gen_op4_i64(INDEX_op_mulu2_i64
, rl
, rh
, arg1
, arg2
);
3094 } else if (TCG_TARGET_HAS_muluh_i64
) {
3095 TCGv_i64 t
= tcg_temp_ebb_new_i64();
3096 tcg_gen_op3_i64(INDEX_op_mul_i64
, t
, arg1
, arg2
);
3097 tcg_gen_op3_i64(INDEX_op_muluh_i64
, rh
, arg1
, arg2
);
3098 tcg_gen_mov_i64(rl
, t
);
3099 tcg_temp_free_i64(t
);
3101 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
3102 tcg_gen_mul_i64(t0
, arg1
, arg2
);
3103 gen_helper_muluh_i64(rh
, arg1
, arg2
);
3104 tcg_gen_mov_i64(rl
, t0
);
3105 tcg_temp_free_i64(t0
);
3109 void tcg_gen_muls2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 arg1
, TCGv_i64 arg2
)
3111 if (TCG_TARGET_HAS_muls2_i64
) {
3112 tcg_gen_op4_i64(INDEX_op_muls2_i64
, rl
, rh
, arg1
, arg2
);
3113 } else if (TCG_TARGET_HAS_mulsh_i64
) {
3114 TCGv_i64 t
= tcg_temp_ebb_new_i64();
3115 tcg_gen_op3_i64(INDEX_op_mul_i64
, t
, arg1
, arg2
);
3116 tcg_gen_op3_i64(INDEX_op_mulsh_i64
, rh
, arg1
, arg2
);
3117 tcg_gen_mov_i64(rl
, t
);
3118 tcg_temp_free_i64(t
);
3119 } else if (TCG_TARGET_HAS_mulu2_i64
|| TCG_TARGET_HAS_muluh_i64
) {
3120 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
3121 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
3122 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
3123 TCGv_i64 t3
= tcg_temp_ebb_new_i64();
3124 tcg_gen_mulu2_i64(t0
, t1
, arg1
, arg2
);
3125 /* Adjust for negative inputs. */
3126 tcg_gen_sari_i64(t2
, arg1
, 63);
3127 tcg_gen_sari_i64(t3
, arg2
, 63);
3128 tcg_gen_and_i64(t2
, t2
, arg2
);
3129 tcg_gen_and_i64(t3
, t3
, arg1
);
3130 tcg_gen_sub_i64(rh
, t1
, t2
);
3131 tcg_gen_sub_i64(rh
, rh
, t3
);
3132 tcg_gen_mov_i64(rl
, t0
);
3133 tcg_temp_free_i64(t0
);
3134 tcg_temp_free_i64(t1
);
3135 tcg_temp_free_i64(t2
);
3136 tcg_temp_free_i64(t3
);
3138 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
3139 tcg_gen_mul_i64(t0
, arg1
, arg2
);
3140 gen_helper_mulsh_i64(rh
, arg1
, arg2
);
3141 tcg_gen_mov_i64(rl
, t0
);
3142 tcg_temp_free_i64(t0
);
3146 void tcg_gen_mulsu2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 arg1
, TCGv_i64 arg2
)
3148 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
3149 TCGv_i64 t1
= tcg_temp_ebb_new_i64();
3150 TCGv_i64 t2
= tcg_temp_ebb_new_i64();
3151 tcg_gen_mulu2_i64(t0
, t1
, arg1
, arg2
);
3152 /* Adjust for negative input for the signed arg1. */
3153 tcg_gen_sari_i64(t2
, arg1
, 63);
3154 tcg_gen_and_i64(t2
, t2
, arg2
);
3155 tcg_gen_sub_i64(rh
, t1
, t2
);
3156 tcg_gen_mov_i64(rl
, t0
);
3157 tcg_temp_free_i64(t0
);
3158 tcg_temp_free_i64(t1
);
3159 tcg_temp_free_i64(t2
);
3162 void tcg_gen_smin_i64(TCGv_i64 ret
, TCGv_i64 a
, TCGv_i64 b
)
3164 tcg_gen_movcond_i64(TCG_COND_LT
, ret
, a
, b
, a
, b
);
3167 void tcg_gen_umin_i64(TCGv_i64 ret
, TCGv_i64 a
, TCGv_i64 b
)
3169 tcg_gen_movcond_i64(TCG_COND_LTU
, ret
, a
, b
, a
, b
);
3172 void tcg_gen_smax_i64(TCGv_i64 ret
, TCGv_i64 a
, TCGv_i64 b
)
3174 tcg_gen_movcond_i64(TCG_COND_LT
, ret
, a
, b
, b
, a
);
3177 void tcg_gen_umax_i64(TCGv_i64 ret
, TCGv_i64 a
, TCGv_i64 b
)
3179 tcg_gen_movcond_i64(TCG_COND_LTU
, ret
, a
, b
, b
, a
);
3182 void tcg_gen_abs_i64(TCGv_i64 ret
, TCGv_i64 a
)
3184 TCGv_i64 t
= tcg_temp_ebb_new_i64();
3186 tcg_gen_sari_i64(t
, a
, 63);
3187 tcg_gen_xor_i64(ret
, a
, t
);
3188 tcg_gen_sub_i64(ret
, ret
, t
);
3189 tcg_temp_free_i64(t
);
3192 /* Size changing operations. */
3194 void tcg_gen_extrl_i64_i32(TCGv_i32 ret
, TCGv_i64 arg
)
3196 if (TCG_TARGET_REG_BITS
== 32) {
3197 tcg_gen_mov_i32(ret
, TCGV_LOW(arg
));
3198 } else if (TCG_TARGET_HAS_extr_i64_i32
) {
3199 tcg_gen_op2(INDEX_op_extrl_i64_i32
,
3200 tcgv_i32_arg(ret
), tcgv_i64_arg(arg
));
3202 tcg_gen_mov_i32(ret
, (TCGv_i32
)arg
);
3206 void tcg_gen_extrh_i64_i32(TCGv_i32 ret
, TCGv_i64 arg
)
3208 if (TCG_TARGET_REG_BITS
== 32) {
3209 tcg_gen_mov_i32(ret
, TCGV_HIGH(arg
));
3210 } else if (TCG_TARGET_HAS_extr_i64_i32
) {
3211 tcg_gen_op2(INDEX_op_extrh_i64_i32
,
3212 tcgv_i32_arg(ret
), tcgv_i64_arg(arg
));
3214 TCGv_i64 t
= tcg_temp_ebb_new_i64();
3215 tcg_gen_shri_i64(t
, arg
, 32);
3216 tcg_gen_mov_i32(ret
, (TCGv_i32
)t
);
3217 tcg_temp_free_i64(t
);
3221 void tcg_gen_extu_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
3223 if (TCG_TARGET_REG_BITS
== 32) {
3224 tcg_gen_mov_i32(TCGV_LOW(ret
), arg
);
3225 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
3227 tcg_gen_op2(INDEX_op_extu_i32_i64
,
3228 tcgv_i64_arg(ret
), tcgv_i32_arg(arg
));
3232 void tcg_gen_ext_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
3234 if (TCG_TARGET_REG_BITS
== 32) {
3235 tcg_gen_mov_i32(TCGV_LOW(ret
), arg
);
3236 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
3238 tcg_gen_op2(INDEX_op_ext_i32_i64
,
3239 tcgv_i64_arg(ret
), tcgv_i32_arg(arg
));
3243 void tcg_gen_concat_i32_i64(TCGv_i64 dest
, TCGv_i32 low
, TCGv_i32 high
)
3247 if (TCG_TARGET_REG_BITS
== 32) {
3248 tcg_gen_mov_i32(TCGV_LOW(dest
), low
);
3249 tcg_gen_mov_i32(TCGV_HIGH(dest
), high
);
3253 tmp
= tcg_temp_ebb_new_i64();
3254 /* These extensions are only needed for type correctness.
3255 We may be able to do better given target specific information. */
3256 tcg_gen_extu_i32_i64(tmp
, high
);
3257 tcg_gen_extu_i32_i64(dest
, low
);
3258 /* If deposit is available, use it. Otherwise use the extra
3259 knowledge that we have of the zero-extensions above. */
3260 if (TCG_TARGET_HAS_deposit_i64
&& TCG_TARGET_deposit_i64_valid(32, 32)) {
3261 tcg_gen_deposit_i64(dest
, dest
, tmp
, 32, 32);
3263 tcg_gen_shli_i64(tmp
, tmp
, 32);
3264 tcg_gen_or_i64(dest
, dest
, tmp
);
3266 tcg_temp_free_i64(tmp
);
3269 void tcg_gen_extr_i64_i32(TCGv_i32 lo
, TCGv_i32 hi
, TCGv_i64 arg
)
3271 if (TCG_TARGET_REG_BITS
== 32) {
3272 tcg_gen_mov_i32(lo
, TCGV_LOW(arg
));
3273 tcg_gen_mov_i32(hi
, TCGV_HIGH(arg
));
3275 tcg_gen_extrl_i64_i32(lo
, arg
);
3276 tcg_gen_extrh_i64_i32(hi
, arg
);
3280 void tcg_gen_extr32_i64(TCGv_i64 lo
, TCGv_i64 hi
, TCGv_i64 arg
)
3282 tcg_gen_ext32u_i64(lo
, arg
);
3283 tcg_gen_shri_i64(hi
, arg
, 32);
3286 void tcg_gen_concat32_i64(TCGv_i64 ret
, TCGv_i64 lo
, TCGv_i64 hi
)
3288 tcg_gen_deposit_i64(ret
, lo
, hi
, 32, 32);
3291 void tcg_gen_extr_i128_i64(TCGv_i64 lo
, TCGv_i64 hi
, TCGv_i128 arg
)
3293 tcg_gen_mov_i64(lo
, TCGV128_LOW(arg
));
3294 tcg_gen_mov_i64(hi
, TCGV128_HIGH(arg
));
3297 void tcg_gen_concat_i64_i128(TCGv_i128 ret
, TCGv_i64 lo
, TCGv_i64 hi
)
3299 tcg_gen_mov_i64(TCGV128_LOW(ret
), lo
);
3300 tcg_gen_mov_i64(TCGV128_HIGH(ret
), hi
);
3303 void tcg_gen_mov_i128(TCGv_i128 dst
, TCGv_i128 src
)
3306 tcg_gen_mov_i64(TCGV128_LOW(dst
), TCGV128_LOW(src
));
3307 tcg_gen_mov_i64(TCGV128_HIGH(dst
), TCGV128_HIGH(src
));
3311 void tcg_gen_ld_i128(TCGv_i128 ret
, TCGv_ptr base
, tcg_target_long offset
)
3313 if (HOST_BIG_ENDIAN
) {
3314 tcg_gen_ld_i64(TCGV128_HIGH(ret
), base
, offset
);
3315 tcg_gen_ld_i64(TCGV128_LOW(ret
), base
, offset
+ 8);
3317 tcg_gen_ld_i64(TCGV128_LOW(ret
), base
, offset
);
3318 tcg_gen_ld_i64(TCGV128_HIGH(ret
), base
, offset
+ 8);
3322 void tcg_gen_st_i128(TCGv_i128 val
, TCGv_ptr base
, tcg_target_long offset
)
3324 if (HOST_BIG_ENDIAN
) {
3325 tcg_gen_st_i64(TCGV128_HIGH(val
), base
, offset
);
3326 tcg_gen_st_i64(TCGV128_LOW(val
), base
, offset
+ 8);
3328 tcg_gen_st_i64(TCGV128_LOW(val
), base
, offset
);
3329 tcg_gen_st_i64(TCGV128_HIGH(val
), base
, offset
+ 8);
3333 /* QEMU specific operations. */
3335 void tcg_gen_exit_tb(const TranslationBlock
*tb
, unsigned idx
)
3338 * Let the jit code return the read-only version of the
3339 * TranslationBlock, so that we minimize the pc-relative
3340 * distance of the address of the exit_tb code to TB.
3341 * This will improve utilization of pc-relative address loads.
3343 * TODO: Move this to translator_loop, so that all const
3344 * TranslationBlock pointers refer to read-only memory.
3345 * This requires coordination with targets that do not use
3346 * the translator_loop.
3348 uintptr_t val
= (uintptr_t)tcg_splitwx_to_rx((void *)tb
) + idx
;
3351 tcg_debug_assert(idx
== 0);
3352 } else if (idx
<= TB_EXIT_IDXMAX
) {
3353 #ifdef CONFIG_DEBUG_TCG
3354 /* This is an exit following a goto_tb. Verify that we have
3355 seen this numbered exit before, via tcg_gen_goto_tb. */
3356 tcg_debug_assert(tcg_ctx
->goto_tb_issue_mask
& (1 << idx
));
3359 /* This is an exit via the exitreq label. */
3360 tcg_debug_assert(idx
== TB_EXIT_REQUESTED
);
3363 tcg_gen_op1i(INDEX_op_exit_tb
, val
);
3366 void tcg_gen_goto_tb(unsigned idx
)
3368 /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
3369 tcg_debug_assert(!(tcg_ctx
->gen_tb
->cflags
& CF_NO_GOTO_TB
));
3370 /* We only support two chained exits. */
3371 tcg_debug_assert(idx
<= TB_EXIT_IDXMAX
);
3372 #ifdef CONFIG_DEBUG_TCG
3373 /* Verify that we haven't seen this numbered exit before. */
3374 tcg_debug_assert((tcg_ctx
->goto_tb_issue_mask
& (1 << idx
)) == 0);
3375 tcg_ctx
->goto_tb_issue_mask
|= 1 << idx
;
3377 plugin_gen_disable_mem_helpers();
3378 tcg_gen_op1i(INDEX_op_goto_tb
, idx
);
3381 void tcg_gen_lookup_and_goto_ptr(void)
3385 if (tcg_ctx
->gen_tb
->cflags
& CF_NO_GOTO_PTR
) {
3386 tcg_gen_exit_tb(NULL
, 0);
3390 plugin_gen_disable_mem_helpers();
3391 ptr
= tcg_temp_ebb_new_ptr();
3392 gen_helper_lookup_tb_ptr(ptr
, tcg_env
);
3393 tcg_gen_op1i(INDEX_op_goto_ptr
, tcgv_ptr_arg(ptr
));
3394 tcg_temp_free_ptr(ptr
);