/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#include "qemu/osdep.h"
#include "tcg/tcg.h"
#include "tcg/tcg-temp-internal.h"
#include "tcg/tcg-op-common.h"
#include "exec/translation-block.h"
#include "exec/plugin-gen.h"
#include "tcg-internal.h"
/*
 * Encourage the compiler to tail-call to a function, rather than inlining.
 * Minimizes code size across 99 bottles of beer on the wall.
 */
#define NI  __attribute__((noinline))
void NI tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc, 1);
    op->args[0] = a1;
}

void NI tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc, 2);
    op->args[0] = a1;
    op->args[1] = a2;
}

void NI tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc, 3);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void NI tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc, 4);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void NI tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                    TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc, 5);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void NI tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                    TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc, 6);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}
/*
 * With CONFIG_DEBUG_TCG, tcgv_*_tmp via tcgv_*_arg is an out-of-line
 * assertion check.  Force tail calls to avoid too much code expansion.
 */
#ifdef CONFIG_DEBUG_TCG
# define DNI NI
#else
# define DNI
#endif
void DNI tcg_gen_op1_i32(TCGOpcode opc, TCGv_i32 a1)
{
    tcg_gen_op1(opc, tcgv_i32_arg(a1));
}

void DNI tcg_gen_op1_i64(TCGOpcode opc, TCGv_i64 a1)
{
    tcg_gen_op1(opc, tcgv_i64_arg(a1));
}

void DNI tcg_gen_op1i(TCGOpcode opc, TCGArg a1)
{
    tcg_gen_op1(opc, a1);
}

void DNI tcg_gen_op2_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2)
{
    tcg_gen_op2(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2));
}

void DNI tcg_gen_op2_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2)
{
    tcg_gen_op2(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2));
}

void DNI tcg_gen_op2i_i32(TCGOpcode opc, TCGv_i32 a1, TCGArg a2)
{
    tcg_gen_op2(opc, tcgv_i32_arg(a1), a2);
}

void DNI tcg_gen_op2i_i64(TCGOpcode opc, TCGv_i64 a1, TCGArg a2)
{
    tcg_gen_op2(opc, tcgv_i64_arg(a1), a2);
}

void DNI tcg_gen_op2ii(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    tcg_gen_op2(opc, a1, a2);
}
void DNI tcg_gen_op3_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2, TCGv_i32 a3)
{
    tcg_gen_op3(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), tcgv_i32_arg(a3));
}

void DNI tcg_gen_op3_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2, TCGv_i64 a3)
{
    tcg_gen_op3(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), tcgv_i64_arg(a3));
}

void DNI tcg_gen_op3i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2, TCGArg a3)
{
    tcg_gen_op3(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3);
}

void DNI tcg_gen_op3i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2, TCGArg a3)
{
    tcg_gen_op3(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3);
}

void DNI tcg_gen_ldst_op_i32(TCGOpcode opc, TCGv_i32 val,
                             TCGv_ptr base, TCGArg offset)
{
    tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_ptr_arg(base), offset);
}

void DNI tcg_gen_ldst_op_i64(TCGOpcode opc, TCGv_i64 val,
                             TCGv_ptr base, TCGArg offset)
{
    tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_ptr_arg(base), offset);
}
void DNI tcg_gen_op4_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                         TCGv_i32 a3, TCGv_i32 a4)
{
    tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4));
}

void DNI tcg_gen_op4_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                         TCGv_i64 a3, TCGv_i64 a4)
{
    tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4));
}

void DNI tcg_gen_op4i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                          TCGv_i32 a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), a4);
}

void DNI tcg_gen_op4i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                          TCGv_i64 a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), a4);
}

void DNI tcg_gen_op4ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                           TCGArg a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2), a3, a4);
}

void DNI tcg_gen_op4ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                           TCGArg a3, TCGArg a4)
{
    tcg_gen_op4(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2), a3, a4);
}
void DNI tcg_gen_op5_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                         TCGv_i32 a3, TCGv_i32 a4, TCGv_i32 a5)
{
    tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5));
}

void DNI tcg_gen_op5_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                         TCGv_i64 a3, TCGv_i64 a4, TCGv_i64 a5)
{
    tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5));
}

void DNI tcg_gen_op5i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                          TCGv_i32 a3, TCGv_i32 a4, TCGArg a5)
{
    tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), a5);
}

void DNI tcg_gen_op5i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                          TCGv_i64 a3, TCGv_i64 a4, TCGArg a5)
{
    tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), a5);
}

void DNI tcg_gen_op5ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                           TCGv_i32 a3, TCGArg a4, TCGArg a5)
{
    tcg_gen_op5(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), a4, a5);
}

void DNI tcg_gen_op5ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                           TCGv_i64 a3, TCGArg a4, TCGArg a5)
{
    tcg_gen_op5(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), a4, a5);
}
void DNI tcg_gen_op6_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2, TCGv_i32 a3,
                         TCGv_i32 a4, TCGv_i32 a5, TCGv_i32 a6)
{
    tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5),
                tcgv_i32_arg(a6));
}

void DNI tcg_gen_op6_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2, TCGv_i64 a3,
                         TCGv_i64 a4, TCGv_i64 a5, TCGv_i64 a6)
{
    tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5),
                tcgv_i64_arg(a6));
}

void DNI tcg_gen_op6i_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2, TCGv_i32 a3,
                          TCGv_i32 a4, TCGv_i32 a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), tcgv_i32_arg(a5), a6);
}

void DNI tcg_gen_op6i_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2, TCGv_i64 a3,
                          TCGv_i64 a4, TCGv_i64 a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), tcgv_i64_arg(a5), a6);
}

void DNI tcg_gen_op6ii_i32(TCGOpcode opc, TCGv_i32 a1, TCGv_i32 a2,
                           TCGv_i32 a3, TCGv_i32 a4, TCGArg a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2),
                tcgv_i32_arg(a3), tcgv_i32_arg(a4), a5, a6);
}

void DNI tcg_gen_op6ii_i64(TCGOpcode opc, TCGv_i64 a1, TCGv_i64 a2,
                           TCGv_i64 a3, TCGv_i64 a4, TCGArg a5, TCGArg a6)
{
    tcg_gen_op6(opc, tcgv_i64_arg(a1), tcgv_i64_arg(a2),
                tcgv_i64_arg(a3), tcgv_i64_arg(a4), a5, a6);
}
void gen_set_label(TCGLabel *l)
{
    l->present = 1;
    tcg_gen_op1(INDEX_op_set_label, label_arg(l));
}

static void add_last_as_label_use(TCGLabel *l)
{
    TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));

    u->op = tcg_last_op();
    QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
}

void tcg_gen_br(TCGLabel *l)
{
    tcg_gen_op1(INDEX_op_br, label_arg(l));
    add_last_as_label_use(l);
}
void tcg_gen_mb(TCGBar mb_type)
{
#ifdef CONFIG_USER_ONLY
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single cpu we have i/o threads running in
     * parallel, and lack of memory order can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (parallel) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}
void tcg_gen_plugin_cb_start(unsigned from, unsigned type, unsigned wr)
{
    tcg_gen_op3(INDEX_op_plugin_cb_start, from, type, wr);
}

void tcg_gen_plugin_cb_end(void)
{
    tcg_emit_op(INDEX_op_plugin_cb_end, 0);
}
/* 32 bit ops */

void tcg_gen_discard_i32(TCGv_i32 arg)
{
    tcg_gen_op1_i32(INDEX_op_discard, arg);
}

void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (ret != arg) {
        tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
    }
}

void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}

void tcg_gen_add_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_add_i32, ret, arg1, arg2);
}

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_sub_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sub_i32, ret, arg1, arg2);
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32. */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_neg_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_neg_i32) {
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg);
    } else {
        tcg_gen_subfi_i32(ret, 0, arg);
    }
}
void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_and_i32, ret, arg1, arg2);
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32. */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}
void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_or_i32, ret, arg1, arg2);
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_xor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_xor_i32, ret, arg1, arg2);
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32. */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_not_i32) {
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg);
    } else {
        tcg_gen_xori_i32(ret, arg, -1);
    }
}
void tcg_gen_shl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shl_i32, ret, arg1, arg2);
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_shr_i32, ret, arg1, arg2);
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_sar_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_sar_i32, ret, arg1, arg2);
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
        add_last_as_label_use(l);
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
                            TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i32) {
        tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
    } else {
        tcg_gen_setcond_i32(cond, ret, arg1, arg2);
        tcg_gen_neg_i32(ret, ret);
    }
}

void tcg_gen_negsetcondi_i32(TCGCond cond, TCGv_i32 ret,
                             TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_negsetcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
void tcg_gen_mul_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_op3_i32(INDEX_op_mul_i32, ret, arg1, arg2);
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
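
/*
 * Division is lowered in one of three ways: a native div opcode, the
 * two-operand div2 form (with the dividend sign- or zero-extended into
 * a second register), or an out-of-line helper when the host supports
 * neither.
 */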
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
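
/*
 * Without a native rem opcode, the remainder is recovered from division
 * via the identity r = a - (a / b) * b.
 */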
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}
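
/*
 * Without a native ctz, note that (arg - 1) & ~arg sets exactly the bits
 * below the least significant set bit, so a ctpop of that value counts
 * the trailing zeros.  Alternatively, arg & -arg isolates the lowest set
 * bit and clz locates it.  Either way, a final movcond substitutes arg2
 * for the arg == 0 case.
 */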
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it. */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup. */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
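
/*
 * clrsb (count leading redundant sign bits) reduces to clz: xor-ing the
 * value with its broadcast sign turns copies of the sign bit into leading
 * zeros, so the result is clz(arg ^ (arg >> 31)) - 1.
 */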
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}
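
/*
 * Without a native rotate, rotl(x, c) expands to
 * (x << c) | (x >> (32 - c)); subfi computes the complementary count.
 */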
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}
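
/*
 * The generic deposit fallback assembles
 * (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs), with
 * mask = (1 << len) - 1; the extract2 paths below handle the
 * field-at-an-edge cases more cheaply.
 */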
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live. */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size. */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
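
/*
 * The generic extract fallback is a pair of shifts: left by
 * 32 - len - ofs to discard the high bits, then right by 32 - len to
 * discard the low bits; cheaper zero-extensions are preferred when the
 * field ends at a byte or halfword boundary.
 */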
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u. */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
/*
 * Extract 32 bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
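
/*
 * Without a native movcond, compute a bitwise select:
 * ret = (v1 & m) | (v2 & ~m), where m = -(c1 cond c2) is all-ones when
 * the comparison is true and zero otherwise.
 */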
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_negsetcond_i32(cond, t0, c1, c2);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
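
/*
 * add2/sub2 produce a double-word result in two 32-bit halves.  Without
 * host support, the halves are concatenated into a 64-bit temporary,
 * the arithmetic is done there, and the result is split back out.
 */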
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
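
/*
 * Full 32x32->64 multiplies use the host's mulu2, or mul plus muluh,
 * or, on a 64-bit host, a widening multiply in a 64-bit temporary.
 * One of these must exist, hence qemu_build_not_reached() in the
 * final branch.
 */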
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        qemu_build_not_reached();
    }
}
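
/*
 * A signed double-word product can be derived from the unsigned one:
 * for each negative input, the other operand must be subtracted from
 * the high half of the unsigned result.
 */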
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
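
/*
 * mulsu2 multiplies a signed arg1 by an unsigned arg2; only arg1's
 * sign needs the high-half correction applied above for muls2.
 */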
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1. */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}
/*
 * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
 *
 * Byte pattern: xxab -> yyba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

        /* arg = ..ab (IZ) xxab (!IZ) */
        tcg_gen_shri_i32(t0, arg, 8);       /*  t0 = ...a (IZ) .xxa (!IZ) */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);      /*  t0 = ...a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);  /*  t1 = b... */
            tcg_gen_sari_i32(t1, t1, 16);   /*  t1 = ssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);     /*  t1 = ...b */
            tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = ..b. */
        } else {
            tcg_gen_shli_i32(t1, arg, 8);   /*  t1 = xab. */
        }

        tcg_gen_or_i32(ret, t0, t1);        /* ret = ..ba (OZ) */
                                            /*     = ssba (OS) */
                                            /*     = xaba (no flag) */
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
/*
 * bswap32_i32: 32-bit byte swap on a 32-bit value.
 *
 * Byte pattern: abcd -> dcba
 */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /*  arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*   t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*   t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*   t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*   t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /*  ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*   t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*   t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /*  ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
/*
 * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
 *
 * Byte pattern: abcd -> cdab
 */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}

void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8u_i32, ret, arg2, offset);
}

void tcg_gen_ld8s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld8s_i32, ret, arg2, offset);
}

void tcg_gen_ld16u_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16u_i32, ret, arg2, offset);
}

void tcg_gen_ld16s_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld16s_i32, ret, arg2, offset);
}

void tcg_gen_ld_i32(TCGv_i32 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_ld_i32, ret, arg2, offset);
}

void tcg_gen_st8_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st8_i32, arg1, arg2, offset);
}

void tcg_gen_st16_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st16_i32, arg1, arg2, offset);
}

void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ldst_op_i32(INDEX_op_st_i32, arg1, arg2, offset);
}
/* 64 bit ops */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op1_i64(INDEX_op_discard, arg);
    } else {
        tcg_gen_discard_i32(TCGV_LOW(arg));
        tcg_gen_discard_i32(TCGV_HIGH(arg));
    }
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (ret == arg) {
        return;
    }
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
    } else {
        TCGTemp *ts = tcgv_i64_temp(arg);

        /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
        if (ts->kind == TEMP_CONST) {
            tcg_gen_movi_i64(ret, ts->val);
        } else {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
        }
    }
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
    } else {
        tcg_gen_movi_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
    }
}
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    }
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /*
     * For 32-bit host, since arg2 and ret have different types,
     * they cannot be the same temporary -- no chance of overlap.
     */
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
    } else {
        tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
        tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
    }
}
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
    } else {
        tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
    }
}

void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
    } else {
        tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
    }
}

void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
    } else {
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    }
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
    } else if (HOST_BIG_ENDIAN) {
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
    } else {
        tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
        tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
    }
}
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                         TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    }
}
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
    } else {
        gen_helper_shl_i64(ret, arg1, arg2);
    }
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
    } else {
        gen_helper_shr_i64(ret, arg1, arg2);
    }
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
    } else {
        gen_helper_sar_i64(ret, arg1, arg2);
    }
}
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
        return;
    }

    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
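
/*
 * On a 32-bit host, a 64-bit immediate add or subtract becomes an
 * add2/sub2 of the operand's halves against the constant's low and
 * high 32 bits.
 */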
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64. */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_neg_i64) {
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
    } else {
        tcg_gen_subfi_i64(ret, 0, arg);
    }
}
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here. */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64. */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here. */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here. */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64. */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
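
/*
 * On a 32-bit host, a 64-bit shift by a constant is decomposed into
 * 32-bit operations on the two halves; extract2 (or a shift plus
 * deposit) stitches together the bits that cross between them.
 */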
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
        add_last_as_label_use(l);
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
        add_last_as_label_use(l);
    }
}
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
void tcg_gen_negsetcondi_i64(TCGCond cond, TCGv_i64 ret,
                             TCGv_i64 arg1, int64_t arg2)
{
    tcg_gen_negsetcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
}

void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
                            TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i64) {
        tcg_gen_op4i_i64(INDEX_op_negsetcond_i64, ret, arg1, arg2, cond);
    } else if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
        tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
    } else {
        tcg_gen_setcond_i64(cond, ret, arg1, arg2);
        tcg_gen_neg_i64(ret, ret);
    }
}
void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2087 void tcg_gen_div_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2089 if (TCG_TARGET_HAS_div_i64
) {
2090 tcg_gen_op3_i64(INDEX_op_div_i64
, ret
, arg1
, arg2
);
2091 } else if (TCG_TARGET_HAS_div2_i64
) {
2092 TCGv_i64 t0
= tcg_temp_ebb_new_i64();
2093 tcg_gen_sari_i64(t0
, arg1
, 63);
2094 tcg_gen_op5_i64(INDEX_op_div2_i64
, ret
, t0
, arg1
, t0
, arg2
);
2095 tcg_temp_free_i64(t0
);
2097 gen_helper_div_i64(ret
, arg1
, arg2
);

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
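
/*
 * The unsigned fallbacks below mirror the signed ones, with a constant
 * zero high half in place of the sign extension.
 */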

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, zero, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, zero, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
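
/*
 * The shift-pair fallbacks below sign-extend without dedicated opcodes:
 * shifting the field to the top of the word and arithmetic-shifting it
 * back, e.g. ext8s as (x << 56) >> 56, replicates the field's sign bit
 * across the upper bits.
 */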

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
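
/*
 * For the byte-swap helpers below: TCG_BSWAP_IZ asserts that the input is
 * already zero-extended beyond the swapped field, while TCG_BSWAP_OZ and
 * TCG_BSWAP_OS request a zero- or sign-extended result.  E.g. bswap16 with
 * TCG_BSWAP_OZ maps a low half-word of 0x1234 to a result of 0x3412.
 */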
/*
 * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
 *
 * Byte pattern: xxxxxxab -> yyyyyyba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

                                            /* arg = ......ab or xxxxxxab */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .......a or .xxxxxxa */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i64(t0, t0);      /*  t0 = .......a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i64(t1, arg, 56);  /*  t1 = b....... */
            tcg_gen_sari_i64(t1, t1, 48);   /*  t1 = ssssssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i64(t1, arg);     /*  t1 = .......b */
            tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = ......b. */
        } else {
            tcg_gen_shli_i64(t1, arg, 8);   /*  t1 = xxxxxab. */
        }

        tcg_gen_or_i64(ret, t0, t1);        /* ret = ......ba (OZ) */
                                            /*       ssssssba (OS) */
                                            /*       xxxxxaba (no flag) */
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

/*
 * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
 *
 * Byte pattern: xxxxabcd -> yyyydcba
 *
 * With TCG_BSWAP_IZ, x == zero, else undefined.
 * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
 */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba (OS) */
                                            /*       ....dcba (else) */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

/*
 * bswap64_i64: 64-bit byte swap on a 64-bit value.
 *
 * Byte pattern: abcdefgh -> hgfedcba
 */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}

/*
 * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
 * See also include/qemu/bitops.h, hswap64.
 *
 * Byte pattern: abcdefgh -> ghefcdab
 */
void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    uint64_t m = 0x0000ffff0000ffffull;
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
    tcg_gen_rotli_i64(t1, arg, 32);     /*  t1 = efghabcd */
    tcg_gen_andi_i64(t0, t1, m);        /*  t0 = ..gh..cd */
    tcg_gen_shli_i64(t0, t0, 16);       /*  t0 = gh..cd.. */
    tcg_gen_shri_i64(t1, t1, 16);       /*  t1 = ..efghab */
    tcg_gen_andi_i64(t1, t1, m);        /*  t1 = ..ef..ab */
    tcg_gen_or_i64(ret, t0, t1);        /* ret = ghefcdab */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

/*
 * wswap_i64: Swap 32-bit words within a 64-bit value.
 *
 * Byte pattern: abcdefgh -> efghabcd
 */
void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Swapping 2 32-bit elements is a rotate. */
    tcg_gen_rotli_i64(ret, arg, 32);
}

void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
}

void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_clz_i64) {
        tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
    } else {
        gen_helper_clz_i64(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
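
/*
 * Two standard ctz fallbacks.  With ctpop: ctz(x) == ctpop((x - 1) & ~x),
 * since decrementing turns the trailing zeros into ones and the andc keeps
 * only those, e.g. x = 0b101000 -> 0b000111, ctpop = 3; x == 0 yields
 * all-ones and thus 64.  With clz: x & -x isolates the lowest set bit,
 * whose index is 63 - clz(x & -x); as clz is then in [0, 63], that
 * subtraction can be done as an xori with 63.  The movcond supplies the
 * caller's arg2 for the x == 0 case.
 */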

void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_ctz_i64) {
        tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 z, t = tcg_temp_ebb_new_i64();

        if (TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i64(t, arg1, 1);
            tcg_gen_andc_i64(t, t, arg1);
            tcg_gen_ctpop_i64(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 64, don't fight it. */
            tcg_gen_neg_i64(t, arg1);
            tcg_gen_and_i64(t, t, arg1);
            tcg_gen_clzi_i64(t, t, 64);
            tcg_gen_xori_i64(t, t, 63);
        }
        z = tcg_constant_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i64(t);
        tcg_temp_free_i64(z);
    } else {
        gen_helper_ctz_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
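
/*
 * Count leading redundant sign bits via clz: xor-ing x with its sign
 * splat (x >> 63) turns copies of the sign bit into leading zeros, so
 * clrsb(x) == clzi(x ^ (x >> 63), 64) - 1, the -1 discounting the sign
 * bit itself.
 */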

void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t, arg, 63);
        tcg_gen_xor_i64(t, t, arg);
        tcg_gen_clzi_i64(t, t, 64);
        tcg_gen_subi_i64(ret, t, 1);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_clrsb_i64(ret, arg);
    }
}
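
/*
 * On a 32-bit host the population count splits exactly:
 * ctpop64(x) == ctpop32(x >> 32) + ctpop32((uint32_t)x).
 */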

void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i64) {
        tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        gen_helper_ctpop_i64(ret, arg1);
    }
}
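
/*
 * Without a rotate opcode, rotl(x, n) is composed as
 * (x << n) | (x >> (64 - n)), valid for 0 < n < 64; the immediate
 * variants below special-case n == 0, where the complementary shift
 * count would be out of range.
 */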

void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}
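
/*
 * The generic deposit fallback is mask-and-merge:
 *   ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs)
 * with mask = (1 << len) - 1; the extract2 paths instead build the
 * insertion from double-word shifts when the field touches bit 0 or
 * bit 63.
 */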

void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
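
/*
 * deposit_z is deposit into an all-zero arg1, so the merge step drops out
 * and only ((arg & mask) << ofs) remains; the special cases below pick
 * whichever of the shift and the mask is free.
 */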

void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live. */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size. */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}

void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported. */
    if (ofs + len == 64) {
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words. */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts. */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u. */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
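
/* sextract, below, mirrors extract with sign- in place of zero-extension. */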

void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported. */
    if (ofs + len == 64) {
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words. */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally.  Within the
               high word the field starts at OFS, just as it does in the
               low word, so the offset must stay in range for i32. */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs, len - 32);
            /* Shift the field down for the low part. */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part. */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB. */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending. */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits. */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift. */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}
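
/*
 * The fallback below synthesizes the funnel shift (ah:al) >> ofs as
 * (al >> ofs) | (ah << (64 - ofs)): the deposit places the low ofs bits
 * of ah immediately above the 64 - ofs surviving bits of al.
 */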
/*
 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
 */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i64) {
        tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}
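
/*
 * The final movcond fallback is a branchless bitwise select:
 * negsetcond builds an all-ones/all-zero mask m from the comparison,
 * and ret = (v1 & m) | (v2 & ~m).
 */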

void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_negsetcond_i64(cond, t0, c1, c2);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
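
/*
 * Without a host add2, the carry out of the low half is recovered by an
 * unsigned comparison: after t0 = al + bl, carry == (t0 < al), which is
 * what the TCG_COND_LTU setcond below computes.
 */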

void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
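
/* Likewise for sub2, where the borrow out of the low half is (al < bl). */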

void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
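
/*
 * When only an unsigned double-word multiply is available, the signed
 * high half is recovered with the identity
 *   smulh(a, b) == umulh(a, b) - (a < 0 ? b : 0) - (b < 0 ? a : 0);
 * the sari/and pairs below form the two conditional terms.  mulsu2
 * further down needs only the first term, its arg2 being unsigned.
 */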

void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs. */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();
    TCGv_i64 t2 = tcg_temp_ebb_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1. */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
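
/*
 * Branchless absolute value: with t = a >> 63 (arithmetic, so all-ones
 * for negative a), (a ^ t) - t negates negative inputs and leaves
 * non-negative ones unchanged.
 */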

void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
{
    TCGv_i64 t = tcg_temp_ebb_new_i64();

    tcg_gen_sari_i64(t, a, 63);
    tcg_gen_xor_i64(ret, a, t);
    tcg_gen_sub_i64(ret, ret, t);
    tcg_temp_free_i64(t);
}

/* Size changing operations. */

void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}

void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}

void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
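
/*
 * Conceptually dest = (uint64_t)low | ((uint64_t)high << 32); on a
 * 32-bit host it is just two register moves.
 */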

void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_ebb_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information. */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above. */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}

void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}

void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

void tcg_gen_concat32_i64(TCGv_i64 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_deposit_i64(ret, lo, hi, 32, 32);
}

void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}

void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}

void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
{
    if (dst != src) {
        tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
        tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
    }
}

void tcg_gen_ld_i128(TCGv_i128 ret, TCGv_ptr base, tcg_target_long offset)
{
    if (HOST_BIG_ENDIAN) {
        tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset);
        tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset + 8);
    } else {
        tcg_gen_ld_i64(TCGV128_LOW(ret), base, offset);
        tcg_gen_ld_i64(TCGV128_HIGH(ret), base, offset + 8);
    }
}

void tcg_gen_st_i128(TCGv_i128 val, TCGv_ptr base, tcg_target_long offset)
{
    if (HOST_BIG_ENDIAN) {
        tcg_gen_st_i64(TCGV128_HIGH(val), base, offset);
        tcg_gen_st_i64(TCGV128_LOW(val), base, offset + 8);
    } else {
        tcg_gen_st_i64(TCGV128_LOW(val), base, offset);
        tcg_gen_st_i64(TCGV128_HIGH(val), base, offset + 8);
    }
}

/* QEMU specific operations. */

void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb. */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label. */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}

void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits. */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before. */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}

void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    gen_helper_lookup_tb_ptr(ptr, tcg_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}