/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
25 #include "qemu/osdep.h"
26 #include "qemu-common.h"
28 #include "exec/exec-all.h"
31 #include "trace-tcg.h"
32 #include "trace/mem.h"
/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
/* On 64-bit hosts TCGV_LOW/TCGV_HIGH must never actually be called;
   referencing these undefined externs turns any such use into a link error. */
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif
44 /* Note that this is optimized for sequential allocation during translate.
45 Up to and including filling in the forward link immediately. We'll do
46 proper termination of the end of the list after we finish translation. */
48 static void tcg_emit_op(TCGContext
*ctx
, TCGOpcode opc
, int args
)
50 int oi
= ctx
->gen_next_op_idx
;
54 tcg_debug_assert(oi
< OPC_BUF_SIZE
);
55 ctx
->gen_op_buf
[0].prev
= oi
;
56 ctx
->gen_next_op_idx
= ni
;
58 ctx
->gen_op_buf
[oi
] = (TCGOp
){
66 void tcg_gen_op1(TCGContext
*ctx
, TCGOpcode opc
, TCGArg a1
)
68 int pi
= ctx
->gen_next_parm_idx
;
70 tcg_debug_assert(pi
+ 1 <= OPPARAM_BUF_SIZE
);
71 ctx
->gen_next_parm_idx
= pi
+ 1;
72 ctx
->gen_opparam_buf
[pi
] = a1
;
74 tcg_emit_op(ctx
, opc
, pi
);
77 void tcg_gen_op2(TCGContext
*ctx
, TCGOpcode opc
, TCGArg a1
, TCGArg a2
)
79 int pi
= ctx
->gen_next_parm_idx
;
81 tcg_debug_assert(pi
+ 2 <= OPPARAM_BUF_SIZE
);
82 ctx
->gen_next_parm_idx
= pi
+ 2;
83 ctx
->gen_opparam_buf
[pi
+ 0] = a1
;
84 ctx
->gen_opparam_buf
[pi
+ 1] = a2
;
86 tcg_emit_op(ctx
, opc
, pi
);
89 void tcg_gen_op3(TCGContext
*ctx
, TCGOpcode opc
, TCGArg a1
,
92 int pi
= ctx
->gen_next_parm_idx
;
94 tcg_debug_assert(pi
+ 3 <= OPPARAM_BUF_SIZE
);
95 ctx
->gen_next_parm_idx
= pi
+ 3;
96 ctx
->gen_opparam_buf
[pi
+ 0] = a1
;
97 ctx
->gen_opparam_buf
[pi
+ 1] = a2
;
98 ctx
->gen_opparam_buf
[pi
+ 2] = a3
;
100 tcg_emit_op(ctx
, opc
, pi
);
103 void tcg_gen_op4(TCGContext
*ctx
, TCGOpcode opc
, TCGArg a1
,
104 TCGArg a2
, TCGArg a3
, TCGArg a4
)
106 int pi
= ctx
->gen_next_parm_idx
;
108 tcg_debug_assert(pi
+ 4 <= OPPARAM_BUF_SIZE
);
109 ctx
->gen_next_parm_idx
= pi
+ 4;
110 ctx
->gen_opparam_buf
[pi
+ 0] = a1
;
111 ctx
->gen_opparam_buf
[pi
+ 1] = a2
;
112 ctx
->gen_opparam_buf
[pi
+ 2] = a3
;
113 ctx
->gen_opparam_buf
[pi
+ 3] = a4
;
115 tcg_emit_op(ctx
, opc
, pi
);
118 void tcg_gen_op5(TCGContext
*ctx
, TCGOpcode opc
, TCGArg a1
,
119 TCGArg a2
, TCGArg a3
, TCGArg a4
, TCGArg a5
)
121 int pi
= ctx
->gen_next_parm_idx
;
123 tcg_debug_assert(pi
+ 5 <= OPPARAM_BUF_SIZE
);
124 ctx
->gen_next_parm_idx
= pi
+ 5;
125 ctx
->gen_opparam_buf
[pi
+ 0] = a1
;
126 ctx
->gen_opparam_buf
[pi
+ 1] = a2
;
127 ctx
->gen_opparam_buf
[pi
+ 2] = a3
;
128 ctx
->gen_opparam_buf
[pi
+ 3] = a4
;
129 ctx
->gen_opparam_buf
[pi
+ 4] = a5
;
131 tcg_emit_op(ctx
, opc
, pi
);
134 void tcg_gen_op6(TCGContext
*ctx
, TCGOpcode opc
, TCGArg a1
, TCGArg a2
,
135 TCGArg a3
, TCGArg a4
, TCGArg a5
, TCGArg a6
)
137 int pi
= ctx
->gen_next_parm_idx
;
139 tcg_debug_assert(pi
+ 6 <= OPPARAM_BUF_SIZE
);
140 ctx
->gen_next_parm_idx
= pi
+ 6;
141 ctx
->gen_opparam_buf
[pi
+ 0] = a1
;
142 ctx
->gen_opparam_buf
[pi
+ 1] = a2
;
143 ctx
->gen_opparam_buf
[pi
+ 2] = a3
;
144 ctx
->gen_opparam_buf
[pi
+ 3] = a4
;
145 ctx
->gen_opparam_buf
[pi
+ 4] = a5
;
146 ctx
->gen_opparam_buf
[pi
+ 5] = a6
;
148 tcg_emit_op(ctx
, opc
, pi
);
151 void tcg_gen_mb(TCGBar mb_type
)
154 tcg_gen_op1(&tcg_ctx
, INDEX_op_mb
, mb_type
);
160 void tcg_gen_addi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
162 /* some cases can be optimized here */
164 tcg_gen_mov_i32(ret
, arg1
);
166 TCGv_i32 t0
= tcg_const_i32(arg2
);
167 tcg_gen_add_i32(ret
, arg1
, t0
);
168 tcg_temp_free_i32(t0
);
172 void tcg_gen_subfi_i32(TCGv_i32 ret
, int32_t arg1
, TCGv_i32 arg2
)
174 if (arg1
== 0 && TCG_TARGET_HAS_neg_i32
) {
175 /* Don't recurse with tcg_gen_neg_i32. */
176 tcg_gen_op2_i32(INDEX_op_neg_i32
, ret
, arg2
);
178 TCGv_i32 t0
= tcg_const_i32(arg1
);
179 tcg_gen_sub_i32(ret
, t0
, arg2
);
180 tcg_temp_free_i32(t0
);
184 void tcg_gen_subi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
186 /* some cases can be optimized here */
188 tcg_gen_mov_i32(ret
, arg1
);
190 TCGv_i32 t0
= tcg_const_i32(arg2
);
191 tcg_gen_sub_i32(ret
, arg1
, t0
);
192 tcg_temp_free_i32(t0
);
196 void tcg_gen_andi_i32(TCGv_i32 ret
, TCGv_i32 arg1
, uint32_t arg2
)
199 /* Some cases can be optimized here. */
202 tcg_gen_movi_i32(ret
, 0);
205 tcg_gen_mov_i32(ret
, arg1
);
208 /* Don't recurse with tcg_gen_ext8u_i32. */
209 if (TCG_TARGET_HAS_ext8u_i32
) {
210 tcg_gen_op2_i32(INDEX_op_ext8u_i32
, ret
, arg1
);
215 if (TCG_TARGET_HAS_ext16u_i32
) {
216 tcg_gen_op2_i32(INDEX_op_ext16u_i32
, ret
, arg1
);
221 t0
= tcg_const_i32(arg2
);
222 tcg_gen_and_i32(ret
, arg1
, t0
);
223 tcg_temp_free_i32(t0
);
226 void tcg_gen_ori_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
228 /* Some cases can be optimized here. */
230 tcg_gen_movi_i32(ret
, -1);
231 } else if (arg2
== 0) {
232 tcg_gen_mov_i32(ret
, arg1
);
234 TCGv_i32 t0
= tcg_const_i32(arg2
);
235 tcg_gen_or_i32(ret
, arg1
, t0
);
236 tcg_temp_free_i32(t0
);
240 void tcg_gen_xori_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
242 /* Some cases can be optimized here. */
244 tcg_gen_mov_i32(ret
, arg1
);
245 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i32
) {
246 /* Don't recurse with tcg_gen_not_i32. */
247 tcg_gen_op2_i32(INDEX_op_not_i32
, ret
, arg1
);
249 TCGv_i32 t0
= tcg_const_i32(arg2
);
250 tcg_gen_xor_i32(ret
, arg1
, t0
);
251 tcg_temp_free_i32(t0
);
255 void tcg_gen_shli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, unsigned arg2
)
257 tcg_debug_assert(arg2
< 32);
259 tcg_gen_mov_i32(ret
, arg1
);
261 TCGv_i32 t0
= tcg_const_i32(arg2
);
262 tcg_gen_shl_i32(ret
, arg1
, t0
);
263 tcg_temp_free_i32(t0
);
267 void tcg_gen_shri_i32(TCGv_i32 ret
, TCGv_i32 arg1
, unsigned arg2
)
269 tcg_debug_assert(arg2
< 32);
271 tcg_gen_mov_i32(ret
, arg1
);
273 TCGv_i32 t0
= tcg_const_i32(arg2
);
274 tcg_gen_shr_i32(ret
, arg1
, t0
);
275 tcg_temp_free_i32(t0
);
279 void tcg_gen_sari_i32(TCGv_i32 ret
, TCGv_i32 arg1
, unsigned arg2
)
281 tcg_debug_assert(arg2
< 32);
283 tcg_gen_mov_i32(ret
, arg1
);
285 TCGv_i32 t0
= tcg_const_i32(arg2
);
286 tcg_gen_sar_i32(ret
, arg1
, t0
);
287 tcg_temp_free_i32(t0
);
291 void tcg_gen_brcond_i32(TCGCond cond
, TCGv_i32 arg1
, TCGv_i32 arg2
, TCGLabel
*l
)
293 if (cond
== TCG_COND_ALWAYS
) {
295 } else if (cond
!= TCG_COND_NEVER
) {
296 tcg_gen_op4ii_i32(INDEX_op_brcond_i32
, arg1
, arg2
, cond
, label_arg(l
));
300 void tcg_gen_brcondi_i32(TCGCond cond
, TCGv_i32 arg1
, int32_t arg2
, TCGLabel
*l
)
302 if (cond
== TCG_COND_ALWAYS
) {
304 } else if (cond
!= TCG_COND_NEVER
) {
305 TCGv_i32 t0
= tcg_const_i32(arg2
);
306 tcg_gen_brcond_i32(cond
, arg1
, t0
, l
);
307 tcg_temp_free_i32(t0
);
311 void tcg_gen_setcond_i32(TCGCond cond
, TCGv_i32 ret
,
312 TCGv_i32 arg1
, TCGv_i32 arg2
)
314 if (cond
== TCG_COND_ALWAYS
) {
315 tcg_gen_movi_i32(ret
, 1);
316 } else if (cond
== TCG_COND_NEVER
) {
317 tcg_gen_movi_i32(ret
, 0);
319 tcg_gen_op4i_i32(INDEX_op_setcond_i32
, ret
, arg1
, arg2
, cond
);
323 void tcg_gen_setcondi_i32(TCGCond cond
, TCGv_i32 ret
,
324 TCGv_i32 arg1
, int32_t arg2
)
326 TCGv_i32 t0
= tcg_const_i32(arg2
);
327 tcg_gen_setcond_i32(cond
, ret
, arg1
, t0
);
328 tcg_temp_free_i32(t0
);
331 void tcg_gen_muli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, int32_t arg2
)
333 TCGv_i32 t0
= tcg_const_i32(arg2
);
334 tcg_gen_mul_i32(ret
, arg1
, t0
);
335 tcg_temp_free_i32(t0
);
338 void tcg_gen_div_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
340 if (TCG_TARGET_HAS_div_i32
) {
341 tcg_gen_op3_i32(INDEX_op_div_i32
, ret
, arg1
, arg2
);
342 } else if (TCG_TARGET_HAS_div2_i32
) {
343 TCGv_i32 t0
= tcg_temp_new_i32();
344 tcg_gen_sari_i32(t0
, arg1
, 31);
345 tcg_gen_op5_i32(INDEX_op_div2_i32
, ret
, t0
, arg1
, t0
, arg2
);
346 tcg_temp_free_i32(t0
);
348 gen_helper_div_i32(ret
, arg1
, arg2
);
352 void tcg_gen_rem_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
354 if (TCG_TARGET_HAS_rem_i32
) {
355 tcg_gen_op3_i32(INDEX_op_rem_i32
, ret
, arg1
, arg2
);
356 } else if (TCG_TARGET_HAS_div_i32
) {
357 TCGv_i32 t0
= tcg_temp_new_i32();
358 tcg_gen_op3_i32(INDEX_op_div_i32
, t0
, arg1
, arg2
);
359 tcg_gen_mul_i32(t0
, t0
, arg2
);
360 tcg_gen_sub_i32(ret
, arg1
, t0
);
361 tcg_temp_free_i32(t0
);
362 } else if (TCG_TARGET_HAS_div2_i32
) {
363 TCGv_i32 t0
= tcg_temp_new_i32();
364 tcg_gen_sari_i32(t0
, arg1
, 31);
365 tcg_gen_op5_i32(INDEX_op_div2_i32
, t0
, ret
, arg1
, t0
, arg2
);
366 tcg_temp_free_i32(t0
);
368 gen_helper_rem_i32(ret
, arg1
, arg2
);
372 void tcg_gen_divu_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
374 if (TCG_TARGET_HAS_div_i32
) {
375 tcg_gen_op3_i32(INDEX_op_divu_i32
, ret
, arg1
, arg2
);
376 } else if (TCG_TARGET_HAS_div2_i32
) {
377 TCGv_i32 t0
= tcg_temp_new_i32();
378 tcg_gen_movi_i32(t0
, 0);
379 tcg_gen_op5_i32(INDEX_op_divu2_i32
, ret
, t0
, arg1
, t0
, arg2
);
380 tcg_temp_free_i32(t0
);
382 gen_helper_divu_i32(ret
, arg1
, arg2
);
386 void tcg_gen_remu_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
388 if (TCG_TARGET_HAS_rem_i32
) {
389 tcg_gen_op3_i32(INDEX_op_remu_i32
, ret
, arg1
, arg2
);
390 } else if (TCG_TARGET_HAS_div_i32
) {
391 TCGv_i32 t0
= tcg_temp_new_i32();
392 tcg_gen_op3_i32(INDEX_op_divu_i32
, t0
, arg1
, arg2
);
393 tcg_gen_mul_i32(t0
, t0
, arg2
);
394 tcg_gen_sub_i32(ret
, arg1
, t0
);
395 tcg_temp_free_i32(t0
);
396 } else if (TCG_TARGET_HAS_div2_i32
) {
397 TCGv_i32 t0
= tcg_temp_new_i32();
398 tcg_gen_movi_i32(t0
, 0);
399 tcg_gen_op5_i32(INDEX_op_divu2_i32
, t0
, ret
, arg1
, t0
, arg2
);
400 tcg_temp_free_i32(t0
);
402 gen_helper_remu_i32(ret
, arg1
, arg2
);
406 void tcg_gen_andc_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
408 if (TCG_TARGET_HAS_andc_i32
) {
409 tcg_gen_op3_i32(INDEX_op_andc_i32
, ret
, arg1
, arg2
);
411 TCGv_i32 t0
= tcg_temp_new_i32();
412 tcg_gen_not_i32(t0
, arg2
);
413 tcg_gen_and_i32(ret
, arg1
, t0
);
414 tcg_temp_free_i32(t0
);
418 void tcg_gen_eqv_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
420 if (TCG_TARGET_HAS_eqv_i32
) {
421 tcg_gen_op3_i32(INDEX_op_eqv_i32
, ret
, arg1
, arg2
);
423 tcg_gen_xor_i32(ret
, arg1
, arg2
);
424 tcg_gen_not_i32(ret
, ret
);
428 void tcg_gen_nand_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
430 if (TCG_TARGET_HAS_nand_i32
) {
431 tcg_gen_op3_i32(INDEX_op_nand_i32
, ret
, arg1
, arg2
);
433 tcg_gen_and_i32(ret
, arg1
, arg2
);
434 tcg_gen_not_i32(ret
, ret
);
438 void tcg_gen_nor_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
440 if (TCG_TARGET_HAS_nor_i32
) {
441 tcg_gen_op3_i32(INDEX_op_nor_i32
, ret
, arg1
, arg2
);
443 tcg_gen_or_i32(ret
, arg1
, arg2
);
444 tcg_gen_not_i32(ret
, ret
);
448 void tcg_gen_orc_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
450 if (TCG_TARGET_HAS_orc_i32
) {
451 tcg_gen_op3_i32(INDEX_op_orc_i32
, ret
, arg1
, arg2
);
453 TCGv_i32 t0
= tcg_temp_new_i32();
454 tcg_gen_not_i32(t0
, arg2
);
455 tcg_gen_or_i32(ret
, arg1
, t0
);
456 tcg_temp_free_i32(t0
);
460 void tcg_gen_rotl_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
462 if (TCG_TARGET_HAS_rot_i32
) {
463 tcg_gen_op3_i32(INDEX_op_rotl_i32
, ret
, arg1
, arg2
);
467 t0
= tcg_temp_new_i32();
468 t1
= tcg_temp_new_i32();
469 tcg_gen_shl_i32(t0
, arg1
, arg2
);
470 tcg_gen_subfi_i32(t1
, 32, arg2
);
471 tcg_gen_shr_i32(t1
, arg1
, t1
);
472 tcg_gen_or_i32(ret
, t0
, t1
);
473 tcg_temp_free_i32(t0
);
474 tcg_temp_free_i32(t1
);
478 void tcg_gen_rotli_i32(TCGv_i32 ret
, TCGv_i32 arg1
, unsigned arg2
)
480 tcg_debug_assert(arg2
< 32);
481 /* some cases can be optimized here */
483 tcg_gen_mov_i32(ret
, arg1
);
484 } else if (TCG_TARGET_HAS_rot_i32
) {
485 TCGv_i32 t0
= tcg_const_i32(arg2
);
486 tcg_gen_rotl_i32(ret
, arg1
, t0
);
487 tcg_temp_free_i32(t0
);
490 t0
= tcg_temp_new_i32();
491 t1
= tcg_temp_new_i32();
492 tcg_gen_shli_i32(t0
, arg1
, arg2
);
493 tcg_gen_shri_i32(t1
, arg1
, 32 - arg2
);
494 tcg_gen_or_i32(ret
, t0
, t1
);
495 tcg_temp_free_i32(t0
);
496 tcg_temp_free_i32(t1
);
500 void tcg_gen_rotr_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
)
502 if (TCG_TARGET_HAS_rot_i32
) {
503 tcg_gen_op3_i32(INDEX_op_rotr_i32
, ret
, arg1
, arg2
);
507 t0
= tcg_temp_new_i32();
508 t1
= tcg_temp_new_i32();
509 tcg_gen_shr_i32(t0
, arg1
, arg2
);
510 tcg_gen_subfi_i32(t1
, 32, arg2
);
511 tcg_gen_shl_i32(t1
, arg1
, t1
);
512 tcg_gen_or_i32(ret
, t0
, t1
);
513 tcg_temp_free_i32(t0
);
514 tcg_temp_free_i32(t1
);
518 void tcg_gen_rotri_i32(TCGv_i32 ret
, TCGv_i32 arg1
, unsigned arg2
)
520 tcg_debug_assert(arg2
< 32);
521 /* some cases can be optimized here */
523 tcg_gen_mov_i32(ret
, arg1
);
525 tcg_gen_rotli_i32(ret
, arg1
, 32 - arg2
);
529 void tcg_gen_deposit_i32(TCGv_i32 ret
, TCGv_i32 arg1
, TCGv_i32 arg2
,
530 unsigned int ofs
, unsigned int len
)
535 tcg_debug_assert(ofs
< 32);
536 tcg_debug_assert(len
> 0);
537 tcg_debug_assert(len
<= 32);
538 tcg_debug_assert(ofs
+ len
<= 32);
541 tcg_gen_mov_i32(ret
, arg2
);
544 if (TCG_TARGET_HAS_deposit_i32
&& TCG_TARGET_deposit_i32_valid(ofs
, len
)) {
545 tcg_gen_op5ii_i32(INDEX_op_deposit_i32
, ret
, arg1
, arg2
, ofs
, len
);
549 mask
= (1u << len
) - 1;
550 t1
= tcg_temp_new_i32();
552 if (ofs
+ len
< 32) {
553 tcg_gen_andi_i32(t1
, arg2
, mask
);
554 tcg_gen_shli_i32(t1
, t1
, ofs
);
556 tcg_gen_shli_i32(t1
, arg2
, ofs
);
558 tcg_gen_andi_i32(ret
, arg1
, ~(mask
<< ofs
));
559 tcg_gen_or_i32(ret
, ret
, t1
);
561 tcg_temp_free_i32(t1
);
564 void tcg_gen_extract_i32(TCGv_i32 ret
, TCGv_i32 arg
,
565 unsigned int ofs
, unsigned int len
)
567 tcg_debug_assert(ofs
< 32);
568 tcg_debug_assert(len
> 0);
569 tcg_debug_assert(len
<= 32);
570 tcg_debug_assert(ofs
+ len
<= 32);
572 /* Canonicalize certain special cases, even if extract is supported. */
573 if (ofs
+ len
== 32) {
574 tcg_gen_shri_i32(ret
, arg
, 32 - len
);
578 tcg_gen_andi_i32(ret
, arg
, (1u << len
) - 1);
582 if (TCG_TARGET_HAS_extract_i32
583 && TCG_TARGET_extract_i32_valid(ofs
, len
)) {
584 tcg_gen_op4ii_i32(INDEX_op_extract_i32
, ret
, arg
, ofs
, len
);
588 /* Assume that zero-extension, if available, is cheaper than a shift. */
591 if (TCG_TARGET_HAS_ext16u_i32
) {
592 tcg_gen_ext16u_i32(ret
, arg
);
593 tcg_gen_shri_i32(ret
, ret
, ofs
);
598 if (TCG_TARGET_HAS_ext8u_i32
) {
599 tcg_gen_ext8u_i32(ret
, arg
);
600 tcg_gen_shri_i32(ret
, ret
, ofs
);
606 /* ??? Ideally we'd know what values are available for immediate AND.
607 Assume that 8 bits are available, plus the special case of 16,
608 so that we get ext8u, ext16u. */
610 case 1 ... 8: case 16:
611 tcg_gen_shri_i32(ret
, arg
, ofs
);
612 tcg_gen_andi_i32(ret
, ret
, (1u << len
) - 1);
615 tcg_gen_shli_i32(ret
, arg
, 32 - len
- ofs
);
616 tcg_gen_shri_i32(ret
, ret
, 32 - len
);
621 void tcg_gen_sextract_i32(TCGv_i32 ret
, TCGv_i32 arg
,
622 unsigned int ofs
, unsigned int len
)
624 tcg_debug_assert(ofs
< 32);
625 tcg_debug_assert(len
> 0);
626 tcg_debug_assert(len
<= 32);
627 tcg_debug_assert(ofs
+ len
<= 32);
629 /* Canonicalize certain special cases, even if extract is supported. */
630 if (ofs
+ len
== 32) {
631 tcg_gen_sari_i32(ret
, arg
, 32 - len
);
637 tcg_gen_ext16s_i32(ret
, arg
);
640 tcg_gen_ext8s_i32(ret
, arg
);
645 if (TCG_TARGET_HAS_sextract_i32
646 && TCG_TARGET_extract_i32_valid(ofs
, len
)) {
647 tcg_gen_op4ii_i32(INDEX_op_sextract_i32
, ret
, arg
, ofs
, len
);
651 /* Assume that sign-extension, if available, is cheaper than a shift. */
654 if (TCG_TARGET_HAS_ext16s_i32
) {
655 tcg_gen_ext16s_i32(ret
, arg
);
656 tcg_gen_sari_i32(ret
, ret
, ofs
);
661 if (TCG_TARGET_HAS_ext8s_i32
) {
662 tcg_gen_ext8s_i32(ret
, arg
);
663 tcg_gen_sari_i32(ret
, ret
, ofs
);
670 if (TCG_TARGET_HAS_ext16s_i32
) {
671 tcg_gen_shri_i32(ret
, arg
, ofs
);
672 tcg_gen_ext16s_i32(ret
, ret
);
677 if (TCG_TARGET_HAS_ext8s_i32
) {
678 tcg_gen_shri_i32(ret
, arg
, ofs
);
679 tcg_gen_ext8s_i32(ret
, ret
);
685 tcg_gen_shli_i32(ret
, arg
, 32 - len
- ofs
);
686 tcg_gen_sari_i32(ret
, ret
, 32 - len
);
689 void tcg_gen_movcond_i32(TCGCond cond
, TCGv_i32 ret
, TCGv_i32 c1
,
690 TCGv_i32 c2
, TCGv_i32 v1
, TCGv_i32 v2
)
692 if (cond
== TCG_COND_ALWAYS
) {
693 tcg_gen_mov_i32(ret
, v1
);
694 } else if (cond
== TCG_COND_NEVER
) {
695 tcg_gen_mov_i32(ret
, v2
);
696 } else if (TCG_TARGET_HAS_movcond_i32
) {
697 tcg_gen_op6i_i32(INDEX_op_movcond_i32
, ret
, c1
, c2
, v1
, v2
, cond
);
699 TCGv_i32 t0
= tcg_temp_new_i32();
700 TCGv_i32 t1
= tcg_temp_new_i32();
701 tcg_gen_setcond_i32(cond
, t0
, c1
, c2
);
702 tcg_gen_neg_i32(t0
, t0
);
703 tcg_gen_and_i32(t1
, v1
, t0
);
704 tcg_gen_andc_i32(ret
, v2
, t0
);
705 tcg_gen_or_i32(ret
, ret
, t1
);
706 tcg_temp_free_i32(t0
);
707 tcg_temp_free_i32(t1
);
711 void tcg_gen_add2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 al
,
712 TCGv_i32 ah
, TCGv_i32 bl
, TCGv_i32 bh
)
714 if (TCG_TARGET_HAS_add2_i32
) {
715 tcg_gen_op6_i32(INDEX_op_add2_i32
, rl
, rh
, al
, ah
, bl
, bh
);
717 TCGv_i64 t0
= tcg_temp_new_i64();
718 TCGv_i64 t1
= tcg_temp_new_i64();
719 tcg_gen_concat_i32_i64(t0
, al
, ah
);
720 tcg_gen_concat_i32_i64(t1
, bl
, bh
);
721 tcg_gen_add_i64(t0
, t0
, t1
);
722 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
723 tcg_temp_free_i64(t0
);
724 tcg_temp_free_i64(t1
);
728 void tcg_gen_sub2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 al
,
729 TCGv_i32 ah
, TCGv_i32 bl
, TCGv_i32 bh
)
731 if (TCG_TARGET_HAS_sub2_i32
) {
732 tcg_gen_op6_i32(INDEX_op_sub2_i32
, rl
, rh
, al
, ah
, bl
, bh
);
734 TCGv_i64 t0
= tcg_temp_new_i64();
735 TCGv_i64 t1
= tcg_temp_new_i64();
736 tcg_gen_concat_i32_i64(t0
, al
, ah
);
737 tcg_gen_concat_i32_i64(t1
, bl
, bh
);
738 tcg_gen_sub_i64(t0
, t0
, t1
);
739 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
740 tcg_temp_free_i64(t0
);
741 tcg_temp_free_i64(t1
);
745 void tcg_gen_mulu2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 arg1
, TCGv_i32 arg2
)
747 if (TCG_TARGET_HAS_mulu2_i32
) {
748 tcg_gen_op4_i32(INDEX_op_mulu2_i32
, rl
, rh
, arg1
, arg2
);
749 } else if (TCG_TARGET_HAS_muluh_i32
) {
750 TCGv_i32 t
= tcg_temp_new_i32();
751 tcg_gen_op3_i32(INDEX_op_mul_i32
, t
, arg1
, arg2
);
752 tcg_gen_op3_i32(INDEX_op_muluh_i32
, rh
, arg1
, arg2
);
753 tcg_gen_mov_i32(rl
, t
);
754 tcg_temp_free_i32(t
);
756 TCGv_i64 t0
= tcg_temp_new_i64();
757 TCGv_i64 t1
= tcg_temp_new_i64();
758 tcg_gen_extu_i32_i64(t0
, arg1
);
759 tcg_gen_extu_i32_i64(t1
, arg2
);
760 tcg_gen_mul_i64(t0
, t0
, t1
);
761 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
762 tcg_temp_free_i64(t0
);
763 tcg_temp_free_i64(t1
);
767 void tcg_gen_muls2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 arg1
, TCGv_i32 arg2
)
769 if (TCG_TARGET_HAS_muls2_i32
) {
770 tcg_gen_op4_i32(INDEX_op_muls2_i32
, rl
, rh
, arg1
, arg2
);
771 } else if (TCG_TARGET_HAS_mulsh_i32
) {
772 TCGv_i32 t
= tcg_temp_new_i32();
773 tcg_gen_op3_i32(INDEX_op_mul_i32
, t
, arg1
, arg2
);
774 tcg_gen_op3_i32(INDEX_op_mulsh_i32
, rh
, arg1
, arg2
);
775 tcg_gen_mov_i32(rl
, t
);
776 tcg_temp_free_i32(t
);
777 } else if (TCG_TARGET_REG_BITS
== 32) {
778 TCGv_i32 t0
= tcg_temp_new_i32();
779 TCGv_i32 t1
= tcg_temp_new_i32();
780 TCGv_i32 t2
= tcg_temp_new_i32();
781 TCGv_i32 t3
= tcg_temp_new_i32();
782 tcg_gen_mulu2_i32(t0
, t1
, arg1
, arg2
);
783 /* Adjust for negative inputs. */
784 tcg_gen_sari_i32(t2
, arg1
, 31);
785 tcg_gen_sari_i32(t3
, arg2
, 31);
786 tcg_gen_and_i32(t2
, t2
, arg2
);
787 tcg_gen_and_i32(t3
, t3
, arg1
);
788 tcg_gen_sub_i32(rh
, t1
, t2
);
789 tcg_gen_sub_i32(rh
, rh
, t3
);
790 tcg_gen_mov_i32(rl
, t0
);
791 tcg_temp_free_i32(t0
);
792 tcg_temp_free_i32(t1
);
793 tcg_temp_free_i32(t2
);
794 tcg_temp_free_i32(t3
);
796 TCGv_i64 t0
= tcg_temp_new_i64();
797 TCGv_i64 t1
= tcg_temp_new_i64();
798 tcg_gen_ext_i32_i64(t0
, arg1
);
799 tcg_gen_ext_i32_i64(t1
, arg2
);
800 tcg_gen_mul_i64(t0
, t0
, t1
);
801 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
802 tcg_temp_free_i64(t0
);
803 tcg_temp_free_i64(t1
);
807 void tcg_gen_mulsu2_i32(TCGv_i32 rl
, TCGv_i32 rh
, TCGv_i32 arg1
, TCGv_i32 arg2
)
809 if (TCG_TARGET_REG_BITS
== 32) {
810 TCGv_i32 t0
= tcg_temp_new_i32();
811 TCGv_i32 t1
= tcg_temp_new_i32();
812 TCGv_i32 t2
= tcg_temp_new_i32();
813 tcg_gen_mulu2_i32(t0
, t1
, arg1
, arg2
);
814 /* Adjust for negative input for the signed arg1. */
815 tcg_gen_sari_i32(t2
, arg1
, 31);
816 tcg_gen_and_i32(t2
, t2
, arg2
);
817 tcg_gen_sub_i32(rh
, t1
, t2
);
818 tcg_gen_mov_i32(rl
, t0
);
819 tcg_temp_free_i32(t0
);
820 tcg_temp_free_i32(t1
);
821 tcg_temp_free_i32(t2
);
823 TCGv_i64 t0
= tcg_temp_new_i64();
824 TCGv_i64 t1
= tcg_temp_new_i64();
825 tcg_gen_ext_i32_i64(t0
, arg1
);
826 tcg_gen_extu_i32_i64(t1
, arg2
);
827 tcg_gen_mul_i64(t0
, t0
, t1
);
828 tcg_gen_extr_i64_i32(rl
, rh
, t0
);
829 tcg_temp_free_i64(t0
);
830 tcg_temp_free_i64(t1
);
834 void tcg_gen_ext8s_i32(TCGv_i32 ret
, TCGv_i32 arg
)
836 if (TCG_TARGET_HAS_ext8s_i32
) {
837 tcg_gen_op2_i32(INDEX_op_ext8s_i32
, ret
, arg
);
839 tcg_gen_shli_i32(ret
, arg
, 24);
840 tcg_gen_sari_i32(ret
, ret
, 24);
844 void tcg_gen_ext16s_i32(TCGv_i32 ret
, TCGv_i32 arg
)
846 if (TCG_TARGET_HAS_ext16s_i32
) {
847 tcg_gen_op2_i32(INDEX_op_ext16s_i32
, ret
, arg
);
849 tcg_gen_shli_i32(ret
, arg
, 16);
850 tcg_gen_sari_i32(ret
, ret
, 16);
854 void tcg_gen_ext8u_i32(TCGv_i32 ret
, TCGv_i32 arg
)
856 if (TCG_TARGET_HAS_ext8u_i32
) {
857 tcg_gen_op2_i32(INDEX_op_ext8u_i32
, ret
, arg
);
859 tcg_gen_andi_i32(ret
, arg
, 0xffu
);
863 void tcg_gen_ext16u_i32(TCGv_i32 ret
, TCGv_i32 arg
)
865 if (TCG_TARGET_HAS_ext16u_i32
) {
866 tcg_gen_op2_i32(INDEX_op_ext16u_i32
, ret
, arg
);
868 tcg_gen_andi_i32(ret
, arg
, 0xffffu
);
872 /* Note: we assume the two high bytes are set to zero */
873 void tcg_gen_bswap16_i32(TCGv_i32 ret
, TCGv_i32 arg
)
875 if (TCG_TARGET_HAS_bswap16_i32
) {
876 tcg_gen_op2_i32(INDEX_op_bswap16_i32
, ret
, arg
);
878 TCGv_i32 t0
= tcg_temp_new_i32();
880 tcg_gen_ext8u_i32(t0
, arg
);
881 tcg_gen_shli_i32(t0
, t0
, 8);
882 tcg_gen_shri_i32(ret
, arg
, 8);
883 tcg_gen_or_i32(ret
, ret
, t0
);
884 tcg_temp_free_i32(t0
);
888 void tcg_gen_bswap32_i32(TCGv_i32 ret
, TCGv_i32 arg
)
890 if (TCG_TARGET_HAS_bswap32_i32
) {
891 tcg_gen_op2_i32(INDEX_op_bswap32_i32
, ret
, arg
);
894 t0
= tcg_temp_new_i32();
895 t1
= tcg_temp_new_i32();
897 tcg_gen_shli_i32(t0
, arg
, 24);
899 tcg_gen_andi_i32(t1
, arg
, 0x0000ff00);
900 tcg_gen_shli_i32(t1
, t1
, 8);
901 tcg_gen_or_i32(t0
, t0
, t1
);
903 tcg_gen_shri_i32(t1
, arg
, 8);
904 tcg_gen_andi_i32(t1
, t1
, 0x0000ff00);
905 tcg_gen_or_i32(t0
, t0
, t1
);
907 tcg_gen_shri_i32(t1
, arg
, 24);
908 tcg_gen_or_i32(ret
, t0
, t1
);
909 tcg_temp_free_i32(t0
);
910 tcg_temp_free_i32(t1
);
916 #if TCG_TARGET_REG_BITS == 32
917 /* These are all inline for TCG_TARGET_REG_BITS == 64. */
919 void tcg_gen_discard_i64(TCGv_i64 arg
)
921 tcg_gen_discard_i32(TCGV_LOW(arg
));
922 tcg_gen_discard_i32(TCGV_HIGH(arg
));
925 void tcg_gen_mov_i64(TCGv_i64 ret
, TCGv_i64 arg
)
927 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
928 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
931 void tcg_gen_movi_i64(TCGv_i64 ret
, int64_t arg
)
933 tcg_gen_movi_i32(TCGV_LOW(ret
), arg
);
934 tcg_gen_movi_i32(TCGV_HIGH(ret
), arg
>> 32);
937 void tcg_gen_ld8u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
939 tcg_gen_ld8u_i32(TCGV_LOW(ret
), arg2
, offset
);
940 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
943 void tcg_gen_ld8s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
945 tcg_gen_ld8s_i32(TCGV_LOW(ret
), arg2
, offset
);
946 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
949 void tcg_gen_ld16u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
951 tcg_gen_ld16u_i32(TCGV_LOW(ret
), arg2
, offset
);
952 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
955 void tcg_gen_ld16s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
957 tcg_gen_ld16s_i32(TCGV_LOW(ret
), arg2
, offset
);
958 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
961 void tcg_gen_ld32u_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
963 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
964 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
967 void tcg_gen_ld32s_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
969 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
970 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
973 void tcg_gen_ld_i64(TCGv_i64 ret
, TCGv_ptr arg2
, tcg_target_long offset
)
975 /* Since arg2 and ret have different types,
976 they cannot be the same temporary */
977 #ifdef HOST_WORDS_BIGENDIAN
978 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
);
979 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
+ 4);
981 tcg_gen_ld_i32(TCGV_LOW(ret
), arg2
, offset
);
982 tcg_gen_ld_i32(TCGV_HIGH(ret
), arg2
, offset
+ 4);
986 void tcg_gen_st_i64(TCGv_i64 arg1
, TCGv_ptr arg2
, tcg_target_long offset
)
988 #ifdef HOST_WORDS_BIGENDIAN
989 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
);
990 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
+ 4);
992 tcg_gen_st_i32(TCGV_LOW(arg1
), arg2
, offset
);
993 tcg_gen_st_i32(TCGV_HIGH(arg1
), arg2
, offset
+ 4);
997 void tcg_gen_and_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
999 tcg_gen_and_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1000 tcg_gen_and_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1003 void tcg_gen_or_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1005 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1006 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1009 void tcg_gen_xor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1011 tcg_gen_xor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1012 tcg_gen_xor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1015 void tcg_gen_shl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1017 gen_helper_shl_i64(ret
, arg1
, arg2
);
1020 void tcg_gen_shr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1022 gen_helper_shr_i64(ret
, arg1
, arg2
);
1025 void tcg_gen_sar_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1027 gen_helper_sar_i64(ret
, arg1
, arg2
);
1030 void tcg_gen_mul_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1035 t0
= tcg_temp_new_i64();
1036 t1
= tcg_temp_new_i32();
1038 tcg_gen_mulu2_i32(TCGV_LOW(t0
), TCGV_HIGH(t0
),
1039 TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1041 tcg_gen_mul_i32(t1
, TCGV_LOW(arg1
), TCGV_HIGH(arg2
));
1042 tcg_gen_add_i32(TCGV_HIGH(t0
), TCGV_HIGH(t0
), t1
);
1043 tcg_gen_mul_i32(t1
, TCGV_HIGH(arg1
), TCGV_LOW(arg2
));
1044 tcg_gen_add_i32(TCGV_HIGH(t0
), TCGV_HIGH(t0
), t1
);
1046 tcg_gen_mov_i64(ret
, t0
);
1047 tcg_temp_free_i64(t0
);
1048 tcg_temp_free_i32(t1
);
1050 #endif /* TCG_TARGET_REG_SIZE == 32 */
1052 void tcg_gen_addi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1054 /* some cases can be optimized here */
1056 tcg_gen_mov_i64(ret
, arg1
);
1058 TCGv_i64 t0
= tcg_const_i64(arg2
);
1059 tcg_gen_add_i64(ret
, arg1
, t0
);
1060 tcg_temp_free_i64(t0
);
1064 void tcg_gen_subfi_i64(TCGv_i64 ret
, int64_t arg1
, TCGv_i64 arg2
)
1066 if (arg1
== 0 && TCG_TARGET_HAS_neg_i64
) {
1067 /* Don't recurse with tcg_gen_neg_i64. */
1068 tcg_gen_op2_i64(INDEX_op_neg_i64
, ret
, arg2
);
1070 TCGv_i64 t0
= tcg_const_i64(arg1
);
1071 tcg_gen_sub_i64(ret
, t0
, arg2
);
1072 tcg_temp_free_i64(t0
);
1076 void tcg_gen_subi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1078 /* some cases can be optimized here */
1080 tcg_gen_mov_i64(ret
, arg1
);
1082 TCGv_i64 t0
= tcg_const_i64(arg2
);
1083 tcg_gen_sub_i64(ret
, arg1
, t0
);
1084 tcg_temp_free_i64(t0
);
1088 void tcg_gen_andi_i64(TCGv_i64 ret
, TCGv_i64 arg1
, uint64_t arg2
)
1092 if (TCG_TARGET_REG_BITS
== 32) {
1093 tcg_gen_andi_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1094 tcg_gen_andi_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1098 /* Some cases can be optimized here. */
1101 tcg_gen_movi_i64(ret
, 0);
1103 case 0xffffffffffffffffull
:
1104 tcg_gen_mov_i64(ret
, arg1
);
1107 /* Don't recurse with tcg_gen_ext8u_i64. */
1108 if (TCG_TARGET_HAS_ext8u_i64
) {
1109 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg1
);
1114 if (TCG_TARGET_HAS_ext16u_i64
) {
1115 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg1
);
1120 if (TCG_TARGET_HAS_ext32u_i64
) {
1121 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg1
);
1126 t0
= tcg_const_i64(arg2
);
1127 tcg_gen_and_i64(ret
, arg1
, t0
);
1128 tcg_temp_free_i64(t0
);
1131 void tcg_gen_ori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1133 if (TCG_TARGET_REG_BITS
== 32) {
1134 tcg_gen_ori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1135 tcg_gen_ori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1138 /* Some cases can be optimized here. */
1140 tcg_gen_movi_i64(ret
, -1);
1141 } else if (arg2
== 0) {
1142 tcg_gen_mov_i64(ret
, arg1
);
1144 TCGv_i64 t0
= tcg_const_i64(arg2
);
1145 tcg_gen_or_i64(ret
, arg1
, t0
);
1146 tcg_temp_free_i64(t0
);
1150 void tcg_gen_xori_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1152 if (TCG_TARGET_REG_BITS
== 32) {
1153 tcg_gen_xori_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), arg2
);
1154 tcg_gen_xori_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), arg2
>> 32);
1157 /* Some cases can be optimized here. */
1159 tcg_gen_mov_i64(ret
, arg1
);
1160 } else if (arg2
== -1 && TCG_TARGET_HAS_not_i64
) {
1161 /* Don't recurse with tcg_gen_not_i64. */
1162 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg1
);
1164 TCGv_i64 t0
= tcg_const_i64(arg2
);
1165 tcg_gen_xor_i64(ret
, arg1
, t0
);
1166 tcg_temp_free_i64(t0
);
1170 static inline void tcg_gen_shifti_i64(TCGv_i64 ret
, TCGv_i64 arg1
,
1171 unsigned c
, bool right
, bool arith
)
1173 tcg_debug_assert(c
< 64);
1175 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
1176 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
1177 } else if (c
>= 32) {
1181 tcg_gen_sari_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), c
);
1182 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), 31);
1184 tcg_gen_shri_i32(TCGV_LOW(ret
), TCGV_HIGH(arg1
), c
);
1185 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1188 tcg_gen_shli_i32(TCGV_HIGH(ret
), TCGV_LOW(arg1
), c
);
1189 tcg_gen_movi_i32(TCGV_LOW(ret
), 0);
1194 t0
= tcg_temp_new_i32();
1195 t1
= tcg_temp_new_i32();
1197 tcg_gen_shli_i32(t0
, TCGV_HIGH(arg1
), 32 - c
);
1199 tcg_gen_sari_i32(t1
, TCGV_HIGH(arg1
), c
);
1201 tcg_gen_shri_i32(t1
, TCGV_HIGH(arg1
), c
);
1203 tcg_gen_shri_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), c
);
1204 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), t0
);
1205 tcg_gen_mov_i32(TCGV_HIGH(ret
), t1
);
1207 tcg_gen_shri_i32(t0
, TCGV_LOW(arg1
), 32 - c
);
1208 /* Note: ret can be the same as arg1, so we use t1 */
1209 tcg_gen_shli_i32(t1
, TCGV_LOW(arg1
), c
);
1210 tcg_gen_shli_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), c
);
1211 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(ret
), t0
);
1212 tcg_gen_mov_i32(TCGV_LOW(ret
), t1
);
1214 tcg_temp_free_i32(t0
);
1215 tcg_temp_free_i32(t1
);
1219 void tcg_gen_shli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, unsigned arg2
)
1221 tcg_debug_assert(arg2
< 64);
1222 if (TCG_TARGET_REG_BITS
== 32) {
1223 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 0, 0);
1224 } else if (arg2
== 0) {
1225 tcg_gen_mov_i64(ret
, arg1
);
1227 TCGv_i64 t0
= tcg_const_i64(arg2
);
1228 tcg_gen_shl_i64(ret
, arg1
, t0
);
1229 tcg_temp_free_i64(t0
);
1233 void tcg_gen_shri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, unsigned arg2
)
1235 tcg_debug_assert(arg2
< 64);
1236 if (TCG_TARGET_REG_BITS
== 32) {
1237 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 0);
1238 } else if (arg2
== 0) {
1239 tcg_gen_mov_i64(ret
, arg1
);
1241 TCGv_i64 t0
= tcg_const_i64(arg2
);
1242 tcg_gen_shr_i64(ret
, arg1
, t0
);
1243 tcg_temp_free_i64(t0
);
1247 void tcg_gen_sari_i64(TCGv_i64 ret
, TCGv_i64 arg1
, unsigned arg2
)
1249 tcg_debug_assert(arg2
< 64);
1250 if (TCG_TARGET_REG_BITS
== 32) {
1251 tcg_gen_shifti_i64(ret
, arg1
, arg2
, 1, 1);
1252 } else if (arg2
== 0) {
1253 tcg_gen_mov_i64(ret
, arg1
);
1255 TCGv_i64 t0
= tcg_const_i64(arg2
);
1256 tcg_gen_sar_i64(ret
, arg1
, t0
);
1257 tcg_temp_free_i64(t0
);
1261 void tcg_gen_brcond_i64(TCGCond cond
, TCGv_i64 arg1
, TCGv_i64 arg2
, TCGLabel
*l
)
1263 if (cond
== TCG_COND_ALWAYS
) {
1265 } else if (cond
!= TCG_COND_NEVER
) {
1266 if (TCG_TARGET_REG_BITS
== 32) {
1267 tcg_gen_op6ii_i32(INDEX_op_brcond2_i32
, TCGV_LOW(arg1
),
1268 TCGV_HIGH(arg1
), TCGV_LOW(arg2
),
1269 TCGV_HIGH(arg2
), cond
, label_arg(l
));
1271 tcg_gen_op4ii_i64(INDEX_op_brcond_i64
, arg1
, arg2
, cond
,
1277 void tcg_gen_brcondi_i64(TCGCond cond
, TCGv_i64 arg1
, int64_t arg2
, TCGLabel
*l
)
1279 if (cond
== TCG_COND_ALWAYS
) {
1281 } else if (cond
!= TCG_COND_NEVER
) {
1282 TCGv_i64 t0
= tcg_const_i64(arg2
);
1283 tcg_gen_brcond_i64(cond
, arg1
, t0
, l
);
1284 tcg_temp_free_i64(t0
);
1288 void tcg_gen_setcond_i64(TCGCond cond
, TCGv_i64 ret
,
1289 TCGv_i64 arg1
, TCGv_i64 arg2
)
1291 if (cond
== TCG_COND_ALWAYS
) {
1292 tcg_gen_movi_i64(ret
, 1);
1293 } else if (cond
== TCG_COND_NEVER
) {
1294 tcg_gen_movi_i64(ret
, 0);
1296 if (TCG_TARGET_REG_BITS
== 32) {
1297 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, TCGV_LOW(ret
),
1298 TCGV_LOW(arg1
), TCGV_HIGH(arg1
),
1299 TCGV_LOW(arg2
), TCGV_HIGH(arg2
), cond
);
1300 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1302 tcg_gen_op4i_i64(INDEX_op_setcond_i64
, ret
, arg1
, arg2
, cond
);
1307 void tcg_gen_setcondi_i64(TCGCond cond
, TCGv_i64 ret
,
1308 TCGv_i64 arg1
, int64_t arg2
)
1310 TCGv_i64 t0
= tcg_const_i64(arg2
);
1311 tcg_gen_setcond_i64(cond
, ret
, arg1
, t0
);
1312 tcg_temp_free_i64(t0
);
1315 void tcg_gen_muli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, int64_t arg2
)
1317 TCGv_i64 t0
= tcg_const_i64(arg2
);
1318 tcg_gen_mul_i64(ret
, arg1
, t0
);
1319 tcg_temp_free_i64(t0
);
1322 void tcg_gen_div_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1324 if (TCG_TARGET_HAS_div_i64
) {
1325 tcg_gen_op3_i64(INDEX_op_div_i64
, ret
, arg1
, arg2
);
1326 } else if (TCG_TARGET_HAS_div2_i64
) {
1327 TCGv_i64 t0
= tcg_temp_new_i64();
1328 tcg_gen_sari_i64(t0
, arg1
, 63);
1329 tcg_gen_op5_i64(INDEX_op_div2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1330 tcg_temp_free_i64(t0
);
1332 gen_helper_div_i64(ret
, arg1
, arg2
);
1336 void tcg_gen_rem_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1338 if (TCG_TARGET_HAS_rem_i64
) {
1339 tcg_gen_op3_i64(INDEX_op_rem_i64
, ret
, arg1
, arg2
);
1340 } else if (TCG_TARGET_HAS_div_i64
) {
1341 TCGv_i64 t0
= tcg_temp_new_i64();
1342 tcg_gen_op3_i64(INDEX_op_div_i64
, t0
, arg1
, arg2
);
1343 tcg_gen_mul_i64(t0
, t0
, arg2
);
1344 tcg_gen_sub_i64(ret
, arg1
, t0
);
1345 tcg_temp_free_i64(t0
);
1346 } else if (TCG_TARGET_HAS_div2_i64
) {
1347 TCGv_i64 t0
= tcg_temp_new_i64();
1348 tcg_gen_sari_i64(t0
, arg1
, 63);
1349 tcg_gen_op5_i64(INDEX_op_div2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1350 tcg_temp_free_i64(t0
);
1352 gen_helper_rem_i64(ret
, arg1
, arg2
);
1356 void tcg_gen_divu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1358 if (TCG_TARGET_HAS_div_i64
) {
1359 tcg_gen_op3_i64(INDEX_op_divu_i64
, ret
, arg1
, arg2
);
1360 } else if (TCG_TARGET_HAS_div2_i64
) {
1361 TCGv_i64 t0
= tcg_temp_new_i64();
1362 tcg_gen_movi_i64(t0
, 0);
1363 tcg_gen_op5_i64(INDEX_op_divu2_i64
, ret
, t0
, arg1
, t0
, arg2
);
1364 tcg_temp_free_i64(t0
);
1366 gen_helper_divu_i64(ret
, arg1
, arg2
);
1370 void tcg_gen_remu_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1372 if (TCG_TARGET_HAS_rem_i64
) {
1373 tcg_gen_op3_i64(INDEX_op_remu_i64
, ret
, arg1
, arg2
);
1374 } else if (TCG_TARGET_HAS_div_i64
) {
1375 TCGv_i64 t0
= tcg_temp_new_i64();
1376 tcg_gen_op3_i64(INDEX_op_divu_i64
, t0
, arg1
, arg2
);
1377 tcg_gen_mul_i64(t0
, t0
, arg2
);
1378 tcg_gen_sub_i64(ret
, arg1
, t0
);
1379 tcg_temp_free_i64(t0
);
1380 } else if (TCG_TARGET_HAS_div2_i64
) {
1381 TCGv_i64 t0
= tcg_temp_new_i64();
1382 tcg_gen_movi_i64(t0
, 0);
1383 tcg_gen_op5_i64(INDEX_op_divu2_i64
, t0
, ret
, arg1
, t0
, arg2
);
1384 tcg_temp_free_i64(t0
);
1386 gen_helper_remu_i64(ret
, arg1
, arg2
);
1390 void tcg_gen_ext8s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1392 if (TCG_TARGET_REG_BITS
== 32) {
1393 tcg_gen_ext8s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1394 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1395 } else if (TCG_TARGET_HAS_ext8s_i64
) {
1396 tcg_gen_op2_i64(INDEX_op_ext8s_i64
, ret
, arg
);
1398 tcg_gen_shli_i64(ret
, arg
, 56);
1399 tcg_gen_sari_i64(ret
, ret
, 56);
1403 void tcg_gen_ext16s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1405 if (TCG_TARGET_REG_BITS
== 32) {
1406 tcg_gen_ext16s_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1407 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1408 } else if (TCG_TARGET_HAS_ext16s_i64
) {
1409 tcg_gen_op2_i64(INDEX_op_ext16s_i64
, ret
, arg
);
1411 tcg_gen_shli_i64(ret
, arg
, 48);
1412 tcg_gen_sari_i64(ret
, ret
, 48);
1416 void tcg_gen_ext32s_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1418 if (TCG_TARGET_REG_BITS
== 32) {
1419 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1420 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1421 } else if (TCG_TARGET_HAS_ext32s_i64
) {
1422 tcg_gen_op2_i64(INDEX_op_ext32s_i64
, ret
, arg
);
1424 tcg_gen_shli_i64(ret
, arg
, 32);
1425 tcg_gen_sari_i64(ret
, ret
, 32);
1429 void tcg_gen_ext8u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1431 if (TCG_TARGET_REG_BITS
== 32) {
1432 tcg_gen_ext8u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1433 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1434 } else if (TCG_TARGET_HAS_ext8u_i64
) {
1435 tcg_gen_op2_i64(INDEX_op_ext8u_i64
, ret
, arg
);
1437 tcg_gen_andi_i64(ret
, arg
, 0xffu
);
1441 void tcg_gen_ext16u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1443 if (TCG_TARGET_REG_BITS
== 32) {
1444 tcg_gen_ext16u_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1445 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1446 } else if (TCG_TARGET_HAS_ext16u_i64
) {
1447 tcg_gen_op2_i64(INDEX_op_ext16u_i64
, ret
, arg
);
1449 tcg_gen_andi_i64(ret
, arg
, 0xffffu
);
1453 void tcg_gen_ext32u_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1455 if (TCG_TARGET_REG_BITS
== 32) {
1456 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1457 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1458 } else if (TCG_TARGET_HAS_ext32u_i64
) {
1459 tcg_gen_op2_i64(INDEX_op_ext32u_i64
, ret
, arg
);
1461 tcg_gen_andi_i64(ret
, arg
, 0xffffffffu
);
1465 /* Note: we assume the six high bytes are set to zero */
1466 void tcg_gen_bswap16_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1468 if (TCG_TARGET_REG_BITS
== 32) {
1469 tcg_gen_bswap16_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1470 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1471 } else if (TCG_TARGET_HAS_bswap16_i64
) {
1472 tcg_gen_op2_i64(INDEX_op_bswap16_i64
, ret
, arg
);
1474 TCGv_i64 t0
= tcg_temp_new_i64();
1476 tcg_gen_ext8u_i64(t0
, arg
);
1477 tcg_gen_shli_i64(t0
, t0
, 8);
1478 tcg_gen_shri_i64(ret
, arg
, 8);
1479 tcg_gen_or_i64(ret
, ret
, t0
);
1480 tcg_temp_free_i64(t0
);
1484 /* Note: we assume the four high bytes are set to zero */
1485 void tcg_gen_bswap32_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1487 if (TCG_TARGET_REG_BITS
== 32) {
1488 tcg_gen_bswap32_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1489 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1490 } else if (TCG_TARGET_HAS_bswap32_i64
) {
1491 tcg_gen_op2_i64(INDEX_op_bswap32_i64
, ret
, arg
);
1494 t0
= tcg_temp_new_i64();
1495 t1
= tcg_temp_new_i64();
1497 tcg_gen_shli_i64(t0
, arg
, 24);
1498 tcg_gen_ext32u_i64(t0
, t0
);
1500 tcg_gen_andi_i64(t1
, arg
, 0x0000ff00);
1501 tcg_gen_shli_i64(t1
, t1
, 8);
1502 tcg_gen_or_i64(t0
, t0
, t1
);
1504 tcg_gen_shri_i64(t1
, arg
, 8);
1505 tcg_gen_andi_i64(t1
, t1
, 0x0000ff00);
1506 tcg_gen_or_i64(t0
, t0
, t1
);
1508 tcg_gen_shri_i64(t1
, arg
, 24);
1509 tcg_gen_or_i64(ret
, t0
, t1
);
1510 tcg_temp_free_i64(t0
);
1511 tcg_temp_free_i64(t1
);
1515 void tcg_gen_bswap64_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1517 if (TCG_TARGET_REG_BITS
== 32) {
1519 t0
= tcg_temp_new_i32();
1520 t1
= tcg_temp_new_i32();
1522 tcg_gen_bswap32_i32(t0
, TCGV_LOW(arg
));
1523 tcg_gen_bswap32_i32(t1
, TCGV_HIGH(arg
));
1524 tcg_gen_mov_i32(TCGV_LOW(ret
), t1
);
1525 tcg_gen_mov_i32(TCGV_HIGH(ret
), t0
);
1526 tcg_temp_free_i32(t0
);
1527 tcg_temp_free_i32(t1
);
1528 } else if (TCG_TARGET_HAS_bswap64_i64
) {
1529 tcg_gen_op2_i64(INDEX_op_bswap64_i64
, ret
, arg
);
1531 TCGv_i64 t0
= tcg_temp_new_i64();
1532 TCGv_i64 t1
= tcg_temp_new_i64();
1534 tcg_gen_shli_i64(t0
, arg
, 56);
1536 tcg_gen_andi_i64(t1
, arg
, 0x0000ff00);
1537 tcg_gen_shli_i64(t1
, t1
, 40);
1538 tcg_gen_or_i64(t0
, t0
, t1
);
1540 tcg_gen_andi_i64(t1
, arg
, 0x00ff0000);
1541 tcg_gen_shli_i64(t1
, t1
, 24);
1542 tcg_gen_or_i64(t0
, t0
, t1
);
1544 tcg_gen_andi_i64(t1
, arg
, 0xff000000);
1545 tcg_gen_shli_i64(t1
, t1
, 8);
1546 tcg_gen_or_i64(t0
, t0
, t1
);
1548 tcg_gen_shri_i64(t1
, arg
, 8);
1549 tcg_gen_andi_i64(t1
, t1
, 0xff000000);
1550 tcg_gen_or_i64(t0
, t0
, t1
);
1552 tcg_gen_shri_i64(t1
, arg
, 24);
1553 tcg_gen_andi_i64(t1
, t1
, 0x00ff0000);
1554 tcg_gen_or_i64(t0
, t0
, t1
);
1556 tcg_gen_shri_i64(t1
, arg
, 40);
1557 tcg_gen_andi_i64(t1
, t1
, 0x0000ff00);
1558 tcg_gen_or_i64(t0
, t0
, t1
);
1560 tcg_gen_shri_i64(t1
, arg
, 56);
1561 tcg_gen_or_i64(ret
, t0
, t1
);
1562 tcg_temp_free_i64(t0
);
1563 tcg_temp_free_i64(t1
);
1567 void tcg_gen_not_i64(TCGv_i64 ret
, TCGv_i64 arg
)
1569 if (TCG_TARGET_REG_BITS
== 32) {
1570 tcg_gen_not_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1571 tcg_gen_not_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
));
1572 } else if (TCG_TARGET_HAS_not_i64
) {
1573 tcg_gen_op2_i64(INDEX_op_not_i64
, ret
, arg
);
1575 tcg_gen_xori_i64(ret
, arg
, -1);
1579 void tcg_gen_andc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1581 if (TCG_TARGET_REG_BITS
== 32) {
1582 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1583 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1584 } else if (TCG_TARGET_HAS_andc_i64
) {
1585 tcg_gen_op3_i64(INDEX_op_andc_i64
, ret
, arg1
, arg2
);
1587 TCGv_i64 t0
= tcg_temp_new_i64();
1588 tcg_gen_not_i64(t0
, arg2
);
1589 tcg_gen_and_i64(ret
, arg1
, t0
);
1590 tcg_temp_free_i64(t0
);
1594 void tcg_gen_eqv_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1596 if (TCG_TARGET_REG_BITS
== 32) {
1597 tcg_gen_eqv_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1598 tcg_gen_eqv_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1599 } else if (TCG_TARGET_HAS_eqv_i64
) {
1600 tcg_gen_op3_i64(INDEX_op_eqv_i64
, ret
, arg1
, arg2
);
1602 tcg_gen_xor_i64(ret
, arg1
, arg2
);
1603 tcg_gen_not_i64(ret
, ret
);
1607 void tcg_gen_nand_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1609 if (TCG_TARGET_REG_BITS
== 32) {
1610 tcg_gen_nand_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1611 tcg_gen_nand_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1612 } else if (TCG_TARGET_HAS_nand_i64
) {
1613 tcg_gen_op3_i64(INDEX_op_nand_i64
, ret
, arg1
, arg2
);
1615 tcg_gen_and_i64(ret
, arg1
, arg2
);
1616 tcg_gen_not_i64(ret
, ret
);
1620 void tcg_gen_nor_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1622 if (TCG_TARGET_REG_BITS
== 32) {
1623 tcg_gen_nor_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1624 tcg_gen_nor_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1625 } else if (TCG_TARGET_HAS_nor_i64
) {
1626 tcg_gen_op3_i64(INDEX_op_nor_i64
, ret
, arg1
, arg2
);
1628 tcg_gen_or_i64(ret
, arg1
, arg2
);
1629 tcg_gen_not_i64(ret
, ret
);
1633 void tcg_gen_orc_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1635 if (TCG_TARGET_REG_BITS
== 32) {
1636 tcg_gen_orc_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
), TCGV_LOW(arg2
));
1637 tcg_gen_orc_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
), TCGV_HIGH(arg2
));
1638 } else if (TCG_TARGET_HAS_orc_i64
) {
1639 tcg_gen_op3_i64(INDEX_op_orc_i64
, ret
, arg1
, arg2
);
1641 TCGv_i64 t0
= tcg_temp_new_i64();
1642 tcg_gen_not_i64(t0
, arg2
);
1643 tcg_gen_or_i64(ret
, arg1
, t0
);
1644 tcg_temp_free_i64(t0
);
1648 void tcg_gen_rotl_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1650 if (TCG_TARGET_HAS_rot_i64
) {
1651 tcg_gen_op3_i64(INDEX_op_rotl_i64
, ret
, arg1
, arg2
);
1654 t0
= tcg_temp_new_i64();
1655 t1
= tcg_temp_new_i64();
1656 tcg_gen_shl_i64(t0
, arg1
, arg2
);
1657 tcg_gen_subfi_i64(t1
, 64, arg2
);
1658 tcg_gen_shr_i64(t1
, arg1
, t1
);
1659 tcg_gen_or_i64(ret
, t0
, t1
);
1660 tcg_temp_free_i64(t0
);
1661 tcg_temp_free_i64(t1
);
1665 void tcg_gen_rotli_i64(TCGv_i64 ret
, TCGv_i64 arg1
, unsigned arg2
)
1667 tcg_debug_assert(arg2
< 64);
1668 /* some cases can be optimized here */
1670 tcg_gen_mov_i64(ret
, arg1
);
1671 } else if (TCG_TARGET_HAS_rot_i64
) {
1672 TCGv_i64 t0
= tcg_const_i64(arg2
);
1673 tcg_gen_rotl_i64(ret
, arg1
, t0
);
1674 tcg_temp_free_i64(t0
);
1677 t0
= tcg_temp_new_i64();
1678 t1
= tcg_temp_new_i64();
1679 tcg_gen_shli_i64(t0
, arg1
, arg2
);
1680 tcg_gen_shri_i64(t1
, arg1
, 64 - arg2
);
1681 tcg_gen_or_i64(ret
, t0
, t1
);
1682 tcg_temp_free_i64(t0
);
1683 tcg_temp_free_i64(t1
);
1687 void tcg_gen_rotr_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
)
1689 if (TCG_TARGET_HAS_rot_i64
) {
1690 tcg_gen_op3_i64(INDEX_op_rotr_i64
, ret
, arg1
, arg2
);
1693 t0
= tcg_temp_new_i64();
1694 t1
= tcg_temp_new_i64();
1695 tcg_gen_shr_i64(t0
, arg1
, arg2
);
1696 tcg_gen_subfi_i64(t1
, 64, arg2
);
1697 tcg_gen_shl_i64(t1
, arg1
, t1
);
1698 tcg_gen_or_i64(ret
, t0
, t1
);
1699 tcg_temp_free_i64(t0
);
1700 tcg_temp_free_i64(t1
);
1704 void tcg_gen_rotri_i64(TCGv_i64 ret
, TCGv_i64 arg1
, unsigned arg2
)
1706 tcg_debug_assert(arg2
< 64);
1707 /* some cases can be optimized here */
1709 tcg_gen_mov_i64(ret
, arg1
);
1711 tcg_gen_rotli_i64(ret
, arg1
, 64 - arg2
);
1715 void tcg_gen_deposit_i64(TCGv_i64 ret
, TCGv_i64 arg1
, TCGv_i64 arg2
,
1716 unsigned int ofs
, unsigned int len
)
1721 tcg_debug_assert(ofs
< 64);
1722 tcg_debug_assert(len
> 0);
1723 tcg_debug_assert(len
<= 64);
1724 tcg_debug_assert(ofs
+ len
<= 64);
1727 tcg_gen_mov_i64(ret
, arg2
);
1730 if (TCG_TARGET_HAS_deposit_i64
&& TCG_TARGET_deposit_i64_valid(ofs
, len
)) {
1731 tcg_gen_op5ii_i64(INDEX_op_deposit_i64
, ret
, arg1
, arg2
, ofs
, len
);
1735 if (TCG_TARGET_REG_BITS
== 32) {
1737 tcg_gen_deposit_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
),
1738 TCGV_LOW(arg2
), ofs
- 32, len
);
1739 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
));
1742 if (ofs
+ len
<= 32) {
1743 tcg_gen_deposit_i32(TCGV_LOW(ret
), TCGV_LOW(arg1
),
1744 TCGV_LOW(arg2
), ofs
, len
);
1745 tcg_gen_mov_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg1
));
1750 mask
= (1ull << len
) - 1;
1751 t1
= tcg_temp_new_i64();
1753 if (ofs
+ len
< 64) {
1754 tcg_gen_andi_i64(t1
, arg2
, mask
);
1755 tcg_gen_shli_i64(t1
, t1
, ofs
);
1757 tcg_gen_shli_i64(t1
, arg2
, ofs
);
1759 tcg_gen_andi_i64(ret
, arg1
, ~(mask
<< ofs
));
1760 tcg_gen_or_i64(ret
, ret
, t1
);
1762 tcg_temp_free_i64(t1
);
1765 void tcg_gen_extract_i64(TCGv_i64 ret
, TCGv_i64 arg
,
1766 unsigned int ofs
, unsigned int len
)
1768 tcg_debug_assert(ofs
< 64);
1769 tcg_debug_assert(len
> 0);
1770 tcg_debug_assert(len
<= 64);
1771 tcg_debug_assert(ofs
+ len
<= 64);
1773 /* Canonicalize certain special cases, even if extract is supported. */
1774 if (ofs
+ len
== 64) {
1775 tcg_gen_shri_i64(ret
, arg
, 64 - len
);
1779 tcg_gen_andi_i64(ret
, arg
, (1ull << len
) - 1);
1783 if (TCG_TARGET_REG_BITS
== 32) {
1784 /* Look for a 32-bit extract within one of the two words. */
1786 tcg_gen_extract_i32(TCGV_LOW(ret
), TCGV_HIGH(arg
), ofs
- 32, len
);
1787 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1790 if (ofs
+ len
<= 32) {
1791 tcg_gen_extract_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), ofs
, len
);
1792 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
1795 /* The field is split across two words. One double-word
1796 shift is better than two double-word shifts. */
1800 if (TCG_TARGET_HAS_extract_i64
1801 && TCG_TARGET_extract_i64_valid(ofs
, len
)) {
1802 tcg_gen_op4ii_i64(INDEX_op_extract_i64
, ret
, arg
, ofs
, len
);
1806 /* Assume that zero-extension, if available, is cheaper than a shift. */
1807 switch (ofs
+ len
) {
1809 if (TCG_TARGET_HAS_ext32u_i64
) {
1810 tcg_gen_ext32u_i64(ret
, arg
);
1811 tcg_gen_shri_i64(ret
, ret
, ofs
);
1816 if (TCG_TARGET_HAS_ext16u_i64
) {
1817 tcg_gen_ext16u_i64(ret
, arg
);
1818 tcg_gen_shri_i64(ret
, ret
, ofs
);
1823 if (TCG_TARGET_HAS_ext8u_i64
) {
1824 tcg_gen_ext8u_i64(ret
, arg
);
1825 tcg_gen_shri_i64(ret
, ret
, ofs
);
1831 /* ??? Ideally we'd know what values are available for immediate AND.
1832 Assume that 8 bits are available, plus the special cases of 16 and 32,
1833 so that we get ext8u, ext16u, and ext32u. */
1835 case 1 ... 8: case 16: case 32:
1837 tcg_gen_shri_i64(ret
, arg
, ofs
);
1838 tcg_gen_andi_i64(ret
, ret
, (1ull << len
) - 1);
1841 tcg_gen_shli_i64(ret
, arg
, 64 - len
- ofs
);
1842 tcg_gen_shri_i64(ret
, ret
, 64 - len
);
1847 void tcg_gen_sextract_i64(TCGv_i64 ret
, TCGv_i64 arg
,
1848 unsigned int ofs
, unsigned int len
)
1850 tcg_debug_assert(ofs
< 64);
1851 tcg_debug_assert(len
> 0);
1852 tcg_debug_assert(len
<= 64);
1853 tcg_debug_assert(ofs
+ len
<= 64);
1855 /* Canonicalize certain special cases, even if sextract is supported. */
1856 if (ofs
+ len
== 64) {
1857 tcg_gen_sari_i64(ret
, arg
, 64 - len
);
1863 tcg_gen_ext32s_i64(ret
, arg
);
1866 tcg_gen_ext16s_i64(ret
, arg
);
1869 tcg_gen_ext8s_i64(ret
, arg
);
1874 if (TCG_TARGET_REG_BITS
== 32) {
1875 /* Look for a 32-bit extract within one of the two words. */
1877 tcg_gen_sextract_i32(TCGV_LOW(ret
), TCGV_HIGH(arg
), ofs
- 32, len
);
1878 } else if (ofs
+ len
<= 32) {
1879 tcg_gen_sextract_i32(TCGV_LOW(ret
), TCGV_LOW(arg
), ofs
, len
);
1880 } else if (ofs
== 0) {
1881 tcg_gen_mov_i32(TCGV_LOW(ret
), TCGV_LOW(arg
));
1882 tcg_gen_sextract_i32(TCGV_HIGH(ret
), TCGV_HIGH(arg
), 0, len
- 32);
1884 } else if (len
> 32) {
1885 TCGv_i32 t
= tcg_temp_new_i32();
1886 /* Extract the bits for the high word normally. */
1887 tcg_gen_sextract_i32(t
, TCGV_HIGH(arg
), ofs
+ 32, len
- 32);
1888 /* Shift the field down for the low part. */
1889 tcg_gen_shri_i64(ret
, arg
, ofs
);
1890 /* Overwrite the shift into the high part. */
1891 tcg_gen_mov_i32(TCGV_HIGH(ret
), t
);
1892 tcg_temp_free_i32(t
);
1895 /* Shift the field down for the low part, such that the
1896 field sits at the MSB. */
1897 tcg_gen_shri_i64(ret
, arg
, ofs
+ len
- 32);
1898 /* Shift the field down from the MSB, sign extending. */
1899 tcg_gen_sari_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), 32 - len
);
1901 /* Sign-extend the field from 32 bits. */
1902 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
1906 if (TCG_TARGET_HAS_sextract_i64
1907 && TCG_TARGET_extract_i64_valid(ofs
, len
)) {
1908 tcg_gen_op4ii_i64(INDEX_op_sextract_i64
, ret
, arg
, ofs
, len
);
1912 /* Assume that sign-extension, if available, is cheaper than a shift. */
1913 switch (ofs
+ len
) {
1915 if (TCG_TARGET_HAS_ext32s_i64
) {
1916 tcg_gen_ext32s_i64(ret
, arg
);
1917 tcg_gen_sari_i64(ret
, ret
, ofs
);
1922 if (TCG_TARGET_HAS_ext16s_i64
) {
1923 tcg_gen_ext16s_i64(ret
, arg
);
1924 tcg_gen_sari_i64(ret
, ret
, ofs
);
1929 if (TCG_TARGET_HAS_ext8s_i64
) {
1930 tcg_gen_ext8s_i64(ret
, arg
);
1931 tcg_gen_sari_i64(ret
, ret
, ofs
);
1938 if (TCG_TARGET_HAS_ext32s_i64
) {
1939 tcg_gen_shri_i64(ret
, arg
, ofs
);
1940 tcg_gen_ext32s_i64(ret
, ret
);
1945 if (TCG_TARGET_HAS_ext16s_i64
) {
1946 tcg_gen_shri_i64(ret
, arg
, ofs
);
1947 tcg_gen_ext16s_i64(ret
, ret
);
1952 if (TCG_TARGET_HAS_ext8s_i64
) {
1953 tcg_gen_shri_i64(ret
, arg
, ofs
);
1954 tcg_gen_ext8s_i64(ret
, ret
);
1959 tcg_gen_shli_i64(ret
, arg
, 64 - len
- ofs
);
1960 tcg_gen_sari_i64(ret
, ret
, 64 - len
);
1963 void tcg_gen_movcond_i64(TCGCond cond
, TCGv_i64 ret
, TCGv_i64 c1
,
1964 TCGv_i64 c2
, TCGv_i64 v1
, TCGv_i64 v2
)
1966 if (cond
== TCG_COND_ALWAYS
) {
1967 tcg_gen_mov_i64(ret
, v1
);
1968 } else if (cond
== TCG_COND_NEVER
) {
1969 tcg_gen_mov_i64(ret
, v2
);
1970 } else if (TCG_TARGET_REG_BITS
== 32) {
1971 TCGv_i32 t0
= tcg_temp_new_i32();
1972 TCGv_i32 t1
= tcg_temp_new_i32();
1973 tcg_gen_op6i_i32(INDEX_op_setcond2_i32
, t0
,
1974 TCGV_LOW(c1
), TCGV_HIGH(c1
),
1975 TCGV_LOW(c2
), TCGV_HIGH(c2
), cond
);
1977 if (TCG_TARGET_HAS_movcond_i32
) {
1978 tcg_gen_movi_i32(t1
, 0);
1979 tcg_gen_movcond_i32(TCG_COND_NE
, TCGV_LOW(ret
), t0
, t1
,
1980 TCGV_LOW(v1
), TCGV_LOW(v2
));
1981 tcg_gen_movcond_i32(TCG_COND_NE
, TCGV_HIGH(ret
), t0
, t1
,
1982 TCGV_HIGH(v1
), TCGV_HIGH(v2
));
1984 tcg_gen_neg_i32(t0
, t0
);
1986 tcg_gen_and_i32(t1
, TCGV_LOW(v1
), t0
);
1987 tcg_gen_andc_i32(TCGV_LOW(ret
), TCGV_LOW(v2
), t0
);
1988 tcg_gen_or_i32(TCGV_LOW(ret
), TCGV_LOW(ret
), t1
);
1990 tcg_gen_and_i32(t1
, TCGV_HIGH(v1
), t0
);
1991 tcg_gen_andc_i32(TCGV_HIGH(ret
), TCGV_HIGH(v2
), t0
);
1992 tcg_gen_or_i32(TCGV_HIGH(ret
), TCGV_HIGH(ret
), t1
);
1994 tcg_temp_free_i32(t0
);
1995 tcg_temp_free_i32(t1
);
1996 } else if (TCG_TARGET_HAS_movcond_i64
) {
1997 tcg_gen_op6i_i64(INDEX_op_movcond_i64
, ret
, c1
, c2
, v1
, v2
, cond
);
1999 TCGv_i64 t0
= tcg_temp_new_i64();
2000 TCGv_i64 t1
= tcg_temp_new_i64();
2001 tcg_gen_setcond_i64(cond
, t0
, c1
, c2
);
2002 tcg_gen_neg_i64(t0
, t0
);
2003 tcg_gen_and_i64(t1
, v1
, t0
);
2004 tcg_gen_andc_i64(ret
, v2
, t0
);
2005 tcg_gen_or_i64(ret
, ret
, t1
);
2006 tcg_temp_free_i64(t0
);
2007 tcg_temp_free_i64(t1
);
2011 void tcg_gen_add2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 al
,
2012 TCGv_i64 ah
, TCGv_i64 bl
, TCGv_i64 bh
)
2014 if (TCG_TARGET_HAS_add2_i64
) {
2015 tcg_gen_op6_i64(INDEX_op_add2_i64
, rl
, rh
, al
, ah
, bl
, bh
);
2017 TCGv_i64 t0
= tcg_temp_new_i64();
2018 TCGv_i64 t1
= tcg_temp_new_i64();
2019 tcg_gen_add_i64(t0
, al
, bl
);
2020 tcg_gen_setcond_i64(TCG_COND_LTU
, t1
, t0
, al
);
2021 tcg_gen_add_i64(rh
, ah
, bh
);
2022 tcg_gen_add_i64(rh
, rh
, t1
);
2023 tcg_gen_mov_i64(rl
, t0
);
2024 tcg_temp_free_i64(t0
);
2025 tcg_temp_free_i64(t1
);
2029 void tcg_gen_sub2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 al
,
2030 TCGv_i64 ah
, TCGv_i64 bl
, TCGv_i64 bh
)
2032 if (TCG_TARGET_HAS_sub2_i64
) {
2033 tcg_gen_op6_i64(INDEX_op_sub2_i64
, rl
, rh
, al
, ah
, bl
, bh
);
2035 TCGv_i64 t0
= tcg_temp_new_i64();
2036 TCGv_i64 t1
= tcg_temp_new_i64();
2037 tcg_gen_sub_i64(t0
, al
, bl
);
2038 tcg_gen_setcond_i64(TCG_COND_LTU
, t1
, al
, bl
);
2039 tcg_gen_sub_i64(rh
, ah
, bh
);
2040 tcg_gen_sub_i64(rh
, rh
, t1
);
2041 tcg_gen_mov_i64(rl
, t0
);
2042 tcg_temp_free_i64(t0
);
2043 tcg_temp_free_i64(t1
);
2047 void tcg_gen_mulu2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2049 if (TCG_TARGET_HAS_mulu2_i64
) {
2050 tcg_gen_op4_i64(INDEX_op_mulu2_i64
, rl
, rh
, arg1
, arg2
);
2051 } else if (TCG_TARGET_HAS_muluh_i64
) {
2052 TCGv_i64 t
= tcg_temp_new_i64();
2053 tcg_gen_op3_i64(INDEX_op_mul_i64
, t
, arg1
, arg2
);
2054 tcg_gen_op3_i64(INDEX_op_muluh_i64
, rh
, arg1
, arg2
);
2055 tcg_gen_mov_i64(rl
, t
);
2056 tcg_temp_free_i64(t
);
2058 TCGv_i64 t0
= tcg_temp_new_i64();
2059 tcg_gen_mul_i64(t0
, arg1
, arg2
);
2060 gen_helper_muluh_i64(rh
, arg1
, arg2
);
2061 tcg_gen_mov_i64(rl
, t0
);
2062 tcg_temp_free_i64(t0
);
2066 void tcg_gen_muls2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2068 if (TCG_TARGET_HAS_muls2_i64
) {
2069 tcg_gen_op4_i64(INDEX_op_muls2_i64
, rl
, rh
, arg1
, arg2
);
2070 } else if (TCG_TARGET_HAS_mulsh_i64
) {
2071 TCGv_i64 t
= tcg_temp_new_i64();
2072 tcg_gen_op3_i64(INDEX_op_mul_i64
, t
, arg1
, arg2
);
2073 tcg_gen_op3_i64(INDEX_op_mulsh_i64
, rh
, arg1
, arg2
);
2074 tcg_gen_mov_i64(rl
, t
);
2075 tcg_temp_free_i64(t
);
2076 } else if (TCG_TARGET_HAS_mulu2_i64
|| TCG_TARGET_HAS_muluh_i64
) {
2077 TCGv_i64 t0
= tcg_temp_new_i64();
2078 TCGv_i64 t1
= tcg_temp_new_i64();
2079 TCGv_i64 t2
= tcg_temp_new_i64();
2080 TCGv_i64 t3
= tcg_temp_new_i64();
2081 tcg_gen_mulu2_i64(t0
, t1
, arg1
, arg2
);
2082 /* Adjust for negative inputs. */
2083 tcg_gen_sari_i64(t2
, arg1
, 63);
2084 tcg_gen_sari_i64(t3
, arg2
, 63);
2085 tcg_gen_and_i64(t2
, t2
, arg2
);
2086 tcg_gen_and_i64(t3
, t3
, arg1
);
2087 tcg_gen_sub_i64(rh
, t1
, t2
);
2088 tcg_gen_sub_i64(rh
, rh
, t3
);
2089 tcg_gen_mov_i64(rl
, t0
);
2090 tcg_temp_free_i64(t0
);
2091 tcg_temp_free_i64(t1
);
2092 tcg_temp_free_i64(t2
);
2093 tcg_temp_free_i64(t3
);
2095 TCGv_i64 t0
= tcg_temp_new_i64();
2096 tcg_gen_mul_i64(t0
, arg1
, arg2
);
2097 gen_helper_mulsh_i64(rh
, arg1
, arg2
);
2098 tcg_gen_mov_i64(rl
, t0
);
2099 tcg_temp_free_i64(t0
);
2103 void tcg_gen_mulsu2_i64(TCGv_i64 rl
, TCGv_i64 rh
, TCGv_i64 arg1
, TCGv_i64 arg2
)
2105 TCGv_i64 t0
= tcg_temp_new_i64();
2106 TCGv_i64 t1
= tcg_temp_new_i64();
2107 TCGv_i64 t2
= tcg_temp_new_i64();
2108 tcg_gen_mulu2_i64(t0
, t1
, arg1
, arg2
);
2109 /* Adjust for negative input for the signed arg1. */
2110 tcg_gen_sari_i64(t2
, arg1
, 63);
2111 tcg_gen_and_i64(t2
, t2
, arg2
);
2112 tcg_gen_sub_i64(rh
, t1
, t2
);
2113 tcg_gen_mov_i64(rl
, t0
);
2114 tcg_temp_free_i64(t0
);
2115 tcg_temp_free_i64(t1
);
2116 tcg_temp_free_i64(t2
);
2119 /* Size changing operations. */
2121 void tcg_gen_extrl_i64_i32(TCGv_i32 ret
, TCGv_i64 arg
)
2123 if (TCG_TARGET_REG_BITS
== 32) {
2124 tcg_gen_mov_i32(ret
, TCGV_LOW(arg
));
2125 } else if (TCG_TARGET_HAS_extrl_i64_i32
) {
2126 tcg_gen_op2(&tcg_ctx
, INDEX_op_extrl_i64_i32
,
2127 GET_TCGV_I32(ret
), GET_TCGV_I64(arg
));
2129 tcg_gen_mov_i32(ret
, MAKE_TCGV_I32(GET_TCGV_I64(arg
)));
2133 void tcg_gen_extrh_i64_i32(TCGv_i32 ret
, TCGv_i64 arg
)
2135 if (TCG_TARGET_REG_BITS
== 32) {
2136 tcg_gen_mov_i32(ret
, TCGV_HIGH(arg
));
2137 } else if (TCG_TARGET_HAS_extrh_i64_i32
) {
2138 tcg_gen_op2(&tcg_ctx
, INDEX_op_extrh_i64_i32
,
2139 GET_TCGV_I32(ret
), GET_TCGV_I64(arg
));
2141 TCGv_i64 t
= tcg_temp_new_i64();
2142 tcg_gen_shri_i64(t
, arg
, 32);
2143 tcg_gen_mov_i32(ret
, MAKE_TCGV_I32(GET_TCGV_I64(t
)));
2144 tcg_temp_free_i64(t
);
2148 void tcg_gen_extu_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
2150 if (TCG_TARGET_REG_BITS
== 32) {
2151 tcg_gen_mov_i32(TCGV_LOW(ret
), arg
);
2152 tcg_gen_movi_i32(TCGV_HIGH(ret
), 0);
2154 tcg_gen_op2(&tcg_ctx
, INDEX_op_extu_i32_i64
,
2155 GET_TCGV_I64(ret
), GET_TCGV_I32(arg
));
2159 void tcg_gen_ext_i32_i64(TCGv_i64 ret
, TCGv_i32 arg
)
2161 if (TCG_TARGET_REG_BITS
== 32) {
2162 tcg_gen_mov_i32(TCGV_LOW(ret
), arg
);
2163 tcg_gen_sari_i32(TCGV_HIGH(ret
), TCGV_LOW(ret
), 31);
2165 tcg_gen_op2(&tcg_ctx
, INDEX_op_ext_i32_i64
,
2166 GET_TCGV_I64(ret
), GET_TCGV_I32(arg
));
2170 void tcg_gen_concat_i32_i64(TCGv_i64 dest
, TCGv_i32 low
, TCGv_i32 high
)
2174 if (TCG_TARGET_REG_BITS
== 32) {
2175 tcg_gen_mov_i32(TCGV_LOW(dest
), low
);
2176 tcg_gen_mov_i32(TCGV_HIGH(dest
), high
);
2180 tmp
= tcg_temp_new_i64();
2181 /* These extensions are only needed for type correctness.
2182 We may be able to do better given target specific information. */
2183 tcg_gen_extu_i32_i64(tmp
, high
);
2184 tcg_gen_extu_i32_i64(dest
, low
);
2185 /* If deposit is available, use it. Otherwise use the extra
2186 knowledge that we have of the zero-extensions above. */
2187 if (TCG_TARGET_HAS_deposit_i64
&& TCG_TARGET_deposit_i64_valid(32, 32)) {
2188 tcg_gen_deposit_i64(dest
, dest
, tmp
, 32, 32);
2190 tcg_gen_shli_i64(tmp
, tmp
, 32);
2191 tcg_gen_or_i64(dest
, dest
, tmp
);
2193 tcg_temp_free_i64(tmp
);
2196 void tcg_gen_extr_i64_i32(TCGv_i32 lo
, TCGv_i32 hi
, TCGv_i64 arg
)
2198 if (TCG_TARGET_REG_BITS
== 32) {
2199 tcg_gen_mov_i32(lo
, TCGV_LOW(arg
));
2200 tcg_gen_mov_i32(hi
, TCGV_HIGH(arg
));
2202 tcg_gen_extrl_i64_i32(lo
, arg
);
2203 tcg_gen_extrh_i64_i32(hi
, arg
);
2207 void tcg_gen_extr32_i64(TCGv_i64 lo
, TCGv_i64 hi
, TCGv_i64 arg
)
2209 tcg_gen_ext32u_i64(lo
, arg
);
2210 tcg_gen_shri_i64(hi
, arg
, 32);
2213 /* QEMU specific operations. */
2215 void tcg_gen_goto_tb(unsigned idx
)
2217 /* We only support two chained exits. */
2218 tcg_debug_assert(idx
<= 1);
2219 #ifdef CONFIG_DEBUG_TCG
2220 /* Verify that we havn't seen this numbered exit before. */
2221 tcg_debug_assert((tcg_ctx
.goto_tb_issue_mask
& (1 << idx
)) == 0);
2222 tcg_ctx
.goto_tb_issue_mask
|= 1 << idx
;
2224 tcg_gen_op1i(INDEX_op_goto_tb
, idx
);
2227 static inline TCGMemOp
tcg_canonicalize_memop(TCGMemOp op
, bool is64
, bool st
)
2229 /* Trigger the asserts within as early as possible. */
2230 (void)get_alignment_bits(op
);
2232 switch (op
& MO_SIZE
) {
2255 static void gen_ldst_i32(TCGOpcode opc
, TCGv_i32 val
, TCGv addr
,
2256 TCGMemOp memop
, TCGArg idx
)
2258 TCGMemOpIdx oi
= make_memop_idx(memop
, idx
);
2259 #if TARGET_LONG_BITS == 32
2260 tcg_gen_op3i_i32(opc
, val
, addr
, oi
);
2262 if (TCG_TARGET_REG_BITS
== 32) {
2263 tcg_gen_op4i_i32(opc
, val
, TCGV_LOW(addr
), TCGV_HIGH(addr
), oi
);
2265 tcg_gen_op3(&tcg_ctx
, opc
, GET_TCGV_I32(val
), GET_TCGV_I64(addr
), oi
);
2270 static void gen_ldst_i64(TCGOpcode opc
, TCGv_i64 val
, TCGv addr
,
2271 TCGMemOp memop
, TCGArg idx
)
2273 TCGMemOpIdx oi
= make_memop_idx(memop
, idx
);
2274 #if TARGET_LONG_BITS == 32
2275 if (TCG_TARGET_REG_BITS
== 32) {
2276 tcg_gen_op4i_i32(opc
, TCGV_LOW(val
), TCGV_HIGH(val
), addr
, oi
);
2278 tcg_gen_op3(&tcg_ctx
, opc
, GET_TCGV_I64(val
), GET_TCGV_I32(addr
), oi
);
2281 if (TCG_TARGET_REG_BITS
== 32) {
2282 tcg_gen_op5i_i32(opc
, TCGV_LOW(val
), TCGV_HIGH(val
),
2283 TCGV_LOW(addr
), TCGV_HIGH(addr
), oi
);
2285 tcg_gen_op3i_i64(opc
, val
, addr
, oi
);
2290 void tcg_gen_qemu_ld_i32(TCGv_i32 val
, TCGv addr
, TCGArg idx
, TCGMemOp memop
)
2292 memop
= tcg_canonicalize_memop(memop
, 0, 0);
2293 trace_guest_mem_before_tcg(tcg_ctx
.cpu
, tcg_ctx
.tcg_env
,
2294 addr
, trace_mem_get_info(memop
, 0));
2295 gen_ldst_i32(INDEX_op_qemu_ld_i32
, val
, addr
, memop
, idx
);
2298 void tcg_gen_qemu_st_i32(TCGv_i32 val
, TCGv addr
, TCGArg idx
, TCGMemOp memop
)
2300 memop
= tcg_canonicalize_memop(memop
, 0, 1);
2301 trace_guest_mem_before_tcg(tcg_ctx
.cpu
, tcg_ctx
.tcg_env
,
2302 addr
, trace_mem_get_info(memop
, 1));
2303 gen_ldst_i32(INDEX_op_qemu_st_i32
, val
, addr
, memop
, idx
);
2306 void tcg_gen_qemu_ld_i64(TCGv_i64 val
, TCGv addr
, TCGArg idx
, TCGMemOp memop
)
2308 if (TCG_TARGET_REG_BITS
== 32 && (memop
& MO_SIZE
) < MO_64
) {
2309 tcg_gen_qemu_ld_i32(TCGV_LOW(val
), addr
, idx
, memop
);
2310 if (memop
& MO_SIGN
) {
2311 tcg_gen_sari_i32(TCGV_HIGH(val
), TCGV_LOW(val
), 31);
2313 tcg_gen_movi_i32(TCGV_HIGH(val
), 0);
2318 memop
= tcg_canonicalize_memop(memop
, 1, 0);
2319 trace_guest_mem_before_tcg(tcg_ctx
.cpu
, tcg_ctx
.tcg_env
,
2320 addr
, trace_mem_get_info(memop
, 0));
2321 gen_ldst_i64(INDEX_op_qemu_ld_i64
, val
, addr
, memop
, idx
);
2324 void tcg_gen_qemu_st_i64(TCGv_i64 val
, TCGv addr
, TCGArg idx
, TCGMemOp memop
)
2326 if (TCG_TARGET_REG_BITS
== 32 && (memop
& MO_SIZE
) < MO_64
) {
2327 tcg_gen_qemu_st_i32(TCGV_LOW(val
), addr
, idx
, memop
);
2331 memop
= tcg_canonicalize_memop(memop
, 1, 1);
2332 trace_guest_mem_before_tcg(tcg_ctx
.cpu
, tcg_ctx
.tcg_env
,
2333 addr
, trace_mem_get_info(memop
, 1));
2334 gen_ldst_i64(INDEX_op_qemu_st_i64
, val
, addr
, memop
, idx
);
2337 static void tcg_gen_ext_i32(TCGv_i32 ret
, TCGv_i32 val
, TCGMemOp opc
)
2339 switch (opc
& MO_SSIZE
) {
2341 tcg_gen_ext8s_i32(ret
, val
);
2344 tcg_gen_ext8u_i32(ret
, val
);
2347 tcg_gen_ext16s_i32(ret
, val
);
2350 tcg_gen_ext16u_i32(ret
, val
);
2353 tcg_gen_mov_i32(ret
, val
);
2358 static void tcg_gen_ext_i64(TCGv_i64 ret
, TCGv_i64 val
, TCGMemOp opc
)
2360 switch (opc
& MO_SSIZE
) {
2362 tcg_gen_ext8s_i64(ret
, val
);
2365 tcg_gen_ext8u_i64(ret
, val
);
2368 tcg_gen_ext16s_i64(ret
, val
);
2371 tcg_gen_ext16u_i64(ret
, val
);
2374 tcg_gen_ext32s_i64(ret
, val
);
2377 tcg_gen_ext32u_i64(ret
, val
);
2380 tcg_gen_mov_i64(ret
, val
);
/* Function-pointer types for the out-of-line atomic helpers.
 * Under SOFTMMU the helpers take a trailing TCGv_i32 memop+mmu-index
 * operand (oi); the user-mode variants omit it.  */
#ifdef CONFIG_SOFTMMU
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i64, TCGv_i32);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv,
                                  TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv,
                                  TCGv_i64, TCGv_i32);
#else
typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32, TCGv_i32);
typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64, TCGv_i64);
typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32);
typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64);
#endif
2401 #ifdef CONFIG_ATOMIC64
2402 # define WITH_ATOMIC64(X) X,
2404 # define WITH_ATOMIC64(X)
2407 static void * const table_cmpxchg
[16] = {
2408 [MO_8
] = gen_helper_atomic_cmpxchgb
,
2409 [MO_16
| MO_LE
] = gen_helper_atomic_cmpxchgw_le
,
2410 [MO_16
| MO_BE
] = gen_helper_atomic_cmpxchgw_be
,
2411 [MO_32
| MO_LE
] = gen_helper_atomic_cmpxchgl_le
,
2412 [MO_32
| MO_BE
] = gen_helper_atomic_cmpxchgl_be
,
2413 WITH_ATOMIC64([MO_64
| MO_LE
] = gen_helper_atomic_cmpxchgq_le
)
2414 WITH_ATOMIC64([MO_64
| MO_BE
] = gen_helper_atomic_cmpxchgq_be
)
2417 void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv
, TCGv addr
, TCGv_i32 cmpv
,
2418 TCGv_i32 newv
, TCGArg idx
, TCGMemOp memop
)
2420 memop
= tcg_canonicalize_memop(memop
, 0, 0);
2422 if (!parallel_cpus
) {
2423 TCGv_i32 t1
= tcg_temp_new_i32();
2424 TCGv_i32 t2
= tcg_temp_new_i32();
2426 tcg_gen_ext_i32(t2
, cmpv
, memop
& MO_SIZE
);
2428 tcg_gen_qemu_ld_i32(t1
, addr
, idx
, memop
& ~MO_SIGN
);
2429 tcg_gen_movcond_i32(TCG_COND_EQ
, t2
, t1
, t2
, newv
, t1
);
2430 tcg_gen_qemu_st_i32(t2
, addr
, idx
, memop
);
2431 tcg_temp_free_i32(t2
);
2433 if (memop
& MO_SIGN
) {
2434 tcg_gen_ext_i32(retv
, t1
, memop
);
2436 tcg_gen_mov_i32(retv
, t1
);
2438 tcg_temp_free_i32(t1
);
2440 gen_atomic_cx_i32 gen
;
2442 gen
= table_cmpxchg
[memop
& (MO_SIZE
| MO_BSWAP
)];
2443 tcg_debug_assert(gen
!= NULL
);
2445 #ifdef CONFIG_SOFTMMU
2447 TCGv_i32 oi
= tcg_const_i32(make_memop_idx(memop
& ~MO_SIGN
, idx
));
2448 gen(retv
, tcg_ctx
.tcg_env
, addr
, cmpv
, newv
, oi
);
2449 tcg_temp_free_i32(oi
);
2452 gen(retv
, tcg_ctx
.tcg_env
, addr
, cmpv
, newv
);
2455 if (memop
& MO_SIGN
) {
2456 tcg_gen_ext_i32(retv
, retv
, memop
);
2461 void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv
, TCGv addr
, TCGv_i64 cmpv
,
2462 TCGv_i64 newv
, TCGArg idx
, TCGMemOp memop
)
2464 memop
= tcg_canonicalize_memop(memop
, 1, 0);
2466 if (!parallel_cpus
) {
2467 TCGv_i64 t1
= tcg_temp_new_i64();
2468 TCGv_i64 t2
= tcg_temp_new_i64();
2470 tcg_gen_ext_i64(t2
, cmpv
, memop
& MO_SIZE
);
2472 tcg_gen_qemu_ld_i64(t1
, addr
, idx
, memop
& ~MO_SIGN
);
2473 tcg_gen_movcond_i64(TCG_COND_EQ
, t2
, t1
, t2
, newv
, t1
);
2474 tcg_gen_qemu_st_i64(t2
, addr
, idx
, memop
);
2475 tcg_temp_free_i64(t2
);
2477 if (memop
& MO_SIGN
) {
2478 tcg_gen_ext_i64(retv
, t1
, memop
);
2480 tcg_gen_mov_i64(retv
, t1
);
2482 tcg_temp_free_i64(t1
);
2483 } else if ((memop
& MO_SIZE
) == MO_64
) {
2484 #ifdef CONFIG_ATOMIC64
2485 gen_atomic_cx_i64 gen
;
2487 gen
= table_cmpxchg
[memop
& (MO_SIZE
| MO_BSWAP
)];
2488 tcg_debug_assert(gen
!= NULL
);
2490 #ifdef CONFIG_SOFTMMU
2492 TCGv_i32 oi
= tcg_const_i32(make_memop_idx(memop
, idx
));
2493 gen(retv
, tcg_ctx
.tcg_env
, addr
, cmpv
, newv
, oi
);
2494 tcg_temp_free_i32(oi
);
2497 gen(retv
, tcg_ctx
.tcg_env
, addr
, cmpv
, newv
);
2500 gen_helper_exit_atomic(tcg_ctx
.tcg_env
);
2501 #endif /* CONFIG_ATOMIC64 */
2503 TCGv_i32 c32
= tcg_temp_new_i32();
2504 TCGv_i32 n32
= tcg_temp_new_i32();
2505 TCGv_i32 r32
= tcg_temp_new_i32();
2507 tcg_gen_extrl_i64_i32(c32
, cmpv
);
2508 tcg_gen_extrl_i64_i32(n32
, newv
);
2509 tcg_gen_atomic_cmpxchg_i32(r32
, addr
, c32
, n32
, idx
, memop
& ~MO_SIGN
);
2510 tcg_temp_free_i32(c32
);
2511 tcg_temp_free_i32(n32
);
2513 tcg_gen_extu_i32_i64(retv
, r32
);
2514 tcg_temp_free_i32(r32
);
2516 if (memop
& MO_SIGN
) {
2517 tcg_gen_ext_i64(retv
, retv
, memop
);
2522 static void do_nonatomic_op_i32(TCGv_i32 ret
, TCGv addr
, TCGv_i32 val
,
2523 TCGArg idx
, TCGMemOp memop
, bool new_val
,
2524 void (*gen
)(TCGv_i32
, TCGv_i32
, TCGv_i32
))
2526 TCGv_i32 t1
= tcg_temp_new_i32();
2527 TCGv_i32 t2
= tcg_temp_new_i32();
2529 memop
= tcg_canonicalize_memop(memop
, 0, 0);
2531 tcg_gen_qemu_ld_i32(t1
, addr
, idx
, memop
& ~MO_SIGN
);
2533 tcg_gen_qemu_st_i32(t2
, addr
, idx
, memop
);
2535 tcg_gen_ext_i32(ret
, (new_val
? t2
: t1
), memop
);
2536 tcg_temp_free_i32(t1
);
2537 tcg_temp_free_i32(t2
);
2540 static void do_atomic_op_i32(TCGv_i32 ret
, TCGv addr
, TCGv_i32 val
,
2541 TCGArg idx
, TCGMemOp memop
, void * const table
[])
2543 gen_atomic_op_i32 gen
;
2545 memop
= tcg_canonicalize_memop(memop
, 0, 0);
2547 gen
= table
[memop
& (MO_SIZE
| MO_BSWAP
)];
2548 tcg_debug_assert(gen
!= NULL
);
2550 #ifdef CONFIG_SOFTMMU
2552 TCGv_i32 oi
= tcg_const_i32(make_memop_idx(memop
& ~MO_SIGN
, idx
));
2553 gen(ret
, tcg_ctx
.tcg_env
, addr
, val
, oi
);
2554 tcg_temp_free_i32(oi
);
2557 gen(ret
, tcg_ctx
.tcg_env
, addr
, val
);
2560 if (memop
& MO_SIGN
) {
2561 tcg_gen_ext_i32(ret
, ret
, memop
);
2565 static void do_nonatomic_op_i64(TCGv_i64 ret
, TCGv addr
, TCGv_i64 val
,
2566 TCGArg idx
, TCGMemOp memop
, bool new_val
,
2567 void (*gen
)(TCGv_i64
, TCGv_i64
, TCGv_i64
))
2569 TCGv_i64 t1
= tcg_temp_new_i64();
2570 TCGv_i64 t2
= tcg_temp_new_i64();
2572 memop
= tcg_canonicalize_memop(memop
, 1, 0);
2574 tcg_gen_qemu_ld_i64(t1
, addr
, idx
, memop
& ~MO_SIGN
);
2576 tcg_gen_qemu_st_i64(t2
, addr
, idx
, memop
);
2578 tcg_gen_ext_i64(ret
, (new_val
? t2
: t1
), memop
);
2579 tcg_temp_free_i64(t1
);
2580 tcg_temp_free_i64(t2
);
2583 static void do_atomic_op_i64(TCGv_i64 ret
, TCGv addr
, TCGv_i64 val
,
2584 TCGArg idx
, TCGMemOp memop
, void * const table
[])
2586 memop
= tcg_canonicalize_memop(memop
, 1, 0);
2588 if ((memop
& MO_SIZE
) == MO_64
) {
2589 #ifdef CONFIG_ATOMIC64
2590 gen_atomic_op_i64 gen
;
2592 gen
= table
[memop
& (MO_SIZE
| MO_BSWAP
)];
2593 tcg_debug_assert(gen
!= NULL
);
2595 #ifdef CONFIG_SOFTMMU
2597 TCGv_i32 oi
= tcg_const_i32(make_memop_idx(memop
& ~MO_SIGN
, idx
));
2598 gen(ret
, tcg_ctx
.tcg_env
, addr
, val
, oi
);
2599 tcg_temp_free_i32(oi
);
2602 gen(ret
, tcg_ctx
.tcg_env
, addr
, val
);
2605 gen_helper_exit_atomic(tcg_ctx
.tcg_env
);
2606 #endif /* CONFIG_ATOMIC64 */
2608 TCGv_i32 v32
= tcg_temp_new_i32();
2609 TCGv_i32 r32
= tcg_temp_new_i32();
2611 tcg_gen_extrl_i64_i32(v32
, val
);
2612 do_atomic_op_i32(r32
, addr
, v32
, idx
, memop
& ~MO_SIGN
, table
);
2613 tcg_temp_free_i32(v32
);
2615 tcg_gen_extu_i32_i64(ret
, r32
);
2616 tcg_temp_free_i32(r32
);
2618 if (memop
& MO_SIGN
) {
2619 tcg_gen_ext_i64(ret
, ret
, memop
);
/* Define a helper-dispatch table plus the i32 and i64 front ends for one
 * atomic read-modify-write operation.  NEW selects whether the new value
 * (1) or the old value (0) is returned in the non-atomic expansion.  */
#define GEN_ATOMIC_HELPER(NAME, OP, NEW)                                \
static void * const table_##NAME[16] = {                                \
    [MO_8] = gen_helper_atomic_##NAME##b,                               \
    [MO_16 | MO_LE] = gen_helper_atomic_##NAME##w_le,                   \
    [MO_16 | MO_BE] = gen_helper_atomic_##NAME##w_be,                   \
    [MO_32 | MO_LE] = gen_helper_atomic_##NAME##l_le,                   \
    [MO_32 | MO_BE] = gen_helper_atomic_##NAME##l_be,                   \
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_##NAME##q_le)     \
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_##NAME##q_be)     \
};                                                                      \
void tcg_gen_atomic_##NAME##_i32                                        \
    (TCGv_i32 ret, TCGv addr, TCGv_i32 val, TCGArg idx, TCGMemOp memop) \
{                                                                       \
    if (parallel_cpus) {                                                \
        do_atomic_op_i32(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i32(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i32);                        \
    }                                                                   \
}                                                                       \
void tcg_gen_atomic_##NAME##_i64                                        \
    (TCGv_i64 ret, TCGv addr, TCGv_i64 val, TCGArg idx, TCGMemOp memop) \
{                                                                       \
    if (parallel_cpus) {                                                \
        do_atomic_op_i64(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i64(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i64);                        \
    }                                                                   \
}
2655 GEN_ATOMIC_HELPER(fetch_add
, add
, 0)
2656 GEN_ATOMIC_HELPER(fetch_and
, and, 0)
2657 GEN_ATOMIC_HELPER(fetch_or
, or, 0)
2658 GEN_ATOMIC_HELPER(fetch_xor
, xor, 0)
2660 GEN_ATOMIC_HELPER(add_fetch
, add
, 1)
2661 GEN_ATOMIC_HELPER(and_fetch
, and, 1)
2662 GEN_ATOMIC_HELPER(or_fetch
, or, 1)
2663 GEN_ATOMIC_HELPER(xor_fetch
, xor, 1)
2665 static void tcg_gen_mov2_i32(TCGv_i32 r
, TCGv_i32 a
, TCGv_i32 b
)
2667 tcg_gen_mov_i32(r
, b
);
2670 static void tcg_gen_mov2_i64(TCGv_i64 r
, TCGv_i64 a
, TCGv_i64 b
)
2672 tcg_gen_mov_i64(r
, b
);
2675 GEN_ATOMIC_HELPER(xchg
, mov2
, 0)
2677 #undef GEN_ATOMIC_HELPER