/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif
#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"
#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif

#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif
__thread uintptr_t tci_tb_ptr;
static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}
#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(const tcg_target_ulong *regs, TCGReg index)
{
    return (int32_t)tci_read_reg(regs, index);
}
#endif
#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(const tcg_target_ulong *regs, TCGReg index)
{
    return tci_read_reg(regs, index);
}
#endif
static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}
#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#endif
#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
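/* Example: tci_uint64(0x00000001, 0x00000002) == 0x0000000100000002. */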
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value = *(const uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value = *(const int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(const uint8_t **tb_ptr)
{
    uint64_t value = *(const uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif
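/*
 * Each tci_read_* helper consumes one operand from the bytecode stream
 * and advances *tb_ptr past it, so consecutive calls walk the operands
 * of a single instruction in encoding order.
 */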
/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_r(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r(regs, tb_ptr);
    return tci_uint64(tci_read_r(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif
/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(regs, tb_ptr) << 32;
#endif
    return taddr;
}
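/*
 * When TARGET_LONG_BITS > TCG_TARGET_REG_BITS (a 64-bit guest on a
 * 32-bit host), the guest address arrives as two register operands:
 * e.g. a low word of 0x00001000 and a high word of 0x00000002 combine
 * to the guest address 0x0000000200001000.
 */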
static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;

    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    /* ... TCG_COND_NE, the signed orderings LT/GE/LE/GT on i0/i1 and
     *     the unsigned orderings LTU/GEU/LEU/GTU on u0/u1 ... */
    default:
        g_assert_not_reached();
    }
    return result;
}
static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;

    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    /* ... remaining conditions, as in tci_compare32 ... */
    default:
        g_assert_not_reached();
    }
    return result;
}
#define qemu_ld_ub \
    cpu_ldub_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leuw \
    cpu_lduw_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leul \
    cpu_ldl_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leq \
    cpu_ldq_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beuw \
    cpu_lduw_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beul \
    cpu_ldl_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beq \
    cpu_ldq_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_b(X) \
    cpu_stb_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lew(X) \
    cpu_stw_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lel(X) \
    cpu_stl_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_leq(X) \
    cpu_stq_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bew(X) \
    cpu_stw_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bel(X) \
    cpu_stl_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_beq(X) \
    cpu_stq_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
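/*
 * These helpers expand inside tcg_qemu_tb_exec below and pick up env,
 * taddr, oi and tb_ptr from the surrounding scope.  qemu_ld_leul, for
 * example, performs a little-endian 32-bit guest load at taddr using
 * the mmu index encoded in oi, with the current bytecode pointer
 * passed as the retaddr for exception unwinding.
 */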
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
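/*
 * Example: on a 64-bit host, CASE_32_64(add) expands to
 *     case INDEX_op_add_i64: case INDEX_op_add_i32:
 * so a single handler body serves both operand widths.
 */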
/* Interpret pseudo code in tb. */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code.
 * Therefore, disable CFI checks in the interpreter function.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint8_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        const uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOpIdx oi;

        /* Skip opcode and size entry. */
        tb_ptr += 2;
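        /*
         * Every TCI instruction starts with a one-byte opcode and a
         * one-byte total length, followed by the operands; the
         * "tb_ptr += 2" above skips that header.  The length byte is
         * what the CONFIG_DEBUG_TCG assertions below check against
         * op_size.
         */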
        switch (opc) {
        case INDEX_op_call:
            t0 = tci_read_i(&tb_ptr);
            tci_tb_ptr = (uintptr_t)tb_ptr;
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R4),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10),
                                          tci_read_reg(regs, TCG_REG_R11));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R4),
                                          tci_read_reg(regs, TCG_REG_R5));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
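            /*
             * Note: on a 32-bit host a 64-bit helper result is returned
             * in the R0/R1 register pair (low word in R0), which is why
             * the 32-bit branch of INDEX_op_call above writes back two
             * registers.
             */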
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg(regs, t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_r64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg(regs, t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg(regs, t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
        case INDEX_op_tci_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
            /* Load/store operations (32 bit). */

        CASE_32_64(ld8u)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(uint8_t *)(t1 + t2));
            break;
        CASE_32_64(ld8s)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(int8_t *)(t1 + t2));
            break;
        CASE_32_64(ld16u)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(uint16_t *)(t1 + t2));
            break;
        CASE_32_64(ld16s)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(int16_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i32:
        CASE_64(ld32u)
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(uint32_t *)(t1 + t2));
            break;
        CASE_32_64(st8)
            t0 = tci_read_r(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        CASE_32_64(st16)
            t0 = tci_read_r(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
        CASE_64(st32)
            t0 = tci_read_r(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 * t2);
            break;
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint32_t)t1 / (uint32_t)t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint32_t)t1 % (uint32_t)t2);
            break;
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 ^ t2);
            break;
            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint32_t)t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint32_t)t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int32_t)t1 >> (t2 & 31));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_r(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
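        /*
         * deposit example: with ofs tmp16 = 8 and len tmp8 = 4, the mask
         * tmp32 is ((1 << 4) - 1) << 8 = 0x00000f00, so bits 8..11 of t1
         * are replaced by the low 4 bits of t2.
         */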
        case INDEX_op_brcond_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            v64 = tci_read_r64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r(regs, &tb_ptr);
            tmp64 = (uint32_t)tci_read_r(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, (uint32_t)t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int8_t)t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int16_t)t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint8_t)t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint16_t)t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
        case INDEX_op_tci_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
            /* Load/store operations (64 bit). */

        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg(regs, t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r(regs, &tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint64_t *)(t1 + t2) = t0;
            break;
            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 * t2);
            break;
        case INDEX_op_div_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int64_t)t1 / (int64_t)t2);
            break;
        case INDEX_op_divu_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint64_t)t1 / (uint64_t)t2);
            break;
        case INDEX_op_rem_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int64_t)t1 % (int64_t)t2);
            break;
        case INDEX_op_remu_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint64_t)t1 % (uint64_t)t2);
            break;
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 ^ t2);
            break;
            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            t2 = tci_read_r64(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(regs, &tb_ptr);
            t1 = tci_read_r64(regs, &tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint8_t)t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int8_t)t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (int16_t)t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint16_t)t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
#endif
        case INDEX_op_ext_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(regs, &tb_ptr);
            tci_write_reg(regs, t0, t1);
            break;
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
#endif
        case INDEX_op_extu_i32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, (uint32_t)t1);
            break;
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(regs, &tb_ptr);
            tci_write_reg(regs, t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */
            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = qatomic_read((int32_t *)tb_ptr);
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                g_assert_not_reached();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
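        /*
         * get_memop(oi) & (MO_BSWAP | MO_SSIZE) reduces the memory
         * operation to size, signedness and endianness.  MO_LESW, for
         * instance, selects a little-endian 16-bit load whose result is
         * sign-extended by the (int16_t) cast above.
         */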
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                g_assert_not_reached();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                g_assert_not_reached();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                g_assert_not_reached();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            g_assert_not_reached();
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}