2 * Tiny Code Interpreter for QEMU
4 * Copyright (c) 2009, 2011, 2016 Stefan Weil
6 * This program is free software: you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation, either version 2 of the License, or
9 * (at your option) any later version.
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
16 * You should have received a copy of the GNU General Public License
17 * along with this program. If not, see <http://www.gnu.org/licenses/>.
20 #include "qemu/osdep.h"
21 #include "qemu-common.h"
22 #include "tcg/tcg.h" /* MAX_OPC_PARAM_IARGS */
23 #include "exec/cpu_ldst.h"
24 #include "tcg/tcg-op.h"
25 #include "qemu/compiler.h"
30 * Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
31 * Without assertions, the interpreter runs much faster.
33 #if defined(CONFIG_DEBUG_TCG)
34 # define tci_assert(cond) assert(cond)
36 # define tci_assert(cond) ((void)(cond))
39 __thread
uintptr_t tci_tb_ptr
;
41 static void tci_write_reg64(tcg_target_ulong
*regs
, uint32_t high_index
,
42 uint32_t low_index
, uint64_t value
)
44 regs
[low_index
] = value
;
45 regs
[high_index
] = value
>> 32;
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    uint64_t hi = (uint64_t)high << 32;

    /* The two halves occupy disjoint bit ranges, so OR == addition here. */
    return hi | low;
}
55 * Load sets of arguments all at once. The naming convention is:
56 * tci_args_<arguments>
57 * where arguments is a sequence of
59 * b = immediate (bit position)
60 * c = condition (TCGCond)
61 * i = immediate (uint32_t)
62 * I = immediate (tcg_target_ulong)
63 * l = label or pointer
64 * m = immediate (TCGMemOpIdx)
65 * n = immediate (call return length)
67 * s = signed ldst offset
70 static void tci_args_l(uint32_t insn
, const void *tb_ptr
, void **l0
)
72 int diff
= sextract32(insn
, 12, 20);
73 *l0
= diff
? (void *)tb_ptr
+ diff
: NULL
;
76 static void tci_args_r(uint32_t insn
, TCGReg
*r0
)
78 *r0
= extract32(insn
, 8, 4);
/*
 * Decode a call-return-length immediate (bits [8,12)) and a pointer
 * operand (signed 20-bit displacement at bit 12, relative to tb_ptr).
 */
static void tci_args_nl(uint32_t insn, const void *tb_ptr,
                        uint8_t *n0, void **l1)
{
    uint8_t count = extract32(insn, 8, 4);
    int rel = sextract32(insn, 12, 20);

    *n0 = count;
    *l1 = (void *)tb_ptr + rel;
}
88 static void tci_args_rl(uint32_t insn
, const void *tb_ptr
,
89 TCGReg
*r0
, void **l1
)
91 *r0
= extract32(insn
, 8, 4);
92 *l1
= sextract32(insn
, 12, 20) + (void *)tb_ptr
;
95 static void tci_args_rr(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
)
97 *r0
= extract32(insn
, 8, 4);
98 *r1
= extract32(insn
, 12, 4);
101 static void tci_args_ri(uint32_t insn
, TCGReg
*r0
, tcg_target_ulong
*i1
)
103 *r0
= extract32(insn
, 8, 4);
104 *i1
= sextract32(insn
, 12, 20);
107 static void tci_args_rrm(uint32_t insn
, TCGReg
*r0
,
108 TCGReg
*r1
, TCGMemOpIdx
*m2
)
110 *r0
= extract32(insn
, 8, 4);
111 *r1
= extract32(insn
, 12, 4);
112 *m2
= extract32(insn
, 20, 12);
115 static void tci_args_rrr(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
, TCGReg
*r2
)
117 *r0
= extract32(insn
, 8, 4);
118 *r1
= extract32(insn
, 12, 4);
119 *r2
= extract32(insn
, 16, 4);
122 static void tci_args_rrs(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
, int32_t *i2
)
124 *r0
= extract32(insn
, 8, 4);
125 *r1
= extract32(insn
, 12, 4);
126 *i2
= sextract32(insn
, 16, 16);
129 static void tci_args_rrrc(uint32_t insn
,
130 TCGReg
*r0
, TCGReg
*r1
, TCGReg
*r2
, TCGCond
*c3
)
132 *r0
= extract32(insn
, 8, 4);
133 *r1
= extract32(insn
, 12, 4);
134 *r2
= extract32(insn
, 16, 4);
135 *c3
= extract32(insn
, 20, 4);
138 static void tci_args_rrrm(uint32_t insn
,
139 TCGReg
*r0
, TCGReg
*r1
, TCGReg
*r2
, TCGMemOpIdx
*m3
)
141 *r0
= extract32(insn
, 8, 4);
142 *r1
= extract32(insn
, 12, 4);
143 *r2
= extract32(insn
, 16, 4);
144 *m3
= extract32(insn
, 20, 12);
147 static void tci_args_rrrbb(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
,
148 TCGReg
*r2
, uint8_t *i3
, uint8_t *i4
)
150 *r0
= extract32(insn
, 8, 4);
151 *r1
= extract32(insn
, 12, 4);
152 *r2
= extract32(insn
, 16, 4);
153 *i3
= extract32(insn
, 20, 6);
154 *i4
= extract32(insn
, 26, 6);
157 static void tci_args_rrrrr(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
,
158 TCGReg
*r2
, TCGReg
*r3
, TCGReg
*r4
)
160 *r0
= extract32(insn
, 8, 4);
161 *r1
= extract32(insn
, 12, 4);
162 *r2
= extract32(insn
, 16, 4);
163 *r3
= extract32(insn
, 20, 4);
164 *r4
= extract32(insn
, 24, 4);
167 #if TCG_TARGET_REG_BITS == 32
168 static void tci_args_rrrr(uint32_t insn
,
169 TCGReg
*r0
, TCGReg
*r1
, TCGReg
*r2
, TCGReg
*r3
)
171 *r0
= extract32(insn
, 8, 4);
172 *r1
= extract32(insn
, 12, 4);
173 *r2
= extract32(insn
, 16, 4);
174 *r3
= extract32(insn
, 20, 4);
177 static void tci_args_rrrrrc(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
,
178 TCGReg
*r2
, TCGReg
*r3
, TCGReg
*r4
, TCGCond
*c5
)
180 *r0
= extract32(insn
, 8, 4);
181 *r1
= extract32(insn
, 12, 4);
182 *r2
= extract32(insn
, 16, 4);
183 *r3
= extract32(insn
, 20, 4);
184 *r4
= extract32(insn
, 24, 4);
185 *c5
= extract32(insn
, 28, 4);
188 static void tci_args_rrrrrr(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
,
189 TCGReg
*r2
, TCGReg
*r3
, TCGReg
*r4
, TCGReg
*r5
)
191 *r0
= extract32(insn
, 8, 4);
192 *r1
= extract32(insn
, 12, 4);
193 *r2
= extract32(insn
, 16, 4);
194 *r3
= extract32(insn
, 20, 4);
195 *r4
= extract32(insn
, 24, 4);
196 *r5
= extract32(insn
, 28, 4);
200 static bool tci_compare32(uint32_t u0
, uint32_t u1
, TCGCond condition
)
237 g_assert_not_reached();
242 static bool tci_compare64(uint64_t u0
, uint64_t u1
, TCGCond condition
)
279 g_assert_not_reached();
285 cpu_ldub_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
286 #define qemu_ld_leuw \
287 cpu_lduw_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
288 #define qemu_ld_leul \
289 cpu_ldl_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
290 #define qemu_ld_leq \
291 cpu_ldq_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
292 #define qemu_ld_beuw \
293 cpu_lduw_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
294 #define qemu_ld_beul \
295 cpu_ldl_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
296 #define qemu_ld_beq \
297 cpu_ldq_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
298 #define qemu_st_b(X) \
299 cpu_stb_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
300 #define qemu_st_lew(X) \
301 cpu_stw_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
302 #define qemu_st_lel(X) \
303 cpu_stl_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
304 #define qemu_st_leq(X) \
305 cpu_stq_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
306 #define qemu_st_bew(X) \
307 cpu_stw_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
308 #define qemu_st_bel(X) \
309 cpu_stl_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
310 #define qemu_st_beq(X) \
311 cpu_stq_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
313 #if TCG_TARGET_REG_BITS == 64
314 # define CASE_32_64(x) \
315 case glue(glue(INDEX_op_, x), _i64): \
316 case glue(glue(INDEX_op_, x), _i32):
317 # define CASE_64(x) \
318 case glue(glue(INDEX_op_, x), _i64):
320 # define CASE_32_64(x) \
321 case glue(glue(INDEX_op_, x), _i32):
325 /* Interpret pseudo code in tb. */
327 * Disable CFI checks.
328 * One possible operation in the pseudo code is a call to binary code.
329 * Therefore, disable CFI checks in the interpreter function
331 uintptr_t QEMU_DISABLE_CFI
tcg_qemu_tb_exec(CPUArchState
*env
,
332 const void *v_tb_ptr
)
334 const uint32_t *tb_ptr
= v_tb_ptr
;
335 tcg_target_ulong regs
[TCG_TARGET_NB_REGS
];
336 uint64_t stack
[(TCG_STATIC_CALL_ARGS_SIZE
+ TCG_STATIC_FRAME_SIZE
)
338 void *call_slots
[TCG_STATIC_CALL_ARGS_SIZE
/ sizeof(uint64_t)];
340 regs
[TCG_AREG0
] = (tcg_target_ulong
)env
;
341 regs
[TCG_REG_CALL_STACK
] = (uintptr_t)stack
;
342 /* Other call_slots entries initialized at first use (see below). */
343 call_slots
[0] = NULL
;
349 TCGReg r0
, r1
, r2
, r3
, r4
;
356 #if TCG_TARGET_REG_BITS == 32
365 opc
= extract32(insn
, 0, 8);
370 * Set up the ffi_avalue array once, delayed until now
371 * because many TB's do not make any calls. In tcg_gen_callN,
372 * we arranged for every real argument to be "left-aligned"
373 * in each 64-bit slot.
375 if (unlikely(call_slots
[0] == NULL
)) {
376 for (int i
= 0; i
< ARRAY_SIZE(call_slots
); ++i
) {
377 call_slots
[i
] = &stack
[i
];
381 tci_args_nl(insn
, tb_ptr
, &len
, &ptr
);
383 /* Helper functions may need to access the "return address" */
384 tci_tb_ptr
= (uintptr_t)tb_ptr
;
388 ffi_call(pptr
[1], pptr
[0], stack
, call_slots
);
391 /* Any result winds up "left-aligned" in the stack[0] slot. */
395 case 1: /* uint32_t */
397 * Note that libffi has an odd special case in that it will
398 * always widen an integral result to ffi_arg.
400 if (sizeof(ffi_arg
) == 4) {
401 regs
[TCG_REG_R0
] = *(uint32_t *)stack
;
405 case 2: /* uint64_t */
406 if (TCG_TARGET_REG_BITS
== 32) {
407 tci_write_reg64(regs
, TCG_REG_R1
, TCG_REG_R0
, stack
[0]);
409 regs
[TCG_REG_R0
] = stack
[0];
413 g_assert_not_reached();
418 tci_args_l(insn
, tb_ptr
, &ptr
);
421 case INDEX_op_setcond_i32
:
422 tci_args_rrrc(insn
, &r0
, &r1
, &r2
, &condition
);
423 regs
[r0
] = tci_compare32(regs
[r1
], regs
[r2
], condition
);
425 #if TCG_TARGET_REG_BITS == 32
426 case INDEX_op_setcond2_i32
:
427 tci_args_rrrrrc(insn
, &r0
, &r1
, &r2
, &r3
, &r4
, &condition
);
428 T1
= tci_uint64(regs
[r2
], regs
[r1
]);
429 T2
= tci_uint64(regs
[r4
], regs
[r3
]);
430 regs
[r0
] = tci_compare64(T1
, T2
, condition
);
432 #elif TCG_TARGET_REG_BITS == 64
433 case INDEX_op_setcond_i64
:
434 tci_args_rrrc(insn
, &r0
, &r1
, &r2
, &condition
);
435 regs
[r0
] = tci_compare64(regs
[r1
], regs
[r2
], condition
);
439 tci_args_rr(insn
, &r0
, &r1
);
442 case INDEX_op_tci_movi
:
443 tci_args_ri(insn
, &r0
, &t1
);
446 case INDEX_op_tci_movl
:
447 tci_args_rl(insn
, tb_ptr
, &r0
, &ptr
);
448 regs
[r0
] = *(tcg_target_ulong
*)ptr
;
451 /* Load/store operations (32 bit). */
454 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
455 ptr
= (void *)(regs
[r1
] + ofs
);
456 regs
[r0
] = *(uint8_t *)ptr
;
459 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
460 ptr
= (void *)(regs
[r1
] + ofs
);
461 regs
[r0
] = *(int8_t *)ptr
;
464 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
465 ptr
= (void *)(regs
[r1
] + ofs
);
466 regs
[r0
] = *(uint16_t *)ptr
;
469 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
470 ptr
= (void *)(regs
[r1
] + ofs
);
471 regs
[r0
] = *(int16_t *)ptr
;
473 case INDEX_op_ld_i32
:
475 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
476 ptr
= (void *)(regs
[r1
] + ofs
);
477 regs
[r0
] = *(uint32_t *)ptr
;
480 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
481 ptr
= (void *)(regs
[r1
] + ofs
);
482 *(uint8_t *)ptr
= regs
[r0
];
485 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
486 ptr
= (void *)(regs
[r1
] + ofs
);
487 *(uint16_t *)ptr
= regs
[r0
];
489 case INDEX_op_st_i32
:
491 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
492 ptr
= (void *)(regs
[r1
] + ofs
);
493 *(uint32_t *)ptr
= regs
[r0
];
496 /* Arithmetic operations (mixed 32/64 bit). */
499 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
500 regs
[r0
] = regs
[r1
] + regs
[r2
];
503 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
504 regs
[r0
] = regs
[r1
] - regs
[r2
];
507 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
508 regs
[r0
] = regs
[r1
] * regs
[r2
];
511 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
512 regs
[r0
] = regs
[r1
] & regs
[r2
];
515 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
516 regs
[r0
] = regs
[r1
] | regs
[r2
];
519 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
520 regs
[r0
] = regs
[r1
] ^ regs
[r2
];
523 /* Arithmetic operations (32 bit). */
525 case INDEX_op_div_i32
:
526 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
527 regs
[r0
] = (int32_t)regs
[r1
] / (int32_t)regs
[r2
];
529 case INDEX_op_divu_i32
:
530 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
531 regs
[r0
] = (uint32_t)regs
[r1
] / (uint32_t)regs
[r2
];
533 case INDEX_op_rem_i32
:
534 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
535 regs
[r0
] = (int32_t)regs
[r1
] % (int32_t)regs
[r2
];
537 case INDEX_op_remu_i32
:
538 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
539 regs
[r0
] = (uint32_t)regs
[r1
] % (uint32_t)regs
[r2
];
542 /* Shift/rotate operations (32 bit). */
544 case INDEX_op_shl_i32
:
545 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
546 regs
[r0
] = (uint32_t)regs
[r1
] << (regs
[r2
] & 31);
548 case INDEX_op_shr_i32
:
549 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
550 regs
[r0
] = (uint32_t)regs
[r1
] >> (regs
[r2
] & 31);
552 case INDEX_op_sar_i32
:
553 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
554 regs
[r0
] = (int32_t)regs
[r1
] >> (regs
[r2
] & 31);
556 #if TCG_TARGET_HAS_rot_i32
557 case INDEX_op_rotl_i32
:
558 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
559 regs
[r0
] = rol32(regs
[r1
], regs
[r2
] & 31);
561 case INDEX_op_rotr_i32
:
562 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
563 regs
[r0
] = ror32(regs
[r1
], regs
[r2
] & 31);
566 #if TCG_TARGET_HAS_deposit_i32
567 case INDEX_op_deposit_i32
:
568 tci_args_rrrbb(insn
, &r0
, &r1
, &r2
, &pos
, &len
);
569 regs
[r0
] = deposit32(regs
[r1
], pos
, len
, regs
[r2
]);
572 case INDEX_op_brcond_i32
:
573 tci_args_rl(insn
, tb_ptr
, &r0
, &ptr
);
574 if ((uint32_t)regs
[r0
]) {
578 #if TCG_TARGET_REG_BITS == 32
579 case INDEX_op_add2_i32
:
580 tci_args_rrrrrr(insn
, &r0
, &r1
, &r2
, &r3
, &r4
, &r5
);
581 T1
= tci_uint64(regs
[r3
], regs
[r2
]);
582 T2
= tci_uint64(regs
[r5
], regs
[r4
]);
583 tci_write_reg64(regs
, r1
, r0
, T1
+ T2
);
585 case INDEX_op_sub2_i32
:
586 tci_args_rrrrrr(insn
, &r0
, &r1
, &r2
, &r3
, &r4
, &r5
);
587 T1
= tci_uint64(regs
[r3
], regs
[r2
]);
588 T2
= tci_uint64(regs
[r5
], regs
[r4
]);
589 tci_write_reg64(regs
, r1
, r0
, T1
- T2
);
591 case INDEX_op_mulu2_i32
:
592 tci_args_rrrr(insn
, &r0
, &r1
, &r2
, &r3
);
593 tci_write_reg64(regs
, r1
, r0
, (uint64_t)regs
[r2
] * regs
[r3
]);
595 #endif /* TCG_TARGET_REG_BITS == 32 */
596 #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
598 tci_args_rr(insn
, &r0
, &r1
);
599 regs
[r0
] = (int8_t)regs
[r1
];
602 #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
604 tci_args_rr(insn
, &r0
, &r1
);
605 regs
[r0
] = (int16_t)regs
[r1
];
608 #if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
610 tci_args_rr(insn
, &r0
, &r1
);
611 regs
[r0
] = (uint8_t)regs
[r1
];
614 #if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
616 tci_args_rr(insn
, &r0
, &r1
);
617 regs
[r0
] = (uint16_t)regs
[r1
];
620 #if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
622 tci_args_rr(insn
, &r0
, &r1
);
623 regs
[r0
] = bswap16(regs
[r1
]);
626 #if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
628 tci_args_rr(insn
, &r0
, &r1
);
629 regs
[r0
] = bswap32(regs
[r1
]);
632 #if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
634 tci_args_rr(insn
, &r0
, &r1
);
635 regs
[r0
] = ~regs
[r1
];
638 #if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
640 tci_args_rr(insn
, &r0
, &r1
);
641 regs
[r0
] = -regs
[r1
];
644 #if TCG_TARGET_REG_BITS == 64
645 /* Load/store operations (64 bit). */
647 case INDEX_op_ld32s_i64
:
648 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
649 ptr
= (void *)(regs
[r1
] + ofs
);
650 regs
[r0
] = *(int32_t *)ptr
;
652 case INDEX_op_ld_i64
:
653 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
654 ptr
= (void *)(regs
[r1
] + ofs
);
655 regs
[r0
] = *(uint64_t *)ptr
;
657 case INDEX_op_st_i64
:
658 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
659 ptr
= (void *)(regs
[r1
] + ofs
);
660 *(uint64_t *)ptr
= regs
[r0
];
663 /* Arithmetic operations (64 bit). */
665 case INDEX_op_div_i64
:
666 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
667 regs
[r0
] = (int64_t)regs
[r1
] / (int64_t)regs
[r2
];
669 case INDEX_op_divu_i64
:
670 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
671 regs
[r0
] = (uint64_t)regs
[r1
] / (uint64_t)regs
[r2
];
673 case INDEX_op_rem_i64
:
674 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
675 regs
[r0
] = (int64_t)regs
[r1
] % (int64_t)regs
[r2
];
677 case INDEX_op_remu_i64
:
678 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
679 regs
[r0
] = (uint64_t)regs
[r1
] % (uint64_t)regs
[r2
];
682 /* Shift/rotate operations (64 bit). */
684 case INDEX_op_shl_i64
:
685 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
686 regs
[r0
] = regs
[r1
] << (regs
[r2
] & 63);
688 case INDEX_op_shr_i64
:
689 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
690 regs
[r0
] = regs
[r1
] >> (regs
[r2
] & 63);
692 case INDEX_op_sar_i64
:
693 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
694 regs
[r0
] = (int64_t)regs
[r1
] >> (regs
[r2
] & 63);
696 #if TCG_TARGET_HAS_rot_i64
697 case INDEX_op_rotl_i64
:
698 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
699 regs
[r0
] = rol64(regs
[r1
], regs
[r2
] & 63);
701 case INDEX_op_rotr_i64
:
702 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
703 regs
[r0
] = ror64(regs
[r1
], regs
[r2
] & 63);
706 #if TCG_TARGET_HAS_deposit_i64
707 case INDEX_op_deposit_i64
:
708 tci_args_rrrbb(insn
, &r0
, &r1
, &r2
, &pos
, &len
);
709 regs
[r0
] = deposit64(regs
[r1
], pos
, len
, regs
[r2
]);
712 case INDEX_op_brcond_i64
:
713 tci_args_rl(insn
, tb_ptr
, &r0
, &ptr
);
718 case INDEX_op_ext32s_i64
:
719 case INDEX_op_ext_i32_i64
:
720 tci_args_rr(insn
, &r0
, &r1
);
721 regs
[r0
] = (int32_t)regs
[r1
];
723 case INDEX_op_ext32u_i64
:
724 case INDEX_op_extu_i32_i64
:
725 tci_args_rr(insn
, &r0
, &r1
);
726 regs
[r0
] = (uint32_t)regs
[r1
];
728 #if TCG_TARGET_HAS_bswap64_i64
729 case INDEX_op_bswap64_i64
:
730 tci_args_rr(insn
, &r0
, &r1
);
731 regs
[r0
] = bswap64(regs
[r1
]);
734 #endif /* TCG_TARGET_REG_BITS == 64 */
736 /* QEMU specific operations. */
738 case INDEX_op_exit_tb
:
739 tci_args_l(insn
, tb_ptr
, &ptr
);
740 return (uintptr_t)ptr
;
742 case INDEX_op_goto_tb
:
743 tci_args_l(insn
, tb_ptr
, &ptr
);
744 tb_ptr
= *(void **)ptr
;
747 case INDEX_op_goto_ptr
:
748 tci_args_r(insn
, &r0
);
749 ptr
= (void *)regs
[r0
];
756 case INDEX_op_qemu_ld_i32
:
757 if (TARGET_LONG_BITS
<= TCG_TARGET_REG_BITS
) {
758 tci_args_rrm(insn
, &r0
, &r1
, &oi
);
761 tci_args_rrrm(insn
, &r0
, &r1
, &r2
, &oi
);
762 taddr
= tci_uint64(regs
[r2
], regs
[r1
]);
764 switch (get_memop(oi
) & (MO_BSWAP
| MO_SSIZE
)) {
769 tmp32
= (int8_t)qemu_ld_ub
;
772 tmp32
= qemu_ld_leuw
;
775 tmp32
= (int16_t)qemu_ld_leuw
;
778 tmp32
= qemu_ld_leul
;
781 tmp32
= qemu_ld_beuw
;
784 tmp32
= (int16_t)qemu_ld_beuw
;
787 tmp32
= qemu_ld_beul
;
790 g_assert_not_reached();
795 case INDEX_op_qemu_ld_i64
:
796 if (TCG_TARGET_REG_BITS
== 64) {
797 tci_args_rrm(insn
, &r0
, &r1
, &oi
);
799 } else if (TARGET_LONG_BITS
<= TCG_TARGET_REG_BITS
) {
800 tci_args_rrrm(insn
, &r0
, &r1
, &r2
, &oi
);
803 tci_args_rrrrr(insn
, &r0
, &r1
, &r2
, &r3
, &r4
);
804 taddr
= tci_uint64(regs
[r3
], regs
[r2
]);
807 switch (get_memop(oi
) & (MO_BSWAP
| MO_SSIZE
)) {
812 tmp64
= (int8_t)qemu_ld_ub
;
815 tmp64
= qemu_ld_leuw
;
818 tmp64
= (int16_t)qemu_ld_leuw
;
821 tmp64
= qemu_ld_leul
;
824 tmp64
= (int32_t)qemu_ld_leul
;
830 tmp64
= qemu_ld_beuw
;
833 tmp64
= (int16_t)qemu_ld_beuw
;
836 tmp64
= qemu_ld_beul
;
839 tmp64
= (int32_t)qemu_ld_beul
;
845 g_assert_not_reached();
847 if (TCG_TARGET_REG_BITS
== 32) {
848 tci_write_reg64(regs
, r1
, r0
, tmp64
);
854 case INDEX_op_qemu_st_i32
:
855 if (TARGET_LONG_BITS
<= TCG_TARGET_REG_BITS
) {
856 tci_args_rrm(insn
, &r0
, &r1
, &oi
);
859 tci_args_rrrm(insn
, &r0
, &r1
, &r2
, &oi
);
860 taddr
= tci_uint64(regs
[r2
], regs
[r1
]);
863 switch (get_memop(oi
) & (MO_BSWAP
| MO_SIZE
)) {
880 g_assert_not_reached();
884 case INDEX_op_qemu_st_i64
:
885 if (TCG_TARGET_REG_BITS
== 64) {
886 tci_args_rrm(insn
, &r0
, &r1
, &oi
);
890 if (TARGET_LONG_BITS
<= TCG_TARGET_REG_BITS
) {
891 tci_args_rrrm(insn
, &r0
, &r1
, &r2
, &oi
);
894 tci_args_rrrrr(insn
, &r0
, &r1
, &r2
, &r3
, &r4
);
895 taddr
= tci_uint64(regs
[r3
], regs
[r2
]);
898 tmp64
= tci_uint64(regs
[r1
], regs
[r0
]);
900 switch (get_memop(oi
) & (MO_BSWAP
| MO_SIZE
)) {
923 g_assert_not_reached();
928 /* Ensure ordering for all kinds */
932 g_assert_not_reached();
938 * Disassembler that matches the interpreter
941 static const char *str_r(TCGReg r
)
943 static const char regs
[TCG_TARGET_NB_REGS
][4] = {
944 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
945 "r8", "r9", "r10", "r11", "r12", "r13", "env", "sp"
948 QEMU_BUILD_BUG_ON(TCG_AREG0
!= TCG_REG_R14
);
949 QEMU_BUILD_BUG_ON(TCG_REG_CALL_STACK
!= TCG_REG_R15
);
951 assert((unsigned)r
< TCG_TARGET_NB_REGS
);
955 static const char *str_c(TCGCond c
)
957 static const char cond
[16][8] = {
958 [TCG_COND_NEVER
] = "never",
959 [TCG_COND_ALWAYS
] = "always",
960 [TCG_COND_EQ
] = "eq",
961 [TCG_COND_NE
] = "ne",
962 [TCG_COND_LT
] = "lt",
963 [TCG_COND_GE
] = "ge",
964 [TCG_COND_LE
] = "le",
965 [TCG_COND_GT
] = "gt",
966 [TCG_COND_LTU
] = "ltu",
967 [TCG_COND_GEU
] = "geu",
968 [TCG_COND_LEU
] = "leu",
969 [TCG_COND_GTU
] = "gtu",
972 assert((unsigned)c
< ARRAY_SIZE(cond
));
973 assert(cond
[c
][0] != 0);
977 /* Disassemble TCI bytecode. */
978 int print_insn_tci(bfd_vma addr
, disassemble_info
*info
)
980 const uint32_t *tb_ptr
= (const void *)(uintptr_t)addr
;
985 TCGReg r0
, r1
, r2
, r3
, r4
;
986 #if TCG_TARGET_REG_BITS == 32
996 /* TCI is always the host, so we don't need to load indirect. */
999 info
->fprintf_func(info
->stream
, "%08x ", insn
);
1001 op
= extract32(insn
, 0, 8);
1002 def
= &tcg_op_defs
[op
];
1003 op_name
= def
->name
;
1007 case INDEX_op_exit_tb
:
1008 case INDEX_op_goto_tb
:
1009 tci_args_l(insn
, tb_ptr
, &ptr
);
1010 info
->fprintf_func(info
->stream
, "%-12s %p", op_name
, ptr
);
1013 case INDEX_op_goto_ptr
:
1014 tci_args_r(insn
, &r0
);
1015 info
->fprintf_func(info
->stream
, "%-12s %s", op_name
, str_r(r0
));
1019 tci_args_nl(insn
, tb_ptr
, &len
, &ptr
);
1020 info
->fprintf_func(info
->stream
, "%-12s %d, %p", op_name
, len
, ptr
);
1023 case INDEX_op_brcond_i32
:
1024 case INDEX_op_brcond_i64
:
1025 tci_args_rl(insn
, tb_ptr
, &r0
, &ptr
);
1026 info
->fprintf_func(info
->stream
, "%-12s %s, 0, ne, %p",
1027 op_name
, str_r(r0
), ptr
);
1030 case INDEX_op_setcond_i32
:
1031 case INDEX_op_setcond_i64
:
1032 tci_args_rrrc(insn
, &r0
, &r1
, &r2
, &c
);
1033 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %s",
1034 op_name
, str_r(r0
), str_r(r1
), str_r(r2
), str_c(c
));
1037 case INDEX_op_tci_movi
:
1038 tci_args_ri(insn
, &r0
, &i1
);
1039 info
->fprintf_func(info
->stream
, "%-12s %s, 0x%" TCG_PRIlx
,
1040 op_name
, str_r(r0
), i1
);
1043 case INDEX_op_tci_movl
:
1044 tci_args_rl(insn
, tb_ptr
, &r0
, &ptr
);
1045 info
->fprintf_func(info
->stream
, "%-12s %s, %p",
1046 op_name
, str_r(r0
), ptr
);
1049 case INDEX_op_ld8u_i32
:
1050 case INDEX_op_ld8u_i64
:
1051 case INDEX_op_ld8s_i32
:
1052 case INDEX_op_ld8s_i64
:
1053 case INDEX_op_ld16u_i32
:
1054 case INDEX_op_ld16u_i64
:
1055 case INDEX_op_ld16s_i32
:
1056 case INDEX_op_ld16s_i64
:
1057 case INDEX_op_ld32u_i64
:
1058 case INDEX_op_ld32s_i64
:
1059 case INDEX_op_ld_i32
:
1060 case INDEX_op_ld_i64
:
1061 case INDEX_op_st8_i32
:
1062 case INDEX_op_st8_i64
:
1063 case INDEX_op_st16_i32
:
1064 case INDEX_op_st16_i64
:
1065 case INDEX_op_st32_i64
:
1066 case INDEX_op_st_i32
:
1067 case INDEX_op_st_i64
:
1068 tci_args_rrs(insn
, &r0
, &r1
, &s2
);
1069 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %d",
1070 op_name
, str_r(r0
), str_r(r1
), s2
);
1073 case INDEX_op_mov_i32
:
1074 case INDEX_op_mov_i64
:
1075 case INDEX_op_ext8s_i32
:
1076 case INDEX_op_ext8s_i64
:
1077 case INDEX_op_ext8u_i32
:
1078 case INDEX_op_ext8u_i64
:
1079 case INDEX_op_ext16s_i32
:
1080 case INDEX_op_ext16s_i64
:
1081 case INDEX_op_ext16u_i32
:
1082 case INDEX_op_ext32s_i64
:
1083 case INDEX_op_ext32u_i64
:
1084 case INDEX_op_ext_i32_i64
:
1085 case INDEX_op_extu_i32_i64
:
1086 case INDEX_op_bswap16_i32
:
1087 case INDEX_op_bswap16_i64
:
1088 case INDEX_op_bswap32_i32
:
1089 case INDEX_op_bswap32_i64
:
1090 case INDEX_op_bswap64_i64
:
1091 case INDEX_op_not_i32
:
1092 case INDEX_op_not_i64
:
1093 case INDEX_op_neg_i32
:
1094 case INDEX_op_neg_i64
:
1095 tci_args_rr(insn
, &r0
, &r1
);
1096 info
->fprintf_func(info
->stream
, "%-12s %s, %s",
1097 op_name
, str_r(r0
), str_r(r1
));
1100 case INDEX_op_add_i32
:
1101 case INDEX_op_add_i64
:
1102 case INDEX_op_sub_i32
:
1103 case INDEX_op_sub_i64
:
1104 case INDEX_op_mul_i32
:
1105 case INDEX_op_mul_i64
:
1106 case INDEX_op_and_i32
:
1107 case INDEX_op_and_i64
:
1108 case INDEX_op_or_i32
:
1109 case INDEX_op_or_i64
:
1110 case INDEX_op_xor_i32
:
1111 case INDEX_op_xor_i64
:
1112 case INDEX_op_div_i32
:
1113 case INDEX_op_div_i64
:
1114 case INDEX_op_rem_i32
:
1115 case INDEX_op_rem_i64
:
1116 case INDEX_op_divu_i32
:
1117 case INDEX_op_divu_i64
:
1118 case INDEX_op_remu_i32
:
1119 case INDEX_op_remu_i64
:
1120 case INDEX_op_shl_i32
:
1121 case INDEX_op_shl_i64
:
1122 case INDEX_op_shr_i32
:
1123 case INDEX_op_shr_i64
:
1124 case INDEX_op_sar_i32
:
1125 case INDEX_op_sar_i64
:
1126 case INDEX_op_rotl_i32
:
1127 case INDEX_op_rotl_i64
:
1128 case INDEX_op_rotr_i32
:
1129 case INDEX_op_rotr_i64
:
1130 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
1131 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s",
1132 op_name
, str_r(r0
), str_r(r1
), str_r(r2
));
1135 case INDEX_op_deposit_i32
:
1136 case INDEX_op_deposit_i64
:
1137 tci_args_rrrbb(insn
, &r0
, &r1
, &r2
, &pos
, &len
);
1138 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %d, %d",
1139 op_name
, str_r(r0
), str_r(r1
), str_r(r2
), pos
, len
);
1142 #if TCG_TARGET_REG_BITS == 32
1143 case INDEX_op_setcond2_i32
:
1144 tci_args_rrrrrc(insn
, &r0
, &r1
, &r2
, &r3
, &r4
, &c
);
1145 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %s, %s, %s",
1146 op_name
, str_r(r0
), str_r(r1
), str_r(r2
),
1147 str_r(r3
), str_r(r4
), str_c(c
));
1150 case INDEX_op_mulu2_i32
:
1151 tci_args_rrrr(insn
, &r0
, &r1
, &r2
, &r3
);
1152 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %s",
1153 op_name
, str_r(r0
), str_r(r1
),
1154 str_r(r2
), str_r(r3
));
1157 case INDEX_op_add2_i32
:
1158 case INDEX_op_sub2_i32
:
1159 tci_args_rrrrrr(insn
, &r0
, &r1
, &r2
, &r3
, &r4
, &r5
);
1160 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %s, %s, %s",
1161 op_name
, str_r(r0
), str_r(r1
), str_r(r2
),
1162 str_r(r3
), str_r(r4
), str_r(r5
));
1166 case INDEX_op_qemu_ld_i64
:
1167 case INDEX_op_qemu_st_i64
:
1168 len
= DIV_ROUND_UP(64, TCG_TARGET_REG_BITS
);
1170 case INDEX_op_qemu_ld_i32
:
1171 case INDEX_op_qemu_st_i32
:
1174 len
+= DIV_ROUND_UP(TARGET_LONG_BITS
, TCG_TARGET_REG_BITS
);
1177 tci_args_rrm(insn
, &r0
, &r1
, &oi
);
1178 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %x",
1179 op_name
, str_r(r0
), str_r(r1
), oi
);
1182 tci_args_rrrm(insn
, &r0
, &r1
, &r2
, &oi
);
1183 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %x",
1184 op_name
, str_r(r0
), str_r(r1
), str_r(r2
), oi
);
1187 tci_args_rrrrr(insn
, &r0
, &r1
, &r2
, &r3
, &r4
);
1188 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %s, %s",
1189 op_name
, str_r(r0
), str_r(r1
),
1190 str_r(r2
), str_r(r3
), str_r(r4
));
1193 g_assert_not_reached();
1198 /* tcg_out_nop_fill uses zeros */
1200 info
->fprintf_func(info
->stream
, "align");
1206 info
->fprintf_func(info
->stream
, "illegal opcode %d", op
);
1210 return sizeof(insn
);