/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
20 #include "qemu/osdep.h"
21 #include "qemu-common.h"
22 #include "tcg/tcg.h" /* MAX_OPC_PARAM_IARGS */
23 #include "exec/cpu_ldst.h"
24 #include "tcg/tcg-op.h"
25 #include "qemu/compiler.h"
/*
 * Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster.
 */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
/* Evaluate the condition (to keep it compiled/checked) but discard it. */
# define tci_assert(cond) ((void)(cond))
#endif
/*
 * Per-thread "return address" of the currently executing TB: set by the
 * interpreter before calling out so that helper functions can access it
 * (see the assignment before ffi_call in tcg_qemu_tb_exec).
 */
__thread uintptr_t tci_tb_ptr;
41 static void tci_write_reg64(tcg_target_ulong
*regs
, uint32_t high_index
,
42 uint32_t low_index
, uint64_t value
)
44 regs
[low_index
] = value
;
45 regs
[high_index
] = value
>> 32;
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    uint64_t hi64 = high;
    return (hi64 << 32) + low;
}
/*
 * Load sets of arguments all at once.  The naming convention is:
 *   tci_args_<arguments>
 * where arguments is a sequence of
 *
 *   b = immediate (bit position)
 *   c = condition (TCGCond)
 *   i = immediate (uint32_t)
 *   I = immediate (tcg_target_ulong)
 *   l = label or pointer
 *   m = immediate (TCGMemOpIdx)
 *   n = immediate (call return length)
 *   r = register
 *   s = signed ldst offset
 */
70 static void tci_args_l(uint32_t insn
, const void *tb_ptr
, void **l0
)
72 int diff
= sextract32(insn
, 12, 20);
73 *l0
= diff
? (void *)tb_ptr
+ diff
: NULL
;
/* Decode a call-return-length immediate plus a label operand. */
static void tci_args_nl(uint32_t insn, const void *tb_ptr,
                        uint8_t *n0, void **l1)
{
    *n0 = extract32(insn, 8, 4);
    *l1 = (void *)tb_ptr + sextract32(insn, 12, 20);
}
83 static void tci_args_rl(uint32_t insn
, const void *tb_ptr
,
84 TCGReg
*r0
, void **l1
)
86 *r0
= extract32(insn
, 8, 4);
87 *l1
= sextract32(insn
, 12, 20) + (void *)tb_ptr
;
90 static void tci_args_rr(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
)
92 *r0
= extract32(insn
, 8, 4);
93 *r1
= extract32(insn
, 12, 4);
96 static void tci_args_ri(uint32_t insn
, TCGReg
*r0
, tcg_target_ulong
*i1
)
98 *r0
= extract32(insn
, 8, 4);
99 *i1
= sextract32(insn
, 12, 20);
102 static void tci_args_rrm(uint32_t insn
, TCGReg
*r0
,
103 TCGReg
*r1
, TCGMemOpIdx
*m2
)
105 *r0
= extract32(insn
, 8, 4);
106 *r1
= extract32(insn
, 12, 4);
107 *m2
= extract32(insn
, 20, 12);
110 static void tci_args_rrr(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
, TCGReg
*r2
)
112 *r0
= extract32(insn
, 8, 4);
113 *r1
= extract32(insn
, 12, 4);
114 *r2
= extract32(insn
, 16, 4);
117 static void tci_args_rrs(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
, int32_t *i2
)
119 *r0
= extract32(insn
, 8, 4);
120 *r1
= extract32(insn
, 12, 4);
121 *i2
= sextract32(insn
, 16, 16);
124 static void tci_args_rrrc(uint32_t insn
,
125 TCGReg
*r0
, TCGReg
*r1
, TCGReg
*r2
, TCGCond
*c3
)
127 *r0
= extract32(insn
, 8, 4);
128 *r1
= extract32(insn
, 12, 4);
129 *r2
= extract32(insn
, 16, 4);
130 *c3
= extract32(insn
, 20, 4);
133 static void tci_args_rrrm(uint32_t insn
,
134 TCGReg
*r0
, TCGReg
*r1
, TCGReg
*r2
, TCGMemOpIdx
*m3
)
136 *r0
= extract32(insn
, 8, 4);
137 *r1
= extract32(insn
, 12, 4);
138 *r2
= extract32(insn
, 16, 4);
139 *m3
= extract32(insn
, 20, 12);
142 static void tci_args_rrrbb(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
,
143 TCGReg
*r2
, uint8_t *i3
, uint8_t *i4
)
145 *r0
= extract32(insn
, 8, 4);
146 *r1
= extract32(insn
, 12, 4);
147 *r2
= extract32(insn
, 16, 4);
148 *i3
= extract32(insn
, 20, 6);
149 *i4
= extract32(insn
, 26, 6);
152 static void tci_args_rrrrr(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
,
153 TCGReg
*r2
, TCGReg
*r3
, TCGReg
*r4
)
155 *r0
= extract32(insn
, 8, 4);
156 *r1
= extract32(insn
, 12, 4);
157 *r2
= extract32(insn
, 16, 4);
158 *r3
= extract32(insn
, 20, 4);
159 *r4
= extract32(insn
, 24, 4);
162 #if TCG_TARGET_REG_BITS == 32
163 static void tci_args_rrrr(uint32_t insn
,
164 TCGReg
*r0
, TCGReg
*r1
, TCGReg
*r2
, TCGReg
*r3
)
166 *r0
= extract32(insn
, 8, 4);
167 *r1
= extract32(insn
, 12, 4);
168 *r2
= extract32(insn
, 16, 4);
169 *r3
= extract32(insn
, 20, 4);
172 static void tci_args_rrrrrc(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
,
173 TCGReg
*r2
, TCGReg
*r3
, TCGReg
*r4
, TCGCond
*c5
)
175 *r0
= extract32(insn
, 8, 4);
176 *r1
= extract32(insn
, 12, 4);
177 *r2
= extract32(insn
, 16, 4);
178 *r3
= extract32(insn
, 20, 4);
179 *r4
= extract32(insn
, 24, 4);
180 *c5
= extract32(insn
, 28, 4);
183 static void tci_args_rrrrrr(uint32_t insn
, TCGReg
*r0
, TCGReg
*r1
,
184 TCGReg
*r2
, TCGReg
*r3
, TCGReg
*r4
, TCGReg
*r5
)
186 *r0
= extract32(insn
, 8, 4);
187 *r1
= extract32(insn
, 12, 4);
188 *r2
= extract32(insn
, 16, 4);
189 *r3
= extract32(insn
, 20, 4);
190 *r4
= extract32(insn
, 24, 4);
191 *r5
= extract32(insn
, 28, 4);
195 static bool tci_compare32(uint32_t u0
, uint32_t u1
, TCGCond condition
)
232 g_assert_not_reached();
237 static bool tci_compare64(uint64_t u0
, uint64_t u1
, TCGCond condition
)
274 g_assert_not_reached();
/*
 * Shorthand for the guest load/store helpers used by the interpreter.
 * Each expands in a context where env, taddr, oi and tb_ptr are in scope;
 * tb_ptr supplies the "return address" for unwinding.
 */
#define qemu_ld_ub \
    cpu_ldub_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leuw \
    cpu_lduw_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leul \
    cpu_ldl_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leq \
    cpu_ldq_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beuw \
    cpu_lduw_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beul \
    cpu_ldl_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beq \
    cpu_ldq_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_b(X) \
    cpu_stb_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lew(X) \
    cpu_stw_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lel(X) \
    cpu_stl_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_leq(X) \
    cpu_stq_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bew(X) \
    cpu_stw_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bel(X) \
    cpu_stl_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_beq(X) \
    cpu_stq_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
/*
 * Emit matching case labels for ops that exist in both 32- and 64-bit
 * variants (CASE_32_64) or only as 64-bit (CASE_64).  On 32-bit hosts
 * the _i64 variants do not exist, so CASE_64 expands to nothing.
 */
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
320 /* Interpret pseudo code in tb. */
322 * Disable CFI checks.
323 * One possible operation in the pseudo code is a call to binary code.
324 * Therefore, disable CFI checks in the interpreter function
326 uintptr_t QEMU_DISABLE_CFI
tcg_qemu_tb_exec(CPUArchState
*env
,
327 const void *v_tb_ptr
)
329 const uint32_t *tb_ptr
= v_tb_ptr
;
330 tcg_target_ulong regs
[TCG_TARGET_NB_REGS
];
331 uint64_t stack
[(TCG_STATIC_CALL_ARGS_SIZE
+ TCG_STATIC_FRAME_SIZE
)
333 void *call_slots
[TCG_STATIC_CALL_ARGS_SIZE
/ sizeof(uint64_t)];
335 regs
[TCG_AREG0
] = (tcg_target_ulong
)env
;
336 regs
[TCG_REG_CALL_STACK
] = (uintptr_t)stack
;
337 /* Other call_slots entries initialized at first use (see below). */
338 call_slots
[0] = NULL
;
344 TCGReg r0
, r1
, r2
, r3
, r4
;
351 #if TCG_TARGET_REG_BITS == 32
360 opc
= extract32(insn
, 0, 8);
365 * Set up the ffi_avalue array once, delayed until now
366 * because many TB's do not make any calls. In tcg_gen_callN,
367 * we arranged for every real argument to be "left-aligned"
368 * in each 64-bit slot.
370 if (unlikely(call_slots
[0] == NULL
)) {
371 for (int i
= 0; i
< ARRAY_SIZE(call_slots
); ++i
) {
372 call_slots
[i
] = &stack
[i
];
376 tci_args_nl(insn
, tb_ptr
, &len
, &ptr
);
378 /* Helper functions may need to access the "return address" */
379 tci_tb_ptr
= (uintptr_t)tb_ptr
;
383 ffi_call(pptr
[1], pptr
[0], stack
, call_slots
);
386 /* Any result winds up "left-aligned" in the stack[0] slot. */
390 case 1: /* uint32_t */
392 * Note that libffi has an odd special case in that it will
393 * always widen an integral result to ffi_arg.
395 if (sizeof(ffi_arg
) == 4) {
396 regs
[TCG_REG_R0
] = *(uint32_t *)stack
;
400 case 2: /* uint64_t */
401 if (TCG_TARGET_REG_BITS
== 32) {
402 tci_write_reg64(regs
, TCG_REG_R1
, TCG_REG_R0
, stack
[0]);
404 regs
[TCG_REG_R0
] = stack
[0];
408 g_assert_not_reached();
413 tci_args_l(insn
, tb_ptr
, &ptr
);
416 case INDEX_op_setcond_i32
:
417 tci_args_rrrc(insn
, &r0
, &r1
, &r2
, &condition
);
418 regs
[r0
] = tci_compare32(regs
[r1
], regs
[r2
], condition
);
420 #if TCG_TARGET_REG_BITS == 32
421 case INDEX_op_setcond2_i32
:
422 tci_args_rrrrrc(insn
, &r0
, &r1
, &r2
, &r3
, &r4
, &condition
);
423 T1
= tci_uint64(regs
[r2
], regs
[r1
]);
424 T2
= tci_uint64(regs
[r4
], regs
[r3
]);
425 regs
[r0
] = tci_compare64(T1
, T2
, condition
);
427 #elif TCG_TARGET_REG_BITS == 64
428 case INDEX_op_setcond_i64
:
429 tci_args_rrrc(insn
, &r0
, &r1
, &r2
, &condition
);
430 regs
[r0
] = tci_compare64(regs
[r1
], regs
[r2
], condition
);
434 tci_args_rr(insn
, &r0
, &r1
);
437 case INDEX_op_tci_movi
:
438 tci_args_ri(insn
, &r0
, &t1
);
441 case INDEX_op_tci_movl
:
442 tci_args_rl(insn
, tb_ptr
, &r0
, &ptr
);
443 regs
[r0
] = *(tcg_target_ulong
*)ptr
;
446 /* Load/store operations (32 bit). */
449 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
450 ptr
= (void *)(regs
[r1
] + ofs
);
451 regs
[r0
] = *(uint8_t *)ptr
;
454 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
455 ptr
= (void *)(regs
[r1
] + ofs
);
456 regs
[r0
] = *(int8_t *)ptr
;
459 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
460 ptr
= (void *)(regs
[r1
] + ofs
);
461 regs
[r0
] = *(uint16_t *)ptr
;
464 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
465 ptr
= (void *)(regs
[r1
] + ofs
);
466 regs
[r0
] = *(int16_t *)ptr
;
468 case INDEX_op_ld_i32
:
470 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
471 ptr
= (void *)(regs
[r1
] + ofs
);
472 regs
[r0
] = *(uint32_t *)ptr
;
475 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
476 ptr
= (void *)(regs
[r1
] + ofs
);
477 *(uint8_t *)ptr
= regs
[r0
];
480 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
481 ptr
= (void *)(regs
[r1
] + ofs
);
482 *(uint16_t *)ptr
= regs
[r0
];
484 case INDEX_op_st_i32
:
486 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
487 ptr
= (void *)(regs
[r1
] + ofs
);
488 *(uint32_t *)ptr
= regs
[r0
];
491 /* Arithmetic operations (mixed 32/64 bit). */
494 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
495 regs
[r0
] = regs
[r1
] + regs
[r2
];
498 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
499 regs
[r0
] = regs
[r1
] - regs
[r2
];
502 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
503 regs
[r0
] = regs
[r1
] * regs
[r2
];
506 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
507 regs
[r0
] = regs
[r1
] & regs
[r2
];
510 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
511 regs
[r0
] = regs
[r1
] | regs
[r2
];
514 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
515 regs
[r0
] = regs
[r1
] ^ regs
[r2
];
518 /* Arithmetic operations (32 bit). */
520 case INDEX_op_div_i32
:
521 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
522 regs
[r0
] = (int32_t)regs
[r1
] / (int32_t)regs
[r2
];
524 case INDEX_op_divu_i32
:
525 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
526 regs
[r0
] = (uint32_t)regs
[r1
] / (uint32_t)regs
[r2
];
528 case INDEX_op_rem_i32
:
529 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
530 regs
[r0
] = (int32_t)regs
[r1
] % (int32_t)regs
[r2
];
532 case INDEX_op_remu_i32
:
533 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
534 regs
[r0
] = (uint32_t)regs
[r1
] % (uint32_t)regs
[r2
];
537 /* Shift/rotate operations (32 bit). */
539 case INDEX_op_shl_i32
:
540 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
541 regs
[r0
] = (uint32_t)regs
[r1
] << (regs
[r2
] & 31);
543 case INDEX_op_shr_i32
:
544 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
545 regs
[r0
] = (uint32_t)regs
[r1
] >> (regs
[r2
] & 31);
547 case INDEX_op_sar_i32
:
548 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
549 regs
[r0
] = (int32_t)regs
[r1
] >> (regs
[r2
] & 31);
551 #if TCG_TARGET_HAS_rot_i32
552 case INDEX_op_rotl_i32
:
553 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
554 regs
[r0
] = rol32(regs
[r1
], regs
[r2
] & 31);
556 case INDEX_op_rotr_i32
:
557 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
558 regs
[r0
] = ror32(regs
[r1
], regs
[r2
] & 31);
561 #if TCG_TARGET_HAS_deposit_i32
562 case INDEX_op_deposit_i32
:
563 tci_args_rrrbb(insn
, &r0
, &r1
, &r2
, &pos
, &len
);
564 regs
[r0
] = deposit32(regs
[r1
], pos
, len
, regs
[r2
]);
567 case INDEX_op_brcond_i32
:
568 tci_args_rl(insn
, tb_ptr
, &r0
, &ptr
);
569 if ((uint32_t)regs
[r0
]) {
573 #if TCG_TARGET_REG_BITS == 32
574 case INDEX_op_add2_i32
:
575 tci_args_rrrrrr(insn
, &r0
, &r1
, &r2
, &r3
, &r4
, &r5
);
576 T1
= tci_uint64(regs
[r3
], regs
[r2
]);
577 T2
= tci_uint64(regs
[r5
], regs
[r4
]);
578 tci_write_reg64(regs
, r1
, r0
, T1
+ T2
);
580 case INDEX_op_sub2_i32
:
581 tci_args_rrrrrr(insn
, &r0
, &r1
, &r2
, &r3
, &r4
, &r5
);
582 T1
= tci_uint64(regs
[r3
], regs
[r2
]);
583 T2
= tci_uint64(regs
[r5
], regs
[r4
]);
584 tci_write_reg64(regs
, r1
, r0
, T1
- T2
);
586 case INDEX_op_mulu2_i32
:
587 tci_args_rrrr(insn
, &r0
, &r1
, &r2
, &r3
);
588 tci_write_reg64(regs
, r1
, r0
, (uint64_t)regs
[r2
] * regs
[r3
]);
590 #endif /* TCG_TARGET_REG_BITS == 32 */
591 #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
593 tci_args_rr(insn
, &r0
, &r1
);
594 regs
[r0
] = (int8_t)regs
[r1
];
597 #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
599 tci_args_rr(insn
, &r0
, &r1
);
600 regs
[r0
] = (int16_t)regs
[r1
];
603 #if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
605 tci_args_rr(insn
, &r0
, &r1
);
606 regs
[r0
] = (uint8_t)regs
[r1
];
609 #if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
611 tci_args_rr(insn
, &r0
, &r1
);
612 regs
[r0
] = (uint16_t)regs
[r1
];
615 #if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
617 tci_args_rr(insn
, &r0
, &r1
);
618 regs
[r0
] = bswap16(regs
[r1
]);
621 #if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
623 tci_args_rr(insn
, &r0
, &r1
);
624 regs
[r0
] = bswap32(regs
[r1
]);
627 #if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
629 tci_args_rr(insn
, &r0
, &r1
);
630 regs
[r0
] = ~regs
[r1
];
633 #if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
635 tci_args_rr(insn
, &r0
, &r1
);
636 regs
[r0
] = -regs
[r1
];
639 #if TCG_TARGET_REG_BITS == 64
640 /* Load/store operations (64 bit). */
642 case INDEX_op_ld32s_i64
:
643 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
644 ptr
= (void *)(regs
[r1
] + ofs
);
645 regs
[r0
] = *(int32_t *)ptr
;
647 case INDEX_op_ld_i64
:
648 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
649 ptr
= (void *)(regs
[r1
] + ofs
);
650 regs
[r0
] = *(uint64_t *)ptr
;
652 case INDEX_op_st_i64
:
653 tci_args_rrs(insn
, &r0
, &r1
, &ofs
);
654 ptr
= (void *)(regs
[r1
] + ofs
);
655 *(uint64_t *)ptr
= regs
[r0
];
658 /* Arithmetic operations (64 bit). */
660 case INDEX_op_div_i64
:
661 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
662 regs
[r0
] = (int64_t)regs
[r1
] / (int64_t)regs
[r2
];
664 case INDEX_op_divu_i64
:
665 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
666 regs
[r0
] = (uint64_t)regs
[r1
] / (uint64_t)regs
[r2
];
668 case INDEX_op_rem_i64
:
669 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
670 regs
[r0
] = (int64_t)regs
[r1
] % (int64_t)regs
[r2
];
672 case INDEX_op_remu_i64
:
673 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
674 regs
[r0
] = (uint64_t)regs
[r1
] % (uint64_t)regs
[r2
];
677 /* Shift/rotate operations (64 bit). */
679 case INDEX_op_shl_i64
:
680 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
681 regs
[r0
] = regs
[r1
] << (regs
[r2
] & 63);
683 case INDEX_op_shr_i64
:
684 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
685 regs
[r0
] = regs
[r1
] >> (regs
[r2
] & 63);
687 case INDEX_op_sar_i64
:
688 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
689 regs
[r0
] = (int64_t)regs
[r1
] >> (regs
[r2
] & 63);
691 #if TCG_TARGET_HAS_rot_i64
692 case INDEX_op_rotl_i64
:
693 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
694 regs
[r0
] = rol64(regs
[r1
], regs
[r2
] & 63);
696 case INDEX_op_rotr_i64
:
697 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
698 regs
[r0
] = ror64(regs
[r1
], regs
[r2
] & 63);
701 #if TCG_TARGET_HAS_deposit_i64
702 case INDEX_op_deposit_i64
:
703 tci_args_rrrbb(insn
, &r0
, &r1
, &r2
, &pos
, &len
);
704 regs
[r0
] = deposit64(regs
[r1
], pos
, len
, regs
[r2
]);
707 case INDEX_op_brcond_i64
:
708 tci_args_rl(insn
, tb_ptr
, &r0
, &ptr
);
713 case INDEX_op_ext32s_i64
:
714 case INDEX_op_ext_i32_i64
:
715 tci_args_rr(insn
, &r0
, &r1
);
716 regs
[r0
] = (int32_t)regs
[r1
];
718 case INDEX_op_ext32u_i64
:
719 case INDEX_op_extu_i32_i64
:
720 tci_args_rr(insn
, &r0
, &r1
);
721 regs
[r0
] = (uint32_t)regs
[r1
];
723 #if TCG_TARGET_HAS_bswap64_i64
724 case INDEX_op_bswap64_i64
:
725 tci_args_rr(insn
, &r0
, &r1
);
726 regs
[r0
] = bswap64(regs
[r1
]);
729 #endif /* TCG_TARGET_REG_BITS == 64 */
731 /* QEMU specific operations. */
733 case INDEX_op_exit_tb
:
734 tci_args_l(insn
, tb_ptr
, &ptr
);
735 return (uintptr_t)ptr
;
737 case INDEX_op_goto_tb
:
738 tci_args_l(insn
, tb_ptr
, &ptr
);
739 tb_ptr
= *(void **)ptr
;
742 case INDEX_op_qemu_ld_i32
:
743 if (TARGET_LONG_BITS
<= TCG_TARGET_REG_BITS
) {
744 tci_args_rrm(insn
, &r0
, &r1
, &oi
);
747 tci_args_rrrm(insn
, &r0
, &r1
, &r2
, &oi
);
748 taddr
= tci_uint64(regs
[r2
], regs
[r1
]);
750 switch (get_memop(oi
) & (MO_BSWAP
| MO_SSIZE
)) {
755 tmp32
= (int8_t)qemu_ld_ub
;
758 tmp32
= qemu_ld_leuw
;
761 tmp32
= (int16_t)qemu_ld_leuw
;
764 tmp32
= qemu_ld_leul
;
767 tmp32
= qemu_ld_beuw
;
770 tmp32
= (int16_t)qemu_ld_beuw
;
773 tmp32
= qemu_ld_beul
;
776 g_assert_not_reached();
781 case INDEX_op_qemu_ld_i64
:
782 if (TCG_TARGET_REG_BITS
== 64) {
783 tci_args_rrm(insn
, &r0
, &r1
, &oi
);
785 } else if (TARGET_LONG_BITS
<= TCG_TARGET_REG_BITS
) {
786 tci_args_rrrm(insn
, &r0
, &r1
, &r2
, &oi
);
789 tci_args_rrrrr(insn
, &r0
, &r1
, &r2
, &r3
, &r4
);
790 taddr
= tci_uint64(regs
[r3
], regs
[r2
]);
793 switch (get_memop(oi
) & (MO_BSWAP
| MO_SSIZE
)) {
798 tmp64
= (int8_t)qemu_ld_ub
;
801 tmp64
= qemu_ld_leuw
;
804 tmp64
= (int16_t)qemu_ld_leuw
;
807 tmp64
= qemu_ld_leul
;
810 tmp64
= (int32_t)qemu_ld_leul
;
816 tmp64
= qemu_ld_beuw
;
819 tmp64
= (int16_t)qemu_ld_beuw
;
822 tmp64
= qemu_ld_beul
;
825 tmp64
= (int32_t)qemu_ld_beul
;
831 g_assert_not_reached();
833 if (TCG_TARGET_REG_BITS
== 32) {
834 tci_write_reg64(regs
, r1
, r0
, tmp64
);
840 case INDEX_op_qemu_st_i32
:
841 if (TARGET_LONG_BITS
<= TCG_TARGET_REG_BITS
) {
842 tci_args_rrm(insn
, &r0
, &r1
, &oi
);
845 tci_args_rrrm(insn
, &r0
, &r1
, &r2
, &oi
);
846 taddr
= tci_uint64(regs
[r2
], regs
[r1
]);
849 switch (get_memop(oi
) & (MO_BSWAP
| MO_SIZE
)) {
866 g_assert_not_reached();
870 case INDEX_op_qemu_st_i64
:
871 if (TCG_TARGET_REG_BITS
== 64) {
872 tci_args_rrm(insn
, &r0
, &r1
, &oi
);
876 if (TARGET_LONG_BITS
<= TCG_TARGET_REG_BITS
) {
877 tci_args_rrrm(insn
, &r0
, &r1
, &r2
, &oi
);
880 tci_args_rrrrr(insn
, &r0
, &r1
, &r2
, &r3
, &r4
);
881 taddr
= tci_uint64(regs
[r3
], regs
[r2
]);
884 tmp64
= tci_uint64(regs
[r1
], regs
[r0
]);
886 switch (get_memop(oi
) & (MO_BSWAP
| MO_SIZE
)) {
909 g_assert_not_reached();
914 /* Ensure ordering for all kinds */
918 g_assert_not_reached();
924 * Disassembler that matches the interpreter
927 static const char *str_r(TCGReg r
)
929 static const char regs
[TCG_TARGET_NB_REGS
][4] = {
930 "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
931 "r8", "r9", "r10", "r11", "r12", "r13", "env", "sp"
934 QEMU_BUILD_BUG_ON(TCG_AREG0
!= TCG_REG_R14
);
935 QEMU_BUILD_BUG_ON(TCG_REG_CALL_STACK
!= TCG_REG_R15
);
937 assert((unsigned)r
< TCG_TARGET_NB_REGS
);
941 static const char *str_c(TCGCond c
)
943 static const char cond
[16][8] = {
944 [TCG_COND_NEVER
] = "never",
945 [TCG_COND_ALWAYS
] = "always",
946 [TCG_COND_EQ
] = "eq",
947 [TCG_COND_NE
] = "ne",
948 [TCG_COND_LT
] = "lt",
949 [TCG_COND_GE
] = "ge",
950 [TCG_COND_LE
] = "le",
951 [TCG_COND_GT
] = "gt",
952 [TCG_COND_LTU
] = "ltu",
953 [TCG_COND_GEU
] = "geu",
954 [TCG_COND_LEU
] = "leu",
955 [TCG_COND_GTU
] = "gtu",
958 assert((unsigned)c
< ARRAY_SIZE(cond
));
959 assert(cond
[c
][0] != 0);
963 /* Disassemble TCI bytecode. */
964 int print_insn_tci(bfd_vma addr
, disassemble_info
*info
)
966 const uint32_t *tb_ptr
= (const void *)(uintptr_t)addr
;
971 TCGReg r0
, r1
, r2
, r3
, r4
;
972 #if TCG_TARGET_REG_BITS == 32
982 /* TCI is always the host, so we don't need to load indirect. */
985 info
->fprintf_func(info
->stream
, "%08x ", insn
);
987 op
= extract32(insn
, 0, 8);
988 def
= &tcg_op_defs
[op
];
993 case INDEX_op_exit_tb
:
994 case INDEX_op_goto_tb
:
995 tci_args_l(insn
, tb_ptr
, &ptr
);
996 info
->fprintf_func(info
->stream
, "%-12s %p", op_name
, ptr
);
1000 tci_args_nl(insn
, tb_ptr
, &len
, &ptr
);
1001 info
->fprintf_func(info
->stream
, "%-12s %d, %p", op_name
, len
, ptr
);
1004 case INDEX_op_brcond_i32
:
1005 case INDEX_op_brcond_i64
:
1006 tci_args_rl(insn
, tb_ptr
, &r0
, &ptr
);
1007 info
->fprintf_func(info
->stream
, "%-12s %s, 0, ne, %p",
1008 op_name
, str_r(r0
), ptr
);
1011 case INDEX_op_setcond_i32
:
1012 case INDEX_op_setcond_i64
:
1013 tci_args_rrrc(insn
, &r0
, &r1
, &r2
, &c
);
1014 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %s",
1015 op_name
, str_r(r0
), str_r(r1
), str_r(r2
), str_c(c
));
1018 case INDEX_op_tci_movi
:
1019 tci_args_ri(insn
, &r0
, &i1
);
1020 info
->fprintf_func(info
->stream
, "%-12s %s, 0x%" TCG_PRIlx
,
1021 op_name
, str_r(r0
), i1
);
1024 case INDEX_op_tci_movl
:
1025 tci_args_rl(insn
, tb_ptr
, &r0
, &ptr
);
1026 info
->fprintf_func(info
->stream
, "%-12s %s, %p",
1027 op_name
, str_r(r0
), ptr
);
1030 case INDEX_op_ld8u_i32
:
1031 case INDEX_op_ld8u_i64
:
1032 case INDEX_op_ld8s_i32
:
1033 case INDEX_op_ld8s_i64
:
1034 case INDEX_op_ld16u_i32
:
1035 case INDEX_op_ld16u_i64
:
1036 case INDEX_op_ld16s_i32
:
1037 case INDEX_op_ld16s_i64
:
1038 case INDEX_op_ld32u_i64
:
1039 case INDEX_op_ld32s_i64
:
1040 case INDEX_op_ld_i32
:
1041 case INDEX_op_ld_i64
:
1042 case INDEX_op_st8_i32
:
1043 case INDEX_op_st8_i64
:
1044 case INDEX_op_st16_i32
:
1045 case INDEX_op_st16_i64
:
1046 case INDEX_op_st32_i64
:
1047 case INDEX_op_st_i32
:
1048 case INDEX_op_st_i64
:
1049 tci_args_rrs(insn
, &r0
, &r1
, &s2
);
1050 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %d",
1051 op_name
, str_r(r0
), str_r(r1
), s2
);
1054 case INDEX_op_mov_i32
:
1055 case INDEX_op_mov_i64
:
1056 case INDEX_op_ext8s_i32
:
1057 case INDEX_op_ext8s_i64
:
1058 case INDEX_op_ext8u_i32
:
1059 case INDEX_op_ext8u_i64
:
1060 case INDEX_op_ext16s_i32
:
1061 case INDEX_op_ext16s_i64
:
1062 case INDEX_op_ext16u_i32
:
1063 case INDEX_op_ext32s_i64
:
1064 case INDEX_op_ext32u_i64
:
1065 case INDEX_op_ext_i32_i64
:
1066 case INDEX_op_extu_i32_i64
:
1067 case INDEX_op_bswap16_i32
:
1068 case INDEX_op_bswap16_i64
:
1069 case INDEX_op_bswap32_i32
:
1070 case INDEX_op_bswap32_i64
:
1071 case INDEX_op_bswap64_i64
:
1072 case INDEX_op_not_i32
:
1073 case INDEX_op_not_i64
:
1074 case INDEX_op_neg_i32
:
1075 case INDEX_op_neg_i64
:
1076 tci_args_rr(insn
, &r0
, &r1
);
1077 info
->fprintf_func(info
->stream
, "%-12s %s, %s",
1078 op_name
, str_r(r0
), str_r(r1
));
1081 case INDEX_op_add_i32
:
1082 case INDEX_op_add_i64
:
1083 case INDEX_op_sub_i32
:
1084 case INDEX_op_sub_i64
:
1085 case INDEX_op_mul_i32
:
1086 case INDEX_op_mul_i64
:
1087 case INDEX_op_and_i32
:
1088 case INDEX_op_and_i64
:
1089 case INDEX_op_or_i32
:
1090 case INDEX_op_or_i64
:
1091 case INDEX_op_xor_i32
:
1092 case INDEX_op_xor_i64
:
1093 case INDEX_op_div_i32
:
1094 case INDEX_op_div_i64
:
1095 case INDEX_op_rem_i32
:
1096 case INDEX_op_rem_i64
:
1097 case INDEX_op_divu_i32
:
1098 case INDEX_op_divu_i64
:
1099 case INDEX_op_remu_i32
:
1100 case INDEX_op_remu_i64
:
1101 case INDEX_op_shl_i32
:
1102 case INDEX_op_shl_i64
:
1103 case INDEX_op_shr_i32
:
1104 case INDEX_op_shr_i64
:
1105 case INDEX_op_sar_i32
:
1106 case INDEX_op_sar_i64
:
1107 case INDEX_op_rotl_i32
:
1108 case INDEX_op_rotl_i64
:
1109 case INDEX_op_rotr_i32
:
1110 case INDEX_op_rotr_i64
:
1111 tci_args_rrr(insn
, &r0
, &r1
, &r2
);
1112 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s",
1113 op_name
, str_r(r0
), str_r(r1
), str_r(r2
));
1116 case INDEX_op_deposit_i32
:
1117 case INDEX_op_deposit_i64
:
1118 tci_args_rrrbb(insn
, &r0
, &r1
, &r2
, &pos
, &len
);
1119 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %d, %d",
1120 op_name
, str_r(r0
), str_r(r1
), str_r(r2
), pos
, len
);
1123 #if TCG_TARGET_REG_BITS == 32
1124 case INDEX_op_setcond2_i32
:
1125 tci_args_rrrrrc(insn
, &r0
, &r1
, &r2
, &r3
, &r4
, &c
);
1126 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %s, %s, %s",
1127 op_name
, str_r(r0
), str_r(r1
), str_r(r2
),
1128 str_r(r3
), str_r(r4
), str_c(c
));
1131 case INDEX_op_mulu2_i32
:
1132 tci_args_rrrr(insn
, &r0
, &r1
, &r2
, &r3
);
1133 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %s",
1134 op_name
, str_r(r0
), str_r(r1
),
1135 str_r(r2
), str_r(r3
));
1138 case INDEX_op_add2_i32
:
1139 case INDEX_op_sub2_i32
:
1140 tci_args_rrrrrr(insn
, &r0
, &r1
, &r2
, &r3
, &r4
, &r5
);
1141 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %s, %s, %s",
1142 op_name
, str_r(r0
), str_r(r1
), str_r(r2
),
1143 str_r(r3
), str_r(r4
), str_r(r5
));
1147 case INDEX_op_qemu_ld_i64
:
1148 case INDEX_op_qemu_st_i64
:
1149 len
= DIV_ROUND_UP(64, TCG_TARGET_REG_BITS
);
1151 case INDEX_op_qemu_ld_i32
:
1152 case INDEX_op_qemu_st_i32
:
1155 len
+= DIV_ROUND_UP(TARGET_LONG_BITS
, TCG_TARGET_REG_BITS
);
1158 tci_args_rrm(insn
, &r0
, &r1
, &oi
);
1159 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %x",
1160 op_name
, str_r(r0
), str_r(r1
), oi
);
1163 tci_args_rrrm(insn
, &r0
, &r1
, &r2
, &oi
);
1164 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %x",
1165 op_name
, str_r(r0
), str_r(r1
), str_r(r2
), oi
);
1168 tci_args_rrrrr(insn
, &r0
, &r1
, &r2
, &r3
, &r4
);
1169 info
->fprintf_func(info
->stream
, "%-12s %s, %s, %s, %s, %s",
1170 op_name
, str_r(r0
), str_r(r1
),
1171 str_r(r2
), str_r(r3
), str_r(r4
));
1174 g_assert_not_reached();
1179 /* tcg_out_nop_fill uses zeros */
1181 info
->fprintf_func(info
->stream
, "align");
1187 info
->fprintf_func(info
->stream
, "illegal opcode %d", op
);
1191 return sizeof(insn
);