/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"

/* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
 * Without assertions, the interpreter runs much faster. */
#if defined(CONFIG_DEBUG_TCG)
# define tci_assert(cond) assert(cond)
#else
# define tci_assert(cond) ((void)0)
#endif
#include "qemu-common.h"
#include "tcg/tcg.h"           /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg/tcg-op.h"
#include "qemu/compiler.h"
#if MAX_OPC_PARAM_IARGS != 6
# error Fix needed, number of supported input arguments changed!
#endif
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif
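/*
 * Note: on a 32-bit host each 64-bit helper argument is passed as two
 * tcg_target_ulong halves, so the 32-bit variant above takes
 * 2 * MAX_OPC_PARAM_IARGS = 12 parameters where the 64-bit one takes 6.
 */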
__thread uintptr_t tci_tb_ptr;
static tcg_target_ulong tci_read_reg(const tcg_target_ulong *regs, TCGReg index)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    return regs[index];
}
static void
tci_write_reg(tcg_target_ulong *regs, TCGReg index, tcg_target_ulong value)
{
    tci_assert(index < TCG_TARGET_NB_REGS);
    tci_assert(index != TCG_AREG0);
    tci_assert(index != TCG_REG_CALL_STACK);
    regs[index] = value;
}
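/*
 * TCG_AREG0 (the CPU env pointer) and TCG_REG_CALL_STACK are initialised
 * once in tcg_qemu_tb_exec() and must never be clobbered by generated
 * code, hence the extra asserts above.
 */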
#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
                            uint32_t low_index, uint64_t value)
{
    tci_write_reg(regs, low_index, value);
    tci_write_reg(regs, high_index, value >> 32);
}
#endif
#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
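/* For example, tci_uint64(0x00000001, 0x00000002) is 0x0000000100000002. */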
/* Read constant byte from bytecode. */
static uint8_t tci_read_b(const uint8_t **tb_ptr)
{
    return *(tb_ptr[0]++);
}
/* Read register number from bytecode. */
static TCGReg tci_read_r(const uint8_t **tb_ptr)
{
    uint8_t regno = tci_read_b(tb_ptr);
    tci_assert(regno < TCG_TARGET_NB_REGS);
    return regno;
}
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(const uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(const tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value = *(const uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value = *(const int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
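/*
 * The readers above consume the TCI bytecode stream in order.  Each
 * instruction is laid out as an opcode byte, a total-size byte, and then
 * the operands: register numbers as single bytes, immediates in native
 * representation.  tcg_qemu_tb_exec() below skips the two header bytes
 * and then pulls operands off the stream with these helpers.
 */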
/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong
tci_read_rval(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(regs, **tb_ptr);
    *tb_ptr += 1;
    return value;
}
#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    uint32_t low = tci_read_rval(regs, tb_ptr);
    return tci_uint64(tci_read_rval(regs, tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(const tcg_target_ulong *regs,
                             const uint8_t **tb_ptr)
{
    return tci_read_rval(regs, tb_ptr);
}
#endif
/* Read indexed register(s) with target address from bytecode. */
static target_ulong
tci_read_ulong(const tcg_target_ulong *regs, const uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_rval(regs, tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_rval(regs, tb_ptr) << 32;
#endif
    return taddr;
}
static tcg_target_ulong tci_read_label(const uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    tci_assert(label != 0);
    return label;
}
/*
 * Load sets of arguments all at once.  The naming convention is:
 *   tci_args_<arguments>
 * where arguments is a sequence of
 *
 *   c = condition (TCGCond)
 *   i = immediate (uint32_t)
 *   I = immediate (tcg_target_ulong)
 *   l = label or pointer
 *   r = register
 *   s = signed ldst offset
 */
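/*
 * For example, an opcode whose operands are encoded as "rrs" (two
 * register numbers followed by a signed load/store offset) is decoded
 * below with:
 *
 *     tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
 */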
static void tci_args_l(const uint8_t **tb_ptr, void **l0)
{
    *l0 = (void *)tci_read_label(tb_ptr);
}
static void tci_args_rr(const uint8_t **tb_ptr,
                        TCGReg *r0, TCGReg *r1)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
}
static void tci_args_ri(const uint8_t **tb_ptr,
                        TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = tci_read_r(tb_ptr);
    *i1 = tci_read_i32(tb_ptr);
}
#if TCG_TARGET_REG_BITS == 64
static void tci_args_rI(const uint8_t **tb_ptr,
                        TCGReg *r0, tcg_target_ulong *i1)
{
    *r0 = tci_read_r(tb_ptr);
    *i1 = tci_read_i(tb_ptr);
}
#endif
static void tci_args_rrr(const uint8_t **tb_ptr,
                         TCGReg *r0, TCGReg *r1, TCGReg *r2)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
}
static void tci_args_rrs(const uint8_t **tb_ptr,
                         TCGReg *r0, TCGReg *r1, int32_t *i2)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *i2 = tci_read_s32(tb_ptr);
}
static void tci_args_rrcl(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGCond *c2, void **l3)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *c2 = tci_read_b(tb_ptr);
    *l3 = (void *)tci_read_label(tb_ptr);
}
static void tci_args_rrrc(const uint8_t **tb_ptr,
                          TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *c3 = tci_read_b(tb_ptr);
}
#if TCG_TARGET_REG_BITS == 32
static void tci_args_rrrrcl(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGCond *c4, void **l5)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *c4 = tci_read_b(tb_ptr);
    *l5 = (void *)tci_read_label(tb_ptr);
}
static void tci_args_rrrrrc(const uint8_t **tb_ptr, TCGReg *r0, TCGReg *r1,
                            TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
{
    *r0 = tci_read_r(tb_ptr);
    *r1 = tci_read_r(tb_ptr);
    *r2 = tci_read_r(tb_ptr);
    *r3 = tci_read_r(tb_ptr);
    *r4 = tci_read_r(tb_ptr);
    *c5 = tci_read_b(tb_ptr);
}
#endif
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    int32_t i0 = u0;
    int32_t i1 = u1;

    switch (condition) {
    case TCG_COND_EQ:  return u0 == u1;
    case TCG_COND_NE:  return u0 != u1;
    case TCG_COND_LT:  return i0 < i1;
    case TCG_COND_GE:  return i0 >= i1;
    case TCG_COND_LE:  return i0 <= i1;
    case TCG_COND_GT:  return i0 > i1;
    case TCG_COND_LTU: return u0 < u1;
    case TCG_COND_GEU: return u0 >= u1;
    case TCG_COND_LEU: return u0 <= u1;
    case TCG_COND_GTU: return u0 > u1;
    default:
        g_assert_not_reached();
    }
}
static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    int64_t i0 = u0;
    int64_t i1 = u1;

    switch (condition) {
    case TCG_COND_EQ:  return u0 == u1;
    case TCG_COND_NE:  return u0 != u1;
    case TCG_COND_LT:  return i0 < i1;
    case TCG_COND_GE:  return i0 >= i1;
    case TCG_COND_LE:  return i0 <= i1;
    case TCG_COND_GT:  return i0 > i1;
    case TCG_COND_LTU: return u0 < u1;
    case TCG_COND_GEU: return u0 >= u1;
    case TCG_COND_LEU: return u0 <= u1;
    case TCG_COND_GTU: return u0 > u1;
    default:
        g_assert_not_reached();
    }
}
#define qemu_ld_ub \
    cpu_ldub_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leuw \
    cpu_lduw_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leul \
    cpu_ldl_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_leq \
    cpu_ldq_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beuw \
    cpu_lduw_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beul \
    cpu_ldl_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_ld_beq \
    cpu_ldq_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_b(X) \
    cpu_stb_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lew(X) \
    cpu_stw_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_lel(X) \
    cpu_stl_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_leq(X) \
    cpu_stq_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bew(X) \
    cpu_stw_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_bel(X) \
    cpu_stl_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
#define qemu_st_beq(X) \
    cpu_stq_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
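/*
 * In all of the macros above, "oi" is a TCGMemOpIdx packed by the code
 * generator: get_memop(oi) recovers the MemOp (access size, signedness
 * and endianness) and get_mmuidx(oi) the MMU index for the access.
 */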
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i64): \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
        case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
        case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
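/*
 * For example, on a 64-bit host CASE_32_64(add) expands to
 *     case INDEX_op_add_i64:
 *     case INDEX_op_add_i32:
 * so a single handler body serves both operand widths.
 */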
/* Interpret pseudo code in tb. */
/*
 * Disable CFI checks.
 * One possible operation in the pseudo code is a call to binary code.
 * Therefore, disable CFI checks in the interpreter function.
 */
uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
                                            const void *v_tb_ptr)
{
    const uint8_t *tb_ptr = v_tb_ptr;
    tcg_target_ulong regs[TCG_TARGET_NB_REGS];
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t ret = 0;

    regs[TCG_AREG0] = (tcg_target_ulong)env;
    regs[TCG_REG_CALL_STACK] = sp_value;
    tci_assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        const uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        TCGReg r0, r1, r2;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        TCGReg r3, r4;
        uint64_t T1, T2;
#endif
        TCGMemOpIdx oi;
        int32_t ofs;
        void *ptr;

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_call:
            t0 = tci_read_i(&tb_ptr);
            tci_tb_ptr = (uintptr_t)tb_ptr;
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R4),
                                          tci_read_reg(regs, TCG_REG_R5),
                                          tci_read_reg(regs, TCG_REG_R6),
                                          tci_read_reg(regs, TCG_REG_R7),
                                          tci_read_reg(regs, TCG_REG_R8),
                                          tci_read_reg(regs, TCG_REG_R9),
                                          tci_read_reg(regs, TCG_REG_R10),
                                          tci_read_reg(regs, TCG_REG_R11));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
            tci_write_reg(regs, TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(regs, TCG_REG_R0),
                                          tci_read_reg(regs, TCG_REG_R1),
                                          tci_read_reg(regs, TCG_REG_R2),
                                          tci_read_reg(regs, TCG_REG_R3),
                                          tci_read_reg(regs, TCG_REG_R4),
                                          tci_read_reg(regs, TCG_REG_R5));
            tci_write_reg(regs, TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            tci_args_l(&tb_ptr, &ptr);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = ptr;
            continue;
        case INDEX_op_setcond_i32:
            tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            tci_args_rrrrrc(&tb_ptr, &r0, &r1, &r2, &r3, &r4, &condition);
            T1 = tci_uint64(regs[r2], regs[r1]);
            T2 = tci_uint64(regs[r4], regs[r3]);
            regs[r0] = tci_compare64(T1, T2, condition);
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            tci_args_rrrc(&tb_ptr, &r0, &r1, &r2, &condition);
            regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
            break;
#endif
        CASE_32_64(mov)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = regs[r1];
            break;
        case INDEX_op_tci_movi_i32:
            tci_args_ri(&tb_ptr, &r0, &t1);
            regs[r0] = t1;
            break;
            /* Load/store operations (32 bit). */

        CASE_32_64(ld8u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint8_t *)ptr;
            break;
        CASE_32_64(ld8s)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int8_t *)ptr;
            break;
        CASE_32_64(ld16u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint16_t *)ptr;
            break;
        CASE_32_64(ld16s)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int16_t *)ptr;
            break;
        case INDEX_op_ld_i32:
        CASE_64(ld32u)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint32_t *)ptr;
            break;
        CASE_32_64(st8)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint8_t *)ptr = regs[r0];
            break;
        CASE_32_64(st16)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint16_t *)ptr = regs[r0];
            break;
        case INDEX_op_st_i32:
        CASE_64(st32)
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint32_t *)ptr = regs[r0];
            break;
            /* Arithmetic operations (mixed 32/64 bit). */

        CASE_32_64(add)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] + regs[r2];
            break;
        CASE_32_64(sub)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] - regs[r2];
            break;
        CASE_32_64(mul)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] * regs[r2];
            break;
        CASE_32_64(and)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] & regs[r2];
            break;
        CASE_32_64(or)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] | regs[r2];
            break;
        CASE_32_64(xor)
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] ^ regs[r2];
            break;
            /* Arithmetic operations (32 bit). */

        case INDEX_op_div_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
            break;
        case INDEX_op_divu_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
            break;
        case INDEX_op_rem_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
            break;
        case INDEX_op_remu_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
            break;
            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] << (regs[r2] & 31);
            break;
        case INDEX_op_shr_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint32_t)regs[r1] >> (regs[r2] & 31);
            break;
        case INDEX_op_sar_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int32_t)regs[r1] >> (regs[r2] & 31);
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = rol32(regs[r1], regs[r2] & 31);
            break;
        case INDEX_op_rotr_i32:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = ror32(regs[r1], regs[r2] & 31);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_rval(regs, &tb_ptr);
            t2 = tci_read_rval(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
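            /*
             * tmp32 is a mask of tmp8 one-bits at bit position tmp16;
             * the deposit below replaces that field of t1 with the low
             * bits of t2.
             */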
            tci_write_reg(regs, t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            tci_args_rrcl(&tb_ptr, &r0, &r1, &condition, &ptr);
            if (tci_compare32(regs[r0], regs[r1], condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = ptr;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 += tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(regs, &tb_ptr);
            tmp64 -= tci_read_r64(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tci_args_rrrrcl(&tb_ptr, &r0, &r1, &r2, &r3, &condition, &ptr);
            T1 = tci_uint64(regs[r1], regs[r0]);
            T2 = tci_uint64(regs[r3], regs[r2]);
            if (tci_compare64(T1, T2, condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = ptr;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_rval(regs, &tb_ptr);
            tmp64 = (uint32_t)tci_read_rval(regs, &tb_ptr);
            tci_write_reg64(regs, t1, t0, (uint32_t)t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
        CASE_32_64(ext8s)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
        CASE_32_64(ext16s)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
        CASE_32_64(ext8u)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint8_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
        CASE_32_64(ext16u)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint16_t)regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
        CASE_32_64(bswap16)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap16(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
        CASE_32_64(bswap32)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap32(regs[r1]);
            break;
#endif
#if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
        CASE_32_64(not)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = ~regs[r1];
            break;
#endif
#if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
        CASE_32_64(neg)
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = -regs[r1];
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_tci_movi_i64:
            tci_args_rI(&tb_ptr, &r0, &t1);
            regs[r0] = t1;
            break;
            /* Load/store operations (64 bit). */

        case INDEX_op_ld32s_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(int32_t *)ptr;
            break;
        case INDEX_op_ld_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            regs[r0] = *(uint64_t *)ptr;
            break;
        case INDEX_op_st_i64:
            tci_args_rrs(&tb_ptr, &r0, &r1, &ofs);
            ptr = (void *)(regs[r1] + ofs);
            *(uint64_t *)ptr = regs[r0];
            break;
            /* Arithmetic operations (64 bit). */

        case INDEX_op_div_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
            break;
        case INDEX_op_divu_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
            break;
        case INDEX_op_rem_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
            break;
        case INDEX_op_remu_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
            break;
            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] << (regs[r2] & 63);
            break;
        case INDEX_op_shr_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = regs[r1] >> (regs[r2] & 63);
            break;
        case INDEX_op_sar_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = (int64_t)regs[r1] >> (regs[r2] & 63);
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = rol64(regs[r1], regs[r2] & 63);
            break;
        case INDEX_op_rotr_i64:
            tci_args_rrr(&tb_ptr, &r0, &r1, &r2);
            regs[r0] = ror64(regs[r1], regs[r2] & 63);
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_rval(regs, &tb_ptr);
            t2 = tci_read_rval(regs, &tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg(regs, t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            tci_args_rrcl(&tb_ptr, &r0, &r1, &condition, &ptr);
            if (tci_compare64(regs[r0], regs[r1], condition)) {
                tci_assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = ptr;
                continue;
            }
            break;
        case INDEX_op_ext32s_i64:
        case INDEX_op_ext_i32_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (int32_t)regs[r1];
            break;
        case INDEX_op_ext32u_i64:
        case INDEX_op_extu_i32_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = (uint32_t)regs[r1];
            break;
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            tci_args_rr(&tb_ptr, &r0, &r1);
            regs[r0] = bswap64(regs[r1]);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */
            /* QEMU specific operations. */

        case INDEX_op_exit_tb:
            ret = *(uint64_t *)tb_ptr;
            goto exit;
        case INDEX_op_goto_tb:
            /* Jump address is aligned */
            tb_ptr = QEMU_ALIGN_PTR_UP(tb_ptr, 4);
            t0 = qatomic_read((int32_t *)tb_ptr);
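            /*
             * The displacement just read is the patchable goto_tb
             * target: when this TB is chained to another, the
             * displacement is rewritten with an atomic store, which
             * the qatomic_read() above pairs with.
             */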
            tb_ptr += sizeof(int32_t);
            tci_assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                g_assert_not_reached();
            }
            tci_write_reg(regs, t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SSIZE)) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                g_assert_not_reached();
            }
            tci_write_reg(regs, t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(regs, t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_rval(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                g_assert_not_reached();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(regs, &tb_ptr);
            taddr = tci_read_ulong(regs, &tb_ptr);
            oi = tci_read_i(&tb_ptr);
            switch (get_memop(oi) & (MO_BSWAP | MO_SIZE)) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                g_assert_not_reached();
            }
            break;
        case INDEX_op_mb:
            /* Ensure ordering for all kinds */
            smp_mb();
            break;
        default:
            g_assert_not_reached();
        }
        tci_assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return ret;
}