/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011, 2016 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
20 #include "qemu/osdep.h"
22 /* Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
23 * Without assertions, the interpreter runs much faster. */
24 #if defined(CONFIG_DEBUG_TCG)
25 # define tci_assert(cond) assert(cond)
27 # define tci_assert(cond) ((void)(cond))
30 #include "qemu-common.h"
31 #include "tcg/tcg.h" /* MAX_OPC_PARAM_IARGS */
32 #include "exec/cpu_ldst.h"
33 #include "tcg/tcg-op.h"
34 #include "qemu/compiler.h"
36 #if MAX_OPC_PARAM_IARGS != 6
37 # error Fix needed, number of supported input arguments changed!
39 #if TCG_TARGET_REG_BITS == 32
40 typedef uint64_t (*helper_function
)(tcg_target_ulong
, tcg_target_ulong
,
41 tcg_target_ulong
, tcg_target_ulong
,
42 tcg_target_ulong
, tcg_target_ulong
,
43 tcg_target_ulong
, tcg_target_ulong
,
44 tcg_target_ulong
, tcg_target_ulong
,
45 tcg_target_ulong
, tcg_target_ulong
);
47 typedef uint64_t (*helper_function
)(tcg_target_ulong
, tcg_target_ulong
,
48 tcg_target_ulong
, tcg_target_ulong
,
49 tcg_target_ulong
, tcg_target_ulong
);
52 __thread
uintptr_t tci_tb_ptr
;
54 static tcg_target_ulong
tci_read_reg(const tcg_target_ulong
*regs
, TCGReg index
)
56 tci_assert(index
< TCG_TARGET_NB_REGS
);
61 tci_write_reg(tcg_target_ulong
*regs
, TCGReg index
, tcg_target_ulong value
)
63 tci_assert(index
< TCG_TARGET_NB_REGS
);
64 tci_assert(index
!= TCG_AREG0
);
65 tci_assert(index
!= TCG_REG_CALL_STACK
);
69 static void tci_write_reg64(tcg_target_ulong
*regs
, uint32_t high_index
,
70 uint32_t low_index
, uint64_t value
)
72 tci_write_reg(regs
, low_index
, value
);
73 tci_write_reg(regs
, high_index
, value
>> 32);
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    uint64_t result = (uint64_t)high << 32;
    return result | low;
}
/* Fetch one constant byte from the bytecode stream and advance it. */
static uint8_t tci_read_b(const uint8_t **tb_ptr)
{
    const uint8_t *p = *tb_ptr;
    uint8_t value = *p;

    *tb_ptr = p + 1;
    return value;
}
88 /* Read register number from bytecode. */
89 static TCGReg
tci_read_r(const uint8_t **tb_ptr
)
91 uint8_t regno
= tci_read_b(tb_ptr
);
92 tci_assert(regno
< TCG_TARGET_NB_REGS
);
96 /* Read constant (native size) from bytecode. */
97 static tcg_target_ulong
tci_read_i(const uint8_t **tb_ptr
)
99 tcg_target_ulong value
= *(const tcg_target_ulong
*)(*tb_ptr
);
100 *tb_ptr
+= sizeof(value
);
/* Read an unsigned 32-bit constant from the bytecode stream and skip it. */
static uint32_t tci_read_i32(const uint8_t **tb_ptr)
{
    uint32_t value = *(const uint32_t *)(*tb_ptr);

    *tb_ptr += sizeof(value);
    /* The truncated original was missing this return. */
    return value;
}
/* Read a signed 32-bit constant from the bytecode stream and skip it. */
static int32_t tci_read_s32(const uint8_t **tb_ptr)
{
    int32_t value = *(const int32_t *)(*tb_ptr);

    *tb_ptr += sizeof(value);
    /* The truncated original was missing this return. */
    return value;
}
120 static tcg_target_ulong
tci_read_label(const uint8_t **tb_ptr
)
122 return tci_read_i(tb_ptr
);
126 * Load sets of arguments all at once. The naming convention is:
127 * tci_args_<arguments>
128 * where arguments is a sequence of
130 * b = immediate (bit position)
131 * c = condition (TCGCond)
132 * i = immediate (uint32_t)
133 * I = immediate (tcg_target_ulong)
134 * l = label or pointer
135 * m = immediate (TCGMemOpIdx)
137 * s = signed ldst offset
/*
 * Verify that operand decoding consumed exactly as many bytes as the
 * instruction's size byte (stored right after the opcode) announced.
 * 'start' points just past the two-byte opcode/size header.
 */
static void check_size(const uint8_t *start, const uint8_t **tb_ptr)
{
    const uint8_t *op_start = start - 2;  /* back up to the opcode byte */
    uint8_t op_size = op_start[1];

    tci_assert(*tb_ptr == op_start + op_size);
}
/* Decode operands: one label/pointer. */
static void tci_args_l(const uint8_t **tb_ptr, void **l0)
{
    const uint8_t *begin = *tb_ptr;

    *l0 = (void *)tci_read_label(tb_ptr);

    check_size(begin, tb_ptr);
}
156 static void tci_args_rr(const uint8_t **tb_ptr
,
157 TCGReg
*r0
, TCGReg
*r1
)
159 const uint8_t *start
= *tb_ptr
;
161 *r0
= tci_read_r(tb_ptr
);
162 *r1
= tci_read_r(tb_ptr
);
164 check_size(start
, tb_ptr
);
167 static void tci_args_ri(const uint8_t **tb_ptr
,
168 TCGReg
*r0
, tcg_target_ulong
*i1
)
170 const uint8_t *start
= *tb_ptr
;
172 *r0
= tci_read_r(tb_ptr
);
173 *i1
= tci_read_i32(tb_ptr
);
175 check_size(start
, tb_ptr
);
178 #if TCG_TARGET_REG_BITS == 64
179 static void tci_args_rI(const uint8_t **tb_ptr
,
180 TCGReg
*r0
, tcg_target_ulong
*i1
)
182 const uint8_t *start
= *tb_ptr
;
184 *r0
= tci_read_r(tb_ptr
);
185 *i1
= tci_read_i(tb_ptr
);
187 check_size(start
, tb_ptr
);
191 static void tci_args_rrm(const uint8_t **tb_ptr
,
192 TCGReg
*r0
, TCGReg
*r1
, TCGMemOpIdx
*m2
)
194 const uint8_t *start
= *tb_ptr
;
196 *r0
= tci_read_r(tb_ptr
);
197 *r1
= tci_read_r(tb_ptr
);
198 *m2
= tci_read_i32(tb_ptr
);
200 check_size(start
, tb_ptr
);
203 static void tci_args_rrr(const uint8_t **tb_ptr
,
204 TCGReg
*r0
, TCGReg
*r1
, TCGReg
*r2
)
206 const uint8_t *start
= *tb_ptr
;
208 *r0
= tci_read_r(tb_ptr
);
209 *r1
= tci_read_r(tb_ptr
);
210 *r2
= tci_read_r(tb_ptr
);
212 check_size(start
, tb_ptr
);
215 static void tci_args_rrs(const uint8_t **tb_ptr
,
216 TCGReg
*r0
, TCGReg
*r1
, int32_t *i2
)
218 const uint8_t *start
= *tb_ptr
;
220 *r0
= tci_read_r(tb_ptr
);
221 *r1
= tci_read_r(tb_ptr
);
222 *i2
= tci_read_s32(tb_ptr
);
224 check_size(start
, tb_ptr
);
227 static void tci_args_rrcl(const uint8_t **tb_ptr
,
228 TCGReg
*r0
, TCGReg
*r1
, TCGCond
*c2
, void **l3
)
230 const uint8_t *start
= *tb_ptr
;
232 *r0
= tci_read_r(tb_ptr
);
233 *r1
= tci_read_r(tb_ptr
);
234 *c2
= tci_read_b(tb_ptr
);
235 *l3
= (void *)tci_read_label(tb_ptr
);
237 check_size(start
, tb_ptr
);
240 static void tci_args_rrrc(const uint8_t **tb_ptr
,
241 TCGReg
*r0
, TCGReg
*r1
, TCGReg
*r2
, TCGCond
*c3
)
243 const uint8_t *start
= *tb_ptr
;
245 *r0
= tci_read_r(tb_ptr
);
246 *r1
= tci_read_r(tb_ptr
);
247 *r2
= tci_read_r(tb_ptr
);
248 *c3
= tci_read_b(tb_ptr
);
250 check_size(start
, tb_ptr
);
253 static void tci_args_rrrm(const uint8_t **tb_ptr
,
254 TCGReg
*r0
, TCGReg
*r1
, TCGReg
*r2
, TCGMemOpIdx
*m3
)
256 const uint8_t *start
= *tb_ptr
;
258 *r0
= tci_read_r(tb_ptr
);
259 *r1
= tci_read_r(tb_ptr
);
260 *r2
= tci_read_r(tb_ptr
);
261 *m3
= tci_read_i32(tb_ptr
);
263 check_size(start
, tb_ptr
);
266 static void tci_args_rrrbb(const uint8_t **tb_ptr
, TCGReg
*r0
, TCGReg
*r1
,
267 TCGReg
*r2
, uint8_t *i3
, uint8_t *i4
)
269 const uint8_t *start
= *tb_ptr
;
271 *r0
= tci_read_r(tb_ptr
);
272 *r1
= tci_read_r(tb_ptr
);
273 *r2
= tci_read_r(tb_ptr
);
274 *i3
= tci_read_b(tb_ptr
);
275 *i4
= tci_read_b(tb_ptr
);
277 check_size(start
, tb_ptr
);
280 static void tci_args_rrrrm(const uint8_t **tb_ptr
, TCGReg
*r0
, TCGReg
*r1
,
281 TCGReg
*r2
, TCGReg
*r3
, TCGMemOpIdx
*m4
)
283 const uint8_t *start
= *tb_ptr
;
285 *r0
= tci_read_r(tb_ptr
);
286 *r1
= tci_read_r(tb_ptr
);
287 *r2
= tci_read_r(tb_ptr
);
288 *r3
= tci_read_r(tb_ptr
);
289 *m4
= tci_read_i32(tb_ptr
);
291 check_size(start
, tb_ptr
);
294 #if TCG_TARGET_REG_BITS == 32
295 static void tci_args_rrrr(const uint8_t **tb_ptr
,
296 TCGReg
*r0
, TCGReg
*r1
, TCGReg
*r2
, TCGReg
*r3
)
298 const uint8_t *start
= *tb_ptr
;
300 *r0
= tci_read_r(tb_ptr
);
301 *r1
= tci_read_r(tb_ptr
);
302 *r2
= tci_read_r(tb_ptr
);
303 *r3
= tci_read_r(tb_ptr
);
305 check_size(start
, tb_ptr
);
308 static void tci_args_rrrrcl(const uint8_t **tb_ptr
, TCGReg
*r0
, TCGReg
*r1
,
309 TCGReg
*r2
, TCGReg
*r3
, TCGCond
*c4
, void **l5
)
311 const uint8_t *start
= *tb_ptr
;
313 *r0
= tci_read_r(tb_ptr
);
314 *r1
= tci_read_r(tb_ptr
);
315 *r2
= tci_read_r(tb_ptr
);
316 *r3
= tci_read_r(tb_ptr
);
317 *c4
= tci_read_b(tb_ptr
);
318 *l5
= (void *)tci_read_label(tb_ptr
);
320 check_size(start
, tb_ptr
);
323 static void tci_args_rrrrrc(const uint8_t **tb_ptr
, TCGReg
*r0
, TCGReg
*r1
,
324 TCGReg
*r2
, TCGReg
*r3
, TCGReg
*r4
, TCGCond
*c5
)
326 const uint8_t *start
= *tb_ptr
;
328 *r0
= tci_read_r(tb_ptr
);
329 *r1
= tci_read_r(tb_ptr
);
330 *r2
= tci_read_r(tb_ptr
);
331 *r3
= tci_read_r(tb_ptr
);
332 *r4
= tci_read_r(tb_ptr
);
333 *c5
= tci_read_b(tb_ptr
);
335 check_size(start
, tb_ptr
);
338 static void tci_args_rrrrrr(const uint8_t **tb_ptr
, TCGReg
*r0
, TCGReg
*r1
,
339 TCGReg
*r2
, TCGReg
*r3
, TCGReg
*r4
, TCGReg
*r5
)
341 const uint8_t *start
= *tb_ptr
;
343 *r0
= tci_read_r(tb_ptr
);
344 *r1
= tci_read_r(tb_ptr
);
345 *r2
= tci_read_r(tb_ptr
);
346 *r3
= tci_read_r(tb_ptr
);
347 *r4
= tci_read_r(tb_ptr
);
348 *r5
= tci_read_r(tb_ptr
);
350 check_size(start
, tb_ptr
);
354 static bool tci_compare32(uint32_t u0
, uint32_t u1
, TCGCond condition
)
391 g_assert_not_reached();
396 static bool tci_compare64(uint64_t u0
, uint64_t u1
, TCGCond condition
)
433 g_assert_not_reached();
439 cpu_ldub_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
440 #define qemu_ld_leuw \
441 cpu_lduw_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
442 #define qemu_ld_leul \
443 cpu_ldl_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
444 #define qemu_ld_leq \
445 cpu_ldq_le_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
446 #define qemu_ld_beuw \
447 cpu_lduw_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
448 #define qemu_ld_beul \
449 cpu_ldl_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
450 #define qemu_ld_beq \
451 cpu_ldq_be_mmuidx_ra(env, taddr, get_mmuidx(oi), (uintptr_t)tb_ptr)
452 #define qemu_st_b(X) \
453 cpu_stb_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
454 #define qemu_st_lew(X) \
455 cpu_stw_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
456 #define qemu_st_lel(X) \
457 cpu_stl_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
458 #define qemu_st_leq(X) \
459 cpu_stq_le_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
460 #define qemu_st_bew(X) \
461 cpu_stw_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
462 #define qemu_st_bel(X) \
463 cpu_stl_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
464 #define qemu_st_beq(X) \
465 cpu_stq_be_mmuidx_ra(env, taddr, X, get_mmuidx(oi), (uintptr_t)tb_ptr)
467 #if TCG_TARGET_REG_BITS == 64
468 # define CASE_32_64(x) \
469 case glue(glue(INDEX_op_, x), _i64): \
470 case glue(glue(INDEX_op_, x), _i32):
471 # define CASE_64(x) \
472 case glue(glue(INDEX_op_, x), _i64):
474 # define CASE_32_64(x) \
475 case glue(glue(INDEX_op_, x), _i32):
479 /* Interpret pseudo code in tb. */
481 * Disable CFI checks.
482 * One possible operation in the pseudo code is a call to binary code.
483 * Therefore, disable CFI checks in the interpreter function
485 uintptr_t QEMU_DISABLE_CFI
tcg_qemu_tb_exec(CPUArchState
*env
,
486 const void *v_tb_ptr
)
488 const uint8_t *tb_ptr
= v_tb_ptr
;
489 tcg_target_ulong regs
[TCG_TARGET_NB_REGS
];
490 long tcg_temps
[CPU_TEMP_BUF_NLONGS
];
491 uintptr_t sp_value
= (uintptr_t)(tcg_temps
+ CPU_TEMP_BUF_NLONGS
);
493 regs
[TCG_AREG0
] = (tcg_target_ulong
)env
;
494 regs
[TCG_REG_CALL_STACK
] = sp_value
;
498 TCGOpcode opc
= tb_ptr
[0];
499 TCGReg r0
, r1
, r2
, r3
;
506 #if TCG_TARGET_REG_BITS == 32
514 /* Skip opcode and size entry. */
519 tci_args_l(&tb_ptr
, &ptr
);
520 tci_tb_ptr
= (uintptr_t)tb_ptr
;
521 #if TCG_TARGET_REG_BITS == 32
522 tmp64
= ((helper_function
)ptr
)(tci_read_reg(regs
, TCG_REG_R0
),
523 tci_read_reg(regs
, TCG_REG_R1
),
524 tci_read_reg(regs
, TCG_REG_R2
),
525 tci_read_reg(regs
, TCG_REG_R3
),
526 tci_read_reg(regs
, TCG_REG_R4
),
527 tci_read_reg(regs
, TCG_REG_R5
),
528 tci_read_reg(regs
, TCG_REG_R6
),
529 tci_read_reg(regs
, TCG_REG_R7
),
530 tci_read_reg(regs
, TCG_REG_R8
),
531 tci_read_reg(regs
, TCG_REG_R9
),
532 tci_read_reg(regs
, TCG_REG_R10
),
533 tci_read_reg(regs
, TCG_REG_R11
));
534 tci_write_reg(regs
, TCG_REG_R0
, tmp64
);
535 tci_write_reg(regs
, TCG_REG_R1
, tmp64
>> 32);
537 tmp64
= ((helper_function
)ptr
)(tci_read_reg(regs
, TCG_REG_R0
),
538 tci_read_reg(regs
, TCG_REG_R1
),
539 tci_read_reg(regs
, TCG_REG_R2
),
540 tci_read_reg(regs
, TCG_REG_R3
),
541 tci_read_reg(regs
, TCG_REG_R4
),
542 tci_read_reg(regs
, TCG_REG_R5
));
543 tci_write_reg(regs
, TCG_REG_R0
, tmp64
);
547 tci_args_l(&tb_ptr
, &ptr
);
550 case INDEX_op_setcond_i32
:
551 tci_args_rrrc(&tb_ptr
, &r0
, &r1
, &r2
, &condition
);
552 regs
[r0
] = tci_compare32(regs
[r1
], regs
[r2
], condition
);
554 #if TCG_TARGET_REG_BITS == 32
555 case INDEX_op_setcond2_i32
:
556 tci_args_rrrrrc(&tb_ptr
, &r0
, &r1
, &r2
, &r3
, &r4
, &condition
);
557 T1
= tci_uint64(regs
[r2
], regs
[r1
]);
558 T2
= tci_uint64(regs
[r4
], regs
[r3
]);
559 regs
[r0
] = tci_compare64(T1
, T2
, condition
);
561 #elif TCG_TARGET_REG_BITS == 64
562 case INDEX_op_setcond_i64
:
563 tci_args_rrrc(&tb_ptr
, &r0
, &r1
, &r2
, &condition
);
564 regs
[r0
] = tci_compare64(regs
[r1
], regs
[r2
], condition
);
568 tci_args_rr(&tb_ptr
, &r0
, &r1
);
571 case INDEX_op_tci_movi_i32
:
572 tci_args_ri(&tb_ptr
, &r0
, &t1
);
576 /* Load/store operations (32 bit). */
579 tci_args_rrs(&tb_ptr
, &r0
, &r1
, &ofs
);
580 ptr
= (void *)(regs
[r1
] + ofs
);
581 regs
[r0
] = *(uint8_t *)ptr
;
584 tci_args_rrs(&tb_ptr
, &r0
, &r1
, &ofs
);
585 ptr
= (void *)(regs
[r1
] + ofs
);
586 regs
[r0
] = *(int8_t *)ptr
;
589 tci_args_rrs(&tb_ptr
, &r0
, &r1
, &ofs
);
590 ptr
= (void *)(regs
[r1
] + ofs
);
591 regs
[r0
] = *(uint16_t *)ptr
;
594 tci_args_rrs(&tb_ptr
, &r0
, &r1
, &ofs
);
595 ptr
= (void *)(regs
[r1
] + ofs
);
596 regs
[r0
] = *(int16_t *)ptr
;
598 case INDEX_op_ld_i32
:
600 tci_args_rrs(&tb_ptr
, &r0
, &r1
, &ofs
);
601 ptr
= (void *)(regs
[r1
] + ofs
);
602 regs
[r0
] = *(uint32_t *)ptr
;
605 tci_args_rrs(&tb_ptr
, &r0
, &r1
, &ofs
);
606 ptr
= (void *)(regs
[r1
] + ofs
);
607 *(uint8_t *)ptr
= regs
[r0
];
610 tci_args_rrs(&tb_ptr
, &r0
, &r1
, &ofs
);
611 ptr
= (void *)(regs
[r1
] + ofs
);
612 *(uint16_t *)ptr
= regs
[r0
];
614 case INDEX_op_st_i32
:
616 tci_args_rrs(&tb_ptr
, &r0
, &r1
, &ofs
);
617 ptr
= (void *)(regs
[r1
] + ofs
);
618 *(uint32_t *)ptr
= regs
[r0
];
621 /* Arithmetic operations (mixed 32/64 bit). */
624 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
625 regs
[r0
] = regs
[r1
] + regs
[r2
];
628 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
629 regs
[r0
] = regs
[r1
] - regs
[r2
];
632 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
633 regs
[r0
] = regs
[r1
] * regs
[r2
];
636 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
637 regs
[r0
] = regs
[r1
] & regs
[r2
];
640 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
641 regs
[r0
] = regs
[r1
] | regs
[r2
];
644 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
645 regs
[r0
] = regs
[r1
] ^ regs
[r2
];
648 /* Arithmetic operations (32 bit). */
650 case INDEX_op_div_i32
:
651 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
652 regs
[r0
] = (int32_t)regs
[r1
] / (int32_t)regs
[r2
];
654 case INDEX_op_divu_i32
:
655 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
656 regs
[r0
] = (uint32_t)regs
[r1
] / (uint32_t)regs
[r2
];
658 case INDEX_op_rem_i32
:
659 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
660 regs
[r0
] = (int32_t)regs
[r1
] % (int32_t)regs
[r2
];
662 case INDEX_op_remu_i32
:
663 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
664 regs
[r0
] = (uint32_t)regs
[r1
] % (uint32_t)regs
[r2
];
667 /* Shift/rotate operations (32 bit). */
669 case INDEX_op_shl_i32
:
670 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
671 regs
[r0
] = (uint32_t)regs
[r1
] << (regs
[r2
] & 31);
673 case INDEX_op_shr_i32
:
674 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
675 regs
[r0
] = (uint32_t)regs
[r1
] >> (regs
[r2
] & 31);
677 case INDEX_op_sar_i32
:
678 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
679 regs
[r0
] = (int32_t)regs
[r1
] >> (regs
[r2
] & 31);
681 #if TCG_TARGET_HAS_rot_i32
682 case INDEX_op_rotl_i32
:
683 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
684 regs
[r0
] = rol32(regs
[r1
], regs
[r2
] & 31);
686 case INDEX_op_rotr_i32
:
687 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
688 regs
[r0
] = ror32(regs
[r1
], regs
[r2
] & 31);
691 #if TCG_TARGET_HAS_deposit_i32
692 case INDEX_op_deposit_i32
:
693 tci_args_rrrbb(&tb_ptr
, &r0
, &r1
, &r2
, &pos
, &len
);
694 regs
[r0
] = deposit32(regs
[r1
], pos
, len
, regs
[r2
]);
697 case INDEX_op_brcond_i32
:
698 tci_args_rrcl(&tb_ptr
, &r0
, &r1
, &condition
, &ptr
);
699 if (tci_compare32(regs
[r0
], regs
[r1
], condition
)) {
703 #if TCG_TARGET_REG_BITS == 32
704 case INDEX_op_add2_i32
:
705 tci_args_rrrrrr(&tb_ptr
, &r0
, &r1
, &r2
, &r3
, &r4
, &r5
);
706 T1
= tci_uint64(regs
[r3
], regs
[r2
]);
707 T2
= tci_uint64(regs
[r5
], regs
[r4
]);
708 tci_write_reg64(regs
, r1
, r0
, T1
+ T2
);
710 case INDEX_op_sub2_i32
:
711 tci_args_rrrrrr(&tb_ptr
, &r0
, &r1
, &r2
, &r3
, &r4
, &r5
);
712 T1
= tci_uint64(regs
[r3
], regs
[r2
]);
713 T2
= tci_uint64(regs
[r5
], regs
[r4
]);
714 tci_write_reg64(regs
, r1
, r0
, T1
- T2
);
716 case INDEX_op_brcond2_i32
:
717 tci_args_rrrrcl(&tb_ptr
, &r0
, &r1
, &r2
, &r3
, &condition
, &ptr
);
718 T1
= tci_uint64(regs
[r1
], regs
[r0
]);
719 T2
= tci_uint64(regs
[r3
], regs
[r2
]);
720 if (tci_compare64(T1
, T2
, condition
)) {
725 case INDEX_op_mulu2_i32
:
726 tci_args_rrrr(&tb_ptr
, &r0
, &r1
, &r2
, &r3
);
727 tci_write_reg64(regs
, r1
, r0
, (uint64_t)regs
[r2
] * regs
[r3
]);
729 #endif /* TCG_TARGET_REG_BITS == 32 */
730 #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
732 tci_args_rr(&tb_ptr
, &r0
, &r1
);
733 regs
[r0
] = (int8_t)regs
[r1
];
736 #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
738 tci_args_rr(&tb_ptr
, &r0
, &r1
);
739 regs
[r0
] = (int16_t)regs
[r1
];
742 #if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
744 tci_args_rr(&tb_ptr
, &r0
, &r1
);
745 regs
[r0
] = (uint8_t)regs
[r1
];
748 #if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
750 tci_args_rr(&tb_ptr
, &r0
, &r1
);
751 regs
[r0
] = (uint16_t)regs
[r1
];
754 #if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
756 tci_args_rr(&tb_ptr
, &r0
, &r1
);
757 regs
[r0
] = bswap16(regs
[r1
]);
760 #if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
762 tci_args_rr(&tb_ptr
, &r0
, &r1
);
763 regs
[r0
] = bswap32(regs
[r1
]);
766 #if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
768 tci_args_rr(&tb_ptr
, &r0
, &r1
);
769 regs
[r0
] = ~regs
[r1
];
772 #if TCG_TARGET_HAS_neg_i32 || TCG_TARGET_HAS_neg_i64
774 tci_args_rr(&tb_ptr
, &r0
, &r1
);
775 regs
[r0
] = -regs
[r1
];
778 #if TCG_TARGET_REG_BITS == 64
779 case INDEX_op_tci_movi_i64
:
780 tci_args_rI(&tb_ptr
, &r0
, &t1
);
784 /* Load/store operations (64 bit). */
786 case INDEX_op_ld32s_i64
:
787 tci_args_rrs(&tb_ptr
, &r0
, &r1
, &ofs
);
788 ptr
= (void *)(regs
[r1
] + ofs
);
789 regs
[r0
] = *(int32_t *)ptr
;
791 case INDEX_op_ld_i64
:
792 tci_args_rrs(&tb_ptr
, &r0
, &r1
, &ofs
);
793 ptr
= (void *)(regs
[r1
] + ofs
);
794 regs
[r0
] = *(uint64_t *)ptr
;
796 case INDEX_op_st_i64
:
797 tci_args_rrs(&tb_ptr
, &r0
, &r1
, &ofs
);
798 ptr
= (void *)(regs
[r1
] + ofs
);
799 *(uint64_t *)ptr
= regs
[r0
];
802 /* Arithmetic operations (64 bit). */
804 case INDEX_op_div_i64
:
805 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
806 regs
[r0
] = (int64_t)regs
[r1
] / (int64_t)regs
[r2
];
808 case INDEX_op_divu_i64
:
809 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
810 regs
[r0
] = (uint64_t)regs
[r1
] / (uint64_t)regs
[r2
];
812 case INDEX_op_rem_i64
:
813 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
814 regs
[r0
] = (int64_t)regs
[r1
] % (int64_t)regs
[r2
];
816 case INDEX_op_remu_i64
:
817 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
818 regs
[r0
] = (uint64_t)regs
[r1
] % (uint64_t)regs
[r2
];
821 /* Shift/rotate operations (64 bit). */
823 case INDEX_op_shl_i64
:
824 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
825 regs
[r0
] = regs
[r1
] << (regs
[r2
] & 63);
827 case INDEX_op_shr_i64
:
828 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
829 regs
[r0
] = regs
[r1
] >> (regs
[r2
] & 63);
831 case INDEX_op_sar_i64
:
832 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
833 regs
[r0
] = (int64_t)regs
[r1
] >> (regs
[r2
] & 63);
835 #if TCG_TARGET_HAS_rot_i64
836 case INDEX_op_rotl_i64
:
837 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
838 regs
[r0
] = rol64(regs
[r1
], regs
[r2
] & 63);
840 case INDEX_op_rotr_i64
:
841 tci_args_rrr(&tb_ptr
, &r0
, &r1
, &r2
);
842 regs
[r0
] = ror64(regs
[r1
], regs
[r2
] & 63);
845 #if TCG_TARGET_HAS_deposit_i64
846 case INDEX_op_deposit_i64
:
847 tci_args_rrrbb(&tb_ptr
, &r0
, &r1
, &r2
, &pos
, &len
);
848 regs
[r0
] = deposit64(regs
[r1
], pos
, len
, regs
[r2
]);
851 case INDEX_op_brcond_i64
:
852 tci_args_rrcl(&tb_ptr
, &r0
, &r1
, &condition
, &ptr
);
853 if (tci_compare64(regs
[r0
], regs
[r1
], condition
)) {
857 case INDEX_op_ext32s_i64
:
858 case INDEX_op_ext_i32_i64
:
859 tci_args_rr(&tb_ptr
, &r0
, &r1
);
860 regs
[r0
] = (int32_t)regs
[r1
];
862 case INDEX_op_ext32u_i64
:
863 case INDEX_op_extu_i32_i64
:
864 tci_args_rr(&tb_ptr
, &r0
, &r1
);
865 regs
[r0
] = (uint32_t)regs
[r1
];
867 #if TCG_TARGET_HAS_bswap64_i64
868 case INDEX_op_bswap64_i64
:
869 tci_args_rr(&tb_ptr
, &r0
, &r1
);
870 regs
[r0
] = bswap64(regs
[r1
]);
873 #endif /* TCG_TARGET_REG_BITS == 64 */
875 /* QEMU specific operations. */
877 case INDEX_op_exit_tb
:
878 tci_args_l(&tb_ptr
, &ptr
);
879 return (uintptr_t)ptr
;
881 case INDEX_op_goto_tb
:
882 tci_args_l(&tb_ptr
, &ptr
);
883 tb_ptr
= *(void **)ptr
;
886 case INDEX_op_qemu_ld_i32
:
887 if (TARGET_LONG_BITS
<= TCG_TARGET_REG_BITS
) {
888 tci_args_rrm(&tb_ptr
, &r0
, &r1
, &oi
);
891 tci_args_rrrm(&tb_ptr
, &r0
, &r1
, &r2
, &oi
);
892 taddr
= tci_uint64(regs
[r2
], regs
[r1
]);
894 switch (get_memop(oi
) & (MO_BSWAP
| MO_SSIZE
)) {
899 tmp32
= (int8_t)qemu_ld_ub
;
902 tmp32
= qemu_ld_leuw
;
905 tmp32
= (int16_t)qemu_ld_leuw
;
908 tmp32
= qemu_ld_leul
;
911 tmp32
= qemu_ld_beuw
;
914 tmp32
= (int16_t)qemu_ld_beuw
;
917 tmp32
= qemu_ld_beul
;
920 g_assert_not_reached();
925 case INDEX_op_qemu_ld_i64
:
926 if (TCG_TARGET_REG_BITS
== 64) {
927 tci_args_rrm(&tb_ptr
, &r0
, &r1
, &oi
);
929 } else if (TARGET_LONG_BITS
<= TCG_TARGET_REG_BITS
) {
930 tci_args_rrrm(&tb_ptr
, &r0
, &r1
, &r2
, &oi
);
933 tci_args_rrrrm(&tb_ptr
, &r0
, &r1
, &r2
, &r3
, &oi
);
934 taddr
= tci_uint64(regs
[r3
], regs
[r2
]);
936 switch (get_memop(oi
) & (MO_BSWAP
| MO_SSIZE
)) {
941 tmp64
= (int8_t)qemu_ld_ub
;
944 tmp64
= qemu_ld_leuw
;
947 tmp64
= (int16_t)qemu_ld_leuw
;
950 tmp64
= qemu_ld_leul
;
953 tmp64
= (int32_t)qemu_ld_leul
;
959 tmp64
= qemu_ld_beuw
;
962 tmp64
= (int16_t)qemu_ld_beuw
;
965 tmp64
= qemu_ld_beul
;
968 tmp64
= (int32_t)qemu_ld_beul
;
974 g_assert_not_reached();
976 if (TCG_TARGET_REG_BITS
== 32) {
977 tci_write_reg64(regs
, r1
, r0
, tmp64
);
983 case INDEX_op_qemu_st_i32
:
984 if (TARGET_LONG_BITS
<= TCG_TARGET_REG_BITS
) {
985 tci_args_rrm(&tb_ptr
, &r0
, &r1
, &oi
);
988 tci_args_rrrm(&tb_ptr
, &r0
, &r1
, &r2
, &oi
);
989 taddr
= tci_uint64(regs
[r2
], regs
[r1
]);
992 switch (get_memop(oi
) & (MO_BSWAP
| MO_SIZE
)) {
1009 g_assert_not_reached();
1013 case INDEX_op_qemu_st_i64
:
1014 if (TCG_TARGET_REG_BITS
== 64) {
1015 tci_args_rrm(&tb_ptr
, &r0
, &r1
, &oi
);
1019 if (TARGET_LONG_BITS
<= TCG_TARGET_REG_BITS
) {
1020 tci_args_rrrm(&tb_ptr
, &r0
, &r1
, &r2
, &oi
);
1023 tci_args_rrrrm(&tb_ptr
, &r0
, &r1
, &r2
, &r3
, &oi
);
1024 taddr
= tci_uint64(regs
[r3
], regs
[r2
]);
1026 tmp64
= tci_uint64(regs
[r1
], regs
[r0
]);
1028 switch (get_memop(oi
) & (MO_BSWAP
| MO_SIZE
)) {
1051 g_assert_not_reached();
1056 /* Ensure ordering for all kinds */
1060 g_assert_not_reached();