2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
5 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
6 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
21 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
/* Generated code must byte-swap memory accesses whenever the host
   (TCG target) endianness differs from the guest endianness. */
#if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
# define TCG_NEED_BSWAP 0
#else
# define TCG_NEED_BSWAP 1
#endif
/* Symbolic names of the MIPS integer registers, indexed by register
   number; used only for debug output.
   NOTE(review): the table entries were lost in extraction and are
   reconstructed from the conventional o32 register naming — verify
   against upstream history. */
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "zero", "at",   "v0",   "v1",
    "a0",   "a1",   "a2",   "a3",
    "t0",   "t1",   "t2",   "t3",
    "t4",   "t5",   "t6",   "t7",
    "s0",   "s1",   "s2",   "s3",
    "s4",   "s5",   "s6",   "s7",
    "t8",   "t9",   "k0",   "k1",
    "gp",   "sp",   "fp",   "ra",
};
/* check if we really need so many registers :P */
/* Register allocation preference order: call-saved registers first so
   values survive helper calls, then caller-saved temporaries.
   NOTE(review): the entries were lost in extraction and are
   reconstructed from upstream — verify against history. */
static const int tcg_target_reg_alloc_order[] = {
    TCG_REG_S0,
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_T1,
    TCG_REG_T2,
    TCG_REG_T3,
    TCG_REG_T4,
    TCG_REG_T5,
    TCG_REG_T6,
    TCG_REG_T7,
    TCG_REG_T8,
    TCG_REG_T9,
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3,
    TCG_REG_V0,
    TCG_REG_V1
};
/* Registers carrying the first four words of call arguments (MIPS o32
   calling convention). */
static const int tcg_target_call_iarg_regs[4] = {
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3
};
/* Registers carrying call return values (lo word in v0, hi word in v1). */
static const int tcg_target_call_oarg_regs[2] = {
    TCG_REG_V0,
    TCG_REG_V1
};
/* Address inside the generated prologue that INDEX_op_exit_tb jumps
   back to (set when the prologue is emitted). */
static uint8_t *tb_ret_addr;
111 static inline uint32_t reloc_lo16_val (void *pc
, tcg_target_long target
)
113 return target
& 0xffff;
116 static inline void reloc_lo16 (void *pc
, tcg_target_long target
)
118 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
119 | reloc_lo16_val(pc
, target
);
122 static inline uint32_t reloc_hi16_val (void *pc
, tcg_target_long target
)
124 return (target
>> 16) & 0xffff;
127 static inline void reloc_hi16 (void *pc
, tcg_target_long target
)
129 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
130 | reloc_hi16_val(pc
, target
);
/* Compute the 16-bit branch offset field for a branch at PC targeting
   TARGET.  The displacement is measured from the delay slot (pc + 4)
   and is stored as a word (>> 2) offset; it must fit in 18 signed bits
   of byte displacement, which the shift round-trip checks. */
static inline uint32_t reloc_pc16_val (void *pc, tcg_target_long target)
{
    int32_t disp;

    disp = target - (tcg_target_long) pc - 4;
    if (disp != (disp << 14) >> 14) {
        /* Branch target out of the reachable +/-128KB range. */
        tcg_abort ();
    }

    return (disp >> 2) & 0xffff;
}
145 static inline void reloc_pc16 (void *pc
, tcg_target_long target
)
147 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
148 | reloc_pc16_val(pc
, target
);
/* Compute the 26-bit target field of a J/JAL instruction at PC jumping
   to TARGET.  Such jumps can only reach addresses in the same 256MB
   region as the delay slot (pc + 4). */
static inline uint32_t reloc_26_val (void *pc, tcg_target_long target)
{
    if ((((tcg_target_long)pc + 4) & 0xf0000000) != (target & 0xf0000000)) {
        /* Target lies outside the current 256MB region. */
        tcg_abort ();
    }

    return (target >> 2) & 0x3ffffff;
}
160 static inline void reloc_pc26 (void *pc
, tcg_target_long target
)
162 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0x3ffffff)
163 | reloc_26_val(pc
, target
);
/* TCG backend hook: resolve the relocation of kind TYPE at CODE_PTR so
   the instruction refers to VALUE.  ADDEND is unused here — presumably
   always 0 for these relocation kinds; TODO confirm against callers. */
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
{
    switch (type) {
    case R_MIPS_LO16:
        reloc_lo16(code_ptr, value);
        break;
    case R_MIPS_HI16:
        reloc_hi16(code_ptr, value);
        break;
    case R_MIPS_PC16:
        reloc_pc16(code_ptr, value);
        break;
    case R_MIPS_26:
        reloc_pc26(code_ptr, value);
        break;
    default:
        tcg_abort();
    }
}
/* maximum number of register used for input function arguments */
/* Four words of arguments travel in registers (a0-a3) under o32;
   FLAGS is ignored on this target. */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    return 4;
}
/* parse target specific constraints */
/* Translate one constraint letter from *PCT_STR into CT and advance the
   string.  Returns 0 on success, -1 on an unknown letter.  The register
   exclusions in 'l'/'S' keep qemu_ld/st inputs clear of the argument
   registers that the slow-path helper call will clobber. */
static int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
{
    const char *ct_str;

    ct_str = *pct_str;
    switch (ct_str[0]) {
    case 'r':
        /* Any general purpose register. */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        break;
    case 'C':
        /* Call address: must be t9 per the MIPS PIC calling convention. */
        ct->ct |= TCG_CT_REG;
        tcg_regset_clear(ct->u.regs);
        tcg_regset_set_reg(ct->u.regs, TCG_REG_T9);
        break;
    case 'L': /* qemu_ld output arg constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        /* v0 holds the helper return value. */
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_V0);
        break;
    case 'l': /* qemu_ld input arg constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
#if defined(CONFIG_SOFTMMU)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
# if defined(CONFIG_TCG_PASS_AREG0) && (TARGET_LONG_BITS == 64)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
# endif
#endif
        break;
    case 'S': /* qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set(ct->u.regs, 0xffffffff);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A0);
#if defined(CONFIG_SOFTMMU)
# if (defined(CONFIG_TCG_PASS_AREG0) && TARGET_LONG_BITS == 32) || \
     (!defined(CONFIG_TCG_PASS_AREG0) && TARGET_LONG_BITS == 64)
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A1);
# endif
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A2);
# if defined(CONFIG_TCG_PASS_AREG0) && TARGET_LONG_BITS == 64
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_A3);
# endif
#endif
        break;
    case 'I':
        /* Unsigned 16-bit immediate (andi/ori/xori). */
        ct->ct |= TCG_CT_CONST_U16;
        break;
    case 'J':
        /* Signed 16-bit immediate (addiu/slti). */
        ct->ct |= TCG_CT_CONST_S16;
        break;
    case 'Z':
        /* We are cheating a bit here, using the fact that the register
           ZERO is also the register number 0. Hence there is no need
           to check for const_args in each instruction. */
        ct->ct |= TCG_CT_CONST_ZERO;
        break;
    default:
        return -1;
    }
    ct_str++;
    *pct_str = ct_str;
    return 0;
}
/* test if a constant matches the constraint */
/* Returns non-zero when VAL satisfies ARG_CT: any constant, exact zero,
   an unsigned 16-bit value, or a signed 16-bit value. */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
{
    int ct;

    ct = arg_ct->ct;
    if (ct & TCG_CT_CONST)
        return 1;
    else if ((ct & TCG_CT_CONST_ZERO) && val == 0)
        return 1;
    else if ((ct & TCG_CT_CONST_U16) && val == (uint16_t)val)
        return 1;
    else if ((ct & TCG_CT_CONST_S16) && val == (int16_t)val)
        return 1;
    else
        return 0;
}
/* instruction opcodes */
/* MIPS32 instruction encodings.  I-type opcodes live in bits 31-26;
   SPECIAL/SPECIAL3 instructions put a function code in the low bits.
   NOTE(review): the LB/LH/LW/SB/SH/SW entries were lost in extraction
   and are restored per the architectural encodings (they are used by
   the load/store emitters below). */
enum {
    OPC_BEQ      = 0x04 << 26,
    OPC_BNE      = 0x05 << 26,
    OPC_ADDIU    = 0x09 << 26,
    OPC_SLTI     = 0x0A << 26,
    OPC_SLTIU    = 0x0B << 26,
    OPC_ANDI     = 0x0C << 26,
    OPC_ORI      = 0x0D << 26,
    OPC_XORI     = 0x0E << 26,
    OPC_LUI      = 0x0F << 26,
    OPC_LB       = 0x20 << 26,
    OPC_LH       = 0x21 << 26,
    OPC_LW       = 0x23 << 26,
    OPC_LBU      = 0x24 << 26,
    OPC_LHU      = 0x25 << 26,
    OPC_LWU      = 0x27 << 26,
    OPC_SB       = 0x28 << 26,
    OPC_SH       = 0x29 << 26,
    OPC_SW       = 0x2B << 26,

    OPC_SPECIAL  = 0x00 << 26,
    OPC_SLL      = OPC_SPECIAL | 0x00,
    OPC_SRL      = OPC_SPECIAL | 0x02,
    OPC_SRA      = OPC_SPECIAL | 0x03,
    OPC_SLLV     = OPC_SPECIAL | 0x04,
    OPC_SRLV     = OPC_SPECIAL | 0x06,
    OPC_SRAV     = OPC_SPECIAL | 0x07,
    OPC_JR       = OPC_SPECIAL | 0x08,
    OPC_JALR     = OPC_SPECIAL | 0x09,
    OPC_MFHI     = OPC_SPECIAL | 0x10,
    OPC_MFLO     = OPC_SPECIAL | 0x12,
    OPC_MULT     = OPC_SPECIAL | 0x18,
    OPC_MULTU    = OPC_SPECIAL | 0x19,
    OPC_DIV      = OPC_SPECIAL | 0x1A,
    OPC_DIVU     = OPC_SPECIAL | 0x1B,
    OPC_ADDU     = OPC_SPECIAL | 0x21,
    OPC_SUBU     = OPC_SPECIAL | 0x23,
    OPC_AND      = OPC_SPECIAL | 0x24,
    OPC_OR       = OPC_SPECIAL | 0x25,
    OPC_XOR      = OPC_SPECIAL | 0x26,
    OPC_NOR      = OPC_SPECIAL | 0x27,
    OPC_SLT      = OPC_SPECIAL | 0x2A,
    OPC_SLTU     = OPC_SPECIAL | 0x2B,

    /* MIPS32R2 sign-extension instructions (SPECIAL3). */
    OPC_SPECIAL3 = 0x1f << 26,
    OPC_SEB      = OPC_SPECIAL3 | 0x420,
    OPC_SEH      = OPC_SPECIAL3 | 0x620,
};
/* Emit an R-type instruction: OPC with destination RD and sources
   RS, RT packed into the standard register fields. */
static inline void tcg_out_opc_reg(TCGContext *s, int opc, int rd, int rs, int rt)
{
    int32_t inst;

    inst = opc;
    inst |= (rs & 0x1F) << 21;
    inst |= (rt & 0x1F) << 16;
    inst |= (rd & 0x1F) << 11;
    tcg_out32(s, inst);
}
/* Emit an I-type instruction: OPC with target register RT, source RS
   and a 16-bit immediate IMM. */
static inline void tcg_out_opc_imm(TCGContext *s, int opc, int rt, int rs, int imm)
{
    int32_t inst;

    inst = opc;
    inst |= (rs & 0x1F) << 21;
    inst |= (rt & 0x1F) << 16;
    inst |= (imm & 0xffff);
    tcg_out32(s, inst);
}
/* Emit a branch instruction whose 16-bit offset field is patched later
   (via reloc_pc16 or tcg_out_reloc). */
static inline void tcg_out_opc_br(TCGContext *s, int opc, int rt, int rs)
{
    /* We pay attention here to not modify the branch target by reading
       the existing value and using it again. This ensures that caches and
       memory are kept coherent during retranslation. */
    uint16_t offset = (uint16_t)(*(uint32_t *) s->code_ptr);

    tcg_out_opc_imm(s, opc, rt, rs, offset);
}
/* Emit a shift-by-immediate instruction: OPC with destination RD,
   source RT and shift amount SA. */
static inline void tcg_out_opc_sa(TCGContext *s, int opc, int rd, int rt, int sa)
{
    int32_t inst;

    inst = opc;
    inst |= (rt & 0x1F) << 16;
    inst |= (rd & 0x1F) << 11;
    inst |= (sa & 0x1F) << 6;
    tcg_out32(s, inst);
}
/* Emit a nop (encoding 0 == sll zero, zero, 0); used to fill branch
   delay slots. */
static inline void tcg_out_nop(TCGContext *s)
{
    tcg_out32(s, 0);
}
/* Register-to-register move, implemented as addu ret, arg, zero. */
static inline void tcg_out_mov(TCGContext *s, TCGType type,
                               TCGReg ret, TCGReg arg)
{
    /* Simple reg-reg move, optimising out the 'do nothing' case */
    if (ret != arg) {
        tcg_out_opc_reg(s, OPC_ADDU, ret, arg, TCG_REG_ZERO);
    }
}
/* Load the constant ARG into REG using the shortest sequence:
   one addiu for signed 16-bit values, one ori for unsigned 16-bit
   values, otherwise the classic lui + ori pair. */
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                TCGReg reg, tcg_target_long arg)
{
    if (arg == (int16_t)arg) {
        tcg_out_opc_imm(s, OPC_ADDIU, reg, TCG_REG_ZERO, arg);
    } else if (arg == (uint16_t)arg) {
        tcg_out_opc_imm(s, OPC_ORI, reg, TCG_REG_ZERO, arg);
    } else {
        tcg_out_opc_imm(s, OPC_LUI, reg, 0, arg >> 16);
        tcg_out_opc_imm(s, OPC_ORI, reg, reg, arg & 0xffff);
    }
}
/* Byte-swap the low 16 bits of ARG into RET (upper bits of RET are
   cleared).  Uses AT as scratch, so neither operand may be AT. */
static inline void tcg_out_bswap16(TCGContext *s, int ret, int arg)
{
    /* ret and arg can't be register at */
    if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    /* AT = high byte moved down. */
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0x00ff);

    /* ret = low byte moved up, then combine. */
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, ret, ret, 0xff00);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
}
/* Byte-swap the low 16 bits of ARG into RET and sign-extend the result
   to 32 bits.  Uses AT as scratch, so neither operand may be AT. */
static inline void tcg_out_bswap16s(TCGContext *s, int ret, int arg)
{
    /* ret and arg can't be register at */
    if (ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    /* AT = high byte moved down. */
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff);

    /* ret = low byte in bits 31-24, arithmetic shift down sign-extends. */
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
}
/* Byte-swap the 32-bit value in ARG into RET, one byte at a time via
   AT.  RET is built incrementally, so RET must differ from ARG and
   neither may be AT. */
static inline void tcg_out_bswap32(TCGContext *s, int ret, int arg)
{
    /* ret and arg must be different and can't be register at */
    if (ret == arg || ret == TCG_REG_AT || arg == TCG_REG_AT) {
        tcg_abort();
    }

    /* Byte 0 -> byte 3. */
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);

    /* Byte 3 -> byte 0. */
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 24);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);

    /* Byte 1 -> byte 2. */
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, arg, 0xff00);
    tcg_out_opc_sa(s, OPC_SLL, TCG_REG_AT, TCG_REG_AT, 8);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);

    /* Byte 2 -> byte 1. */
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_AT, arg, 8);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_AT, TCG_REG_AT, 0xff00);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
}
/* Sign-extend the low 8 bits of ARG into RET: seb on MIPS32R2,
   otherwise a shift-left/shift-right-arithmetic pair. */
static inline void tcg_out_ext8s(TCGContext *s, int ret, int arg)
{
#ifdef _MIPS_ARCH_MIPS32R2
    tcg_out_opc_reg(s, OPC_SEB, ret, 0, arg);
#else
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 24);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 24);
#endif
}
/* Sign-extend the low 16 bits of ARG into RET: seh on MIPS32R2,
   otherwise a shift-left/shift-right-arithmetic pair. */
static inline void tcg_out_ext16s(TCGContext *s, int ret, int arg)
{
#ifdef _MIPS_ARCH_MIPS32R2
    tcg_out_opc_reg(s, OPC_SEH, ret, 0, arg);
#else
    tcg_out_opc_sa(s, OPC_SLL, ret, arg, 16);
    tcg_out_opc_sa(s, OPC_SRA, ret, ret, 16);
#endif
}
/* Emit a load or store (OPC) of register ARG at ARG1 + ARG2.  When the
   displacement does not fit the 16-bit offset field, materialize it in
   AT and fold the base register in. */
static inline void tcg_out_ldst(TCGContext *s, int opc, int arg,
                                int arg1, tcg_target_long arg2)
{
    if (arg2 == (int16_t) arg2) {
        tcg_out_opc_imm(s, opc, arg, arg1, arg2);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, arg2);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, TCG_REG_AT, arg1);
        tcg_out_opc_imm(s, opc, arg, TCG_REG_AT, 0);
    }
}
/* TCG backend hook: 32-bit load of ARG from ARG1 + ARG2. */
static inline void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, OPC_LW, arg, arg1, arg2);
}
/* TCG backend hook: 32-bit store of ARG to ARG1 + ARG2. */
static inline void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                              TCGReg arg1, tcg_target_long arg2)
{
    tcg_out_ldst(s, OPC_SW, arg, arg1, arg2);
}
/* Add the constant VAL to REG in place; uses addiu when VAL fits the
   signed 16-bit immediate, otherwise goes through AT. */
static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
{
    if (val == (int16_t)val) {
        tcg_out_opc_imm(s, OPC_ADDIU, reg, reg, val);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT, val);
        tcg_out_opc_reg(s, OPC_ADDU, reg, reg, TCG_REG_AT);
    }
}
/* Helper routines for marshalling helper function arguments into
 * the correct registers and stack.
 * arg_num is where we want to put this argument, and is updated to be ready
 * for the next call. arg is the argument itself. Note that arg_num 0..3 is
 * real registers, 4+ on stack.
 *
 * We provide routines for arguments which are: immediate, 32 bit
 * value in register, 16 and 8 bit values in register (which must be zero
 * extended before use) and 64 bit value in a lo:hi register pair.
 */
/* The macro expands to one marshalling function per argument flavour;
   DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) supplies the code that places the
   (possibly masked) value into register A. */
#define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM)                        \
static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM)          \
{                                                                       \
    if (*arg_num < 4) {                                                 \
        DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
    } else {                                                            \
        DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT);                   \
        tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
    }                                                                   \
    (*arg_num)++;                                                       \
}
/* 8-bit register argument: zero-extend with andi 0xff. */
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8, TCGReg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
/* 16-bit register argument: zero-extend with andi 0xffff. */
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16, TCGReg arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
/* 32-bit immediate argument: materialize the constant. */
#define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
    tcg_out_movi(s, TCG_TYPE_I32, A, arg);
DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32, uint32_t arg)
#undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
/* We don't use the macro for this one to avoid an unnecessary reg-reg
   move when storing to the stack. */
/* Marshal a 32-bit register argument: move into the next argument
   register, or store directly to the outgoing stack slot. */
static inline void tcg_out_call_iarg_reg32(TCGContext *s, int *arg_num,
                                           TCGReg arg)
{
    if (*arg_num < 4) {
        tcg_out_mov(s, TCG_TYPE_I32, tcg_target_call_iarg_regs[*arg_num], arg);
    } else {
        tcg_out_st(s, TCG_TYPE_I32, arg, TCG_REG_SP, 4 * (*arg_num));
    }
    (*arg_num)++;
}
/* Marshal a 64-bit argument held in a lo:hi register pair.  The o32
   ABI requires 64-bit arguments to start on an even register/slot, so
   round *ARG_NUM up first; word order follows host endianness. */
static inline void tcg_out_call_iarg_reg64(TCGContext *s, int *arg_num,
                                           TCGReg arg_low, TCGReg arg_high)
{
    (*arg_num) = (*arg_num + 1) & ~1;

#if defined(TCG_TARGET_WORDS_BIGENDIAN)
    tcg_out_call_iarg_reg32(s, arg_num, arg_high);
    tcg_out_call_iarg_reg32(s, arg_num, arg_low);
#else
    tcg_out_call_iarg_reg32(s, arg_num, arg_low);
    tcg_out_call_iarg_reg32(s, arg_num, arg_high);
#endif
}
/* Emit a conditional branch on (ARG1 cond ARG2) to LABEL_INDEX.
   EQ/NE map directly onto beq/bne; the ordered conditions compute a
   0/1 flag in AT with slt/sltu (operand order and beq-vs-bne chosen
   per condition) and branch on it.  The branch offset is relocated
   immediately if the label is already resolved, otherwise a reloc is
   queued; the delay slot is filled with a nop. */
static void tcg_out_brcond(TCGContext *s, TCGCond cond, int arg1,
                           int arg2, int label_index)
{
    TCGLabel *l = &s->labels[label_index];

    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_opc_br(s, OPC_BEQ, arg1, arg2);
        break;
    case TCG_COND_NE:
        tcg_out_opc_br(s, OPC_BNE, arg1, arg2);
        break;
    case TCG_COND_LT:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
        tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
        tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_GE:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg1, arg2);
        tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg1, arg2);
        tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_LE:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
        tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
        tcg_out_opc_br(s, OPC_BEQ, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_GT:
        tcg_out_opc_reg(s, OPC_SLT, TCG_REG_AT, arg2, arg1);
        tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_AT, arg2, arg1);
        tcg_out_opc_br(s, OPC_BNE, TCG_REG_AT, TCG_REG_ZERO);
        break;
    default:
        tcg_abort();
        break;
    }
    if (l->has_value) {
        reloc_pc16(s->code_ptr - 4, l->u.value);
    } else {
        tcg_out_reloc(s, s->code_ptr - 4, R_MIPS_PC16, label_index, 0);
    }
    tcg_out_nop(s);
}
/* XXX: we implement it at the target level to avoid having to
   handle cross basic blocks temporaries */
/* 64-bit conditional branch on the pair (ARG2:ARG1) vs (ARG4:ARG3).
   NE decomposes into two independent branches.  For the ordered
   conditions: first branch if the high words already decide the
   comparison, then (when the high words are equal, checked via a local
   bne skip) compare the low words unsigned. */
static void tcg_out_brcond2(TCGContext *s, TCGCond cond, int arg1,
                            int arg2, int arg3, int arg4, int label_index)
{
    void *label_ptr;

    switch (cond) {
    case TCG_COND_NE:
        tcg_out_brcond(s, TCG_COND_NE, arg2, arg4, label_index);
        tcg_out_brcond(s, TCG_COND_NE, arg1, arg3, label_index);
        return;
    case TCG_COND_LT:
    case TCG_COND_LE:
        tcg_out_brcond(s, TCG_COND_LT, arg2, arg4, label_index);
        break;
    case TCG_COND_GT:
    case TCG_COND_GE:
        tcg_out_brcond(s, TCG_COND_GT, arg2, arg4, label_index);
        break;
    case TCG_COND_LTU:
    case TCG_COND_LEU:
        tcg_out_brcond(s, TCG_COND_LTU, arg2, arg4, label_index);
        break;
    case TCG_COND_GTU:
    case TCG_COND_GEU:
        tcg_out_brcond(s, TCG_COND_GTU, arg2, arg4, label_index);
        break;
    default:
        break;
    }

    /* Skip the low-word comparison when the high words differ. */
    label_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, arg2, arg4);
    tcg_out_nop(s);

    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_brcond(s, TCG_COND_EQ, arg1, arg3, label_index);
        break;
    case TCG_COND_LT:
    case TCG_COND_LTU:
        tcg_out_brcond(s, TCG_COND_LTU, arg1, arg3, label_index);
        break;
    case TCG_COND_LE:
    case TCG_COND_LEU:
        tcg_out_brcond(s, TCG_COND_LEU, arg1, arg3, label_index);
        break;
    case TCG_COND_GT:
    case TCG_COND_GTU:
        tcg_out_brcond(s, TCG_COND_GTU, arg1, arg3, label_index);
        break;
    case TCG_COND_GE:
    case TCG_COND_GEU:
        tcg_out_brcond(s, TCG_COND_GEU, arg1, arg3, label_index);
        break;
    default:
        tcg_abort();
    }

    reloc_pc16(label_ptr, (tcg_target_long) s->code_ptr);
}
/* Set RET to 1 if (ARG1 cond ARG2) holds, else 0.  EQ/NE special-case
   a zero operand; otherwise xor + sltiu/sltu computes the flag.  The
   ordered conditions use slt/sltu, inverting the result with xori 1 or
   swapping operands as required. */
static void tcg_out_setcond(TCGContext *s, TCGCond cond, int ret,
                            int arg1, int arg2)
{
    switch (cond) {
    case TCG_COND_EQ:
        if (arg1 == 0) {
            tcg_out_opc_imm(s, OPC_SLTIU, ret, arg2, 1);
        } else if (arg2 == 0) {
            tcg_out_opc_imm(s, OPC_SLTIU, ret, arg1, 1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
            tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
        }
        break;
    case TCG_COND_NE:
        if (arg1 == 0) {
            tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg2);
        } else if (arg2 == 0) {
            tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, arg1);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, ret, arg1, arg2);
            tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
        }
        break;
    case TCG_COND_LT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        break;
    case TCG_COND_GE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        break;
    default:
        tcg_abort();
        break;
    }
}
/* XXX: we implement it at the target level to avoid having to
   handle cross basic blocks temporaries */
/* Set RET to the 64-bit comparison (ARG2:ARG1 cond ARG4:ARG3).
   EQ/NE combine the per-word results with and/or.  The ordered
   conditions compute "high words decide" in AT and "high words equal"
   in T0, then merge with the unsigned low-word result:
   ret = (low_result & high_equal) | high_decides. */
static void tcg_out_setcond2(TCGContext *s, TCGCond cond, int ret,
                             int arg1, int arg2, int arg3, int arg4)
{
    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_AT, arg2, arg4);
        tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg1, arg3);
        tcg_out_opc_reg(s, OPC_AND, ret, TCG_REG_AT, TCG_REG_T0);
        return;
    case TCG_COND_NE:
        tcg_out_setcond(s, TCG_COND_NE, TCG_REG_AT, arg2, arg4);
        tcg_out_setcond(s, TCG_COND_NE, TCG_REG_T0, arg1, arg3);
        tcg_out_opc_reg(s, OPC_OR, ret, TCG_REG_AT, TCG_REG_T0);
        return;
    case TCG_COND_LT:
    case TCG_COND_LE:
        tcg_out_setcond(s, TCG_COND_LT, TCG_REG_AT, arg2, arg4);
        break;
    case TCG_COND_GT:
    case TCG_COND_GE:
        tcg_out_setcond(s, TCG_COND_GT, TCG_REG_AT, arg2, arg4);
        break;
    case TCG_COND_LTU:
    case TCG_COND_LEU:
        tcg_out_setcond(s, TCG_COND_LTU, TCG_REG_AT, arg2, arg4);
        break;
    case TCG_COND_GTU:
    case TCG_COND_GEU:
        tcg_out_setcond(s, TCG_COND_GTU, TCG_REG_AT, arg2, arg4);
        break;
    default:
        tcg_abort();
        break;
    }

    /* T0 = 1 iff the high words are equal. */
    tcg_out_setcond(s, TCG_COND_EQ, TCG_REG_T0, arg2, arg4);

    switch (cond) {
    case TCG_COND_LT:
    case TCG_COND_LTU:
        tcg_out_setcond(s, TCG_COND_LTU, ret, arg1, arg3);
        break;
    case TCG_COND_LE:
    case TCG_COND_LEU:
        tcg_out_setcond(s, TCG_COND_LEU, ret, arg1, arg3);
        break;
    case TCG_COND_GT:
    case TCG_COND_GTU:
        tcg_out_setcond(s, TCG_COND_GTU, ret, arg1, arg3);
        break;
    case TCG_COND_GE:
    case TCG_COND_GEU:
        tcg_out_setcond(s, TCG_COND_GEU, ret, arg1, arg3);
        break;
    default:
        tcg_abort();
    }

    tcg_out_opc_reg(s, OPC_AND, ret, ret, TCG_REG_T0);
    tcg_out_opc_reg(s, OPC_OR, ret, ret, TCG_REG_AT);
}
#if defined(CONFIG_SOFTMMU)

#include "../../softmmu_defs.h"

/* Slow-path MMU helper tables, indexed by log2 of the access size.
   NOTE(review): the table entries were lost in extraction and are
   reconstructed from the documented helper signatures — verify against
   upstream. */
#ifdef CONFIG_TCG_PASS_AREG0
/* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
   int mmu_idx) */
static const void * const qemu_ld_helpers[4] = {
    helper_ldb_mmu,
    helper_ldw_mmu,
    helper_ldl_mmu,
    helper_ldq_mmu,
};

/* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
   uintxx_t val, int mmu_idx) */
static const void * const qemu_st_helpers[4] = {
    helper_stb_mmu,
    helper_stw_mmu,
    helper_stl_mmu,
    helper_stq_mmu,
};
#else
/* legacy helper signature: __ld_mmu(target_ulong addr, int
   mmu_idx) */
static void *qemu_ld_helpers[4] = {
    __ldb_mmu,
    __ldw_mmu,
    __ldl_mmu,
    __ldq_mmu,
};

/* legacy helper signature: __st_mmu(target_ulong addr, uintxx_t val,
   int mmu_idx) */
static void *qemu_st_helpers[4] = {
    __stb_mmu,
    __stw_mmu,
    __stl_mmu,
    __stq_mmu,
};
#endif
#endif
/* Emit a guest memory load.  OPC encodes the access: bits 0-1 are the
   log2 of the size (s_bits) and bit 2 requests sign extension.  With
   SOFTMMU, a TLB lookup is emitted with a slow-path helper call
   (results arrive in v0/v1); the fast path adds the TLB addend to the
   guest address.  Without SOFTMMU, GUEST_BASE is added directly. */
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_regl, addr_meml;
    int data_regl, data_regh, data_reg1, data_reg2;
    int mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
    void *label1_ptr, *label2_ptr;
    int arg_num;
#endif
#if TARGET_LONG_BITS == 64
# if defined(CONFIG_SOFTMMU)
    uint8_t *label3_ptr;
# endif
    int addr_regh, addr_memh;
#endif
    data_regl = *args++;
    if (opc == 3)
        data_regh = *args++;
    else
        data_regh = 0;
    addr_regl = *args++;
#if TARGET_LONG_BITS == 64
    addr_regh = *args++;
#endif
    mem_index = *args;
    s_bits = opc & 3;

    /* Select destination word order for 64-bit results. */
    if (opc == 3) {
#if defined(TCG_TARGET_WORDS_BIGENDIAN)
        data_reg1 = data_regh;
        data_reg2 = data_regl;
#else
        data_reg1 = data_regl;
        data_reg2 = data_regh;
#endif
    } else {
        data_reg1 = data_regl;
        data_reg2 = 0;
    }
    /* Byte offsets of the lo/hi words of the guest address within the
       TLB comparator, per host endianness. */
#if TARGET_LONG_BITS == 64
# if defined(TCG_TARGET_WORDS_BIGENDIAN)
    addr_memh = 0;
    addr_meml = 4;
# else
    addr_memh = 4;
    addr_meml = 0;
# endif
#else
    addr_meml = 0;
#endif

#if defined(CONFIG_SOFTMMU)
    /* TLB lookup: a0 = &env->tlb_table[mem_index][page index]. */
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_meml);
    /* t0 = address masked to page | alignment bits for comparison. */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);

# if TARGET_LONG_BITS == 64
    /* Compare low then high word of the guest address. */
    label3_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_read) + addr_memh);

    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
    tcg_out_nop(s);

    reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
# else
    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);
# endif

    /* slow path: call the MMU helper. */
    arg_num = 0;
# ifdef CONFIG_TCG_PASS_AREG0
    tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
# endif
# if TARGET_LONG_BITS == 64
    tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
# else
    tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
# endif
    tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_ld_helpers[s_bits]);
    tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
    tcg_out_nop(s);

    /* Extend/move the helper result (v0, or v0:v1 for 64 bit). */
    switch (opc) {
    case 0:
        tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xff);
        break;
    case 0 | 4:
        tcg_out_ext8s(s, data_reg1, TCG_REG_V0);
        break;
    case 1:
        tcg_out_opc_imm(s, OPC_ANDI, data_reg1, TCG_REG_V0, 0xffff);
        break;
    case 1 | 4:
        tcg_out_ext16s(s, data_reg1, TCG_REG_V0);
        break;
    case 2:
        tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
        break;
    case 3:
        tcg_out_mov(s, TCG_TYPE_I32, data_reg2, TCG_REG_V1);
        tcg_out_mov(s, TCG_TYPE_I32, data_reg1, TCG_REG_V0);
        break;
    default:
        tcg_abort();
    }

    /* Unconditional branch over the fast path. */
    label2_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
    tcg_out_nop(s);

    /* label1: fast path */
    reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);

    /* v0 = host address = guest address + TLB addend. */
    tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addend));
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_A0, addr_regl);
#else
    if (GUEST_BASE == (int16_t)GUEST_BASE) {
        tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_V0, addr_regl, GUEST_BASE);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_V0, GUEST_BASE);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_V0, TCG_REG_V0, addr_regl);
    }
#endif

    /* Actual load from the host address in v0, byte-swapping when host
       and guest endianness differ. */
    switch (opc) {
    case 0:
        tcg_out_opc_imm(s, OPC_LBU, data_reg1, TCG_REG_V0, 0);
        break;
    case 0 | 4:
        tcg_out_opc_imm(s, OPC_LB, data_reg1, TCG_REG_V0, 0);
        break;
    case 1:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap16(s, data_reg1, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LHU, data_reg1, TCG_REG_V0, 0);
        }
        break;
    case 1 | 4:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LHU, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap16s(s, data_reg1, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LH, data_reg1, TCG_REG_V0, 0);
        }
        break;
    case 2:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
        }
        break;
    case 3:
        if (TCG_NEED_BSWAP) {
            tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 4);
            tcg_out_bswap32(s, data_reg1, TCG_REG_T0);
            tcg_out_opc_imm(s, OPC_LW, TCG_REG_T0, TCG_REG_V0, 0);
            tcg_out_bswap32(s, data_reg2, TCG_REG_T0);
        } else {
            tcg_out_opc_imm(s, OPC_LW, data_reg1, TCG_REG_V0, 0);
            tcg_out_opc_imm(s, OPC_LW, data_reg2, TCG_REG_V0, 4);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: resume point after the slow path. */
    reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}
/* Emit a guest memory store.  OPC is the log2 of the access size.
   Mirrors tcg_out_qemu_ld: with SOFTMMU a TLB lookup (against
   addr_write) guards a fast path, with a slow-path helper call that
   additionally marshals the data value; without SOFTMMU, GUEST_BASE is
   added directly. */
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args,
                            int opc)
{
    int addr_regl, addr_meml;
    int data_regl, data_regh, data_reg1, data_reg2;
    int mem_index, s_bits;
#if defined(CONFIG_SOFTMMU)
    uint8_t *label1_ptr, *label2_ptr;
    int arg_num;
#endif
#if TARGET_LONG_BITS == 64
# if defined(CONFIG_SOFTMMU)
    uint8_t *label3_ptr;
# endif
    int addr_regh, addr_memh;
#endif

    data_regl = *args++;
    if (opc == 3) {
        data_regh = *args++;
        /* Select source word order for 64-bit stores. */
#if defined(TCG_TARGET_WORDS_BIGENDIAN)
        data_reg1 = data_regh;
        data_reg2 = data_regl;
#else
        data_reg1 = data_regl;
        data_reg2 = data_regh;
#endif
    } else {
        data_reg1 = data_regl;
        data_reg2 = 0;
        data_regh = 0;
    }
    addr_regl = *args++;
#if TARGET_LONG_BITS == 64
    addr_regh = *args++;
    /* Byte offsets of the lo/hi words of the guest address within the
       TLB comparator, per host endianness. */
# if defined(TCG_TARGET_WORDS_BIGENDIAN)
    addr_memh = 0;
    addr_meml = 4;
# else
    addr_memh = 4;
    addr_meml = 0;
# endif
#else
    addr_meml = 0;
#endif
    mem_index = *args;
    s_bits = opc;

#if defined(CONFIG_SOFTMMU)
    /* TLB lookup: a0 = &env->tlb_table[mem_index][page index]. */
    tcg_out_opc_sa(s, OPC_SRL, TCG_REG_A0, addr_regl, TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_A0, TCG_REG_A0, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, TCG_AREG0);
    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_meml);
    /* t0 = address masked to page | alignment bits for comparison. */
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T0, TARGET_PAGE_MASK | ((1 << s_bits) - 1));
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_T0, TCG_REG_T0, addr_regl);

# if TARGET_LONG_BITS == 64
    /* Compare low then high word of the guest address. */
    label3_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BNE, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);

    tcg_out_opc_imm(s, OPC_LW, TCG_REG_AT, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addr_write) + addr_memh);

    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, addr_regh, TCG_REG_AT);
    tcg_out_nop(s);

    reloc_pc16(label3_ptr, (tcg_target_long) s->code_ptr);
# else
    label1_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_T0, TCG_REG_AT);
    tcg_out_nop(s);
# endif

    /* slow path: call the MMU helper. */
    arg_num = 0;
# ifdef CONFIG_TCG_PASS_AREG0
    tcg_out_call_iarg_reg32(s, &arg_num, TCG_AREG0);
# endif
# if TARGET_LONG_BITS == 64
    tcg_out_call_iarg_reg64(s, &arg_num, addr_regl, addr_regh);
# else
    tcg_out_call_iarg_reg32(s, &arg_num, addr_regl);
# endif
    /* Marshal the data value, zero-extended to the access size. */
    switch (opc) {
    case 0:
        tcg_out_call_iarg_reg8(s, &arg_num, data_regl);
        break;
    case 1:
        tcg_out_call_iarg_reg16(s, &arg_num, data_regl);
        break;
    case 2:
        tcg_out_call_iarg_reg32(s, &arg_num, data_regl);
        break;
    case 3:
        tcg_out_call_iarg_reg64(s, &arg_num, data_regl, data_regh);
        break;
    default:
        tcg_abort();
    }
    tcg_out_call_iarg_imm32(s, &arg_num, mem_index);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_T9, (tcg_target_long)qemu_st_helpers[s_bits]);
    tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, TCG_REG_T9, 0);
    tcg_out_nop(s);

    /* Unconditional branch over the fast path. */
    label2_ptr = s->code_ptr;
    tcg_out_opc_br(s, OPC_BEQ, TCG_REG_ZERO, TCG_REG_ZERO);
    tcg_out_nop(s);

    /* label1: fast path */
    reloc_pc16(label1_ptr, (tcg_target_long) s->code_ptr);

    /* a0 = host address = guest address + TLB addend. */
    tcg_out_opc_imm(s, OPC_LW, TCG_REG_A0, TCG_REG_A0,
                    offsetof(CPUArchState, tlb_table[mem_index][0].addend));
    tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
#else
    if (GUEST_BASE == (int16_t)GUEST_BASE) {
        tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_A0, addr_regl, GUEST_BASE);
    } else {
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, GUEST_BASE);
        tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_A0, TCG_REG_A0, addr_regl);
    }
#endif

    /* Actual store to the host address in a0, byte-swapping when host
       and guest endianness differ. */
    switch (opc) {
    case 0:
        tcg_out_opc_imm(s, OPC_SB, data_reg1, TCG_REG_A0, 0);
        break;
    case 1:
        if (TCG_NEED_BSWAP) {
            tcg_out_bswap16(s, TCG_REG_T0, data_reg1);
            tcg_out_opc_imm(s, OPC_SH, TCG_REG_T0, TCG_REG_A0, 0);
        } else {
            tcg_out_opc_imm(s, OPC_SH, data_reg1, TCG_REG_A0, 0);
        }
        break;
    case 2:
        if (TCG_NEED_BSWAP) {
            tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
            tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
        } else {
            tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
        }
        break;
    case 3:
        if (TCG_NEED_BSWAP) {
            tcg_out_bswap32(s, TCG_REG_T0, data_reg2);
            tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 0);
            tcg_out_bswap32(s, TCG_REG_T0, data_reg1);
            tcg_out_opc_imm(s, OPC_SW, TCG_REG_T0, TCG_REG_A0, 4);
        } else {
            tcg_out_opc_imm(s, OPC_SW, data_reg1, TCG_REG_A0, 0);
            tcg_out_opc_imm(s, OPC_SW, data_reg2, TCG_REG_A0, 4);
        }
        break;
    default:
        tcg_abort();
    }

#if defined(CONFIG_SOFTMMU)
    /* label2: resume point after the slow path. */
    reloc_pc16(label2_ptr, (tcg_target_long) s->code_ptr);
#endif
}
1219 static inline void tcg_out_op(TCGContext
*s
, TCGOpcode opc
,
1220 const TCGArg
*args
, const int *const_args
)
1223 case INDEX_op_exit_tb
:
1224 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_V0
, args
[0]);
1225 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_AT
, (tcg_target_long
)tb_ret_addr
);
1226 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_AT
, 0);
1229 case INDEX_op_goto_tb
:
1230 if (s
->tb_jmp_offset
) {
1231 /* direct jump method */
1234 /* indirect jump method */
1235 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, (tcg_target_long
)(s
->tb_next
+ args
[0]));
1236 tcg_out_ld(s
, TCG_TYPE_PTR
, TCG_REG_AT
, TCG_REG_AT
, 0);
1237 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_AT
, 0);
1240 s
->tb_next_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
1243 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, args
[0], 0);
1247 tcg_out_opc_reg(s
, OPC_JR
, 0, args
[0], 0);
1251 tcg_out_brcond(s
, TCG_COND_EQ
, TCG_REG_ZERO
, TCG_REG_ZERO
, args
[0]);
1254 case INDEX_op_mov_i32
:
1255 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1257 case INDEX_op_movi_i32
:
1258 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1261 case INDEX_op_ld8u_i32
:
1262 tcg_out_ldst(s
, OPC_LBU
, args
[0], args
[1], args
[2]);
1264 case INDEX_op_ld8s_i32
:
1265 tcg_out_ldst(s
, OPC_LB
, args
[0], args
[1], args
[2]);
1267 case INDEX_op_ld16u_i32
:
1268 tcg_out_ldst(s
, OPC_LHU
, args
[0], args
[1], args
[2]);
1270 case INDEX_op_ld16s_i32
:
1271 tcg_out_ldst(s
, OPC_LH
, args
[0], args
[1], args
[2]);
1273 case INDEX_op_ld_i32
:
1274 tcg_out_ldst(s
, OPC_LW
, args
[0], args
[1], args
[2]);
1276 case INDEX_op_st8_i32
:
1277 tcg_out_ldst(s
, OPC_SB
, args
[0], args
[1], args
[2]);
1279 case INDEX_op_st16_i32
:
1280 tcg_out_ldst(s
, OPC_SH
, args
[0], args
[1], args
[2]);
1282 case INDEX_op_st_i32
:
1283 tcg_out_ldst(s
, OPC_SW
, args
[0], args
[1], args
[2]);
1286 case INDEX_op_add_i32
:
1287 if (const_args
[2]) {
1288 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[0], args
[1], args
[2]);
1290 tcg_out_opc_reg(s
, OPC_ADDU
, args
[0], args
[1], args
[2]);
1293 case INDEX_op_add2_i32
:
1294 if (const_args
[4]) {
1295 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_AT
, args
[2], args
[4]);
1297 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_AT
, args
[2], args
[4]);
1299 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_T0
, TCG_REG_AT
, args
[2]);
1300 if (const_args
[5]) {
1301 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[1], args
[3], args
[5]);
1303 tcg_out_opc_reg(s
, OPC_ADDU
, args
[1], args
[3], args
[5]);
1305 tcg_out_opc_reg(s
, OPC_ADDU
, args
[1], args
[1], TCG_REG_T0
);
1306 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], TCG_REG_AT
);
1308 case INDEX_op_sub_i32
:
1309 if (const_args
[2]) {
1310 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[0], args
[1], -args
[2]);
1312 tcg_out_opc_reg(s
, OPC_SUBU
, args
[0], args
[1], args
[2]);
1315 case INDEX_op_sub2_i32
:
1316 if (const_args
[4]) {
1317 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_AT
, args
[2], -args
[4]);
1319 tcg_out_opc_reg(s
, OPC_SUBU
, TCG_REG_AT
, args
[2], args
[4]);
1321 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_T0
, args
[2], TCG_REG_AT
);
1322 if (const_args
[5]) {
1323 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[1], args
[3], -args
[5]);
1325 tcg_out_opc_reg(s
, OPC_SUBU
, args
[1], args
[3], args
[5]);
1327 tcg_out_opc_reg(s
, OPC_SUBU
, args
[1], args
[1], TCG_REG_T0
);
1328 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], TCG_REG_AT
);
1330 case INDEX_op_mul_i32
:
1331 tcg_out_opc_reg(s
, OPC_MULT
, 0, args
[1], args
[2]);
1332 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1334 case INDEX_op_mulu2_i32
:
1335 tcg_out_opc_reg(s
, OPC_MULTU
, 0, args
[2], args
[3]);
1336 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1337 tcg_out_opc_reg(s
, OPC_MFHI
, args
[1], 0, 0);
1339 case INDEX_op_div_i32
:
1340 tcg_out_opc_reg(s
, OPC_DIV
, 0, args
[1], args
[2]);
1341 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1343 case INDEX_op_divu_i32
:
1344 tcg_out_opc_reg(s
, OPC_DIVU
, 0, args
[1], args
[2]);
1345 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1347 case INDEX_op_rem_i32
:
1348 tcg_out_opc_reg(s
, OPC_DIV
, 0, args
[1], args
[2]);
1349 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1351 case INDEX_op_remu_i32
:
1352 tcg_out_opc_reg(s
, OPC_DIVU
, 0, args
[1], args
[2]);
1353 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1356 case INDEX_op_and_i32
:
1357 if (const_args
[2]) {
1358 tcg_out_opc_imm(s
, OPC_ANDI
, args
[0], args
[1], args
[2]);
1360 tcg_out_opc_reg(s
, OPC_AND
, args
[0], args
[1], args
[2]);
1363 case INDEX_op_or_i32
:
1364 if (const_args
[2]) {
1365 tcg_out_opc_imm(s
, OPC_ORI
, args
[0], args
[1], args
[2]);
1367 tcg_out_opc_reg(s
, OPC_OR
, args
[0], args
[1], args
[2]);
1370 case INDEX_op_nor_i32
:
1371 tcg_out_opc_reg(s
, OPC_NOR
, args
[0], args
[1], args
[2]);
1373 case INDEX_op_not_i32
:
1374 tcg_out_opc_reg(s
, OPC_NOR
, args
[0], TCG_REG_ZERO
, args
[1]);
1376 case INDEX_op_xor_i32
:
1377 if (const_args
[2]) {
1378 tcg_out_opc_imm(s
, OPC_XORI
, args
[0], args
[1], args
[2]);
1380 tcg_out_opc_reg(s
, OPC_XOR
, args
[0], args
[1], args
[2]);
1384 case INDEX_op_sar_i32
:
1385 if (const_args
[2]) {
1386 tcg_out_opc_sa(s
, OPC_SRA
, args
[0], args
[1], args
[2]);
1388 tcg_out_opc_reg(s
, OPC_SRAV
, args
[0], args
[2], args
[1]);
1391 case INDEX_op_shl_i32
:
1392 if (const_args
[2]) {
1393 tcg_out_opc_sa(s
, OPC_SLL
, args
[0], args
[1], args
[2]);
1395 tcg_out_opc_reg(s
, OPC_SLLV
, args
[0], args
[2], args
[1]);
1398 case INDEX_op_shr_i32
:
1399 if (const_args
[2]) {
1400 tcg_out_opc_sa(s
, OPC_SRL
, args
[0], args
[1], args
[2]);
1402 tcg_out_opc_reg(s
, OPC_SRLV
, args
[0], args
[2], args
[1]);
1406 case INDEX_op_ext8s_i32
:
1407 tcg_out_ext8s(s
, args
[0], args
[1]);
1409 case INDEX_op_ext16s_i32
:
1410 tcg_out_ext16s(s
, args
[0], args
[1]);
1413 case INDEX_op_brcond_i32
:
1414 tcg_out_brcond(s
, args
[2], args
[0], args
[1], args
[3]);
1416 case INDEX_op_brcond2_i32
:
1417 tcg_out_brcond2(s
, args
[4], args
[0], args
[1], args
[2], args
[3], args
[5]);
1420 case INDEX_op_setcond_i32
:
1421 tcg_out_setcond(s
, args
[3], args
[0], args
[1], args
[2]);
1423 case INDEX_op_setcond2_i32
:
1424 tcg_out_setcond2(s
, args
[5], args
[0], args
[1], args
[2], args
[3], args
[4]);
1427 case INDEX_op_qemu_ld8u
:
1428 tcg_out_qemu_ld(s
, args
, 0);
1430 case INDEX_op_qemu_ld8s
:
1431 tcg_out_qemu_ld(s
, args
, 0 | 4);
1433 case INDEX_op_qemu_ld16u
:
1434 tcg_out_qemu_ld(s
, args
, 1);
1436 case INDEX_op_qemu_ld16s
:
1437 tcg_out_qemu_ld(s
, args
, 1 | 4);
1439 case INDEX_op_qemu_ld32
:
1440 tcg_out_qemu_ld(s
, args
, 2);
1442 case INDEX_op_qemu_ld64
:
1443 tcg_out_qemu_ld(s
, args
, 3);
1445 case INDEX_op_qemu_st8
:
1446 tcg_out_qemu_st(s
, args
, 0);
1448 case INDEX_op_qemu_st16
:
1449 tcg_out_qemu_st(s
, args
, 1);
1451 case INDEX_op_qemu_st32
:
1452 tcg_out_qemu_st(s
, args
, 2);
1454 case INDEX_op_qemu_st64
:
1455 tcg_out_qemu_st(s
, args
, 3);
1463 static const TCGTargetOpDef mips_op_defs
[] = {
1464 { INDEX_op_exit_tb
, { } },
1465 { INDEX_op_goto_tb
, { } },
1466 { INDEX_op_call
, { "C" } },
1467 { INDEX_op_jmp
, { "r" } },
1468 { INDEX_op_br
, { } },
1470 { INDEX_op_mov_i32
, { "r", "r" } },
1471 { INDEX_op_movi_i32
, { "r" } },
1472 { INDEX_op_ld8u_i32
, { "r", "r" } },
1473 { INDEX_op_ld8s_i32
, { "r", "r" } },
1474 { INDEX_op_ld16u_i32
, { "r", "r" } },
1475 { INDEX_op_ld16s_i32
, { "r", "r" } },
1476 { INDEX_op_ld_i32
, { "r", "r" } },
1477 { INDEX_op_st8_i32
, { "rZ", "r" } },
1478 { INDEX_op_st16_i32
, { "rZ", "r" } },
1479 { INDEX_op_st_i32
, { "rZ", "r" } },
1481 { INDEX_op_add_i32
, { "r", "rZ", "rJZ" } },
1482 { INDEX_op_mul_i32
, { "r", "rZ", "rZ" } },
1483 { INDEX_op_mulu2_i32
, { "r", "r", "rZ", "rZ" } },
1484 { INDEX_op_div_i32
, { "r", "rZ", "rZ" } },
1485 { INDEX_op_divu_i32
, { "r", "rZ", "rZ" } },
1486 { INDEX_op_rem_i32
, { "r", "rZ", "rZ" } },
1487 { INDEX_op_remu_i32
, { "r", "rZ", "rZ" } },
1488 { INDEX_op_sub_i32
, { "r", "rZ", "rJZ" } },
1490 { INDEX_op_and_i32
, { "r", "rZ", "rIZ" } },
1491 { INDEX_op_nor_i32
, { "r", "rZ", "rZ" } },
1492 { INDEX_op_not_i32
, { "r", "rZ" } },
1493 { INDEX_op_or_i32
, { "r", "rZ", "rIZ" } },
1494 { INDEX_op_xor_i32
, { "r", "rZ", "rIZ" } },
1496 { INDEX_op_shl_i32
, { "r", "rZ", "riZ" } },
1497 { INDEX_op_shr_i32
, { "r", "rZ", "riZ" } },
1498 { INDEX_op_sar_i32
, { "r", "rZ", "riZ" } },
1500 { INDEX_op_ext8s_i32
, { "r", "rZ" } },
1501 { INDEX_op_ext16s_i32
, { "r", "rZ" } },
1503 { INDEX_op_brcond_i32
, { "rZ", "rZ" } },
1504 { INDEX_op_setcond_i32
, { "r", "rZ", "rZ" } },
1505 { INDEX_op_setcond2_i32
, { "r", "rZ", "rZ", "rZ", "rZ" } },
1507 { INDEX_op_add2_i32
, { "r", "r", "rZ", "rZ", "rJZ", "rJZ" } },
1508 { INDEX_op_sub2_i32
, { "r", "r", "rZ", "rZ", "rJZ", "rJZ" } },
1509 { INDEX_op_brcond2_i32
, { "rZ", "rZ", "rZ", "rZ" } },
1511 #if TARGET_LONG_BITS == 32
1512 { INDEX_op_qemu_ld8u
, { "L", "lZ" } },
1513 { INDEX_op_qemu_ld8s
, { "L", "lZ" } },
1514 { INDEX_op_qemu_ld16u
, { "L", "lZ" } },
1515 { INDEX_op_qemu_ld16s
, { "L", "lZ" } },
1516 { INDEX_op_qemu_ld32
, { "L", "lZ" } },
1517 { INDEX_op_qemu_ld64
, { "L", "L", "lZ" } },
1519 { INDEX_op_qemu_st8
, { "SZ", "SZ" } },
1520 { INDEX_op_qemu_st16
, { "SZ", "SZ" } },
1521 { INDEX_op_qemu_st32
, { "SZ", "SZ" } },
1522 { INDEX_op_qemu_st64
, { "SZ", "SZ", "SZ" } },
1524 { INDEX_op_qemu_ld8u
, { "L", "lZ", "lZ" } },
1525 { INDEX_op_qemu_ld8s
, { "L", "lZ", "lZ" } },
1526 { INDEX_op_qemu_ld16u
, { "L", "lZ", "lZ" } },
1527 { INDEX_op_qemu_ld16s
, { "L", "lZ", "lZ" } },
1528 { INDEX_op_qemu_ld32
, { "L", "lZ", "lZ" } },
1529 { INDEX_op_qemu_ld64
, { "L", "L", "lZ", "lZ" } },
1531 { INDEX_op_qemu_st8
, { "SZ", "SZ", "SZ" } },
1532 { INDEX_op_qemu_st16
, { "SZ", "SZ", "SZ" } },
1533 { INDEX_op_qemu_st32
, { "SZ", "SZ", "SZ" } },
1534 { INDEX_op_qemu_st64
, { "SZ", "SZ", "SZ", "SZ" } },
/*
 * Host registers saved/restored by the prologue/epilogue around
 * generated code (see tcg_target_qemu_prologue below).
 * NOTE(review): the interior entries of this array (original source
 * lines 1541-1549, presumably the remaining callee-saved S-registers)
 * were lost when this file was extracted -- restore them from the
 * upstream source before building; the array terminator is also
 * missing here.
 */
1539 static int tcg_target_callee_save_regs
[] = {
1540 TCG_REG_S0
, /* used for the global env (TCG_AREG0) */
1550 TCG_REG_RA
, /* should be last for ABI compliance */
1553 /* Generate global QEMU prologue and epilogue code */
1554 static void tcg_target_qemu_prologue(TCGContext
*s
)
1558 /* reserve some stack space */
1559 frame_size
= ARRAY_SIZE(tcg_target_callee_save_regs
) * 4
1560 + TCG_STATIC_CALL_ARGS_SIZE
;
1561 frame_size
= (frame_size
+ TCG_TARGET_STACK_ALIGN
- 1) &
1562 ~(TCG_TARGET_STACK_ALIGN
- 1);
1565 tcg_out_addi(s
, TCG_REG_SP
, -frame_size
);
1566 for(i
= 0 ; i
< ARRAY_SIZE(tcg_target_callee_save_regs
) ; i
++) {
1567 tcg_out_st(s
, TCG_TYPE_I32
, tcg_target_callee_save_regs
[i
],
1568 TCG_REG_SP
, TCG_STATIC_CALL_ARGS_SIZE
+ i
* 4);
1571 /* Call generated code */
1572 tcg_out_opc_reg(s
, OPC_JR
, 0, tcg_target_call_iarg_regs
[1], 0);
1573 tcg_out_mov(s
, TCG_TYPE_PTR
, TCG_AREG0
, tcg_target_call_iarg_regs
[0]);
1574 tb_ret_addr
= s
->code_ptr
;
1577 for(i
= 0 ; i
< ARRAY_SIZE(tcg_target_callee_save_regs
) ; i
++) {
1578 tcg_out_ld(s
, TCG_TYPE_I32
, tcg_target_callee_save_regs
[i
],
1579 TCG_REG_SP
, TCG_STATIC_CALL_ARGS_SIZE
+ i
* 4);
1582 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_RA
, 0);
1583 tcg_out_addi(s
, TCG_REG_SP
, frame_size
);
/*
 * One-time backend initialisation: declare which host registers TCG may
 * allocate, which are call-clobbered, and which are reserved; then
 * register the operation/constraint table and the spill frame.
 */
1586 static void tcg_target_init(TCGContext
*s
)
/* All 32 GPRs are usable for 32-bit values. */
1588 tcg_regset_set(tcg_target_available_regs
[TCG_TYPE_I32
], 0xffffffff);
/*
 * NOTE(review): the second argument of this call -- the mask of
 * call-clobbered registers (original source lines 1590-1605) -- was
 * lost when this file was extracted; restore it from the upstream
 * source.  The function's closing brace is likewise not visible here.
 */
1589 tcg_regset_set(tcg_target_call_clobber_regs
,
/* Registers the allocator must never hand out. */
1606 tcg_regset_clear(s
->reserved_regs
);
1607 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_ZERO
); /* zero register */
1608 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_K0
); /* kernel use only */
1609 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_K1
); /* kernel use only */
1610 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_AT
); /* internal use */
1611 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_T0
); /* internal use */
1612 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_RA
); /* return address */
1613 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_SP
); /* stack pointer */
/* Publish the constraint table and the per-CPU temporary spill buffer. */
1615 tcg_add_target_add_op_defs(mips_op_defs
);
1616 tcg_set_frame(s
, TCG_AREG0
, offsetof(CPUArchState
, temp_buf
),
1617 CPU_TEMP_BUF_NLONGS
* sizeof(long));