2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
5 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
6 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
21 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
27 #if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
28 # define TCG_NEED_BSWAP 0
30 # define TCG_NEED_BSWAP 1
34 static const char * const tcg_target_reg_names
[TCG_TARGET_NB_REGS
] = {
70 /* check if we really need so many registers :P */
71 static const int tcg_target_reg_alloc_order
[] = {
97 static const int tcg_target_call_iarg_regs
[4] = {
104 static const int tcg_target_call_oarg_regs
[2] = {
109 static uint8_t *tb_ret_addr
;
111 static inline uint32_t reloc_lo16_val (void *pc
, tcg_target_long target
)
113 return target
& 0xffff;
116 static inline void reloc_lo16 (void *pc
, tcg_target_long target
)
118 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
119 | reloc_lo16_val(pc
, target
);
122 static inline uint32_t reloc_hi16_val (void *pc
, tcg_target_long target
)
124 return (target
>> 16) & 0xffff;
127 static inline void reloc_hi16 (void *pc
, tcg_target_long target
)
129 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
130 | reloc_hi16_val(pc
, target
);
133 static inline uint32_t reloc_pc16_val (void *pc
, tcg_target_long target
)
137 disp
= target
- (tcg_target_long
) pc
- 4;
138 if (disp
!= (disp
<< 14) >> 14) {
142 return (disp
>> 2) & 0xffff;
145 static inline void reloc_pc16 (void *pc
, tcg_target_long target
)
147 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
148 | reloc_pc16_val(pc
, target
);
151 static inline uint32_t reloc_26_val (void *pc
, tcg_target_long target
)
153 if ((((tcg_target_long
)pc
+ 4) & 0xf0000000) != (target
& 0xf0000000)) {
157 return (target
>> 2) & 0x3ffffff;
160 static inline void reloc_pc26 (void *pc
, tcg_target_long target
)
162 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0x3ffffff)
163 | reloc_26_val(pc
, target
);
166 static void patch_reloc(uint8_t *code_ptr
, int type
,
167 tcg_target_long value
, tcg_target_long addend
)
172 reloc_lo16(code_ptr
, value
);
175 reloc_hi16(code_ptr
, value
);
178 reloc_pc16(code_ptr
, value
);
181 reloc_pc26(code_ptr
, value
);
/* maximum number of register used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    /* o32 calling convention: $a0-$a3 (matches the 4-entry
       tcg_target_call_iarg_regs table).  FLAGS is unused here.  */
    return 4;
}
194 /* parse target specific constraints */
195 static int target_parse_constraint(TCGArgConstraint
*ct
, const char **pct_str
)
202 ct
->ct
|= TCG_CT_REG
;
203 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
206 ct
->ct
|= TCG_CT_REG
;
207 tcg_regset_clear(ct
->u
.regs
);
208 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_T9
);
210 case 'L': /* qemu_ld output arg constraint */
211 ct
->ct
|= TCG_CT_REG
;
212 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
213 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_V0
);
215 case 'l': /* qemu_ld input arg constraint */
216 ct
->ct
|= TCG_CT_REG
;
217 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
218 #if defined(CONFIG_SOFTMMU)
219 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A0
);
220 # if (TARGET_LONG_BITS == 64)
221 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A2
);
225 case 'S': /* qemu_st constraint */
226 ct
->ct
|= TCG_CT_REG
;
227 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
228 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A0
);
229 #if defined(CONFIG_SOFTMMU)
230 # if (TARGET_LONG_BITS == 32)
231 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A1
);
233 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A2
);
234 # if TARGET_LONG_BITS == 64
235 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A3
);
240 ct
->ct
|= TCG_CT_CONST_U16
;
243 ct
->ct
|= TCG_CT_CONST_S16
;
246 /* We are cheating a bit here, using the fact that the register
247 ZERO is also the register number 0. Hence there is no need
248 to check for const_args in each instruction. */
249 ct
->ct
|= TCG_CT_CONST_ZERO
;
259 /* test if a constant matches the constraint */
260 static inline int tcg_target_const_match(tcg_target_long val
,
261 const TCGArgConstraint
*arg_ct
)
265 if (ct
& TCG_CT_CONST
)
267 else if ((ct
& TCG_CT_CONST_ZERO
) && val
== 0)
269 else if ((ct
& TCG_CT_CONST_U16
) && val
== (uint16_t)val
)
271 else if ((ct
& TCG_CT_CONST_S16
) && val
== (int16_t)val
)
/* instruction opcodes */
/* NOTE(review): the LB/LH/LW/SB/SH/SW entries were restored from the
   architectural MIPS32 opcode map (they are referenced below but their
   definitions were lost in extraction) -- confirm against the ISA.  */
enum {
    /* I-type: major opcode in bits [31:26] */
    OPC_BEQ      = 0x04 << 26,
    OPC_BNE      = 0x05 << 26,
    OPC_ADDIU    = 0x09 << 26,
    OPC_SLTI     = 0x0A << 26,
    OPC_SLTIU    = 0x0B << 26,
    OPC_ANDI     = 0x0C << 26,
    OPC_ORI      = 0x0D << 26,
    OPC_XORI     = 0x0E << 26,
    OPC_LUI      = 0x0F << 26,
    OPC_LB       = 0x20 << 26,
    OPC_LH       = 0x21 << 26,
    OPC_LW       = 0x23 << 26,
    OPC_LBU      = 0x24 << 26,
    OPC_LHU      = 0x25 << 26,
    OPC_LWU      = 0x27 << 26,
    OPC_SB       = 0x28 << 26,
    OPC_SH       = 0x29 << 26,
    OPC_SW       = 0x2B << 26,

    /* R-type: SPECIAL major opcode, function code in bits [5:0] */
    OPC_SPECIAL  = 0x00 << 26,
    OPC_SLL      = OPC_SPECIAL | 0x00,
    OPC_SRL      = OPC_SPECIAL | 0x02,
    OPC_SRA      = OPC_SPECIAL | 0x03,
    OPC_SLLV     = OPC_SPECIAL | 0x04,
    OPC_SRLV     = OPC_SPECIAL | 0x06,
    OPC_SRAV     = OPC_SPECIAL | 0x07,
    OPC_JR       = OPC_SPECIAL | 0x08,
    OPC_JALR     = OPC_SPECIAL | 0x09,
    OPC_MFHI     = OPC_SPECIAL | 0x10,
    OPC_MFLO     = OPC_SPECIAL | 0x12,
    OPC_MULT     = OPC_SPECIAL | 0x18,
    OPC_MULTU    = OPC_SPECIAL | 0x19,
    OPC_DIV      = OPC_SPECIAL | 0x1A,
    OPC_DIVU     = OPC_SPECIAL | 0x1B,
    OPC_ADDU     = OPC_SPECIAL | 0x21,
    OPC_SUBU     = OPC_SPECIAL | 0x23,
    OPC_AND      = OPC_SPECIAL | 0x24,
    OPC_OR       = OPC_SPECIAL | 0x25,
    OPC_XOR      = OPC_SPECIAL | 0x26,
    OPC_NOR      = OPC_SPECIAL | 0x27,
    OPC_SLT      = OPC_SPECIAL | 0x2A,
    OPC_SLTU     = OPC_SPECIAL | 0x2B,

    /* MIPS32R2 sign-extension instructions (SPECIAL3) */
    OPC_SPECIAL3 = 0x1f << 26,
    OPC_SEB      = OPC_SPECIAL3 | 0x420,
    OPC_SEH      = OPC_SPECIAL3 | 0x620,
};
330 static inline void tcg_out_opc_reg(TCGContext
*s
, int opc
, int rd
, int rs
, int rt
)
335 inst
|= (rs
& 0x1F) << 21;
336 inst
|= (rt
& 0x1F) << 16;
337 inst
|= (rd
& 0x1F) << 11;
344 static inline void tcg_out_opc_imm(TCGContext
*s
, int opc
, int rt
, int rs
, int imm
)
349 inst
|= (rs
& 0x1F) << 21;
350 inst
|= (rt
& 0x1F) << 16;
351 inst
|= (imm
& 0xffff);
358 static inline void tcg_out_opc_br(TCGContext
*s
, int opc
, int rt
, int rs
)
360 /* We pay attention here to not modify the branch target by reading
361 the existing value and using it again. This ensure that caches and
362 memory are kept coherent during retranslation. */
363 uint16_t offset
= (uint16_t)(*(uint32_t *) s
->code_ptr
);
365 tcg_out_opc_imm(s
, opc
, rt
, rs
, offset
);
371 static inline void tcg_out_opc_sa(TCGContext
*s
, int opc
, int rd
, int rt
, int sa
)
376 inst
|= (rt
& 0x1F) << 16;
377 inst
|= (rd
& 0x1F) << 11;
378 inst
|= (sa
& 0x1F) << 6;
383 static inline void tcg_out_nop(TCGContext
*s
)
388 static inline void tcg_out_mov(TCGContext
*s
, TCGType type
,
389 TCGReg ret
, TCGReg arg
)
391 /* Simple reg-reg move, optimising out the 'do nothing' case */
393 tcg_out_opc_reg(s
, OPC_ADDU
, ret
, arg
, TCG_REG_ZERO
);
397 static inline void tcg_out_movi(TCGContext
*s
, TCGType type
,
398 TCGReg reg
, tcg_target_long arg
)
400 if (arg
== (int16_t)arg
) {
401 tcg_out_opc_imm(s
, OPC_ADDIU
, reg
, TCG_REG_ZERO
, arg
);
402 } else if (arg
== (uint16_t)arg
) {
403 tcg_out_opc_imm(s
, OPC_ORI
, reg
, TCG_REG_ZERO
, arg
);
405 tcg_out_opc_imm(s
, OPC_LUI
, reg
, 0, arg
>> 16);
406 tcg_out_opc_imm(s
, OPC_ORI
, reg
, reg
, arg
& 0xffff);
410 static inline void tcg_out_bswap16(TCGContext
*s
, int ret
, int arg
)
412 /* ret and arg can't be register at */
413 if (ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
417 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
418 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, TCG_REG_AT
, 0x00ff);
420 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 8);
421 tcg_out_opc_imm(s
, OPC_ANDI
, ret
, ret
, 0xff00);
422 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
425 static inline void tcg_out_bswap16s(TCGContext
*s
, int ret
, int arg
)
427 /* ret and arg can't be register at */
428 if (ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
432 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
433 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, TCG_REG_AT
, 0xff);
435 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
436 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 16);
437 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
440 static inline void tcg_out_bswap32(TCGContext
*s
, int ret
, int arg
)
442 /* ret and arg must be different and can't be register at */
443 if (ret
== arg
|| ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
447 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
449 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 24);
450 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
452 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, arg
, 0xff00);
453 tcg_out_opc_sa(s
, OPC_SLL
, TCG_REG_AT
, TCG_REG_AT
, 8);
454 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
456 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
457 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, TCG_REG_AT
, 0xff00);
458 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
461 static inline void tcg_out_ext8s(TCGContext
*s
, int ret
, int arg
)
463 #ifdef _MIPS_ARCH_MIPS32R2
464 tcg_out_opc_reg(s
, OPC_SEB
, ret
, 0, arg
);
466 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
467 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 24);
471 static inline void tcg_out_ext16s(TCGContext
*s
, int ret
, int arg
)
473 #ifdef _MIPS_ARCH_MIPS32R2
474 tcg_out_opc_reg(s
, OPC_SEH
, ret
, 0, arg
);
476 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 16);
477 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 16);
481 static inline void tcg_out_ldst(TCGContext
*s
, int opc
, int arg
,
482 int arg1
, tcg_target_long arg2
)
484 if (arg2
== (int16_t) arg2
) {
485 tcg_out_opc_imm(s
, opc
, arg
, arg1
, arg2
);
487 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, arg2
);
488 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_AT
, TCG_REG_AT
, arg1
);
489 tcg_out_opc_imm(s
, opc
, arg
, TCG_REG_AT
, 0);
493 static inline void tcg_out_ld(TCGContext
*s
, TCGType type
, TCGReg arg
,
494 TCGReg arg1
, tcg_target_long arg2
)
496 tcg_out_ldst(s
, OPC_LW
, arg
, arg1
, arg2
);
499 static inline void tcg_out_st(TCGContext
*s
, TCGType type
, TCGReg arg
,
500 TCGReg arg1
, tcg_target_long arg2
)
502 tcg_out_ldst(s
, OPC_SW
, arg
, arg1
, arg2
);
505 static inline void tcg_out_addi(TCGContext
*s
, int reg
, tcg_target_long val
)
507 if (val
== (int16_t)val
) {
508 tcg_out_opc_imm(s
, OPC_ADDIU
, reg
, reg
, val
);
510 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, val
);
511 tcg_out_opc_reg(s
, OPC_ADDU
, reg
, reg
, TCG_REG_AT
);
515 /* Helper routines for marshalling helper function arguments into
516 * the correct registers and stack.
517 * arg_num is where we want to put this argument, and is updated to be ready
518 * for the next call. arg is the argument itself. Note that arg_num 0..3 is
519 * real registers, 4+ on stack.
521 * We provide routines for arguments which are: immediate, 32 bit
522 * value in register, 16 and 8 bit values in register (which must be zero
523 * extended before use) and 64 bit value in a lo:hi register pair.
525 #define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM) \
526 static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM) \
528 if (*arg_num < 4) { \
529 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
531 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT); \
532 tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
536 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
537 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
538 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8
, TCGReg arg
)
539 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
540 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
541 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
542 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16
, TCGReg arg
)
543 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
544 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
545 tcg_out_movi(s, TCG_TYPE_I32, A, arg);
546 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32
, uint32_t arg
)
547 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
549 /* We don't use the macro for this one to avoid an unnecessary reg-reg
550 move when storing to the stack. */
551 static inline void tcg_out_call_iarg_reg32(TCGContext
*s
, int *arg_num
,
555 tcg_out_mov(s
, TCG_TYPE_I32
, tcg_target_call_iarg_regs
[*arg_num
], arg
);
557 tcg_out_st(s
, TCG_TYPE_I32
, arg
, TCG_REG_SP
, 4 * (*arg_num
));
562 static inline void tcg_out_call_iarg_reg64(TCGContext
*s
, int *arg_num
,
563 TCGReg arg_low
, TCGReg arg_high
)
565 (*arg_num
) = (*arg_num
+ 1) & ~1;
567 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
568 tcg_out_call_iarg_reg32(s
, arg_num
, arg_high
);
569 tcg_out_call_iarg_reg32(s
, arg_num
, arg_low
);
571 tcg_out_call_iarg_reg32(s
, arg_num
, arg_low
);
572 tcg_out_call_iarg_reg32(s
, arg_num
, arg_high
);
576 static void tcg_out_brcond(TCGContext
*s
, TCGCond cond
, int arg1
,
577 int arg2
, int label_index
)
579 TCGLabel
*l
= &s
->labels
[label_index
];
583 tcg_out_opc_br(s
, OPC_BEQ
, arg1
, arg2
);
586 tcg_out_opc_br(s
, OPC_BNE
, arg1
, arg2
);
589 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg1
, arg2
);
590 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
593 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg1
, arg2
);
594 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
597 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg1
, arg2
);
598 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
601 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg1
, arg2
);
602 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
605 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg2
, arg1
);
606 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
609 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg2
, arg1
);
610 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
613 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg2
, arg1
);
614 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
617 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg2
, arg1
);
618 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
625 reloc_pc16(s
->code_ptr
- 4, l
->u
.value
);
627 tcg_out_reloc(s
, s
->code_ptr
- 4, R_MIPS_PC16
, label_index
, 0);
632 /* XXX: we implement it at the target level to avoid having to
633 handle cross basic blocks temporaries */
634 static void tcg_out_brcond2(TCGContext
*s
, TCGCond cond
, int arg1
,
635 int arg2
, int arg3
, int arg4
, int label_index
)
641 tcg_out_brcond(s
, TCG_COND_NE
, arg2
, arg4
, label_index
);
642 tcg_out_brcond(s
, TCG_COND_NE
, arg1
, arg3
, label_index
);
648 tcg_out_brcond(s
, TCG_COND_LT
, arg2
, arg4
, label_index
);
652 tcg_out_brcond(s
, TCG_COND_GT
, arg2
, arg4
, label_index
);
656 tcg_out_brcond(s
, TCG_COND_LTU
, arg2
, arg4
, label_index
);
660 tcg_out_brcond(s
, TCG_COND_GTU
, arg2
, arg4
, label_index
);
666 label_ptr
= s
->code_ptr
;
667 tcg_out_opc_br(s
, OPC_BNE
, arg2
, arg4
);
672 tcg_out_brcond(s
, TCG_COND_EQ
, arg1
, arg3
, label_index
);
676 tcg_out_brcond(s
, TCG_COND_LTU
, arg1
, arg3
, label_index
);
680 tcg_out_brcond(s
, TCG_COND_LEU
, arg1
, arg3
, label_index
);
684 tcg_out_brcond(s
, TCG_COND_GTU
, arg1
, arg3
, label_index
);
688 tcg_out_brcond(s
, TCG_COND_GEU
, arg1
, arg3
, label_index
);
694 reloc_pc16(label_ptr
, (tcg_target_long
) s
->code_ptr
);
697 static void tcg_out_setcond(TCGContext
*s
, TCGCond cond
, int ret
,
703 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, arg2
, 1);
704 } else if (arg2
== 0) {
705 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, arg1
, 1);
707 tcg_out_opc_reg(s
, OPC_XOR
, ret
, arg1
, arg2
);
708 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, ret
, 1);
713 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, arg2
);
714 } else if (arg2
== 0) {
715 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, arg1
);
717 tcg_out_opc_reg(s
, OPC_XOR
, ret
, arg1
, arg2
);
718 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, ret
);
722 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg1
, arg2
);
725 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg1
, arg2
);
728 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg1
, arg2
);
729 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
732 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg1
, arg2
);
733 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
736 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg2
, arg1
);
737 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
740 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg2
, arg1
);
741 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
744 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg2
, arg1
);
747 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg2
, arg1
);
755 /* XXX: we implement it at the target level to avoid having to
756 handle cross basic blocks temporaries */
757 static void tcg_out_setcond2(TCGContext
*s
, TCGCond cond
, int ret
,
758 int arg1
, int arg2
, int arg3
, int arg4
)
762 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_AT
, arg2
, arg4
);
763 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_T0
, arg1
, arg3
);
764 tcg_out_opc_reg(s
, OPC_AND
, ret
, TCG_REG_AT
, TCG_REG_T0
);
767 tcg_out_setcond(s
, TCG_COND_NE
, TCG_REG_AT
, arg2
, arg4
);
768 tcg_out_setcond(s
, TCG_COND_NE
, TCG_REG_T0
, arg1
, arg3
);
769 tcg_out_opc_reg(s
, OPC_OR
, ret
, TCG_REG_AT
, TCG_REG_T0
);
773 tcg_out_setcond(s
, TCG_COND_LT
, TCG_REG_AT
, arg2
, arg4
);
777 tcg_out_setcond(s
, TCG_COND_GT
, TCG_REG_AT
, arg2
, arg4
);
781 tcg_out_setcond(s
, TCG_COND_LTU
, TCG_REG_AT
, arg2
, arg4
);
785 tcg_out_setcond(s
, TCG_COND_GTU
, TCG_REG_AT
, arg2
, arg4
);
792 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_T0
, arg2
, arg4
);
797 tcg_out_setcond(s
, TCG_COND_LTU
, ret
, arg1
, arg3
);
801 tcg_out_setcond(s
, TCG_COND_LEU
, ret
, arg1
, arg3
);
805 tcg_out_setcond(s
, TCG_COND_GTU
, ret
, arg1
, arg3
);
809 tcg_out_setcond(s
, TCG_COND_GEU
, ret
, arg1
, arg3
);
815 tcg_out_opc_reg(s
, OPC_AND
, ret
, ret
, TCG_REG_T0
);
816 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
819 #if defined(CONFIG_SOFTMMU)
821 #include "../../softmmu_defs.h"
823 /* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
825 static const void * const qemu_ld_helpers
[4] = {
832 /* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
833 uintxx_t val, int mmu_idx) */
834 static const void * const qemu_st_helpers
[4] = {
842 static void tcg_out_qemu_ld(TCGContext
*s
, const TCGArg
*args
,
845 int addr_regl
, addr_meml
;
846 int data_regl
, data_regh
, data_reg1
, data_reg2
;
847 int mem_index
, s_bits
;
848 #if defined(CONFIG_SOFTMMU)
849 void *label1_ptr
, *label2_ptr
;
852 #if TARGET_LONG_BITS == 64
853 # if defined(CONFIG_SOFTMMU)
856 int addr_regh
, addr_memh
;
864 #if TARGET_LONG_BITS == 64
871 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
872 data_reg1
= data_regh
;
873 data_reg2
= data_regl
;
875 data_reg1
= data_regl
;
876 data_reg2
= data_regh
;
879 data_reg1
= data_regl
;
882 #if TARGET_LONG_BITS == 64
883 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
894 #if defined(CONFIG_SOFTMMU)
895 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_A0
, addr_regl
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
896 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_A0
, TCG_REG_A0
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
897 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, TCG_AREG0
);
898 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
899 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
) + addr_meml
);
900 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T0
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
901 tcg_out_opc_reg(s
, OPC_AND
, TCG_REG_T0
, TCG_REG_T0
, addr_regl
);
903 # if TARGET_LONG_BITS == 64
904 label3_ptr
= s
->code_ptr
;
905 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_T0
, TCG_REG_AT
);
908 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
909 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
) + addr_memh
);
911 label1_ptr
= s
->code_ptr
;
912 tcg_out_opc_br(s
, OPC_BEQ
, addr_regh
, TCG_REG_AT
);
915 reloc_pc16(label3_ptr
, (tcg_target_long
) s
->code_ptr
);
917 label1_ptr
= s
->code_ptr
;
918 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_T0
, TCG_REG_AT
);
924 tcg_out_call_iarg_reg32(s
, &arg_num
, TCG_AREG0
);
925 # if TARGET_LONG_BITS == 64
926 tcg_out_call_iarg_reg64(s
, &arg_num
, addr_regl
, addr_regh
);
928 tcg_out_call_iarg_reg32(s
, &arg_num
, addr_regl
);
930 tcg_out_call_iarg_imm32(s
, &arg_num
, mem_index
);
931 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T9
, (tcg_target_long
)qemu_ld_helpers
[s_bits
]);
932 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
937 tcg_out_opc_imm(s
, OPC_ANDI
, data_reg1
, TCG_REG_V0
, 0xff);
940 tcg_out_ext8s(s
, data_reg1
, TCG_REG_V0
);
943 tcg_out_opc_imm(s
, OPC_ANDI
, data_reg1
, TCG_REG_V0
, 0xffff);
946 tcg_out_ext16s(s
, data_reg1
, TCG_REG_V0
);
949 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg1
, TCG_REG_V0
);
952 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg2
, TCG_REG_V1
);
953 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg1
, TCG_REG_V0
);
959 label2_ptr
= s
->code_ptr
;
960 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_ZERO
, TCG_REG_ZERO
);
963 /* label1: fast path */
964 reloc_pc16(label1_ptr
, (tcg_target_long
) s
->code_ptr
);
966 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_A0
, TCG_REG_A0
,
967 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
));
968 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_V0
, TCG_REG_A0
, addr_regl
);
970 if (GUEST_BASE
== (int16_t)GUEST_BASE
) {
971 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_V0
, addr_regl
, GUEST_BASE
);
973 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_V0
, GUEST_BASE
);
974 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_V0
, TCG_REG_V0
, addr_regl
);
980 tcg_out_opc_imm(s
, OPC_LBU
, data_reg1
, TCG_REG_V0
, 0);
983 tcg_out_opc_imm(s
, OPC_LB
, data_reg1
, TCG_REG_V0
, 0);
986 if (TCG_NEED_BSWAP
) {
987 tcg_out_opc_imm(s
, OPC_LHU
, TCG_REG_T0
, TCG_REG_V0
, 0);
988 tcg_out_bswap16(s
, data_reg1
, TCG_REG_T0
);
990 tcg_out_opc_imm(s
, OPC_LHU
, data_reg1
, TCG_REG_V0
, 0);
994 if (TCG_NEED_BSWAP
) {
995 tcg_out_opc_imm(s
, OPC_LHU
, TCG_REG_T0
, TCG_REG_V0
, 0);
996 tcg_out_bswap16s(s
, data_reg1
, TCG_REG_T0
);
998 tcg_out_opc_imm(s
, OPC_LH
, data_reg1
, TCG_REG_V0
, 0);
1002 if (TCG_NEED_BSWAP
) {
1003 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 0);
1004 tcg_out_bswap32(s
, data_reg1
, TCG_REG_T0
);
1006 tcg_out_opc_imm(s
, OPC_LW
, data_reg1
, TCG_REG_V0
, 0);
1010 if (TCG_NEED_BSWAP
) {
1011 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 4);
1012 tcg_out_bswap32(s
, data_reg1
, TCG_REG_T0
);
1013 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 0);
1014 tcg_out_bswap32(s
, data_reg2
, TCG_REG_T0
);
1016 tcg_out_opc_imm(s
, OPC_LW
, data_reg1
, TCG_REG_V0
, 0);
1017 tcg_out_opc_imm(s
, OPC_LW
, data_reg2
, TCG_REG_V0
, 4);
1024 #if defined(CONFIG_SOFTMMU)
1025 reloc_pc16(label2_ptr
, (tcg_target_long
) s
->code_ptr
);
1029 static void tcg_out_qemu_st(TCGContext
*s
, const TCGArg
*args
,
1032 int addr_regl
, addr_meml
;
1033 int data_regl
, data_regh
, data_reg1
, data_reg2
;
1034 int mem_index
, s_bits
;
1035 #if defined(CONFIG_SOFTMMU)
1036 uint8_t *label1_ptr
, *label2_ptr
;
1039 #if TARGET_LONG_BITS == 64
1040 # if defined(CONFIG_SOFTMMU)
1041 uint8_t *label3_ptr
;
1043 int addr_regh
, addr_memh
;
1046 data_regl
= *args
++;
1048 data_regh
= *args
++;
1049 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
1050 data_reg1
= data_regh
;
1051 data_reg2
= data_regl
;
1053 data_reg1
= data_regl
;
1054 data_reg2
= data_regh
;
1057 data_reg1
= data_regl
;
1061 addr_regl
= *args
++;
1062 #if TARGET_LONG_BITS == 64
1063 addr_regh
= *args
++;
1064 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
1077 #if defined(CONFIG_SOFTMMU)
1078 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_A0
, addr_regl
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
1079 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_A0
, TCG_REG_A0
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
1080 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, TCG_AREG0
);
1081 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1082 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
) + addr_meml
);
1083 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T0
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
1084 tcg_out_opc_reg(s
, OPC_AND
, TCG_REG_T0
, TCG_REG_T0
, addr_regl
);
1086 # if TARGET_LONG_BITS == 64
1087 label3_ptr
= s
->code_ptr
;
1088 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_T0
, TCG_REG_AT
);
1091 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1092 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
) + addr_memh
);
1094 label1_ptr
= s
->code_ptr
;
1095 tcg_out_opc_br(s
, OPC_BEQ
, addr_regh
, TCG_REG_AT
);
1098 reloc_pc16(label3_ptr
, (tcg_target_long
) s
->code_ptr
);
1100 label1_ptr
= s
->code_ptr
;
1101 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_T0
, TCG_REG_AT
);
1107 tcg_out_call_iarg_reg32(s
, &arg_num
, TCG_AREG0
);
1108 # if TARGET_LONG_BITS == 64
1109 tcg_out_call_iarg_reg64(s
, &arg_num
, addr_regl
, addr_regh
);
1111 tcg_out_call_iarg_reg32(s
, &arg_num
, addr_regl
);
1115 tcg_out_call_iarg_reg8(s
, &arg_num
, data_regl
);
1118 tcg_out_call_iarg_reg16(s
, &arg_num
, data_regl
);
1121 tcg_out_call_iarg_reg32(s
, &arg_num
, data_regl
);
1124 tcg_out_call_iarg_reg64(s
, &arg_num
, data_regl
, data_regh
);
1129 tcg_out_call_iarg_imm32(s
, &arg_num
, mem_index
);
1130 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T9
, (tcg_target_long
)qemu_st_helpers
[s_bits
]);
1131 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
1134 label2_ptr
= s
->code_ptr
;
1135 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_ZERO
, TCG_REG_ZERO
);
1138 /* label1: fast path */
1139 reloc_pc16(label1_ptr
, (tcg_target_long
) s
->code_ptr
);
1141 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_A0
, TCG_REG_A0
,
1142 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
));
1143 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, addr_regl
);
1145 if (GUEST_BASE
== (int16_t)GUEST_BASE
) {
1146 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_A0
, addr_regl
, GUEST_BASE
);
1148 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_A0
, GUEST_BASE
);
1149 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, addr_regl
);
1156 tcg_out_opc_imm(s
, OPC_SB
, data_reg1
, TCG_REG_A0
, 0);
1159 if (TCG_NEED_BSWAP
) {
1160 tcg_out_bswap16(s
, TCG_REG_T0
, data_reg1
);
1161 tcg_out_opc_imm(s
, OPC_SH
, TCG_REG_T0
, TCG_REG_A0
, 0);
1163 tcg_out_opc_imm(s
, OPC_SH
, data_reg1
, TCG_REG_A0
, 0);
1167 if (TCG_NEED_BSWAP
) {
1168 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg1
);
1169 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 0);
1171 tcg_out_opc_imm(s
, OPC_SW
, data_reg1
, TCG_REG_A0
, 0);
1175 if (TCG_NEED_BSWAP
) {
1176 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg2
);
1177 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 0);
1178 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg1
);
1179 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 4);
1181 tcg_out_opc_imm(s
, OPC_SW
, data_reg1
, TCG_REG_A0
, 0);
1182 tcg_out_opc_imm(s
, OPC_SW
, data_reg2
, TCG_REG_A0
, 4);
1189 #if defined(CONFIG_SOFTMMU)
1190 reloc_pc16(label2_ptr
, (tcg_target_long
) s
->code_ptr
);
1194 static inline void tcg_out_op(TCGContext
*s
, TCGOpcode opc
,
1195 const TCGArg
*args
, const int *const_args
)
1198 case INDEX_op_exit_tb
:
1199 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_V0
, args
[0]);
1200 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_AT
, (tcg_target_long
)tb_ret_addr
);
1201 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_AT
, 0);
1204 case INDEX_op_goto_tb
:
1205 if (s
->tb_jmp_offset
) {
1206 /* direct jump method */
1209 /* indirect jump method */
1210 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, (tcg_target_long
)(s
->tb_next
+ args
[0]));
1211 tcg_out_ld(s
, TCG_TYPE_PTR
, TCG_REG_AT
, TCG_REG_AT
, 0);
1212 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_AT
, 0);
1215 s
->tb_next_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
1218 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, args
[0], 0);
1222 tcg_out_opc_reg(s
, OPC_JR
, 0, args
[0], 0);
1226 tcg_out_brcond(s
, TCG_COND_EQ
, TCG_REG_ZERO
, TCG_REG_ZERO
, args
[0]);
1229 case INDEX_op_mov_i32
:
1230 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1232 case INDEX_op_movi_i32
:
1233 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1236 case INDEX_op_ld8u_i32
:
1237 tcg_out_ldst(s
, OPC_LBU
, args
[0], args
[1], args
[2]);
1239 case INDEX_op_ld8s_i32
:
1240 tcg_out_ldst(s
, OPC_LB
, args
[0], args
[1], args
[2]);
1242 case INDEX_op_ld16u_i32
:
1243 tcg_out_ldst(s
, OPC_LHU
, args
[0], args
[1], args
[2]);
1245 case INDEX_op_ld16s_i32
:
1246 tcg_out_ldst(s
, OPC_LH
, args
[0], args
[1], args
[2]);
1248 case INDEX_op_ld_i32
:
1249 tcg_out_ldst(s
, OPC_LW
, args
[0], args
[1], args
[2]);
1251 case INDEX_op_st8_i32
:
1252 tcg_out_ldst(s
, OPC_SB
, args
[0], args
[1], args
[2]);
1254 case INDEX_op_st16_i32
:
1255 tcg_out_ldst(s
, OPC_SH
, args
[0], args
[1], args
[2]);
1257 case INDEX_op_st_i32
:
1258 tcg_out_ldst(s
, OPC_SW
, args
[0], args
[1], args
[2]);
1261 case INDEX_op_add_i32
:
1262 if (const_args
[2]) {
1263 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[0], args
[1], args
[2]);
1265 tcg_out_opc_reg(s
, OPC_ADDU
, args
[0], args
[1], args
[2]);
1268 case INDEX_op_add2_i32
:
1269 if (const_args
[4]) {
1270 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_AT
, args
[2], args
[4]);
1272 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_AT
, args
[2], args
[4]);
1274 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_T0
, TCG_REG_AT
, args
[2]);
1275 if (const_args
[5]) {
1276 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[1], args
[3], args
[5]);
1278 tcg_out_opc_reg(s
, OPC_ADDU
, args
[1], args
[3], args
[5]);
1280 tcg_out_opc_reg(s
, OPC_ADDU
, args
[1], args
[1], TCG_REG_T0
);
1281 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], TCG_REG_AT
);
1283 case INDEX_op_sub_i32
:
1284 if (const_args
[2]) {
1285 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[0], args
[1], -args
[2]);
1287 tcg_out_opc_reg(s
, OPC_SUBU
, args
[0], args
[1], args
[2]);
1290 case INDEX_op_sub2_i32
:
1291 if (const_args
[4]) {
1292 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_AT
, args
[2], -args
[4]);
1294 tcg_out_opc_reg(s
, OPC_SUBU
, TCG_REG_AT
, args
[2], args
[4]);
1296 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_T0
, args
[2], TCG_REG_AT
);
1297 if (const_args
[5]) {
1298 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[1], args
[3], -args
[5]);
1300 tcg_out_opc_reg(s
, OPC_SUBU
, args
[1], args
[3], args
[5]);
1302 tcg_out_opc_reg(s
, OPC_SUBU
, args
[1], args
[1], TCG_REG_T0
);
1303 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], TCG_REG_AT
);
1305 case INDEX_op_mul_i32
:
1306 tcg_out_opc_reg(s
, OPC_MULT
, 0, args
[1], args
[2]);
1307 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1309 case INDEX_op_mulu2_i32
:
1310 tcg_out_opc_reg(s
, OPC_MULTU
, 0, args
[2], args
[3]);
1311 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1312 tcg_out_opc_reg(s
, OPC_MFHI
, args
[1], 0, 0);
1314 case INDEX_op_div_i32
:
1315 tcg_out_opc_reg(s
, OPC_DIV
, 0, args
[1], args
[2]);
1316 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1318 case INDEX_op_divu_i32
:
1319 tcg_out_opc_reg(s
, OPC_DIVU
, 0, args
[1], args
[2]);
1320 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1322 case INDEX_op_rem_i32
:
1323 tcg_out_opc_reg(s
, OPC_DIV
, 0, args
[1], args
[2]);
1324 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1326 case INDEX_op_remu_i32
:
1327 tcg_out_opc_reg(s
, OPC_DIVU
, 0, args
[1], args
[2]);
1328 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1331 case INDEX_op_and_i32
:
1332 if (const_args
[2]) {
1333 tcg_out_opc_imm(s
, OPC_ANDI
, args
[0], args
[1], args
[2]);
1335 tcg_out_opc_reg(s
, OPC_AND
, args
[0], args
[1], args
[2]);
1338 case INDEX_op_or_i32
:
1339 if (const_args
[2]) {
1340 tcg_out_opc_imm(s
, OPC_ORI
, args
[0], args
[1], args
[2]);
1342 tcg_out_opc_reg(s
, OPC_OR
, args
[0], args
[1], args
[2]);
1345 case INDEX_op_nor_i32
:
1346 tcg_out_opc_reg(s
, OPC_NOR
, args
[0], args
[1], args
[2]);
1348 case INDEX_op_not_i32
:
1349 tcg_out_opc_reg(s
, OPC_NOR
, args
[0], TCG_REG_ZERO
, args
[1]);
1351 case INDEX_op_xor_i32
:
1352 if (const_args
[2]) {
1353 tcg_out_opc_imm(s
, OPC_XORI
, args
[0], args
[1], args
[2]);
1355 tcg_out_opc_reg(s
, OPC_XOR
, args
[0], args
[1], args
[2]);
1359 case INDEX_op_sar_i32
:
1360 if (const_args
[2]) {
1361 tcg_out_opc_sa(s
, OPC_SRA
, args
[0], args
[1], args
[2]);
1363 tcg_out_opc_reg(s
, OPC_SRAV
, args
[0], args
[2], args
[1]);
1366 case INDEX_op_shl_i32
:
1367 if (const_args
[2]) {
1368 tcg_out_opc_sa(s
, OPC_SLL
, args
[0], args
[1], args
[2]);
1370 tcg_out_opc_reg(s
, OPC_SLLV
, args
[0], args
[2], args
[1]);
1373 case INDEX_op_shr_i32
:
1374 if (const_args
[2]) {
1375 tcg_out_opc_sa(s
, OPC_SRL
, args
[0], args
[1], args
[2]);
1377 tcg_out_opc_reg(s
, OPC_SRLV
, args
[0], args
[2], args
[1]);
1381 case INDEX_op_ext8s_i32
:
1382 tcg_out_ext8s(s
, args
[0], args
[1]);
1384 case INDEX_op_ext16s_i32
:
1385 tcg_out_ext16s(s
, args
[0], args
[1]);
1388 case INDEX_op_brcond_i32
:
1389 tcg_out_brcond(s
, args
[2], args
[0], args
[1], args
[3]);
1391 case INDEX_op_brcond2_i32
:
1392 tcg_out_brcond2(s
, args
[4], args
[0], args
[1], args
[2], args
[3], args
[5]);
1395 case INDEX_op_setcond_i32
:
1396 tcg_out_setcond(s
, args
[3], args
[0], args
[1], args
[2]);
1398 case INDEX_op_setcond2_i32
:
1399 tcg_out_setcond2(s
, args
[5], args
[0], args
[1], args
[2], args
[3], args
[4]);
1402 case INDEX_op_qemu_ld8u
:
1403 tcg_out_qemu_ld(s
, args
, 0);
1405 case INDEX_op_qemu_ld8s
:
1406 tcg_out_qemu_ld(s
, args
, 0 | 4);
1408 case INDEX_op_qemu_ld16u
:
1409 tcg_out_qemu_ld(s
, args
, 1);
1411 case INDEX_op_qemu_ld16s
:
1412 tcg_out_qemu_ld(s
, args
, 1 | 4);
1414 case INDEX_op_qemu_ld32
:
1415 tcg_out_qemu_ld(s
, args
, 2);
1417 case INDEX_op_qemu_ld64
:
1418 tcg_out_qemu_ld(s
, args
, 3);
1420 case INDEX_op_qemu_st8
:
1421 tcg_out_qemu_st(s
, args
, 0);
1423 case INDEX_op_qemu_st16
:
1424 tcg_out_qemu_st(s
, args
, 1);
1426 case INDEX_op_qemu_st32
:
1427 tcg_out_qemu_st(s
, args
, 2);
1429 case INDEX_op_qemu_st64
:
1430 tcg_out_qemu_st(s
, args
, 3);
1438 static const TCGTargetOpDef mips_op_defs
[] = {
1439 { INDEX_op_exit_tb
, { } },
1440 { INDEX_op_goto_tb
, { } },
1441 { INDEX_op_call
, { "C" } },
1442 { INDEX_op_jmp
, { "r" } },
1443 { INDEX_op_br
, { } },
1445 { INDEX_op_mov_i32
, { "r", "r" } },
1446 { INDEX_op_movi_i32
, { "r" } },
1447 { INDEX_op_ld8u_i32
, { "r", "r" } },
1448 { INDEX_op_ld8s_i32
, { "r", "r" } },
1449 { INDEX_op_ld16u_i32
, { "r", "r" } },
1450 { INDEX_op_ld16s_i32
, { "r", "r" } },
1451 { INDEX_op_ld_i32
, { "r", "r" } },
1452 { INDEX_op_st8_i32
, { "rZ", "r" } },
1453 { INDEX_op_st16_i32
, { "rZ", "r" } },
1454 { INDEX_op_st_i32
, { "rZ", "r" } },
1456 { INDEX_op_add_i32
, { "r", "rZ", "rJZ" } },
1457 { INDEX_op_mul_i32
, { "r", "rZ", "rZ" } },
1458 { INDEX_op_mulu2_i32
, { "r", "r", "rZ", "rZ" } },
1459 { INDEX_op_div_i32
, { "r", "rZ", "rZ" } },
1460 { INDEX_op_divu_i32
, { "r", "rZ", "rZ" } },
1461 { INDEX_op_rem_i32
, { "r", "rZ", "rZ" } },
1462 { INDEX_op_remu_i32
, { "r", "rZ", "rZ" } },
1463 { INDEX_op_sub_i32
, { "r", "rZ", "rJZ" } },
1465 { INDEX_op_and_i32
, { "r", "rZ", "rIZ" } },
1466 { INDEX_op_nor_i32
, { "r", "rZ", "rZ" } },
1467 { INDEX_op_not_i32
, { "r", "rZ" } },
1468 { INDEX_op_or_i32
, { "r", "rZ", "rIZ" } },
1469 { INDEX_op_xor_i32
, { "r", "rZ", "rIZ" } },
1471 { INDEX_op_shl_i32
, { "r", "rZ", "riZ" } },
1472 { INDEX_op_shr_i32
, { "r", "rZ", "riZ" } },
1473 { INDEX_op_sar_i32
, { "r", "rZ", "riZ" } },
1475 { INDEX_op_ext8s_i32
, { "r", "rZ" } },
1476 { INDEX_op_ext16s_i32
, { "r", "rZ" } },
1478 { INDEX_op_brcond_i32
, { "rZ", "rZ" } },
1479 { INDEX_op_setcond_i32
, { "r", "rZ", "rZ" } },
1480 { INDEX_op_setcond2_i32
, { "r", "rZ", "rZ", "rZ", "rZ" } },
1482 { INDEX_op_add2_i32
, { "r", "r", "rZ", "rZ", "rJZ", "rJZ" } },
1483 { INDEX_op_sub2_i32
, { "r", "r", "rZ", "rZ", "rJZ", "rJZ" } },
1484 { INDEX_op_brcond2_i32
, { "rZ", "rZ", "rZ", "rZ" } },
1486 #if TARGET_LONG_BITS == 32
1487 { INDEX_op_qemu_ld8u
, { "L", "lZ" } },
1488 { INDEX_op_qemu_ld8s
, { "L", "lZ" } },
1489 { INDEX_op_qemu_ld16u
, { "L", "lZ" } },
1490 { INDEX_op_qemu_ld16s
, { "L", "lZ" } },
1491 { INDEX_op_qemu_ld32
, { "L", "lZ" } },
1492 { INDEX_op_qemu_ld64
, { "L", "L", "lZ" } },
1494 { INDEX_op_qemu_st8
, { "SZ", "SZ" } },
1495 { INDEX_op_qemu_st16
, { "SZ", "SZ" } },
1496 { INDEX_op_qemu_st32
, { "SZ", "SZ" } },
1497 { INDEX_op_qemu_st64
, { "SZ", "SZ", "SZ" } },
1499 { INDEX_op_qemu_ld8u
, { "L", "lZ", "lZ" } },
1500 { INDEX_op_qemu_ld8s
, { "L", "lZ", "lZ" } },
1501 { INDEX_op_qemu_ld16u
, { "L", "lZ", "lZ" } },
1502 { INDEX_op_qemu_ld16s
, { "L", "lZ", "lZ" } },
1503 { INDEX_op_qemu_ld32
, { "L", "lZ", "lZ" } },
1504 { INDEX_op_qemu_ld64
, { "L", "L", "lZ", "lZ" } },
1506 { INDEX_op_qemu_st8
, { "SZ", "SZ", "SZ" } },
1507 { INDEX_op_qemu_st16
, { "SZ", "SZ", "SZ" } },
1508 { INDEX_op_qemu_st32
, { "SZ", "SZ", "SZ" } },
1509 { INDEX_op_qemu_st64
, { "SZ", "SZ", "SZ", "SZ" } },
1514 static int tcg_target_callee_save_regs
[] = {
1515 TCG_REG_S0
, /* used for the global env (TCG_AREG0) */
1525 TCG_REG_RA
, /* should be last for ABI compliance */
1528 /* Generate global QEMU prologue and epilogue code */
1529 static void tcg_target_qemu_prologue(TCGContext
*s
)
1533 /* reserve some stack space */
1534 frame_size
= ARRAY_SIZE(tcg_target_callee_save_regs
) * 4
1535 + TCG_STATIC_CALL_ARGS_SIZE
;
1536 frame_size
= (frame_size
+ TCG_TARGET_STACK_ALIGN
- 1) &
1537 ~(TCG_TARGET_STACK_ALIGN
- 1);
1540 tcg_out_addi(s
, TCG_REG_SP
, -frame_size
);
1541 for(i
= 0 ; i
< ARRAY_SIZE(tcg_target_callee_save_regs
) ; i
++) {
1542 tcg_out_st(s
, TCG_TYPE_I32
, tcg_target_callee_save_regs
[i
],
1543 TCG_REG_SP
, TCG_STATIC_CALL_ARGS_SIZE
+ i
* 4);
1546 /* Call generated code */
1547 tcg_out_opc_reg(s
, OPC_JR
, 0, tcg_target_call_iarg_regs
[1], 0);
1548 tcg_out_mov(s
, TCG_TYPE_PTR
, TCG_AREG0
, tcg_target_call_iarg_regs
[0]);
1549 tb_ret_addr
= s
->code_ptr
;
1552 for(i
= 0 ; i
< ARRAY_SIZE(tcg_target_callee_save_regs
) ; i
++) {
1553 tcg_out_ld(s
, TCG_TYPE_I32
, tcg_target_callee_save_regs
[i
],
1554 TCG_REG_SP
, TCG_STATIC_CALL_ARGS_SIZE
+ i
* 4);
1557 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_RA
, 0);
1558 tcg_out_addi(s
, TCG_REG_SP
, frame_size
);
1561 static void tcg_target_init(TCGContext
*s
)
1563 tcg_regset_set(tcg_target_available_regs
[TCG_TYPE_I32
], 0xffffffff);
1564 tcg_regset_set(tcg_target_call_clobber_regs
,
1581 tcg_regset_clear(s
->reserved_regs
);
1582 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_ZERO
); /* zero register */
1583 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_K0
); /* kernel use only */
1584 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_K1
); /* kernel use only */
1585 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_AT
); /* internal use */
1586 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_T0
); /* internal use */
1587 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_RA
); /* return address */
1588 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_SP
); /* stack pointer */
1590 tcg_add_target_add_op_defs(mips_op_defs
);
1591 tcg_set_frame(s
, TCG_AREG0
, offsetof(CPUArchState
, temp_buf
),
1592 CPU_TEMP_BUF_NLONGS
* sizeof(long));