2 * Tiny Code Generator for QEMU
4 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
5 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
6 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
8 * Permission is hereby granted, free of charge, to any person obtaining a copy
9 * of this software and associated documentation files (the "Software"), to deal
10 * in the Software without restriction, including without limitation the rights
11 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 * copies of the Software, and to permit persons to whom the Software is
13 * furnished to do so, subject to the following conditions:
15 * The above copyright notice and this permission notice shall be included in
16 * all copies or substantial portions of the Software.
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
21 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
27 #if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
28 # define TCG_NEED_BSWAP 0
30 # define TCG_NEED_BSWAP 1
34 static const char * const tcg_target_reg_names
[TCG_TARGET_NB_REGS
] = {
70 /* check if we really need so many registers :P */
71 static const TCGReg tcg_target_reg_alloc_order
[] = {
97 static const TCGReg tcg_target_call_iarg_regs
[4] = {
104 static const TCGReg tcg_target_call_oarg_regs
[2] = {
109 static uint8_t *tb_ret_addr
;
111 static inline uint32_t reloc_lo16_val (void *pc
, tcg_target_long target
)
113 return target
& 0xffff;
116 static inline void reloc_lo16 (void *pc
, tcg_target_long target
)
118 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
119 | reloc_lo16_val(pc
, target
);
122 static inline uint32_t reloc_hi16_val (void *pc
, tcg_target_long target
)
124 return (target
>> 16) & 0xffff;
127 static inline void reloc_hi16 (void *pc
, tcg_target_long target
)
129 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
130 | reloc_hi16_val(pc
, target
);
133 static inline uint32_t reloc_pc16_val (void *pc
, tcg_target_long target
)
137 disp
= target
- (tcg_target_long
) pc
- 4;
138 if (disp
!= (disp
<< 14) >> 14) {
142 return (disp
>> 2) & 0xffff;
145 static inline void reloc_pc16 (void *pc
, tcg_target_long target
)
147 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
148 | reloc_pc16_val(pc
, target
);
151 static inline uint32_t reloc_26_val (void *pc
, tcg_target_long target
)
153 if ((((tcg_target_long
)pc
+ 4) & 0xf0000000) != (target
& 0xf0000000)) {
157 return (target
>> 2) & 0x3ffffff;
160 static inline void reloc_pc26 (void *pc
, tcg_target_long target
)
162 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0x3ffffff)
163 | reloc_26_val(pc
, target
);
166 static void patch_reloc(uint8_t *code_ptr
, int type
,
167 tcg_target_long value
, tcg_target_long addend
)
172 reloc_lo16(code_ptr
, value
);
175 reloc_hi16(code_ptr
, value
);
178 reloc_pc16(code_ptr
, value
);
181 reloc_pc26(code_ptr
, value
);
/* maximum number of register used for input function arguments */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
{
    /* o32 calling convention: the first four integer arguments go in
       registers (see tcg_target_call_iarg_regs[4] above).  */
    return 4;
}
194 /* parse target specific constraints */
195 static int target_parse_constraint(TCGArgConstraint
*ct
, const char **pct_str
)
202 ct
->ct
|= TCG_CT_REG
;
203 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
206 ct
->ct
|= TCG_CT_REG
;
207 tcg_regset_clear(ct
->u
.regs
);
208 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_T9
);
210 case 'L': /* qemu_ld output arg constraint */
211 ct
->ct
|= TCG_CT_REG
;
212 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
213 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_V0
);
215 case 'l': /* qemu_ld input arg constraint */
216 ct
->ct
|= TCG_CT_REG
;
217 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
218 #if defined(CONFIG_SOFTMMU)
219 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A0
);
220 # if (TARGET_LONG_BITS == 64)
221 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A2
);
225 case 'S': /* qemu_st constraint */
226 ct
->ct
|= TCG_CT_REG
;
227 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
228 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A0
);
229 #if defined(CONFIG_SOFTMMU)
230 # if (TARGET_LONG_BITS == 32)
231 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A1
);
233 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A2
);
234 # if TARGET_LONG_BITS == 64
235 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A3
);
240 ct
->ct
|= TCG_CT_CONST_U16
;
243 ct
->ct
|= TCG_CT_CONST_S16
;
246 /* We are cheating a bit here, using the fact that the register
247 ZERO is also the register number 0. Hence there is no need
248 to check for const_args in each instruction. */
249 ct
->ct
|= TCG_CT_CONST_ZERO
;
259 /* test if a constant matches the constraint */
260 static inline int tcg_target_const_match(tcg_target_long val
,
261 const TCGArgConstraint
*arg_ct
)
265 if (ct
& TCG_CT_CONST
)
267 else if ((ct
& TCG_CT_CONST_ZERO
) && val
== 0)
269 else if ((ct
& TCG_CT_CONST_U16
) && val
== (uint16_t)val
)
271 else if ((ct
& TCG_CT_CONST_S16
) && val
== (int16_t)val
)
277 /* instruction opcodes */
279 OPC_BEQ
= 0x04 << 26,
280 OPC_BNE
= 0x05 << 26,
281 OPC_BLEZ
= 0x06 << 26,
282 OPC_BGTZ
= 0x07 << 26,
283 OPC_ADDIU
= 0x09 << 26,
284 OPC_SLTI
= 0x0A << 26,
285 OPC_SLTIU
= 0x0B << 26,
286 OPC_ANDI
= 0x0C << 26,
287 OPC_ORI
= 0x0D << 26,
288 OPC_XORI
= 0x0E << 26,
289 OPC_LUI
= 0x0F << 26,
293 OPC_LBU
= 0x24 << 26,
294 OPC_LHU
= 0x25 << 26,
295 OPC_LWU
= 0x27 << 26,
300 OPC_SPECIAL
= 0x00 << 26,
301 OPC_SLL
= OPC_SPECIAL
| 0x00,
302 OPC_SRL
= OPC_SPECIAL
| 0x02,
303 OPC_SRA
= OPC_SPECIAL
| 0x03,
304 OPC_SLLV
= OPC_SPECIAL
| 0x04,
305 OPC_SRLV
= OPC_SPECIAL
| 0x06,
306 OPC_SRAV
= OPC_SPECIAL
| 0x07,
307 OPC_JR
= OPC_SPECIAL
| 0x08,
308 OPC_JALR
= OPC_SPECIAL
| 0x09,
309 OPC_MFHI
= OPC_SPECIAL
| 0x10,
310 OPC_MFLO
= OPC_SPECIAL
| 0x12,
311 OPC_MULT
= OPC_SPECIAL
| 0x18,
312 OPC_MULTU
= OPC_SPECIAL
| 0x19,
313 OPC_DIV
= OPC_SPECIAL
| 0x1A,
314 OPC_DIVU
= OPC_SPECIAL
| 0x1B,
315 OPC_ADDU
= OPC_SPECIAL
| 0x21,
316 OPC_SUBU
= OPC_SPECIAL
| 0x23,
317 OPC_AND
= OPC_SPECIAL
| 0x24,
318 OPC_OR
= OPC_SPECIAL
| 0x25,
319 OPC_XOR
= OPC_SPECIAL
| 0x26,
320 OPC_NOR
= OPC_SPECIAL
| 0x27,
321 OPC_SLT
= OPC_SPECIAL
| 0x2A,
322 OPC_SLTU
= OPC_SPECIAL
| 0x2B,
324 OPC_REGIMM
= 0x01 << 26,
325 OPC_BLTZ
= OPC_REGIMM
| (0x00 << 16),
326 OPC_BGEZ
= OPC_REGIMM
| (0x01 << 16),
328 OPC_SPECIAL3
= 0x1f << 26,
329 OPC_WSBH
= OPC_SPECIAL3
| 0x0a0,
330 OPC_SEB
= OPC_SPECIAL3
| 0x420,
331 OPC_SEH
= OPC_SPECIAL3
| 0x620,
337 static inline void tcg_out_opc_reg(TCGContext
*s
, int opc
,
338 TCGReg rd
, TCGReg rs
, TCGReg rt
)
343 inst
|= (rs
& 0x1F) << 21;
344 inst
|= (rt
& 0x1F) << 16;
345 inst
|= (rd
& 0x1F) << 11;
352 static inline void tcg_out_opc_imm(TCGContext
*s
, int opc
,
353 TCGReg rt
, TCGReg rs
, TCGArg imm
)
358 inst
|= (rs
& 0x1F) << 21;
359 inst
|= (rt
& 0x1F) << 16;
360 inst
|= (imm
& 0xffff);
367 static inline void tcg_out_opc_br(TCGContext
*s
, int opc
,
368 TCGReg rt
, TCGReg rs
)
370 /* We pay attention here to not modify the branch target by reading
371 the existing value and using it again. This ensure that caches and
372 memory are kept coherent during retranslation. */
373 uint16_t offset
= (uint16_t)(*(uint32_t *) s
->code_ptr
);
375 tcg_out_opc_imm(s
, opc
, rt
, rs
, offset
);
381 static inline void tcg_out_opc_sa(TCGContext
*s
, int opc
,
382 TCGReg rd
, TCGReg rt
, TCGArg sa
)
387 inst
|= (rt
& 0x1F) << 16;
388 inst
|= (rd
& 0x1F) << 11;
389 inst
|= (sa
& 0x1F) << 6;
394 static inline void tcg_out_nop(TCGContext
*s
)
399 static inline void tcg_out_mov(TCGContext
*s
, TCGType type
,
400 TCGReg ret
, TCGReg arg
)
402 /* Simple reg-reg move, optimising out the 'do nothing' case */
404 tcg_out_opc_reg(s
, OPC_ADDU
, ret
, arg
, TCG_REG_ZERO
);
408 static inline void tcg_out_movi(TCGContext
*s
, TCGType type
,
409 TCGReg reg
, tcg_target_long arg
)
411 if (arg
== (int16_t)arg
) {
412 tcg_out_opc_imm(s
, OPC_ADDIU
, reg
, TCG_REG_ZERO
, arg
);
413 } else if (arg
== (uint16_t)arg
) {
414 tcg_out_opc_imm(s
, OPC_ORI
, reg
, TCG_REG_ZERO
, arg
);
416 tcg_out_opc_imm(s
, OPC_LUI
, reg
, 0, arg
>> 16);
417 tcg_out_opc_imm(s
, OPC_ORI
, reg
, reg
, arg
& 0xffff);
421 static inline void tcg_out_bswap16(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
423 #ifdef _MIPS_ARCH_MIPS32R2
424 tcg_out_opc_reg(s
, OPC_WSBH
, ret
, 0, arg
);
426 /* ret and arg can't be register at */
427 if (ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
431 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
432 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 8);
433 tcg_out_opc_imm(s
, OPC_ANDI
, ret
, ret
, 0xff00);
434 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
438 static inline void tcg_out_bswap16s(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
440 #ifdef _MIPS_ARCH_MIPS32R2
441 tcg_out_opc_reg(s
, OPC_WSBH
, ret
, 0, arg
);
442 tcg_out_opc_reg(s
, OPC_SEH
, ret
, 0, ret
);
444 /* ret and arg can't be register at */
445 if (ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
449 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
450 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
451 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 16);
452 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
456 static inline void tcg_out_bswap32(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
458 #ifdef _MIPS_ARCH_MIPS32R2
459 tcg_out_opc_reg(s
, OPC_WSBH
, ret
, 0, arg
);
460 tcg_out_opc_sa(s
, OPC_ROTR
, ret
, ret
, 16);
462 /* ret and arg must be different and can't be register at */
463 if (ret
== arg
|| ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
467 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
469 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 24);
470 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
472 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, arg
, 0xff00);
473 tcg_out_opc_sa(s
, OPC_SLL
, TCG_REG_AT
, TCG_REG_AT
, 8);
474 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
476 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
477 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, TCG_REG_AT
, 0xff00);
478 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
482 static inline void tcg_out_ext8s(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
484 #ifdef _MIPS_ARCH_MIPS32R2
485 tcg_out_opc_reg(s
, OPC_SEB
, ret
, 0, arg
);
487 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
488 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 24);
492 static inline void tcg_out_ext16s(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
494 #ifdef _MIPS_ARCH_MIPS32R2
495 tcg_out_opc_reg(s
, OPC_SEH
, ret
, 0, arg
);
497 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 16);
498 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 16);
502 static inline void tcg_out_ldst(TCGContext
*s
, int opc
, TCGArg arg
,
503 TCGReg arg1
, TCGArg arg2
)
505 if (arg2
== (int16_t) arg2
) {
506 tcg_out_opc_imm(s
, opc
, arg
, arg1
, arg2
);
508 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, arg2
);
509 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_AT
, TCG_REG_AT
, arg1
);
510 tcg_out_opc_imm(s
, opc
, arg
, TCG_REG_AT
, 0);
514 static inline void tcg_out_ld(TCGContext
*s
, TCGType type
, TCGReg arg
,
515 TCGReg arg1
, tcg_target_long arg2
)
517 tcg_out_ldst(s
, OPC_LW
, arg
, arg1
, arg2
);
520 static inline void tcg_out_st(TCGContext
*s
, TCGType type
, TCGReg arg
,
521 TCGReg arg1
, tcg_target_long arg2
)
523 tcg_out_ldst(s
, OPC_SW
, arg
, arg1
, arg2
);
526 static inline void tcg_out_addi(TCGContext
*s
, TCGReg reg
, TCGArg val
)
528 if (val
== (int16_t)val
) {
529 tcg_out_opc_imm(s
, OPC_ADDIU
, reg
, reg
, val
);
531 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, val
);
532 tcg_out_opc_reg(s
, OPC_ADDU
, reg
, reg
, TCG_REG_AT
);
536 /* Helper routines for marshalling helper function arguments into
537 * the correct registers and stack.
538 * arg_num is where we want to put this argument, and is updated to be ready
539 * for the next call. arg is the argument itself. Note that arg_num 0..3 is
540 * real registers, 4+ on stack.
542 * We provide routines for arguments which are: immediate, 32 bit
543 * value in register, 16 and 8 bit values in register (which must be zero
544 * extended before use) and 64 bit value in a lo:hi register pair.
546 #define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM) \
547 static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM) \
549 if (*arg_num < 4) { \
550 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
552 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT); \
553 tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
557 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
558 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
559 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8
, TCGReg arg
)
560 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
561 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
562 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
563 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16
, TCGReg arg
)
564 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
565 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
566 tcg_out_movi(s, TCG_TYPE_I32, A, arg);
567 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32
, TCGArg arg
)
568 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
570 /* We don't use the macro for this one to avoid an unnecessary reg-reg
571 move when storing to the stack. */
572 static inline void tcg_out_call_iarg_reg32(TCGContext
*s
, int *arg_num
,
576 tcg_out_mov(s
, TCG_TYPE_I32
, tcg_target_call_iarg_regs
[*arg_num
], arg
);
578 tcg_out_st(s
, TCG_TYPE_I32
, arg
, TCG_REG_SP
, 4 * (*arg_num
));
583 static inline void tcg_out_call_iarg_reg64(TCGContext
*s
, int *arg_num
,
584 TCGReg arg_low
, TCGReg arg_high
)
586 (*arg_num
) = (*arg_num
+ 1) & ~1;
588 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
589 tcg_out_call_iarg_reg32(s
, arg_num
, arg_high
);
590 tcg_out_call_iarg_reg32(s
, arg_num
, arg_low
);
592 tcg_out_call_iarg_reg32(s
, arg_num
, arg_low
);
593 tcg_out_call_iarg_reg32(s
, arg_num
, arg_high
);
597 static void tcg_out_brcond(TCGContext
*s
, TCGCond cond
, TCGArg arg1
,
598 TCGArg arg2
, int label_index
)
600 TCGLabel
*l
= &s
->labels
[label_index
];
604 tcg_out_opc_br(s
, OPC_BEQ
, arg1
, arg2
);
607 tcg_out_opc_br(s
, OPC_BNE
, arg1
, arg2
);
611 tcg_out_opc_br(s
, OPC_BLTZ
, 0, arg1
);
613 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg1
, arg2
);
614 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
618 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg1
, arg2
);
619 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
623 tcg_out_opc_br(s
, OPC_BGEZ
, 0, arg1
);
625 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg1
, arg2
);
626 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
630 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg1
, arg2
);
631 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
635 tcg_out_opc_br(s
, OPC_BLEZ
, 0, arg1
);
637 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg2
, arg1
);
638 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
642 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg2
, arg1
);
643 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
647 tcg_out_opc_br(s
, OPC_BGTZ
, 0, arg1
);
649 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg2
, arg1
);
650 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
654 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg2
, arg1
);
655 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
662 reloc_pc16(s
->code_ptr
- 4, l
->u
.value
);
664 tcg_out_reloc(s
, s
->code_ptr
- 4, R_MIPS_PC16
, label_index
, 0);
669 /* XXX: we implement it at the target level to avoid having to
670 handle cross basic blocks temporaries */
671 static void tcg_out_brcond2(TCGContext
*s
, TCGCond cond
, TCGArg arg1
,
672 TCGArg arg2
, TCGArg arg3
, TCGArg arg4
,
679 tcg_out_brcond(s
, TCG_COND_NE
, arg2
, arg4
, label_index
);
680 tcg_out_brcond(s
, TCG_COND_NE
, arg1
, arg3
, label_index
);
686 tcg_out_brcond(s
, TCG_COND_LT
, arg2
, arg4
, label_index
);
690 tcg_out_brcond(s
, TCG_COND_GT
, arg2
, arg4
, label_index
);
694 tcg_out_brcond(s
, TCG_COND_LTU
, arg2
, arg4
, label_index
);
698 tcg_out_brcond(s
, TCG_COND_GTU
, arg2
, arg4
, label_index
);
704 label_ptr
= s
->code_ptr
;
705 tcg_out_opc_br(s
, OPC_BNE
, arg2
, arg4
);
710 tcg_out_brcond(s
, TCG_COND_EQ
, arg1
, arg3
, label_index
);
714 tcg_out_brcond(s
, TCG_COND_LTU
, arg1
, arg3
, label_index
);
718 tcg_out_brcond(s
, TCG_COND_LEU
, arg1
, arg3
, label_index
);
722 tcg_out_brcond(s
, TCG_COND_GTU
, arg1
, arg3
, label_index
);
726 tcg_out_brcond(s
, TCG_COND_GEU
, arg1
, arg3
, label_index
);
732 reloc_pc16(label_ptr
, (tcg_target_long
) s
->code_ptr
);
735 static void tcg_out_setcond(TCGContext
*s
, TCGCond cond
, TCGReg ret
,
736 TCGArg arg1
, TCGArg arg2
)
741 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, arg2
, 1);
742 } else if (arg2
== 0) {
743 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, arg1
, 1);
745 tcg_out_opc_reg(s
, OPC_XOR
, ret
, arg1
, arg2
);
746 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, ret
, 1);
751 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, arg2
);
752 } else if (arg2
== 0) {
753 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, arg1
);
755 tcg_out_opc_reg(s
, OPC_XOR
, ret
, arg1
, arg2
);
756 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, ret
);
760 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg1
, arg2
);
763 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg1
, arg2
);
766 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg1
, arg2
);
767 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
770 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg1
, arg2
);
771 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
774 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg2
, arg1
);
775 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
778 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg2
, arg1
);
779 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
782 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg2
, arg1
);
785 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg2
, arg1
);
793 /* XXX: we implement it at the target level to avoid having to
794 handle cross basic blocks temporaries */
795 static void tcg_out_setcond2(TCGContext
*s
, TCGCond cond
, TCGReg ret
,
796 TCGArg arg1
, TCGArg arg2
, TCGArg arg3
, TCGArg arg4
)
800 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_AT
, arg2
, arg4
);
801 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_T0
, arg1
, arg3
);
802 tcg_out_opc_reg(s
, OPC_AND
, ret
, TCG_REG_AT
, TCG_REG_T0
);
805 tcg_out_setcond(s
, TCG_COND_NE
, TCG_REG_AT
, arg2
, arg4
);
806 tcg_out_setcond(s
, TCG_COND_NE
, TCG_REG_T0
, arg1
, arg3
);
807 tcg_out_opc_reg(s
, OPC_OR
, ret
, TCG_REG_AT
, TCG_REG_T0
);
811 tcg_out_setcond(s
, TCG_COND_LT
, TCG_REG_AT
, arg2
, arg4
);
815 tcg_out_setcond(s
, TCG_COND_GT
, TCG_REG_AT
, arg2
, arg4
);
819 tcg_out_setcond(s
, TCG_COND_LTU
, TCG_REG_AT
, arg2
, arg4
);
823 tcg_out_setcond(s
, TCG_COND_GTU
, TCG_REG_AT
, arg2
, arg4
);
830 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_T0
, arg2
, arg4
);
835 tcg_out_setcond(s
, TCG_COND_LTU
, ret
, arg1
, arg3
);
839 tcg_out_setcond(s
, TCG_COND_LEU
, ret
, arg1
, arg3
);
843 tcg_out_setcond(s
, TCG_COND_GTU
, ret
, arg1
, arg3
);
847 tcg_out_setcond(s
, TCG_COND_GEU
, ret
, arg1
, arg3
);
853 tcg_out_opc_reg(s
, OPC_AND
, ret
, ret
, TCG_REG_T0
);
854 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
857 #if defined(CONFIG_SOFTMMU)
859 #include "../../softmmu_defs.h"
861 /* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
863 static const void * const qemu_ld_helpers
[4] = {
870 /* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
871 uintxx_t val, int mmu_idx) */
872 static const void * const qemu_st_helpers
[4] = {
880 static void tcg_out_qemu_ld(TCGContext
*s
, const TCGArg
*args
,
883 TCGReg addr_regl
, data_regl
, data_regh
, data_reg1
, data_reg2
;
884 #if defined(CONFIG_SOFTMMU)
885 void *label1_ptr
, *label2_ptr
;
887 int mem_index
, s_bits
;
889 # if TARGET_LONG_BITS == 64
901 #if defined(CONFIG_SOFTMMU)
902 # if TARGET_LONG_BITS == 64
904 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
919 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
920 data_reg1
= data_regh
;
921 data_reg2
= data_regl
;
923 data_reg1
= data_regl
;
924 data_reg2
= data_regh
;
927 data_reg1
= data_regl
;
930 #if defined(CONFIG_SOFTMMU)
931 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_A0
, addr_regl
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
932 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_A0
, TCG_REG_A0
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
933 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, TCG_AREG0
);
934 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
935 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
) + addr_meml
);
936 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T0
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
937 tcg_out_opc_reg(s
, OPC_AND
, TCG_REG_T0
, TCG_REG_T0
, addr_regl
);
939 # if TARGET_LONG_BITS == 64
940 label3_ptr
= s
->code_ptr
;
941 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_T0
, TCG_REG_AT
);
944 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
945 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
) + addr_memh
);
947 label1_ptr
= s
->code_ptr
;
948 tcg_out_opc_br(s
, OPC_BEQ
, addr_regh
, TCG_REG_AT
);
951 reloc_pc16(label3_ptr
, (tcg_target_long
) s
->code_ptr
);
953 label1_ptr
= s
->code_ptr
;
954 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_T0
, TCG_REG_AT
);
960 tcg_out_call_iarg_reg32(s
, &arg_num
, TCG_AREG0
);
961 # if TARGET_LONG_BITS == 64
962 tcg_out_call_iarg_reg64(s
, &arg_num
, addr_regl
, addr_regh
);
964 tcg_out_call_iarg_reg32(s
, &arg_num
, addr_regl
);
966 tcg_out_call_iarg_imm32(s
, &arg_num
, mem_index
);
967 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T9
, (tcg_target_long
)qemu_ld_helpers
[s_bits
]);
968 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
973 tcg_out_opc_imm(s
, OPC_ANDI
, data_reg1
, TCG_REG_V0
, 0xff);
976 tcg_out_ext8s(s
, data_reg1
, TCG_REG_V0
);
979 tcg_out_opc_imm(s
, OPC_ANDI
, data_reg1
, TCG_REG_V0
, 0xffff);
982 tcg_out_ext16s(s
, data_reg1
, TCG_REG_V0
);
985 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg1
, TCG_REG_V0
);
988 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg2
, TCG_REG_V1
);
989 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg1
, TCG_REG_V0
);
995 label2_ptr
= s
->code_ptr
;
996 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_ZERO
, TCG_REG_ZERO
);
999 /* label1: fast path */
1000 reloc_pc16(label1_ptr
, (tcg_target_long
) s
->code_ptr
);
1002 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_A0
, TCG_REG_A0
,
1003 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
));
1004 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_V0
, TCG_REG_A0
, addr_regl
);
1006 if (GUEST_BASE
== (int16_t)GUEST_BASE
) {
1007 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_V0
, addr_regl
, GUEST_BASE
);
1009 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_V0
, GUEST_BASE
);
1010 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_V0
, TCG_REG_V0
, addr_regl
);
1016 tcg_out_opc_imm(s
, OPC_LBU
, data_reg1
, TCG_REG_V0
, 0);
1019 tcg_out_opc_imm(s
, OPC_LB
, data_reg1
, TCG_REG_V0
, 0);
1022 if (TCG_NEED_BSWAP
) {
1023 tcg_out_opc_imm(s
, OPC_LHU
, TCG_REG_T0
, TCG_REG_V0
, 0);
1024 tcg_out_bswap16(s
, data_reg1
, TCG_REG_T0
);
1026 tcg_out_opc_imm(s
, OPC_LHU
, data_reg1
, TCG_REG_V0
, 0);
1030 if (TCG_NEED_BSWAP
) {
1031 tcg_out_opc_imm(s
, OPC_LHU
, TCG_REG_T0
, TCG_REG_V0
, 0);
1032 tcg_out_bswap16s(s
, data_reg1
, TCG_REG_T0
);
1034 tcg_out_opc_imm(s
, OPC_LH
, data_reg1
, TCG_REG_V0
, 0);
1038 if (TCG_NEED_BSWAP
) {
1039 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 0);
1040 tcg_out_bswap32(s
, data_reg1
, TCG_REG_T0
);
1042 tcg_out_opc_imm(s
, OPC_LW
, data_reg1
, TCG_REG_V0
, 0);
1046 if (TCG_NEED_BSWAP
) {
1047 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 4);
1048 tcg_out_bswap32(s
, data_reg1
, TCG_REG_T0
);
1049 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 0);
1050 tcg_out_bswap32(s
, data_reg2
, TCG_REG_T0
);
1052 tcg_out_opc_imm(s
, OPC_LW
, data_reg1
, TCG_REG_V0
, 0);
1053 tcg_out_opc_imm(s
, OPC_LW
, data_reg2
, TCG_REG_V0
, 4);
1060 #if defined(CONFIG_SOFTMMU)
1061 reloc_pc16(label2_ptr
, (tcg_target_long
) s
->code_ptr
);
1065 static void tcg_out_qemu_st(TCGContext
*s
, const TCGArg
*args
,
1068 TCGReg addr_regl
, data_regl
, data_regh
, data_reg1
, data_reg2
;
1069 #if defined(CONFIG_SOFTMMU)
1070 uint8_t *label1_ptr
, *label2_ptr
;
1072 int mem_index
, s_bits
;
1075 #if TARGET_LONG_BITS == 64
1076 # if defined(CONFIG_SOFTMMU)
1077 uint8_t *label3_ptr
;
1082 data_regl
= *args
++;
1084 data_regh
= *args
++;
1088 addr_regl
= *args
++;
1089 #if defined(CONFIG_SOFTMMU)
1090 # if TARGET_LONG_BITS == 64
1091 addr_regh
= *args
++;
1092 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
1107 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
1108 data_reg1
= data_regh
;
1109 data_reg2
= data_regl
;
1111 data_reg1
= data_regl
;
1112 data_reg2
= data_regh
;
1115 data_reg1
= data_regl
;
1119 #if defined(CONFIG_SOFTMMU)
1120 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_A0
, addr_regl
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
1121 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_A0
, TCG_REG_A0
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
1122 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, TCG_AREG0
);
1123 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1124 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
) + addr_meml
);
1125 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T0
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
1126 tcg_out_opc_reg(s
, OPC_AND
, TCG_REG_T0
, TCG_REG_T0
, addr_regl
);
1128 # if TARGET_LONG_BITS == 64
1129 label3_ptr
= s
->code_ptr
;
1130 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_T0
, TCG_REG_AT
);
1133 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1134 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
) + addr_memh
);
1136 label1_ptr
= s
->code_ptr
;
1137 tcg_out_opc_br(s
, OPC_BEQ
, addr_regh
, TCG_REG_AT
);
1140 reloc_pc16(label3_ptr
, (tcg_target_long
) s
->code_ptr
);
1142 label1_ptr
= s
->code_ptr
;
1143 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_T0
, TCG_REG_AT
);
1149 tcg_out_call_iarg_reg32(s
, &arg_num
, TCG_AREG0
);
1150 # if TARGET_LONG_BITS == 64
1151 tcg_out_call_iarg_reg64(s
, &arg_num
, addr_regl
, addr_regh
);
1153 tcg_out_call_iarg_reg32(s
, &arg_num
, addr_regl
);
1157 tcg_out_call_iarg_reg8(s
, &arg_num
, data_regl
);
1160 tcg_out_call_iarg_reg16(s
, &arg_num
, data_regl
);
1163 tcg_out_call_iarg_reg32(s
, &arg_num
, data_regl
);
1166 tcg_out_call_iarg_reg64(s
, &arg_num
, data_regl
, data_regh
);
1171 tcg_out_call_iarg_imm32(s
, &arg_num
, mem_index
);
1172 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T9
, (tcg_target_long
)qemu_st_helpers
[s_bits
]);
1173 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
1176 label2_ptr
= s
->code_ptr
;
1177 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_ZERO
, TCG_REG_ZERO
);
1180 /* label1: fast path */
1181 reloc_pc16(label1_ptr
, (tcg_target_long
) s
->code_ptr
);
1183 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_A0
, TCG_REG_A0
,
1184 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
));
1185 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, addr_regl
);
1187 if (GUEST_BASE
== (int16_t)GUEST_BASE
) {
1188 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_A0
, addr_regl
, GUEST_BASE
);
1190 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_A0
, GUEST_BASE
);
1191 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, addr_regl
);
1198 tcg_out_opc_imm(s
, OPC_SB
, data_reg1
, TCG_REG_A0
, 0);
1201 if (TCG_NEED_BSWAP
) {
1202 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_T0
, data_reg1
, 0xffff);
1203 tcg_out_bswap16(s
, TCG_REG_T0
, TCG_REG_T0
);
1204 tcg_out_opc_imm(s
, OPC_SH
, TCG_REG_T0
, TCG_REG_A0
, 0);
1206 tcg_out_opc_imm(s
, OPC_SH
, data_reg1
, TCG_REG_A0
, 0);
1210 if (TCG_NEED_BSWAP
) {
1211 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg1
);
1212 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 0);
1214 tcg_out_opc_imm(s
, OPC_SW
, data_reg1
, TCG_REG_A0
, 0);
1218 if (TCG_NEED_BSWAP
) {
1219 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg2
);
1220 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 0);
1221 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg1
);
1222 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 4);
1224 tcg_out_opc_imm(s
, OPC_SW
, data_reg1
, TCG_REG_A0
, 0);
1225 tcg_out_opc_imm(s
, OPC_SW
, data_reg2
, TCG_REG_A0
, 4);
1232 #if defined(CONFIG_SOFTMMU)
1233 reloc_pc16(label2_ptr
, (tcg_target_long
) s
->code_ptr
);
1237 static inline void tcg_out_op(TCGContext
*s
, TCGOpcode opc
,
1238 const TCGArg
*args
, const int *const_args
)
1241 case INDEX_op_exit_tb
:
1242 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_V0
, args
[0]);
1243 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_AT
, (tcg_target_long
)tb_ret_addr
);
1244 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_AT
, 0);
1247 case INDEX_op_goto_tb
:
1248 if (s
->tb_jmp_offset
) {
1249 /* direct jump method */
1252 /* indirect jump method */
1253 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, (tcg_target_long
)(s
->tb_next
+ args
[0]));
1254 tcg_out_ld(s
, TCG_TYPE_PTR
, TCG_REG_AT
, TCG_REG_AT
, 0);
1255 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_AT
, 0);
1258 s
->tb_next_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
1261 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, args
[0], 0);
1265 tcg_out_opc_reg(s
, OPC_JR
, 0, args
[0], 0);
1269 tcg_out_brcond(s
, TCG_COND_EQ
, TCG_REG_ZERO
, TCG_REG_ZERO
, args
[0]);
1272 case INDEX_op_mov_i32
:
1273 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1275 case INDEX_op_movi_i32
:
1276 tcg_out_movi(s
, TCG_TYPE_I32
, args
[0], args
[1]);
1279 case INDEX_op_ld8u_i32
:
1280 tcg_out_ldst(s
, OPC_LBU
, args
[0], args
[1], args
[2]);
1282 case INDEX_op_ld8s_i32
:
1283 tcg_out_ldst(s
, OPC_LB
, args
[0], args
[1], args
[2]);
1285 case INDEX_op_ld16u_i32
:
1286 tcg_out_ldst(s
, OPC_LHU
, args
[0], args
[1], args
[2]);
1288 case INDEX_op_ld16s_i32
:
1289 tcg_out_ldst(s
, OPC_LH
, args
[0], args
[1], args
[2]);
1291 case INDEX_op_ld_i32
:
1292 tcg_out_ldst(s
, OPC_LW
, args
[0], args
[1], args
[2]);
1294 case INDEX_op_st8_i32
:
1295 tcg_out_ldst(s
, OPC_SB
, args
[0], args
[1], args
[2]);
1297 case INDEX_op_st16_i32
:
1298 tcg_out_ldst(s
, OPC_SH
, args
[0], args
[1], args
[2]);
1300 case INDEX_op_st_i32
:
1301 tcg_out_ldst(s
, OPC_SW
, args
[0], args
[1], args
[2]);
1304 case INDEX_op_add_i32
:
1305 if (const_args
[2]) {
1306 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[0], args
[1], args
[2]);
1308 tcg_out_opc_reg(s
, OPC_ADDU
, args
[0], args
[1], args
[2]);
1311 case INDEX_op_add2_i32
:
1312 if (const_args
[4]) {
1313 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_AT
, args
[2], args
[4]);
1315 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_AT
, args
[2], args
[4]);
1317 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_T0
, TCG_REG_AT
, args
[2]);
1318 if (const_args
[5]) {
1319 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[1], args
[3], args
[5]);
1321 tcg_out_opc_reg(s
, OPC_ADDU
, args
[1], args
[3], args
[5]);
1323 tcg_out_opc_reg(s
, OPC_ADDU
, args
[1], args
[1], TCG_REG_T0
);
1324 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], TCG_REG_AT
);
1326 case INDEX_op_sub_i32
:
1327 if (const_args
[2]) {
1328 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[0], args
[1], -args
[2]);
1330 tcg_out_opc_reg(s
, OPC_SUBU
, args
[0], args
[1], args
[2]);
1333 case INDEX_op_sub2_i32
:
1334 if (const_args
[4]) {
1335 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_AT
, args
[2], -args
[4]);
1337 tcg_out_opc_reg(s
, OPC_SUBU
, TCG_REG_AT
, args
[2], args
[4]);
1339 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_T0
, args
[2], TCG_REG_AT
);
1340 if (const_args
[5]) {
1341 tcg_out_opc_imm(s
, OPC_ADDIU
, args
[1], args
[3], -args
[5]);
1343 tcg_out_opc_reg(s
, OPC_SUBU
, args
[1], args
[3], args
[5]);
1345 tcg_out_opc_reg(s
, OPC_SUBU
, args
[1], args
[1], TCG_REG_T0
);
1346 tcg_out_mov(s
, TCG_TYPE_I32
, args
[0], TCG_REG_AT
);
1348 case INDEX_op_mul_i32
:
1349 tcg_out_opc_reg(s
, OPC_MULT
, 0, args
[1], args
[2]);
1350 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1352 case INDEX_op_mulu2_i32
:
1353 tcg_out_opc_reg(s
, OPC_MULTU
, 0, args
[2], args
[3]);
1354 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1355 tcg_out_opc_reg(s
, OPC_MFHI
, args
[1], 0, 0);
1357 case INDEX_op_div_i32
:
1358 tcg_out_opc_reg(s
, OPC_DIV
, 0, args
[1], args
[2]);
1359 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1361 case INDEX_op_divu_i32
:
1362 tcg_out_opc_reg(s
, OPC_DIVU
, 0, args
[1], args
[2]);
1363 tcg_out_opc_reg(s
, OPC_MFLO
, args
[0], 0, 0);
1365 case INDEX_op_rem_i32
:
1366 tcg_out_opc_reg(s
, OPC_DIV
, 0, args
[1], args
[2]);
1367 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1369 case INDEX_op_remu_i32
:
1370 tcg_out_opc_reg(s
, OPC_DIVU
, 0, args
[1], args
[2]);
1371 tcg_out_opc_reg(s
, OPC_MFHI
, args
[0], 0, 0);
1374 case INDEX_op_and_i32
:
1375 if (const_args
[2]) {
1376 tcg_out_opc_imm(s
, OPC_ANDI
, args
[0], args
[1], args
[2]);
1378 tcg_out_opc_reg(s
, OPC_AND
, args
[0], args
[1], args
[2]);
1381 case INDEX_op_or_i32
:
1382 if (const_args
[2]) {
1383 tcg_out_opc_imm(s
, OPC_ORI
, args
[0], args
[1], args
[2]);
1385 tcg_out_opc_reg(s
, OPC_OR
, args
[0], args
[1], args
[2]);
1388 case INDEX_op_nor_i32
:
1389 tcg_out_opc_reg(s
, OPC_NOR
, args
[0], args
[1], args
[2]);
1391 case INDEX_op_not_i32
:
1392 tcg_out_opc_reg(s
, OPC_NOR
, args
[0], TCG_REG_ZERO
, args
[1]);
1394 case INDEX_op_xor_i32
:
1395 if (const_args
[2]) {
1396 tcg_out_opc_imm(s
, OPC_XORI
, args
[0], args
[1], args
[2]);
1398 tcg_out_opc_reg(s
, OPC_XOR
, args
[0], args
[1], args
[2]);
1402 case INDEX_op_sar_i32
:
1403 if (const_args
[2]) {
1404 tcg_out_opc_sa(s
, OPC_SRA
, args
[0], args
[1], args
[2]);
1406 tcg_out_opc_reg(s
, OPC_SRAV
, args
[0], args
[2], args
[1]);
1409 case INDEX_op_shl_i32
:
1410 if (const_args
[2]) {
1411 tcg_out_opc_sa(s
, OPC_SLL
, args
[0], args
[1], args
[2]);
1413 tcg_out_opc_reg(s
, OPC_SLLV
, args
[0], args
[2], args
[1]);
1416 case INDEX_op_shr_i32
:
1417 if (const_args
[2]) {
1418 tcg_out_opc_sa(s
, OPC_SRL
, args
[0], args
[1], args
[2]);
1420 tcg_out_opc_reg(s
, OPC_SRLV
, args
[0], args
[2], args
[1]);
1424 /* The bswap routines do not work on non-R2 CPU. In that case
1425 we let TCG generating the corresponding code. */
1426 case INDEX_op_bswap16_i32
:
1427 tcg_out_bswap16(s
, args
[0], args
[1]);
1429 case INDEX_op_bswap32_i32
:
1430 tcg_out_bswap32(s
, args
[0], args
[1]);
1433 case INDEX_op_ext8s_i32
:
1434 tcg_out_ext8s(s
, args
[0], args
[1]);
1436 case INDEX_op_ext16s_i32
:
1437 tcg_out_ext16s(s
, args
[0], args
[1]);
1440 case INDEX_op_brcond_i32
:
1441 tcg_out_brcond(s
, args
[2], args
[0], args
[1], args
[3]);
1443 case INDEX_op_brcond2_i32
:
1444 tcg_out_brcond2(s
, args
[4], args
[0], args
[1], args
[2], args
[3], args
[5]);
1447 case INDEX_op_setcond_i32
:
1448 tcg_out_setcond(s
, args
[3], args
[0], args
[1], args
[2]);
1450 case INDEX_op_setcond2_i32
:
1451 tcg_out_setcond2(s
, args
[5], args
[0], args
[1], args
[2], args
[3], args
[4]);
1454 case INDEX_op_qemu_ld8u
:
1455 tcg_out_qemu_ld(s
, args
, 0);
1457 case INDEX_op_qemu_ld8s
:
1458 tcg_out_qemu_ld(s
, args
, 0 | 4);
1460 case INDEX_op_qemu_ld16u
:
1461 tcg_out_qemu_ld(s
, args
, 1);
1463 case INDEX_op_qemu_ld16s
:
1464 tcg_out_qemu_ld(s
, args
, 1 | 4);
1466 case INDEX_op_qemu_ld32
:
1467 tcg_out_qemu_ld(s
, args
, 2);
1469 case INDEX_op_qemu_ld64
:
1470 tcg_out_qemu_ld(s
, args
, 3);
1472 case INDEX_op_qemu_st8
:
1473 tcg_out_qemu_st(s
, args
, 0);
1475 case INDEX_op_qemu_st16
:
1476 tcg_out_qemu_st(s
, args
, 1);
1478 case INDEX_op_qemu_st32
:
1479 tcg_out_qemu_st(s
, args
, 2);
1481 case INDEX_op_qemu_st64
:
1482 tcg_out_qemu_st(s
, args
, 3);
1490 static const TCGTargetOpDef mips_op_defs
[] = {
1491 { INDEX_op_exit_tb
, { } },
1492 { INDEX_op_goto_tb
, { } },
1493 { INDEX_op_call
, { "C" } },
1494 { INDEX_op_jmp
, { "r" } },
1495 { INDEX_op_br
, { } },
1497 { INDEX_op_mov_i32
, { "r", "r" } },
1498 { INDEX_op_movi_i32
, { "r" } },
1499 { INDEX_op_ld8u_i32
, { "r", "r" } },
1500 { INDEX_op_ld8s_i32
, { "r", "r" } },
1501 { INDEX_op_ld16u_i32
, { "r", "r" } },
1502 { INDEX_op_ld16s_i32
, { "r", "r" } },
1503 { INDEX_op_ld_i32
, { "r", "r" } },
1504 { INDEX_op_st8_i32
, { "rZ", "r" } },
1505 { INDEX_op_st16_i32
, { "rZ", "r" } },
1506 { INDEX_op_st_i32
, { "rZ", "r" } },
1508 { INDEX_op_add_i32
, { "r", "rZ", "rJ" } },
1509 { INDEX_op_mul_i32
, { "r", "rZ", "rZ" } },
1510 { INDEX_op_mulu2_i32
, { "r", "r", "rZ", "rZ" } },
1511 { INDEX_op_div_i32
, { "r", "rZ", "rZ" } },
1512 { INDEX_op_divu_i32
, { "r", "rZ", "rZ" } },
1513 { INDEX_op_rem_i32
, { "r", "rZ", "rZ" } },
1514 { INDEX_op_remu_i32
, { "r", "rZ", "rZ" } },
1515 { INDEX_op_sub_i32
, { "r", "rZ", "rJ" } },
1517 { INDEX_op_and_i32
, { "r", "rZ", "rI" } },
1518 { INDEX_op_nor_i32
, { "r", "rZ", "rZ" } },
1519 { INDEX_op_not_i32
, { "r", "rZ" } },
1520 { INDEX_op_or_i32
, { "r", "rZ", "rIZ" } },
1521 { INDEX_op_xor_i32
, { "r", "rZ", "rIZ" } },
1523 { INDEX_op_shl_i32
, { "r", "rZ", "ri" } },
1524 { INDEX_op_shr_i32
, { "r", "rZ", "ri" } },
1525 { INDEX_op_sar_i32
, { "r", "rZ", "ri" } },
1527 { INDEX_op_bswap16_i32
, { "r", "r" } },
1528 { INDEX_op_bswap32_i32
, { "r", "r" } },
1530 { INDEX_op_ext8s_i32
, { "r", "rZ" } },
1531 { INDEX_op_ext16s_i32
, { "r", "rZ" } },
1533 { INDEX_op_brcond_i32
, { "rZ", "rZ" } },
1534 { INDEX_op_setcond_i32
, { "r", "rZ", "rZ" } },
1535 { INDEX_op_setcond2_i32
, { "r", "rZ", "rZ", "rZ", "rZ" } },
1537 { INDEX_op_add2_i32
, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1538 { INDEX_op_sub2_i32
, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
1539 { INDEX_op_brcond2_i32
, { "rZ", "rZ", "rZ", "rZ" } },
1541 #if TARGET_LONG_BITS == 32
1542 { INDEX_op_qemu_ld8u
, { "L", "lZ" } },
1543 { INDEX_op_qemu_ld8s
, { "L", "lZ" } },
1544 { INDEX_op_qemu_ld16u
, { "L", "lZ" } },
1545 { INDEX_op_qemu_ld16s
, { "L", "lZ" } },
1546 { INDEX_op_qemu_ld32
, { "L", "lZ" } },
1547 { INDEX_op_qemu_ld64
, { "L", "L", "lZ" } },
1549 { INDEX_op_qemu_st8
, { "SZ", "SZ" } },
1550 { INDEX_op_qemu_st16
, { "SZ", "SZ" } },
1551 { INDEX_op_qemu_st32
, { "SZ", "SZ" } },
1552 { INDEX_op_qemu_st64
, { "SZ", "SZ", "SZ" } },
1554 { INDEX_op_qemu_ld8u
, { "L", "lZ", "lZ" } },
1555 { INDEX_op_qemu_ld8s
, { "L", "lZ", "lZ" } },
1556 { INDEX_op_qemu_ld16u
, { "L", "lZ", "lZ" } },
1557 { INDEX_op_qemu_ld16s
, { "L", "lZ", "lZ" } },
1558 { INDEX_op_qemu_ld32
, { "L", "lZ", "lZ" } },
1559 { INDEX_op_qemu_ld64
, { "L", "L", "lZ", "lZ" } },
1561 { INDEX_op_qemu_st8
, { "SZ", "SZ", "SZ" } },
1562 { INDEX_op_qemu_st16
, { "SZ", "SZ", "SZ" } },
1563 { INDEX_op_qemu_st32
, { "SZ", "SZ", "SZ" } },
1564 { INDEX_op_qemu_st64
, { "SZ", "SZ", "SZ", "SZ" } },
1569 static int tcg_target_callee_save_regs
[] = {
1570 TCG_REG_S0
, /* used for the global env (TCG_AREG0) */
1579 TCG_REG_RA
, /* should be last for ABI compliance */
1582 /* Generate global QEMU prologue and epilogue code */
1583 static void tcg_target_qemu_prologue(TCGContext
*s
)
1587 /* reserve some stack space, also for TCG temps. */
1588 frame_size
= ARRAY_SIZE(tcg_target_callee_save_regs
) * 4
1589 + TCG_STATIC_CALL_ARGS_SIZE
1590 + CPU_TEMP_BUF_NLONGS
* sizeof(long);
1591 frame_size
= (frame_size
+ TCG_TARGET_STACK_ALIGN
- 1) &
1592 ~(TCG_TARGET_STACK_ALIGN
- 1);
1593 tcg_set_frame(s
, TCG_REG_SP
, ARRAY_SIZE(tcg_target_callee_save_regs
) * 4
1594 + TCG_STATIC_CALL_ARGS_SIZE
,
1595 CPU_TEMP_BUF_NLONGS
* sizeof(long));
1598 tcg_out_addi(s
, TCG_REG_SP
, -frame_size
);
1599 for(i
= 0 ; i
< ARRAY_SIZE(tcg_target_callee_save_regs
) ; i
++) {
1600 tcg_out_st(s
, TCG_TYPE_I32
, tcg_target_callee_save_regs
[i
],
1601 TCG_REG_SP
, TCG_STATIC_CALL_ARGS_SIZE
+ i
* 4);
1604 /* Call generated code */
1605 tcg_out_opc_reg(s
, OPC_JR
, 0, tcg_target_call_iarg_regs
[1], 0);
1606 tcg_out_mov(s
, TCG_TYPE_PTR
, TCG_AREG0
, tcg_target_call_iarg_regs
[0]);
1607 tb_ret_addr
= s
->code_ptr
;
1610 for(i
= 0 ; i
< ARRAY_SIZE(tcg_target_callee_save_regs
) ; i
++) {
1611 tcg_out_ld(s
, TCG_TYPE_I32
, tcg_target_callee_save_regs
[i
],
1612 TCG_REG_SP
, TCG_STATIC_CALL_ARGS_SIZE
+ i
* 4);
1615 tcg_out_opc_reg(s
, OPC_JR
, 0, TCG_REG_RA
, 0);
1616 tcg_out_addi(s
, TCG_REG_SP
, frame_size
);
1619 static void tcg_target_init(TCGContext
*s
)
1621 tcg_regset_set(tcg_target_available_regs
[TCG_TYPE_I32
], 0xffffffff);
1622 tcg_regset_set(tcg_target_call_clobber_regs
,
1639 tcg_regset_clear(s
->reserved_regs
);
1640 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_ZERO
); /* zero register */
1641 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_K0
); /* kernel use only */
1642 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_K1
); /* kernel use only */
1643 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_AT
); /* internal use */
1644 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_T0
); /* internal use */
1645 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_RA
); /* return address */
1646 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_SP
); /* stack pointer */
1647 tcg_regset_set_reg(s
->reserved_regs
, TCG_REG_GP
); /* global pointer */
1649 tcg_add_target_add_op_defs(mips_op_defs
);