/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
 * Based on i386/tcg-target.c - Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
/* A byte swap is needed whenever host and target endianness differ.  The
   conditional was truncated: restore the #else/#endif so exactly one of
   the two definitions is selected. */
#if defined(TCG_TARGET_WORDS_BIGENDIAN) == defined(TARGET_WORDS_BIGENDIAN)
# define TCG_NEED_BSWAP 0
#else
# define TCG_NEED_BSWAP 1
#endif
34 static const char * const tcg_target_reg_names
[TCG_TARGET_NB_REGS
] = {
70 /* check if we really need so many registers :P */
/* NOTE(review): the initializer of this array (the register allocation
   preference order) was truncated in this view and cannot be reconstructed
   reliably from the surrounding code — restore it from the original source
   before building. */
71 static const TCGReg tcg_target_reg_alloc_order
[] = {
97 static const TCGReg tcg_target_call_iarg_regs
[4] = {
104 static const TCGReg tcg_target_call_oarg_regs
[2] = {
109 static uint8_t *tb_ret_addr
;
/* Value of a %lo(target) relocation: the low 16 bits of the target.
   pc is unused but kept for symmetry with the other reloc_*_val helpers. */
static inline uint32_t reloc_lo16_val(void *pc, intptr_t target)
{
    return target & 0xffff;
}
/* Patch the 16-bit immediate field of the instruction at pc with the
   low half of target, preserving the opcode bits. */
static inline void reloc_lo16(void *pc, intptr_t target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_lo16_val(pc, target);
}
/* Value of a %hi(target) relocation: bits 16..31 of the target.
   pc is unused but kept for symmetry with the other reloc_*_val helpers. */
static inline uint32_t reloc_hi16_val(void *pc, intptr_t target)
{
    return (target >> 16) & 0xffff;
}
/* Patch the 16-bit immediate field of the instruction at pc with the
   high half of target, preserving the opcode bits. */
static inline void reloc_hi16(void *pc, intptr_t target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0xffff)
                       | reloc_hi16_val(pc, target);
}
/* Encode a PC-relative branch displacement for the instruction at pc.
   The displacement is relative to the delay slot (pc + 4) and is stored
   as a word offset; aborts if it does not fit in the signed 18-bit range. */
static inline uint32_t reloc_pc16_val(void *pc, intptr_t target)
{
    int32_t disp;

    disp = target - (intptr_t)pc - 4;
    /* Sign-extension round-trip through 18 bits detects overflow. */
    if (disp != (disp << 14) >> 14) {
        tcg_abort();
    }

    return (disp >> 2) & 0xffff;
}
145 static inline void reloc_pc16 (void *pc
, tcg_target_long target
)
147 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xffff)
148 | reloc_pc16_val(pc
, target
);
151 static inline uint32_t reloc_26_val (void *pc
, tcg_target_long target
)
153 if ((((tcg_target_long
)pc
+ 4) & 0xf0000000) != (target
& 0xf0000000)) {
157 return (target
>> 2) & 0x3ffffff;
/* Patch the 26-bit jump-target field of the instruction at pc so that
   it jumps to target. */
static inline void reloc_pc26(void *pc, intptr_t target)
{
    *(uint32_t *) pc = (*(uint32_t *) pc & ~0x3ffffff)
                       | reloc_26_val(pc, target);
}
166 static void patch_reloc(uint8_t *code_ptr
, int type
,
167 intptr_t value
, intptr_t addend
)
172 reloc_lo16(code_ptr
, value
);
175 reloc_hi16(code_ptr
, value
);
178 reloc_pc16(code_ptr
, value
);
181 reloc_pc26(code_ptr
, value
);
188 /* parse target specific constraints */
189 static int target_parse_constraint(TCGArgConstraint
*ct
, const char **pct_str
)
196 ct
->ct
|= TCG_CT_REG
;
197 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
200 ct
->ct
|= TCG_CT_REG
;
201 tcg_regset_clear(ct
->u
.regs
);
202 tcg_regset_set_reg(ct
->u
.regs
, TCG_REG_T9
);
204 case 'L': /* qemu_ld output arg constraint */
205 ct
->ct
|= TCG_CT_REG
;
206 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
207 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_V0
);
209 case 'l': /* qemu_ld input arg constraint */
210 ct
->ct
|= TCG_CT_REG
;
211 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
212 #if defined(CONFIG_SOFTMMU)
213 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A0
);
214 # if (TARGET_LONG_BITS == 64)
215 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A2
);
219 case 'S': /* qemu_st constraint */
220 ct
->ct
|= TCG_CT_REG
;
221 tcg_regset_set(ct
->u
.regs
, 0xffffffff);
222 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A0
);
223 #if defined(CONFIG_SOFTMMU)
224 # if (TARGET_LONG_BITS == 32)
225 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A1
);
227 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A2
);
228 # if TARGET_LONG_BITS == 64
229 tcg_regset_reset_reg(ct
->u
.regs
, TCG_REG_A3
);
234 ct
->ct
|= TCG_CT_CONST_U16
;
237 ct
->ct
|= TCG_CT_CONST_S16
;
240 /* We are cheating a bit here, using the fact that the register
241 ZERO is also the register number 0. Hence there is no need
242 to check for const_args in each instruction. */
243 ct
->ct
|= TCG_CT_CONST_ZERO
;
253 /* test if a constant matches the constraint */
254 static inline int tcg_target_const_match(tcg_target_long val
,
255 const TCGArgConstraint
*arg_ct
)
259 if (ct
& TCG_CT_CONST
)
261 else if ((ct
& TCG_CT_CONST_ZERO
) && val
== 0)
263 else if ((ct
& TCG_CT_CONST_U16
) && val
== (uint16_t)val
)
265 else if ((ct
& TCG_CT_CONST_S16
) && val
== (int16_t)val
)
/* instruction opcodes */
/* MIPS32 instruction encodings: I-type major opcodes in bits 31..26,
   SPECIAL/SPECIAL2/SPECIAL3 function codes in the low bits.  The entries
   for LB/LH/LW and SB/SH/SW were truncated in this view; they are used
   throughout the file and restored with their architectural encodings. */
enum {
    OPC_BEQ      = 0x04 << 26,
    OPC_BNE      = 0x05 << 26,
    OPC_BLEZ     = 0x06 << 26,
    OPC_BGTZ     = 0x07 << 26,
    OPC_ADDIU    = 0x09 << 26,
    OPC_SLTI     = 0x0A << 26,
    OPC_SLTIU    = 0x0B << 26,
    OPC_ANDI     = 0x0C << 26,
    OPC_ORI      = 0x0D << 26,
    OPC_XORI     = 0x0E << 26,
    OPC_LUI      = 0x0F << 26,
    OPC_LB       = 0x20 << 26,
    OPC_LH       = 0x21 << 26,
    OPC_LW       = 0x23 << 26,
    OPC_LBU      = 0x24 << 26,
    OPC_LHU      = 0x25 << 26,
    OPC_LWU      = 0x27 << 26,
    OPC_SB       = 0x28 << 26,
    OPC_SH       = 0x29 << 26,
    OPC_SW       = 0x2B << 26,

    OPC_SPECIAL  = 0x00 << 26,
    OPC_SLL      = OPC_SPECIAL | 0x00,
    OPC_SRL      = OPC_SPECIAL | 0x02,
    OPC_ROTR     = OPC_SPECIAL | (0x01 << 21) | 0x02,
    OPC_SRA      = OPC_SPECIAL | 0x03,
    OPC_SLLV     = OPC_SPECIAL | 0x04,
    OPC_SRLV     = OPC_SPECIAL | 0x06,
    OPC_ROTRV    = OPC_SPECIAL | (0x01 << 6) | 0x06,
    OPC_SRAV     = OPC_SPECIAL | 0x07,
    OPC_JR       = OPC_SPECIAL | 0x08,
    OPC_JALR     = OPC_SPECIAL | 0x09,
    OPC_MOVZ     = OPC_SPECIAL | 0x0A,
    OPC_MOVN     = OPC_SPECIAL | 0x0B,
    OPC_MFHI     = OPC_SPECIAL | 0x10,
    OPC_MFLO     = OPC_SPECIAL | 0x12,
    OPC_MULT     = OPC_SPECIAL | 0x18,
    OPC_MULTU    = OPC_SPECIAL | 0x19,
    OPC_DIV      = OPC_SPECIAL | 0x1A,
    OPC_DIVU     = OPC_SPECIAL | 0x1B,
    OPC_ADDU     = OPC_SPECIAL | 0x21,
    OPC_SUBU     = OPC_SPECIAL | 0x23,
    OPC_AND      = OPC_SPECIAL | 0x24,
    OPC_OR       = OPC_SPECIAL | 0x25,
    OPC_XOR      = OPC_SPECIAL | 0x26,
    OPC_NOR      = OPC_SPECIAL | 0x27,
    OPC_SLT      = OPC_SPECIAL | 0x2A,
    OPC_SLTU     = OPC_SPECIAL | 0x2B,

    OPC_REGIMM   = 0x01 << 26,
    OPC_BLTZ     = OPC_REGIMM | (0x00 << 16),
    OPC_BGEZ     = OPC_REGIMM | (0x01 << 16),

    OPC_SPECIAL2 = 0x1c << 26,
    OPC_MUL      = OPC_SPECIAL2 | 0x002,

    OPC_SPECIAL3 = 0x1f << 26,
    OPC_INS      = OPC_SPECIAL3 | 0x004,
    OPC_WSBH     = OPC_SPECIAL3 | 0x0a0,
    OPC_SEB      = OPC_SPECIAL3 | 0x420,
    OPC_SEH      = OPC_SPECIAL3 | 0x620,
};
339 static inline void tcg_out_opc_reg(TCGContext
*s
, int opc
,
340 TCGReg rd
, TCGReg rs
, TCGReg rt
)
345 inst
|= (rs
& 0x1F) << 21;
346 inst
|= (rt
& 0x1F) << 16;
347 inst
|= (rd
& 0x1F) << 11;
354 static inline void tcg_out_opc_imm(TCGContext
*s
, int opc
,
355 TCGReg rt
, TCGReg rs
, TCGArg imm
)
360 inst
|= (rs
& 0x1F) << 21;
361 inst
|= (rt
& 0x1F) << 16;
362 inst
|= (imm
& 0xffff);
369 static inline void tcg_out_opc_br(TCGContext
*s
, int opc
,
370 TCGReg rt
, TCGReg rs
)
372 /* We pay attention here to not modify the branch target by reading
373 the existing value and using it again. This ensure that caches and
374 memory are kept coherent during retranslation. */
375 uint16_t offset
= (uint16_t)(*(uint32_t *) s
->code_ptr
);
377 tcg_out_opc_imm(s
, opc
, rt
, rs
, offset
);
383 static inline void tcg_out_opc_sa(TCGContext
*s
, int opc
,
384 TCGReg rd
, TCGReg rt
, TCGArg sa
)
389 inst
|= (rt
& 0x1F) << 16;
390 inst
|= (rd
& 0x1F) << 11;
391 inst
|= (sa
& 0x1F) << 6;
396 static inline void tcg_out_nop(TCGContext
*s
)
401 static inline void tcg_out_mov(TCGContext
*s
, TCGType type
,
402 TCGReg ret
, TCGReg arg
)
404 /* Simple reg-reg move, optimising out the 'do nothing' case */
406 tcg_out_opc_reg(s
, OPC_ADDU
, ret
, arg
, TCG_REG_ZERO
);
410 static inline void tcg_out_movi(TCGContext
*s
, TCGType type
,
411 TCGReg reg
, tcg_target_long arg
)
413 if (arg
== (int16_t)arg
) {
414 tcg_out_opc_imm(s
, OPC_ADDIU
, reg
, TCG_REG_ZERO
, arg
);
415 } else if (arg
== (uint16_t)arg
) {
416 tcg_out_opc_imm(s
, OPC_ORI
, reg
, TCG_REG_ZERO
, arg
);
418 tcg_out_opc_imm(s
, OPC_LUI
, reg
, 0, arg
>> 16);
419 tcg_out_opc_imm(s
, OPC_ORI
, reg
, reg
, arg
& 0xffff);
423 static inline void tcg_out_bswap16(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
425 if (use_mips32r2_instructions
) {
426 tcg_out_opc_reg(s
, OPC_WSBH
, ret
, 0, arg
);
428 /* ret and arg can't be register at */
429 if (ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
433 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
434 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 8);
435 tcg_out_opc_imm(s
, OPC_ANDI
, ret
, ret
, 0xff00);
436 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
440 static inline void tcg_out_bswap16s(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
442 if (use_mips32r2_instructions
) {
443 tcg_out_opc_reg(s
, OPC_WSBH
, ret
, 0, arg
);
444 tcg_out_opc_reg(s
, OPC_SEH
, ret
, 0, ret
);
446 /* ret and arg can't be register at */
447 if (ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
451 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
452 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
453 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 16);
454 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
458 static inline void tcg_out_bswap32(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
460 if (use_mips32r2_instructions
) {
461 tcg_out_opc_reg(s
, OPC_WSBH
, ret
, 0, arg
);
462 tcg_out_opc_sa(s
, OPC_ROTR
, ret
, ret
, 16);
464 /* ret and arg must be different and can't be register at */
465 if (ret
== arg
|| ret
== TCG_REG_AT
|| arg
== TCG_REG_AT
) {
469 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
471 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 24);
472 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
474 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, arg
, 0xff00);
475 tcg_out_opc_sa(s
, OPC_SLL
, TCG_REG_AT
, TCG_REG_AT
, 8);
476 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
478 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_AT
, arg
, 8);
479 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_AT
, TCG_REG_AT
, 0xff00);
480 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
484 static inline void tcg_out_ext8s(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
486 if (use_mips32r2_instructions
) {
487 tcg_out_opc_reg(s
, OPC_SEB
, ret
, 0, arg
);
489 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 24);
490 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 24);
494 static inline void tcg_out_ext16s(TCGContext
*s
, TCGReg ret
, TCGReg arg
)
496 if (use_mips32r2_instructions
) {
497 tcg_out_opc_reg(s
, OPC_SEH
, ret
, 0, arg
);
499 tcg_out_opc_sa(s
, OPC_SLL
, ret
, arg
, 16);
500 tcg_out_opc_sa(s
, OPC_SRA
, ret
, ret
, 16);
504 static inline void tcg_out_ldst(TCGContext
*s
, int opc
, TCGArg arg
,
505 TCGReg arg1
, TCGArg arg2
)
507 if (arg2
== (int16_t) arg2
) {
508 tcg_out_opc_imm(s
, opc
, arg
, arg1
, arg2
);
510 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, arg2
);
511 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_AT
, TCG_REG_AT
, arg1
);
512 tcg_out_opc_imm(s
, opc
, arg
, TCG_REG_AT
, 0);
516 static inline void tcg_out_ld(TCGContext
*s
, TCGType type
, TCGReg arg
,
517 TCGReg arg1
, intptr_t arg2
)
519 tcg_out_ldst(s
, OPC_LW
, arg
, arg1
, arg2
);
522 static inline void tcg_out_st(TCGContext
*s
, TCGType type
, TCGReg arg
,
523 TCGReg arg1
, intptr_t arg2
)
525 tcg_out_ldst(s
, OPC_SW
, arg
, arg1
, arg2
);
528 static inline void tcg_out_addi(TCGContext
*s
, TCGReg reg
, TCGArg val
)
530 if (val
== (int16_t)val
) {
531 tcg_out_opc_imm(s
, OPC_ADDIU
, reg
, reg
, val
);
533 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_AT
, val
);
534 tcg_out_opc_reg(s
, OPC_ADDU
, reg
, reg
, TCG_REG_AT
);
538 /* Helper routines for marshalling helper function arguments into
539 * the correct registers and stack.
540 * arg_num is where we want to put this argument, and is updated to be ready
541 * for the next call. arg is the argument itself. Note that arg_num 0..3 is
542 * real registers, 4+ on stack.
544 * We provide routines for arguments which are: immediate, 32 bit
545 * value in register, 16 and 8 bit values in register (which must be zero
546 * extended before use) and 64 bit value in a lo:hi register pair.
548 #define DEFINE_TCG_OUT_CALL_IARG(NAME, ARGPARAM) \
549 static inline void NAME(TCGContext *s, int *arg_num, ARGPARAM) \
551 if (*arg_num < 4) { \
552 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(tcg_target_call_iarg_regs[*arg_num]); \
554 DEFINE_TCG_OUT_CALL_IARG_GET_ARG(TCG_REG_AT); \
555 tcg_out_st(s, TCG_TYPE_I32, TCG_REG_AT, TCG_REG_SP, 4 * (*arg_num)); \
559 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
560 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xff);
561 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg8
, TCGReg arg
)
562 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
563 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
564 tcg_out_opc_imm(s, OPC_ANDI, A, arg, 0xffff);
565 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_reg16
, TCGReg arg
)
566 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
567 #define DEFINE_TCG_OUT_CALL_IARG_GET_ARG(A) \
568 tcg_out_movi(s, TCG_TYPE_I32, A, arg);
569 DEFINE_TCG_OUT_CALL_IARG(tcg_out_call_iarg_imm32
, TCGArg arg
)
570 #undef DEFINE_TCG_OUT_CALL_IARG_GET_ARG
572 /* We don't use the macro for this one to avoid an unnecessary reg-reg
573 move when storing to the stack. */
574 static inline void tcg_out_call_iarg_reg32(TCGContext
*s
, int *arg_num
,
578 tcg_out_mov(s
, TCG_TYPE_I32
, tcg_target_call_iarg_regs
[*arg_num
], arg
);
580 tcg_out_st(s
, TCG_TYPE_I32
, arg
, TCG_REG_SP
, 4 * (*arg_num
));
585 static inline void tcg_out_call_iarg_reg64(TCGContext
*s
, int *arg_num
,
586 TCGReg arg_low
, TCGReg arg_high
)
588 (*arg_num
) = (*arg_num
+ 1) & ~1;
590 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
591 tcg_out_call_iarg_reg32(s
, arg_num
, arg_high
);
592 tcg_out_call_iarg_reg32(s
, arg_num
, arg_low
);
594 tcg_out_call_iarg_reg32(s
, arg_num
, arg_low
);
595 tcg_out_call_iarg_reg32(s
, arg_num
, arg_high
);
599 static void tcg_out_brcond(TCGContext
*s
, TCGCond cond
, TCGArg arg1
,
600 TCGArg arg2
, int label_index
)
602 TCGLabel
*l
= &s
->labels
[label_index
];
606 tcg_out_opc_br(s
, OPC_BEQ
, arg1
, arg2
);
609 tcg_out_opc_br(s
, OPC_BNE
, arg1
, arg2
);
613 tcg_out_opc_br(s
, OPC_BLTZ
, 0, arg1
);
615 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg1
, arg2
);
616 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
620 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg1
, arg2
);
621 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
625 tcg_out_opc_br(s
, OPC_BGEZ
, 0, arg1
);
627 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg1
, arg2
);
628 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
632 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg1
, arg2
);
633 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
637 tcg_out_opc_br(s
, OPC_BLEZ
, 0, arg1
);
639 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg2
, arg1
);
640 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
644 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg2
, arg1
);
645 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_AT
, TCG_REG_ZERO
);
649 tcg_out_opc_br(s
, OPC_BGTZ
, 0, arg1
);
651 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, arg2
, arg1
);
652 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
656 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, arg2
, arg1
);
657 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_AT
, TCG_REG_ZERO
);
664 reloc_pc16(s
->code_ptr
- 4, l
->u
.value
);
666 tcg_out_reloc(s
, s
->code_ptr
- 4, R_MIPS_PC16
, label_index
, 0);
671 /* XXX: we implement it at the target level to avoid having to
672 handle cross basic blocks temporaries */
673 static void tcg_out_brcond2(TCGContext
*s
, TCGCond cond
, TCGArg arg1
,
674 TCGArg arg2
, TCGArg arg3
, TCGArg arg4
,
681 tcg_out_brcond(s
, TCG_COND_NE
, arg2
, arg4
, label_index
);
682 tcg_out_brcond(s
, TCG_COND_NE
, arg1
, arg3
, label_index
);
688 tcg_out_brcond(s
, TCG_COND_LT
, arg2
, arg4
, label_index
);
692 tcg_out_brcond(s
, TCG_COND_GT
, arg2
, arg4
, label_index
);
696 tcg_out_brcond(s
, TCG_COND_LTU
, arg2
, arg4
, label_index
);
700 tcg_out_brcond(s
, TCG_COND_GTU
, arg2
, arg4
, label_index
);
706 label_ptr
= s
->code_ptr
;
707 tcg_out_opc_br(s
, OPC_BNE
, arg2
, arg4
);
712 tcg_out_brcond(s
, TCG_COND_EQ
, arg1
, arg3
, label_index
);
716 tcg_out_brcond(s
, TCG_COND_LTU
, arg1
, arg3
, label_index
);
720 tcg_out_brcond(s
, TCG_COND_LEU
, arg1
, arg3
, label_index
);
724 tcg_out_brcond(s
, TCG_COND_GTU
, arg1
, arg3
, label_index
);
728 tcg_out_brcond(s
, TCG_COND_GEU
, arg1
, arg3
, label_index
);
734 reloc_pc16(label_ptr
, (tcg_target_long
) s
->code_ptr
);
737 static void tcg_out_movcond(TCGContext
*s
, TCGCond cond
, TCGReg ret
,
738 TCGArg c1
, TCGArg c2
, TCGArg v
)
743 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, c2
);
744 } else if (c2
== 0) {
745 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, c1
);
747 tcg_out_opc_reg(s
, OPC_XOR
, TCG_REG_AT
, c1
, c2
);
748 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
753 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, c2
);
754 } else if (c2
== 0) {
755 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, c1
);
757 tcg_out_opc_reg(s
, OPC_XOR
, TCG_REG_AT
, c1
, c2
);
758 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
762 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, c1
, c2
);
763 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
766 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, c1
, c2
);
767 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
770 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, c1
, c2
);
771 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
774 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, c1
, c2
);
775 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
778 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, c2
, c1
);
779 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
782 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, c2
, c1
);
783 tcg_out_opc_reg(s
, OPC_MOVZ
, ret
, v
, TCG_REG_AT
);
786 tcg_out_opc_reg(s
, OPC_SLT
, TCG_REG_AT
, c2
, c1
);
787 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
790 tcg_out_opc_reg(s
, OPC_SLTU
, TCG_REG_AT
, c2
, c1
);
791 tcg_out_opc_reg(s
, OPC_MOVN
, ret
, v
, TCG_REG_AT
);
799 static void tcg_out_setcond(TCGContext
*s
, TCGCond cond
, TCGReg ret
,
800 TCGArg arg1
, TCGArg arg2
)
805 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, arg2
, 1);
806 } else if (arg2
== 0) {
807 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, arg1
, 1);
809 tcg_out_opc_reg(s
, OPC_XOR
, ret
, arg1
, arg2
);
810 tcg_out_opc_imm(s
, OPC_SLTIU
, ret
, ret
, 1);
815 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, arg2
);
816 } else if (arg2
== 0) {
817 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, arg1
);
819 tcg_out_opc_reg(s
, OPC_XOR
, ret
, arg1
, arg2
);
820 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, TCG_REG_ZERO
, ret
);
824 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg1
, arg2
);
827 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg1
, arg2
);
830 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg1
, arg2
);
831 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
834 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg1
, arg2
);
835 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
838 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg2
, arg1
);
839 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
842 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg2
, arg1
);
843 tcg_out_opc_imm(s
, OPC_XORI
, ret
, ret
, 1);
846 tcg_out_opc_reg(s
, OPC_SLT
, ret
, arg2
, arg1
);
849 tcg_out_opc_reg(s
, OPC_SLTU
, ret
, arg2
, arg1
);
857 /* XXX: we implement it at the target level to avoid having to
858 handle cross basic blocks temporaries */
859 static void tcg_out_setcond2(TCGContext
*s
, TCGCond cond
, TCGReg ret
,
860 TCGArg arg1
, TCGArg arg2
, TCGArg arg3
, TCGArg arg4
)
864 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_AT
, arg2
, arg4
);
865 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_T0
, arg1
, arg3
);
866 tcg_out_opc_reg(s
, OPC_AND
, ret
, TCG_REG_AT
, TCG_REG_T0
);
869 tcg_out_setcond(s
, TCG_COND_NE
, TCG_REG_AT
, arg2
, arg4
);
870 tcg_out_setcond(s
, TCG_COND_NE
, TCG_REG_T0
, arg1
, arg3
);
871 tcg_out_opc_reg(s
, OPC_OR
, ret
, TCG_REG_AT
, TCG_REG_T0
);
875 tcg_out_setcond(s
, TCG_COND_LT
, TCG_REG_AT
, arg2
, arg4
);
879 tcg_out_setcond(s
, TCG_COND_GT
, TCG_REG_AT
, arg2
, arg4
);
883 tcg_out_setcond(s
, TCG_COND_LTU
, TCG_REG_AT
, arg2
, arg4
);
887 tcg_out_setcond(s
, TCG_COND_GTU
, TCG_REG_AT
, arg2
, arg4
);
894 tcg_out_setcond(s
, TCG_COND_EQ
, TCG_REG_T0
, arg2
, arg4
);
899 tcg_out_setcond(s
, TCG_COND_LTU
, ret
, arg1
, arg3
);
903 tcg_out_setcond(s
, TCG_COND_LEU
, ret
, arg1
, arg3
);
907 tcg_out_setcond(s
, TCG_COND_GTU
, ret
, arg1
, arg3
);
911 tcg_out_setcond(s
, TCG_COND_GEU
, ret
, arg1
, arg3
);
917 tcg_out_opc_reg(s
, OPC_AND
, ret
, ret
, TCG_REG_T0
);
918 tcg_out_opc_reg(s
, OPC_OR
, ret
, ret
, TCG_REG_AT
);
921 #if defined(CONFIG_SOFTMMU)
922 /* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
/* NOTE(review): the initializers of qemu_ld_helpers and qemu_st_helpers
   (four helper-function pointers each, indexed by access size s_bits as
   used in tcg_out_qemu_ld/st below) are missing from this view — restore
   them from the original source before building. */
924 static const void * const qemu_ld_helpers
[4] = {
931 /* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
932 uintxx_t val, int mmu_idx) */
933 static const void * const qemu_st_helpers
[4] = {
941 static void tcg_out_qemu_ld(TCGContext
*s
, const TCGArg
*args
,
944 TCGReg addr_regl
, data_regl
, data_regh
, data_reg1
, data_reg2
;
945 #if defined(CONFIG_SOFTMMU)
946 void *label1_ptr
, *label2_ptr
;
948 int mem_index
, s_bits
;
950 # if TARGET_LONG_BITS == 64
962 #if defined(CONFIG_SOFTMMU)
963 # if TARGET_LONG_BITS == 64
965 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
980 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
981 data_reg1
= data_regh
;
982 data_reg2
= data_regl
;
984 data_reg1
= data_regl
;
985 data_reg2
= data_regh
;
988 data_reg1
= data_regl
;
991 #if defined(CONFIG_SOFTMMU)
992 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_A0
, addr_regl
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
993 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_A0
, TCG_REG_A0
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
994 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, TCG_AREG0
);
995 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
996 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
) + addr_meml
);
997 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T0
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
998 tcg_out_opc_reg(s
, OPC_AND
, TCG_REG_T0
, TCG_REG_T0
, addr_regl
);
1000 # if TARGET_LONG_BITS == 64
1001 label3_ptr
= s
->code_ptr
;
1002 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_T0
, TCG_REG_AT
);
1005 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1006 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_read
) + addr_memh
);
1008 label1_ptr
= s
->code_ptr
;
1009 tcg_out_opc_br(s
, OPC_BEQ
, addr_regh
, TCG_REG_AT
);
1012 reloc_pc16(label3_ptr
, (tcg_target_long
) s
->code_ptr
);
1014 label1_ptr
= s
->code_ptr
;
1015 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_T0
, TCG_REG_AT
);
1021 tcg_out_call_iarg_reg32(s
, &arg_num
, TCG_AREG0
);
1022 # if TARGET_LONG_BITS == 64
1023 tcg_out_call_iarg_reg64(s
, &arg_num
, addr_regl
, addr_regh
);
1025 tcg_out_call_iarg_reg32(s
, &arg_num
, addr_regl
);
1027 tcg_out_call_iarg_imm32(s
, &arg_num
, mem_index
);
1028 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T9
, (tcg_target_long
)qemu_ld_helpers
[s_bits
]);
1029 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
1034 tcg_out_opc_imm(s
, OPC_ANDI
, data_reg1
, TCG_REG_V0
, 0xff);
1037 tcg_out_ext8s(s
, data_reg1
, TCG_REG_V0
);
1040 tcg_out_opc_imm(s
, OPC_ANDI
, data_reg1
, TCG_REG_V0
, 0xffff);
1043 tcg_out_ext16s(s
, data_reg1
, TCG_REG_V0
);
1046 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg1
, TCG_REG_V0
);
1049 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg2
, TCG_REG_V1
);
1050 tcg_out_mov(s
, TCG_TYPE_I32
, data_reg1
, TCG_REG_V0
);
1056 label2_ptr
= s
->code_ptr
;
1057 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_ZERO
, TCG_REG_ZERO
);
1060 /* label1: fast path */
1061 reloc_pc16(label1_ptr
, (tcg_target_long
) s
->code_ptr
);
1063 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_A0
, TCG_REG_A0
,
1064 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
));
1065 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_V0
, TCG_REG_A0
, addr_regl
);
1067 if (GUEST_BASE
== (int16_t)GUEST_BASE
) {
1068 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_V0
, addr_regl
, GUEST_BASE
);
1070 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_V0
, GUEST_BASE
);
1071 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_V0
, TCG_REG_V0
, addr_regl
);
1077 tcg_out_opc_imm(s
, OPC_LBU
, data_reg1
, TCG_REG_V0
, 0);
1080 tcg_out_opc_imm(s
, OPC_LB
, data_reg1
, TCG_REG_V0
, 0);
1083 if (TCG_NEED_BSWAP
) {
1084 tcg_out_opc_imm(s
, OPC_LHU
, TCG_REG_T0
, TCG_REG_V0
, 0);
1085 tcg_out_bswap16(s
, data_reg1
, TCG_REG_T0
);
1087 tcg_out_opc_imm(s
, OPC_LHU
, data_reg1
, TCG_REG_V0
, 0);
1091 if (TCG_NEED_BSWAP
) {
1092 tcg_out_opc_imm(s
, OPC_LHU
, TCG_REG_T0
, TCG_REG_V0
, 0);
1093 tcg_out_bswap16s(s
, data_reg1
, TCG_REG_T0
);
1095 tcg_out_opc_imm(s
, OPC_LH
, data_reg1
, TCG_REG_V0
, 0);
1099 if (TCG_NEED_BSWAP
) {
1100 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 0);
1101 tcg_out_bswap32(s
, data_reg1
, TCG_REG_T0
);
1103 tcg_out_opc_imm(s
, OPC_LW
, data_reg1
, TCG_REG_V0
, 0);
1107 if (TCG_NEED_BSWAP
) {
1108 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 4);
1109 tcg_out_bswap32(s
, data_reg1
, TCG_REG_T0
);
1110 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_T0
, TCG_REG_V0
, 0);
1111 tcg_out_bswap32(s
, data_reg2
, TCG_REG_T0
);
1113 tcg_out_opc_imm(s
, OPC_LW
, data_reg1
, TCG_REG_V0
, 0);
1114 tcg_out_opc_imm(s
, OPC_LW
, data_reg2
, TCG_REG_V0
, 4);
1121 #if defined(CONFIG_SOFTMMU)
1122 reloc_pc16(label2_ptr
, (tcg_target_long
) s
->code_ptr
);
1126 static void tcg_out_qemu_st(TCGContext
*s
, const TCGArg
*args
,
1129 TCGReg addr_regl
, data_regl
, data_regh
, data_reg1
, data_reg2
;
1130 #if defined(CONFIG_SOFTMMU)
1131 uint8_t *label1_ptr
, *label2_ptr
;
1133 int mem_index
, s_bits
;
1136 #if TARGET_LONG_BITS == 64
1137 # if defined(CONFIG_SOFTMMU)
1138 uint8_t *label3_ptr
;
1143 data_regl
= *args
++;
1145 data_regh
= *args
++;
1149 addr_regl
= *args
++;
1150 #if defined(CONFIG_SOFTMMU)
1151 # if TARGET_LONG_BITS == 64
1152 addr_regh
= *args
++;
1153 # if defined(TCG_TARGET_WORDS_BIGENDIAN)
1168 #if defined(TCG_TARGET_WORDS_BIGENDIAN)
1169 data_reg1
= data_regh
;
1170 data_reg2
= data_regl
;
1172 data_reg1
= data_regl
;
1173 data_reg2
= data_regh
;
1176 data_reg1
= data_regl
;
1180 #if defined(CONFIG_SOFTMMU)
1181 tcg_out_opc_sa(s
, OPC_SRL
, TCG_REG_A0
, addr_regl
, TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
);
1182 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_A0
, TCG_REG_A0
, (CPU_TLB_SIZE
- 1) << CPU_TLB_ENTRY_BITS
);
1183 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, TCG_AREG0
);
1184 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1185 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
) + addr_meml
);
1186 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T0
, TARGET_PAGE_MASK
| ((1 << s_bits
) - 1));
1187 tcg_out_opc_reg(s
, OPC_AND
, TCG_REG_T0
, TCG_REG_T0
, addr_regl
);
1189 # if TARGET_LONG_BITS == 64
1190 label3_ptr
= s
->code_ptr
;
1191 tcg_out_opc_br(s
, OPC_BNE
, TCG_REG_T0
, TCG_REG_AT
);
1194 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_AT
, TCG_REG_A0
,
1195 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addr_write
) + addr_memh
);
1197 label1_ptr
= s
->code_ptr
;
1198 tcg_out_opc_br(s
, OPC_BEQ
, addr_regh
, TCG_REG_AT
);
1201 reloc_pc16(label3_ptr
, (tcg_target_long
) s
->code_ptr
);
1203 label1_ptr
= s
->code_ptr
;
1204 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_T0
, TCG_REG_AT
);
1210 tcg_out_call_iarg_reg32(s
, &arg_num
, TCG_AREG0
);
1211 # if TARGET_LONG_BITS == 64
1212 tcg_out_call_iarg_reg64(s
, &arg_num
, addr_regl
, addr_regh
);
1214 tcg_out_call_iarg_reg32(s
, &arg_num
, addr_regl
);
1218 tcg_out_call_iarg_reg8(s
, &arg_num
, data_regl
);
1221 tcg_out_call_iarg_reg16(s
, &arg_num
, data_regl
);
1224 tcg_out_call_iarg_reg32(s
, &arg_num
, data_regl
);
1227 tcg_out_call_iarg_reg64(s
, &arg_num
, data_regl
, data_regh
);
1232 tcg_out_call_iarg_imm32(s
, &arg_num
, mem_index
);
1233 tcg_out_movi(s
, TCG_TYPE_I32
, TCG_REG_T9
, (tcg_target_long
)qemu_st_helpers
[s_bits
]);
1234 tcg_out_opc_reg(s
, OPC_JALR
, TCG_REG_RA
, TCG_REG_T9
, 0);
1237 label2_ptr
= s
->code_ptr
;
1238 tcg_out_opc_br(s
, OPC_BEQ
, TCG_REG_ZERO
, TCG_REG_ZERO
);
1241 /* label1: fast path */
1242 reloc_pc16(label1_ptr
, (tcg_target_long
) s
->code_ptr
);
1244 tcg_out_opc_imm(s
, OPC_LW
, TCG_REG_A0
, TCG_REG_A0
,
1245 offsetof(CPUArchState
, tlb_table
[mem_index
][0].addend
));
1246 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, addr_regl
);
1248 if (GUEST_BASE
== (int16_t)GUEST_BASE
) {
1249 tcg_out_opc_imm(s
, OPC_ADDIU
, TCG_REG_A0
, addr_regl
, GUEST_BASE
);
1251 tcg_out_movi(s
, TCG_TYPE_PTR
, TCG_REG_A0
, GUEST_BASE
);
1252 tcg_out_opc_reg(s
, OPC_ADDU
, TCG_REG_A0
, TCG_REG_A0
, addr_regl
);
1259 tcg_out_opc_imm(s
, OPC_SB
, data_reg1
, TCG_REG_A0
, 0);
1262 if (TCG_NEED_BSWAP
) {
1263 tcg_out_opc_imm(s
, OPC_ANDI
, TCG_REG_T0
, data_reg1
, 0xffff);
1264 tcg_out_bswap16(s
, TCG_REG_T0
, TCG_REG_T0
);
1265 tcg_out_opc_imm(s
, OPC_SH
, TCG_REG_T0
, TCG_REG_A0
, 0);
1267 tcg_out_opc_imm(s
, OPC_SH
, data_reg1
, TCG_REG_A0
, 0);
1271 if (TCG_NEED_BSWAP
) {
1272 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg1
);
1273 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 0);
1275 tcg_out_opc_imm(s
, OPC_SW
, data_reg1
, TCG_REG_A0
, 0);
1279 if (TCG_NEED_BSWAP
) {
1280 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg2
);
1281 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 0);
1282 tcg_out_bswap32(s
, TCG_REG_T0
, data_reg1
);
1283 tcg_out_opc_imm(s
, OPC_SW
, TCG_REG_T0
, TCG_REG_A0
, 4);
1285 tcg_out_opc_imm(s
, OPC_SW
, data_reg1
, TCG_REG_A0
, 0);
1286 tcg_out_opc_imm(s
, OPC_SW
, data_reg2
, TCG_REG_A0
, 4);
1293 #if defined(CONFIG_SOFTMMU)
1294 reloc_pc16(label2_ptr
, (tcg_target_long
) s
->code_ptr
);
/*
 * Emit MIPS machine code for one TCG opcode.
 *
 * args[] holds the operands (outputs first, then inputs, then
 * immediates/condition codes/labels); const_args[i] is nonzero when
 * args[i] is a constant rather than a register.  AT and T0 are
 * reserved registers (see tcg_target_init) and are used as scratch.
 *
 * NOTE(review): MIPS jumps/branches have a delay slot; the nop after
 * each JR/JALR fills it.  Extraction of this chunk dropped the
 * break/else/nop lines — they are reconstructed here and should be
 * confirmed against upstream QEMU.
 */
static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
                              const TCGArg *args, const int *const_args)
{
    switch(opc) {
    case INDEX_op_exit_tb:
        /* Return value in V0, then jump to the epilogue (tb_ret_addr)
           through AT. */
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_V0, args[0]);
        tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, (tcg_target_long)tb_ret_addr);
        tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
        tcg_out_nop(s);
        break;
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            tcg_abort();
        } else {
            /* indirect jump method: load the chained-TB target address
               from s->tb_next[args[0]] and jump through AT. */
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_AT,
                         (tcg_target_long)(s->tb_next + args[0]));
            tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_AT, TCG_REG_AT, 0);
            tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_AT, 0);
        }
        tcg_out_nop(s);
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        break;
    case INDEX_op_call:
        tcg_out_opc_reg(s, OPC_JALR, TCG_REG_RA, args[0], 0);
        tcg_out_nop(s);
        break;
    case INDEX_op_br:
        /* Unconditional branch: beq $zero, $zero, label. */
        tcg_out_brcond(s, TCG_COND_EQ, TCG_REG_ZERO, TCG_REG_ZERO, args[0]);
        break;

    case INDEX_op_mov_i32:
        tcg_out_mov(s, TCG_TYPE_I32, args[0], args[1]);
        break;
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], args[1]);
        break;

    /* Host loads/stores: args = (value, base, offset). */
    case INDEX_op_ld8u_i32:
        tcg_out_ldst(s, OPC_LBU, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld8s_i32:
        tcg_out_ldst(s, OPC_LB, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16u_i32:
        tcg_out_ldst(s, OPC_LHU, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld16s_i32:
        tcg_out_ldst(s, OPC_LH, args[0], args[1], args[2]);
        break;
    case INDEX_op_ld_i32:
        tcg_out_ldst(s, OPC_LW, args[0], args[1], args[2]);
        break;
    case INDEX_op_st8_i32:
        tcg_out_ldst(s, OPC_SB, args[0], args[1], args[2]);
        break;
    case INDEX_op_st16_i32:
        tcg_out_ldst(s, OPC_SH, args[0], args[1], args[2]);
        break;
    case INDEX_op_st_i32:
        tcg_out_ldst(s, OPC_SW, args[0], args[1], args[2]);
        break;

    case INDEX_op_add_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_ADDU, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_add2_i32:
        /* 64-bit add from 32-bit halves: low sum into scratch AT,
           carry into T0 via SLTU (unsigned overflow iff result < an
           addend), high sum plus carry, and only then copy AT to the
           low output so the inputs are not clobbered early. */
        if (const_args[4]) {
            tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], args[4]);
        } else {
            tcg_out_opc_reg(s, OPC_ADDU, TCG_REG_AT, args[2], args[4]);
        }
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, TCG_REG_AT, args[2]);
        if (const_args[5]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], args[5]);
        } else {
            tcg_out_opc_reg(s, OPC_ADDU, args[1], args[3], args[5]);
        }
        tcg_out_opc_reg(s, OPC_ADDU, args[1], args[1], TCG_REG_T0);
        tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
        break;
    case INDEX_op_sub_i32:
        /* Subtract immediate == add negated immediate (rJ constraint). */
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[0], args[1], -args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SUBU, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_sub2_i32:
        /* As add2 but with borrow: SLTU tests args[2] < low result. */
        if (const_args[4]) {
            tcg_out_opc_imm(s, OPC_ADDIU, TCG_REG_AT, args[2], -args[4]);
        } else {
            tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, args[2], args[4]);
        }
        tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_T0, args[2], TCG_REG_AT);
        if (const_args[5]) {
            tcg_out_opc_imm(s, OPC_ADDIU, args[1], args[3], -args[5]);
        } else {
            tcg_out_opc_reg(s, OPC_SUBU, args[1], args[3], args[5]);
        }
        tcg_out_opc_reg(s, OPC_SUBU, args[1], args[1], TCG_REG_T0);
        tcg_out_mov(s, TCG_TYPE_I32, args[0], TCG_REG_AT);
        break;
    case INDEX_op_mul_i32:
        if (use_mips32_instructions) {
            /* MIPS32 three-operand multiply. */
            tcg_out_opc_reg(s, OPC_MUL, args[0], args[1], args[2]);
        } else {
            /* Pre-MIPS32: multiply into HI/LO, then read LO. */
            tcg_out_opc_reg(s, OPC_MULT, 0, args[1], args[2]);
            tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        }
        break;
    case INDEX_op_muls2_i32:
        tcg_out_opc_reg(s, OPC_MULT, 0, args[2], args[3]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        tcg_out_opc_reg(s, OPC_MFHI, args[1], 0, 0);
        break;
    case INDEX_op_mulu2_i32:
        tcg_out_opc_reg(s, OPC_MULTU, 0, args[2], args[3]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        tcg_out_opc_reg(s, OPC_MFHI, args[1], 0, 0);
        break;
    case INDEX_op_mulsh_i32:
        tcg_out_opc_reg(s, OPC_MULT, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
        break;
    case INDEX_op_muluh_i32:
        tcg_out_opc_reg(s, OPC_MULTU, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
        break;
    case INDEX_op_div_i32:
        /* DIV/DIVU leave quotient in LO and remainder in HI. */
        tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        break;
    case INDEX_op_divu_i32:
        tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFLO, args[0], 0, 0);
        break;
    case INDEX_op_rem_i32:
        tcg_out_opc_reg(s, OPC_DIV, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
        break;
    case INDEX_op_remu_i32:
        tcg_out_opc_reg(s, OPC_DIVU, 0, args[1], args[2]);
        tcg_out_opc_reg(s, OPC_MFHI, args[0], 0, 0);
        break;

    case INDEX_op_and_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ANDI, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_AND, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_or_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_ORI, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_OR, args[0], args[1], args[2]);
        }
        break;
    case INDEX_op_nor_i32:
        tcg_out_opc_reg(s, OPC_NOR, args[0], args[1], args[2]);
        break;
    case INDEX_op_not_i32:
        /* not x == nor $zero, x */
        tcg_out_opc_reg(s, OPC_NOR, args[0], TCG_REG_ZERO, args[1]);
        break;
    case INDEX_op_xor_i32:
        if (const_args[2]) {
            tcg_out_opc_imm(s, OPC_XORI, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, args[0], args[1], args[2]);
        }
        break;

    case INDEX_op_sar_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SRA, args[0], args[1], args[2]);
        } else {
            /* Variable shifts take the amount in rs: xxxV rd, rt, rs,
               hence args[2] before args[1]. */
            tcg_out_opc_reg(s, OPC_SRAV, args[0], args[2], args[1]);
        }
        break;
    case INDEX_op_shl_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SLL, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SLLV, args[0], args[2], args[1]);
        }
        break;
    case INDEX_op_shr_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_SRL, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_SRLV, args[0], args[2], args[1]);
        }
        break;
    case INDEX_op_rotl_i32:
        /* MIPS32r2 only has rotate-right: rotl n == rotr (32 - n). */
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], 0x20 - args[2]);
        } else {
            tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_AT, 32);
            tcg_out_opc_reg(s, OPC_SUBU, TCG_REG_AT, TCG_REG_AT, args[2]);
            tcg_out_opc_reg(s, OPC_ROTRV, args[0], TCG_REG_AT, args[1]);
        }
        break;
    case INDEX_op_rotr_i32:
        if (const_args[2]) {
            tcg_out_opc_sa(s, OPC_ROTR, args[0], args[1], args[2]);
        } else {
            tcg_out_opc_reg(s, OPC_ROTRV, args[0], args[2], args[1]);
        }
        break;

    case INDEX_op_bswap16_i32:
        tcg_out_opc_reg(s, OPC_WSBH, args[0], 0, args[1]);
        break;
    case INDEX_op_bswap32_i32:
        /* Swap bytes within each halfword, then rotate the halfwords. */
        tcg_out_opc_reg(s, OPC_WSBH, args[0], 0, args[1]);
        tcg_out_opc_sa(s, OPC_ROTR, args[0], args[0], 16);
        break;

    case INDEX_op_ext8s_i32:
        tcg_out_opc_reg(s, OPC_SEB, args[0], 0, args[1]);
        break;
    case INDEX_op_ext16s_i32:
        tcg_out_opc_reg(s, OPC_SEH, args[0], 0, args[1]);
        break;

    case INDEX_op_deposit_i32:
        /* INS encodes msb (pos + len - 1) at bit 11 and lsb (pos) at
           bit 6 of the immediate field. */
        tcg_out_opc_imm(s, OPC_INS, args[0], args[2],
                        ((args[3] + args[4] - 1) << 11) | (args[3] << 6));
        break;

    case INDEX_op_brcond_i32:
        tcg_out_brcond(s, args[2], args[0], args[1], args[3]);
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args[4], args[0], args[1], args[2], args[3], args[5]);
        break;

    case INDEX_op_movcond_i32:
        tcg_out_movcond(s, args[5], args[0], args[1], args[2], args[3]);
        break;

    case INDEX_op_setcond_i32:
        tcg_out_setcond(s, args[3], args[0], args[1], args[2]);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args[5], args[0], args[1], args[2], args[3], args[4]);
        break;

    /* Guest memory accesses.  The low two bits of the size code are
       log2 of the access size; the "| 4" on the signed variants
       requests sign extension. */
    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
        break;
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
        break;
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
        break;
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
        break;
    case INDEX_op_qemu_ld32:
        tcg_out_qemu_ld(s, args, 2);
        break;
    case INDEX_op_qemu_ld64:
        tcg_out_qemu_ld(s, args, 3);
        break;
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
        break;
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
        break;
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        break;
    case INDEX_op_qemu_st64:
        tcg_out_qemu_st(s, args, 3);
        break;

    default:
        tcg_abort();
    }
}
/*
 * Operand constraints for every opcode this backend implements.
 * "r" = any register, "Z" = may be the constant zero ($zero),
 * "I"/"J" = immediate usable by the logical/additive forms,
 * "i" = any immediate, "0" = must match operand 0, "C"/"L"/"l"/"S" =
 * call/qemu_ld/qemu_st register classes.
 */
static const TCGTargetOpDef mips_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "C" } },
    { INDEX_op_br, { } },

    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "rZ", "r" } },
    { INDEX_op_st16_i32, { "rZ", "r" } },
    { INDEX_op_st_i32, { "rZ", "r" } },

    { INDEX_op_add_i32, { "r", "rZ", "rJ" } },
    { INDEX_op_mul_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_muls2_i32, { "r", "r", "rZ", "rZ" } },
    { INDEX_op_mulu2_i32, { "r", "r", "rZ", "rZ" } },
    { INDEX_op_mulsh_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_muluh_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_div_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_divu_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_rem_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_remu_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_sub_i32, { "r", "rZ", "rJ" } },

    { INDEX_op_and_i32, { "r", "rZ", "rI" } },
    { INDEX_op_nor_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_not_i32, { "r", "rZ" } },
    { INDEX_op_or_i32, { "r", "rZ", "rIZ" } },
    { INDEX_op_xor_i32, { "r", "rZ", "rIZ" } },

    { INDEX_op_shl_i32, { "r", "rZ", "ri" } },
    { INDEX_op_shr_i32, { "r", "rZ", "ri" } },
    { INDEX_op_sar_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotr_i32, { "r", "rZ", "ri" } },
    { INDEX_op_rotl_i32, { "r", "rZ", "ri" } },

    { INDEX_op_bswap16_i32, { "r", "r" } },
    { INDEX_op_bswap32_i32, { "r", "r" } },

    { INDEX_op_ext8s_i32, { "r", "rZ" } },
    { INDEX_op_ext16s_i32, { "r", "rZ" } },

    { INDEX_op_deposit_i32, { "r", "0", "rZ" } },

    { INDEX_op_brcond_i32, { "rZ", "rZ" } },
    { INDEX_op_movcond_i32, { "r", "rZ", "rZ", "rZ", "0" } },
    { INDEX_op_setcond_i32, { "r", "rZ", "rZ" } },
    { INDEX_op_setcond2_i32, { "r", "rZ", "rZ", "rZ", "rZ" } },

    { INDEX_op_add2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
    { INDEX_op_sub2_i32, { "r", "r", "rZ", "rZ", "rJ", "rJ" } },
    { INDEX_op_brcond2_i32, { "rZ", "rZ", "rZ", "rZ" } },

#if TARGET_LONG_BITS == 32
    /* 32-bit guest addresses: one address operand. */
    { INDEX_op_qemu_ld8u, { "L", "lZ" } },
    { INDEX_op_qemu_ld8s, { "L", "lZ" } },
    { INDEX_op_qemu_ld16u, { "L", "lZ" } },
    { INDEX_op_qemu_ld16s, { "L", "lZ" } },
    { INDEX_op_qemu_ld32, { "L", "lZ" } },
    { INDEX_op_qemu_ld64, { "L", "L", "lZ" } },

    { INDEX_op_qemu_st8, { "SZ", "SZ" } },
    { INDEX_op_qemu_st16, { "SZ", "SZ" } },
    { INDEX_op_qemu_st32, { "SZ", "SZ" } },
    { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ" } },
#else
    /* 64-bit guest addresses: address passed as a register pair. */
    { INDEX_op_qemu_ld8u, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld8s, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld16u, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld16s, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld32, { "L", "lZ", "lZ" } },
    { INDEX_op_qemu_ld64, { "L", "L", "lZ", "lZ" } },

    { INDEX_op_qemu_st8, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st16, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st32, { "SZ", "SZ", "SZ" } },
    { INDEX_op_qemu_st64, { "SZ", "SZ", "SZ", "SZ" } },
#endif
    { -1 },
};
/* Callee-saved registers, saved/restored around the generated-code
   call by tcg_target_qemu_prologue().
   NOTE(review): the extractor dropped the interior entries (original
   lines 1676-1683); S1-S7 and FP are reconstructed from upstream —
   confirm against the repository. */
static int tcg_target_callee_save_regs[] = {
    TCG_REG_S0,       /* used for the global env (TCG_AREG0) */
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_FP,
    TCG_REG_RA,       /* should be last for ABI compliance */
};
/* The Linux kernel doesn't provide any information about the available
   instruction set. Probe it using a signal handler. */

/* ISA feature flags.  Each may be pre-defined as a macro by the
   target header (hence the #ifndef guards); otherwise it defaults to
   false here and is set by tcg_target_detect_isa() at startup. */
#ifndef use_movnz_instructions
bool use_movnz_instructions = false;
#endif

#ifndef use_mips32_instructions
bool use_mips32_instructions = false;
#endif

#ifndef use_mips32r2_instructions
bool use_mips32r2_instructions = false;
#endif

/* Set by sigill_handler() when a probed instruction raises SIGILL. */
static volatile sig_atomic_t got_sigill;
/* SIGILL handler used during ISA probing: record the fault and step
   the PC past the offending 4-byte instruction so execution resumes
   after the probe. */
static void sigill_handler(int signo, siginfo_t *si, void *data)
{
    /* Skip the faulty instruction */
    ucontext_t *uc = (ucontext_t *)data;
    uc->uc_mcontext.pc += 4;

    /* NOTE(review): reconstructed line (dropped by extraction) —
       upstream sets the flag here. */
    got_sigill = true;
}
/* Probe the host CPU for optional instructions by executing one
   representative instruction of each class under a temporary SIGILL
   handler.  The ".set mipsXX" directives and the flag resets were
   dropped by extraction and are reconstructed — confirm upstream. */
static void tcg_target_detect_isa(void)
{
    struct sigaction sa_old, sa_new;

    memset(&sa_new, 0, sizeof(sa_new));
    sa_new.sa_flags = SA_SIGINFO;
    sa_new.sa_sigaction = sigill_handler;
    sigaction(SIGILL, &sa_new, &sa_old);

    /* Probe for movn/movz, necessary to implement movcond. */
#ifndef use_movnz_instructions
    got_sigill = false;
    asm volatile(".set push\n"
                 ".set mips32\n"
                 "movn $zero, $zero, $zero\n"
                 "movz $zero, $zero, $zero\n"
                 ".set pop\n"
                 );
    use_movnz_instructions = !got_sigill;
#endif

    /* Probe for MIPS32 instructions. As no subsetting is allowed
       by the specification, it is only necessary to probe for one
       of the instructions. */
#ifndef use_mips32_instructions
    got_sigill = false;
    asm volatile(".set push\n"
                 ".set mips32\n"
                 "mul $zero, $zero\n"
                 ".set pop\n"
                 );
    use_mips32_instructions = !got_sigill;
#endif

    /* Probe for MIPS32r2 instructions if MIPS32 instructions are
       available. As no subsetting is allowed by the specification,
       it is only necessary to probe for one of the instructions. */
#ifndef use_mips32r2_instructions
    if (use_mips32_instructions) {
        got_sigill = false;
        asm volatile(".set push\n"
                     ".set mips32r2\n"
                     "seb $zero, $zero\n"
                     ".set pop\n"
                     );
        use_mips32r2_instructions = !got_sigill;
    }
#endif

    /* Restore the previous SIGILL disposition. */
    sigaction(SIGILL, &sa_old, NULL);
}
/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    int i, frame_size;

    /* reserve some stack space, also for TCG temps. */
    frame_size = ARRAY_SIZE(tcg_target_callee_save_regs) * 4
                 + TCG_STATIC_CALL_ARGS_SIZE
                 + CPU_TEMP_BUF_NLONGS * sizeof(long);
    /* Round the frame up to the target stack alignment. */
    frame_size = (frame_size + TCG_TARGET_STACK_ALIGN - 1) &
                 ~(TCG_TARGET_STACK_ALIGN - 1);
    tcg_set_frame(s, TCG_REG_SP, ARRAY_SIZE(tcg_target_callee_save_regs) * 4
                  + TCG_STATIC_CALL_ARGS_SIZE,
                  CPU_TEMP_BUF_NLONGS * sizeof(long));

    /* TB prologue: allocate the frame and save callee-saved regs
       above the outgoing-argument area. */
    tcg_out_addi(s, TCG_REG_SP, -frame_size);
    for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
        tcg_out_st(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
    }

    /* Call generated code.  The mov after JR executes in the branch
       delay slot, loading env (first C argument) into TCG_AREG0
       before control reaches the TB. */
    tcg_out_opc_reg(s, OPC_JR, 0, tcg_target_call_iarg_regs[1], 0);
    tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
    /* exit_tb jumps back here. */
    tb_ret_addr = s->code_ptr;

    /* TB epilogue: restore callee-saved regs ... */
    for(i = 0 ; i < ARRAY_SIZE(tcg_target_callee_save_regs) ; i++) {
        tcg_out_ld(s, TCG_TYPE_I32, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE + i * 4);
    }

    /* ... and return; the stack adjustment sits in the JR delay slot. */
    tcg_out_opc_reg(s, OPC_JR, 0, TCG_REG_RA, 0);
    tcg_out_addi(s, TCG_REG_SP, frame_size);
}
/* One-time backend initialisation: probe the host ISA, declare the
   available/call-clobbered register sets, reserve special registers,
   and register the operand constraints.
   NOTE(review): the clobber mask entries (original lines 1809-1823)
   were dropped by extraction; the o32 caller-saved set below is
   reconstructed from upstream — confirm against the repository. */
static void tcg_target_init(TCGContext *s)
{
    tcg_target_detect_isa();
    tcg_regset_set(tcg_target_available_regs[TCG_TYPE_I32], 0xffffffff);
    tcg_regset_set(tcg_target_call_clobber_regs,
                   (1 << TCG_REG_V0) |
                   (1 << TCG_REG_V1) |
                   (1 << TCG_REG_A0) |
                   (1 << TCG_REG_A1) |
                   (1 << TCG_REG_A2) |
                   (1 << TCG_REG_A3) |
                   (1 << TCG_REG_T1) |
                   (1 << TCG_REG_T2) |
                   (1 << TCG_REG_T3) |
                   (1 << TCG_REG_T4) |
                   (1 << TCG_REG_T5) |
                   (1 << TCG_REG_T6) |
                   (1 << TCG_REG_T7) |
                   (1 << TCG_REG_T8) |
                   (1 << TCG_REG_T9));

    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO); /* zero register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_K0);   /* kernel use only */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_K1);   /* kernel use only */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_AT);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_T0);   /* internal use */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RA);   /* return address */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP);   /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_GP);   /* global pointer */

    tcg_add_target_add_op_defs(mips_op_defs);
}