/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
25 static const char * const tcg_target_reg_names
[TCG_TARGET_NB_REGS
] = {
60 static const int tcg_target_reg_alloc_order
[] = {
78 static const int tcg_target_call_iarg_regs
[4] = {
85 static const int tcg_target_call_oarg_regs
[2] = {
/* Patch the instruction at code_ptr with relocation kind 'type' so that it
   resolves to 'value' + 'addend'.
   NOTE(review): the enclosing switch statement and its default case are not
   visible in this chunk. */
static void patch_reloc(uint8_t *code_ptr, int type,
                        tcg_target_long value, tcg_target_long addend)
    case R_PARISC_PCREL17F:
        /* 17-bit PC-relative branch displacement */
        hppa_patch17f((uint32_t *)code_ptr, value, addend);
/* Maximum number of registers used for input function arguments.
   NOTE(review): the function body is not visible in this chunk; 'flags'
   appears to be a calling-convention selector -- confirm against tcg.c. */
static inline int tcg_target_get_call_iarg_regs_count(int flags)
/* Parse one target-specific constraint letter from *pct_str into *ct.
   NOTE(review): the switch statement and the case label for the first
   clause (presumably 'r') are not visible in this chunk. */
int target_parse_constraint(TCGArgConstraint *ct, const char **pct_str)
        /* any general register */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
    case 'L': /* qemu_ld/st constraint */
        ct->ct |= TCG_CT_REG;
        tcg_regset_set32(ct->u.regs, 0, 0xffffffff);
        /* Exclude R26-R23: they carry the arguments of the softmmu
           qemu_ld/qemu_st helper calls (see tcg_out_qemu_ld/st). */
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R26);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R25);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R24);
        tcg_regset_reset_reg(ct->u.regs, TCG_REG_R23);
/* Test if a constant matches the constraint.
   NOTE(review): the function body is not visible in this chunk. */
static inline int tcg_target_const_match(tcg_target_long val,
                                         const TCGArgConstraint *arg_ct)
/* PA-RISC instruction-word field encoders: each macro places its argument
   into the bit position of the named field of a 32-bit instruction. */
#define INSN_OP(x)       ((x) << 26)        /* major opcode */
#define INSN_EXT3BR(x)   ((x) << 13)        /* branch sub-opcode */
#define INSN_EXT3SH(x)   ((x) << 10)        /* shift/deposit sub-opcode */
#define INSN_EXT4(x)     ((x) << 6)
#define INSN_EXT5(x)     (x)
#define INSN_EXT6(x)     ((x) << 6)
#define INSN_EXT7(x)     ((x) << 6)
#define INSN_EXT8A(x)    ((x) << 6)
#define INSN_EXT8B(x)    ((x) << 5)
#define INSN_T(x)        (x)                /* target register */
#define INSN_R1(x)       ((x) << 16)        /* first source register */
#define INSN_R2(x)       ((x) << 21)        /* second source / base register */
#define INSN_DEP_LEN(x)  (32 - (x))         /* deposit/extract length field */
#define INSN_SHDEP_CP(x) ((31 - (x)) << 5)  /* bit position, complemented */
#define INSN_SHDEP_P(x)  ((x) << 5)         /* bit position, direct */
#define INSN_COND(x)     ((x) << 13)        /* compare/branch condition */

/* Three-register ALU operations (major opcode 0x02). */
#define ARITH_ADD  (INSN_OP(0x02) | INSN_EXT6(0x28))
#define ARITH_AND  (INSN_OP(0x02) | INSN_EXT6(0x08))
#define ARITH_OR   (INSN_OP(0x02) | INSN_EXT6(0x09))
#define ARITH_XOR  (INSN_OP(0x02) | INSN_EXT6(0x0a))
#define ARITH_SUB  (INSN_OP(0x02) | INSN_EXT6(0x10))

/* Shift, deposit and extract instructions. */
#define SHD    (INSN_OP(0x34) | INSN_EXT3SH(2))
#define VSHD   (INSN_OP(0x34) | INSN_EXT3SH(0))
#define DEP    (INSN_OP(0x35) | INSN_EXT3SH(3))
#define ZDEP   (INSN_OP(0x35) | INSN_EXT3SH(2))
#define ZVDEP  (INSN_OP(0x35) | INSN_EXT3SH(0))
#define EXTRU  (INSN_OP(0x34) | INSN_EXT3SH(6))
#define EXTRS  (INSN_OP(0x34) | INSN_EXT3SH(7))
#define VEXTRS (INSN_OP(0x34) | INSN_EXT3SH(5))

#define SUBI   (INSN_OP(0x25))
#define MTCTL  (INSN_OP(0x00) | INSN_EXT8B(0xc2))

/* Branches. */
#define BL      (INSN_OP(0x3a) | INSN_EXT3BR(0))
#define BLE_SR4 (INSN_OP(0x39) | (1 << 13))
#define BV      (INSN_OP(0x3a) | INSN_EXT3BR(6))
#define BV_N    (INSN_OP(0x3a) | INSN_EXT3BR(6) | 2)  /* BV with nullified delay slot */
#define LDIL    (INSN_OP(0x08))
#define LDO     (INSN_OP(0x0d))

/* Loads. */
#define LDB  (INSN_OP(0x10))
#define LDH  (INSN_OP(0x11))
#define LDW  (INSN_OP(0x12))
#define LDWM (INSN_OP(0x13))

/* Stores. */
#define STB  (INSN_OP(0x18))
#define STH  (INSN_OP(0x19))
#define STW  (INSN_OP(0x1a))
#define STWM (INSN_OP(0x1b))

/* Compare-and-branch, true/false condition. */
#define COMBT (INSN_OP(0x20))
#define COMBF (INSN_OP(0x22))
/* Assemble a PA-RISC "low sign" immediate field: the sign bit of the
   'length'-bit value 'val' is stored in the least-significant bit of the
   field and the remaining bits are shifted left by one; the whole field is
   then placed at bit position 'start'.
   Fix: use ~0u instead of ~0 for the mask -- left-shifting the negative
   int ~0 is undefined behavior in C99/C11 (6.5.7). */
static int lowsignext(uint32_t val, int start, int length)
{
    return (((val << 1) & ~(~0u << length)) |
            ((val >> (length - 1)) & 1)) << start;
}
220 static inline void tcg_out_mov(TCGContext
*s
, int ret
, int arg
)
222 /* PA1.1 defines COPY as OR r,0,t */
223 tcg_out32(s
, ARITH_OR
| INSN_T(ret
) | INSN_R1(arg
) | INSN_R2(TCG_REG_R0
));
225 /* PA2.0 defines COPY as LDO 0(r),t
226 * but hppa-dis.c is unaware of this definition */
227 /* tcg_out32(s, LDO | INSN_R1(ret) | INSN_R2(arg) | reassemble_14(0)); */
/* Load constant 'arg' into register 'ret'.
   NOTE(review): the reassemble_14 operand of the first LDO and the else
   branch introduction are missing from this chunk. */
static inline void tcg_out_movi(TCGContext *s, TCGType type,
                                int ret, tcg_target_long arg)
    if (arg == (arg & 0x1fff)) {
        /* small non-negative constant: single LDO off r0 */
        tcg_out32(s, LDO | INSN_R1(ret) | INSN_R2(TCG_REG_R0) |
        /* otherwise build it in two steps: LDIL sets the left (high 21-bit)
           part, LDO adds the right (low) part */
        tcg_out32(s, LDIL | INSN_R2(ret) |
                     reassemble_21(lrsel((uint32_t)arg, 0)));
        tcg_out32(s, LDO | INSN_R1(ret) | INSN_R2(ret) |
                     reassemble_14(rrsel((uint32_t)arg, 0)));
/* Load the 32-bit word at an absolute host address into 'ret':
   LDIL for the high part of the address, then LDW with the low offset.
   NOTE(review): the remainder of the parameter list (the 'arg' address
   parameter) is not visible in this chunk. */
static inline void tcg_out_ld_raw(TCGContext *s, int ret,
    tcg_out32(s, LDIL | INSN_R2(ret) |
                 reassemble_21(lrsel((uint32_t)arg, 0)));
    tcg_out32(s, LDW | INSN_R1(ret) | INSN_R2(ret) |
                 reassemble_14(rrsel((uint32_t)arg, 0)));
/* Load a host pointer from an absolute address; on this 32-bit target a
   pointer load is identical to a 32-bit word load.
   NOTE(review): the remainder of the parameter list is not visible here. */
static inline void tcg_out_ld_ptr(TCGContext *s, int ret,
    tcg_out_ld_raw(s, ret, arg);
/* Emit a load/store of the given opcode for register 'ret' at addr+offset.
   NOTE(review): the trailing 'op' parameter line and the 'else' keyword
   are not visible in this chunk. */
static inline void tcg_out_ldst(TCGContext *s, int ret, int addr, int offset,
    if (offset == (offset & 0xfff))
        tcg_out32(s, op | INSN_R1(ret) | INSN_R2(addr) |
                     reassemble_14(offset));
        /* offsets that do not fit the immediate field are not handled yet */
        fprintf(stderr, "unimplemented %s with offset %d\n", __func__, offset);
/* TCG backend callback: load from host address arg1+arg2 into 'ret'.
   Not implemented for this target yet -- only reports the call. */
static inline void tcg_out_ld(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
    fprintf(stderr, "unimplemented %s\n", __func__);
/* TCG backend callback: store 'ret' to host address arg1+arg2.
   Not implemented for this target yet -- only reports the call. */
static inline void tcg_out_st(TCGContext *s, TCGType type, int ret,
                              int arg1, tcg_target_long arg2)
    fprintf(stderr, "unimplemented %s\n", __func__);
286 static inline void tcg_out_arith(TCGContext
*s
, int t
, int r1
, int r2
, int op
)
288 tcg_out32(s
, op
| INSN_T(t
) | INSN_R1(r1
) | INSN_R2(r2
));
/* ALU operation with an immediate: t = r1 <op> val.  The immediate is
   materialized in the scratch register R20, then the register-register
   form is used (R20 is reserved for this in tcg_target_init). */
static inline void tcg_out_arithi(TCGContext *s, int t, int r1,
                                  tcg_target_long val, int op)
    tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R20, val);
    tcg_out_arith(s, t, r1, TCG_REG_R20, op);
/* reg += val */
static inline void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
    tcg_out_arithi(s, reg, reg, val, ARITH_ADD);
303 static inline void tcg_out_nop(TCGContext
*s
)
305 tcg_out32(s
, ARITH_OR
| INSN_T(TCG_REG_R0
) | INSN_R1(TCG_REG_R0
) |
306 INSN_R2(TCG_REG_R0
));
309 static inline void tcg_out_ext8s(TCGContext
*s
, int ret
, int arg
) {
310 tcg_out32(s
, EXTRS
| INSN_R1(ret
) | INSN_R2(arg
) |
311 INSN_SHDEP_P(31) | INSN_DEP_LEN(8));
314 static inline void tcg_out_ext16s(TCGContext
*s
, int ret
, int arg
) {
315 tcg_out32(s
, EXTRS
| INSN_R1(ret
) | INSN_R2(arg
) |
316 INSN_SHDEP_P(31) | INSN_DEP_LEN(16));
/* Byte-swap the low 16 bits of 'arg' into 'ret'.  The instruction order
   is significant: copy, DEP to duplicate the low byte, then SHD to shift
   the pair into swapped position. */
static inline void tcg_out_bswap16(TCGContext *s, int ret, int arg) {
    tcg_out_mov(s, ret, arg);
    tcg_out32(s, DEP | INSN_R2(ret) | INSN_R1(ret) |
                 INSN_SHDEP_CP(15) | INSN_DEP_LEN(8));
    tcg_out32(s, SHD | INSN_T(ret) | INSN_R1(TCG_REG_R0) |
                 INSN_R2(ret) | INSN_SHDEP_CP(8));
/* Byte-swap the 32-bit value in 'arg' into 'ret', using 'temp' as a
   scratch register.  Classic PA-RISC 3-instruction byte reversal:
   SHD with both sources = arg effectively rotates by 16 into temp,
   DEP merges a byte, and the final SHD produces the swapped word. */
static inline void tcg_out_bswap32(TCGContext *s, int ret, int arg, int temp) {
    tcg_out32(s, SHD | INSN_T(temp) | INSN_R1(arg) |
                 INSN_R2(arg) | INSN_SHDEP_CP(16));
    tcg_out32(s, DEP | INSN_R2(temp) | INSN_R1(temp) |
                 INSN_SHDEP_CP(15) | INSN_DEP_LEN(8));
    tcg_out32(s, SHD | INSN_T(ret) | INSN_R1(arg) |
                 INSN_R2(temp) | INSN_SHDEP_CP(8));
/* Emit a call to the C function 'func': load the code address into R20,
   branch external via space register SR4, then copy the return link. */
static inline void tcg_out_call(TCGContext *s, void *func)
    /* resolve a possible function-descriptor pointer to the real code
       address (hppa ABI helper) */
    uint32_t val = (uint32_t)__canonicalize_funcptr_for_compare(func);
    tcg_out32(s, LDIL | INSN_R2(TCG_REG_R20) |
                 reassemble_21(lrsel(val, 0)));
    tcg_out32(s, BLE_SR4 | INSN_R2(TCG_REG_R20) |
                 reassemble_17(rrsel(val, 0) >> 2));
    /* BLE leaves the return address in R31; move it to the standard RP */
    tcg_out_mov(s, TCG_REG_RP, TCG_REG_R31);
347 #if defined(CONFIG_SOFTMMU)
349 #include "../../softmmu_defs.h"
351 static void *qemu_ld_helpers
[4] = {
358 static void *qemu_st_helpers
[4] = {
/* Emit a guest memory load.  opc bits 0-1 select the access size
   (0=8, 1=16, 2=32 bits), bit 2 requests sign extension.
   With CONFIG_SOFTMMU the sequence is: inline TLB lookup, branch to a
   slow path that calls qemu_ld_helpers[s_bits] on a miss, and a fast
   path that adds the TLB addend and loads directly on a hit.
   NOTE(review): many lines are missing from this extraction -- argument
   decoding, case labels, #else/#endif and braces; the comments below
   annotate only the visible skeleton. */
static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, int opc)
    int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;
#if TARGET_LONG_BITS == 64
#if defined(CONFIG_SOFTMMU)
    uint32_t *label3_ptr;
    data_reg2 = 0; /* suppress warning */
#if TARGET_LONG_BITS == 64
#if defined(CONFIG_SOFTMMU)
    /* TLB lookup: r1 = TLB index, r0 = page-aligned address */
    tcg_out_mov(s, r1, addr_reg);
    tcg_out_mov(s, r0, addr_reg);
    tcg_out32(s, SHD | INSN_T(r1) | INSN_R1(TCG_REG_R0) | INSN_R2(r1) |
        INSN_SHDEP_CP(TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS));
    /* mask off the page offset (and access-size alignment bits) */
    tcg_out_arithi(s, r0, r0, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
    tcg_out_arithi(s, r1, r1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS,
    /* r1 = &env->tlb_table[mem_index][index].addr_read */
    tcg_out_arith(s, r1, r1, TCG_AREG0, ARITH_ADD);
    tcg_out_arithi(s, r1, r1,
        offsetof(CPUState, tlb_table[mem_index][0].addr_read),
    /* R20 = cached tag from the TLB entry */
    tcg_out_ldst(s, TCG_REG_R20, r1, 0, LDW);
#if TARGET_LONG_BITS == 32
    /* if equal, jump to label1 (TLB hit) */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, COMBT | INSN_R1(TCG_REG_R20) | INSN_R2(r0) |
        INSN_COND(COND_EQUAL));
    tcg_out_mov(s, r0, addr_reg); /* delay slot */
    /* 64-bit guest address: compare both halves of the tag */
    /* if not equal, jump to label3 (definite miss) */
    label3_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, COMBF | INSN_R1(TCG_REG_R20) | INSN_R2(r0) |
        INSN_COND(COND_EQUAL));
    tcg_out_mov(s, r0, addr_reg); /* delay slot */
    tcg_out_ldst(s, TCG_REG_R20, r1, 4, LDW);
    /* if equal, jump to label1 */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, COMBT | INSN_R1(TCG_REG_R20) | INSN_R2(addr_reg2) |
        INSN_COND(COND_EQUAL));
    tcg_out_nop(s); /* delay slot */
    /* resolve the label3 forward branch to here (slow path) */
    *label3_ptr |= reassemble_12((uint32_t *)s->code_ptr - label3_ptr - 2);
    /* slow path: marshal arguments into R26/R25/R24 and call the helper */
#if TARGET_LONG_BITS == 32
    tcg_out_mov(s, TCG_REG_R26, addr_reg);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R25, mem_index);
    tcg_out_mov(s, TCG_REG_R26, addr_reg);
    tcg_out_mov(s, TCG_REG_R25, addr_reg2);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R24, mem_index);
    tcg_out_call(s, qemu_ld_helpers[s_bits]);
    /* widen/copy the helper result from RET0 (and RET1 for 64 bit) */
    tcg_out_ext8s(s, data_reg, TCG_REG_RET0);
    tcg_out_ext16s(s, data_reg, TCG_REG_RET0);
    tcg_out_mov(s, data_reg, TCG_REG_RET0);
    tcg_out_mov(s, data_reg, TCG_REG_RET0);
    tcg_out_mov(s, data_reg2, TCG_REG_RET1);
    /* jump over the fast path; patched below as label2 */
    label2_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, BL | INSN_R2(TCG_REG_R0) | 2);
    /* label1: TLB hit -- add the host addend and load directly */
    *label1_ptr |= reassemble_12((uint32_t *)s->code_ptr - label1_ptr - 2);
    tcg_out_arithi(s, TCG_REG_R20, r1,
        offsetof(CPUTLBEntry, addend) - offsetof(CPUTLBEntry, addr_read),
    tcg_out_ldst(s, TCG_REG_R20, TCG_REG_R20, 0, LDW);
    tcg_out_arith(s, r0, r0, TCG_REG_R20, ARITH_ADD);
#ifdef TARGET_WORDS_BIGENDIAN
    /* direct loads per access size, with byte swap when guest and host
       endianness differ */
    tcg_out_ldst(s, data_reg, r0, 0, LDB);
    tcg_out_ldst(s, data_reg, r0, 0, LDB);
    tcg_out_ext8s(s, data_reg, data_reg);
    tcg_out_ldst(s, data_reg, r0, 0, LDH);
    tcg_out_bswap16(s, data_reg, data_reg);
    tcg_out_ldst(s, data_reg, r0, 0, LDH);
    tcg_out_bswap16(s, data_reg, data_reg);
    tcg_out_ext16s(s, data_reg, data_reg);
    tcg_out_ldst(s, data_reg, r0, 0, LDW);
    tcg_out_bswap32(s, data_reg, data_reg, TCG_REG_R20);
    /* 64-bit load: two words, swapped per endianness */
    tcg_out_ldst(s, data_reg, r0, 0, LDW);
    tcg_out_ldst(s, data_reg2, r0, 4, LDW);
    tcg_out_ldst(s, data_reg, r0, 4, LDW);
    tcg_out_bswap32(s, data_reg, data_reg, TCG_REG_R20);
    tcg_out_ldst(s, data_reg2, r0, 0, LDW);
    tcg_out_bswap32(s, data_reg2, data_reg2, TCG_REG_R20);
#if defined(CONFIG_SOFTMMU)
    /* label2: end of the slow-path skip branch */
    *label2_ptr |= reassemble_17((uint32_t *)s->code_ptr - label2_ptr - 2);
/* Emit a guest memory store.  opc bits 0-1 select the access size
   (0=8, 1=16, 2=32 bits).  Mirrors tcg_out_qemu_ld: inline TLB lookup
   against addr_write, slow-path call to qemu_st_helpers[s_bits], and a
   fast path that stores through the TLB addend.
   NOTE(review): many lines are missing from this extraction -- argument
   decoding, case labels, #else/#endif and braces; the comments annotate
   only the visible skeleton. */
static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, int opc)
    int addr_reg, data_reg, data_reg2, r0, r1, mem_index, s_bits, bswap;
#if defined(CONFIG_SOFTMMU)
    uint32_t *label1_ptr, *label2_ptr;
#if TARGET_LONG_BITS == 64
#if defined(CONFIG_SOFTMMU)
    uint32_t *label3_ptr;
    data_reg2 = 0; /* suppress warning */
#if TARGET_LONG_BITS == 64
#if defined(CONFIG_SOFTMMU)
    /* TLB lookup: r1 = TLB index, r0 = page-aligned address */
    tcg_out_mov(s, r1, addr_reg);
    tcg_out_mov(s, r0, addr_reg);
    tcg_out32(s, SHD | INSN_T(r1) | INSN_R1(TCG_REG_R0) | INSN_R2(r1) |
        INSN_SHDEP_CP(TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS));
    tcg_out_arithi(s, r0, r0, TARGET_PAGE_MASK | ((1 << s_bits) - 1),
    tcg_out_arithi(s, r1, r1, (CPU_TLB_SIZE - 1) << CPU_TLB_ENTRY_BITS,
    /* r1 = &env->tlb_table[mem_index][index].addr_write */
    tcg_out_arith(s, r1, r1, TCG_AREG0, ARITH_ADD);
    tcg_out_arithi(s, r1, r1,
        offsetof(CPUState, tlb_table[mem_index][0].addr_write),
    tcg_out_ldst(s, TCG_REG_R20, r1, 0, LDW);
#if TARGET_LONG_BITS == 32
    /* if equal, jump to label1 (TLB hit) */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, COMBT | INSN_R1(TCG_REG_R20) | INSN_R2(r0) |
        INSN_COND(COND_EQUAL));
    tcg_out_mov(s, r0, addr_reg); /* delay slot */
    /* 64-bit guest address: compare both halves of the tag */
    /* if not equal, jump to label3 (definite miss) */
    label3_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, COMBF | INSN_R1(TCG_REG_R20) | INSN_R2(r0) |
        INSN_COND(COND_EQUAL));
    tcg_out_mov(s, r0, addr_reg); /* delay slot */
    tcg_out_ldst(s, TCG_REG_R20, r1, 4, LDW);
    /* if equal, jump to label1 */
    label1_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, COMBT | INSN_R1(TCG_REG_R20) | INSN_R2(addr_reg2) |
        INSN_COND(COND_EQUAL));
    tcg_out_nop(s); /* delay slot */
    /* resolve the label3 forward branch to here (slow path) */
    *label3_ptr |= reassemble_12((uint32_t *)s->code_ptr - label3_ptr - 2);
    /* slow path: marshal address, data and mem_index into R26..R23,
       zero-extending sub-word data with EXTRU */
    tcg_out_mov(s, TCG_REG_R26, addr_reg);
#if TARGET_LONG_BITS == 64
    tcg_out_mov(s, TCG_REG_R25, addr_reg2);
    tcg_out_mov(s, TCG_REG_R24, data_reg);
    tcg_out_mov(s, TCG_REG_R23, data_reg2);
    /* TODO: push mem_index */
    tcg_out32(s, EXTRU | INSN_R1(TCG_REG_R24) | INSN_R2(data_reg) |
        INSN_SHDEP_P(31) | INSN_DEP_LEN(8));
    tcg_out32(s, EXTRU | INSN_R1(TCG_REG_R24) | INSN_R2(data_reg) |
        INSN_SHDEP_P(31) | INSN_DEP_LEN(16));
    tcg_out_mov(s, TCG_REG_R24, data_reg);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R23, mem_index);
    tcg_out_mov(s, TCG_REG_R25, data_reg);
    tcg_out_mov(s, TCG_REG_R24, data_reg2);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R23, mem_index);
    tcg_out32(s, EXTRU | INSN_R1(TCG_REG_R25) | INSN_R2(data_reg) |
        INSN_SHDEP_P(31) | INSN_DEP_LEN(8));
    tcg_out32(s, EXTRU | INSN_R1(TCG_REG_R25) | INSN_R2(data_reg) |
        INSN_SHDEP_P(31) | INSN_DEP_LEN(16));
    tcg_out_mov(s, TCG_REG_R25, data_reg);
    tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R24, mem_index);
    tcg_out_call(s, qemu_st_helpers[s_bits]);
    /* jump over the fast path; patched below as label2 */
    label2_ptr = (uint32_t *)s->code_ptr;
    tcg_out32(s, BL | INSN_R2(TCG_REG_R0) | 2);
    /* label1: TLB hit -- add the host addend and store directly */
    *label1_ptr |= reassemble_12((uint32_t *)s->code_ptr - label1_ptr - 2);
    tcg_out_arithi(s, TCG_REG_R20, r1,
        offsetof(CPUTLBEntry, addend) - offsetof(CPUTLBEntry, addr_write),
    tcg_out_ldst(s, TCG_REG_R20, TCG_REG_R20, 0, LDW);
    tcg_out_arith(s, r0, r0, TCG_REG_R20, ARITH_ADD);
#ifdef TARGET_WORDS_BIGENDIAN
    /* direct stores per access size; byte swap into scratch R20 when
       guest and host endianness differ */
    tcg_out_ldst(s, data_reg, r0, 0, STB);
    tcg_out_bswap16(s, TCG_REG_R20, data_reg);
    data_reg = TCG_REG_R20;
    tcg_out_ldst(s, data_reg, r0, 0, STH);
    tcg_out_bswap32(s, TCG_REG_R20, data_reg, TCG_REG_R20);
    data_reg = TCG_REG_R20;
    tcg_out_ldst(s, data_reg, r0, 0, STW);
    /* 64-bit store: two words, swapped per endianness */
    tcg_out_ldst(s, data_reg, r0, 0, STW);
    tcg_out_ldst(s, data_reg2, r0, 4, STW);
    tcg_out_bswap32(s, TCG_REG_R20, data_reg, TCG_REG_R20);
    tcg_out_ldst(s, TCG_REG_R20, r0, 4, STW);
    tcg_out_bswap32(s, TCG_REG_R20, data_reg2, TCG_REG_R20);
    tcg_out_ldst(s, TCG_REG_R20, r0, 0, STW);
#if defined(CONFIG_SOFTMMU)
    /* label2: end of the slow-path skip branch */
    *label2_ptr |= reassemble_17((uint32_t *)s->code_ptr - label2_ptr - 2);
/* Main opcode dispatcher: translate one TCG opcode into host code.
   NOTE(review): the switch statement, many case labels, break statements
   and braces are missing from this extraction; comments annotate only
   the visible skeleton. */
static inline void tcg_out_op(TCGContext *s, int opc, const TCGArg *args,
                              const int *const_args)
    case INDEX_op_exit_tb:
        /* return args[0] in RET0 and branch back via R18 */
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_RET0, args[0]);
        tcg_out32(s, BV_N | INSN_R2(TCG_REG_R18));
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
            /* direct jump method */
            fprintf(stderr, "goto_tb direct\n");
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R20, args[0]);
            tcg_out32(s, BV_N | INSN_R2(TCG_REG_R20));
            s->tb_jmp_offset[args[0]] = s->code_ptr - s->code_buf;
            /* indirect jump method */
            tcg_out_ld_ptr(s, TCG_REG_R20,
                           (tcg_target_long)(s->tb_next + args[0]));
            tcg_out32(s, BV_N | INSN_R2(TCG_REG_R20));
        s->tb_next_offset[args[0]] = s->code_ptr - s->code_buf;
        /* call through the register in args[0] */
        tcg_out32(s, BLE_SR4 | INSN_R2(args[0]));
        tcg_out_mov(s, TCG_REG_RP, TCG_REG_R31);
        fprintf(stderr, "unimplemented jmp\n");
        fprintf(stderr, "unimplemented br\n");
    case INDEX_op_movi_i32:
        tcg_out_movi(s, TCG_TYPE_I32, args[0], (uint32_t)args[1]);
    case INDEX_op_ld8u_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], LDB);
    case INDEX_op_ld8s_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], LDB);
        tcg_out_ext8s(s, args[0], args[0]);
    case INDEX_op_ld16u_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], LDH);
    case INDEX_op_ld16s_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], LDH);
        tcg_out_ext16s(s, args[0], args[0]);
    case INDEX_op_ld_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], LDW);
    case INDEX_op_st8_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], STB);
    case INDEX_op_st16_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], STH);
    case INDEX_op_st_i32:
        tcg_out_ldst(s, args[0], args[1], args[2], STW);
    /* ALU ops fall through to the shared tcg_out_arith call at the end;
       the intervening c = ARITH_* assignments are not visible here */
    case INDEX_op_sub_i32:
    case INDEX_op_and_i32:
    case INDEX_op_or_i32:
    case INDEX_op_xor_i32:
    case INDEX_op_add_i32:
    case INDEX_op_shl_i32:
        /* variable shifts go through the SAR control register (CR11):
           SUBI computes 31 - count, MTCTL loads it, ZVDEP shifts */
        tcg_out32(s, SUBI | INSN_R1(TCG_REG_R20) | INSN_R2(args[2]) |
            lowsignext(0x1f, 0, 11));
        tcg_out32(s, MTCTL | INSN_R2(11) | INSN_R1(TCG_REG_R20));
        tcg_out32(s, ZVDEP | INSN_R2(args[0]) | INSN_R1(args[1]) |
    case INDEX_op_shr_i32:
        tcg_out32(s, MTCTL | INSN_R2(11) | INSN_R1(args[2]));
        tcg_out32(s, VSHD | INSN_T(args[0]) | INSN_R1(TCG_REG_R0) |
    case INDEX_op_sar_i32:
        tcg_out32(s, SUBI | INSN_R1(TCG_REG_R20) | INSN_R2(args[2]) |
            lowsignext(0x1f, 0, 11));
        tcg_out32(s, MTCTL | INSN_R2(11) | INSN_R1(TCG_REG_R20));
        tcg_out32(s, VEXTRS | INSN_R1(args[0]) | INSN_R2(args[1]) |
    case INDEX_op_mul_i32:
        fprintf(stderr, "unimplemented mul\n");
    case INDEX_op_mulu2_i32:
        fprintf(stderr, "unimplemented mulu2\n");
    case INDEX_op_div2_i32:
        fprintf(stderr, "unimplemented div2\n");
    case INDEX_op_divu2_i32:
        fprintf(stderr, "unimplemented divu2\n");
    case INDEX_op_brcond_i32:
        fprintf(stderr, "unimplemented brcond\n");
    /* qemu_ld/st: low bits of the opc argument encode the access size,
       bit 2 requests sign extension */
    case INDEX_op_qemu_ld8u:
        tcg_out_qemu_ld(s, args, 0);
    case INDEX_op_qemu_ld8s:
        tcg_out_qemu_ld(s, args, 0 | 4);
    case INDEX_op_qemu_ld16u:
        tcg_out_qemu_ld(s, args, 1);
    case INDEX_op_qemu_ld16s:
        tcg_out_qemu_ld(s, args, 1 | 4);
    case INDEX_op_qemu_ld32u:
        tcg_out_qemu_ld(s, args, 2);
    case INDEX_op_qemu_st8:
        tcg_out_qemu_st(s, args, 0);
    case INDEX_op_qemu_st16:
        tcg_out_qemu_st(s, args, 1);
    case INDEX_op_qemu_st32:
        tcg_out_qemu_st(s, args, 2);
        fprintf(stderr, "unknown opcode 0x%x\n", opc);
    /* shared tail for the three-register ALU cases above */
    tcg_out_arith(s, args[0], args[1], args[2], c);
/* Constraint table: register constraints per opcode argument.
   "r" = any allocatable register; "L" = register usable by qemu_ld/st
   (excludes R26-R23, the softmmu helper argument registers -- see
   target_parse_constraint).
   NOTE(review): the #else between the 32-bit and 64-bit guest-address
   entry groups and the closing "};" are not visible in this chunk. */
static const TCGTargetOpDef hppa_op_defs[] = {
    { INDEX_op_exit_tb, { } },
    { INDEX_op_goto_tb, { } },
    { INDEX_op_call, { "r" } },
    { INDEX_op_jmp, { "r" } },
    { INDEX_op_br, { } },
    { INDEX_op_mov_i32, { "r", "r" } },
    { INDEX_op_movi_i32, { "r" } },
    { INDEX_op_ld8u_i32, { "r", "r" } },
    { INDEX_op_ld8s_i32, { "r", "r" } },
    { INDEX_op_ld16u_i32, { "r", "r" } },
    { INDEX_op_ld16s_i32, { "r", "r" } },
    { INDEX_op_ld_i32, { "r", "r" } },
    { INDEX_op_st8_i32, { "r", "r" } },
    { INDEX_op_st16_i32, { "r", "r" } },
    { INDEX_op_st_i32, { "r", "r" } },
    { INDEX_op_add_i32, { "r", "r", "r" } },
    { INDEX_op_sub_i32, { "r", "r", "r" } },
    { INDEX_op_and_i32, { "r", "r", "r" } },
    { INDEX_op_or_i32, { "r", "r", "r" } },
    { INDEX_op_xor_i32, { "r", "r", "r" } },
    { INDEX_op_shl_i32, { "r", "r", "r" } },
    { INDEX_op_shr_i32, { "r", "r", "r" } },
    { INDEX_op_sar_i32, { "r", "r", "r" } },
    { INDEX_op_brcond_i32, { "r", "r" } },
#if TARGET_LONG_BITS == 32
    /* 32-bit guest address: one address operand */
    { INDEX_op_qemu_ld8u, { "r", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L" } },
    { INDEX_op_qemu_st8, { "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L" } },
    /* 64-bit guest address: two address operands */
    { INDEX_op_qemu_ld8u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld8s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld16s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32u, { "r", "L", "L" } },
    { INDEX_op_qemu_ld32s, { "r", "L", "L" } },
    { INDEX_op_qemu_ld64, { "r", "r", "L", "L" } },
    { INDEX_op_qemu_st8, { "L", "L", "L" } },
    { INDEX_op_qemu_st16, { "L", "L", "L" } },
    { INDEX_op_qemu_st32, { "L", "L", "L" } },
    { INDEX_op_qemu_st64, { "L", "L", "L", "L" } },
/* One-time backend initialization: declare which registers are available
   for allocation, which are call-clobbered, and which are reserved.
   NOTE(review): the call-clobber mask operand lines are not visible in
   this chunk. */
void tcg_target_init(TCGContext *s)
    /* all 32 GPRs can hold 32-bit values */
    tcg_regset_set32(tcg_target_available_regs[TCG_TYPE_I32], 0, 0xffffffff);
    tcg_regset_set32(tcg_target_call_clobber_regs, 0,
    tcg_regset_clear(s->reserved_regs);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R0);  /* hardwired to zero */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R1);  /* addil target */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_RP);  /* link register */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R3);  /* frame pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R18); /* return pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R19); /* clobbered w/o pic */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R20); /* reserved as scratch */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_DP);  /* data pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP);  /* stack pointer */
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_R31); /* ble link reg */
    /* register the per-opcode constraint table */
    tcg_add_target_add_op_defs(hppa_op_defs);
);