/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
25 #define TCG_CT_CONST_S16 0x100
26 #define TCG_CT_CONST_U16 0x200
27 #define TCG_CT_CONST_S32 0x400
28 #define TCG_CT_CONST_U32 0x800
29 #define TCG_CT_CONST_ZERO 0x1000
/* Address of the common epilogue; INDEX_op_exit_tb branches here.
   Assigned once in tcg_target_qemu_prologue.  */
static uint8_t *tb_ret_addr;
35 #if TARGET_LONG_BITS == 32
47 #define HAVE_ISA_2_06 0
49 #ifdef CONFIG_USE_GUEST_BASE
50 #define TCG_GUEST_BASE_REG 30
52 #define TCG_GUEST_BASE_REG 0
56 static const char * const tcg_target_reg_names
[TCG_TARGET_NB_REGS
] = {
92 static const int tcg_target_reg_alloc_order
[] = {
128 static const int tcg_target_call_iarg_regs
[] = {
139 static const int tcg_target_call_oarg_regs
[] = {
143 static const int tcg_target_callee_save_regs
[] = {
160 TCG_REG_R27
, /* currently used for the global env */
167 static uint32_t reloc_pc24_val (void *pc
, tcg_target_long target
)
169 tcg_target_long disp
;
171 disp
= target
- (tcg_target_long
) pc
;
172 if ((disp
<< 38) >> 38 != disp
)
175 return disp
& 0x3fffffc;
178 static void reloc_pc24 (void *pc
, tcg_target_long target
)
180 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0x3fffffc)
181 | reloc_pc24_val (pc
, target
);
184 static uint16_t reloc_pc14_val (void *pc
, tcg_target_long target
)
186 tcg_target_long disp
;
188 disp
= target
- (tcg_target_long
) pc
;
189 if (disp
!= (int16_t) disp
)
192 return disp
& 0xfffc;
195 static void reloc_pc14 (void *pc
, tcg_target_long target
)
197 *(uint32_t *) pc
= (*(uint32_t *) pc
& ~0xfffc)
198 | reloc_pc14_val (pc
, target
);
201 static void patch_reloc (uint8_t *code_ptr
, int type
,
202 tcg_target_long value
, tcg_target_long addend
)
207 reloc_pc14 (code_ptr
, value
);
210 reloc_pc24 (code_ptr
, value
);
217 /* parse target specific constraints */
218 static int target_parse_constraint (TCGArgConstraint
*ct
, const char **pct_str
)
224 case 'A': case 'B': case 'C': case 'D':
225 ct
->ct
|= TCG_CT_REG
;
226 tcg_regset_set_reg (ct
->u
.regs
, 3 + ct_str
[0] - 'A');
229 ct
->ct
|= TCG_CT_REG
;
230 tcg_regset_set32 (ct
->u
.regs
, 0, 0xffffffff);
232 case 'L': /* qemu_ld constraint */
233 ct
->ct
|= TCG_CT_REG
;
234 tcg_regset_set32 (ct
->u
.regs
, 0, 0xffffffff);
235 tcg_regset_reset_reg (ct
->u
.regs
, TCG_REG_R3
);
236 #ifdef CONFIG_SOFTMMU
237 tcg_regset_reset_reg (ct
->u
.regs
, TCG_REG_R4
);
238 tcg_regset_reset_reg (ct
->u
.regs
, TCG_REG_R5
);
241 case 'S': /* qemu_st constraint */
242 ct
->ct
|= TCG_CT_REG
;
243 tcg_regset_set32 (ct
->u
.regs
, 0, 0xffffffff);
244 tcg_regset_reset_reg (ct
->u
.regs
, TCG_REG_R3
);
245 #ifdef CONFIG_SOFTMMU
246 tcg_regset_reset_reg (ct
->u
.regs
, TCG_REG_R4
);
247 tcg_regset_reset_reg (ct
->u
.regs
, TCG_REG_R5
);
248 tcg_regset_reset_reg (ct
->u
.regs
, TCG_REG_R6
);
252 ct
->ct
|= TCG_CT_CONST_S16
;
255 ct
->ct
|= TCG_CT_CONST_U16
;
258 ct
->ct
|= TCG_CT_CONST_S32
;
261 ct
->ct
|= TCG_CT_CONST_U32
;
264 ct
->ct
|= TCG_CT_CONST_ZERO
;
274 /* test if a constant matches the constraint */
275 static int tcg_target_const_match (tcg_target_long val
,
276 const TCGArgConstraint
*arg_ct
)
279 if (ct
& TCG_CT_CONST
) {
281 } else if ((ct
& TCG_CT_CONST_S16
) && val
== (int16_t)val
) {
283 } else if ((ct
& TCG_CT_CONST_U16
) && val
== (uint16_t)val
) {
285 } else if ((ct
& TCG_CT_CONST_S32
) && val
== (int32_t)val
) {
287 } else if ((ct
& TCG_CT_CONST_U32
) && val
== (uint32_t)val
) {
289 } else if ((ct
& TCG_CT_CONST_ZERO
) && val
== 0) {
295 #define OPCD(opc) ((opc)<<26)
296 #define XO19(opc) (OPCD(19)|((opc)<<1))
297 #define XO30(opc) (OPCD(30)|((opc)<<2))
298 #define XO31(opc) (OPCD(31)|((opc)<<1))
299 #define XO58(opc) (OPCD(58)|(opc))
300 #define XO62(opc) (OPCD(62)|(opc))
304 #define LBZ OPCD( 34)
305 #define LHZ OPCD( 40)
306 #define LHA OPCD( 42)
307 #define LWZ OPCD( 32)
308 #define STB OPCD( 38)
309 #define STH OPCD( 44)
310 #define STW OPCD( 36)
313 #define STDU XO62( 1)
314 #define STDX XO31(149)
317 #define LDX XO31( 21)
320 #define LWAX XO31(341)
322 #define ADDIC OPCD( 12)
323 #define ADDI OPCD( 14)
324 #define ADDIS OPCD( 15)
325 #define ORI OPCD( 24)
326 #define ORIS OPCD( 25)
327 #define XORI OPCD( 26)
328 #define XORIS OPCD( 27)
329 #define ANDI OPCD( 28)
330 #define ANDIS OPCD( 29)
331 #define MULLI OPCD( 7)
332 #define CMPLI OPCD( 10)
333 #define CMPI OPCD( 11)
334 #define SUBFIC OPCD( 8)
336 #define LWZU OPCD( 33)
337 #define STWU OPCD( 37)
339 #define RLWIMI OPCD( 20)
340 #define RLWINM OPCD( 21)
341 #define RLWNM OPCD( 23)
343 #define RLDICL XO30( 0)
344 #define RLDICR XO30( 1)
345 #define RLDIMI XO30( 3)
346 #define RLDCL XO30( 8)
348 #define BCLR XO19( 16)
349 #define BCCTR XO19(528)
350 #define CRAND XO19(257)
351 #define CRANDC XO19(129)
352 #define CRNAND XO19(225)
353 #define CROR XO19(449)
354 #define CRNOR XO19( 33)
356 #define EXTSB XO31(954)
357 #define EXTSH XO31(922)
358 #define EXTSW XO31(986)
359 #define ADD XO31(266)
360 #define ADDE XO31(138)
361 #define ADDC XO31( 10)
362 #define AND XO31( 28)
363 #define SUBF XO31( 40)
364 #define SUBFC XO31( 8)
365 #define SUBFE XO31(136)
367 #define XOR XO31(316)
368 #define MULLW XO31(235)
369 #define MULHWU XO31( 11)
370 #define DIVW XO31(491)
371 #define DIVWU XO31(459)
373 #define CMPL XO31( 32)
374 #define LHBRX XO31(790)
375 #define LWBRX XO31(534)
376 #define LDBRX XO31(532)
377 #define STHBRX XO31(918)
378 #define STWBRX XO31(662)
379 #define STDBRX XO31(660)
380 #define MFSPR XO31(339)
381 #define MTSPR XO31(467)
382 #define SRAWI XO31(824)
383 #define NEG XO31(104)
384 #define MFCR XO31( 19)
385 #define NOR XO31(124)
386 #define CNTLZW XO31( 26)
387 #define CNTLZD XO31( 58)
388 #define ANDC XO31( 60)
389 #define ORC XO31(412)
390 #define EQV XO31(284)
391 #define NAND XO31(476)
393 #define MULLD XO31(233)
394 #define MULHD XO31( 73)
395 #define MULHDU XO31( 9)
396 #define DIVD XO31(489)
397 #define DIVDU XO31(457)
399 #define LBZX XO31( 87)
400 #define LHZX XO31(279)
401 #define LHAX XO31(343)
402 #define LWZX XO31( 23)
403 #define STBX XO31(215)
404 #define STHX XO31(407)
405 #define STWX XO31(151)
407 #define SPR(a,b) ((((a)<<5)|(b))<<11)
409 #define CTR SPR(9, 0)
411 #define SLW XO31( 24)
412 #define SRW XO31(536)
413 #define SRAW XO31(792)
415 #define SLD XO31( 27)
416 #define SRD XO31(539)
417 #define SRAD XO31(794)
418 #define SRADI XO31(413<<1)
421 #define TRAP (TW | TO (31))
423 #define RT(r) ((r)<<21)
424 #define RS(r) ((r)<<21)
425 #define RA(r) ((r)<<16)
426 #define RB(r) ((r)<<11)
427 #define TO(t) ((t)<<21)
428 #define SH(s) ((s)<<11)
429 #define MB(b) ((b)<<6)
430 #define ME(e) ((e)<<1)
431 #define BO(o) ((o)<<21)
432 #define MB64(b) ((b)<<5)
436 #define TAB(t, a, b) (RT(t) | RA(a) | RB(b))
437 #define SAB(s, a, b) (RS(s) | RA(a) | RB(b))
438 #define TAI(s, a, i) (RT(s) | RA(a) | ((i) & 0xffff))
439 #define SAI(s, a, i) (RS(s) | RA(a) | ((i) & 0xffff))
441 #define BF(n) ((n)<<23)
442 #define BI(n, c) (((c)+((n)*4))<<16)
443 #define BT(n, c) (((c)+((n)*4))<<21)
444 #define BA(n, c) (((c)+((n)*4))<<16)
445 #define BB(n, c) (((c)+((n)*4))<<11)
447 #define BO_COND_TRUE BO (12)
448 #define BO_COND_FALSE BO ( 4)
449 #define BO_ALWAYS BO (20)
458 static const uint32_t tcg_to_bc
[] = {
459 [TCG_COND_EQ
] = BC
| BI (7, CR_EQ
) | BO_COND_TRUE
,
460 [TCG_COND_NE
] = BC
| BI (7, CR_EQ
) | BO_COND_FALSE
,
461 [TCG_COND_LT
] = BC
| BI (7, CR_LT
) | BO_COND_TRUE
,
462 [TCG_COND_GE
] = BC
| BI (7, CR_LT
) | BO_COND_FALSE
,
463 [TCG_COND_LE
] = BC
| BI (7, CR_GT
) | BO_COND_FALSE
,
464 [TCG_COND_GT
] = BC
| BI (7, CR_GT
) | BO_COND_TRUE
,
465 [TCG_COND_LTU
] = BC
| BI (7, CR_LT
) | BO_COND_TRUE
,
466 [TCG_COND_GEU
] = BC
| BI (7, CR_LT
) | BO_COND_FALSE
,
467 [TCG_COND_LEU
] = BC
| BI (7, CR_GT
) | BO_COND_FALSE
,
468 [TCG_COND_GTU
] = BC
| BI (7, CR_GT
) | BO_COND_TRUE
,
471 static inline void tcg_out_mov(TCGContext
*s
, TCGType type
,
472 TCGReg ret
, TCGReg arg
)
474 tcg_out32 (s
, OR
| SAB (arg
, ret
, arg
));
477 static inline void tcg_out_rld(TCGContext
*s
, int op
, TCGReg ra
, TCGReg rs
,
480 sh
= SH (sh
& 0x1f) | (((sh
>> 5) & 1) << 1);
481 mb
= MB64 ((mb
>> 5) | ((mb
<< 1) & 0x3f));
482 tcg_out32 (s
, op
| RA (ra
) | RS (rs
) | sh
| mb
);
485 static inline void tcg_out_rlw(TCGContext
*s
, int op
, TCGReg ra
, TCGReg rs
,
486 int sh
, int mb
, int me
)
488 tcg_out32(s
, op
| RA(ra
) | RS(rs
) | SH(sh
) | MB(mb
) | ME(me
));
491 static inline void tcg_out_ext32u(TCGContext
*s
, TCGReg dst
, TCGReg src
)
493 tcg_out_rld(s
, RLDICL
, dst
, src
, 0, 32);
496 static inline void tcg_out_shli64(TCGContext
*s
, TCGReg dst
, TCGReg src
, int c
)
498 tcg_out_rld(s
, RLDICR
, dst
, src
, c
, 63 - c
);
501 static inline void tcg_out_shri64(TCGContext
*s
, TCGReg dst
, TCGReg src
, int c
)
503 tcg_out_rld(s
, RLDICL
, dst
, src
, 64 - c
, c
);
506 static void tcg_out_movi32(TCGContext
*s
, TCGReg ret
, int32_t arg
)
508 if (arg
== (int16_t) arg
) {
509 tcg_out32(s
, ADDI
| TAI(ret
, 0, arg
));
511 tcg_out32(s
, ADDIS
| TAI(ret
, 0, arg
>> 16));
513 tcg_out32(s
, ORI
| SAI(ret
, ret
, arg
));
518 static void tcg_out_movi(TCGContext
*s
, TCGType type
, TCGReg ret
,
521 if (type
== TCG_TYPE_I32
|| arg
== (int32_t)arg
) {
522 tcg_out_movi32(s
, ret
, arg
);
523 } else if (arg
== (uint32_t)arg
&& !(arg
& 0x8000)) {
524 tcg_out32(s
, ADDI
| TAI(ret
, 0, arg
));
525 tcg_out32(s
, ORIS
| SAI(ret
, ret
, arg
>> 16));
527 int32_t high
= arg
>> 32;
528 tcg_out_movi32(s
, ret
, high
);
530 tcg_out_shli64(s
, ret
, ret
, 32);
532 if (arg
& 0xffff0000) {
533 tcg_out32(s
, ORIS
| SAI(ret
, ret
, arg
>> 16));
536 tcg_out32(s
, ORI
| SAI(ret
, ret
, arg
));
541 static bool mask_operand(uint32_t c
, int *mb
, int *me
)
545 /* Accept a bit pattern like:
549 Keep track of the transitions. */
550 if (c
== 0 || c
== -1) {
556 if (test
& (test
- 1)) {
561 *mb
= test
? clz32(test
& -test
) + 1 : 0;
565 static bool mask64_operand(uint64_t c
, int *mb
, int *me
)
574 /* Accept 1..10..0. */
580 /* Accept 0..01..1. */
581 if (lsb
== 1 && (c
& (c
+ 1)) == 0) {
582 *mb
= clz64(c
+ 1) + 1;
589 static void tcg_out_andi32(TCGContext
*s
, TCGReg dst
, TCGReg src
, uint32_t c
)
593 if ((c
& 0xffff) == c
) {
594 tcg_out32(s
, ANDI
| SAI(src
, dst
, c
));
596 } else if ((c
& 0xffff0000) == c
) {
597 tcg_out32(s
, ANDIS
| SAI(src
, dst
, c
>> 16));
599 } else if (mask_operand(c
, &mb
, &me
)) {
600 tcg_out_rlw(s
, RLWINM
, dst
, src
, 0, mb
, me
);
602 tcg_out_movi(s
, TCG_TYPE_I32
, 0, c
);
603 tcg_out32(s
, AND
| SAB(src
, dst
, 0));
607 static void tcg_out_andi64(TCGContext
*s
, TCGReg dst
, TCGReg src
, uint64_t c
)
611 if ((c
& 0xffff) == c
) {
612 tcg_out32(s
, ANDI
| SAI(src
, dst
, c
));
614 } else if ((c
& 0xffff0000) == c
) {
615 tcg_out32(s
, ANDIS
| SAI(src
, dst
, c
>> 16));
617 } else if (mask64_operand(c
, &mb
, &me
)) {
619 tcg_out_rld(s
, RLDICR
, dst
, src
, 0, me
);
621 tcg_out_rld(s
, RLDICL
, dst
, src
, 0, mb
);
624 tcg_out_movi(s
, TCG_TYPE_I64
, 0, c
);
625 tcg_out32(s
, AND
| SAB(src
, dst
, 0));
629 static void tcg_out_zori32(TCGContext
*s
, TCGReg dst
, TCGReg src
, uint32_t c
,
630 int op_lo
, int op_hi
)
633 tcg_out32(s
, op_hi
| SAI(src
, dst
, c
>> 16));
637 tcg_out32(s
, op_lo
| SAI(src
, dst
, c
));
642 static void tcg_out_ori32(TCGContext
*s
, TCGReg dst
, TCGReg src
, uint32_t c
)
644 tcg_out_zori32(s
, dst
, src
, c
, ORI
, ORIS
);
647 static void tcg_out_xori32(TCGContext
*s
, TCGReg dst
, TCGReg src
, uint32_t c
)
649 tcg_out_zori32(s
, dst
, src
, c
, XORI
, XORIS
);
652 static void tcg_out_b (TCGContext
*s
, int mask
, tcg_target_long target
)
654 tcg_target_long disp
;
656 disp
= target
- (tcg_target_long
) s
->code_ptr
;
657 if ((disp
<< 38) >> 38 == disp
)
658 tcg_out32 (s
, B
| (disp
& 0x3fffffc) | mask
);
660 tcg_out_movi (s
, TCG_TYPE_I64
, 0, (tcg_target_long
) target
);
661 tcg_out32 (s
, MTSPR
| RS (0) | CTR
);
662 tcg_out32 (s
, BCCTR
| BO_ALWAYS
| mask
);
666 static void tcg_out_call (TCGContext
*s
, tcg_target_long arg
, int const_arg
)
670 tcg_out_b (s
, LK
, arg
);
673 tcg_out32 (s
, MTSPR
| RS (arg
) | LR
);
674 tcg_out32 (s
, BCLR
| BO_ALWAYS
| LK
);
681 tcg_out_movi (s
, TCG_TYPE_I64
, reg
, arg
);
685 tcg_out32 (s
, LD
| RT (0) | RA (reg
));
686 tcg_out32 (s
, MTSPR
| RA (0) | CTR
);
687 tcg_out32 (s
, LD
| RT (11) | RA (reg
) | 16);
688 tcg_out32 (s
, LD
| RT (2) | RA (reg
) | 8);
689 tcg_out32 (s
, BCCTR
| BO_ALWAYS
| LK
);
693 static void tcg_out_ldst(TCGContext
*s
, TCGReg ret
, TCGReg addr
,
694 int offset
, int op1
, int op2
)
696 if (offset
== (int16_t) offset
) {
697 tcg_out32(s
, op1
| TAI(ret
, addr
, offset
));
699 tcg_out_movi(s
, TCG_TYPE_I64
, 0, offset
);
700 tcg_out32(s
, op2
| TAB(ret
, addr
, 0));
704 static void tcg_out_ldsta(TCGContext
*s
, TCGReg ret
, TCGReg addr
,
705 int offset
, int op1
, int op2
)
707 if (offset
== (int16_t) (offset
& ~3)) {
708 tcg_out32(s
, op1
| TAI(ret
, addr
, offset
));
710 tcg_out_movi(s
, TCG_TYPE_I64
, 0, offset
);
711 tcg_out32(s
, op2
| TAB(ret
, addr
, 0));
715 #if defined (CONFIG_SOFTMMU)
717 #include "exec/softmmu_defs.h"
719 /* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
721 static const void * const qemu_ld_helpers
[4] = {
728 /* helper signature: helper_st_mmu(CPUState *env, target_ulong addr,
729 uintxx_t val, int mmu_idx) */
730 static const void * const qemu_st_helpers
[4] = {
737 static void tcg_out_tlb_read(TCGContext
*s
, TCGReg r0
, TCGReg r1
, TCGReg r2
,
738 TCGReg addr_reg
, int s_bits
, int offset
)
740 #if TARGET_LONG_BITS == 32
741 tcg_out_ext32u(s
, addr_reg
, addr_reg
);
743 tcg_out_rlw(s
, RLWINM
, r0
, addr_reg
,
744 32 - (TARGET_PAGE_BITS
- CPU_TLB_ENTRY_BITS
),
745 32 - (CPU_TLB_BITS
+ CPU_TLB_ENTRY_BITS
),
746 31 - CPU_TLB_ENTRY_BITS
);
747 tcg_out32(s
, ADD
| TAB(r0
, r0
, TCG_AREG0
));
748 tcg_out32(s
, LWZU
| TAI(r1
, r0
, offset
));
749 tcg_out_rlw(s
, RLWINM
, r2
, addr_reg
, 0,
750 (32 - s_bits
) & 31, 31 - TARGET_PAGE_BITS
);
752 tcg_out_rld (s
, RLDICL
, r0
, addr_reg
,
753 64 - TARGET_PAGE_BITS
,
755 tcg_out_shli64(s
, r0
, r0
, CPU_TLB_ENTRY_BITS
);
757 tcg_out32(s
, ADD
| TAB(r0
, r0
, TCG_AREG0
));
758 tcg_out32(s
, LD_ADDR
| TAI(r1
, r0
, offset
));
761 tcg_out_rld (s
, RLDICR
, r2
, addr_reg
, 0, 63 - TARGET_PAGE_BITS
);
764 tcg_out_rld (s
, RLDICL
, r2
, addr_reg
,
765 64 - TARGET_PAGE_BITS
,
766 TARGET_PAGE_BITS
- s_bits
);
767 tcg_out_rld (s
, RLDICL
, r2
, r2
, TARGET_PAGE_BITS
, 0);
773 static const uint32_t qemu_ldx_opc
[8] = {
774 #ifdef TARGET_WORDS_BIGENDIAN
775 LBZX
, LHZX
, LWZX
, LDX
,
778 LBZX
, LHBRX
, LWBRX
, LDBRX
,
783 static const uint32_t qemu_stx_opc
[4] = {
784 #ifdef TARGET_WORDS_BIGENDIAN
785 STBX
, STHX
, STWX
, STDX
787 STBX
, STHBRX
, STWBRX
, STDBRX
,
791 static const uint32_t qemu_exts_opc
[4] = {
792 EXTSB
, EXTSH
, EXTSW
, 0
795 static void tcg_out_qemu_ld (TCGContext
*s
, const TCGArg
*args
, int opc
)
797 TCGReg addr_reg
, data_reg
, r0
, r1
, rbase
;
798 uint32_t insn
, s_bits
;
799 #ifdef CONFIG_SOFTMMU
802 void *label1_ptr
, *label2_ptr
;
809 #ifdef CONFIG_SOFTMMU
817 tcg_out_tlb_read (s
, r0
, r1
, r2
, addr_reg
, s_bits
,
818 offsetof (CPUArchState
, tlb_table
[mem_index
][0].addr_read
));
820 tcg_out32 (s
, CMP
| BF (7) | RA (r2
) | RB (r1
) | CMP_L
);
822 label1_ptr
= s
->code_ptr
;
824 tcg_out32 (s
, BC
| BI (7, CR_EQ
) | BO_COND_TRUE
);
829 tcg_out_mov (s
, TCG_TYPE_I64
, ir
++, TCG_AREG0
);
830 tcg_out_mov (s
, TCG_TYPE_I64
, ir
++, addr_reg
);
831 tcg_out_movi (s
, TCG_TYPE_I64
, ir
++, mem_index
);
833 tcg_out_call (s
, (tcg_target_long
) qemu_ld_helpers
[s_bits
], 1);
836 insn
= qemu_exts_opc
[s_bits
];
837 tcg_out32(s
, insn
| RA(data_reg
) | RS(3));
838 } else if (data_reg
!= 3) {
839 tcg_out_mov(s
, TCG_TYPE_I64
, data_reg
, 3);
841 label2_ptr
= s
->code_ptr
;
844 /* label1: fast path */
846 reloc_pc14 (label1_ptr
, (tcg_target_long
) s
->code_ptr
);
849 /* r0 now contains &env->tlb_table[mem_index][index].addr_read */
850 tcg_out32(s
, LD
| TAI(r0
, r0
,
851 offsetof(CPUTLBEntry
, addend
)
852 - offsetof(CPUTLBEntry
, addr_read
)));
853 /* r0 = env->tlb_table[mem_index][index].addend */
854 tcg_out32(s
, ADD
| TAB(r0
, r0
, addr_reg
));
855 /* r0 = env->tlb_table[mem_index][index].addend + addr */
857 #else /* !CONFIG_SOFTMMU */
858 #if TARGET_LONG_BITS == 32
859 tcg_out_ext32u(s
, addr_reg
, addr_reg
);
863 rbase
= GUEST_BASE
? TCG_GUEST_BASE_REG
: 0;
866 insn
= qemu_ldx_opc
[opc
];
867 if (!HAVE_ISA_2_06
&& insn
== LDBRX
) {
868 tcg_out32(s
, ADDI
| TAI(r1
, r0
, 4));
869 tcg_out32(s
, LWBRX
| TAB(data_reg
, rbase
, r0
));
870 tcg_out32(s
, LWBRX
| TAB( r1
, rbase
, r1
));
871 tcg_out_rld(s
, RLDIMI
, data_reg
, r1
, 32, 0);
873 tcg_out32(s
, insn
| TAB(data_reg
, rbase
, r0
));
875 insn
= qemu_ldx_opc
[s_bits
];
876 tcg_out32(s
, insn
| TAB(data_reg
, rbase
, r0
));
877 insn
= qemu_exts_opc
[s_bits
];
878 tcg_out32 (s
, insn
| RA(data_reg
) | RS(data_reg
));
881 #ifdef CONFIG_SOFTMMU
882 reloc_pc24 (label2_ptr
, (tcg_target_long
) s
->code_ptr
);
886 static void tcg_out_qemu_st (TCGContext
*s
, const TCGArg
*args
, int opc
)
888 TCGReg addr_reg
, r0
, r1
, rbase
, data_reg
;
890 #ifdef CONFIG_SOFTMMU
893 void *label1_ptr
, *label2_ptr
;
899 #ifdef CONFIG_SOFTMMU
907 tcg_out_tlb_read (s
, r0
, r1
, r2
, addr_reg
, opc
,
908 offsetof (CPUArchState
, tlb_table
[mem_index
][0].addr_write
));
910 tcg_out32 (s
, CMP
| BF (7) | RA (r2
) | RB (r1
) | CMP_L
);
912 label1_ptr
= s
->code_ptr
;
914 tcg_out32 (s
, BC
| BI (7, CR_EQ
) | BO_COND_TRUE
);
919 tcg_out_mov (s
, TCG_TYPE_I64
, ir
++, TCG_AREG0
);
920 tcg_out_mov (s
, TCG_TYPE_I64
, ir
++, addr_reg
);
921 tcg_out_rld (s
, RLDICL
, ir
++, data_reg
, 0, 64 - (1 << (3 + opc
)));
922 tcg_out_movi (s
, TCG_TYPE_I64
, ir
++, mem_index
);
924 tcg_out_call (s
, (tcg_target_long
) qemu_st_helpers
[opc
], 1);
926 label2_ptr
= s
->code_ptr
;
929 /* label1: fast path */
931 reloc_pc14 (label1_ptr
, (tcg_target_long
) s
->code_ptr
);
937 | (offsetof (CPUTLBEntry
, addend
)
938 - offsetof (CPUTLBEntry
, addr_write
))
940 /* r0 = env->tlb_table[mem_index][index].addend */
941 tcg_out32(s
, ADD
| TAB(r0
, r0
, addr_reg
));
942 /* r0 = env->tlb_table[mem_index][index].addend + addr */
944 #else /* !CONFIG_SOFTMMU */
945 #if TARGET_LONG_BITS == 32
946 tcg_out_ext32u(s
, addr_reg
, addr_reg
);
950 rbase
= GUEST_BASE
? TCG_GUEST_BASE_REG
: 0;
953 insn
= qemu_stx_opc
[opc
];
954 if (!HAVE_ISA_2_06
&& insn
== STDBRX
) {
955 tcg_out32(s
, STWBRX
| SAB(data_reg
, rbase
, r0
));
956 tcg_out32(s
, ADDI
| TAI(r1
, r0
, 4));
957 tcg_out_shri64(s
, 0, data_reg
, 32);
958 tcg_out32(s
, STWBRX
| SAB(0, rbase
, r1
));
960 tcg_out32(s
, insn
| SAB(data_reg
, rbase
, r0
));
963 #ifdef CONFIG_SOFTMMU
964 reloc_pc24 (label2_ptr
, (tcg_target_long
) s
->code_ptr
);
968 static void tcg_target_qemu_prologue (TCGContext
*s
)
979 + 8 /* compiler doubleword */
980 + 8 /* link editor doubleword */
981 + 8 /* TOC save area */
982 + TCG_STATIC_CALL_ARGS_SIZE
983 + ARRAY_SIZE (tcg_target_callee_save_regs
) * 8
984 + CPU_TEMP_BUF_NLONGS
* sizeof(long)
986 frame_size
= (frame_size
+ 15) & ~15;
988 tcg_set_frame (s
, TCG_REG_CALL_STACK
, frame_size
989 - CPU_TEMP_BUF_NLONGS
* sizeof (long),
990 CPU_TEMP_BUF_NLONGS
* sizeof (long));
993 /* First emit adhoc function descriptor */
994 addr
= (uint64_t) s
->code_ptr
+ 24;
995 tcg_out32 (s
, addr
>> 32); tcg_out32 (s
, addr
); /* entry point */
996 s
->code_ptr
+= 16; /* skip TOC and environment pointer */
1000 tcg_out32 (s
, MFSPR
| RT (0) | LR
);
1001 tcg_out32 (s
, STDU
| RS (1) | RA (1) | (-frame_size
& 0xffff));
1002 for (i
= 0; i
< ARRAY_SIZE (tcg_target_callee_save_regs
); ++i
)
1004 | RS (tcg_target_callee_save_regs
[i
])
1006 | (i
* 8 + 48 + TCG_STATIC_CALL_ARGS_SIZE
)
1009 tcg_out32 (s
, STD
| RS (0) | RA (1) | (frame_size
+ 16));
1011 #ifdef CONFIG_USE_GUEST_BASE
1013 tcg_out_movi (s
, TCG_TYPE_I64
, TCG_GUEST_BASE_REG
, GUEST_BASE
);
1014 tcg_regset_set_reg (s
->reserved_regs
, TCG_GUEST_BASE_REG
);
1018 tcg_out_mov (s
, TCG_TYPE_PTR
, TCG_AREG0
, tcg_target_call_iarg_regs
[0]);
1019 tcg_out32 (s
, MTSPR
| RS (tcg_target_call_iarg_regs
[1]) | CTR
);
1020 tcg_out32 (s
, BCCTR
| BO_ALWAYS
);
1023 tb_ret_addr
= s
->code_ptr
;
1025 for (i
= 0; i
< ARRAY_SIZE (tcg_target_callee_save_regs
); ++i
)
1027 | RT (tcg_target_callee_save_regs
[i
])
1029 | (i
* 8 + 48 + TCG_STATIC_CALL_ARGS_SIZE
)
1032 tcg_out32(s
, LD
| TAI(0, 1, frame_size
+ 16));
1033 tcg_out32(s
, MTSPR
| RS(0) | LR
);
1034 tcg_out32(s
, ADDI
| TAI(1, 1, frame_size
));
1035 tcg_out32(s
, BCLR
| BO_ALWAYS
);
1038 static void tcg_out_ld (TCGContext
*s
, TCGType type
, TCGReg ret
, TCGReg arg1
,
1039 tcg_target_long arg2
)
1041 if (type
== TCG_TYPE_I32
)
1042 tcg_out_ldst (s
, ret
, arg1
, arg2
, LWZ
, LWZX
);
1044 tcg_out_ldsta (s
, ret
, arg1
, arg2
, LD
, LDX
);
1047 static void tcg_out_st (TCGContext
*s
, TCGType type
, TCGReg arg
, TCGReg arg1
,
1048 tcg_target_long arg2
)
1050 if (type
== TCG_TYPE_I32
)
1051 tcg_out_ldst (s
, arg
, arg1
, arg2
, STW
, STWX
);
1053 tcg_out_ldsta (s
, arg
, arg1
, arg2
, STD
, STDX
);
1056 static void tcg_out_cmp (TCGContext
*s
, int cond
, TCGArg arg1
, TCGArg arg2
,
1057 int const_arg2
, int cr
, int arch64
)
1066 if ((int16_t) arg2
== arg2
) {
1071 else if ((uint16_t) arg2
== arg2
) {
1086 if ((int16_t) arg2
== arg2
) {
1101 if ((uint16_t) arg2
== arg2
) {
1114 op
|= BF (cr
) | (arch64
<< 21);
1117 tcg_out32 (s
, op
| RA (arg1
) | (arg2
& 0xffff));
1120 tcg_out_movi (s
, TCG_TYPE_I64
, 0, arg2
);
1121 tcg_out32 (s
, op
| RA (arg1
) | RB (0));
1124 tcg_out32 (s
, op
| RA (arg1
) | RB (arg2
));
1129 static void tcg_out_setcond (TCGContext
*s
, TCGType type
, TCGCond cond
,
1130 TCGArg arg0
, TCGArg arg1
, TCGArg arg2
,
1143 if ((uint16_t) arg2
== arg2
) {
1144 tcg_out32(s
, XORI
| SAI(arg1
, 0, arg2
));
1147 tcg_out_movi (s
, type
, 0, arg2
);
1148 tcg_out32 (s
, XOR
| SAB (arg1
, 0, 0));
1154 tcg_out32 (s
, XOR
| SAB (arg1
, 0, arg2
));
1157 if (type
== TCG_TYPE_I64
) {
1158 tcg_out32 (s
, CNTLZD
| RS (arg
) | RA (0));
1159 tcg_out_rld (s
, RLDICL
, arg0
, 0, 58, 6);
1162 tcg_out32 (s
, CNTLZW
| RS (arg
) | RA (0));
1163 tcg_out_rlw(s
, RLWINM
, arg0
, 0, 27, 5, 31);
1174 if ((uint16_t) arg2
== arg2
) {
1175 tcg_out32(s
, XORI
| SAI(arg1
, 0, arg2
));
1177 tcg_out_movi (s
, type
, 0, arg2
);
1178 tcg_out32 (s
, XOR
| SAB (arg1
, 0, 0));
1184 tcg_out32 (s
, XOR
| SAB (arg1
, 0, arg2
));
1187 /* Make sure and discard the high 32-bits of the input. */
1188 if (type
== TCG_TYPE_I32
) {
1189 tcg_out32(s
, EXTSW
| RA(TCG_REG_R0
) | RS(arg
));
1193 if (arg
== arg1
&& arg1
== arg0
) {
1194 tcg_out32(s
, ADDIC
| TAI(0, arg
, -1));
1195 tcg_out32(s
, SUBFE
| TAB(arg0
, 0, arg
));
1198 tcg_out32(s
, ADDIC
| TAI(arg0
, arg
, -1));
1199 tcg_out32(s
, SUBFE
| TAB(arg0
, arg0
, arg
));
1218 crop
= CRNOR
| BT (7, CR_EQ
) | BA (7, CR_LT
) | BB (7, CR_LT
);
1224 crop
= CRNOR
| BT (7, CR_EQ
) | BA (7, CR_GT
) | BB (7, CR_GT
);
1226 tcg_out_cmp (s
, cond
, arg1
, arg2
, const_arg2
, 7, type
== TCG_TYPE_I64
);
1227 if (crop
) tcg_out32 (s
, crop
);
1228 tcg_out32 (s
, MFCR
| RT (0));
1229 tcg_out_rlw(s
, RLWINM
, arg0
, 0, sh
, 31, 31);
1237 static void tcg_out_bc (TCGContext
*s
, int bc
, int label_index
)
1239 TCGLabel
*l
= &s
->labels
[label_index
];
1242 tcg_out32 (s
, bc
| reloc_pc14_val (s
->code_ptr
, l
->u
.value
));
1244 uint16_t val
= *(uint16_t *) &s
->code_ptr
[2];
1246 /* Thanks to Andrzej Zaborowski */
1247 tcg_out32 (s
, bc
| (val
& 0xfffc));
1248 tcg_out_reloc (s
, s
->code_ptr
- 4, R_PPC_REL14
, label_index
, 0);
1252 static void tcg_out_brcond (TCGContext
*s
, TCGCond cond
,
1253 TCGArg arg1
, TCGArg arg2
, int const_arg2
,
1254 int label_index
, int arch64
)
1256 tcg_out_cmp (s
, cond
, arg1
, arg2
, const_arg2
, 7, arch64
);
1257 tcg_out_bc (s
, tcg_to_bc
[cond
], label_index
);
1260 void ppc_tb_set_jmp_target (unsigned long jmp_addr
, unsigned long addr
)
1263 unsigned long patch_size
;
1265 s
.code_ptr
= (uint8_t *) jmp_addr
;
1266 tcg_out_b (&s
, 0, addr
);
1267 patch_size
= s
.code_ptr
- (uint8_t *) jmp_addr
;
1268 flush_icache_range (jmp_addr
, jmp_addr
+ patch_size
);
1271 static void tcg_out_op (TCGContext
*s
, TCGOpcode opc
, const TCGArg
*args
,
1272 const int *const_args
)
1278 case INDEX_op_exit_tb
:
1279 tcg_out_movi (s
, TCG_TYPE_I64
, TCG_REG_R3
, args
[0]);
1280 tcg_out_b (s
, 0, (tcg_target_long
) tb_ret_addr
);
1282 case INDEX_op_goto_tb
:
1283 if (s
->tb_jmp_offset
) {
1284 /* direct jump method */
1286 s
->tb_jmp_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
1292 s
->tb_next_offset
[args
[0]] = s
->code_ptr
- s
->code_buf
;
1296 TCGLabel
*l
= &s
->labels
[args
[0]];
1299 tcg_out_b (s
, 0, l
->u
.value
);
1302 uint32_t val
= *(uint32_t *) s
->code_ptr
;
1304 /* Thanks to Andrzej Zaborowski */
1305 tcg_out32 (s
, B
| (val
& 0x3fffffc));
1306 tcg_out_reloc (s
, s
->code_ptr
- 4, R_PPC_REL24
, args
[0], 0);
1311 tcg_out_call (s
, args
[0], const_args
[0]);
1313 case INDEX_op_movi_i32
:
1314 tcg_out_movi (s
, TCG_TYPE_I32
, args
[0], args
[1]);
1316 case INDEX_op_movi_i64
:
1317 tcg_out_movi (s
, TCG_TYPE_I64
, args
[0], args
[1]);
1319 case INDEX_op_ld8u_i32
:
1320 case INDEX_op_ld8u_i64
:
1321 tcg_out_ldst (s
, args
[0], args
[1], args
[2], LBZ
, LBZX
);
1323 case INDEX_op_ld8s_i32
:
1324 case INDEX_op_ld8s_i64
:
1325 tcg_out_ldst (s
, args
[0], args
[1], args
[2], LBZ
, LBZX
);
1326 tcg_out32 (s
, EXTSB
| RS (args
[0]) | RA (args
[0]));
1328 case INDEX_op_ld16u_i32
:
1329 case INDEX_op_ld16u_i64
:
1330 tcg_out_ldst (s
, args
[0], args
[1], args
[2], LHZ
, LHZX
);
1332 case INDEX_op_ld16s_i32
:
1333 case INDEX_op_ld16s_i64
:
1334 tcg_out_ldst (s
, args
[0], args
[1], args
[2], LHA
, LHAX
);
1336 case INDEX_op_ld_i32
:
1337 case INDEX_op_ld32u_i64
:
1338 tcg_out_ldst (s
, args
[0], args
[1], args
[2], LWZ
, LWZX
);
1340 case INDEX_op_ld32s_i64
:
1341 tcg_out_ldsta (s
, args
[0], args
[1], args
[2], LWA
, LWAX
);
1343 case INDEX_op_ld_i64
:
1344 tcg_out_ldsta (s
, args
[0], args
[1], args
[2], LD
, LDX
);
1346 case INDEX_op_st8_i32
:
1347 case INDEX_op_st8_i64
:
1348 tcg_out_ldst (s
, args
[0], args
[1], args
[2], STB
, STBX
);
1350 case INDEX_op_st16_i32
:
1351 case INDEX_op_st16_i64
:
1352 tcg_out_ldst (s
, args
[0], args
[1], args
[2], STH
, STHX
);
1354 case INDEX_op_st_i32
:
1355 case INDEX_op_st32_i64
:
1356 tcg_out_ldst (s
, args
[0], args
[1], args
[2], STW
, STWX
);
1358 case INDEX_op_st_i64
:
1359 tcg_out_ldsta (s
, args
[0], args
[1], args
[2], STD
, STDX
);
1362 case INDEX_op_add_i32
:
1363 a0
= args
[0], a1
= args
[1], a2
= args
[2];
1364 if (const_args
[2]) {
1370 tcg_out32(s
, ADDIS
| TAI(a0
, a1
, h
>> 16));
1373 if (l
|| a0
!= a1
) {
1374 tcg_out32(s
, ADDI
| TAI(a0
, a1
, l
));
1377 tcg_out32(s
, ADD
| TAB(a0
, a1
, a2
));
1380 case INDEX_op_sub_i32
:
1381 a0
= args
[0], a1
= args
[1], a2
= args
[2];
1382 if (const_args
[1]) {
1383 if (const_args
[2]) {
1384 tcg_out_movi(s
, TCG_TYPE_I32
, a0
, a1
- a2
);
1386 tcg_out32(s
, SUBFIC
| TAI(a0
, a2
, a1
));
1388 } else if (const_args
[2]) {
1392 tcg_out32(s
, SUBF
| TAB(a0
, a2
, a1
));
1396 case INDEX_op_and_i32
:
1397 a0
= args
[0], a1
= args
[1], a2
= args
[2];
1398 if (const_args
[2]) {
1399 tcg_out_andi32(s
, a0
, a1
, a2
);
1401 tcg_out32(s
, AND
| SAB(a1
, a0
, a2
));
1404 case INDEX_op_and_i64
:
1405 a0
= args
[0], a1
= args
[1], a2
= args
[2];
1406 if (const_args
[2]) {
1407 tcg_out_andi64(s
, a0
, a1
, a2
);
1409 tcg_out32(s
, AND
| SAB(a1
, a0
, a2
));
1412 case INDEX_op_or_i64
:
1413 case INDEX_op_or_i32
:
1414 a0
= args
[0], a1
= args
[1], a2
= args
[2];
1415 if (const_args
[2]) {
1416 tcg_out_ori32(s
, a0
, a1
, a2
);
1418 tcg_out32(s
, OR
| SAB(a1
, a0
, a2
));
1421 case INDEX_op_xor_i64
:
1422 case INDEX_op_xor_i32
:
1423 a0
= args
[0], a1
= args
[1], a2
= args
[2];
1424 if (const_args
[2]) {
1425 tcg_out_xori32(s
, a0
, a1
, a2
);
1427 tcg_out32(s
, XOR
| SAB(a1
, a0
, a2
));
1430 case INDEX_op_andc_i32
:
1431 a0
= args
[0], a1
= args
[1], a2
= args
[2];
1432 if (const_args
[2]) {
1433 tcg_out_andi32(s
, a0
, a1
, ~a2
);
1435 tcg_out32(s
, ANDC
| SAB(a1
, a0
, a2
));
1438 case INDEX_op_andc_i64
:
1439 a0
= args
[0], a1
= args
[1], a2
= args
[2];
1440 if (const_args
[2]) {
1441 tcg_out_andi64(s
, a0
, a1
, ~a2
);
1443 tcg_out32(s
, ANDC
| SAB(a1
, a0
, a2
));
1446 case INDEX_op_orc_i32
:
1447 if (const_args
[2]) {
1448 tcg_out_ori32(s
, args
[0], args
[1], ~args
[2]);
1452 case INDEX_op_orc_i64
:
1453 tcg_out32(s
, ORC
| SAB(args
[1], args
[0], args
[2]));
1455 case INDEX_op_eqv_i32
:
1456 if (const_args
[2]) {
1457 tcg_out_xori32(s
, args
[0], args
[1], ~args
[2]);
1461 case INDEX_op_eqv_i64
:
1462 tcg_out32(s
, EQV
| SAB(args
[1], args
[0], args
[2]));
1464 case INDEX_op_nand_i32
:
1465 case INDEX_op_nand_i64
:
1466 tcg_out32(s
, NAND
| SAB(args
[1], args
[0], args
[2]));
1468 case INDEX_op_nor_i32
:
1469 case INDEX_op_nor_i64
:
1470 tcg_out32(s
, NOR
| SAB(args
[1], args
[0], args
[2]));
1473 case INDEX_op_mul_i32
:
1474 a0
= args
[0], a1
= args
[1], a2
= args
[2];
1475 if (const_args
[2]) {
1476 tcg_out32(s
, MULLI
| TAI(a0
, a1
, a2
));
1478 tcg_out32(s
, MULLW
| TAB(a0
, a1
, a2
));
1482 case INDEX_op_div_i32
:
1483 tcg_out32 (s
, DIVW
| TAB (args
[0], args
[1], args
[2]));
1486 case INDEX_op_divu_i32
:
1487 tcg_out32 (s
, DIVWU
| TAB (args
[0], args
[1], args
[2]));
1490 case INDEX_op_rem_i32
:
1491 tcg_out32 (s
, DIVW
| TAB (0, args
[1], args
[2]));
1492 tcg_out32 (s
, MULLW
| TAB (0, 0, args
[2]));
1493 tcg_out32 (s
, SUBF
| TAB (args
[0], 0, args
[1]));
1496 case INDEX_op_remu_i32
:
1497 tcg_out32 (s
, DIVWU
| TAB (0, args
[1], args
[2]));
1498 tcg_out32 (s
, MULLW
| TAB (0, 0, args
[2]));
1499 tcg_out32 (s
, SUBF
| TAB (args
[0], 0, args
[1]));
1502 case INDEX_op_shl_i32
:
1503 if (const_args
[2]) {
1504 tcg_out_rlw(s
, RLWINM
, args
[0], args
[1], args
[2], 0, 31 - args
[2]);
1506 tcg_out32 (s
, SLW
| SAB (args
[1], args
[0], args
[2]));
1509 case INDEX_op_shr_i32
:
1510 if (const_args
[2]) {
1511 tcg_out_rlw(s
, RLWINM
, args
[0], args
[1], 32 - args
[2], args
[2], 31);
1513 tcg_out32 (s
, SRW
| SAB (args
[1], args
[0], args
[2]));
1516 case INDEX_op_sar_i32
:
1518 tcg_out32 (s
, SRAWI
| RS (args
[1]) | RA (args
[0]) | SH (args
[2]));
1520 tcg_out32 (s
, SRAW
| SAB (args
[1], args
[0], args
[2]));
1522 case INDEX_op_rotl_i32
:
1523 if (const_args
[2]) {
1524 tcg_out_rlw(s
, RLWINM
, args
[0], args
[1], args
[2], 0, 31);
1526 tcg_out32(s
, RLWNM
| SAB(args
[1], args
[0], args
[2])
1530 case INDEX_op_rotr_i32
:
1531 if (const_args
[2]) {
1532 tcg_out_rlw(s
, RLWINM
, args
[0], args
[1], 32 - args
[2], 0, 31);
1534 tcg_out32(s
, SUBFIC
| TAI(0, args
[2], 32));
1535 tcg_out32(s
, RLWNM
| SAB(args
[1], args
[0], args
[2])
1540 case INDEX_op_brcond_i32
:
1541 tcg_out_brcond (s
, args
[2], args
[0], args
[1], const_args
[1], args
[3], 0);
1544 case INDEX_op_brcond_i64
:
1545 tcg_out_brcond (s
, args
[2], args
[0], args
[1], const_args
[1], args
[3], 1);
1548 case INDEX_op_neg_i32
:
1549 case INDEX_op_neg_i64
:
1550 tcg_out32 (s
, NEG
| RT (args
[0]) | RA (args
[1]));
1553 case INDEX_op_not_i32
:
1554 case INDEX_op_not_i64
:
1555 tcg_out32 (s
, NOR
| SAB (args
[1], args
[0], args
[1]));
1558 case INDEX_op_add_i64
:
1559 a0
= args
[0], a1
= args
[1], a2
= args
[2];
1560 if (const_args
[2]) {
1563 /* We can always split any 32-bit signed constant into 3 pieces.
1564 Note the positive 0x80000000 coming from the sub_i64 path,
1565 handled with the same code we need for eg 0x7fff8000. */
1566 assert(a2
== (int32_t)a2
|| a2
== 0x80000000);
1570 if (h1
< 0 && (int64_t)a2
> 0) {
1574 assert((TCGArg
)h2
+ h1
+ l0
== a2
);
1577 tcg_out32(s
, ADDIS
| TAI(a0
, a1
, h2
>> 16));
1581 tcg_out32(s
, ADDIS
| TAI(a0
, a1
, h1
>> 16));
1584 if (l0
|| a0
!= a1
) {
1585 tcg_out32(s
, ADDI
| TAI(a0
, a1
, l0
));
1588 tcg_out32(s
, ADD
| TAB(a0
, a1
, a2
));
1591 case INDEX_op_sub_i64
:
1592 a0
= args
[0], a1
= args
[1], a2
= args
[2];
1593 if (const_args
[1]) {
1594 if (const_args
[2]) {
1595 tcg_out_movi(s
, TCG_TYPE_I64
, a0
, a1
- a2
);
1597 tcg_out32(s
, SUBFIC
| TAI(a0
, a2
, a1
));
1599 } else if (const_args
[2]) {
1603 tcg_out32(s
, SUBF
| TAB(a0
, a2
, a1
));
1607 case INDEX_op_shl_i64
:
1609 tcg_out_shli64(s
, args
[0], args
[1], args
[2]);
1611 tcg_out32 (s
, SLD
| SAB (args
[1], args
[0], args
[2]));
1613 case INDEX_op_shr_i64
:
1615 tcg_out_shri64(s
, args
[0], args
[1], args
[2]);
1617 tcg_out32 (s
, SRD
| SAB (args
[1], args
[0], args
[2]));
1619 case INDEX_op_sar_i64
:
1620 if (const_args
[2]) {
1621 int sh
= SH (args
[2] & 0x1f) | (((args
[2] >> 5) & 1) << 1);
1622 tcg_out32 (s
, SRADI
| RA (args
[0]) | RS (args
[1]) | sh
);
1625 tcg_out32 (s
, SRAD
| SAB (args
[1], args
[0], args
[2]));
1627 case INDEX_op_rotl_i64
:
1628 if (const_args
[2]) {
1629 tcg_out_rld(s
, RLDICL
, args
[0], args
[1], args
[2], 0);
1631 tcg_out32(s
, RLDCL
| SAB(args
[1], args
[0], args
[2]) | MB64(0));
1634 case INDEX_op_rotr_i64
:
1635 if (const_args
[2]) {
1636 tcg_out_rld(s
, RLDICL
, args
[0], args
[1], 64 - args
[2], 0);
1638 tcg_out32(s
, SUBFIC
| TAI(0, args
[2], 64));
1639 tcg_out32(s
, RLDCL
| SAB(args
[1], args
[0], 0) | MB64(0));
1643 case INDEX_op_mul_i64
:
1644 a0
= args
[0], a1
= args
[1], a2
= args
[2];
1645 if (const_args
[2]) {
1646 tcg_out32(s
, MULLI
| TAI(a0
, a1
, a2
));
1648 tcg_out32(s
, MULLD
| TAB(a0
, a1
, a2
));
1651 case INDEX_op_div_i64
:
1652 tcg_out32 (s
, DIVD
| TAB (args
[0], args
[1], args
[2]));
1654 case INDEX_op_divu_i64
:
1655 tcg_out32 (s
, DIVDU
| TAB (args
[0], args
[1], args
[2]));
1657 case INDEX_op_rem_i64
:
1658 tcg_out32 (s
, DIVD
| TAB (0, args
[1], args
[2]));
1659 tcg_out32 (s
, MULLD
| TAB (0, 0, args
[2]));
1660 tcg_out32 (s
, SUBF
| TAB (args
[0], 0, args
[1]));
1662 case INDEX_op_remu_i64
:
1663 tcg_out32 (s
, DIVDU
| TAB (0, args
[1], args
[2]));
1664 tcg_out32 (s
, MULLD
| TAB (0, 0, args
[2]));
1665 tcg_out32 (s
, SUBF
| TAB (args
[0], 0, args
[1]));
1668 case INDEX_op_qemu_ld8u
:
1669 tcg_out_qemu_ld (s
, args
, 0);
1671 case INDEX_op_qemu_ld8s
:
1672 tcg_out_qemu_ld (s
, args
, 0 | 4);
1674 case INDEX_op_qemu_ld16u
:
1675 tcg_out_qemu_ld (s
, args
, 1);
1677 case INDEX_op_qemu_ld16s
:
1678 tcg_out_qemu_ld (s
, args
, 1 | 4);
1680 case INDEX_op_qemu_ld32
:
1681 case INDEX_op_qemu_ld32u
:
1682 tcg_out_qemu_ld (s
, args
, 2);
1684 case INDEX_op_qemu_ld32s
:
1685 tcg_out_qemu_ld (s
, args
, 2 | 4);
1687 case INDEX_op_qemu_ld64
:
1688 tcg_out_qemu_ld (s
, args
, 3);
1690 case INDEX_op_qemu_st8
:
1691 tcg_out_qemu_st (s
, args
, 0);
1693 case INDEX_op_qemu_st16
:
1694 tcg_out_qemu_st (s
, args
, 1);
1696 case INDEX_op_qemu_st32
:
1697 tcg_out_qemu_st (s
, args
, 2);
1699 case INDEX_op_qemu_st64
:
1700 tcg_out_qemu_st (s
, args
, 3);
1703 case INDEX_op_ext8s_i32
:
1704 case INDEX_op_ext8s_i64
:
1707 case INDEX_op_ext16s_i32
:
1708 case INDEX_op_ext16s_i64
:
1711 case INDEX_op_ext32s_i64
:
1715 tcg_out32 (s
, c
| RS (args
[1]) | RA (args
[0]));
1718 case INDEX_op_setcond_i32
:
1719 tcg_out_setcond (s
, TCG_TYPE_I32
, args
[3], args
[0], args
[1], args
[2],
1722 case INDEX_op_setcond_i64
:
1723 tcg_out_setcond (s
, TCG_TYPE_I64
, args
[3], args
[0], args
[1], args
[2],
1727 case INDEX_op_bswap16_i32
:
1728 case INDEX_op_bswap16_i64
:
1729 a0
= args
[0], a1
= args
[1];
1732 /* a0 = (a1 r<< 24) & 0xff # 000c */
1733 tcg_out_rlw(s
, RLWINM
, a0
, a1
, 24, 24, 31);
1734 /* a0 = (a0 & ~0xff00) | (a1 r<< 8) & 0xff00 # 00dc */
1735 tcg_out_rlw(s
, RLWIMI
, a0
, a1
, 8, 16, 23);
1737 /* r0 = (a1 r<< 8) & 0xff00 # 00d0 */
1738 tcg_out_rlw(s
, RLWINM
, TCG_REG_R0
, a1
, 8, 16, 23);
1739 /* a0 = (a1 r<< 24) & 0xff # 000c */
1740 tcg_out_rlw(s
, RLWINM
, a0
, a1
, 24, 24, 31);
1741 /* a0 = a0 | r0 # 00dc */
1742 tcg_out32(s
, OR
| SAB(TCG_REG_R0
, a0
, a0
));
1746 case INDEX_op_bswap32_i32
:
1747 case INDEX_op_bswap32_i64
:
1748 /* Stolen from gcc's builtin_bswap32 */
1750 a0
= args
[0] == a1
? TCG_REG_R0
: args
[0];
1752 /* a1 = args[1] # abcd */
1753 /* a0 = rotate_left (a1, 8) # bcda */
1754 tcg_out_rlw(s
, RLWINM
, a0
, a1
, 8, 0, 31);
1755 /* a0 = (a0 & ~0xff000000) | ((a1 r<< 24) & 0xff000000) # dcda */
1756 tcg_out_rlw(s
, RLWIMI
, a0
, a1
, 24, 0, 7);
1757 /* a0 = (a0 & ~0x0000ff00) | ((a1 r<< 24) & 0x0000ff00) # dcba */
1758 tcg_out_rlw(s
, RLWIMI
, a0
, a1
, 24, 16, 23);
1760 if (a0
== TCG_REG_R0
) {
1761 tcg_out_mov(s
, TCG_TYPE_I64
, args
[0], a0
);
1765 case INDEX_op_bswap64_i64
:
1766 a0
= args
[0], a1
= args
[1], a2
= 0;
1772 /* a1 = # abcd efgh */
1773 /* a0 = rl32(a1, 8) # 0000 fghe */
1774 tcg_out_rlw(s
, RLWINM
, a0
, a1
, 8, 0, 31);
1775 /* a0 = dep(a0, rl32(a1, 24), 0xff000000) # 0000 hghe */
1776 tcg_out_rlw(s
, RLWIMI
, a0
, a1
, 24, 0, 7);
1777 /* a0 = dep(a0, rl32(a1, 24), 0x0000ff00) # 0000 hgfe */
1778 tcg_out_rlw(s
, RLWIMI
, a0
, a1
, 24, 16, 23);
1780 /* a0 = rl64(a0, 32) # hgfe 0000 */
1781 /* a2 = rl64(a1, 32) # efgh abcd */
1782 tcg_out_rld(s
, RLDICL
, a0
, a0
, 32, 0);
1783 tcg_out_rld(s
, RLDICL
, a2
, a1
, 32, 0);
1785 /* a0 = dep(a0, rl32(a2, 8), 0xffffffff) # hgfe bcda */
1786 tcg_out_rlw(s
, RLWIMI
, a0
, a2
, 8, 0, 31);
1787 /* a0 = dep(a0, rl32(a2, 24), 0xff000000) # hgfe dcda */
1788 tcg_out_rlw(s
, RLWIMI
, a0
, a2
, 24, 0, 7);
1789 /* a0 = dep(a0, rl32(a2, 24), 0x0000ff00) # hgfe dcba */
1790 tcg_out_rlw(s
, RLWIMI
, a0
, a2
, 24, 16, 23);
1793 tcg_out_mov(s
, TCG_TYPE_I64
, args
[0], a0
);
1794 /* Revert the source rotate that we performed above. */
1795 tcg_out_rld(s
, RLDICL
, a1
, a1
, 32, 0);
1799 case INDEX_op_deposit_i32
:
1800 tcg_out_rlw(s
, RLWIMI
, args
[0], args
[2], args
[3],
1801 32 - args
[3] - args
[4], 31 - args
[3]);
1803 case INDEX_op_deposit_i64
:
1804 tcg_out_rld(s
, RLDIMI
, args
[0], args
[2], args
[3],
1805 64 - args
[3] - args
[4]);
/*
 * Operand-constraint table for the PowerPC64 TCG backend: one entry per
 * supported TCG opcode, pairing the opcode with the constraint string for
 * each of its operands (output operands first, then inputs).
 * NOTE(review): the meaning of the constraint letters ("r", "i", "I", "T",
 * "U", "L", "S", "0") is defined by this backend's constraint parser,
 * which is outside this chunk -- presumably "r" = any GPR and the capital
 * letters select the TCG_CT_CONST_* immediate classes defined at the top
 * of the file; confirm against tcg_target_conv/parse_constraint.
 * NOTE(review): the decimal prefixes on these lines and the missing
 * closing "};" are pre-existing damage in this copy of the file,
 * preserved verbatim here.
 */
1814 static const TCGTargetOpDef ppc_op_defs
[] = {
/* Control flow. */
1815 { INDEX_op_exit_tb
, { } },
1816 { INDEX_op_goto_tb
, { } },
1817 { INDEX_op_call
, { "ri" } },
1818 { INDEX_op_br
, { } },
/* Register moves and constant loads. */
1820 { INDEX_op_mov_i32
, { "r", "r" } },
1821 { INDEX_op_mov_i64
, { "r", "r" } },
1822 { INDEX_op_movi_i32
, { "r" } },
1823 { INDEX_op_movi_i64
, { "r" } },
/* Host loads/stores (32-bit values, plus the 64-bit ld/st entries). */
1825 { INDEX_op_ld8u_i32
, { "r", "r" } },
1826 { INDEX_op_ld8s_i32
, { "r", "r" } },
1827 { INDEX_op_ld16u_i32
, { "r", "r" } },
1828 { INDEX_op_ld16s_i32
, { "r", "r" } },
1829 { INDEX_op_ld_i32
, { "r", "r" } },
1830 { INDEX_op_ld_i64
, { "r", "r" } },
1831 { INDEX_op_st8_i32
, { "r", "r" } },
1832 { INDEX_op_st8_i64
, { "r", "r" } },
1833 { INDEX_op_st16_i32
, { "r", "r" } },
1834 { INDEX_op_st16_i64
, { "r", "r" } },
1835 { INDEX_op_st_i32
, { "r", "r" } },
1836 { INDEX_op_st_i64
, { "r", "r" } },
1837 { INDEX_op_st32_i64
, { "r", "r" } },
/* 64-bit partial-width loads. */
1839 { INDEX_op_ld8u_i64
, { "r", "r" } },
1840 { INDEX_op_ld8s_i64
, { "r", "r" } },
1841 { INDEX_op_ld16u_i64
, { "r", "r" } },
1842 { INDEX_op_ld16s_i64
, { "r", "r" } },
1843 { INDEX_op_ld32u_i64
, { "r", "r" } },
1844 { INDEX_op_ld32s_i64
, { "r", "r" } },
/* 32-bit arithmetic and logic; div/rem require all-register operands. */
1846 { INDEX_op_add_i32
, { "r", "r", "ri" } },
1847 { INDEX_op_mul_i32
, { "r", "r", "rI" } },
1848 { INDEX_op_div_i32
, { "r", "r", "r" } },
1849 { INDEX_op_divu_i32
, { "r", "r", "r" } },
1850 { INDEX_op_rem_i32
, { "r", "r", "r" } },
1851 { INDEX_op_remu_i32
, { "r", "r", "r" } },
1852 { INDEX_op_sub_i32
, { "r", "rI", "ri" } },
1853 { INDEX_op_and_i32
, { "r", "r", "ri" } },
1854 { INDEX_op_or_i32
, { "r", "r", "ri" } },
1855 { INDEX_op_xor_i32
, { "r", "r", "ri" } },
1856 { INDEX_op_andc_i32
, { "r", "r", "ri" } },
1857 { INDEX_op_orc_i32
, { "r", "r", "ri" } },
1858 { INDEX_op_eqv_i32
, { "r", "r", "ri" } },
1859 { INDEX_op_nand_i32
, { "r", "r", "r" } },
1860 { INDEX_op_nor_i32
, { "r", "r", "r" } },
/* 32-bit shifts/rotates: count may be register or immediate. */
1862 { INDEX_op_shl_i32
, { "r", "r", "ri" } },
1863 { INDEX_op_shr_i32
, { "r", "r", "ri" } },
1864 { INDEX_op_sar_i32
, { "r", "r", "ri" } },
1865 { INDEX_op_rotl_i32
, { "r", "r", "ri" } },
1866 { INDEX_op_rotr_i32
, { "r", "r", "ri" } },
/* Conditional branches. */
1868 { INDEX_op_brcond_i32
, { "r", "ri" } },
1869 { INDEX_op_brcond_i64
, { "r", "ri" } },
1871 { INDEX_op_neg_i32
, { "r", "r" } },
1872 { INDEX_op_not_i32
, { "r", "r" } },
/* 64-bit arithmetic and logic; note the distinct immediate classes
   (rT, rU, rI) used where 64-bit ops accept constants. */
1874 { INDEX_op_add_i64
, { "r", "r", "rT" } },
1875 { INDEX_op_sub_i64
, { "r", "rI", "rT" } },
1876 { INDEX_op_and_i64
, { "r", "r", "ri" } },
1877 { INDEX_op_or_i64
, { "r", "r", "rU" } },
1878 { INDEX_op_xor_i64
, { "r", "r", "rU" } },
1879 { INDEX_op_andc_i64
, { "r", "r", "ri" } },
1880 { INDEX_op_orc_i64
, { "r", "r", "r" } },
1881 { INDEX_op_eqv_i64
, { "r", "r", "r" } },
1882 { INDEX_op_nand_i64
, { "r", "r", "r" } },
1883 { INDEX_op_nor_i64
, { "r", "r", "r" } },
/* 64-bit shifts/rotates. */
1885 { INDEX_op_shl_i64
, { "r", "r", "ri" } },
1886 { INDEX_op_shr_i64
, { "r", "r", "ri" } },
1887 { INDEX_op_sar_i64
, { "r", "r", "ri" } },
1888 { INDEX_op_rotl_i64
, { "r", "r", "ri" } },
1889 { INDEX_op_rotr_i64
, { "r", "r", "ri" } },
/* 64-bit multiply/divide/remainder. */
1891 { INDEX_op_mul_i64
, { "r", "r", "rI" } },
1892 { INDEX_op_div_i64
, { "r", "r", "r" } },
1893 { INDEX_op_divu_i64
, { "r", "r", "r" } },
1894 { INDEX_op_rem_i64
, { "r", "r", "r" } },
1895 { INDEX_op_remu_i64
, { "r", "r", "r" } },
1897 { INDEX_op_neg_i64
, { "r", "r" } },
1898 { INDEX_op_not_i64
, { "r", "r" } },
/* Guest-memory loads/stores; "L" and "S" are the qemu_ld/qemu_st
   address/data classes -- defined by the constraint parser, not
   visible in this chunk. */
1900 { INDEX_op_qemu_ld8u
, { "r", "L" } },
1901 { INDEX_op_qemu_ld8s
, { "r", "L" } },
1902 { INDEX_op_qemu_ld16u
, { "r", "L" } },
1903 { INDEX_op_qemu_ld16s
, { "r", "L" } },
1904 { INDEX_op_qemu_ld32
, { "r", "L" } },
1905 { INDEX_op_qemu_ld32u
, { "r", "L" } },
1906 { INDEX_op_qemu_ld32s
, { "r", "L" } },
1907 { INDEX_op_qemu_ld64
, { "r", "L" } },
1909 { INDEX_op_qemu_st8
, { "S", "S" } },
1910 { INDEX_op_qemu_st16
, { "S", "S" } },
1911 { INDEX_op_qemu_st32
, { "S", "S" } },
1912 { INDEX_op_qemu_st64
, { "S", "S" } },
/* Sign extensions. */
1914 { INDEX_op_ext8s_i32
, { "r", "r" } },
1915 { INDEX_op_ext16s_i32
, { "r", "r" } },
1916 { INDEX_op_ext8s_i64
, { "r", "r" } },
1917 { INDEX_op_ext16s_i64
, { "r", "r" } },
1918 { INDEX_op_ext32s_i64
, { "r", "r" } },
/* Condition materialization. */
1920 { INDEX_op_setcond_i32
, { "r", "r", "ri" } },
1921 { INDEX_op_setcond_i64
, { "r", "r", "ri" } },
/* Byte swaps. */
1923 { INDEX_op_bswap16_i32
, { "r", "r" } },
1924 { INDEX_op_bswap16_i64
, { "r", "r" } },
1925 { INDEX_op_bswap32_i32
, { "r", "r" } },
1926 { INDEX_op_bswap32_i64
, { "r", "r" } },
1927 { INDEX_op_bswap64_i64
, { "r", "r" } },
/* Bit-field deposit: "0" ties the output to the first input register. */
1929 { INDEX_op_deposit_i32
, { "r", "0", "r" } },
1930 { INDEX_op_deposit_i64
, { "r", "0", "r" } },
1935 static void tcg_target_init (TCGContext
*s
)
1937 tcg_regset_set32 (tcg_target_available_regs
[TCG_TYPE_I32
], 0, 0xffffffff);
1938 tcg_regset_set32 (tcg_target_available_regs
[TCG_TYPE_I64
], 0, 0xffffffff);
1939 tcg_regset_set32 (tcg_target_call_clobber_regs
, 0,
1951 (1 << TCG_REG_R10
) |
1952 (1 << TCG_REG_R11
) |
1956 tcg_regset_clear (s
->reserved_regs
);
1957 tcg_regset_set_reg (s
->reserved_regs
, TCG_REG_R0
);
1958 tcg_regset_set_reg (s
->reserved_regs
, TCG_REG_R1
);
1960 tcg_regset_set_reg (s
->reserved_regs
, TCG_REG_R2
);
1962 tcg_regset_set_reg (s
->reserved_regs
, TCG_REG_R13
);
1964 tcg_add_target_add_op_defs (ppc_op_defs
);