4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
37 #define DYNAMIC_PC 1 /* dynamic pc value */
38 #define JUMP_PC 2 /* dynamic pc value which takes only two values
39 according to jump_pc[T2] */
41 /* global register indexes */
42 static TCGv_ptr cpu_env
, cpu_regwptr
;
43 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
44 static TCGv_i32 cpu_cc_op
;
45 static TCGv_i32 cpu_psr
;
46 static TCGv cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
48 #ifndef CONFIG_USER_ONLY
51 static TCGv cpu_cond
, cpu_dst
, cpu_addr
, cpu_val
;
53 static TCGv_i32 cpu_xcc
, cpu_asi
, cpu_fprs
;
55 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
56 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
57 static TCGv_i32 cpu_softint
;
61 /* local register indexes (only used inside old micro ops) */
63 static TCGv_i32 cpu_tmp32
;
64 static TCGv_i64 cpu_tmp64
;
65 /* Floating point registers */
66 static TCGv_i64 cpu_fpr
[TARGET_DPREGS
];
68 static target_ulong gen_opc_npc
[OPC_BUF_SIZE
];
69 static target_ulong gen_opc_jump_pc
[2];
71 #include "gen-icount.h"
73 typedef struct DisasContext
{
74 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
75 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
80 int address_mask_32bit
;
82 uint32_t cc_op
; /* current CC operation */
83 struct TranslationBlock
*tb
;
89 // This function uses non-native bit order
90 #define GET_FIELD(X, FROM, TO) \
91 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
93 // This function uses the order in the manuals, i.e. bit 0 is 2^0
94 #define GET_FIELD_SP(X, FROM, TO) \
95 GET_FIELD(X, 31 - (TO), 31 - (FROM))
97 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
98 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
100 #ifdef TARGET_SPARC64
101 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
102 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
104 #define DFPREG(r) (r & 0x1e)
105 #define QFPREG(r) (r & 0x1c)
108 #define UA2005_HTRAP_MASK 0xff
109 #define V8_TRAP_MASK 0x7f
/* Sign-extend the low 'len' bits of 'x' to a full int.
   Used by GET_FIELDs/GET_FIELD_SPs, which pass the field *width*;
   the shift count must therefore be 32 - len, not len itself.  */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
117 #define IS_IMM (insn & (1<<13))
/* Mark the FPU register halves (lower/upper 32 dregs) dirty in FPRS.
   Only meaningful on sparc64, where FPRS.DL/DU track dirty state.  */
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
126 /* floating point registers moves */
127 static TCGv_i32
gen_load_fpr_F(DisasContext
*dc
, unsigned int src
)
129 #if TCG_TARGET_REG_BITS == 32
131 return TCGV_LOW(cpu_fpr
[src
/ 2]);
133 return TCGV_HIGH(cpu_fpr
[src
/ 2]);
137 return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr
[src
/ 2]));
139 TCGv_i32 ret
= tcg_temp_local_new_i32();
140 TCGv_i64 t
= tcg_temp_new_i64();
142 tcg_gen_shri_i64(t
, cpu_fpr
[src
/ 2], 32);
143 tcg_gen_trunc_i64_i32(ret
, t
);
144 tcg_temp_free_i64(t
);
146 dc
->t32
[dc
->n_t32
++] = ret
;
147 assert(dc
->n_t32
<= ARRAY_SIZE(dc
->t32
));
154 static void gen_store_fpr_F(DisasContext
*dc
, unsigned int dst
, TCGv_i32 v
)
156 #if TCG_TARGET_REG_BITS == 32
158 tcg_gen_mov_i32(TCGV_LOW(cpu_fpr
[dst
/ 2]), v
);
160 tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr
[dst
/ 2]), v
);
163 TCGv_i64 t
= MAKE_TCGV_I64(GET_TCGV_I32(v
));
164 tcg_gen_deposit_i64(cpu_fpr
[dst
/ 2], cpu_fpr
[dst
/ 2], t
,
165 (dst
& 1 ? 0 : 32), 32);
167 gen_update_fprs_dirty(dst
);
170 static TCGv_i32
gen_dest_fpr_F(void)
175 static TCGv_i64
gen_load_fpr_D(DisasContext
*dc
, unsigned int src
)
178 return cpu_fpr
[src
/ 2];
181 static void gen_store_fpr_D(DisasContext
*dc
, unsigned int dst
, TCGv_i64 v
)
184 tcg_gen_mov_i64(cpu_fpr
[dst
/ 2], v
);
185 gen_update_fprs_dirty(dst
);
188 static TCGv_i64
gen_dest_fpr_D(void)
193 static void gen_op_load_fpr_QT0(unsigned int src
)
195 tcg_gen_st_i64(cpu_fpr
[src
/ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
196 offsetof(CPU_QuadU
, ll
.upper
));
197 tcg_gen_st_i64(cpu_fpr
[src
/2 + 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
198 offsetof(CPU_QuadU
, ll
.lower
));
201 static void gen_op_load_fpr_QT1(unsigned int src
)
203 tcg_gen_st_i64(cpu_fpr
[src
/ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
204 offsetof(CPU_QuadU
, ll
.upper
));
205 tcg_gen_st_i64(cpu_fpr
[src
/2 + 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
206 offsetof(CPU_QuadU
, ll
.lower
));
209 static void gen_op_store_QT0_fpr(unsigned int dst
)
211 tcg_gen_ld_i64(cpu_fpr
[dst
/ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
212 offsetof(CPU_QuadU
, ll
.upper
));
213 tcg_gen_ld_i64(cpu_fpr
[dst
/2 + 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
214 offsetof(CPU_QuadU
, ll
.lower
));
217 #ifdef TARGET_SPARC64
218 static void gen_move_Q(unsigned int rd
, unsigned int rs
)
223 tcg_gen_mov_i64(cpu_fpr
[rd
/ 2], cpu_fpr
[rs
/ 2]);
224 tcg_gen_mov_i64(cpu_fpr
[rd
/ 2 + 1], cpu_fpr
[rs
/ 2 + 1]);
225 gen_update_fprs_dirty(rd
);
230 #ifdef CONFIG_USER_ONLY
231 #define supervisor(dc) 0
232 #ifdef TARGET_SPARC64
233 #define hypervisor(dc) 0
236 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
237 #ifdef TARGET_SPARC64
238 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
243 #ifdef TARGET_SPARC64
245 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
247 #define AM_CHECK(dc) (1)
251 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
253 #ifdef TARGET_SPARC64
255 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
259 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
262 tcg_gen_movi_tl(tn
, 0);
264 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
266 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
270 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
275 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
277 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
281 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
282 target_ulong pc
, target_ulong npc
)
284 TranslationBlock
*tb
;
287 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
288 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
290 /* jump to same page: we can use a direct jump */
291 tcg_gen_goto_tb(tb_num
);
292 tcg_gen_movi_tl(cpu_pc
, pc
);
293 tcg_gen_movi_tl(cpu_npc
, npc
);
294 tcg_gen_exit_tb((tcg_target_long
)tb
+ tb_num
);
296 /* jump to another page: currently not optimized */
297 tcg_gen_movi_tl(cpu_pc
, pc
);
298 tcg_gen_movi_tl(cpu_npc
, npc
);
304 static inline void gen_mov_reg_N(TCGv reg
, TCGv_i32 src
)
306 tcg_gen_extu_i32_tl(reg
, src
);
307 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
308 tcg_gen_andi_tl(reg
, reg
, 0x1);
311 static inline void gen_mov_reg_Z(TCGv reg
, TCGv_i32 src
)
313 tcg_gen_extu_i32_tl(reg
, src
);
314 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
315 tcg_gen_andi_tl(reg
, reg
, 0x1);
318 static inline void gen_mov_reg_V(TCGv reg
, TCGv_i32 src
)
320 tcg_gen_extu_i32_tl(reg
, src
);
321 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
322 tcg_gen_andi_tl(reg
, reg
, 0x1);
325 static inline void gen_mov_reg_C(TCGv reg
, TCGv_i32 src
)
327 tcg_gen_extu_i32_tl(reg
, src
);
328 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
329 tcg_gen_andi_tl(reg
, reg
, 0x1);
332 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
338 l1
= gen_new_label();
340 r_temp
= tcg_temp_new();
341 tcg_gen_xor_tl(r_temp
, src1
, src2
);
342 tcg_gen_not_tl(r_temp
, r_temp
);
343 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
344 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
345 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
346 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
347 r_const
= tcg_const_i32(TT_TOVF
);
348 gen_helper_raise_exception(cpu_env
, r_const
);
349 tcg_temp_free_i32(r_const
);
351 tcg_temp_free(r_temp
);
354 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
359 l1
= gen_new_label();
360 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
361 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
362 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
363 r_const
= tcg_const_i32(TT_TOVF
);
364 gen_helper_raise_exception(cpu_env
, r_const
);
365 tcg_temp_free_i32(r_const
);
369 static inline void gen_op_addi_cc(TCGv dst
, TCGv src1
, target_long src2
)
371 tcg_gen_mov_tl(cpu_cc_src
, src1
);
372 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
373 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
374 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
377 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
379 tcg_gen_mov_tl(cpu_cc_src
, src1
);
380 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
381 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
382 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
385 static TCGv_i32
gen_add32_carry32(void)
387 TCGv_i32 carry_32
, cc_src1_32
, cc_src2_32
;
389 /* Carry is computed from a previous add: (dst < src) */
390 #if TARGET_LONG_BITS == 64
391 cc_src1_32
= tcg_temp_new_i32();
392 cc_src2_32
= tcg_temp_new_i32();
393 tcg_gen_trunc_i64_i32(cc_src1_32
, cpu_cc_dst
);
394 tcg_gen_trunc_i64_i32(cc_src2_32
, cpu_cc_src
);
396 cc_src1_32
= cpu_cc_dst
;
397 cc_src2_32
= cpu_cc_src
;
400 carry_32
= tcg_temp_new_i32();
401 tcg_gen_setcond_i32(TCG_COND_LTU
, carry_32
, cc_src1_32
, cc_src2_32
);
403 #if TARGET_LONG_BITS == 64
404 tcg_temp_free_i32(cc_src1_32
);
405 tcg_temp_free_i32(cc_src2_32
);
411 static TCGv_i32
gen_sub32_carry32(void)
413 TCGv_i32 carry_32
, cc_src1_32
, cc_src2_32
;
415 /* Carry is computed from a previous borrow: (src1 < src2) */
416 #if TARGET_LONG_BITS == 64
417 cc_src1_32
= tcg_temp_new_i32();
418 cc_src2_32
= tcg_temp_new_i32();
419 tcg_gen_trunc_i64_i32(cc_src1_32
, cpu_cc_src
);
420 tcg_gen_trunc_i64_i32(cc_src2_32
, cpu_cc_src2
);
422 cc_src1_32
= cpu_cc_src
;
423 cc_src2_32
= cpu_cc_src2
;
426 carry_32
= tcg_temp_new_i32();
427 tcg_gen_setcond_i32(TCG_COND_LTU
, carry_32
, cc_src1_32
, cc_src2_32
);
429 #if TARGET_LONG_BITS == 64
430 tcg_temp_free_i32(cc_src1_32
);
431 tcg_temp_free_i32(cc_src2_32
);
437 static void gen_op_addx_int(DisasContext
*dc
, TCGv dst
, TCGv src1
,
438 TCGv src2
, int update_cc
)
446 /* Carry is known to be zero. Fall back to plain ADD. */
448 gen_op_add_cc(dst
, src1
, src2
);
450 tcg_gen_add_tl(dst
, src1
, src2
);
457 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
459 /* For 32-bit hosts, we can re-use the host's hardware carry
460 generation by using an ADD2 opcode. We discard the low
461 part of the output. Ideally we'd combine this operation
462 with the add that generated the carry in the first place. */
463 TCGv dst_low
= tcg_temp_new();
464 tcg_gen_op6_i32(INDEX_op_add2_i32
, dst_low
, dst
,
465 cpu_cc_src
, src1
, cpu_cc_src2
, src2
);
466 tcg_temp_free(dst_low
);
470 carry_32
= gen_add32_carry32();
476 carry_32
= gen_sub32_carry32();
480 /* We need external help to produce the carry. */
481 carry_32
= tcg_temp_new_i32();
482 gen_helper_compute_C_icc(carry_32
, cpu_env
);
486 #if TARGET_LONG_BITS == 64
487 carry
= tcg_temp_new();
488 tcg_gen_extu_i32_i64(carry
, carry_32
);
493 tcg_gen_add_tl(dst
, src1
, src2
);
494 tcg_gen_add_tl(dst
, dst
, carry
);
496 tcg_temp_free_i32(carry_32
);
497 #if TARGET_LONG_BITS == 64
498 tcg_temp_free(carry
);
501 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
505 tcg_gen_mov_tl(cpu_cc_src
, src1
);
506 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
507 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
508 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
509 dc
->cc_op
= CC_OP_ADDX
;
513 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
515 tcg_gen_mov_tl(cpu_cc_src
, src1
);
516 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
517 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
518 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
521 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
523 tcg_gen_mov_tl(cpu_cc_src
, src1
);
524 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
525 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
526 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
527 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
528 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
531 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
537 l1
= gen_new_label();
539 r_temp
= tcg_temp_new();
540 tcg_gen_xor_tl(r_temp
, src1
, src2
);
541 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
542 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
543 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
544 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
545 r_const
= tcg_const_i32(TT_TOVF
);
546 gen_helper_raise_exception(cpu_env
, r_const
);
547 tcg_temp_free_i32(r_const
);
549 tcg_temp_free(r_temp
);
552 static inline void gen_op_subi_cc(TCGv dst
, TCGv src1
, target_long src2
, DisasContext
*dc
)
554 tcg_gen_mov_tl(cpu_cc_src
, src1
);
555 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
557 tcg_gen_mov_tl(cpu_cc_dst
, src1
);
558 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
559 dc
->cc_op
= CC_OP_LOGIC
;
561 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
562 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
563 dc
->cc_op
= CC_OP_SUB
;
565 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
568 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
570 tcg_gen_mov_tl(cpu_cc_src
, src1
);
571 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
572 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
573 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
576 static void gen_op_subx_int(DisasContext
*dc
, TCGv dst
, TCGv src1
,
577 TCGv src2
, int update_cc
)
585 /* Carry is known to be zero. Fall back to plain SUB. */
587 gen_op_sub_cc(dst
, src1
, src2
);
589 tcg_gen_sub_tl(dst
, src1
, src2
);
596 carry_32
= gen_add32_carry32();
602 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
604 /* For 32-bit hosts, we can re-use the host's hardware carry
605 generation by using a SUB2 opcode. We discard the low
606 part of the output. Ideally we'd combine this operation
607 with the add that generated the carry in the first place. */
608 TCGv dst_low
= tcg_temp_new();
609 tcg_gen_op6_i32(INDEX_op_sub2_i32
, dst_low
, dst
,
610 cpu_cc_src
, src1
, cpu_cc_src2
, src2
);
611 tcg_temp_free(dst_low
);
615 carry_32
= gen_sub32_carry32();
619 /* We need external help to produce the carry. */
620 carry_32
= tcg_temp_new_i32();
621 gen_helper_compute_C_icc(carry_32
, cpu_env
);
625 #if TARGET_LONG_BITS == 64
626 carry
= tcg_temp_new();
627 tcg_gen_extu_i32_i64(carry
, carry_32
);
632 tcg_gen_sub_tl(dst
, src1
, src2
);
633 tcg_gen_sub_tl(dst
, dst
, carry
);
635 tcg_temp_free_i32(carry_32
);
636 #if TARGET_LONG_BITS == 64
637 tcg_temp_free(carry
);
640 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
644 tcg_gen_mov_tl(cpu_cc_src
, src1
);
645 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
646 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
647 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
648 dc
->cc_op
= CC_OP_SUBX
;
652 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
654 tcg_gen_mov_tl(cpu_cc_src
, src1
);
655 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
656 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
657 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
660 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
662 tcg_gen_mov_tl(cpu_cc_src
, src1
);
663 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
664 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
665 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
666 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
667 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
670 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
675 l1
= gen_new_label();
676 r_temp
= tcg_temp_new();
682 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
683 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
684 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
685 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
686 tcg_gen_movi_tl(cpu_cc_src2
, 0);
690 // env->y = (b2 << 31) | (env->y >> 1);
691 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
692 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
693 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
694 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
695 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
696 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
699 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
700 gen_mov_reg_V(r_temp
, cpu_psr
);
701 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
702 tcg_temp_free(r_temp
);
704 // T0 = (b1 << 31) | (T0 >> 1);
706 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
707 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
708 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
710 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
712 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
715 static inline void gen_op_multiply(TCGv dst
, TCGv src1
, TCGv src2
, int sign_ext
)
717 TCGv_i32 r_src1
, r_src2
;
718 TCGv_i64 r_temp
, r_temp2
;
720 r_src1
= tcg_temp_new_i32();
721 r_src2
= tcg_temp_new_i32();
723 tcg_gen_trunc_tl_i32(r_src1
, src1
);
724 tcg_gen_trunc_tl_i32(r_src2
, src2
);
726 r_temp
= tcg_temp_new_i64();
727 r_temp2
= tcg_temp_new_i64();
730 tcg_gen_ext_i32_i64(r_temp
, r_src2
);
731 tcg_gen_ext_i32_i64(r_temp2
, r_src1
);
733 tcg_gen_extu_i32_i64(r_temp
, r_src2
);
734 tcg_gen_extu_i32_i64(r_temp2
, r_src1
);
737 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
739 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
740 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
741 tcg_temp_free_i64(r_temp
);
742 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
744 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
746 tcg_temp_free_i64(r_temp2
);
748 tcg_temp_free_i32(r_src1
);
749 tcg_temp_free_i32(r_src2
);
752 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
754 /* zero-extend truncated operands before multiplication */
755 gen_op_multiply(dst
, src1
, src2
, 0);
758 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
760 /* sign-extend truncated operands before multiplication */
761 gen_op_multiply(dst
, src1
, src2
, 1);
764 #ifdef TARGET_SPARC64
765 static inline void gen_trap_ifdivzero_tl(TCGv divisor
)
770 l1
= gen_new_label();
771 tcg_gen_brcondi_tl(TCG_COND_NE
, divisor
, 0, l1
);
772 r_const
= tcg_const_i32(TT_DIV_ZERO
);
773 gen_helper_raise_exception(cpu_env
, r_const
);
774 tcg_temp_free_i32(r_const
);
778 static inline void gen_op_sdivx(TCGv dst
, TCGv src1
, TCGv src2
)
781 TCGv r_temp1
, r_temp2
;
783 l1
= gen_new_label();
784 l2
= gen_new_label();
785 r_temp1
= tcg_temp_local_new();
786 r_temp2
= tcg_temp_local_new();
787 tcg_gen_mov_tl(r_temp1
, src1
);
788 tcg_gen_mov_tl(r_temp2
, src2
);
789 gen_trap_ifdivzero_tl(r_temp2
);
790 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp1
, INT64_MIN
, l1
);
791 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp2
, -1, l1
);
792 tcg_gen_movi_i64(dst
, INT64_MIN
);
795 tcg_gen_div_i64(dst
, r_temp1
, r_temp2
);
797 tcg_temp_free(r_temp1
);
798 tcg_temp_free(r_temp2
);
803 static inline void gen_op_eval_ba(TCGv dst
)
805 tcg_gen_movi_tl(dst
, 1);
809 static inline void gen_op_eval_be(TCGv dst
, TCGv_i32 src
)
811 gen_mov_reg_Z(dst
, src
);
815 static inline void gen_op_eval_ble(TCGv dst
, TCGv_i32 src
)
817 gen_mov_reg_N(cpu_tmp0
, src
);
818 gen_mov_reg_V(dst
, src
);
819 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
820 gen_mov_reg_Z(cpu_tmp0
, src
);
821 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
825 static inline void gen_op_eval_bl(TCGv dst
, TCGv_i32 src
)
827 gen_mov_reg_V(cpu_tmp0
, src
);
828 gen_mov_reg_N(dst
, src
);
829 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
833 static inline void gen_op_eval_bleu(TCGv dst
, TCGv_i32 src
)
835 gen_mov_reg_Z(cpu_tmp0
, src
);
836 gen_mov_reg_C(dst
, src
);
837 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
841 static inline void gen_op_eval_bcs(TCGv dst
, TCGv_i32 src
)
843 gen_mov_reg_C(dst
, src
);
847 static inline void gen_op_eval_bvs(TCGv dst
, TCGv_i32 src
)
849 gen_mov_reg_V(dst
, src
);
853 static inline void gen_op_eval_bn(TCGv dst
)
855 tcg_gen_movi_tl(dst
, 0);
859 static inline void gen_op_eval_bneg(TCGv dst
, TCGv_i32 src
)
861 gen_mov_reg_N(dst
, src
);
865 static inline void gen_op_eval_bne(TCGv dst
, TCGv_i32 src
)
867 gen_mov_reg_Z(dst
, src
);
868 tcg_gen_xori_tl(dst
, dst
, 0x1);
872 static inline void gen_op_eval_bg(TCGv dst
, TCGv_i32 src
)
874 gen_mov_reg_N(cpu_tmp0
, src
);
875 gen_mov_reg_V(dst
, src
);
876 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
877 gen_mov_reg_Z(cpu_tmp0
, src
);
878 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
879 tcg_gen_xori_tl(dst
, dst
, 0x1);
883 static inline void gen_op_eval_bge(TCGv dst
, TCGv_i32 src
)
885 gen_mov_reg_V(cpu_tmp0
, src
);
886 gen_mov_reg_N(dst
, src
);
887 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
888 tcg_gen_xori_tl(dst
, dst
, 0x1);
892 static inline void gen_op_eval_bgu(TCGv dst
, TCGv_i32 src
)
894 gen_mov_reg_Z(cpu_tmp0
, src
);
895 gen_mov_reg_C(dst
, src
);
896 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
897 tcg_gen_xori_tl(dst
, dst
, 0x1);
901 static inline void gen_op_eval_bcc(TCGv dst
, TCGv_i32 src
)
903 gen_mov_reg_C(dst
, src
);
904 tcg_gen_xori_tl(dst
, dst
, 0x1);
908 static inline void gen_op_eval_bpos(TCGv dst
, TCGv_i32 src
)
910 gen_mov_reg_N(dst
, src
);
911 tcg_gen_xori_tl(dst
, dst
, 0x1);
915 static inline void gen_op_eval_bvc(TCGv dst
, TCGv_i32 src
)
917 gen_mov_reg_V(dst
, src
);
918 tcg_gen_xori_tl(dst
, dst
, 0x1);
/*
   FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
928 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
929 unsigned int fcc_offset
)
931 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
932 tcg_gen_andi_tl(reg
, reg
, 0x1);
935 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
936 unsigned int fcc_offset
)
938 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
939 tcg_gen_andi_tl(reg
, reg
, 0x1);
943 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
944 unsigned int fcc_offset
)
946 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
947 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
948 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
951 // 1 or 2: FCC0 ^ FCC1
952 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
953 unsigned int fcc_offset
)
955 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
956 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
957 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
961 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
962 unsigned int fcc_offset
)
964 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
968 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
969 unsigned int fcc_offset
)
971 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
972 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
973 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
974 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
978 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
979 unsigned int fcc_offset
)
981 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
985 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
986 unsigned int fcc_offset
)
988 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
989 tcg_gen_xori_tl(dst
, dst
, 0x1);
990 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
991 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
995 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
996 unsigned int fcc_offset
)
998 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
999 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1000 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1003 // 0: !(FCC0 | FCC1)
1004 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
1005 unsigned int fcc_offset
)
1007 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1008 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1009 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
1010 tcg_gen_xori_tl(dst
, dst
, 0x1);
1013 // 0 or 3: !(FCC0 ^ FCC1)
1014 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
1015 unsigned int fcc_offset
)
1017 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1018 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1019 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1020 tcg_gen_xori_tl(dst
, dst
, 0x1);
1024 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
1025 unsigned int fcc_offset
)
1027 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1028 tcg_gen_xori_tl(dst
, dst
, 0x1);
1031 // !1: !(FCC0 & !FCC1)
1032 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
1033 unsigned int fcc_offset
)
1035 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1036 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1037 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1038 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1039 tcg_gen_xori_tl(dst
, dst
, 0x1);
1043 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
1044 unsigned int fcc_offset
)
1046 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1047 tcg_gen_xori_tl(dst
, dst
, 0x1);
1050 // !2: !(!FCC0 & FCC1)
1051 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
1052 unsigned int fcc_offset
)
1054 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1055 tcg_gen_xori_tl(dst
, dst
, 0x1);
1056 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1057 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1058 tcg_gen_xori_tl(dst
, dst
, 0x1);
1061 // !3: !(FCC0 & FCC1)
1062 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1063 unsigned int fcc_offset
)
1065 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1066 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1067 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1068 tcg_gen_xori_tl(dst
, dst
, 0x1);
1071 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1072 target_ulong pc2
, TCGv r_cond
)
1076 l1
= gen_new_label();
1078 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1080 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1083 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1086 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1087 target_ulong pc2
, TCGv r_cond
)
1091 l1
= gen_new_label();
1093 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1095 gen_goto_tb(dc
, 0, pc2
, pc1
);
1098 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1101 static inline void gen_generic_branch(DisasContext
*dc
)
1105 l1
= gen_new_label();
1106 l2
= gen_new_label();
1108 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_cond
, 0, l1
);
1110 tcg_gen_movi_tl(cpu_npc
, dc
->jump_pc
[0]);
1114 tcg_gen_movi_tl(cpu_npc
, dc
->jump_pc
[1]);
1118 /* call this function before using the condition register as it may
1119 have been set for a jump */
1120 static inline void flush_cond(DisasContext
*dc
)
1122 if (dc
->npc
== JUMP_PC
) {
1123 gen_generic_branch(dc
);
1124 dc
->npc
= DYNAMIC_PC
;
1128 static inline void save_npc(DisasContext
*dc
)
1130 if (dc
->npc
== JUMP_PC
) {
1131 gen_generic_branch(dc
);
1132 dc
->npc
= DYNAMIC_PC
;
1133 } else if (dc
->npc
!= DYNAMIC_PC
) {
1134 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
1138 static inline void save_state(DisasContext
*dc
)
1140 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
1141 /* flush pending conditional evaluations before exposing cpu state */
1142 if (dc
->cc_op
!= CC_OP_FLAGS
) {
1143 dc
->cc_op
= CC_OP_FLAGS
;
1144 gen_helper_compute_psr(cpu_env
);
1149 static inline void gen_mov_pc_npc(DisasContext
*dc
)
1151 if (dc
->npc
== JUMP_PC
) {
1152 gen_generic_branch(dc
);
1153 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1154 dc
->pc
= DYNAMIC_PC
;
1155 } else if (dc
->npc
== DYNAMIC_PC
) {
1156 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1157 dc
->pc
= DYNAMIC_PC
;
1163 static inline void gen_op_next_insn(void)
1165 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1166 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
1169 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
,
1174 #ifdef TARGET_SPARC64
1182 switch (dc
->cc_op
) {
1186 gen_helper_compute_psr(cpu_env
);
1187 dc
->cc_op
= CC_OP_FLAGS
;
1192 gen_op_eval_bn(r_dst
);
1195 gen_op_eval_be(r_dst
, r_src
);
1198 gen_op_eval_ble(r_dst
, r_src
);
1201 gen_op_eval_bl(r_dst
, r_src
);
1204 gen_op_eval_bleu(r_dst
, r_src
);
1207 gen_op_eval_bcs(r_dst
, r_src
);
1210 gen_op_eval_bneg(r_dst
, r_src
);
1213 gen_op_eval_bvs(r_dst
, r_src
);
1216 gen_op_eval_ba(r_dst
);
1219 gen_op_eval_bne(r_dst
, r_src
);
1222 gen_op_eval_bg(r_dst
, r_src
);
1225 gen_op_eval_bge(r_dst
, r_src
);
1228 gen_op_eval_bgu(r_dst
, r_src
);
1231 gen_op_eval_bcc(r_dst
, r_src
);
1234 gen_op_eval_bpos(r_dst
, r_src
);
1237 gen_op_eval_bvc(r_dst
, r_src
);
1242 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1244 unsigned int offset
;
1264 gen_op_eval_bn(r_dst
);
1267 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1270 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1273 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1276 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1279 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1282 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1285 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1288 gen_op_eval_ba(r_dst
);
1291 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1294 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1297 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1300 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1303 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1306 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1309 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1314 #ifdef TARGET_SPARC64
1316 static const int gen_tcg_cond_reg
[8] = {
1327 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1331 l1
= gen_new_label();
1332 tcg_gen_movi_tl(r_dst
, 0);
1333 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1334 tcg_gen_movi_tl(r_dst
, 1);
1339 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
)
1341 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1342 target_ulong target
= dc
->pc
+ offset
;
1344 #ifdef TARGET_SPARC64
1345 if (unlikely(AM_CHECK(dc
))) {
1346 target
&= 0xffffffffULL
;
1350 /* unconditional not taken */
1352 dc
->pc
= dc
->npc
+ 4;
1353 dc
->npc
= dc
->pc
+ 4;
1356 dc
->npc
= dc
->pc
+ 4;
1358 } else if (cond
== 0x8) {
1359 /* unconditional taken */
1362 dc
->npc
= dc
->pc
+ 4;
1366 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1370 gen_cond(cpu_cond
, cc
, cond
, dc
);
1372 gen_branch_a(dc
, target
, dc
->npc
, cpu_cond
);
1376 dc
->jump_pc
[0] = target
;
1377 if (unlikely(dc
->npc
== DYNAMIC_PC
)) {
1378 dc
->jump_pc
[1] = DYNAMIC_PC
;
1379 tcg_gen_addi_tl(cpu_pc
, cpu_npc
, 4);
1381 dc
->jump_pc
[1] = dc
->npc
+ 4;
1388 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
)
1390 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1391 target_ulong target
= dc
->pc
+ offset
;
1393 #ifdef TARGET_SPARC64
1394 if (unlikely(AM_CHECK(dc
))) {
1395 target
&= 0xffffffffULL
;
1399 /* unconditional not taken */
1401 dc
->pc
= dc
->npc
+ 4;
1402 dc
->npc
= dc
->pc
+ 4;
1405 dc
->npc
= dc
->pc
+ 4;
1407 } else if (cond
== 0x8) {
1408 /* unconditional taken */
1411 dc
->npc
= dc
->pc
+ 4;
1415 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1419 gen_fcond(cpu_cond
, cc
, cond
);
1421 gen_branch_a(dc
, target
, dc
->npc
, cpu_cond
);
1425 dc
->jump_pc
[0] = target
;
1426 if (unlikely(dc
->npc
== DYNAMIC_PC
)) {
1427 dc
->jump_pc
[1] = DYNAMIC_PC
;
1428 tcg_gen_addi_tl(cpu_pc
, cpu_npc
, 4);
1430 dc
->jump_pc
[1] = dc
->npc
+ 4;
1437 #ifdef TARGET_SPARC64
1438 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1441 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1442 target_ulong target
= dc
->pc
+ offset
;
1444 if (unlikely(AM_CHECK(dc
))) {
1445 target
&= 0xffffffffULL
;
1448 gen_cond_reg(cpu_cond
, cond
, r_reg
);
1450 gen_branch_a(dc
, target
, dc
->npc
, cpu_cond
);
1454 dc
->jump_pc
[0] = target
;
1455 if (unlikely(dc
->npc
== DYNAMIC_PC
)) {
1456 dc
->jump_pc
[1] = DYNAMIC_PC
;
1457 tcg_gen_addi_tl(cpu_pc
, cpu_npc
, 4);
1459 dc
->jump_pc
[1] = dc
->npc
+ 4;
1465 static inline void gen_op_fcmps(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1469 gen_helper_fcmps(cpu_env
, r_rs1
, r_rs2
);
1472 gen_helper_fcmps_fcc1(cpu_env
, r_rs1
, r_rs2
);
1475 gen_helper_fcmps_fcc2(cpu_env
, r_rs1
, r_rs2
);
1478 gen_helper_fcmps_fcc3(cpu_env
, r_rs1
, r_rs2
);
1483 static inline void gen_op_fcmpd(int fccno
, TCGv_i64 r_rs1
, TCGv_i64 r_rs2
)
1487 gen_helper_fcmpd(cpu_env
, r_rs1
, r_rs2
);
1490 gen_helper_fcmpd_fcc1(cpu_env
, r_rs1
, r_rs2
);
1493 gen_helper_fcmpd_fcc2(cpu_env
, r_rs1
, r_rs2
);
1496 gen_helper_fcmpd_fcc3(cpu_env
, r_rs1
, r_rs2
);
1501 static inline void gen_op_fcmpq(int fccno
)
1505 gen_helper_fcmpq(cpu_env
);
1508 gen_helper_fcmpq_fcc1(cpu_env
);
1511 gen_helper_fcmpq_fcc2(cpu_env
);
1514 gen_helper_fcmpq_fcc3(cpu_env
);
1519 static inline void gen_op_fcmpes(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1523 gen_helper_fcmpes(cpu_env
, r_rs1
, r_rs2
);
1526 gen_helper_fcmpes_fcc1(cpu_env
, r_rs1
, r_rs2
);
1529 gen_helper_fcmpes_fcc2(cpu_env
, r_rs1
, r_rs2
);
1532 gen_helper_fcmpes_fcc3(cpu_env
, r_rs1
, r_rs2
);
1537 static inline void gen_op_fcmped(int fccno
, TCGv_i64 r_rs1
, TCGv_i64 r_rs2
)
1541 gen_helper_fcmped(cpu_env
, r_rs1
, r_rs2
);
1544 gen_helper_fcmped_fcc1(cpu_env
, r_rs1
, r_rs2
);
1547 gen_helper_fcmped_fcc2(cpu_env
, r_rs1
, r_rs2
);
1550 gen_helper_fcmped_fcc3(cpu_env
, r_rs1
, r_rs2
);
1555 static inline void gen_op_fcmpeq(int fccno
)
1559 gen_helper_fcmpeq(cpu_env
);
1562 gen_helper_fcmpeq_fcc1(cpu_env
);
1565 gen_helper_fcmpeq_fcc2(cpu_env
);
1568 gen_helper_fcmpeq_fcc3(cpu_env
);
1575 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1577 gen_helper_fcmps(cpu_env
, r_rs1
, r_rs2
);
1580 static inline void gen_op_fcmpd(int fccno
, TCGv_i64 r_rs1
, TCGv_i64 r_rs2
)
1582 gen_helper_fcmpd(cpu_env
, r_rs1
, r_rs2
);
1585 static inline void gen_op_fcmpq(int fccno
)
1587 gen_helper_fcmpq(cpu_env
);
1590 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1592 gen_helper_fcmpes(cpu_env
, r_rs1
, r_rs2
);
1595 static inline void gen_op_fcmped(int fccno
, TCGv_i64 r_rs1
, TCGv_i64 r_rs2
)
1597 gen_helper_fcmped(cpu_env
, r_rs1
, r_rs2
);
1600 static inline void gen_op_fcmpeq(int fccno
)
1602 gen_helper_fcmpeq(cpu_env
);
1606 static inline void gen_op_fpexception_im(int fsr_flags
)
1610 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1611 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1612 r_const
= tcg_const_i32(TT_FP_EXCP
);
1613 gen_helper_raise_exception(cpu_env
, r_const
);
1614 tcg_temp_free_i32(r_const
);
1617 static int gen_trap_ifnofpu(DisasContext
*dc
)
1619 #if !defined(CONFIG_USER_ONLY)
1620 if (!dc
->fpu_enabled
) {
1624 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1625 gen_helper_raise_exception(cpu_env
, r_const
);
1626 tcg_temp_free_i32(r_const
);
1634 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1636 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
1639 static inline void gen_fop_FF(DisasContext
*dc
, int rd
, int rs
,
1640 void (*gen
)(TCGv_i32
, TCGv_ptr
, TCGv_i32
))
1644 src
= gen_load_fpr_F(dc
, rs
);
1645 dst
= gen_dest_fpr_F();
1647 gen(dst
, cpu_env
, src
);
1649 gen_store_fpr_F(dc
, rd
, dst
);
1652 static inline void gen_ne_fop_FF(DisasContext
*dc
, int rd
, int rs
,
1653 void (*gen
)(TCGv_i32
, TCGv_i32
))
1657 src
= gen_load_fpr_F(dc
, rs
);
1658 dst
= gen_dest_fpr_F();
1662 gen_store_fpr_F(dc
, rd
, dst
);
1665 static inline void gen_fop_FFF(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1666 void (*gen
)(TCGv_i32
, TCGv_ptr
, TCGv_i32
, TCGv_i32
))
1668 TCGv_i32 dst
, src1
, src2
;
1670 src1
= gen_load_fpr_F(dc
, rs1
);
1671 src2
= gen_load_fpr_F(dc
, rs2
);
1672 dst
= gen_dest_fpr_F();
1674 gen(dst
, cpu_env
, src1
, src2
);
1676 gen_store_fpr_F(dc
, rd
, dst
);
1679 #ifdef TARGET_SPARC64
1680 static inline void gen_ne_fop_FFF(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1681 void (*gen
)(TCGv_i32
, TCGv_i32
, TCGv_i32
))
1683 TCGv_i32 dst
, src1
, src2
;
1685 src1
= gen_load_fpr_F(dc
, rs1
);
1686 src2
= gen_load_fpr_F(dc
, rs2
);
1687 dst
= gen_dest_fpr_F();
1689 gen(dst
, src1
, src2
);
1691 gen_store_fpr_F(dc
, rd
, dst
);
1695 static inline void gen_fop_DD(DisasContext
*dc
, int rd
, int rs
,
1696 void (*gen
)(TCGv_i64
, TCGv_ptr
, TCGv_i64
))
1700 src
= gen_load_fpr_D(dc
, rs
);
1701 dst
= gen_dest_fpr_D();
1703 gen(dst
, cpu_env
, src
);
1705 gen_store_fpr_D(dc
, rd
, dst
);
1708 #ifdef TARGET_SPARC64
1709 static inline void gen_ne_fop_DD(DisasContext
*dc
, int rd
, int rs
,
1710 void (*gen
)(TCGv_i64
, TCGv_i64
))
1714 src
= gen_load_fpr_D(dc
, rs
);
1715 dst
= gen_dest_fpr_D();
1719 gen_store_fpr_D(dc
, rd
, dst
);
1723 static inline void gen_fop_DDD(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1724 void (*gen
)(TCGv_i64
, TCGv_ptr
, TCGv_i64
, TCGv_i64
))
1726 TCGv_i64 dst
, src1
, src2
;
1728 src1
= gen_load_fpr_D(dc
, rs1
);
1729 src2
= gen_load_fpr_D(dc
, rs2
);
1730 dst
= gen_dest_fpr_D();
1732 gen(dst
, cpu_env
, src1
, src2
);
1734 gen_store_fpr_D(dc
, rd
, dst
);
1737 #ifdef TARGET_SPARC64
1738 static inline void gen_ne_fop_DDD(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1739 void (*gen
)(TCGv_i64
, TCGv_i64
, TCGv_i64
))
1741 TCGv_i64 dst
, src1
, src2
;
1743 src1
= gen_load_fpr_D(dc
, rs1
);
1744 src2
= gen_load_fpr_D(dc
, rs2
);
1745 dst
= gen_dest_fpr_D();
1747 gen(dst
, src1
, src2
);
1749 gen_store_fpr_D(dc
, rd
, dst
);
1752 static inline void gen_gsr_fop_DDD(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1753 void (*gen
)(TCGv_i64
, TCGv_i64
, TCGv_i64
, TCGv_i64
))
1755 TCGv_i64 dst
, src1
, src2
;
1757 src1
= gen_load_fpr_D(dc
, rs1
);
1758 src2
= gen_load_fpr_D(dc
, rs2
);
1759 dst
= gen_dest_fpr_D();
1761 gen(dst
, cpu_gsr
, src1
, src2
);
1763 gen_store_fpr_D(dc
, rd
, dst
);
1766 static inline void gen_ne_fop_DDDD(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1767 void (*gen
)(TCGv_i64
, TCGv_i64
, TCGv_i64
, TCGv_i64
))
1769 TCGv_i64 dst
, src0
, src1
, src2
;
1771 src1
= gen_load_fpr_D(dc
, rs1
);
1772 src2
= gen_load_fpr_D(dc
, rs2
);
1773 src0
= gen_load_fpr_D(dc
, rd
);
1774 dst
= gen_dest_fpr_D();
1776 gen(dst
, src0
, src1
, src2
);
1778 gen_store_fpr_D(dc
, rd
, dst
);
1782 static inline void gen_fop_QQ(DisasContext
*dc
, int rd
, int rs
,
1783 void (*gen
)(TCGv_ptr
))
1785 gen_op_load_fpr_QT1(QFPREG(rs
));
1789 gen_op_store_QT0_fpr(QFPREG(rd
));
1790 gen_update_fprs_dirty(QFPREG(rd
));
1793 #ifdef TARGET_SPARC64
1794 static inline void gen_ne_fop_QQ(DisasContext
*dc
, int rd
, int rs
,
1795 void (*gen
)(TCGv_ptr
))
1797 gen_op_load_fpr_QT1(QFPREG(rs
));
1801 gen_op_store_QT0_fpr(QFPREG(rd
));
1802 gen_update_fprs_dirty(QFPREG(rd
));
1806 static inline void gen_fop_QQQ(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1807 void (*gen
)(TCGv_ptr
))
1809 gen_op_load_fpr_QT0(QFPREG(rs1
));
1810 gen_op_load_fpr_QT1(QFPREG(rs2
));
1814 gen_op_store_QT0_fpr(QFPREG(rd
));
1815 gen_update_fprs_dirty(QFPREG(rd
));
1818 static inline void gen_fop_DFF(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1819 void (*gen
)(TCGv_i64
, TCGv_ptr
, TCGv_i32
, TCGv_i32
))
1822 TCGv_i32 src1
, src2
;
1824 src1
= gen_load_fpr_F(dc
, rs1
);
1825 src2
= gen_load_fpr_F(dc
, rs2
);
1826 dst
= gen_dest_fpr_D();
1828 gen(dst
, cpu_env
, src1
, src2
);
1830 gen_store_fpr_D(dc
, rd
, dst
);
1833 static inline void gen_fop_QDD(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1834 void (*gen
)(TCGv_ptr
, TCGv_i64
, TCGv_i64
))
1836 TCGv_i64 src1
, src2
;
1838 src1
= gen_load_fpr_D(dc
, rs1
);
1839 src2
= gen_load_fpr_D(dc
, rs2
);
1841 gen(cpu_env
, src1
, src2
);
1843 gen_op_store_QT0_fpr(QFPREG(rd
));
1844 gen_update_fprs_dirty(QFPREG(rd
));
1847 #ifdef TARGET_SPARC64
1848 static inline void gen_fop_DF(DisasContext
*dc
, int rd
, int rs
,
1849 void (*gen
)(TCGv_i64
, TCGv_ptr
, TCGv_i32
))
1854 src
= gen_load_fpr_F(dc
, rs
);
1855 dst
= gen_dest_fpr_D();
1857 gen(dst
, cpu_env
, src
);
1859 gen_store_fpr_D(dc
, rd
, dst
);
1863 static inline void gen_ne_fop_DF(DisasContext
*dc
, int rd
, int rs
,
1864 void (*gen
)(TCGv_i64
, TCGv_ptr
, TCGv_i32
))
1869 src
= gen_load_fpr_F(dc
, rs
);
1870 dst
= gen_dest_fpr_D();
1872 gen(dst
, cpu_env
, src
);
1874 gen_store_fpr_D(dc
, rd
, dst
);
1877 static inline void gen_fop_FD(DisasContext
*dc
, int rd
, int rs
,
1878 void (*gen
)(TCGv_i32
, TCGv_ptr
, TCGv_i64
))
1883 src
= gen_load_fpr_D(dc
, rs
);
1884 dst
= gen_dest_fpr_F();
1886 gen(dst
, cpu_env
, src
);
1888 gen_store_fpr_F(dc
, rd
, dst
);
1891 static inline void gen_fop_FQ(DisasContext
*dc
, int rd
, int rs
,
1892 void (*gen
)(TCGv_i32
, TCGv_ptr
))
1896 gen_op_load_fpr_QT1(QFPREG(rs
));
1897 dst
= gen_dest_fpr_F();
1901 gen_store_fpr_F(dc
, rd
, dst
);
1904 static inline void gen_fop_DQ(DisasContext
*dc
, int rd
, int rs
,
1905 void (*gen
)(TCGv_i64
, TCGv_ptr
))
1909 gen_op_load_fpr_QT1(QFPREG(rs
));
1910 dst
= gen_dest_fpr_D();
1914 gen_store_fpr_D(dc
, rd
, dst
);
1917 static inline void gen_ne_fop_QF(DisasContext
*dc
, int rd
, int rs
,
1918 void (*gen
)(TCGv_ptr
, TCGv_i32
))
1922 src
= gen_load_fpr_F(dc
, rs
);
1926 gen_op_store_QT0_fpr(QFPREG(rd
));
1927 gen_update_fprs_dirty(QFPREG(rd
));
1930 static inline void gen_ne_fop_QD(DisasContext
*dc
, int rd
, int rs
,
1931 void (*gen
)(TCGv_ptr
, TCGv_i64
))
1935 src
= gen_load_fpr_D(dc
, rs
);
1939 gen_op_store_QT0_fpr(QFPREG(rd
));
1940 gen_update_fprs_dirty(QFPREG(rd
));
1944 #ifdef TARGET_SPARC64
1945 static inline TCGv_i32
gen_get_asi(int insn
, TCGv r_addr
)
1951 r_asi
= tcg_temp_new_i32();
1952 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1954 asi
= GET_FIELD(insn
, 19, 26);
1955 r_asi
= tcg_const_i32(asi
);
1960 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1963 TCGv_i32 r_asi
, r_size
, r_sign
;
1965 r_asi
= gen_get_asi(insn
, addr
);
1966 r_size
= tcg_const_i32(size
);
1967 r_sign
= tcg_const_i32(sign
);
1968 gen_helper_ld_asi(dst
, cpu_env
, addr
, r_asi
, r_size
, r_sign
);
1969 tcg_temp_free_i32(r_sign
);
1970 tcg_temp_free_i32(r_size
);
1971 tcg_temp_free_i32(r_asi
);
1974 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1976 TCGv_i32 r_asi
, r_size
;
1978 r_asi
= gen_get_asi(insn
, addr
);
1979 r_size
= tcg_const_i32(size
);
1980 gen_helper_st_asi(cpu_env
, addr
, src
, r_asi
, r_size
);
1981 tcg_temp_free_i32(r_size
);
1982 tcg_temp_free_i32(r_asi
);
1985 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1987 TCGv_i32 r_asi
, r_size
, r_rd
;
1989 r_asi
= gen_get_asi(insn
, addr
);
1990 r_size
= tcg_const_i32(size
);
1991 r_rd
= tcg_const_i32(rd
);
1992 gen_helper_ldf_asi(cpu_env
, addr
, r_asi
, r_size
, r_rd
);
1993 tcg_temp_free_i32(r_rd
);
1994 tcg_temp_free_i32(r_size
);
1995 tcg_temp_free_i32(r_asi
);
1998 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
2000 TCGv_i32 r_asi
, r_size
, r_rd
;
2002 r_asi
= gen_get_asi(insn
, addr
);
2003 r_size
= tcg_const_i32(size
);
2004 r_rd
= tcg_const_i32(rd
);
2005 gen_helper_stf_asi(cpu_env
, addr
, r_asi
, r_size
, r_rd
);
2006 tcg_temp_free_i32(r_rd
);
2007 tcg_temp_free_i32(r_size
);
2008 tcg_temp_free_i32(r_asi
);
2011 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
2013 TCGv_i32 r_asi
, r_size
, r_sign
;
2015 r_asi
= gen_get_asi(insn
, addr
);
2016 r_size
= tcg_const_i32(4);
2017 r_sign
= tcg_const_i32(0);
2018 gen_helper_ld_asi(cpu_tmp64
, cpu_env
, addr
, r_asi
, r_size
, r_sign
);
2019 tcg_temp_free_i32(r_sign
);
2020 gen_helper_st_asi(cpu_env
, addr
, dst
, r_asi
, r_size
);
2021 tcg_temp_free_i32(r_size
);
2022 tcg_temp_free_i32(r_asi
);
2023 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
2026 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
2028 TCGv_i32 r_asi
, r_rd
;
2030 r_asi
= gen_get_asi(insn
, addr
);
2031 r_rd
= tcg_const_i32(rd
);
2032 gen_helper_ldda_asi(cpu_env
, addr
, r_asi
, r_rd
);
2033 tcg_temp_free_i32(r_rd
);
2034 tcg_temp_free_i32(r_asi
);
2037 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
2039 TCGv_i32 r_asi
, r_size
;
2041 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
2042 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
2043 r_asi
= gen_get_asi(insn
, addr
);
2044 r_size
= tcg_const_i32(8);
2045 gen_helper_st_asi(cpu_env
, addr
, cpu_tmp64
, r_asi
, r_size
);
2046 tcg_temp_free_i32(r_size
);
2047 tcg_temp_free_i32(r_asi
);
2050 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
2056 r_val1
= tcg_temp_new();
2057 gen_movl_reg_TN(rd
, r_val1
);
2058 r_asi
= gen_get_asi(insn
, addr
);
2059 gen_helper_cas_asi(dst
, cpu_env
, addr
, r_val1
, val2
, r_asi
);
2060 tcg_temp_free_i32(r_asi
);
2061 tcg_temp_free(r_val1
);
2064 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
2069 gen_movl_reg_TN(rd
, cpu_tmp64
);
2070 r_asi
= gen_get_asi(insn
, addr
);
2071 gen_helper_casx_asi(dst
, cpu_env
, addr
, cpu_tmp64
, val2
, r_asi
);
2072 tcg_temp_free_i32(r_asi
);
2075 #elif !defined(CONFIG_USER_ONLY)
2077 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
2080 TCGv_i32 r_asi
, r_size
, r_sign
;
2082 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
2083 r_size
= tcg_const_i32(size
);
2084 r_sign
= tcg_const_i32(sign
);
2085 gen_helper_ld_asi(cpu_tmp64
, cpu_env
, addr
, r_asi
, r_size
, r_sign
);
2086 tcg_temp_free(r_sign
);
2087 tcg_temp_free(r_size
);
2088 tcg_temp_free(r_asi
);
2089 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
2092 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
2094 TCGv_i32 r_asi
, r_size
;
2096 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
2097 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
2098 r_size
= tcg_const_i32(size
);
2099 gen_helper_st_asi(cpu_env
, addr
, cpu_tmp64
, r_asi
, r_size
);
2100 tcg_temp_free(r_size
);
2101 tcg_temp_free(r_asi
);
2104 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
2106 TCGv_i32 r_asi
, r_size
, r_sign
;
2109 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
2110 r_size
= tcg_const_i32(4);
2111 r_sign
= tcg_const_i32(0);
2112 gen_helper_ld_asi(cpu_tmp64
, cpu_env
, addr
, r_asi
, r_size
, r_sign
);
2113 tcg_temp_free(r_sign
);
2114 r_val
= tcg_temp_new_i64();
2115 tcg_gen_extu_tl_i64(r_val
, dst
);
2116 gen_helper_st_asi(cpu_env
, addr
, r_val
, r_asi
, r_size
);
2117 tcg_temp_free_i64(r_val
);
2118 tcg_temp_free(r_size
);
2119 tcg_temp_free(r_asi
);
2120 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
2123 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
2125 TCGv_i32 r_asi
, r_size
, r_sign
;
2127 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
2128 r_size
= tcg_const_i32(8);
2129 r_sign
= tcg_const_i32(0);
2130 gen_helper_ld_asi(cpu_tmp64
, cpu_env
, addr
, r_asi
, r_size
, r_sign
);
2131 tcg_temp_free(r_sign
);
2132 tcg_temp_free(r_size
);
2133 tcg_temp_free(r_asi
);
2134 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
2135 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
2136 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
2137 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
2138 gen_movl_TN_reg(rd
, hi
);
2141 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
2143 TCGv_i32 r_asi
, r_size
;
2145 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
2146 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
2147 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
2148 r_size
= tcg_const_i32(8);
2149 gen_helper_st_asi(cpu_env
, addr
, cpu_tmp64
, r_asi
, r_size
);
2150 tcg_temp_free(r_size
);
2151 tcg_temp_free(r_asi
);
2155 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2156 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
2159 TCGv_i32 r_asi
, r_size
;
2161 gen_ld_asi(dst
, addr
, insn
, 1, 0);
2163 r_val
= tcg_const_i64(0xffULL
);
2164 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
2165 r_size
= tcg_const_i32(1);
2166 gen_helper_st_asi(cpu_env
, addr
, r_val
, r_asi
, r_size
);
2167 tcg_temp_free_i32(r_size
);
2168 tcg_temp_free_i32(r_asi
);
2169 tcg_temp_free_i64(r_val
);
2173 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
2178 rs1
= GET_FIELD(insn
, 13, 17);
2180 tcg_gen_movi_tl(def
, 0);
2181 } else if (rs1
< 8) {
2182 r_rs1
= cpu_gregs
[rs1
];
2184 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
2189 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
2193 if (IS_IMM
) { /* immediate */
2194 target_long simm
= GET_FIELDs(insn
, 19, 31);
2195 tcg_gen_movi_tl(def
, simm
);
2196 } else { /* register */
2197 unsigned int rs2
= GET_FIELD(insn
, 27, 31);
2199 tcg_gen_movi_tl(def
, 0);
2200 } else if (rs2
< 8) {
2201 r_rs2
= cpu_gregs
[rs2
];
2203 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
2209 #ifdef TARGET_SPARC64
2210 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr
, TCGv_ptr cpu_env
)
2212 TCGv_i32 r_tl
= tcg_temp_new_i32();
2214 /* load env->tl into r_tl */
2215 tcg_gen_ld_i32(r_tl
, cpu_env
, offsetof(CPUSPARCState
, tl
));
2217 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2218 tcg_gen_andi_i32(r_tl
, r_tl
, MAXTL_MASK
);
2220 /* calculate offset to current trap state from env->ts, reuse r_tl */
2221 tcg_gen_muli_i32(r_tl
, r_tl
, sizeof (trap_state
));
2222 tcg_gen_addi_ptr(r_tsptr
, cpu_env
, offsetof(CPUSPARCState
, ts
));
2224 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2226 TCGv_ptr r_tl_tmp
= tcg_temp_new_ptr();
2227 tcg_gen_ext_i32_ptr(r_tl_tmp
, r_tl
);
2228 tcg_gen_add_ptr(r_tsptr
, r_tsptr
, r_tl_tmp
);
2229 tcg_temp_free_ptr(r_tl_tmp
);
2232 tcg_temp_free_i32(r_tl
);
2235 static void gen_edge(DisasContext
*dc
, TCGv dst
, TCGv s1
, TCGv s2
,
2236 int width
, bool cc
, bool left
)
2238 TCGv lo1
, lo2
, t1
, t2
;
2239 uint64_t amask
, tabl
, tabr
;
2240 int shift
, imask
, omask
;
2243 tcg_gen_mov_tl(cpu_cc_src
, s1
);
2244 tcg_gen_mov_tl(cpu_cc_src2
, s2
);
2245 tcg_gen_sub_tl(cpu_cc_dst
, s1
, s2
);
2246 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
2247 dc
->cc_op
= CC_OP_SUB
;
2250 /* Theory of operation: there are two tables, left and right (not to
2251 be confused with the left and right versions of the opcode). These
2252 are indexed by the low 3 bits of the inputs. To make things "easy",
2253 these tables are loaded into two constants, TABL and TABR below.
2254 The operation index = (input & imask) << shift calculates the index
2255 into the constant, while val = (table >> index) & omask calculates
2256 the value we're looking for. */
2263 tabl
= 0x80c0e0f0f8fcfeffULL
;
2264 tabr
= 0xff7f3f1f0f070301ULL
;
2266 tabl
= 0x0103070f1f3f7fffULL
;
2267 tabr
= 0xfffefcf8f0e0c080ULL
;
2287 tabl
= (2 << 2) | 3;
2288 tabr
= (3 << 2) | 1;
2290 tabl
= (1 << 2) | 3;
2291 tabr
= (3 << 2) | 2;
2298 lo1
= tcg_temp_new();
2299 lo2
= tcg_temp_new();
2300 tcg_gen_andi_tl(lo1
, s1
, imask
);
2301 tcg_gen_andi_tl(lo2
, s2
, imask
);
2302 tcg_gen_shli_tl(lo1
, lo1
, shift
);
2303 tcg_gen_shli_tl(lo2
, lo2
, shift
);
2305 t1
= tcg_const_tl(tabl
);
2306 t2
= tcg_const_tl(tabr
);
2307 tcg_gen_shr_tl(lo1
, t1
, lo1
);
2308 tcg_gen_shr_tl(lo2
, t2
, lo2
);
2309 tcg_gen_andi_tl(dst
, lo1
, omask
);
2310 tcg_gen_andi_tl(lo2
, lo2
, omask
);
2314 amask
&= 0xffffffffULL
;
2316 tcg_gen_andi_tl(s1
, s1
, amask
);
2317 tcg_gen_andi_tl(s2
, s2
, amask
);
2319 /* We want to compute
2320 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2321 We've already done dst = lo1, so this reduces to
2322 dst &= (s1 == s2 ? -1 : lo2)
2327 tcg_gen_setcond_tl(TCG_COND_EQ
, t1
, s1
, s2
);
2328 tcg_gen_neg_tl(t1
, t1
);
2329 tcg_gen_or_tl(lo2
, lo2
, t1
);
2330 tcg_gen_and_tl(dst
, dst
, lo2
);
2338 static void gen_alignaddr(TCGv dst
, TCGv s1
, TCGv s2
, bool left
)
2340 TCGv tmp
= tcg_temp_new();
2342 tcg_gen_add_tl(tmp
, s1
, s2
);
2343 tcg_gen_andi_tl(dst
, tmp
, -8);
2345 tcg_gen_neg_tl(tmp
, tmp
);
2347 tcg_gen_deposit_tl(cpu_gsr
, cpu_gsr
, tmp
, 0, 3);
2352 static void gen_faligndata(TCGv dst
, TCGv gsr
, TCGv s1
, TCGv s2
)
2356 t1
= tcg_temp_new();
2357 t2
= tcg_temp_new();
2358 shift
= tcg_temp_new();
2360 tcg_gen_andi_tl(shift
, gsr
, 7);
2361 tcg_gen_shli_tl(shift
, shift
, 3);
2362 tcg_gen_shl_tl(t1
, s1
, shift
);
2364 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2365 shift of (up to 63) followed by a constant shift of 1. */
2366 tcg_gen_xori_tl(shift
, shift
, 63);
2367 tcg_gen_shr_tl(t2
, s2
, shift
);
2368 tcg_gen_shri_tl(t2
, t2
, 1);
2370 tcg_gen_or_tl(dst
, t1
, t2
);
2374 tcg_temp_free(shift
);
2378 #define CHECK_IU_FEATURE(dc, FEATURE) \
2379 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2381 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2382 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2385 /* before an instruction, dc->pc must be static */
2386 static void disas_sparc_insn(DisasContext
* dc
, unsigned int insn
)
2388 unsigned int opc
, rs1
, rs2
, rd
;
2389 TCGv cpu_src1
, cpu_src2
, cpu_tmp1
, cpu_tmp2
;
2390 TCGv_i32 cpu_src1_32
, cpu_src2_32
, cpu_dst_32
;
2391 TCGv_i64 cpu_src1_64
, cpu_src2_64
, cpu_dst_64
;
2394 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
| CPU_LOG_TB_OP_OPT
))) {
2395 tcg_gen_debug_insn_start(dc
->pc
);
2398 opc
= GET_FIELD(insn
, 0, 1);
2400 rd
= GET_FIELD(insn
, 2, 6);
2402 cpu_tmp1
= cpu_src1
= tcg_temp_new();
2403 cpu_tmp2
= cpu_src2
= tcg_temp_new();
2406 case 0: /* branches/sethi */
2408 unsigned int xop
= GET_FIELD(insn
, 7, 9);
2411 #ifdef TARGET_SPARC64
2412 case 0x1: /* V9 BPcc */
2416 target
= GET_FIELD_SP(insn
, 0, 18);
2417 target
= sign_extend(target
, 19);
2419 cc
= GET_FIELD_SP(insn
, 20, 21);
2421 do_branch(dc
, target
, insn
, 0);
2423 do_branch(dc
, target
, insn
, 1);
2428 case 0x3: /* V9 BPr */
2430 target
= GET_FIELD_SP(insn
, 0, 13) |
2431 (GET_FIELD_SP(insn
, 20, 21) << 14);
2432 target
= sign_extend(target
, 16);
2434 cpu_src1
= get_src1(insn
, cpu_src1
);
2435 do_branch_reg(dc
, target
, insn
, cpu_src1
);
2438 case 0x5: /* V9 FBPcc */
2440 int cc
= GET_FIELD_SP(insn
, 20, 21);
2441 if (gen_trap_ifnofpu(dc
)) {
2444 target
= GET_FIELD_SP(insn
, 0, 18);
2445 target
= sign_extend(target
, 19);
2447 do_fbranch(dc
, target
, insn
, cc
);
2451 case 0x7: /* CBN+x */
2456 case 0x2: /* BN+x */
2458 target
= GET_FIELD(insn
, 10, 31);
2459 target
= sign_extend(target
, 22);
2461 do_branch(dc
, target
, insn
, 0);
2464 case 0x6: /* FBN+x */
2466 if (gen_trap_ifnofpu(dc
)) {
2469 target
= GET_FIELD(insn
, 10, 31);
2470 target
= sign_extend(target
, 22);
2472 do_fbranch(dc
, target
, insn
, 0);
2475 case 0x4: /* SETHI */
2477 uint32_t value
= GET_FIELD(insn
, 10, 31);
2480 r_const
= tcg_const_tl(value
<< 10);
2481 gen_movl_TN_reg(rd
, r_const
);
2482 tcg_temp_free(r_const
);
2485 case 0x0: /* UNIMPL */
2494 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
2497 r_const
= tcg_const_tl(dc
->pc
);
2498 gen_movl_TN_reg(15, r_const
);
2499 tcg_temp_free(r_const
);
2502 #ifdef TARGET_SPARC64
2503 if (unlikely(AM_CHECK(dc
))) {
2504 target
&= 0xffffffffULL
;
2510 case 2: /* FPU & Logical Operations */
2512 unsigned int xop
= GET_FIELD(insn
, 7, 12);
2513 if (xop
== 0x3a) { /* generate trap */
2516 cpu_src1
= get_src1(insn
, cpu_src1
);
2518 rs2
= GET_FIELD(insn
, 25, 31);
2519 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
2521 rs2
= GET_FIELD(insn
, 27, 31);
2523 gen_movl_reg_TN(rs2
, cpu_src2
);
2524 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2526 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
2529 cond
= GET_FIELD(insn
, 3, 6);
2530 if (cond
== 0x8) { /* Trap Always */
2532 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2534 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2536 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2537 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2538 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2539 gen_helper_raise_exception(cpu_env
, cpu_tmp32
);
2541 } else if (cond
!= 0) {
2542 TCGv r_cond
= tcg_temp_new();
2544 #ifdef TARGET_SPARC64
2546 int cc
= GET_FIELD_SP(insn
, 11, 12);
2550 gen_cond(r_cond
, 0, cond
, dc
);
2552 gen_cond(r_cond
, 1, cond
, dc
);
2557 gen_cond(r_cond
, 0, cond
, dc
);
2559 l1
= gen_new_label();
2560 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
2562 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2564 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2566 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2567 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2568 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2569 gen_helper_raise_exception(cpu_env
, cpu_tmp32
);
2572 tcg_temp_free(r_cond
);
2578 } else if (xop
== 0x28) {
2579 rs1
= GET_FIELD(insn
, 13, 17);
2582 #ifndef TARGET_SPARC64
2583 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2584 manual, rdy on the microSPARC
2586 case 0x0f: /* stbar in the SPARCv8 manual,
2587 rdy on the microSPARC II */
2588 case 0x10 ... 0x1f: /* implementation-dependent in the
2589 SPARCv8 manual, rdy on the
2592 if (rs1
== 0x11 && dc
->def
->features
& CPU_FEATURE_ASR17
) {
2595 /* Read Asr17 for a Leon3 monoprocessor */
2596 r_const
= tcg_const_tl((1 << 8)
2597 | (dc
->def
->nwindows
- 1));
2598 gen_movl_TN_reg(rd
, r_const
);
2599 tcg_temp_free(r_const
);
2603 gen_movl_TN_reg(rd
, cpu_y
);
2605 #ifdef TARGET_SPARC64
2606 case 0x2: /* V9 rdccr */
2607 gen_helper_compute_psr(cpu_env
);
2608 gen_helper_rdccr(cpu_dst
, cpu_env
);
2609 gen_movl_TN_reg(rd
, cpu_dst
);
2611 case 0x3: /* V9 rdasi */
2612 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
2613 gen_movl_TN_reg(rd
, cpu_dst
);
2615 case 0x4: /* V9 rdtick */
2619 r_tickptr
= tcg_temp_new_ptr();
2620 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2621 offsetof(CPUSPARCState
, tick
));
2622 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2623 tcg_temp_free_ptr(r_tickptr
);
2624 gen_movl_TN_reg(rd
, cpu_dst
);
2627 case 0x5: /* V9 rdpc */
2631 if (unlikely(AM_CHECK(dc
))) {
2632 r_const
= tcg_const_tl(dc
->pc
& 0xffffffffULL
);
2634 r_const
= tcg_const_tl(dc
->pc
);
2636 gen_movl_TN_reg(rd
, r_const
);
2637 tcg_temp_free(r_const
);
2640 case 0x6: /* V9 rdfprs */
2641 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2642 gen_movl_TN_reg(rd
, cpu_dst
);
2644 case 0xf: /* V9 membar */
2645 break; /* no effect */
2646 case 0x13: /* Graphics Status */
2647 if (gen_trap_ifnofpu(dc
)) {
2650 gen_movl_TN_reg(rd
, cpu_gsr
);
2652 case 0x16: /* Softint */
2653 tcg_gen_ext_i32_tl(cpu_dst
, cpu_softint
);
2654 gen_movl_TN_reg(rd
, cpu_dst
);
2656 case 0x17: /* Tick compare */
2657 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2659 case 0x18: /* System tick */
2663 r_tickptr
= tcg_temp_new_ptr();
2664 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2665 offsetof(CPUSPARCState
, stick
));
2666 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2667 tcg_temp_free_ptr(r_tickptr
);
2668 gen_movl_TN_reg(rd
, cpu_dst
);
2671 case 0x19: /* System tick compare */
2672 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2674 case 0x10: /* Performance Control */
2675 case 0x11: /* Performance Instrumentation Counter */
2676 case 0x12: /* Dispatch Control */
2677 case 0x14: /* Softint set, WO */
2678 case 0x15: /* Softint clear, WO */
2683 #if !defined(CONFIG_USER_ONLY)
2684 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2685 #ifndef TARGET_SPARC64
2686 if (!supervisor(dc
))
2688 gen_helper_compute_psr(cpu_env
);
2689 dc
->cc_op
= CC_OP_FLAGS
;
2690 gen_helper_rdpsr(cpu_dst
, cpu_env
);
2692 CHECK_IU_FEATURE(dc
, HYPV
);
2693 if (!hypervisor(dc
))
2695 rs1
= GET_FIELD(insn
, 13, 17);
2698 // gen_op_rdhpstate();
2701 // gen_op_rdhtstate();
2704 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
2707 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
2710 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
2712 case 31: // hstick_cmpr
2713 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2719 gen_movl_TN_reg(rd
, cpu_dst
);
2721 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2722 if (!supervisor(dc
))
2724 #ifdef TARGET_SPARC64
2725 rs1
= GET_FIELD(insn
, 13, 17);
2731 r_tsptr
= tcg_temp_new_ptr();
2732 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2733 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2734 offsetof(trap_state
, tpc
));
2735 tcg_temp_free_ptr(r_tsptr
);
2742 r_tsptr
= tcg_temp_new_ptr();
2743 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2744 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2745 offsetof(trap_state
, tnpc
));
2746 tcg_temp_free_ptr(r_tsptr
);
2753 r_tsptr
= tcg_temp_new_ptr();
2754 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2755 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2756 offsetof(trap_state
, tstate
));
2757 tcg_temp_free_ptr(r_tsptr
);
2764 r_tsptr
= tcg_temp_new_ptr();
2765 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2766 tcg_gen_ld_i32(cpu_tmp32
, r_tsptr
,
2767 offsetof(trap_state
, tt
));
2768 tcg_temp_free_ptr(r_tsptr
);
2769 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2776 r_tickptr
= tcg_temp_new_ptr();
2777 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2778 offsetof(CPUSPARCState
, tick
));
2779 gen_helper_tick_get_count(cpu_tmp0
, r_tickptr
);
2780 gen_movl_TN_reg(rd
, cpu_tmp0
);
2781 tcg_temp_free_ptr(r_tickptr
);
2785 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2788 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2789 offsetof(CPUSPARCState
, pstate
));
2790 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2793 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2794 offsetof(CPUSPARCState
, tl
));
2795 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2798 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2799 offsetof(CPUSPARCState
, psrpil
));
2800 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2803 gen_helper_rdcwp(cpu_tmp0
, cpu_env
);
2806 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2807 offsetof(CPUSPARCState
, cansave
));
2808 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2810 case 11: // canrestore
2811 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2812 offsetof(CPUSPARCState
, canrestore
));
2813 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2815 case 12: // cleanwin
2816 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2817 offsetof(CPUSPARCState
, cleanwin
));
2818 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2820 case 13: // otherwin
2821 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2822 offsetof(CPUSPARCState
, otherwin
));
2823 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2826 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2827 offsetof(CPUSPARCState
, wstate
));
2828 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2830 case 16: // UA2005 gl
2831 CHECK_IU_FEATURE(dc
, GL
);
2832 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2833 offsetof(CPUSPARCState
, gl
));
2834 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2836 case 26: // UA2005 strand status
2837 CHECK_IU_FEATURE(dc
, HYPV
);
2838 if (!hypervisor(dc
))
2840 tcg_gen_mov_tl(cpu_tmp0
, cpu_ssr
);
2843 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2850 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2852 gen_movl_TN_reg(rd
, cpu_tmp0
);
2854 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2855 #ifdef TARGET_SPARC64
2857 gen_helper_flushw(cpu_env
);
2859 if (!supervisor(dc
))
2861 gen_movl_TN_reg(rd
, cpu_tbr
);
2865 } else if (xop
== 0x34) { /* FPU Operations */
2866 if (gen_trap_ifnofpu(dc
)) {
2869 gen_op_clear_ieee_excp_and_FTT();
2870 rs1
= GET_FIELD(insn
, 13, 17);
2871 rs2
= GET_FIELD(insn
, 27, 31);
2872 xop
= GET_FIELD(insn
, 18, 26);
2875 case 0x1: /* fmovs */
2876 cpu_src1_32
= gen_load_fpr_F(dc
, rs2
);
2877 gen_store_fpr_F(dc
, rd
, cpu_src1_32
);
2879 case 0x5: /* fnegs */
2880 gen_ne_fop_FF(dc
, rd
, rs2
, gen_helper_fnegs
);
2882 case 0x9: /* fabss */
2883 gen_ne_fop_FF(dc
, rd
, rs2
, gen_helper_fabss
);
2885 case 0x29: /* fsqrts */
2886 CHECK_FPU_FEATURE(dc
, FSQRT
);
2887 gen_fop_FF(dc
, rd
, rs2
, gen_helper_fsqrts
);
2889 case 0x2a: /* fsqrtd */
2890 CHECK_FPU_FEATURE(dc
, FSQRT
);
2891 gen_fop_DD(dc
, rd
, rs2
, gen_helper_fsqrtd
);
2893 case 0x2b: /* fsqrtq */
2894 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2895 gen_fop_QQ(dc
, rd
, rs2
, gen_helper_fsqrtq
);
2897 case 0x41: /* fadds */
2898 gen_fop_FFF(dc
, rd
, rs1
, rs2
, gen_helper_fadds
);
2900 case 0x42: /* faddd */
2901 gen_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_faddd
);
2903 case 0x43: /* faddq */
2904 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2905 gen_fop_QQQ(dc
, rd
, rs1
, rs2
, gen_helper_faddq
);
2907 case 0x45: /* fsubs */
2908 gen_fop_FFF(dc
, rd
, rs1
, rs2
, gen_helper_fsubs
);
2910 case 0x46: /* fsubd */
2911 gen_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fsubd
);
2913 case 0x47: /* fsubq */
2914 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2915 gen_fop_QQQ(dc
, rd
, rs1
, rs2
, gen_helper_fsubq
);
2917 case 0x49: /* fmuls */
2918 CHECK_FPU_FEATURE(dc
, FMUL
);
2919 gen_fop_FFF(dc
, rd
, rs1
, rs2
, gen_helper_fmuls
);
2921 case 0x4a: /* fmuld */
2922 CHECK_FPU_FEATURE(dc
, FMUL
);
2923 gen_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmuld
);
2925 case 0x4b: /* fmulq */
2926 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2927 CHECK_FPU_FEATURE(dc
, FMUL
);
2928 gen_fop_QQQ(dc
, rd
, rs1
, rs2
, gen_helper_fmulq
);
2930 case 0x4d: /* fdivs */
2931 gen_fop_FFF(dc
, rd
, rs1
, rs2
, gen_helper_fdivs
);
2933 case 0x4e: /* fdivd */
2934 gen_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fdivd
);
2936 case 0x4f: /* fdivq */
2937 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2938 gen_fop_QQQ(dc
, rd
, rs1
, rs2
, gen_helper_fdivq
);
2940 case 0x69: /* fsmuld */
2941 CHECK_FPU_FEATURE(dc
, FSMULD
);
2942 gen_fop_DFF(dc
, rd
, rs1
, rs2
, gen_helper_fsmuld
);
2944 case 0x6e: /* fdmulq */
2945 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2946 gen_fop_QDD(dc
, rd
, rs1
, rs2
, gen_helper_fdmulq
);
2948 case 0xc4: /* fitos */
2949 gen_fop_FF(dc
, rd
, rs2
, gen_helper_fitos
);
2951 case 0xc6: /* fdtos */
2952 gen_fop_FD(dc
, rd
, rs2
, gen_helper_fdtos
);
2954 case 0xc7: /* fqtos */
2955 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2956 gen_fop_FQ(dc
, rd
, rs2
, gen_helper_fqtos
);
2958 case 0xc8: /* fitod */
2959 gen_ne_fop_DF(dc
, rd
, rs2
, gen_helper_fitod
);
2961 case 0xc9: /* fstod */
2962 gen_ne_fop_DF(dc
, rd
, rs2
, gen_helper_fstod
);
2964 case 0xcb: /* fqtod */
2965 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2966 gen_fop_DQ(dc
, rd
, rs2
, gen_helper_fqtod
);
2968 case 0xcc: /* fitoq */
2969 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2970 gen_ne_fop_QF(dc
, rd
, rs2
, gen_helper_fitoq
);
2972 case 0xcd: /* fstoq */
2973 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2974 gen_ne_fop_QF(dc
, rd
, rs2
, gen_helper_fstoq
);
2976 case 0xce: /* fdtoq */
2977 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2978 gen_ne_fop_QD(dc
, rd
, rs2
, gen_helper_fdtoq
);
2980 case 0xd1: /* fstoi */
2981 gen_fop_FF(dc
, rd
, rs2
, gen_helper_fstoi
);
2983 case 0xd2: /* fdtoi */
2984 gen_fop_FD(dc
, rd
, rs2
, gen_helper_fdtoi
);
2986 case 0xd3: /* fqtoi */
2987 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2988 gen_fop_FQ(dc
, rd
, rs2
, gen_helper_fqtoi
);
2990 #ifdef TARGET_SPARC64
2991 case 0x2: /* V9 fmovd */
2992 cpu_src1_64
= gen_load_fpr_D(dc
, rs2
);
2993 gen_store_fpr_D(dc
, rd
, cpu_src1_64
);
2995 case 0x3: /* V9 fmovq */
2996 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2997 gen_move_Q(rd
, rs2
);
2999 case 0x6: /* V9 fnegd */
3000 gen_ne_fop_DD(dc
, rd
, rs2
, gen_helper_fnegd
);
3002 case 0x7: /* V9 fnegq */
3003 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3004 gen_ne_fop_QQ(dc
, rd
, rs2
, gen_helper_fnegq
);
3006 case 0xa: /* V9 fabsd */
3007 gen_ne_fop_DD(dc
, rd
, rs2
, gen_helper_fabsd
);
3009 case 0xb: /* V9 fabsq */
3010 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3011 gen_ne_fop_QQ(dc
, rd
, rs2
, gen_helper_fabsq
);
3013 case 0x81: /* V9 fstox */
3014 gen_fop_DF(dc
, rd
, rs2
, gen_helper_fstox
);
3016 case 0x82: /* V9 fdtox */
3017 gen_fop_DD(dc
, rd
, rs2
, gen_helper_fdtox
);
3019 case 0x83: /* V9 fqtox */
3020 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3021 gen_fop_DQ(dc
, rd
, rs2
, gen_helper_fqtox
);
3023 case 0x84: /* V9 fxtos */
3024 gen_fop_FD(dc
, rd
, rs2
, gen_helper_fxtos
);
3026 case 0x88: /* V9 fxtod */
3027 gen_fop_DD(dc
, rd
, rs2
, gen_helper_fxtod
);
3029 case 0x8c: /* V9 fxtoq */
3030 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3031 gen_ne_fop_QD(dc
, rd
, rs2
, gen_helper_fxtoq
);
3037 } else if (xop
== 0x35) { /* FPU Operations */
3038 #ifdef TARGET_SPARC64
3041 if (gen_trap_ifnofpu(dc
)) {
3044 gen_op_clear_ieee_excp_and_FTT();
3045 rs1
= GET_FIELD(insn
, 13, 17);
3046 rs2
= GET_FIELD(insn
, 27, 31);
3047 xop
= GET_FIELD(insn
, 18, 26);
3049 #ifdef TARGET_SPARC64
3050 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
3053 l1
= gen_new_label();
3054 cond
= GET_FIELD_SP(insn
, 14, 17);
3055 cpu_src1
= get_src1(insn
, cpu_src1
);
3056 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
3058 cpu_src1_32
= gen_load_fpr_F(dc
, rs2
);
3059 gen_store_fpr_F(dc
, rd
, cpu_src1_32
);
3062 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
3065 l1
= gen_new_label();
3066 cond
= GET_FIELD_SP(insn
, 14, 17);
3067 cpu_src1
= get_src1(insn
, cpu_src1
);
3068 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
3070 cpu_src1_64
= gen_load_fpr_D(dc
, rs2
);
3071 gen_store_fpr_D(dc
, rd
, cpu_src1_64
);
3074 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
3077 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3078 l1
= gen_new_label();
3079 cond
= GET_FIELD_SP(insn
, 14, 17);
3080 cpu_src1
= get_src1(insn
, cpu_src1
);
3081 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
3083 gen_move_Q(rd
, rs2
);
3089 #ifdef TARGET_SPARC64
3090 #define FMOVSCC(fcc) \
3095 l1 = gen_new_label(); \
3096 r_cond = tcg_temp_new(); \
3097 cond = GET_FIELD_SP(insn, 14, 17); \
3098 gen_fcond(r_cond, fcc, cond); \
3099 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3101 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3102 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3103 gen_set_label(l1); \
3104 tcg_temp_free(r_cond); \
3106 #define FMOVDCC(fcc) \
3111 l1 = gen_new_label(); \
3112 r_cond = tcg_temp_new(); \
3113 cond = GET_FIELD_SP(insn, 14, 17); \
3114 gen_fcond(r_cond, fcc, cond); \
3115 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3117 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3118 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3119 gen_set_label(l1); \
3120 tcg_temp_free(r_cond); \
3122 #define FMOVQCC(fcc) \
3127 l1 = gen_new_label(); \
3128 r_cond = tcg_temp_new(); \
3129 cond = GET_FIELD_SP(insn, 14, 17); \
3130 gen_fcond(r_cond, fcc, cond); \
3131 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3133 gen_move_Q(rd, rs2); \
3134 gen_set_label(l1); \
3135 tcg_temp_free(r_cond); \
3137 case 0x001: /* V9 fmovscc %fcc0 */
3140 case 0x002: /* V9 fmovdcc %fcc0 */
3143 case 0x003: /* V9 fmovqcc %fcc0 */
3144 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3147 case 0x041: /* V9 fmovscc %fcc1 */
3150 case 0x042: /* V9 fmovdcc %fcc1 */
3153 case 0x043: /* V9 fmovqcc %fcc1 */
3154 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3157 case 0x081: /* V9 fmovscc %fcc2 */
3160 case 0x082: /* V9 fmovdcc %fcc2 */
3163 case 0x083: /* V9 fmovqcc %fcc2 */
3164 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3167 case 0x0c1: /* V9 fmovscc %fcc3 */
3170 case 0x0c2: /* V9 fmovdcc %fcc3 */
3173 case 0x0c3: /* V9 fmovqcc %fcc3 */
3174 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3180 #define FMOVSCC(icc) \
3185 l1 = gen_new_label(); \
3186 r_cond = tcg_temp_new(); \
3187 cond = GET_FIELD_SP(insn, 14, 17); \
3188 gen_cond(r_cond, icc, cond, dc); \
3189 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3191 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3192 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3193 gen_set_label(l1); \
3194 tcg_temp_free(r_cond); \
3196 #define FMOVDCC(icc) \
3201 l1 = gen_new_label(); \
3202 r_cond = tcg_temp_new(); \
3203 cond = GET_FIELD_SP(insn, 14, 17); \
3204 gen_cond(r_cond, icc, cond, dc); \
3205 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3207 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3208 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3209 gen_update_fprs_dirty(DFPREG(rd)); \
3210 gen_set_label(l1); \
3211 tcg_temp_free(r_cond); \
3213 #define FMOVQCC(icc) \
3218 l1 = gen_new_label(); \
3219 r_cond = tcg_temp_new(); \
3220 cond = GET_FIELD_SP(insn, 14, 17); \
3221 gen_cond(r_cond, icc, cond, dc); \
3222 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3224 gen_move_Q(rd, rs2); \
3225 gen_set_label(l1); \
3226 tcg_temp_free(r_cond); \
3229 case 0x101: /* V9 fmovscc %icc */
3232 case 0x102: /* V9 fmovdcc %icc */
3235 case 0x103: /* V9 fmovqcc %icc */
3236 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3239 case 0x181: /* V9 fmovscc %xcc */
3242 case 0x182: /* V9 fmovdcc %xcc */
3245 case 0x183: /* V9 fmovqcc %xcc */
3246 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3253 case 0x51: /* fcmps, V9 %fcc */
3254 cpu_src1_32
= gen_load_fpr_F(dc
, rs1
);
3255 cpu_src2_32
= gen_load_fpr_F(dc
, rs2
);
3256 gen_op_fcmps(rd
& 3, cpu_src1_32
, cpu_src2_32
);
3258 case 0x52: /* fcmpd, V9 %fcc */
3259 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
3260 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
3261 gen_op_fcmpd(rd
& 3, cpu_src1_64
, cpu_src2_64
);
3263 case 0x53: /* fcmpq, V9 %fcc */
3264 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3265 gen_op_load_fpr_QT0(QFPREG(rs1
));
3266 gen_op_load_fpr_QT1(QFPREG(rs2
));
3267 gen_op_fcmpq(rd
& 3);
3269 case 0x55: /* fcmpes, V9 %fcc */
3270 cpu_src1_32
= gen_load_fpr_F(dc
, rs1
);
3271 cpu_src2_32
= gen_load_fpr_F(dc
, rs2
);
3272 gen_op_fcmpes(rd
& 3, cpu_src1_32
, cpu_src2_32
);
3274 case 0x56: /* fcmped, V9 %fcc */
3275 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
3276 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
3277 gen_op_fcmped(rd
& 3, cpu_src1_64
, cpu_src2_64
);
3279 case 0x57: /* fcmpeq, V9 %fcc */
3280 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3281 gen_op_load_fpr_QT0(QFPREG(rs1
));
3282 gen_op_load_fpr_QT1(QFPREG(rs2
));
3283 gen_op_fcmpeq(rd
& 3);
3288 } else if (xop
== 0x2) {
3291 rs1
= GET_FIELD(insn
, 13, 17);
3293 // or %g0, x, y -> mov T0, x; mov y, T0
3294 if (IS_IMM
) { /* immediate */
3297 simm
= GET_FIELDs(insn
, 19, 31);
3298 r_const
= tcg_const_tl(simm
);
3299 gen_movl_TN_reg(rd
, r_const
);
3300 tcg_temp_free(r_const
);
3301 } else { /* register */
3302 rs2
= GET_FIELD(insn
, 27, 31);
3303 gen_movl_reg_TN(rs2
, cpu_dst
);
3304 gen_movl_TN_reg(rd
, cpu_dst
);
3307 cpu_src1
= get_src1(insn
, cpu_src1
);
3308 if (IS_IMM
) { /* immediate */
3309 simm
= GET_FIELDs(insn
, 19, 31);
3310 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
3311 gen_movl_TN_reg(rd
, cpu_dst
);
3312 } else { /* register */
3313 // or x, %g0, y -> mov T1, x; mov y, T1
3314 rs2
= GET_FIELD(insn
, 27, 31);
3316 gen_movl_reg_TN(rs2
, cpu_src2
);
3317 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3318 gen_movl_TN_reg(rd
, cpu_dst
);
3320 gen_movl_TN_reg(rd
, cpu_src1
);
3323 #ifdef TARGET_SPARC64
3324 } else if (xop
== 0x25) { /* sll, V9 sllx */
3325 cpu_src1
= get_src1(insn
, cpu_src1
);
3326 if (IS_IMM
) { /* immediate */
3327 simm
= GET_FIELDs(insn
, 20, 31);
3328 if (insn
& (1 << 12)) {
3329 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3331 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x1f);
3333 } else { /* register */
3334 rs2
= GET_FIELD(insn
, 27, 31);
3335 gen_movl_reg_TN(rs2
, cpu_src2
);
3336 if (insn
& (1 << 12)) {
3337 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3339 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3341 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3343 gen_movl_TN_reg(rd
, cpu_dst
);
3344 } else if (xop
== 0x26) { /* srl, V9 srlx */
3345 cpu_src1
= get_src1(insn
, cpu_src1
);
3346 if (IS_IMM
) { /* immediate */
3347 simm
= GET_FIELDs(insn
, 20, 31);
3348 if (insn
& (1 << 12)) {
3349 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3351 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3352 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
3354 } else { /* register */
3355 rs2
= GET_FIELD(insn
, 27, 31);
3356 gen_movl_reg_TN(rs2
, cpu_src2
);
3357 if (insn
& (1 << 12)) {
3358 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3359 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3361 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3362 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3363 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3366 gen_movl_TN_reg(rd
, cpu_dst
);
3367 } else if (xop
== 0x27) { /* sra, V9 srax */
3368 cpu_src1
= get_src1(insn
, cpu_src1
);
3369 if (IS_IMM
) { /* immediate */
3370 simm
= GET_FIELDs(insn
, 20, 31);
3371 if (insn
& (1 << 12)) {
3372 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3374 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3375 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3376 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
3378 } else { /* register */
3379 rs2
= GET_FIELD(insn
, 27, 31);
3380 gen_movl_reg_TN(rs2
, cpu_src2
);
3381 if (insn
& (1 << 12)) {
3382 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3383 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3385 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3386 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3387 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3388 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3391 gen_movl_TN_reg(rd
, cpu_dst
);
3393 } else if (xop
< 0x36) {
3395 cpu_src1
= get_src1(insn
, cpu_src1
);
3396 cpu_src2
= get_src2(insn
, cpu_src2
);
3397 switch (xop
& ~0x10) {
3400 simm
= GET_FIELDs(insn
, 19, 31);
3402 gen_op_addi_cc(cpu_dst
, cpu_src1
, simm
);
3403 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3404 dc
->cc_op
= CC_OP_ADD
;
3406 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
3410 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3411 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3412 dc
->cc_op
= CC_OP_ADD
;
3414 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3420 simm
= GET_FIELDs(insn
, 19, 31);
3421 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, simm
);
3423 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3426 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3427 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3428 dc
->cc_op
= CC_OP_LOGIC
;
3433 simm
= GET_FIELDs(insn
, 19, 31);
3434 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
3436 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3439 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3440 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3441 dc
->cc_op
= CC_OP_LOGIC
;
3446 simm
= GET_FIELDs(insn
, 19, 31);
3447 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, simm
);
3449 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3452 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3453 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3454 dc
->cc_op
= CC_OP_LOGIC
;
3459 simm
= GET_FIELDs(insn
, 19, 31);
3461 gen_op_subi_cc(cpu_dst
, cpu_src1
, simm
, dc
);
3463 tcg_gen_subi_tl(cpu_dst
, cpu_src1
, simm
);
3467 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3468 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
3469 dc
->cc_op
= CC_OP_SUB
;
3471 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3475 case 0x5: /* andn */
3477 simm
= GET_FIELDs(insn
, 19, 31);
3478 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, ~simm
);
3480 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3483 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3484 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3485 dc
->cc_op
= CC_OP_LOGIC
;
3490 simm
= GET_FIELDs(insn
, 19, 31);
3491 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, ~simm
);
3493 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3496 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3497 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3498 dc
->cc_op
= CC_OP_LOGIC
;
3501 case 0x7: /* xorn */
3503 simm
= GET_FIELDs(insn
, 19, 31);
3504 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, ~simm
);
3506 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
3507 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3510 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3511 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3512 dc
->cc_op
= CC_OP_LOGIC
;
3515 case 0x8: /* addx, V9 addc */
3516 gen_op_addx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3519 #ifdef TARGET_SPARC64
3520 case 0x9: /* V9 mulx */
3522 simm
= GET_FIELDs(insn
, 19, 31);
3523 tcg_gen_muli_i64(cpu_dst
, cpu_src1
, simm
);
3525 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3529 case 0xa: /* umul */
3530 CHECK_IU_FEATURE(dc
, MUL
);
3531 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3533 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3534 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3535 dc
->cc_op
= CC_OP_LOGIC
;
3538 case 0xb: /* smul */
3539 CHECK_IU_FEATURE(dc
, MUL
);
3540 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3542 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3543 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3544 dc
->cc_op
= CC_OP_LOGIC
;
3547 case 0xc: /* subx, V9 subc */
3548 gen_op_subx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3551 #ifdef TARGET_SPARC64
3552 case 0xd: /* V9 udivx */
3554 TCGv r_temp1
, r_temp2
;
3555 r_temp1
= tcg_temp_local_new();
3556 r_temp2
= tcg_temp_local_new();
3557 tcg_gen_mov_tl(r_temp1
, cpu_src1
);
3558 tcg_gen_mov_tl(r_temp2
, cpu_src2
);
3559 gen_trap_ifdivzero_tl(r_temp2
);
3560 tcg_gen_divu_i64(cpu_dst
, r_temp1
, r_temp2
);
3561 tcg_temp_free(r_temp1
);
3562 tcg_temp_free(r_temp2
);
3566 case 0xe: /* udiv */
3567 CHECK_IU_FEATURE(dc
, DIV
);
3569 gen_helper_udiv_cc(cpu_dst
, cpu_env
, cpu_src1
,
3571 dc
->cc_op
= CC_OP_DIV
;
3573 gen_helper_udiv(cpu_dst
, cpu_env
, cpu_src1
,
3577 case 0xf: /* sdiv */
3578 CHECK_IU_FEATURE(dc
, DIV
);
3580 gen_helper_sdiv_cc(cpu_dst
, cpu_env
, cpu_src1
,
3582 dc
->cc_op
= CC_OP_DIV
;
3584 gen_helper_sdiv(cpu_dst
, cpu_env
, cpu_src1
,
3591 gen_movl_TN_reg(rd
, cpu_dst
);
3593 cpu_src1
= get_src1(insn
, cpu_src1
);
3594 cpu_src2
= get_src2(insn
, cpu_src2
);
3596 case 0x20: /* taddcc */
3597 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3598 gen_movl_TN_reg(rd
, cpu_dst
);
3599 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADD
);
3600 dc
->cc_op
= CC_OP_TADD
;
3602 case 0x21: /* tsubcc */
3603 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3604 gen_movl_TN_reg(rd
, cpu_dst
);
3605 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUB
);
3606 dc
->cc_op
= CC_OP_TSUB
;
3608 case 0x22: /* taddcctv */
3610 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3611 gen_movl_TN_reg(rd
, cpu_dst
);
3612 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADDTV
);
3613 dc
->cc_op
= CC_OP_TADDTV
;
3615 case 0x23: /* tsubcctv */
3617 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3618 gen_movl_TN_reg(rd
, cpu_dst
);
3619 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUBTV
);
3620 dc
->cc_op
= CC_OP_TSUBTV
;
3622 case 0x24: /* mulscc */
3623 gen_helper_compute_psr(cpu_env
);
3624 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3625 gen_movl_TN_reg(rd
, cpu_dst
);
3626 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3627 dc
->cc_op
= CC_OP_ADD
;
3629 #ifndef TARGET_SPARC64
3630 case 0x25: /* sll */
3631 if (IS_IMM
) { /* immediate */
3632 simm
= GET_FIELDs(insn
, 20, 31);
3633 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3634 } else { /* register */
3635 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3636 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3638 gen_movl_TN_reg(rd
, cpu_dst
);
3640 case 0x26: /* srl */
3641 if (IS_IMM
) { /* immediate */
3642 simm
= GET_FIELDs(insn
, 20, 31);
3643 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3644 } else { /* register */
3645 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3646 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3648 gen_movl_TN_reg(rd
, cpu_dst
);
3650 case 0x27: /* sra */
3651 if (IS_IMM
) { /* immediate */
3652 simm
= GET_FIELDs(insn
, 20, 31);
3653 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3654 } else { /* register */
3655 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3656 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3658 gen_movl_TN_reg(rd
, cpu_dst
);
3665 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3666 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3668 #ifndef TARGET_SPARC64
3669 case 0x01 ... 0x0f: /* undefined in the
3673 case 0x10 ... 0x1f: /* implementation-dependent
3679 case 0x2: /* V9 wrccr */
3680 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3681 gen_helper_wrccr(cpu_env
, cpu_dst
);
3682 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3683 dc
->cc_op
= CC_OP_FLAGS
;
3685 case 0x3: /* V9 wrasi */
3686 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3687 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, 0xff);
3688 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3690 case 0x6: /* V9 wrfprs */
3691 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3692 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3698 case 0xf: /* V9 sir, nop if user */
3699 #if !defined(CONFIG_USER_ONLY)
3700 if (supervisor(dc
)) {
3705 case 0x13: /* Graphics Status */
3706 if (gen_trap_ifnofpu(dc
)) {
3709 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3711 case 0x14: /* Softint set */
3712 if (!supervisor(dc
))
3714 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3715 gen_helper_set_softint(cpu_env
, cpu_tmp64
);
3717 case 0x15: /* Softint clear */
3718 if (!supervisor(dc
))
3720 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3721 gen_helper_clear_softint(cpu_env
, cpu_tmp64
);
3723 case 0x16: /* Softint write */
3724 if (!supervisor(dc
))
3726 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3727 gen_helper_write_softint(cpu_env
, cpu_tmp64
);
3729 case 0x17: /* Tick compare */
3730 #if !defined(CONFIG_USER_ONLY)
3731 if (!supervisor(dc
))
3737 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3739 r_tickptr
= tcg_temp_new_ptr();
3740 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3741 offsetof(CPUSPARCState
, tick
));
3742 gen_helper_tick_set_limit(r_tickptr
,
3744 tcg_temp_free_ptr(r_tickptr
);
3747 case 0x18: /* System tick */
3748 #if !defined(CONFIG_USER_ONLY)
3749 if (!supervisor(dc
))
3755 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3757 r_tickptr
= tcg_temp_new_ptr();
3758 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3759 offsetof(CPUSPARCState
, stick
));
3760 gen_helper_tick_set_count(r_tickptr
,
3762 tcg_temp_free_ptr(r_tickptr
);
3765 case 0x19: /* System tick compare */
3766 #if !defined(CONFIG_USER_ONLY)
3767 if (!supervisor(dc
))
3773 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3775 r_tickptr
= tcg_temp_new_ptr();
3776 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3777 offsetof(CPUSPARCState
, stick
));
3778 gen_helper_tick_set_limit(r_tickptr
,
3780 tcg_temp_free_ptr(r_tickptr
);
3784 case 0x10: /* Performance Control */
3785 case 0x11: /* Performance Instrumentation
3787 case 0x12: /* Dispatch Control */
3794 #if !defined(CONFIG_USER_ONLY)
3795 case 0x31: /* wrpsr, V9 saved, restored */
3797 if (!supervisor(dc
))
3799 #ifdef TARGET_SPARC64
3802 gen_helper_saved(cpu_env
);
3805 gen_helper_restored(cpu_env
);
3807 case 2: /* UA2005 allclean */
3808 case 3: /* UA2005 otherw */
3809 case 4: /* UA2005 normalw */
3810 case 5: /* UA2005 invalw */
3816 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3817 gen_helper_wrpsr(cpu_env
, cpu_dst
);
3818 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3819 dc
->cc_op
= CC_OP_FLAGS
;
3827 case 0x32: /* wrwim, V9 wrpr */
3829 if (!supervisor(dc
))
3831 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3832 #ifdef TARGET_SPARC64
3838 r_tsptr
= tcg_temp_new_ptr();
3839 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3840 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3841 offsetof(trap_state
, tpc
));
3842 tcg_temp_free_ptr(r_tsptr
);
3849 r_tsptr
= tcg_temp_new_ptr();
3850 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3851 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3852 offsetof(trap_state
, tnpc
));
3853 tcg_temp_free_ptr(r_tsptr
);
3860 r_tsptr
= tcg_temp_new_ptr();
3861 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3862 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3863 offsetof(trap_state
,
3865 tcg_temp_free_ptr(r_tsptr
);
3872 r_tsptr
= tcg_temp_new_ptr();
3873 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3874 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3875 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3876 offsetof(trap_state
, tt
));
3877 tcg_temp_free_ptr(r_tsptr
);
3884 r_tickptr
= tcg_temp_new_ptr();
3885 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3886 offsetof(CPUSPARCState
, tick
));
3887 gen_helper_tick_set_count(r_tickptr
,
3889 tcg_temp_free_ptr(r_tickptr
);
3893 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3897 TCGv r_tmp
= tcg_temp_local_new();
3899 tcg_gen_mov_tl(r_tmp
, cpu_tmp0
);
3901 gen_helper_wrpstate(cpu_env
, r_tmp
);
3902 tcg_temp_free(r_tmp
);
3903 dc
->npc
= DYNAMIC_PC
;
3908 TCGv r_tmp
= tcg_temp_local_new();
3910 tcg_gen_mov_tl(r_tmp
, cpu_tmp0
);
3912 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_tmp
);
3913 tcg_temp_free(r_tmp
);
3914 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3915 offsetof(CPUSPARCState
, tl
));
3916 dc
->npc
= DYNAMIC_PC
;
3920 gen_helper_wrpil(cpu_env
, cpu_tmp0
);
3923 gen_helper_wrcwp(cpu_env
, cpu_tmp0
);
3926 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3927 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3928 offsetof(CPUSPARCState
,
3931 case 11: // canrestore
3932 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3933 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3934 offsetof(CPUSPARCState
,
3937 case 12: // cleanwin
3938 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3939 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3940 offsetof(CPUSPARCState
,
3943 case 13: // otherwin
3944 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3945 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3946 offsetof(CPUSPARCState
,
3950 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3951 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3952 offsetof(CPUSPARCState
,
3955 case 16: // UA2005 gl
3956 CHECK_IU_FEATURE(dc
, GL
);
3957 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3958 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3959 offsetof(CPUSPARCState
, gl
));
3961 case 26: // UA2005 strand status
3962 CHECK_IU_FEATURE(dc
, HYPV
);
3963 if (!hypervisor(dc
))
3965 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3971 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3972 if (dc
->def
->nwindows
!= 32)
3973 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3974 (1 << dc
->def
->nwindows
) - 1);
3975 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3979 case 0x33: /* wrtbr, UA2005 wrhpr */
3981 #ifndef TARGET_SPARC64
3982 if (!supervisor(dc
))
3984 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3986 CHECK_IU_FEATURE(dc
, HYPV
);
3987 if (!hypervisor(dc
))
3989 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3992 // XXX gen_op_wrhpstate();
3999 // XXX gen_op_wrhtstate();
4002 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
4005 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
4007 case 31: // hstick_cmpr
4011 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
4012 r_tickptr
= tcg_temp_new_ptr();
4013 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
4014 offsetof(CPUSPARCState
, hstick
));
4015 gen_helper_tick_set_limit(r_tickptr
,
4017 tcg_temp_free_ptr(r_tickptr
);
4020 case 6: // hver readonly
4028 #ifdef TARGET_SPARC64
4029 case 0x2c: /* V9 movcc */
4031 int cc
= GET_FIELD_SP(insn
, 11, 12);
4032 int cond
= GET_FIELD_SP(insn
, 14, 17);
4036 r_cond
= tcg_temp_new();
4037 if (insn
& (1 << 18)) {
4039 gen_cond(r_cond
, 0, cond
, dc
);
4041 gen_cond(r_cond
, 1, cond
, dc
);
4045 gen_fcond(r_cond
, cc
, cond
);
4048 l1
= gen_new_label();
4050 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
4051 if (IS_IMM
) { /* immediate */
4054 simm
= GET_FIELD_SPs(insn
, 0, 10);
4055 r_const
= tcg_const_tl(simm
);
4056 gen_movl_TN_reg(rd
, r_const
);
4057 tcg_temp_free(r_const
);
4059 rs2
= GET_FIELD_SP(insn
, 0, 4);
4060 gen_movl_reg_TN(rs2
, cpu_tmp0
);
4061 gen_movl_TN_reg(rd
, cpu_tmp0
);
4064 tcg_temp_free(r_cond
);
4067 case 0x2d: /* V9 sdivx */
4068 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
4069 gen_movl_TN_reg(rd
, cpu_dst
);
4071 case 0x2e: /* V9 popc */
4073 cpu_src2
= get_src2(insn
, cpu_src2
);
4074 gen_helper_popc(cpu_dst
, cpu_src2
);
4075 gen_movl_TN_reg(rd
, cpu_dst
);
4077 case 0x2f: /* V9 movr */
4079 int cond
= GET_FIELD_SP(insn
, 10, 12);
4082 cpu_src1
= get_src1(insn
, cpu_src1
);
4084 l1
= gen_new_label();
4086 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
4088 if (IS_IMM
) { /* immediate */
4091 simm
= GET_FIELD_SPs(insn
, 0, 9);
4092 r_const
= tcg_const_tl(simm
);
4093 gen_movl_TN_reg(rd
, r_const
);
4094 tcg_temp_free(r_const
);
4096 rs2
= GET_FIELD_SP(insn
, 0, 4);
4097 gen_movl_reg_TN(rs2
, cpu_tmp0
);
4098 gen_movl_TN_reg(rd
, cpu_tmp0
);
4108 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4109 #ifdef TARGET_SPARC64
4110 int opf
= GET_FIELD_SP(insn
, 5, 13);
4111 rs1
= GET_FIELD(insn
, 13, 17);
4112 rs2
= GET_FIELD(insn
, 27, 31);
4113 if (gen_trap_ifnofpu(dc
)) {
4118 case 0x000: /* VIS I edge8cc */
4119 CHECK_FPU_FEATURE(dc
, VIS1
);
4120 gen_movl_reg_TN(rs1
, cpu_src1
);
4121 gen_movl_reg_TN(rs2
, cpu_src2
);
4122 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 8, 1, 0);
4123 gen_movl_TN_reg(rd
, cpu_dst
);
4125 case 0x001: /* VIS II edge8n */
4126 CHECK_FPU_FEATURE(dc
, VIS2
);
4127 gen_movl_reg_TN(rs1
, cpu_src1
);
4128 gen_movl_reg_TN(rs2
, cpu_src2
);
4129 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 8, 0, 0);
4130 gen_movl_TN_reg(rd
, cpu_dst
);
4132 case 0x002: /* VIS I edge8lcc */
4133 CHECK_FPU_FEATURE(dc
, VIS1
);
4134 gen_movl_reg_TN(rs1
, cpu_src1
);
4135 gen_movl_reg_TN(rs2
, cpu_src2
);
4136 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 8, 1, 1);
4137 gen_movl_TN_reg(rd
, cpu_dst
);
4139 case 0x003: /* VIS II edge8ln */
4140 CHECK_FPU_FEATURE(dc
, VIS2
);
4141 gen_movl_reg_TN(rs1
, cpu_src1
);
4142 gen_movl_reg_TN(rs2
, cpu_src2
);
4143 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 8, 0, 1);
4144 gen_movl_TN_reg(rd
, cpu_dst
);
4146 case 0x004: /* VIS I edge16cc */
4147 CHECK_FPU_FEATURE(dc
, VIS1
);
4148 gen_movl_reg_TN(rs1
, cpu_src1
);
4149 gen_movl_reg_TN(rs2
, cpu_src2
);
4150 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 16, 1, 0);
4151 gen_movl_TN_reg(rd
, cpu_dst
);
4153 case 0x005: /* VIS II edge16n */
4154 CHECK_FPU_FEATURE(dc
, VIS2
);
4155 gen_movl_reg_TN(rs1
, cpu_src1
);
4156 gen_movl_reg_TN(rs2
, cpu_src2
);
4157 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 16, 0, 0);
4158 gen_movl_TN_reg(rd
, cpu_dst
);
4160 case 0x006: /* VIS I edge16lcc */
4161 CHECK_FPU_FEATURE(dc
, VIS1
);
4162 gen_movl_reg_TN(rs1
, cpu_src1
);
4163 gen_movl_reg_TN(rs2
, cpu_src2
);
4164 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 16, 1, 1);
4165 gen_movl_TN_reg(rd
, cpu_dst
);
4167 case 0x007: /* VIS II edge16ln */
4168 CHECK_FPU_FEATURE(dc
, VIS2
);
4169 gen_movl_reg_TN(rs1
, cpu_src1
);
4170 gen_movl_reg_TN(rs2
, cpu_src2
);
4171 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 16, 0, 1);
4172 gen_movl_TN_reg(rd
, cpu_dst
);
4174 case 0x008: /* VIS I edge32cc */
4175 CHECK_FPU_FEATURE(dc
, VIS1
);
4176 gen_movl_reg_TN(rs1
, cpu_src1
);
4177 gen_movl_reg_TN(rs2
, cpu_src2
);
4178 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 32, 1, 0);
4179 gen_movl_TN_reg(rd
, cpu_dst
);
4181 case 0x009: /* VIS II edge32n */
4182 CHECK_FPU_FEATURE(dc
, VIS2
);
4183 gen_movl_reg_TN(rs1
, cpu_src1
);
4184 gen_movl_reg_TN(rs2
, cpu_src2
);
4185 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 32, 0, 0);
4186 gen_movl_TN_reg(rd
, cpu_dst
);
4188 case 0x00a: /* VIS I edge32lcc */
4189 CHECK_FPU_FEATURE(dc
, VIS1
);
4190 gen_movl_reg_TN(rs1
, cpu_src1
);
4191 gen_movl_reg_TN(rs2
, cpu_src2
);
4192 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 32, 1, 1);
4193 gen_movl_TN_reg(rd
, cpu_dst
);
4195 case 0x00b: /* VIS II edge32ln */
4196 CHECK_FPU_FEATURE(dc
, VIS2
);
4197 gen_movl_reg_TN(rs1
, cpu_src1
);
4198 gen_movl_reg_TN(rs2
, cpu_src2
);
4199 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 32, 0, 1);
4200 gen_movl_TN_reg(rd
, cpu_dst
);
4202 case 0x010: /* VIS I array8 */
4203 CHECK_FPU_FEATURE(dc
, VIS1
);
4204 cpu_src1
= get_src1(insn
, cpu_src1
);
4205 gen_movl_reg_TN(rs2
, cpu_src2
);
4206 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
4207 gen_movl_TN_reg(rd
, cpu_dst
);
4209 case 0x012: /* VIS I array16 */
4210 CHECK_FPU_FEATURE(dc
, VIS1
);
4211 cpu_src1
= get_src1(insn
, cpu_src1
);
4212 gen_movl_reg_TN(rs2
, cpu_src2
);
4213 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
4214 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
4215 gen_movl_TN_reg(rd
, cpu_dst
);
4217 case 0x014: /* VIS I array32 */
4218 CHECK_FPU_FEATURE(dc
, VIS1
);
4219 cpu_src1
= get_src1(insn
, cpu_src1
);
4220 gen_movl_reg_TN(rs2
, cpu_src2
);
4221 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
4222 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
4223 gen_movl_TN_reg(rd
, cpu_dst
);
4225 case 0x018: /* VIS I alignaddr */
4226 CHECK_FPU_FEATURE(dc
, VIS1
);
4227 cpu_src1
= get_src1(insn
, cpu_src1
);
4228 gen_movl_reg_TN(rs2
, cpu_src2
);
4229 gen_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
, 0);
4230 gen_movl_TN_reg(rd
, cpu_dst
);
4232 case 0x01a: /* VIS I alignaddrl */
4233 CHECK_FPU_FEATURE(dc
, VIS1
);
4234 cpu_src1
= get_src1(insn
, cpu_src1
);
4235 gen_movl_reg_TN(rs2
, cpu_src2
);
4236 gen_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
, 1);
4237 gen_movl_TN_reg(rd
, cpu_dst
);
4239 case 0x019: /* VIS II bmask */
4240 CHECK_FPU_FEATURE(dc
, VIS2
);
4241 cpu_src1
= get_src1(insn
, cpu_src1
);
4242 cpu_src2
= get_src1(insn
, cpu_src2
);
4243 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4244 tcg_gen_deposit_tl(cpu_gsr
, cpu_gsr
, cpu_dst
, 32, 32);
4245 gen_movl_TN_reg(rd
, cpu_dst
);
4247 case 0x020: /* VIS I fcmple16 */
4248 CHECK_FPU_FEATURE(dc
, VIS1
);
4249 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4250 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4251 gen_helper_fcmple16(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4252 gen_movl_TN_reg(rd
, cpu_dst
);
4254 case 0x022: /* VIS I fcmpne16 */
4255 CHECK_FPU_FEATURE(dc
, VIS1
);
4256 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4257 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4258 gen_helper_fcmpne16(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4259 gen_movl_TN_reg(rd
, cpu_dst
);
4261 case 0x024: /* VIS I fcmple32 */
4262 CHECK_FPU_FEATURE(dc
, VIS1
);
4263 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4264 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4265 gen_helper_fcmple32(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4266 gen_movl_TN_reg(rd
, cpu_dst
);
4268 case 0x026: /* VIS I fcmpne32 */
4269 CHECK_FPU_FEATURE(dc
, VIS1
);
4270 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4271 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4272 gen_helper_fcmpne32(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4273 gen_movl_TN_reg(rd
, cpu_dst
);
4275 case 0x028: /* VIS I fcmpgt16 */
4276 CHECK_FPU_FEATURE(dc
, VIS1
);
4277 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4278 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4279 gen_helper_fcmpgt16(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4280 gen_movl_TN_reg(rd
, cpu_dst
);
4282 case 0x02a: /* VIS I fcmpeq16 */
4283 CHECK_FPU_FEATURE(dc
, VIS1
);
4284 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4285 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4286 gen_helper_fcmpeq16(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4287 gen_movl_TN_reg(rd
, cpu_dst
);
4289 case 0x02c: /* VIS I fcmpgt32 */
4290 CHECK_FPU_FEATURE(dc
, VIS1
);
4291 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4292 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4293 gen_helper_fcmpgt32(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4294 gen_movl_TN_reg(rd
, cpu_dst
);
4296 case 0x02e: /* VIS I fcmpeq32 */
4297 CHECK_FPU_FEATURE(dc
, VIS1
);
4298 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4299 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4300 gen_helper_fcmpeq32(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4301 gen_movl_TN_reg(rd
, cpu_dst
);
4303 case 0x031: /* VIS I fmul8x16 */
4304 CHECK_FPU_FEATURE(dc
, VIS1
);
4305 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmul8x16
);
4307 case 0x033: /* VIS I fmul8x16au */
4308 CHECK_FPU_FEATURE(dc
, VIS1
);
4309 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmul8x16au
);
4311 case 0x035: /* VIS I fmul8x16al */
4312 CHECK_FPU_FEATURE(dc
, VIS1
);
4313 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmul8x16al
);
4315 case 0x036: /* VIS I fmul8sux16 */
4316 CHECK_FPU_FEATURE(dc
, VIS1
);
4317 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmul8sux16
);
4319 case 0x037: /* VIS I fmul8ulx16 */
4320 CHECK_FPU_FEATURE(dc
, VIS1
);
4321 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmul8ulx16
);
4323 case 0x038: /* VIS I fmuld8sux16 */
4324 CHECK_FPU_FEATURE(dc
, VIS1
);
4325 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmuld8sux16
);
4327 case 0x039: /* VIS I fmuld8ulx16 */
4328 CHECK_FPU_FEATURE(dc
, VIS1
);
4329 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmuld8ulx16
);
4331 case 0x03a: /* VIS I fpack32 */
4332 CHECK_FPU_FEATURE(dc
, VIS1
);
4333 gen_gsr_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fpack32
);
4335 case 0x03b: /* VIS I fpack16 */
4336 CHECK_FPU_FEATURE(dc
, VIS1
);
4337 cpu_src1_64
= gen_load_fpr_D(dc
, rs2
);
4338 cpu_dst_32
= gen_dest_fpr_F();
4339 gen_helper_fpack16(cpu_dst_32
, cpu_gsr
, cpu_src1_64
);
4340 gen_store_fpr_F(dc
, rd
, cpu_dst_32
);
4342 case 0x03d: /* VIS I fpackfix */
4343 CHECK_FPU_FEATURE(dc
, VIS1
);
4344 cpu_src1_64
= gen_load_fpr_D(dc
, rs2
);
4345 cpu_dst_32
= gen_dest_fpr_F();
4346 gen_helper_fpackfix(cpu_dst_32
, cpu_gsr
, cpu_src1_64
);
4347 gen_store_fpr_F(dc
, rd
, cpu_dst_32
);
4349 case 0x03e: /* VIS I pdist */
4350 CHECK_FPU_FEATURE(dc
, VIS1
);
4351 gen_ne_fop_DDDD(dc
, rd
, rs1
, rs2
, gen_helper_pdist
);
4353 case 0x048: /* VIS I faligndata */
4354 CHECK_FPU_FEATURE(dc
, VIS1
);
4355 gen_gsr_fop_DDD(dc
, rd
, rs1
, rs2
, gen_faligndata
);
4357 case 0x04b: /* VIS I fpmerge */
4358 CHECK_FPU_FEATURE(dc
, VIS1
);
4359 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fpmerge
);
4361 case 0x04c: /* VIS II bshuffle */
4362 CHECK_FPU_FEATURE(dc
, VIS2
);
4363 gen_gsr_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_bshuffle
);
4365 case 0x04d: /* VIS I fexpand */
4366 CHECK_FPU_FEATURE(dc
, VIS1
);
4367 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fexpand
);
4369 case 0x050: /* VIS I fpadd16 */
4370 CHECK_FPU_FEATURE(dc
, VIS1
);
4371 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fpadd16
);
4373 case 0x051: /* VIS I fpadd16s */
4374 CHECK_FPU_FEATURE(dc
, VIS1
);
4375 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, gen_helper_fpadd16s
);
4377 case 0x052: /* VIS I fpadd32 */
4378 CHECK_FPU_FEATURE(dc
, VIS1
);
4379 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fpadd32
);
4381 case 0x053: /* VIS I fpadd32s */
4382 CHECK_FPU_FEATURE(dc
, VIS1
);
4383 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_add_i32
);
4385 case 0x054: /* VIS I fpsub16 */
4386 CHECK_FPU_FEATURE(dc
, VIS1
);
4387 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fpsub16
);
4389 case 0x055: /* VIS I fpsub16s */
4390 CHECK_FPU_FEATURE(dc
, VIS1
);
4391 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, gen_helper_fpsub16s
);
4393 case 0x056: /* VIS I fpsub32 */
4394 CHECK_FPU_FEATURE(dc
, VIS1
);
4395 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fpsub32
);
4397 case 0x057: /* VIS I fpsub32s */
4398 CHECK_FPU_FEATURE(dc
, VIS1
);
4399 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_sub_i32
);
4401 case 0x060: /* VIS I fzero */
4402 CHECK_FPU_FEATURE(dc
, VIS1
);
4403 cpu_dst_64
= gen_dest_fpr_D();
4404 tcg_gen_movi_i64(cpu_dst_64
, 0);
4405 gen_store_fpr_D(dc
, rd
, cpu_dst_64
);
4407 case 0x061: /* VIS I fzeros */
4408 CHECK_FPU_FEATURE(dc
, VIS1
);
4409 cpu_dst_32
= gen_dest_fpr_F();
4410 tcg_gen_movi_i32(cpu_dst_32
, 0);
4411 gen_store_fpr_F(dc
, rd
, cpu_dst_32
);
4413 case 0x062: /* VIS I fnor */
4414 CHECK_FPU_FEATURE(dc
, VIS1
);
4415 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_nor_i64
);
4417 case 0x063: /* VIS I fnors */
4418 CHECK_FPU_FEATURE(dc
, VIS1
);
4419 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_nor_i32
);
4421 case 0x064: /* VIS I fandnot2 */
4422 CHECK_FPU_FEATURE(dc
, VIS1
);
4423 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_andc_i64
);
4425 case 0x065: /* VIS I fandnot2s */
4426 CHECK_FPU_FEATURE(dc
, VIS1
);
4427 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_andc_i32
);
4429 case 0x066: /* VIS I fnot2 */
4430 CHECK_FPU_FEATURE(dc
, VIS1
);
4431 gen_ne_fop_DD(dc
, rd
, rs2
, tcg_gen_not_i64
);
4433 case 0x067: /* VIS I fnot2s */
4434 CHECK_FPU_FEATURE(dc
, VIS1
);
4435 gen_ne_fop_FF(dc
, rd
, rs2
, tcg_gen_not_i32
);
4437 case 0x068: /* VIS I fandnot1 */
4438 CHECK_FPU_FEATURE(dc
, VIS1
);
4439 gen_ne_fop_DDD(dc
, rd
, rs2
, rs1
, tcg_gen_andc_i64
);
4441 case 0x069: /* VIS I fandnot1s */
4442 CHECK_FPU_FEATURE(dc
, VIS1
);
4443 gen_ne_fop_FFF(dc
, rd
, rs2
, rs1
, tcg_gen_andc_i32
);
4445 case 0x06a: /* VIS I fnot1 */
4446 CHECK_FPU_FEATURE(dc
, VIS1
);
4447 gen_ne_fop_DD(dc
, rd
, rs1
, tcg_gen_not_i64
);
4449 case 0x06b: /* VIS I fnot1s */
4450 CHECK_FPU_FEATURE(dc
, VIS1
);
4451 gen_ne_fop_FF(dc
, rd
, rs1
, tcg_gen_not_i32
);
4453 case 0x06c: /* VIS I fxor */
4454 CHECK_FPU_FEATURE(dc
, VIS1
);
4455 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_xor_i64
);
4457 case 0x06d: /* VIS I fxors */
4458 CHECK_FPU_FEATURE(dc
, VIS1
);
4459 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_xor_i32
);
4461 case 0x06e: /* VIS I fnand */
4462 CHECK_FPU_FEATURE(dc
, VIS1
);
4463 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_nand_i64
);
4465 case 0x06f: /* VIS I fnands */
4466 CHECK_FPU_FEATURE(dc
, VIS1
);
4467 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_nand_i32
);
4469 case 0x070: /* VIS I fand */
4470 CHECK_FPU_FEATURE(dc
, VIS1
);
4471 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_and_i64
);
4473 case 0x071: /* VIS I fands */
4474 CHECK_FPU_FEATURE(dc
, VIS1
);
4475 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_and_i32
);
4477 case 0x072: /* VIS I fxnor */
4478 CHECK_FPU_FEATURE(dc
, VIS1
);
4479 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_eqv_i64
);
4481 case 0x073: /* VIS I fxnors */
4482 CHECK_FPU_FEATURE(dc
, VIS1
);
4483 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_eqv_i32
);
4485 case 0x074: /* VIS I fsrc1 */
4486 CHECK_FPU_FEATURE(dc
, VIS1
);
4487 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4488 gen_store_fpr_D(dc
, rd
, cpu_src1_64
);
4490 case 0x075: /* VIS I fsrc1s */
4491 CHECK_FPU_FEATURE(dc
, VIS1
);
4492 cpu_src1_32
= gen_load_fpr_F(dc
, rs1
);
4493 gen_store_fpr_F(dc
, rd
, cpu_src1_32
);
4495 case 0x076: /* VIS I fornot2 */
4496 CHECK_FPU_FEATURE(dc
, VIS1
);
4497 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_orc_i64
);
4499 case 0x077: /* VIS I fornot2s */
4500 CHECK_FPU_FEATURE(dc
, VIS1
);
4501 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_orc_i32
);
4503 case 0x078: /* VIS I fsrc2 */
4504 CHECK_FPU_FEATURE(dc
, VIS1
);
4505 cpu_src1_64
= gen_load_fpr_D(dc
, rs2
);
4506 gen_store_fpr_D(dc
, rd
, cpu_src1_64
);
4508 case 0x079: /* VIS I fsrc2s */
4509 CHECK_FPU_FEATURE(dc
, VIS1
);
4510 cpu_src1_32
= gen_load_fpr_F(dc
, rs2
);
4511 gen_store_fpr_F(dc
, rd
, cpu_src1_32
);
4513 case 0x07a: /* VIS I fornot1 */
4514 CHECK_FPU_FEATURE(dc
, VIS1
);
4515 gen_ne_fop_DDD(dc
, rd
, rs2
, rs1
, tcg_gen_orc_i64
);
4517 case 0x07b: /* VIS I fornot1s */
4518 CHECK_FPU_FEATURE(dc
, VIS1
);
4519 gen_ne_fop_FFF(dc
, rd
, rs2
, rs1
, tcg_gen_orc_i32
);
4521 case 0x07c: /* VIS I for */
4522 CHECK_FPU_FEATURE(dc
, VIS1
);
4523 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_or_i64
);
4525 case 0x07d: /* VIS I fors */
4526 CHECK_FPU_FEATURE(dc
, VIS1
);
4527 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_or_i32
);
4529 case 0x07e: /* VIS I fone */
4530 CHECK_FPU_FEATURE(dc
, VIS1
);
4531 cpu_dst_64
= gen_dest_fpr_D();
4532 tcg_gen_movi_i64(cpu_dst_64
, -1);
4533 gen_store_fpr_D(dc
, rd
, cpu_dst_64
);
4535 case 0x07f: /* VIS I fones */
4536 CHECK_FPU_FEATURE(dc
, VIS1
);
4537 cpu_dst_32
= gen_dest_fpr_F();
4538 tcg_gen_movi_i32(cpu_dst_32
, -1);
4539 gen_store_fpr_F(dc
, rd
, cpu_dst_32
);
4541 case 0x080: /* VIS I shutdown */
4542 case 0x081: /* VIS II siam */
4551 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4552 #ifdef TARGET_SPARC64
4557 #ifdef TARGET_SPARC64
4558 } else if (xop
== 0x39) { /* V9 return */
4562 cpu_src1
= get_src1(insn
, cpu_src1
);
4563 if (IS_IMM
) { /* immediate */
4564 simm
= GET_FIELDs(insn
, 19, 31);
4565 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4566 } else { /* register */
4567 rs2
= GET_FIELD(insn
, 27, 31);
4569 gen_movl_reg_TN(rs2
, cpu_src2
);
4570 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4572 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4574 gen_helper_restore(cpu_env
);
4576 r_const
= tcg_const_i32(3);
4577 gen_helper_check_align(cpu_env
, cpu_dst
, r_const
);
4578 tcg_temp_free_i32(r_const
);
4579 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4580 dc
->npc
= DYNAMIC_PC
;
4584 cpu_src1
= get_src1(insn
, cpu_src1
);
4585 if (IS_IMM
) { /* immediate */
4586 simm
= GET_FIELDs(insn
, 19, 31);
4587 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4588 } else { /* register */
4589 rs2
= GET_FIELD(insn
, 27, 31);
4591 gen_movl_reg_TN(rs2
, cpu_src2
);
4592 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4594 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4597 case 0x38: /* jmpl */
4602 r_pc
= tcg_const_tl(dc
->pc
);
4603 gen_movl_TN_reg(rd
, r_pc
);
4604 tcg_temp_free(r_pc
);
4606 r_const
= tcg_const_i32(3);
4607 gen_helper_check_align(cpu_env
, cpu_dst
, r_const
);
4608 tcg_temp_free_i32(r_const
);
4609 gen_address_mask(dc
, cpu_dst
);
4610 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4611 dc
->npc
= DYNAMIC_PC
;
4614 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4615 case 0x39: /* rett, V9 return */
4619 if (!supervisor(dc
))
4622 r_const
= tcg_const_i32(3);
4623 gen_helper_check_align(cpu_env
, cpu_dst
, r_const
);
4624 tcg_temp_free_i32(r_const
);
4625 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4626 dc
->npc
= DYNAMIC_PC
;
4627 gen_helper_rett(cpu_env
);
4631 case 0x3b: /* flush */
4632 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4636 case 0x3c: /* save */
4638 gen_helper_save(cpu_env
);
4639 gen_movl_TN_reg(rd
, cpu_dst
);
4641 case 0x3d: /* restore */
4643 gen_helper_restore(cpu_env
);
4644 gen_movl_TN_reg(rd
, cpu_dst
);
4646 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4647 case 0x3e: /* V9 done/retry */
4651 if (!supervisor(dc
))
4653 dc
->npc
= DYNAMIC_PC
;
4654 dc
->pc
= DYNAMIC_PC
;
4655 gen_helper_done(cpu_env
);
4658 if (!supervisor(dc
))
4660 dc
->npc
= DYNAMIC_PC
;
4661 dc
->pc
= DYNAMIC_PC
;
4662 gen_helper_retry(cpu_env
);
4677 case 3: /* load/store instructions */
4679 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4681 /* flush pending conditional evaluations before exposing
4683 if (dc
->cc_op
!= CC_OP_FLAGS
) {
4684 dc
->cc_op
= CC_OP_FLAGS
;
4685 gen_helper_compute_psr(cpu_env
);
4687 cpu_src1
= get_src1(insn
, cpu_src1
);
4688 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4689 rs2
= GET_FIELD(insn
, 27, 31);
4690 gen_movl_reg_TN(rs2
, cpu_src2
);
4691 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4692 } else if (IS_IMM
) { /* immediate */
4693 simm
= GET_FIELDs(insn
, 19, 31);
4694 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, simm
);
4695 } else { /* register */
4696 rs2
= GET_FIELD(insn
, 27, 31);
4698 gen_movl_reg_TN(rs2
, cpu_src2
);
4699 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4701 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4703 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4704 (xop
> 0x17 && xop
<= 0x1d ) ||
4705 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4707 case 0x0: /* ld, V9 lduw, load unsigned word */
4708 gen_address_mask(dc
, cpu_addr
);
4709 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4711 case 0x1: /* ldub, load unsigned byte */
4712 gen_address_mask(dc
, cpu_addr
);
4713 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4715 case 0x2: /* lduh, load unsigned halfword */
4716 gen_address_mask(dc
, cpu_addr
);
4717 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4719 case 0x3: /* ldd, load double word */
4726 r_const
= tcg_const_i32(7);
4727 /* XXX remove alignment check */
4728 gen_helper_check_align(cpu_env
, cpu_addr
, r_const
);
4729 tcg_temp_free_i32(r_const
);
4730 gen_address_mask(dc
, cpu_addr
);
4731 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4732 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4733 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4734 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4735 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4736 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4737 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4740 case 0x9: /* ldsb, load signed byte */
4741 gen_address_mask(dc
, cpu_addr
);
4742 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4744 case 0xa: /* ldsh, load signed halfword */
4745 gen_address_mask(dc
, cpu_addr
);
4746 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4748 case 0xd: /* ldstub -- XXX: should be atomically */
4752 gen_address_mask(dc
, cpu_addr
);
4753 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4754 r_const
= tcg_const_tl(0xff);
4755 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4756 tcg_temp_free(r_const
);
4759 case 0x0f: /* swap, swap register with memory. Also
4761 CHECK_IU_FEATURE(dc
, SWAP
);
4762 gen_movl_reg_TN(rd
, cpu_val
);
4763 gen_address_mask(dc
, cpu_addr
);
4764 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4765 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4766 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4768 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4769 case 0x10: /* lda, V9 lduwa, load word alternate */
4770 #ifndef TARGET_SPARC64
4773 if (!supervisor(dc
))
4777 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4779 case 0x11: /* lduba, load unsigned byte alternate */
4780 #ifndef TARGET_SPARC64
4783 if (!supervisor(dc
))
4787 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4789 case 0x12: /* lduha, load unsigned halfword alternate */
4790 #ifndef TARGET_SPARC64
4793 if (!supervisor(dc
))
4797 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4799 case 0x13: /* ldda, load double word alternate */
4800 #ifndef TARGET_SPARC64
4803 if (!supervisor(dc
))
4809 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4811 case 0x19: /* ldsba, load signed byte alternate */
4812 #ifndef TARGET_SPARC64
4815 if (!supervisor(dc
))
4819 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4821 case 0x1a: /* ldsha, load signed halfword alternate */
4822 #ifndef TARGET_SPARC64
4825 if (!supervisor(dc
))
4829 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4831 case 0x1d: /* ldstuba -- XXX: should be atomically */
4832 #ifndef TARGET_SPARC64
4835 if (!supervisor(dc
))
4839 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4841 case 0x1f: /* swapa, swap reg with alt. memory. Also
4843 CHECK_IU_FEATURE(dc
, SWAP
);
4844 #ifndef TARGET_SPARC64
4847 if (!supervisor(dc
))
4851 gen_movl_reg_TN(rd
, cpu_val
);
4852 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4855 #ifndef TARGET_SPARC64
4856 case 0x30: /* ldc */
4857 case 0x31: /* ldcsr */
4858 case 0x33: /* lddc */
4862 #ifdef TARGET_SPARC64
4863 case 0x08: /* V9 ldsw */
4864 gen_address_mask(dc
, cpu_addr
);
4865 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4867 case 0x0b: /* V9 ldx */
4868 gen_address_mask(dc
, cpu_addr
);
4869 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4871 case 0x18: /* V9 ldswa */
4873 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4875 case 0x1b: /* V9 ldxa */
4877 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4879 case 0x2d: /* V9 prefetch, no effect */
4881 case 0x30: /* V9 ldfa */
4882 if (gen_trap_ifnofpu(dc
)) {
4886 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4887 gen_update_fprs_dirty(rd
);
4889 case 0x33: /* V9 lddfa */
4890 if (gen_trap_ifnofpu(dc
)) {
4894 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4895 gen_update_fprs_dirty(DFPREG(rd
));
4897 case 0x3d: /* V9 prefetcha, no effect */
4899 case 0x32: /* V9 ldqfa */
4900 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4901 if (gen_trap_ifnofpu(dc
)) {
4905 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4906 gen_update_fprs_dirty(QFPREG(rd
));
4912 gen_movl_TN_reg(rd
, cpu_val
);
4913 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4916 } else if (xop
>= 0x20 && xop
< 0x24) {
4917 if (gen_trap_ifnofpu(dc
)) {
4922 case 0x20: /* ldf, load fpreg */
4923 gen_address_mask(dc
, cpu_addr
);
4924 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4925 cpu_dst_32
= gen_dest_fpr_F();
4926 tcg_gen_trunc_tl_i32(cpu_dst_32
, cpu_tmp0
);
4927 gen_store_fpr_F(dc
, rd
, cpu_dst_32
);
4929 case 0x21: /* ldfsr, V9 ldxfsr */
4930 #ifdef TARGET_SPARC64
4931 gen_address_mask(dc
, cpu_addr
);
4933 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4934 gen_helper_ldxfsr(cpu_env
, cpu_tmp64
);
4936 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4937 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
4938 gen_helper_ldfsr(cpu_env
, cpu_tmp32
);
4942 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4943 gen_helper_ldfsr(cpu_env
, cpu_tmp32
);
4947 case 0x22: /* ldqf, load quad fpreg */
4951 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4952 r_const
= tcg_const_i32(dc
->mem_idx
);
4953 gen_address_mask(dc
, cpu_addr
);
4954 gen_helper_ldqf(cpu_env
, cpu_addr
, r_const
);
4955 tcg_temp_free_i32(r_const
);
4956 gen_op_store_QT0_fpr(QFPREG(rd
));
4957 gen_update_fprs_dirty(QFPREG(rd
));
4960 case 0x23: /* lddf, load double fpreg */
4961 gen_address_mask(dc
, cpu_addr
);
4962 cpu_dst_64
= gen_dest_fpr_D();
4963 tcg_gen_qemu_ld64(cpu_dst_64
, cpu_addr
, dc
->mem_idx
);
4964 gen_store_fpr_D(dc
, rd
, cpu_dst_64
);
4969 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) ||
4970 xop
== 0xe || xop
== 0x1e) {
4971 gen_movl_reg_TN(rd
, cpu_val
);
4973 case 0x4: /* st, store word */
4974 gen_address_mask(dc
, cpu_addr
);
4975 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4977 case 0x5: /* stb, store byte */
4978 gen_address_mask(dc
, cpu_addr
);
4979 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4981 case 0x6: /* sth, store halfword */
4982 gen_address_mask(dc
, cpu_addr
);
4983 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4985 case 0x7: /* std, store double word */
4992 gen_address_mask(dc
, cpu_addr
);
4993 r_const
= tcg_const_i32(7);
4994 /* XXX remove alignment check */
4995 gen_helper_check_align(cpu_env
, cpu_addr
, r_const
);
4996 tcg_temp_free_i32(r_const
);
4997 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4998 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4999 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
5002 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5003 case 0x14: /* sta, V9 stwa, store word alternate */
5004 #ifndef TARGET_SPARC64
5007 if (!supervisor(dc
))
5011 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
5012 dc
->npc
= DYNAMIC_PC
;
5014 case 0x15: /* stba, store byte alternate */
5015 #ifndef TARGET_SPARC64
5018 if (!supervisor(dc
))
5022 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
5023 dc
->npc
= DYNAMIC_PC
;
5025 case 0x16: /* stha, store halfword alternate */
5026 #ifndef TARGET_SPARC64
5029 if (!supervisor(dc
))
5033 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
5034 dc
->npc
= DYNAMIC_PC
;
5036 case 0x17: /* stda, store double word alternate */
5037 #ifndef TARGET_SPARC64
5040 if (!supervisor(dc
))
5047 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
5051 #ifdef TARGET_SPARC64
5052 case 0x0e: /* V9 stx */
5053 gen_address_mask(dc
, cpu_addr
);
5054 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
5056 case 0x1e: /* V9 stxa */
5058 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
5059 dc
->npc
= DYNAMIC_PC
;
5065 } else if (xop
> 0x23 && xop
< 0x28) {
5066 if (gen_trap_ifnofpu(dc
)) {
5071 case 0x24: /* stf, store fpreg */
5072 gen_address_mask(dc
, cpu_addr
);
5073 cpu_src1_32
= gen_load_fpr_F(dc
, rd
);
5074 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_src1_32
);
5075 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
5077 case 0x25: /* stfsr, V9 stxfsr */
5078 #ifdef TARGET_SPARC64
5079 gen_address_mask(dc
, cpu_addr
);
5080 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
5082 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
5084 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
5086 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
5087 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
5091 #ifdef TARGET_SPARC64
5092 /* V9 stqf, store quad fpreg */
5096 CHECK_FPU_FEATURE(dc
, FLOAT128
);
5097 gen_op_load_fpr_QT0(QFPREG(rd
));
5098 r_const
= tcg_const_i32(dc
->mem_idx
);
5099 gen_address_mask(dc
, cpu_addr
);
5100 gen_helper_stqf(cpu_env
, cpu_addr
, r_const
);
5101 tcg_temp_free_i32(r_const
);
5104 #else /* !TARGET_SPARC64 */
5105 /* stdfq, store floating point queue */
5106 #if defined(CONFIG_USER_ONLY)
5109 if (!supervisor(dc
))
5111 if (gen_trap_ifnofpu(dc
)) {
5117 case 0x27: /* stdf, store double fpreg */
5118 gen_address_mask(dc
, cpu_addr
);
5119 cpu_src1_64
= gen_load_fpr_D(dc
, rd
);
5120 tcg_gen_qemu_st64(cpu_src1_64
, cpu_addr
, dc
->mem_idx
);
5125 } else if (xop
> 0x33 && xop
< 0x3f) {
5128 #ifdef TARGET_SPARC64
5129 case 0x34: /* V9 stfa */
5130 if (gen_trap_ifnofpu(dc
)) {
5133 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
5135 case 0x36: /* V9 stqfa */
5139 CHECK_FPU_FEATURE(dc
, FLOAT128
);
5140 if (gen_trap_ifnofpu(dc
)) {
5143 r_const
= tcg_const_i32(7);
5144 gen_helper_check_align(cpu_env
, cpu_addr
, r_const
);
5145 tcg_temp_free_i32(r_const
);
5146 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
5149 case 0x37: /* V9 stdfa */
5150 if (gen_trap_ifnofpu(dc
)) {
5153 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
5155 case 0x3c: /* V9 casa */
5156 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
5157 gen_movl_TN_reg(rd
, cpu_val
);
5159 case 0x3e: /* V9 casxa */
5160 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
5161 gen_movl_TN_reg(rd
, cpu_val
);
5164 case 0x34: /* stc */
5165 case 0x35: /* stcsr */
5166 case 0x36: /* stdcq */
5167 case 0x37: /* stdc */
5178 /* default case for non jump instructions */
5179 if (dc
->npc
== DYNAMIC_PC
) {
5180 dc
->pc
= DYNAMIC_PC
;
5182 } else if (dc
->npc
== JUMP_PC
) {
5183 /* we can do a static jump */
5184 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
5188 dc
->npc
= dc
->npc
+ 4;
5197 r_const
= tcg_const_i32(TT_ILL_INSN
);
5198 gen_helper_raise_exception(cpu_env
, r_const
);
5199 tcg_temp_free_i32(r_const
);
5208 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
5209 gen_helper_raise_exception(cpu_env
, r_const
);
5210 tcg_temp_free_i32(r_const
);
5214 #if !defined(CONFIG_USER_ONLY)
5220 r_const
= tcg_const_i32(TT_PRIV_INSN
);
5221 gen_helper_raise_exception(cpu_env
, r_const
);
5222 tcg_temp_free_i32(r_const
);
5229 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
5232 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5235 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
5239 #ifndef TARGET_SPARC64
5245 r_const
= tcg_const_i32(TT_NCP_INSN
);
5246 gen_helper_raise_exception(cpu_env
, r_const
);
5247 tcg_temp_free(r_const
);
5253 tcg_temp_free(cpu_tmp1
);
5254 tcg_temp_free(cpu_tmp2
);
5255 if (dc
->n_t32
!= 0) {
5257 for (i
= dc
->n_t32
- 1; i
>= 0; --i
) {
5258 tcg_temp_free_i32(dc
->t32
[i
]);
5264 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
5265 int spc
, CPUSPARCState
*env
)
5267 target_ulong pc_start
, last_pc
;
5268 uint16_t *gen_opc_end
;
5269 DisasContext dc1
, *dc
= &dc1
;
5276 memset(dc
, 0, sizeof(DisasContext
));
5281 dc
->npc
= (target_ulong
) tb
->cs_base
;
5282 dc
->cc_op
= CC_OP_DYNAMIC
;
5283 dc
->mem_idx
= cpu_mmu_index(env
);
5285 dc
->fpu_enabled
= tb_fpu_enabled(tb
->flags
);
5286 dc
->address_mask_32bit
= tb_am_enabled(tb
->flags
);
5287 dc
->singlestep
= (env
->singlestep_enabled
|| singlestep
);
5288 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
5290 cpu_tmp0
= tcg_temp_new();
5291 cpu_tmp32
= tcg_temp_new_i32();
5292 cpu_tmp64
= tcg_temp_new_i64();
5294 cpu_dst
= tcg_temp_local_new();
5297 cpu_val
= tcg_temp_local_new();
5298 cpu_addr
= tcg_temp_local_new();
5301 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
5303 max_insns
= CF_COUNT_MASK
;
5306 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
5307 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
5308 if (bp
->pc
== dc
->pc
) {
5309 if (dc
->pc
!= pc_start
)
5311 gen_helper_debug(cpu_env
);
5319 qemu_log("Search PC...\n");
5320 j
= gen_opc_ptr
- gen_opc_buf
;
5324 gen_opc_instr_start
[lj
++] = 0;
5325 gen_opc_pc
[lj
] = dc
->pc
;
5326 gen_opc_npc
[lj
] = dc
->npc
;
5327 gen_opc_instr_start
[lj
] = 1;
5328 gen_opc_icount
[lj
] = num_insns
;
5331 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
5334 insn
= cpu_ldl_code(env
, dc
->pc
);
5335 disas_sparc_insn(dc
, insn
);
5340 /* if the next PC is different, we abort now */
5341 if (dc
->pc
!= (last_pc
+ 4))
5343 /* if we reach a page boundary, we stop generation so that the
5344 PC of a TT_TFAULT exception is always in the right page */
5345 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
5347 /* if single step mode, we generate only one instruction and
5348 generate an exception */
5349 if (dc
->singlestep
) {
5352 } while ((gen_opc_ptr
< gen_opc_end
) &&
5353 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
5354 num_insns
< max_insns
);
5357 tcg_temp_free(cpu_addr
);
5358 tcg_temp_free(cpu_val
);
5359 tcg_temp_free(cpu_dst
);
5360 tcg_temp_free_i64(cpu_tmp64
);
5361 tcg_temp_free_i32(cpu_tmp32
);
5362 tcg_temp_free(cpu_tmp0
);
5364 if (tb
->cflags
& CF_LAST_IO
)
5367 if (dc
->pc
!= DYNAMIC_PC
&&
5368 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
5369 /* static PC and NPC: we can use direct chaining */
5370 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
5372 if (dc
->pc
!= DYNAMIC_PC
)
5373 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
5378 gen_icount_end(tb
, num_insns
);
5379 *gen_opc_ptr
= INDEX_op_end
;
5381 j
= gen_opc_ptr
- gen_opc_buf
;
5384 gen_opc_instr_start
[lj
++] = 0;
5388 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
5389 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
5391 tb
->size
= last_pc
+ 4 - pc_start
;
5392 tb
->icount
= num_insns
;
5395 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
5396 qemu_log("--------------\n");
5397 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
5398 log_target_disas(pc_start
, last_pc
+ 4 - pc_start
, 0);
5404 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
5406 gen_intermediate_code_internal(tb
, 0, env
);
5409 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
5411 gen_intermediate_code_internal(tb
, 1, env
);
5414 void gen_intermediate_code_init(CPUSPARCState
*env
)
5418 static const char * const gregnames
[8] = {
5419 NULL
, // g0 not used
5428 static const char * const fregnames
[32] = {
5429 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5430 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5431 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5432 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5435 /* init various static tables */
5439 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
5440 cpu_regwptr
= tcg_global_mem_new_ptr(TCG_AREG0
,
5441 offsetof(CPUSPARCState
, regwptr
),
5443 #ifdef TARGET_SPARC64
5444 cpu_xcc
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUSPARCState
, xcc
),
5446 cpu_asi
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUSPARCState
, asi
),
5448 cpu_fprs
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUSPARCState
, fprs
),
5450 cpu_gsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, gsr
),
5452 cpu_tick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5453 offsetof(CPUSPARCState
, tick_cmpr
),
5455 cpu_stick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5456 offsetof(CPUSPARCState
, stick_cmpr
),
5458 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5459 offsetof(CPUSPARCState
, hstick_cmpr
),
5461 cpu_hintp
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, hintp
),
5463 cpu_htba
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, htba
),
5465 cpu_hver
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, hver
),
5467 cpu_ssr
= tcg_global_mem_new(TCG_AREG0
,
5468 offsetof(CPUSPARCState
, ssr
), "ssr");
5469 cpu_ver
= tcg_global_mem_new(TCG_AREG0
,
5470 offsetof(CPUSPARCState
, version
), "ver");
5471 cpu_softint
= tcg_global_mem_new_i32(TCG_AREG0
,
5472 offsetof(CPUSPARCState
, softint
),
5475 cpu_wim
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, wim
),
5478 cpu_cond
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, cond
),
5480 cpu_cc_src
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, cc_src
),
5482 cpu_cc_src2
= tcg_global_mem_new(TCG_AREG0
,
5483 offsetof(CPUSPARCState
, cc_src2
),
5485 cpu_cc_dst
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, cc_dst
),
5487 cpu_cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUSPARCState
, cc_op
),
5489 cpu_psr
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUSPARCState
, psr
),
5491 cpu_fsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, fsr
),
5493 cpu_pc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, pc
),
5495 cpu_npc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, npc
),
5497 cpu_y
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, y
), "y");
5498 #ifndef CONFIG_USER_ONLY
5499 cpu_tbr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, tbr
),
5502 for (i
= 1; i
< 8; i
++) {
5503 cpu_gregs
[i
] = tcg_global_mem_new(TCG_AREG0
,
5504 offsetof(CPUSPARCState
, gregs
[i
]),
5507 for (i
= 0; i
< TARGET_DPREGS
; i
++) {
5508 cpu_fpr
[i
] = tcg_global_mem_new_i64(TCG_AREG0
,
5509 offsetof(CPUSPARCState
, fpr
[i
]),
5513 /* register helpers */
5515 #define GEN_HELPER 2
5520 void restore_state_to_opc(CPUSPARCState
*env
, TranslationBlock
*tb
, int pc_pos
)
5523 env
->pc
= gen_opc_pc
[pc_pos
];
5524 npc
= gen_opc_npc
[pc_pos
];
5526 /* dynamic NPC: already stored */
5527 } else if (npc
== 2) {
5528 /* jump PC: use 'cond' and the jump targets of the translation */
5530 env
->npc
= gen_opc_jump_pc
[0];
5532 env
->npc
= gen_opc_jump_pc
[1];
5538 /* flush pending conditional evaluations before exposing cpu state */
5539 if (CC_OP
!= CC_OP_FLAGS
) {
5540 helper_compute_psr(env
);