4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
37 #define DYNAMIC_PC 1 /* dynamic pc value */
38 #define JUMP_PC 2 /* dynamic pc value which takes only two values
39 according to jump_pc[T2] */
41 /* global register indexes */
42 static TCGv_ptr cpu_env
, cpu_regwptr
;
43 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
44 static TCGv_i32 cpu_cc_op
;
45 static TCGv_i32 cpu_psr
;
46 static TCGv cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
48 #ifndef CONFIG_USER_ONLY
51 static TCGv cpu_cond
, cpu_dst
, cpu_addr
, cpu_val
;
53 static TCGv_i32 cpu_xcc
, cpu_asi
, cpu_fprs
;
55 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
56 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
57 static TCGv_i32 cpu_softint
;
61 /* local register indexes (only used inside old micro ops) */
63 static TCGv_i32 cpu_tmp32
;
64 static TCGv_i64 cpu_tmp64
;
65 /* Floating point registers */
66 static TCGv_i64 cpu_fpr
[TARGET_DPREGS
];
68 static target_ulong gen_opc_npc
[OPC_BUF_SIZE
];
69 static target_ulong gen_opc_jump_pc
[2];
71 #include "gen-icount.h"
73 typedef struct DisasContext
{
74 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
75 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
80 int address_mask_32bit
;
82 uint32_t cc_op
; /* current CC operation */
83 struct TranslationBlock
*tb
;
89 // This function uses non-native bit order
90 #define GET_FIELD(X, FROM, TO) \
91 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
93 // This function uses the order in the manuals, i.e. bit 0 is 2^0
94 #define GET_FIELD_SP(X, FROM, TO) \
95 GET_FIELD(X, 31 - (TO), 31 - (FROM))
97 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
98 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
100 #ifdef TARGET_SPARC64
101 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
102 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
104 #define DFPREG(r) (r & 0x1e)
105 #define QFPREG(r) (r & 0x1c)
108 #define UA2005_HTRAP_MASK 0xff
109 #define V8_TRAP_MASK 0x7f
/*
 * Sign-extend the low LEN bits of X to a full signed int.
 * Used by GET_FIELDs/GET_FIELD_SPs to decode signed immediate
 * instruction fields (LEN is the field width in bits).
 */
static int sign_extend(int x, int len)
{
    /* Move the field's sign bit up to bit 31, then arithmetic-shift it
       back down so it is replicated through the upper bits.  The left
       shift is done on an unsigned value to avoid signed-overflow UB. */
    int shift = 32 - len;
    return (int)((unsigned int)x << shift) >> shift;
}
117 #define IS_IMM (insn & (1<<13))
/* Mark the touched half of the FP register file dirty in FPRS
   (sparc64 only): bit 0 for regs below 32, bit 1 otherwise. */
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
126 /* floating point registers moves */
127 static TCGv_i32
gen_load_fpr_F(DisasContext
*dc
, unsigned int src
)
129 #if TCG_TARGET_REG_BITS == 32
131 return TCGV_LOW(cpu_fpr
[src
/ 2]);
133 return TCGV_HIGH(cpu_fpr
[src
/ 2]);
137 return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr
[src
/ 2]));
139 TCGv_i32 ret
= tcg_temp_local_new_i32();
140 TCGv_i64 t
= tcg_temp_new_i64();
142 tcg_gen_shri_i64(t
, cpu_fpr
[src
/ 2], 32);
143 tcg_gen_trunc_i64_i32(ret
, t
);
144 tcg_temp_free_i64(t
);
146 dc
->t32
[dc
->n_t32
++] = ret
;
147 assert(dc
->n_t32
<= ARRAY_SIZE(dc
->t32
));
154 static void gen_store_fpr_F(DisasContext
*dc
, unsigned int dst
, TCGv_i32 v
)
156 #if TCG_TARGET_REG_BITS == 32
158 tcg_gen_mov_i32(TCGV_LOW(cpu_fpr
[dst
/ 2]), v
);
160 tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr
[dst
/ 2]), v
);
163 TCGv_i64 t
= MAKE_TCGV_I64(GET_TCGV_I32(v
));
164 tcg_gen_deposit_i64(cpu_fpr
[dst
/ 2], cpu_fpr
[dst
/ 2], t
,
165 (dst
& 1 ? 0 : 32), 32);
167 gen_update_fprs_dirty(dst
);
170 static TCGv_i32
gen_dest_fpr_F(void)
175 static TCGv_i64
gen_load_fpr_D(DisasContext
*dc
, unsigned int src
)
178 return cpu_fpr
[src
/ 2];
181 static void gen_store_fpr_D(DisasContext
*dc
, unsigned int dst
, TCGv_i64 v
)
184 tcg_gen_mov_i64(cpu_fpr
[dst
/ 2], v
);
185 gen_update_fprs_dirty(dst
);
188 static TCGv_i64
gen_dest_fpr_D(void)
193 static void gen_op_load_fpr_QT0(unsigned int src
)
195 tcg_gen_st_i64(cpu_fpr
[src
/ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
196 offsetof(CPU_QuadU
, ll
.upper
));
197 tcg_gen_st_i64(cpu_fpr
[src
/2 + 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
198 offsetof(CPU_QuadU
, ll
.lower
));
201 static void gen_op_load_fpr_QT1(unsigned int src
)
203 tcg_gen_st_i64(cpu_fpr
[src
/ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
204 offsetof(CPU_QuadU
, ll
.upper
));
205 tcg_gen_st_i64(cpu_fpr
[src
/2 + 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
206 offsetof(CPU_QuadU
, ll
.lower
));
209 static void gen_op_store_QT0_fpr(unsigned int dst
)
211 tcg_gen_ld_i64(cpu_fpr
[dst
/ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
212 offsetof(CPU_QuadU
, ll
.upper
));
213 tcg_gen_ld_i64(cpu_fpr
[dst
/2 + 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
214 offsetof(CPU_QuadU
, ll
.lower
));
217 #ifdef TARGET_SPARC64
218 static void gen_move_Q(unsigned int rd
, unsigned int rs
)
223 tcg_gen_mov_i64(cpu_fpr
[rd
/ 2], cpu_fpr
[rs
/ 2]);
224 tcg_gen_mov_i64(cpu_fpr
[rd
/ 2 + 1], cpu_fpr
[rs
/ 2 + 1]);
225 gen_update_fprs_dirty(rd
);
230 #ifdef CONFIG_USER_ONLY
231 #define supervisor(dc) 0
232 #ifdef TARGET_SPARC64
233 #define hypervisor(dc) 0
236 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
237 #ifdef TARGET_SPARC64
238 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
243 #ifdef TARGET_SPARC64
245 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
247 #define AM_CHECK(dc) (1)
251 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
253 #ifdef TARGET_SPARC64
255 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
259 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
262 tcg_gen_movi_tl(tn
, 0);
264 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
266 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
270 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
275 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
277 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
281 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
282 target_ulong pc
, target_ulong npc
)
284 TranslationBlock
*tb
;
287 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
288 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
290 /* jump to same page: we can use a direct jump */
291 tcg_gen_goto_tb(tb_num
);
292 tcg_gen_movi_tl(cpu_pc
, pc
);
293 tcg_gen_movi_tl(cpu_npc
, npc
);
294 tcg_gen_exit_tb((tcg_target_long
)tb
+ tb_num
);
296 /* jump to another page: currently not optimized */
297 tcg_gen_movi_tl(cpu_pc
, pc
);
298 tcg_gen_movi_tl(cpu_npc
, npc
);
304 static inline void gen_mov_reg_N(TCGv reg
, TCGv_i32 src
)
306 tcg_gen_extu_i32_tl(reg
, src
);
307 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
308 tcg_gen_andi_tl(reg
, reg
, 0x1);
311 static inline void gen_mov_reg_Z(TCGv reg
, TCGv_i32 src
)
313 tcg_gen_extu_i32_tl(reg
, src
);
314 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
315 tcg_gen_andi_tl(reg
, reg
, 0x1);
318 static inline void gen_mov_reg_V(TCGv reg
, TCGv_i32 src
)
320 tcg_gen_extu_i32_tl(reg
, src
);
321 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
322 tcg_gen_andi_tl(reg
, reg
, 0x1);
325 static inline void gen_mov_reg_C(TCGv reg
, TCGv_i32 src
)
327 tcg_gen_extu_i32_tl(reg
, src
);
328 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
329 tcg_gen_andi_tl(reg
, reg
, 0x1);
332 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
338 l1
= gen_new_label();
340 r_temp
= tcg_temp_new();
341 tcg_gen_xor_tl(r_temp
, src1
, src2
);
342 tcg_gen_not_tl(r_temp
, r_temp
);
343 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
344 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
345 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
346 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
347 r_const
= tcg_const_i32(TT_TOVF
);
348 gen_helper_raise_exception(cpu_env
, r_const
);
349 tcg_temp_free_i32(r_const
);
351 tcg_temp_free(r_temp
);
354 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
359 l1
= gen_new_label();
360 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
361 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
362 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
363 r_const
= tcg_const_i32(TT_TOVF
);
364 gen_helper_raise_exception(cpu_env
, r_const
);
365 tcg_temp_free_i32(r_const
);
369 static inline void gen_op_addi_cc(TCGv dst
, TCGv src1
, target_long src2
)
371 tcg_gen_mov_tl(cpu_cc_src
, src1
);
372 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
373 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
374 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
377 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
379 tcg_gen_mov_tl(cpu_cc_src
, src1
);
380 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
381 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
382 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
385 static TCGv_i32
gen_add32_carry32(void)
387 TCGv_i32 carry_32
, cc_src1_32
, cc_src2_32
;
389 /* Carry is computed from a previous add: (dst < src) */
390 #if TARGET_LONG_BITS == 64
391 cc_src1_32
= tcg_temp_new_i32();
392 cc_src2_32
= tcg_temp_new_i32();
393 tcg_gen_trunc_i64_i32(cc_src1_32
, cpu_cc_dst
);
394 tcg_gen_trunc_i64_i32(cc_src2_32
, cpu_cc_src
);
396 cc_src1_32
= cpu_cc_dst
;
397 cc_src2_32
= cpu_cc_src
;
400 carry_32
= tcg_temp_new_i32();
401 tcg_gen_setcond_i32(TCG_COND_LTU
, carry_32
, cc_src1_32
, cc_src2_32
);
403 #if TARGET_LONG_BITS == 64
404 tcg_temp_free_i32(cc_src1_32
);
405 tcg_temp_free_i32(cc_src2_32
);
411 static TCGv_i32
gen_sub32_carry32(void)
413 TCGv_i32 carry_32
, cc_src1_32
, cc_src2_32
;
415 /* Carry is computed from a previous borrow: (src1 < src2) */
416 #if TARGET_LONG_BITS == 64
417 cc_src1_32
= tcg_temp_new_i32();
418 cc_src2_32
= tcg_temp_new_i32();
419 tcg_gen_trunc_i64_i32(cc_src1_32
, cpu_cc_src
);
420 tcg_gen_trunc_i64_i32(cc_src2_32
, cpu_cc_src2
);
422 cc_src1_32
= cpu_cc_src
;
423 cc_src2_32
= cpu_cc_src2
;
426 carry_32
= tcg_temp_new_i32();
427 tcg_gen_setcond_i32(TCG_COND_LTU
, carry_32
, cc_src1_32
, cc_src2_32
);
429 #if TARGET_LONG_BITS == 64
430 tcg_temp_free_i32(cc_src1_32
);
431 tcg_temp_free_i32(cc_src2_32
);
437 static void gen_op_addx_int(DisasContext
*dc
, TCGv dst
, TCGv src1
,
438 TCGv src2
, int update_cc
)
446 /* Carry is known to be zero. Fall back to plain ADD. */
448 gen_op_add_cc(dst
, src1
, src2
);
450 tcg_gen_add_tl(dst
, src1
, src2
);
457 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
459 /* For 32-bit hosts, we can re-use the host's hardware carry
460 generation by using an ADD2 opcode. We discard the low
461 part of the output. Ideally we'd combine this operation
462 with the add that generated the carry in the first place. */
463 TCGv dst_low
= tcg_temp_new();
464 tcg_gen_op6_i32(INDEX_op_add2_i32
, dst_low
, dst
,
465 cpu_cc_src
, src1
, cpu_cc_src2
, src2
);
466 tcg_temp_free(dst_low
);
470 carry_32
= gen_add32_carry32();
476 carry_32
= gen_sub32_carry32();
480 /* We need external help to produce the carry. */
481 carry_32
= tcg_temp_new_i32();
482 gen_helper_compute_C_icc(carry_32
, cpu_env
);
486 #if TARGET_LONG_BITS == 64
487 carry
= tcg_temp_new();
488 tcg_gen_extu_i32_i64(carry
, carry_32
);
493 tcg_gen_add_tl(dst
, src1
, src2
);
494 tcg_gen_add_tl(dst
, dst
, carry
);
496 tcg_temp_free_i32(carry_32
);
497 #if TARGET_LONG_BITS == 64
498 tcg_temp_free(carry
);
501 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
505 tcg_gen_mov_tl(cpu_cc_src
, src1
);
506 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
507 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
508 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
509 dc
->cc_op
= CC_OP_ADDX
;
513 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
515 tcg_gen_mov_tl(cpu_cc_src
, src1
);
516 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
517 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
518 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
521 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
523 tcg_gen_mov_tl(cpu_cc_src
, src1
);
524 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
525 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
526 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
527 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
528 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
531 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
537 l1
= gen_new_label();
539 r_temp
= tcg_temp_new();
540 tcg_gen_xor_tl(r_temp
, src1
, src2
);
541 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
542 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
543 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
544 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
545 r_const
= tcg_const_i32(TT_TOVF
);
546 gen_helper_raise_exception(cpu_env
, r_const
);
547 tcg_temp_free_i32(r_const
);
549 tcg_temp_free(r_temp
);
552 static inline void gen_op_subi_cc(TCGv dst
, TCGv src1
, target_long src2
, DisasContext
*dc
)
554 tcg_gen_mov_tl(cpu_cc_src
, src1
);
555 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
557 tcg_gen_mov_tl(cpu_cc_dst
, src1
);
558 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
559 dc
->cc_op
= CC_OP_LOGIC
;
561 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
562 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
563 dc
->cc_op
= CC_OP_SUB
;
565 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
568 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
570 tcg_gen_mov_tl(cpu_cc_src
, src1
);
571 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
572 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
573 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
576 static void gen_op_subx_int(DisasContext
*dc
, TCGv dst
, TCGv src1
,
577 TCGv src2
, int update_cc
)
585 /* Carry is known to be zero. Fall back to plain SUB. */
587 gen_op_sub_cc(dst
, src1
, src2
);
589 tcg_gen_sub_tl(dst
, src1
, src2
);
596 carry_32
= gen_add32_carry32();
602 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
604 /* For 32-bit hosts, we can re-use the host's hardware carry
605 generation by using a SUB2 opcode. We discard the low
606 part of the output. Ideally we'd combine this operation
607 with the add that generated the carry in the first place. */
608 TCGv dst_low
= tcg_temp_new();
609 tcg_gen_op6_i32(INDEX_op_sub2_i32
, dst_low
, dst
,
610 cpu_cc_src
, src1
, cpu_cc_src2
, src2
);
611 tcg_temp_free(dst_low
);
615 carry_32
= gen_sub32_carry32();
619 /* We need external help to produce the carry. */
620 carry_32
= tcg_temp_new_i32();
621 gen_helper_compute_C_icc(carry_32
, cpu_env
);
625 #if TARGET_LONG_BITS == 64
626 carry
= tcg_temp_new();
627 tcg_gen_extu_i32_i64(carry
, carry_32
);
632 tcg_gen_sub_tl(dst
, src1
, src2
);
633 tcg_gen_sub_tl(dst
, dst
, carry
);
635 tcg_temp_free_i32(carry_32
);
636 #if TARGET_LONG_BITS == 64
637 tcg_temp_free(carry
);
640 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
644 tcg_gen_mov_tl(cpu_cc_src
, src1
);
645 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
646 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
647 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
648 dc
->cc_op
= CC_OP_SUBX
;
652 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
654 tcg_gen_mov_tl(cpu_cc_src
, src1
);
655 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
656 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
657 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
660 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
662 tcg_gen_mov_tl(cpu_cc_src
, src1
);
663 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
664 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
665 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
666 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
667 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
670 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
675 l1
= gen_new_label();
676 r_temp
= tcg_temp_new();
682 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
683 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
684 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
685 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
686 tcg_gen_movi_tl(cpu_cc_src2
, 0);
690 // env->y = (b2 << 31) | (env->y >> 1);
691 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
692 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
693 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
694 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
695 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
696 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
699 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
700 gen_mov_reg_V(r_temp
, cpu_psr
);
701 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
702 tcg_temp_free(r_temp
);
704 // T0 = (b1 << 31) | (T0 >> 1);
706 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
707 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
708 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
710 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
712 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
715 static inline void gen_op_multiply(TCGv dst
, TCGv src1
, TCGv src2
, int sign_ext
)
717 TCGv_i32 r_src1
, r_src2
;
718 TCGv_i64 r_temp
, r_temp2
;
720 r_src1
= tcg_temp_new_i32();
721 r_src2
= tcg_temp_new_i32();
723 tcg_gen_trunc_tl_i32(r_src1
, src1
);
724 tcg_gen_trunc_tl_i32(r_src2
, src2
);
726 r_temp
= tcg_temp_new_i64();
727 r_temp2
= tcg_temp_new_i64();
730 tcg_gen_ext_i32_i64(r_temp
, r_src2
);
731 tcg_gen_ext_i32_i64(r_temp2
, r_src1
);
733 tcg_gen_extu_i32_i64(r_temp
, r_src2
);
734 tcg_gen_extu_i32_i64(r_temp2
, r_src1
);
737 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
739 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
740 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
741 tcg_temp_free_i64(r_temp
);
742 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
744 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
746 tcg_temp_free_i64(r_temp2
);
748 tcg_temp_free_i32(r_src1
);
749 tcg_temp_free_i32(r_src2
);
752 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
754 /* zero-extend truncated operands before multiplication */
755 gen_op_multiply(dst
, src1
, src2
, 0);
758 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
760 /* sign-extend truncated operands before multiplication */
761 gen_op_multiply(dst
, src1
, src2
, 1);
764 #ifdef TARGET_SPARC64
765 static inline void gen_trap_ifdivzero_tl(TCGv divisor
)
770 l1
= gen_new_label();
771 tcg_gen_brcondi_tl(TCG_COND_NE
, divisor
, 0, l1
);
772 r_const
= tcg_const_i32(TT_DIV_ZERO
);
773 gen_helper_raise_exception(cpu_env
, r_const
);
774 tcg_temp_free_i32(r_const
);
778 static inline void gen_op_sdivx(TCGv dst
, TCGv src1
, TCGv src2
)
781 TCGv r_temp1
, r_temp2
;
783 l1
= gen_new_label();
784 l2
= gen_new_label();
785 r_temp1
= tcg_temp_local_new();
786 r_temp2
= tcg_temp_local_new();
787 tcg_gen_mov_tl(r_temp1
, src1
);
788 tcg_gen_mov_tl(r_temp2
, src2
);
789 gen_trap_ifdivzero_tl(r_temp2
);
790 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp1
, INT64_MIN
, l1
);
791 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp2
, -1, l1
);
792 tcg_gen_movi_i64(dst
, INT64_MIN
);
795 tcg_gen_div_i64(dst
, r_temp1
, r_temp2
);
797 tcg_temp_free(r_temp1
);
798 tcg_temp_free(r_temp2
);
803 static inline void gen_op_eval_ba(TCGv dst
)
805 tcg_gen_movi_tl(dst
, 1);
809 static inline void gen_op_eval_be(TCGv dst
, TCGv_i32 src
)
811 gen_mov_reg_Z(dst
, src
);
815 static inline void gen_op_eval_ble(TCGv dst
, TCGv_i32 src
)
817 gen_mov_reg_N(cpu_tmp0
, src
);
818 gen_mov_reg_V(dst
, src
);
819 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
820 gen_mov_reg_Z(cpu_tmp0
, src
);
821 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
825 static inline void gen_op_eval_bl(TCGv dst
, TCGv_i32 src
)
827 gen_mov_reg_V(cpu_tmp0
, src
);
828 gen_mov_reg_N(dst
, src
);
829 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
833 static inline void gen_op_eval_bleu(TCGv dst
, TCGv_i32 src
)
835 gen_mov_reg_Z(cpu_tmp0
, src
);
836 gen_mov_reg_C(dst
, src
);
837 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
841 static inline void gen_op_eval_bcs(TCGv dst
, TCGv_i32 src
)
843 gen_mov_reg_C(dst
, src
);
847 static inline void gen_op_eval_bvs(TCGv dst
, TCGv_i32 src
)
849 gen_mov_reg_V(dst
, src
);
853 static inline void gen_op_eval_bn(TCGv dst
)
855 tcg_gen_movi_tl(dst
, 0);
859 static inline void gen_op_eval_bneg(TCGv dst
, TCGv_i32 src
)
861 gen_mov_reg_N(dst
, src
);
865 static inline void gen_op_eval_bne(TCGv dst
, TCGv_i32 src
)
867 gen_mov_reg_Z(dst
, src
);
868 tcg_gen_xori_tl(dst
, dst
, 0x1);
872 static inline void gen_op_eval_bg(TCGv dst
, TCGv_i32 src
)
874 gen_mov_reg_N(cpu_tmp0
, src
);
875 gen_mov_reg_V(dst
, src
);
876 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
877 gen_mov_reg_Z(cpu_tmp0
, src
);
878 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
879 tcg_gen_xori_tl(dst
, dst
, 0x1);
883 static inline void gen_op_eval_bge(TCGv dst
, TCGv_i32 src
)
885 gen_mov_reg_V(cpu_tmp0
, src
);
886 gen_mov_reg_N(dst
, src
);
887 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
888 tcg_gen_xori_tl(dst
, dst
, 0x1);
892 static inline void gen_op_eval_bgu(TCGv dst
, TCGv_i32 src
)
894 gen_mov_reg_Z(cpu_tmp0
, src
);
895 gen_mov_reg_C(dst
, src
);
896 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
897 tcg_gen_xori_tl(dst
, dst
, 0x1);
901 static inline void gen_op_eval_bcc(TCGv dst
, TCGv_i32 src
)
903 gen_mov_reg_C(dst
, src
);
904 tcg_gen_xori_tl(dst
, dst
, 0x1);
908 static inline void gen_op_eval_bpos(TCGv dst
, TCGv_i32 src
)
910 gen_mov_reg_N(dst
, src
);
911 tcg_gen_xori_tl(dst
, dst
, 0x1);
915 static inline void gen_op_eval_bvc(TCGv dst
, TCGv_i32 src
)
917 gen_mov_reg_V(dst
, src
);
918 tcg_gen_xori_tl(dst
, dst
, 0x1);
922 FPSR bit field FCC1 | FCC0:
928 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
929 unsigned int fcc_offset
)
931 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
932 tcg_gen_andi_tl(reg
, reg
, 0x1);
935 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
936 unsigned int fcc_offset
)
938 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
939 tcg_gen_andi_tl(reg
, reg
, 0x1);
943 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
944 unsigned int fcc_offset
)
946 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
947 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
948 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
951 // 1 or 2: FCC0 ^ FCC1
952 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
953 unsigned int fcc_offset
)
955 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
956 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
957 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
961 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
962 unsigned int fcc_offset
)
964 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
968 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
969 unsigned int fcc_offset
)
971 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
972 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
973 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
974 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
978 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
979 unsigned int fcc_offset
)
981 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
985 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
986 unsigned int fcc_offset
)
988 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
989 tcg_gen_xori_tl(dst
, dst
, 0x1);
990 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
991 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
995 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
996 unsigned int fcc_offset
)
998 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
999 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1000 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1003 // 0: !(FCC0 | FCC1)
1004 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
1005 unsigned int fcc_offset
)
1007 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1008 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1009 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
1010 tcg_gen_xori_tl(dst
, dst
, 0x1);
1013 // 0 or 3: !(FCC0 ^ FCC1)
1014 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
1015 unsigned int fcc_offset
)
1017 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1018 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1019 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1020 tcg_gen_xori_tl(dst
, dst
, 0x1);
1024 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
1025 unsigned int fcc_offset
)
1027 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1028 tcg_gen_xori_tl(dst
, dst
, 0x1);
1031 // !1: !(FCC0 & !FCC1)
1032 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
1033 unsigned int fcc_offset
)
1035 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1036 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1037 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1038 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1039 tcg_gen_xori_tl(dst
, dst
, 0x1);
1043 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
1044 unsigned int fcc_offset
)
1046 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1047 tcg_gen_xori_tl(dst
, dst
, 0x1);
1050 // !2: !(!FCC0 & FCC1)
1051 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
1052 unsigned int fcc_offset
)
1054 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1055 tcg_gen_xori_tl(dst
, dst
, 0x1);
1056 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1057 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1058 tcg_gen_xori_tl(dst
, dst
, 0x1);
1061 // !3: !(FCC0 & FCC1)
1062 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1063 unsigned int fcc_offset
)
1065 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1066 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1067 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1068 tcg_gen_xori_tl(dst
, dst
, 0x1);
1071 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1072 target_ulong pc2
, TCGv r_cond
)
1076 l1
= gen_new_label();
1078 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1080 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1083 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1086 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1087 target_ulong pc2
, TCGv r_cond
)
1091 l1
= gen_new_label();
1093 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1095 gen_goto_tb(dc
, 0, pc2
, pc1
);
1098 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1101 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1106 l1
= gen_new_label();
1107 l2
= gen_new_label();
1109 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1111 tcg_gen_movi_tl(cpu_npc
, npc1
);
1115 tcg_gen_movi_tl(cpu_npc
, npc2
);
1119 /* call this function before using the condition register as it may
1120 have been set for a jump */
1121 static inline void flush_cond(DisasContext
*dc
)
1123 if (dc
->npc
== JUMP_PC
) {
1124 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
1125 dc
->npc
= DYNAMIC_PC
;
1129 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
1131 if (dc
->npc
== JUMP_PC
) {
1132 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1133 dc
->npc
= DYNAMIC_PC
;
1134 } else if (dc
->npc
!= DYNAMIC_PC
) {
1135 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
1139 static inline void save_state(DisasContext
*dc
, TCGv cond
)
1141 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
1142 /* flush pending conditional evaluations before exposing cpu state */
1143 if (dc
->cc_op
!= CC_OP_FLAGS
) {
1144 dc
->cc_op
= CC_OP_FLAGS
;
1145 gen_helper_compute_psr(cpu_env
);
1150 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
1152 if (dc
->npc
== JUMP_PC
) {
1153 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1154 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1155 dc
->pc
= DYNAMIC_PC
;
1156 } else if (dc
->npc
== DYNAMIC_PC
) {
1157 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1158 dc
->pc
= DYNAMIC_PC
;
1164 static inline void gen_op_next_insn(void)
1166 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1167 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
1170 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
,
1175 #ifdef TARGET_SPARC64
1183 switch (dc
->cc_op
) {
1187 gen_helper_compute_psr(cpu_env
);
1188 dc
->cc_op
= CC_OP_FLAGS
;
1193 gen_op_eval_bn(r_dst
);
1196 gen_op_eval_be(r_dst
, r_src
);
1199 gen_op_eval_ble(r_dst
, r_src
);
1202 gen_op_eval_bl(r_dst
, r_src
);
1205 gen_op_eval_bleu(r_dst
, r_src
);
1208 gen_op_eval_bcs(r_dst
, r_src
);
1211 gen_op_eval_bneg(r_dst
, r_src
);
1214 gen_op_eval_bvs(r_dst
, r_src
);
1217 gen_op_eval_ba(r_dst
);
1220 gen_op_eval_bne(r_dst
, r_src
);
1223 gen_op_eval_bg(r_dst
, r_src
);
1226 gen_op_eval_bge(r_dst
, r_src
);
1229 gen_op_eval_bgu(r_dst
, r_src
);
1232 gen_op_eval_bcc(r_dst
, r_src
);
1235 gen_op_eval_bpos(r_dst
, r_src
);
1238 gen_op_eval_bvc(r_dst
, r_src
);
1243 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1245 unsigned int offset
;
1265 gen_op_eval_bn(r_dst
);
1268 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1271 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1274 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1277 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1280 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1283 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1286 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1289 gen_op_eval_ba(r_dst
);
1292 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1295 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1298 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1301 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1304 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1307 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1310 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1315 #ifdef TARGET_SPARC64
1317 static const int gen_tcg_cond_reg
[8] = {
1328 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1332 l1
= gen_new_label();
1333 tcg_gen_movi_tl(r_dst
, 0);
1334 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1335 tcg_gen_movi_tl(r_dst
, 1);
1340 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
)
1342 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1343 target_ulong target
= dc
->pc
+ offset
;
1345 #ifdef TARGET_SPARC64
1346 if (unlikely(AM_CHECK(dc
))) {
1347 target
&= 0xffffffffULL
;
1351 /* unconditional not taken */
1353 dc
->pc
= dc
->npc
+ 4;
1354 dc
->npc
= dc
->pc
+ 4;
1357 dc
->npc
= dc
->pc
+ 4;
1359 } else if (cond
== 0x8) {
1360 /* unconditional taken */
1363 dc
->npc
= dc
->pc
+ 4;
1367 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1371 gen_cond(cpu_cond
, cc
, cond
, dc
);
1373 gen_branch_a(dc
, target
, dc
->npc
, cpu_cond
);
1377 dc
->jump_pc
[0] = target
;
1378 if (unlikely(dc
->npc
== DYNAMIC_PC
)) {
1379 dc
->jump_pc
[1] = DYNAMIC_PC
;
1380 tcg_gen_addi_tl(cpu_pc
, cpu_npc
, 4);
1382 dc
->jump_pc
[1] = dc
->npc
+ 4;
1389 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
)
1391 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1392 target_ulong target
= dc
->pc
+ offset
;
1394 #ifdef TARGET_SPARC64
1395 if (unlikely(AM_CHECK(dc
))) {
1396 target
&= 0xffffffffULL
;
1400 /* unconditional not taken */
1402 dc
->pc
= dc
->npc
+ 4;
1403 dc
->npc
= dc
->pc
+ 4;
1406 dc
->npc
= dc
->pc
+ 4;
1408 } else if (cond
== 0x8) {
1409 /* unconditional taken */
1412 dc
->npc
= dc
->pc
+ 4;
1416 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1420 gen_fcond(cpu_cond
, cc
, cond
);
1422 gen_branch_a(dc
, target
, dc
->npc
, cpu_cond
);
1426 dc
->jump_pc
[0] = target
;
1427 if (unlikely(dc
->npc
== DYNAMIC_PC
)) {
1428 dc
->jump_pc
[1] = DYNAMIC_PC
;
1429 tcg_gen_addi_tl(cpu_pc
, cpu_npc
, 4);
1431 dc
->jump_pc
[1] = dc
->npc
+ 4;
1438 #ifdef TARGET_SPARC64
1439 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1442 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1443 target_ulong target
= dc
->pc
+ offset
;
1445 if (unlikely(AM_CHECK(dc
))) {
1446 target
&= 0xffffffffULL
;
1449 gen_cond_reg(cpu_cond
, cond
, r_reg
);
1451 gen_branch_a(dc
, target
, dc
->npc
, cpu_cond
);
1455 dc
->jump_pc
[0] = target
;
1456 if (unlikely(dc
->npc
== DYNAMIC_PC
)) {
1457 dc
->jump_pc
[1] = DYNAMIC_PC
;
1458 tcg_gen_addi_tl(cpu_pc
, cpu_npc
, 4);
1460 dc
->jump_pc
[1] = dc
->npc
+ 4;
1466 static inline void gen_op_fcmps(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1470 gen_helper_fcmps(cpu_env
, r_rs1
, r_rs2
);
1473 gen_helper_fcmps_fcc1(cpu_env
, r_rs1
, r_rs2
);
1476 gen_helper_fcmps_fcc2(cpu_env
, r_rs1
, r_rs2
);
1479 gen_helper_fcmps_fcc3(cpu_env
, r_rs1
, r_rs2
);
1484 static inline void gen_op_fcmpd(int fccno
, TCGv_i64 r_rs1
, TCGv_i64 r_rs2
)
1488 gen_helper_fcmpd(cpu_env
, r_rs1
, r_rs2
);
1491 gen_helper_fcmpd_fcc1(cpu_env
, r_rs1
, r_rs2
);
1494 gen_helper_fcmpd_fcc2(cpu_env
, r_rs1
, r_rs2
);
1497 gen_helper_fcmpd_fcc3(cpu_env
, r_rs1
, r_rs2
);
1502 static inline void gen_op_fcmpq(int fccno
)
1506 gen_helper_fcmpq(cpu_env
);
1509 gen_helper_fcmpq_fcc1(cpu_env
);
1512 gen_helper_fcmpq_fcc2(cpu_env
);
1515 gen_helper_fcmpq_fcc3(cpu_env
);
1520 static inline void gen_op_fcmpes(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1524 gen_helper_fcmpes(cpu_env
, r_rs1
, r_rs2
);
1527 gen_helper_fcmpes_fcc1(cpu_env
, r_rs1
, r_rs2
);
1530 gen_helper_fcmpes_fcc2(cpu_env
, r_rs1
, r_rs2
);
1533 gen_helper_fcmpes_fcc3(cpu_env
, r_rs1
, r_rs2
);
1538 static inline void gen_op_fcmped(int fccno
, TCGv_i64 r_rs1
, TCGv_i64 r_rs2
)
1542 gen_helper_fcmped(cpu_env
, r_rs1
, r_rs2
);
1545 gen_helper_fcmped_fcc1(cpu_env
, r_rs1
, r_rs2
);
1548 gen_helper_fcmped_fcc2(cpu_env
, r_rs1
, r_rs2
);
1551 gen_helper_fcmped_fcc3(cpu_env
, r_rs1
, r_rs2
);
1556 static inline void gen_op_fcmpeq(int fccno
)
1560 gen_helper_fcmpeq(cpu_env
);
1563 gen_helper_fcmpeq_fcc1(cpu_env
);
1566 gen_helper_fcmpeq_fcc2(cpu_env
);
1569 gen_helper_fcmpeq_fcc3(cpu_env
);
1576 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1578 gen_helper_fcmps(cpu_env
, r_rs1
, r_rs2
);
1581 static inline void gen_op_fcmpd(int fccno
, TCGv_i64 r_rs1
, TCGv_i64 r_rs2
)
1583 gen_helper_fcmpd(cpu_env
, r_rs1
, r_rs2
);
1586 static inline void gen_op_fcmpq(int fccno
)
1588 gen_helper_fcmpq(cpu_env
);
1591 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1593 gen_helper_fcmpes(cpu_env
, r_rs1
, r_rs2
);
1596 static inline void gen_op_fcmped(int fccno
, TCGv_i64 r_rs1
, TCGv_i64 r_rs2
)
1598 gen_helper_fcmped(cpu_env
, r_rs1
, r_rs2
);
1601 static inline void gen_op_fcmpeq(int fccno
)
1603 gen_helper_fcmpeq(cpu_env
);
1607 static inline void gen_op_fpexception_im(int fsr_flags
)
1611 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1612 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1613 r_const
= tcg_const_i32(TT_FP_EXCP
);
1614 gen_helper_raise_exception(cpu_env
, r_const
);
1615 tcg_temp_free_i32(r_const
);
1618 static int gen_trap_ifnofpu(DisasContext
*dc
)
1620 #if !defined(CONFIG_USER_ONLY)
1621 if (!dc
->fpu_enabled
) {
1624 save_state(dc
, cpu_cond
);
1625 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1626 gen_helper_raise_exception(cpu_env
, r_const
);
1627 tcg_temp_free_i32(r_const
);
1635 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1637 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
1640 static inline void gen_fop_FF(DisasContext
*dc
, int rd
, int rs
,
1641 void (*gen
)(TCGv_i32
, TCGv_ptr
, TCGv_i32
))
1645 src
= gen_load_fpr_F(dc
, rs
);
1646 dst
= gen_dest_fpr_F();
1648 gen(dst
, cpu_env
, src
);
1650 gen_store_fpr_F(dc
, rd
, dst
);
1653 static inline void gen_ne_fop_FF(DisasContext
*dc
, int rd
, int rs
,
1654 void (*gen
)(TCGv_i32
, TCGv_i32
))
1658 src
= gen_load_fpr_F(dc
, rs
);
1659 dst
= gen_dest_fpr_F();
1663 gen_store_fpr_F(dc
, rd
, dst
);
1666 static inline void gen_fop_FFF(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1667 void (*gen
)(TCGv_i32
, TCGv_ptr
, TCGv_i32
, TCGv_i32
))
1669 TCGv_i32 dst
, src1
, src2
;
1671 src1
= gen_load_fpr_F(dc
, rs1
);
1672 src2
= gen_load_fpr_F(dc
, rs2
);
1673 dst
= gen_dest_fpr_F();
1675 gen(dst
, cpu_env
, src1
, src2
);
1677 gen_store_fpr_F(dc
, rd
, dst
);
1680 #ifdef TARGET_SPARC64
1681 static inline void gen_ne_fop_FFF(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1682 void (*gen
)(TCGv_i32
, TCGv_i32
, TCGv_i32
))
1684 TCGv_i32 dst
, src1
, src2
;
1686 src1
= gen_load_fpr_F(dc
, rs1
);
1687 src2
= gen_load_fpr_F(dc
, rs2
);
1688 dst
= gen_dest_fpr_F();
1690 gen(dst
, src1
, src2
);
1692 gen_store_fpr_F(dc
, rd
, dst
);
1696 static inline void gen_fop_DD(DisasContext
*dc
, int rd
, int rs
,
1697 void (*gen
)(TCGv_i64
, TCGv_ptr
, TCGv_i64
))
1701 src
= gen_load_fpr_D(dc
, rs
);
1702 dst
= gen_dest_fpr_D();
1704 gen(dst
, cpu_env
, src
);
1706 gen_store_fpr_D(dc
, rd
, dst
);
1709 #ifdef TARGET_SPARC64
1710 static inline void gen_ne_fop_DD(DisasContext
*dc
, int rd
, int rs
,
1711 void (*gen
)(TCGv_i64
, TCGv_i64
))
1715 src
= gen_load_fpr_D(dc
, rs
);
1716 dst
= gen_dest_fpr_D();
1720 gen_store_fpr_D(dc
, rd
, dst
);
1724 static inline void gen_fop_DDD(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1725 void (*gen
)(TCGv_i64
, TCGv_ptr
, TCGv_i64
, TCGv_i64
))
1727 TCGv_i64 dst
, src1
, src2
;
1729 src1
= gen_load_fpr_D(dc
, rs1
);
1730 src2
= gen_load_fpr_D(dc
, rs2
);
1731 dst
= gen_dest_fpr_D();
1733 gen(dst
, cpu_env
, src1
, src2
);
1735 gen_store_fpr_D(dc
, rd
, dst
);
1738 #ifdef TARGET_SPARC64
1739 static inline void gen_ne_fop_DDD(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1740 void (*gen
)(TCGv_i64
, TCGv_i64
, TCGv_i64
))
1742 TCGv_i64 dst
, src1
, src2
;
1744 src1
= gen_load_fpr_D(dc
, rs1
);
1745 src2
= gen_load_fpr_D(dc
, rs2
);
1746 dst
= gen_dest_fpr_D();
1748 gen(dst
, src1
, src2
);
1750 gen_store_fpr_D(dc
, rd
, dst
);
1753 static inline void gen_gsr_fop_DDD(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1754 void (*gen
)(TCGv_i64
, TCGv_i64
, TCGv_i64
, TCGv_i64
))
1756 TCGv_i64 dst
, src1
, src2
;
1758 src1
= gen_load_fpr_D(dc
, rs1
);
1759 src2
= gen_load_fpr_D(dc
, rs2
);
1760 dst
= gen_dest_fpr_D();
1762 gen(dst
, cpu_gsr
, src1
, src2
);
1764 gen_store_fpr_D(dc
, rd
, dst
);
1767 static inline void gen_ne_fop_DDDD(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1768 void (*gen
)(TCGv_i64
, TCGv_i64
, TCGv_i64
, TCGv_i64
))
1770 TCGv_i64 dst
, src0
, src1
, src2
;
1772 src1
= gen_load_fpr_D(dc
, rs1
);
1773 src2
= gen_load_fpr_D(dc
, rs2
);
1774 src0
= gen_load_fpr_D(dc
, rd
);
1775 dst
= gen_dest_fpr_D();
1777 gen(dst
, src0
, src1
, src2
);
1779 gen_store_fpr_D(dc
, rd
, dst
);
1783 static inline void gen_fop_QQ(DisasContext
*dc
, int rd
, int rs
,
1784 void (*gen
)(TCGv_ptr
))
1786 gen_op_load_fpr_QT1(QFPREG(rs
));
1790 gen_op_store_QT0_fpr(QFPREG(rd
));
1791 gen_update_fprs_dirty(QFPREG(rd
));
1794 #ifdef TARGET_SPARC64
1795 static inline void gen_ne_fop_QQ(DisasContext
*dc
, int rd
, int rs
,
1796 void (*gen
)(TCGv_ptr
))
1798 gen_op_load_fpr_QT1(QFPREG(rs
));
1802 gen_op_store_QT0_fpr(QFPREG(rd
));
1803 gen_update_fprs_dirty(QFPREG(rd
));
1807 static inline void gen_fop_QQQ(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1808 void (*gen
)(TCGv_ptr
))
1810 gen_op_load_fpr_QT0(QFPREG(rs1
));
1811 gen_op_load_fpr_QT1(QFPREG(rs2
));
1815 gen_op_store_QT0_fpr(QFPREG(rd
));
1816 gen_update_fprs_dirty(QFPREG(rd
));
1819 static inline void gen_fop_DFF(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1820 void (*gen
)(TCGv_i64
, TCGv_ptr
, TCGv_i32
, TCGv_i32
))
1823 TCGv_i32 src1
, src2
;
1825 src1
= gen_load_fpr_F(dc
, rs1
);
1826 src2
= gen_load_fpr_F(dc
, rs2
);
1827 dst
= gen_dest_fpr_D();
1829 gen(dst
, cpu_env
, src1
, src2
);
1831 gen_store_fpr_D(dc
, rd
, dst
);
1834 static inline void gen_fop_QDD(DisasContext
*dc
, int rd
, int rs1
, int rs2
,
1835 void (*gen
)(TCGv_ptr
, TCGv_i64
, TCGv_i64
))
1837 TCGv_i64 src1
, src2
;
1839 src1
= gen_load_fpr_D(dc
, rs1
);
1840 src2
= gen_load_fpr_D(dc
, rs2
);
1842 gen(cpu_env
, src1
, src2
);
1844 gen_op_store_QT0_fpr(QFPREG(rd
));
1845 gen_update_fprs_dirty(QFPREG(rd
));
1848 #ifdef TARGET_SPARC64
1849 static inline void gen_fop_DF(DisasContext
*dc
, int rd
, int rs
,
1850 void (*gen
)(TCGv_i64
, TCGv_ptr
, TCGv_i32
))
1855 src
= gen_load_fpr_F(dc
, rs
);
1856 dst
= gen_dest_fpr_D();
1858 gen(dst
, cpu_env
, src
);
1860 gen_store_fpr_D(dc
, rd
, dst
);
1864 static inline void gen_ne_fop_DF(DisasContext
*dc
, int rd
, int rs
,
1865 void (*gen
)(TCGv_i64
, TCGv_ptr
, TCGv_i32
))
1870 src
= gen_load_fpr_F(dc
, rs
);
1871 dst
= gen_dest_fpr_D();
1873 gen(dst
, cpu_env
, src
);
1875 gen_store_fpr_D(dc
, rd
, dst
);
1878 static inline void gen_fop_FD(DisasContext
*dc
, int rd
, int rs
,
1879 void (*gen
)(TCGv_i32
, TCGv_ptr
, TCGv_i64
))
1884 src
= gen_load_fpr_D(dc
, rs
);
1885 dst
= gen_dest_fpr_F();
1887 gen(dst
, cpu_env
, src
);
1889 gen_store_fpr_F(dc
, rd
, dst
);
1892 static inline void gen_fop_FQ(DisasContext
*dc
, int rd
, int rs
,
1893 void (*gen
)(TCGv_i32
, TCGv_ptr
))
1897 gen_op_load_fpr_QT1(QFPREG(rs
));
1898 dst
= gen_dest_fpr_F();
1902 gen_store_fpr_F(dc
, rd
, dst
);
1905 static inline void gen_fop_DQ(DisasContext
*dc
, int rd
, int rs
,
1906 void (*gen
)(TCGv_i64
, TCGv_ptr
))
1910 gen_op_load_fpr_QT1(QFPREG(rs
));
1911 dst
= gen_dest_fpr_D();
1915 gen_store_fpr_D(dc
, rd
, dst
);
1918 static inline void gen_ne_fop_QF(DisasContext
*dc
, int rd
, int rs
,
1919 void (*gen
)(TCGv_ptr
, TCGv_i32
))
1923 src
= gen_load_fpr_F(dc
, rs
);
1927 gen_op_store_QT0_fpr(QFPREG(rd
));
1928 gen_update_fprs_dirty(QFPREG(rd
));
1931 static inline void gen_ne_fop_QD(DisasContext
*dc
, int rd
, int rs
,
1932 void (*gen
)(TCGv_ptr
, TCGv_i64
))
1936 src
= gen_load_fpr_D(dc
, rs
);
1940 gen_op_store_QT0_fpr(QFPREG(rd
));
1941 gen_update_fprs_dirty(QFPREG(rd
));
1945 #ifdef TARGET_SPARC64
1946 static inline TCGv_i32
gen_get_asi(int insn
, TCGv r_addr
)
1952 r_asi
= tcg_temp_new_i32();
1953 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1955 asi
= GET_FIELD(insn
, 19, 26);
1956 r_asi
= tcg_const_i32(asi
);
1961 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1964 TCGv_i32 r_asi
, r_size
, r_sign
;
1966 r_asi
= gen_get_asi(insn
, addr
);
1967 r_size
= tcg_const_i32(size
);
1968 r_sign
= tcg_const_i32(sign
);
1969 gen_helper_ld_asi(dst
, cpu_env
, addr
, r_asi
, r_size
, r_sign
);
1970 tcg_temp_free_i32(r_sign
);
1971 tcg_temp_free_i32(r_size
);
1972 tcg_temp_free_i32(r_asi
);
1975 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1977 TCGv_i32 r_asi
, r_size
;
1979 r_asi
= gen_get_asi(insn
, addr
);
1980 r_size
= tcg_const_i32(size
);
1981 gen_helper_st_asi(cpu_env
, addr
, src
, r_asi
, r_size
);
1982 tcg_temp_free_i32(r_size
);
1983 tcg_temp_free_i32(r_asi
);
1986 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1988 TCGv_i32 r_asi
, r_size
, r_rd
;
1990 r_asi
= gen_get_asi(insn
, addr
);
1991 r_size
= tcg_const_i32(size
);
1992 r_rd
= tcg_const_i32(rd
);
1993 gen_helper_ldf_asi(cpu_env
, addr
, r_asi
, r_size
, r_rd
);
1994 tcg_temp_free_i32(r_rd
);
1995 tcg_temp_free_i32(r_size
);
1996 tcg_temp_free_i32(r_asi
);
1999 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
2001 TCGv_i32 r_asi
, r_size
, r_rd
;
2003 r_asi
= gen_get_asi(insn
, addr
);
2004 r_size
= tcg_const_i32(size
);
2005 r_rd
= tcg_const_i32(rd
);
2006 gen_helper_stf_asi(cpu_env
, addr
, r_asi
, r_size
, r_rd
);
2007 tcg_temp_free_i32(r_rd
);
2008 tcg_temp_free_i32(r_size
);
2009 tcg_temp_free_i32(r_asi
);
2012 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
2014 TCGv_i32 r_asi
, r_size
, r_sign
;
2016 r_asi
= gen_get_asi(insn
, addr
);
2017 r_size
= tcg_const_i32(4);
2018 r_sign
= tcg_const_i32(0);
2019 gen_helper_ld_asi(cpu_tmp64
, cpu_env
, addr
, r_asi
, r_size
, r_sign
);
2020 tcg_temp_free_i32(r_sign
);
2021 gen_helper_st_asi(cpu_env
, addr
, dst
, r_asi
, r_size
);
2022 tcg_temp_free_i32(r_size
);
2023 tcg_temp_free_i32(r_asi
);
2024 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
2027 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
2029 TCGv_i32 r_asi
, r_rd
;
2031 r_asi
= gen_get_asi(insn
, addr
);
2032 r_rd
= tcg_const_i32(rd
);
2033 gen_helper_ldda_asi(cpu_env
, addr
, r_asi
, r_rd
);
2034 tcg_temp_free_i32(r_rd
);
2035 tcg_temp_free_i32(r_asi
);
2038 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
2040 TCGv_i32 r_asi
, r_size
;
2042 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
2043 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
2044 r_asi
= gen_get_asi(insn
, addr
);
2045 r_size
= tcg_const_i32(8);
2046 gen_helper_st_asi(cpu_env
, addr
, cpu_tmp64
, r_asi
, r_size
);
2047 tcg_temp_free_i32(r_size
);
2048 tcg_temp_free_i32(r_asi
);
2051 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
2057 r_val1
= tcg_temp_new();
2058 gen_movl_reg_TN(rd
, r_val1
);
2059 r_asi
= gen_get_asi(insn
, addr
);
2060 gen_helper_cas_asi(dst
, cpu_env
, addr
, r_val1
, val2
, r_asi
);
2061 tcg_temp_free_i32(r_asi
);
2062 tcg_temp_free(r_val1
);
2065 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
2070 gen_movl_reg_TN(rd
, cpu_tmp64
);
2071 r_asi
= gen_get_asi(insn
, addr
);
2072 gen_helper_casx_asi(dst
, cpu_env
, addr
, cpu_tmp64
, val2
, r_asi
);
2073 tcg_temp_free_i32(r_asi
);
2076 #elif !defined(CONFIG_USER_ONLY)
2078 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
2081 TCGv_i32 r_asi
, r_size
, r_sign
;
2083 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
2084 r_size
= tcg_const_i32(size
);
2085 r_sign
= tcg_const_i32(sign
);
2086 gen_helper_ld_asi(cpu_tmp64
, cpu_env
, addr
, r_asi
, r_size
, r_sign
);
2087 tcg_temp_free(r_sign
);
2088 tcg_temp_free(r_size
);
2089 tcg_temp_free(r_asi
);
2090 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
2093 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
2095 TCGv_i32 r_asi
, r_size
;
2097 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
2098 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
2099 r_size
= tcg_const_i32(size
);
2100 gen_helper_st_asi(cpu_env
, addr
, cpu_tmp64
, r_asi
, r_size
);
2101 tcg_temp_free(r_size
);
2102 tcg_temp_free(r_asi
);
2105 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
2107 TCGv_i32 r_asi
, r_size
, r_sign
;
2110 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
2111 r_size
= tcg_const_i32(4);
2112 r_sign
= tcg_const_i32(0);
2113 gen_helper_ld_asi(cpu_tmp64
, cpu_env
, addr
, r_asi
, r_size
, r_sign
);
2114 tcg_temp_free(r_sign
);
2115 r_val
= tcg_temp_new_i64();
2116 tcg_gen_extu_tl_i64(r_val
, dst
);
2117 gen_helper_st_asi(cpu_env
, addr
, r_val
, r_asi
, r_size
);
2118 tcg_temp_free_i64(r_val
);
2119 tcg_temp_free(r_size
);
2120 tcg_temp_free(r_asi
);
2121 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
2124 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
2126 TCGv_i32 r_asi
, r_size
, r_sign
;
2128 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
2129 r_size
= tcg_const_i32(8);
2130 r_sign
= tcg_const_i32(0);
2131 gen_helper_ld_asi(cpu_tmp64
, cpu_env
, addr
, r_asi
, r_size
, r_sign
);
2132 tcg_temp_free(r_sign
);
2133 tcg_temp_free(r_size
);
2134 tcg_temp_free(r_asi
);
2135 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
2136 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
2137 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
2138 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
2139 gen_movl_TN_reg(rd
, hi
);
2142 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
2144 TCGv_i32 r_asi
, r_size
;
2146 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
2147 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
2148 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
2149 r_size
= tcg_const_i32(8);
2150 gen_helper_st_asi(cpu_env
, addr
, cpu_tmp64
, r_asi
, r_size
);
2151 tcg_temp_free(r_size
);
2152 tcg_temp_free(r_asi
);
2156 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2157 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
2160 TCGv_i32 r_asi
, r_size
;
2162 gen_ld_asi(dst
, addr
, insn
, 1, 0);
2164 r_val
= tcg_const_i64(0xffULL
);
2165 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
2166 r_size
= tcg_const_i32(1);
2167 gen_helper_st_asi(cpu_env
, addr
, r_val
, r_asi
, r_size
);
2168 tcg_temp_free_i32(r_size
);
2169 tcg_temp_free_i32(r_asi
);
2170 tcg_temp_free_i64(r_val
);
2174 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
2179 rs1
= GET_FIELD(insn
, 13, 17);
2181 tcg_gen_movi_tl(def
, 0);
2182 } else if (rs1
< 8) {
2183 r_rs1
= cpu_gregs
[rs1
];
2185 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
2190 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
2194 if (IS_IMM
) { /* immediate */
2195 target_long simm
= GET_FIELDs(insn
, 19, 31);
2196 tcg_gen_movi_tl(def
, simm
);
2197 } else { /* register */
2198 unsigned int rs2
= GET_FIELD(insn
, 27, 31);
2200 tcg_gen_movi_tl(def
, 0);
2201 } else if (rs2
< 8) {
2202 r_rs2
= cpu_gregs
[rs2
];
2204 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
2210 #ifdef TARGET_SPARC64
2211 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr
, TCGv_ptr cpu_env
)
2213 TCGv_i32 r_tl
= tcg_temp_new_i32();
2215 /* load env->tl into r_tl */
2216 tcg_gen_ld_i32(r_tl
, cpu_env
, offsetof(CPUSPARCState
, tl
));
2218 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2219 tcg_gen_andi_i32(r_tl
, r_tl
, MAXTL_MASK
);
2221 /* calculate offset to current trap state from env->ts, reuse r_tl */
2222 tcg_gen_muli_i32(r_tl
, r_tl
, sizeof (trap_state
));
2223 tcg_gen_addi_ptr(r_tsptr
, cpu_env
, offsetof(CPUSPARCState
, ts
));
2225 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2227 TCGv_ptr r_tl_tmp
= tcg_temp_new_ptr();
2228 tcg_gen_ext_i32_ptr(r_tl_tmp
, r_tl
);
2229 tcg_gen_add_ptr(r_tsptr
, r_tsptr
, r_tl_tmp
);
2230 tcg_temp_free_ptr(r_tl_tmp
);
2233 tcg_temp_free_i32(r_tl
);
2236 static void gen_edge(DisasContext
*dc
, TCGv dst
, TCGv s1
, TCGv s2
,
2237 int width
, bool cc
, bool left
)
2239 TCGv lo1
, lo2
, t1
, t2
;
2240 uint64_t amask
, tabl
, tabr
;
2241 int shift
, imask
, omask
;
2244 tcg_gen_mov_tl(cpu_cc_src
, s1
);
2245 tcg_gen_mov_tl(cpu_cc_src2
, s2
);
2246 tcg_gen_sub_tl(cpu_cc_dst
, s1
, s2
);
2247 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
2248 dc
->cc_op
= CC_OP_SUB
;
2251 /* Theory of operation: there are two tables, left and right (not to
2252 be confused with the left and right versions of the opcode). These
2253 are indexed by the low 3 bits of the inputs. To make things "easy",
2254 these tables are loaded into two constants, TABL and TABR below.
2255 The operation index = (input & imask) << shift calculates the index
2256 into the constant, while val = (table >> index) & omask calculates
2257 the value we're looking for. */
2264 tabl
= 0x80c0e0f0f8fcfeffULL
;
2265 tabr
= 0xff7f3f1f0f070301ULL
;
2267 tabl
= 0x0103070f1f3f7fffULL
;
2268 tabr
= 0xfffefcf8f0e0c080ULL
;
2288 tabl
= (2 << 2) | 3;
2289 tabr
= (3 << 2) | 1;
2291 tabl
= (1 << 2) | 3;
2292 tabr
= (3 << 2) | 2;
2299 lo1
= tcg_temp_new();
2300 lo2
= tcg_temp_new();
2301 tcg_gen_andi_tl(lo1
, s1
, imask
);
2302 tcg_gen_andi_tl(lo2
, s2
, imask
);
2303 tcg_gen_shli_tl(lo1
, lo1
, shift
);
2304 tcg_gen_shli_tl(lo2
, lo2
, shift
);
2306 t1
= tcg_const_tl(tabl
);
2307 t2
= tcg_const_tl(tabr
);
2308 tcg_gen_shr_tl(lo1
, t1
, lo1
);
2309 tcg_gen_shr_tl(lo2
, t2
, lo2
);
2310 tcg_gen_andi_tl(dst
, lo1
, omask
);
2311 tcg_gen_andi_tl(lo2
, lo2
, omask
);
2315 amask
&= 0xffffffffULL
;
2317 tcg_gen_andi_tl(s1
, s1
, amask
);
2318 tcg_gen_andi_tl(s2
, s2
, amask
);
2320 /* We want to compute
2321 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2322 We've already done dst = lo1, so this reduces to
2323 dst &= (s1 == s2 ? -1 : lo2)
2328 tcg_gen_setcond_tl(TCG_COND_EQ
, t1
, s1
, s2
);
2329 tcg_gen_neg_tl(t1
, t1
);
2330 tcg_gen_or_tl(lo2
, lo2
, t1
);
2331 tcg_gen_and_tl(dst
, dst
, lo2
);
2339 static void gen_alignaddr(TCGv dst
, TCGv s1
, TCGv s2
, bool left
)
2341 TCGv tmp
= tcg_temp_new();
2343 tcg_gen_add_tl(tmp
, s1
, s2
);
2344 tcg_gen_andi_tl(dst
, tmp
, -8);
2346 tcg_gen_neg_tl(tmp
, tmp
);
2348 tcg_gen_deposit_tl(cpu_gsr
, cpu_gsr
, tmp
, 0, 3);
2353 static void gen_faligndata(TCGv dst
, TCGv gsr
, TCGv s1
, TCGv s2
)
2357 t1
= tcg_temp_new();
2358 t2
= tcg_temp_new();
2359 shift
= tcg_temp_new();
2361 tcg_gen_andi_tl(shift
, gsr
, 7);
2362 tcg_gen_shli_tl(shift
, shift
, 3);
2363 tcg_gen_shl_tl(t1
, s1
, shift
);
2365 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2366 shift of (up to 63) followed by a constant shift of 1. */
2367 tcg_gen_xori_tl(shift
, shift
, 63);
2368 tcg_gen_shr_tl(t2
, s2
, shift
);
2369 tcg_gen_shri_tl(t2
, t2
, 1);
2371 tcg_gen_or_tl(dst
, t1
, t2
);
2375 tcg_temp_free(shift
);
2379 #define CHECK_IU_FEATURE(dc, FEATURE) \
2380 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2382 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2383 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2386 /* before an instruction, dc->pc must be static */
2387 static void disas_sparc_insn(DisasContext
* dc
, unsigned int insn
)
2389 unsigned int opc
, rs1
, rs2
, rd
;
2390 TCGv cpu_src1
, cpu_src2
, cpu_tmp1
, cpu_tmp2
;
2391 TCGv_i32 cpu_src1_32
, cpu_src2_32
, cpu_dst_32
;
2392 TCGv_i64 cpu_src1_64
, cpu_src2_64
, cpu_dst_64
;
2395 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
| CPU_LOG_TB_OP_OPT
))) {
2396 tcg_gen_debug_insn_start(dc
->pc
);
2399 opc
= GET_FIELD(insn
, 0, 1);
2401 rd
= GET_FIELD(insn
, 2, 6);
2403 cpu_tmp1
= cpu_src1
= tcg_temp_new();
2404 cpu_tmp2
= cpu_src2
= tcg_temp_new();
2407 case 0: /* branches/sethi */
2409 unsigned int xop
= GET_FIELD(insn
, 7, 9);
2412 #ifdef TARGET_SPARC64
2413 case 0x1: /* V9 BPcc */
2417 target
= GET_FIELD_SP(insn
, 0, 18);
2418 target
= sign_extend(target
, 19);
2420 cc
= GET_FIELD_SP(insn
, 20, 21);
2422 do_branch(dc
, target
, insn
, 0);
2424 do_branch(dc
, target
, insn
, 1);
2429 case 0x3: /* V9 BPr */
2431 target
= GET_FIELD_SP(insn
, 0, 13) |
2432 (GET_FIELD_SP(insn
, 20, 21) << 14);
2433 target
= sign_extend(target
, 16);
2435 cpu_src1
= get_src1(insn
, cpu_src1
);
2436 do_branch_reg(dc
, target
, insn
, cpu_src1
);
2439 case 0x5: /* V9 FBPcc */
2441 int cc
= GET_FIELD_SP(insn
, 20, 21);
2442 if (gen_trap_ifnofpu(dc
)) {
2445 target
= GET_FIELD_SP(insn
, 0, 18);
2446 target
= sign_extend(target
, 19);
2448 do_fbranch(dc
, target
, insn
, cc
);
2452 case 0x7: /* CBN+x */
2457 case 0x2: /* BN+x */
2459 target
= GET_FIELD(insn
, 10, 31);
2460 target
= sign_extend(target
, 22);
2462 do_branch(dc
, target
, insn
, 0);
2465 case 0x6: /* FBN+x */
2467 if (gen_trap_ifnofpu(dc
)) {
2470 target
= GET_FIELD(insn
, 10, 31);
2471 target
= sign_extend(target
, 22);
2473 do_fbranch(dc
, target
, insn
, 0);
2476 case 0x4: /* SETHI */
2478 uint32_t value
= GET_FIELD(insn
, 10, 31);
2481 r_const
= tcg_const_tl(value
<< 10);
2482 gen_movl_TN_reg(rd
, r_const
);
2483 tcg_temp_free(r_const
);
2486 case 0x0: /* UNIMPL */
2495 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
2498 r_const
= tcg_const_tl(dc
->pc
);
2499 gen_movl_TN_reg(15, r_const
);
2500 tcg_temp_free(r_const
);
2502 gen_mov_pc_npc(dc
, cpu_cond
);
2503 #ifdef TARGET_SPARC64
2504 if (unlikely(AM_CHECK(dc
))) {
2505 target
&= 0xffffffffULL
;
2511 case 2: /* FPU & Logical Operations */
2513 unsigned int xop
= GET_FIELD(insn
, 7, 12);
2514 if (xop
== 0x3a) { /* generate trap */
2517 cpu_src1
= get_src1(insn
, cpu_src1
);
2519 rs2
= GET_FIELD(insn
, 25, 31);
2520 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
2522 rs2
= GET_FIELD(insn
, 27, 31);
2524 gen_movl_reg_TN(rs2
, cpu_src2
);
2525 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2527 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
2530 cond
= GET_FIELD(insn
, 3, 6);
2531 if (cond
== 0x8) { /* Trap Always */
2532 save_state(dc
, cpu_cond
);
2533 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2535 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2537 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2538 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2539 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2540 gen_helper_raise_exception(cpu_env
, cpu_tmp32
);
2542 } else if (cond
!= 0) {
2543 TCGv r_cond
= tcg_temp_new();
2545 #ifdef TARGET_SPARC64
2547 int cc
= GET_FIELD_SP(insn
, 11, 12);
2549 save_state(dc
, cpu_cond
);
2551 gen_cond(r_cond
, 0, cond
, dc
);
2553 gen_cond(r_cond
, 1, cond
, dc
);
2557 save_state(dc
, cpu_cond
);
2558 gen_cond(r_cond
, 0, cond
, dc
);
2560 l1
= gen_new_label();
2561 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
2563 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2565 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2567 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2568 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2569 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2570 gen_helper_raise_exception(cpu_env
, cpu_tmp32
);
2573 tcg_temp_free(r_cond
);
2579 } else if (xop
== 0x28) {
2580 rs1
= GET_FIELD(insn
, 13, 17);
2583 #ifndef TARGET_SPARC64
2584 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2585 manual, rdy on the microSPARC
2587 case 0x0f: /* stbar in the SPARCv8 manual,
2588 rdy on the microSPARC II */
2589 case 0x10 ... 0x1f: /* implementation-dependent in the
2590 SPARCv8 manual, rdy on the
2593 if (rs1
== 0x11 && dc
->def
->features
& CPU_FEATURE_ASR17
) {
2596 /* Read Asr17 for a Leon3 monoprocessor */
2597 r_const
= tcg_const_tl((1 << 8)
2598 | (dc
->def
->nwindows
- 1));
2599 gen_movl_TN_reg(rd
, r_const
);
2600 tcg_temp_free(r_const
);
2604 gen_movl_TN_reg(rd
, cpu_y
);
2606 #ifdef TARGET_SPARC64
2607 case 0x2: /* V9 rdccr */
2608 gen_helper_compute_psr(cpu_env
);
2609 gen_helper_rdccr(cpu_dst
, cpu_env
);
2610 gen_movl_TN_reg(rd
, cpu_dst
);
2612 case 0x3: /* V9 rdasi */
2613 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
2614 gen_movl_TN_reg(rd
, cpu_dst
);
2616 case 0x4: /* V9 rdtick */
2620 r_tickptr
= tcg_temp_new_ptr();
2621 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2622 offsetof(CPUSPARCState
, tick
));
2623 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2624 tcg_temp_free_ptr(r_tickptr
);
2625 gen_movl_TN_reg(rd
, cpu_dst
);
2628 case 0x5: /* V9 rdpc */
2632 if (unlikely(AM_CHECK(dc
))) {
2633 r_const
= tcg_const_tl(dc
->pc
& 0xffffffffULL
);
2635 r_const
= tcg_const_tl(dc
->pc
);
2637 gen_movl_TN_reg(rd
, r_const
);
2638 tcg_temp_free(r_const
);
2641 case 0x6: /* V9 rdfprs */
2642 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2643 gen_movl_TN_reg(rd
, cpu_dst
);
2645 case 0xf: /* V9 membar */
2646 break; /* no effect */
2647 case 0x13: /* Graphics Status */
2648 if (gen_trap_ifnofpu(dc
)) {
2651 gen_movl_TN_reg(rd
, cpu_gsr
);
2653 case 0x16: /* Softint */
2654 tcg_gen_ext_i32_tl(cpu_dst
, cpu_softint
);
2655 gen_movl_TN_reg(rd
, cpu_dst
);
2657 case 0x17: /* Tick compare */
2658 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2660 case 0x18: /* System tick */
2664 r_tickptr
= tcg_temp_new_ptr();
2665 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2666 offsetof(CPUSPARCState
, stick
));
2667 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2668 tcg_temp_free_ptr(r_tickptr
);
2669 gen_movl_TN_reg(rd
, cpu_dst
);
2672 case 0x19: /* System tick compare */
2673 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2675 case 0x10: /* Performance Control */
2676 case 0x11: /* Performance Instrumentation Counter */
2677 case 0x12: /* Dispatch Control */
2678 case 0x14: /* Softint set, WO */
2679 case 0x15: /* Softint clear, WO */
2684 #if !defined(CONFIG_USER_ONLY)
2685 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2686 #ifndef TARGET_SPARC64
2687 if (!supervisor(dc
))
2689 gen_helper_compute_psr(cpu_env
);
2690 dc
->cc_op
= CC_OP_FLAGS
;
2691 gen_helper_rdpsr(cpu_dst
, cpu_env
);
2693 CHECK_IU_FEATURE(dc
, HYPV
);
2694 if (!hypervisor(dc
))
2696 rs1
= GET_FIELD(insn
, 13, 17);
2699 // gen_op_rdhpstate();
2702 // gen_op_rdhtstate();
2705 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
2708 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
2711 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
2713 case 31: // hstick_cmpr
2714 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2720 gen_movl_TN_reg(rd
, cpu_dst
);
2722 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2723 if (!supervisor(dc
))
2725 #ifdef TARGET_SPARC64
2726 rs1
= GET_FIELD(insn
, 13, 17);
2732 r_tsptr
= tcg_temp_new_ptr();
2733 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2734 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2735 offsetof(trap_state
, tpc
));
2736 tcg_temp_free_ptr(r_tsptr
);
2743 r_tsptr
= tcg_temp_new_ptr();
2744 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2745 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2746 offsetof(trap_state
, tnpc
));
2747 tcg_temp_free_ptr(r_tsptr
);
2754 r_tsptr
= tcg_temp_new_ptr();
2755 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2756 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2757 offsetof(trap_state
, tstate
));
2758 tcg_temp_free_ptr(r_tsptr
);
2765 r_tsptr
= tcg_temp_new_ptr();
2766 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2767 tcg_gen_ld_i32(cpu_tmp32
, r_tsptr
,
2768 offsetof(trap_state
, tt
));
2769 tcg_temp_free_ptr(r_tsptr
);
2770 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2777 r_tickptr
= tcg_temp_new_ptr();
2778 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2779 offsetof(CPUSPARCState
, tick
));
2780 gen_helper_tick_get_count(cpu_tmp0
, r_tickptr
);
2781 gen_movl_TN_reg(rd
, cpu_tmp0
);
2782 tcg_temp_free_ptr(r_tickptr
);
2786 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2789 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2790 offsetof(CPUSPARCState
, pstate
));
2791 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2794 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2795 offsetof(CPUSPARCState
, tl
));
2796 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2799 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2800 offsetof(CPUSPARCState
, psrpil
));
2801 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2804 gen_helper_rdcwp(cpu_tmp0
, cpu_env
);
2807 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2808 offsetof(CPUSPARCState
, cansave
));
2809 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2811 case 11: // canrestore
2812 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2813 offsetof(CPUSPARCState
, canrestore
));
2814 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2816 case 12: // cleanwin
2817 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2818 offsetof(CPUSPARCState
, cleanwin
));
2819 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2821 case 13: // otherwin
2822 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2823 offsetof(CPUSPARCState
, otherwin
));
2824 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2827 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2828 offsetof(CPUSPARCState
, wstate
));
2829 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2831 case 16: // UA2005 gl
2832 CHECK_IU_FEATURE(dc
, GL
);
2833 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2834 offsetof(CPUSPARCState
, gl
));
2835 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2837 case 26: // UA2005 strand status
2838 CHECK_IU_FEATURE(dc
, HYPV
);
2839 if (!hypervisor(dc
))
2841 tcg_gen_mov_tl(cpu_tmp0
, cpu_ssr
);
2844 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2851 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2853 gen_movl_TN_reg(rd
, cpu_tmp0
);
2855 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2856 #ifdef TARGET_SPARC64
2857 save_state(dc
, cpu_cond
);
2858 gen_helper_flushw(cpu_env
);
2860 if (!supervisor(dc
))
2862 gen_movl_TN_reg(rd
, cpu_tbr
);
2866 } else if (xop
== 0x34) { /* FPU Operations */
2867 if (gen_trap_ifnofpu(dc
)) {
2870 gen_op_clear_ieee_excp_and_FTT();
2871 rs1
= GET_FIELD(insn
, 13, 17);
2872 rs2
= GET_FIELD(insn
, 27, 31);
2873 xop
= GET_FIELD(insn
, 18, 26);
2874 save_state(dc
, cpu_cond
);
2876 case 0x1: /* fmovs */
2877 cpu_src1_32
= gen_load_fpr_F(dc
, rs2
);
2878 gen_store_fpr_F(dc
, rd
, cpu_src1_32
);
2880 case 0x5: /* fnegs */
2881 gen_ne_fop_FF(dc
, rd
, rs2
, gen_helper_fnegs
);
2883 case 0x9: /* fabss */
2884 gen_ne_fop_FF(dc
, rd
, rs2
, gen_helper_fabss
);
2886 case 0x29: /* fsqrts */
2887 CHECK_FPU_FEATURE(dc
, FSQRT
);
2888 gen_fop_FF(dc
, rd
, rs2
, gen_helper_fsqrts
);
2890 case 0x2a: /* fsqrtd */
2891 CHECK_FPU_FEATURE(dc
, FSQRT
);
2892 gen_fop_DD(dc
, rd
, rs2
, gen_helper_fsqrtd
);
2894 case 0x2b: /* fsqrtq */
2895 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2896 gen_fop_QQ(dc
, rd
, rs2
, gen_helper_fsqrtq
);
2898 case 0x41: /* fadds */
2899 gen_fop_FFF(dc
, rd
, rs1
, rs2
, gen_helper_fadds
);
2901 case 0x42: /* faddd */
2902 gen_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_faddd
);
2904 case 0x43: /* faddq */
2905 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2906 gen_fop_QQQ(dc
, rd
, rs1
, rs2
, gen_helper_faddq
);
2908 case 0x45: /* fsubs */
2909 gen_fop_FFF(dc
, rd
, rs1
, rs2
, gen_helper_fsubs
);
2911 case 0x46: /* fsubd */
2912 gen_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fsubd
);
2914 case 0x47: /* fsubq */
2915 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2916 gen_fop_QQQ(dc
, rd
, rs1
, rs2
, gen_helper_fsubq
);
2918 case 0x49: /* fmuls */
2919 CHECK_FPU_FEATURE(dc
, FMUL
);
2920 gen_fop_FFF(dc
, rd
, rs1
, rs2
, gen_helper_fmuls
);
2922 case 0x4a: /* fmuld */
2923 CHECK_FPU_FEATURE(dc
, FMUL
);
2924 gen_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmuld
);
2926 case 0x4b: /* fmulq */
2927 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2928 CHECK_FPU_FEATURE(dc
, FMUL
);
2929 gen_fop_QQQ(dc
, rd
, rs1
, rs2
, gen_helper_fmulq
);
2931 case 0x4d: /* fdivs */
2932 gen_fop_FFF(dc
, rd
, rs1
, rs2
, gen_helper_fdivs
);
2934 case 0x4e: /* fdivd */
2935 gen_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fdivd
);
2937 case 0x4f: /* fdivq */
2938 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2939 gen_fop_QQQ(dc
, rd
, rs1
, rs2
, gen_helper_fdivq
);
2941 case 0x69: /* fsmuld */
2942 CHECK_FPU_FEATURE(dc
, FSMULD
);
2943 gen_fop_DFF(dc
, rd
, rs1
, rs2
, gen_helper_fsmuld
);
2945 case 0x6e: /* fdmulq */
2946 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2947 gen_fop_QDD(dc
, rd
, rs1
, rs2
, gen_helper_fdmulq
);
2949 case 0xc4: /* fitos */
2950 gen_fop_FF(dc
, rd
, rs2
, gen_helper_fitos
);
2952 case 0xc6: /* fdtos */
2953 gen_fop_FD(dc
, rd
, rs2
, gen_helper_fdtos
);
2955 case 0xc7: /* fqtos */
2956 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2957 gen_fop_FQ(dc
, rd
, rs2
, gen_helper_fqtos
);
2959 case 0xc8: /* fitod */
2960 gen_ne_fop_DF(dc
, rd
, rs2
, gen_helper_fitod
);
2962 case 0xc9: /* fstod */
2963 gen_ne_fop_DF(dc
, rd
, rs2
, gen_helper_fstod
);
2965 case 0xcb: /* fqtod */
2966 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2967 gen_fop_DQ(dc
, rd
, rs2
, gen_helper_fqtod
);
2969 case 0xcc: /* fitoq */
2970 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2971 gen_ne_fop_QF(dc
, rd
, rs2
, gen_helper_fitoq
);
2973 case 0xcd: /* fstoq */
2974 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2975 gen_ne_fop_QF(dc
, rd
, rs2
, gen_helper_fstoq
);
2977 case 0xce: /* fdtoq */
2978 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2979 gen_ne_fop_QD(dc
, rd
, rs2
, gen_helper_fdtoq
);
2981 case 0xd1: /* fstoi */
2982 gen_fop_FF(dc
, rd
, rs2
, gen_helper_fstoi
);
2984 case 0xd2: /* fdtoi */
2985 gen_fop_FD(dc
, rd
, rs2
, gen_helper_fdtoi
);
2987 case 0xd3: /* fqtoi */
2988 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2989 gen_fop_FQ(dc
, rd
, rs2
, gen_helper_fqtoi
);
2991 #ifdef TARGET_SPARC64
2992 case 0x2: /* V9 fmovd */
2993 cpu_src1_64
= gen_load_fpr_D(dc
, rs2
);
2994 gen_store_fpr_D(dc
, rd
, cpu_src1_64
);
2996 case 0x3: /* V9 fmovq */
2997 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2998 gen_move_Q(rd
, rs2
);
3000 case 0x6: /* V9 fnegd */
3001 gen_ne_fop_DD(dc
, rd
, rs2
, gen_helper_fnegd
);
3003 case 0x7: /* V9 fnegq */
3004 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3005 gen_ne_fop_QQ(dc
, rd
, rs2
, gen_helper_fnegq
);
3007 case 0xa: /* V9 fabsd */
3008 gen_ne_fop_DD(dc
, rd
, rs2
, gen_helper_fabsd
);
3010 case 0xb: /* V9 fabsq */
3011 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3012 gen_ne_fop_QQ(dc
, rd
, rs2
, gen_helper_fabsq
);
3014 case 0x81: /* V9 fstox */
3015 gen_fop_DF(dc
, rd
, rs2
, gen_helper_fstox
);
3017 case 0x82: /* V9 fdtox */
3018 gen_fop_DD(dc
, rd
, rs2
, gen_helper_fdtox
);
3020 case 0x83: /* V9 fqtox */
3021 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3022 gen_fop_DQ(dc
, rd
, rs2
, gen_helper_fqtox
);
3024 case 0x84: /* V9 fxtos */
3025 gen_fop_FD(dc
, rd
, rs2
, gen_helper_fxtos
);
3027 case 0x88: /* V9 fxtod */
3028 gen_fop_DD(dc
, rd
, rs2
, gen_helper_fxtod
);
3030 case 0x8c: /* V9 fxtoq */
3031 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3032 gen_ne_fop_QD(dc
, rd
, rs2
, gen_helper_fxtoq
);
3038 } else if (xop
== 0x35) { /* FPU Operations */
3039 #ifdef TARGET_SPARC64
3042 if (gen_trap_ifnofpu(dc
)) {
3045 gen_op_clear_ieee_excp_and_FTT();
3046 rs1
= GET_FIELD(insn
, 13, 17);
3047 rs2
= GET_FIELD(insn
, 27, 31);
3048 xop
= GET_FIELD(insn
, 18, 26);
3049 save_state(dc
, cpu_cond
);
3050 #ifdef TARGET_SPARC64
3051 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
3054 l1
= gen_new_label();
3055 cond
= GET_FIELD_SP(insn
, 14, 17);
3056 cpu_src1
= get_src1(insn
, cpu_src1
);
3057 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
3059 cpu_src1_32
= gen_load_fpr_F(dc
, rs2
);
3060 gen_store_fpr_F(dc
, rd
, cpu_src1_32
);
3063 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
3066 l1
= gen_new_label();
3067 cond
= GET_FIELD_SP(insn
, 14, 17);
3068 cpu_src1
= get_src1(insn
, cpu_src1
);
3069 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
3071 cpu_src1_64
= gen_load_fpr_D(dc
, rs2
);
3072 gen_store_fpr_D(dc
, rd
, cpu_src1_64
);
3075 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
3078 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3079 l1
= gen_new_label();
3080 cond
= GET_FIELD_SP(insn
, 14, 17);
3081 cpu_src1
= get_src1(insn
, cpu_src1
);
3082 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
3084 gen_move_Q(rd
, rs2
);
3090 #ifdef TARGET_SPARC64
3091 #define FMOVSCC(fcc) \
3096 l1 = gen_new_label(); \
3097 r_cond = tcg_temp_new(); \
3098 cond = GET_FIELD_SP(insn, 14, 17); \
3099 gen_fcond(r_cond, fcc, cond); \
3100 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3102 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3103 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3104 gen_set_label(l1); \
3105 tcg_temp_free(r_cond); \
3107 #define FMOVDCC(fcc) \
3112 l1 = gen_new_label(); \
3113 r_cond = tcg_temp_new(); \
3114 cond = GET_FIELD_SP(insn, 14, 17); \
3115 gen_fcond(r_cond, fcc, cond); \
3116 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3118 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3119 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3120 gen_set_label(l1); \
3121 tcg_temp_free(r_cond); \
3123 #define FMOVQCC(fcc) \
3128 l1 = gen_new_label(); \
3129 r_cond = tcg_temp_new(); \
3130 cond = GET_FIELD_SP(insn, 14, 17); \
3131 gen_fcond(r_cond, fcc, cond); \
3132 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3134 gen_move_Q(rd, rs2); \
3135 gen_set_label(l1); \
3136 tcg_temp_free(r_cond); \
3138 case 0x001: /* V9 fmovscc %fcc0 */
3141 case 0x002: /* V9 fmovdcc %fcc0 */
3144 case 0x003: /* V9 fmovqcc %fcc0 */
3145 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3148 case 0x041: /* V9 fmovscc %fcc1 */
3151 case 0x042: /* V9 fmovdcc %fcc1 */
3154 case 0x043: /* V9 fmovqcc %fcc1 */
3155 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3158 case 0x081: /* V9 fmovscc %fcc2 */
3161 case 0x082: /* V9 fmovdcc %fcc2 */
3164 case 0x083: /* V9 fmovqcc %fcc2 */
3165 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3168 case 0x0c1: /* V9 fmovscc %fcc3 */
3171 case 0x0c2: /* V9 fmovdcc %fcc3 */
3174 case 0x0c3: /* V9 fmovqcc %fcc3 */
3175 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3181 #define FMOVSCC(icc) \
3186 l1 = gen_new_label(); \
3187 r_cond = tcg_temp_new(); \
3188 cond = GET_FIELD_SP(insn, 14, 17); \
3189 gen_cond(r_cond, icc, cond, dc); \
3190 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3192 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3193 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3194 gen_set_label(l1); \
3195 tcg_temp_free(r_cond); \
3197 #define FMOVDCC(icc) \
3202 l1 = gen_new_label(); \
3203 r_cond = tcg_temp_new(); \
3204 cond = GET_FIELD_SP(insn, 14, 17); \
3205 gen_cond(r_cond, icc, cond, dc); \
3206 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3208 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3209 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3210 gen_update_fprs_dirty(DFPREG(rd)); \
3211 gen_set_label(l1); \
3212 tcg_temp_free(r_cond); \
3214 #define FMOVQCC(icc) \
3219 l1 = gen_new_label(); \
3220 r_cond = tcg_temp_new(); \
3221 cond = GET_FIELD_SP(insn, 14, 17); \
3222 gen_cond(r_cond, icc, cond, dc); \
3223 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3225 gen_move_Q(rd, rs2); \
3226 gen_set_label(l1); \
3227 tcg_temp_free(r_cond); \
3230 case 0x101: /* V9 fmovscc %icc */
3233 case 0x102: /* V9 fmovdcc %icc */
3236 case 0x103: /* V9 fmovqcc %icc */
3237 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3240 case 0x181: /* V9 fmovscc %xcc */
3243 case 0x182: /* V9 fmovdcc %xcc */
3246 case 0x183: /* V9 fmovqcc %xcc */
3247 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3254 case 0x51: /* fcmps, V9 %fcc */
3255 cpu_src1_32
= gen_load_fpr_F(dc
, rs1
);
3256 cpu_src2_32
= gen_load_fpr_F(dc
, rs2
);
3257 gen_op_fcmps(rd
& 3, cpu_src1_32
, cpu_src2_32
);
3259 case 0x52: /* fcmpd, V9 %fcc */
3260 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
3261 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
3262 gen_op_fcmpd(rd
& 3, cpu_src1_64
, cpu_src2_64
);
3264 case 0x53: /* fcmpq, V9 %fcc */
3265 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3266 gen_op_load_fpr_QT0(QFPREG(rs1
));
3267 gen_op_load_fpr_QT1(QFPREG(rs2
));
3268 gen_op_fcmpq(rd
& 3);
3270 case 0x55: /* fcmpes, V9 %fcc */
3271 cpu_src1_32
= gen_load_fpr_F(dc
, rs1
);
3272 cpu_src2_32
= gen_load_fpr_F(dc
, rs2
);
3273 gen_op_fcmpes(rd
& 3, cpu_src1_32
, cpu_src2_32
);
3275 case 0x56: /* fcmped, V9 %fcc */
3276 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
3277 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
3278 gen_op_fcmped(rd
& 3, cpu_src1_64
, cpu_src2_64
);
3280 case 0x57: /* fcmpeq, V9 %fcc */
3281 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3282 gen_op_load_fpr_QT0(QFPREG(rs1
));
3283 gen_op_load_fpr_QT1(QFPREG(rs2
));
3284 gen_op_fcmpeq(rd
& 3);
3289 } else if (xop
== 0x2) {
3292 rs1
= GET_FIELD(insn
, 13, 17);
3294 // or %g0, x, y -> mov T0, x; mov y, T0
3295 if (IS_IMM
) { /* immediate */
3298 simm
= GET_FIELDs(insn
, 19, 31);
3299 r_const
= tcg_const_tl(simm
);
3300 gen_movl_TN_reg(rd
, r_const
);
3301 tcg_temp_free(r_const
);
3302 } else { /* register */
3303 rs2
= GET_FIELD(insn
, 27, 31);
3304 gen_movl_reg_TN(rs2
, cpu_dst
);
3305 gen_movl_TN_reg(rd
, cpu_dst
);
3308 cpu_src1
= get_src1(insn
, cpu_src1
);
3309 if (IS_IMM
) { /* immediate */
3310 simm
= GET_FIELDs(insn
, 19, 31);
3311 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
3312 gen_movl_TN_reg(rd
, cpu_dst
);
3313 } else { /* register */
3314 // or x, %g0, y -> mov T1, x; mov y, T1
3315 rs2
= GET_FIELD(insn
, 27, 31);
3317 gen_movl_reg_TN(rs2
, cpu_src2
);
3318 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3319 gen_movl_TN_reg(rd
, cpu_dst
);
3321 gen_movl_TN_reg(rd
, cpu_src1
);
3324 #ifdef TARGET_SPARC64
3325 } else if (xop
== 0x25) { /* sll, V9 sllx */
3326 cpu_src1
= get_src1(insn
, cpu_src1
);
3327 if (IS_IMM
) { /* immediate */
3328 simm
= GET_FIELDs(insn
, 20, 31);
3329 if (insn
& (1 << 12)) {
3330 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3332 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x1f);
3334 } else { /* register */
3335 rs2
= GET_FIELD(insn
, 27, 31);
3336 gen_movl_reg_TN(rs2
, cpu_src2
);
3337 if (insn
& (1 << 12)) {
3338 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3340 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3342 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3344 gen_movl_TN_reg(rd
, cpu_dst
);
3345 } else if (xop
== 0x26) { /* srl, V9 srlx */
3346 cpu_src1
= get_src1(insn
, cpu_src1
);
3347 if (IS_IMM
) { /* immediate */
3348 simm
= GET_FIELDs(insn
, 20, 31);
3349 if (insn
& (1 << 12)) {
3350 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3352 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3353 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
3355 } else { /* register */
3356 rs2
= GET_FIELD(insn
, 27, 31);
3357 gen_movl_reg_TN(rs2
, cpu_src2
);
3358 if (insn
& (1 << 12)) {
3359 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3360 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3362 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3363 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3364 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3367 gen_movl_TN_reg(rd
, cpu_dst
);
3368 } else if (xop
== 0x27) { /* sra, V9 srax */
3369 cpu_src1
= get_src1(insn
, cpu_src1
);
3370 if (IS_IMM
) { /* immediate */
3371 simm
= GET_FIELDs(insn
, 20, 31);
3372 if (insn
& (1 << 12)) {
3373 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3375 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3376 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3377 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
3379 } else { /* register */
3380 rs2
= GET_FIELD(insn
, 27, 31);
3381 gen_movl_reg_TN(rs2
, cpu_src2
);
3382 if (insn
& (1 << 12)) {
3383 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3384 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3386 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3387 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3388 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3389 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3392 gen_movl_TN_reg(rd
, cpu_dst
);
3394 } else if (xop
< 0x36) {
3396 cpu_src1
= get_src1(insn
, cpu_src1
);
3397 cpu_src2
= get_src2(insn
, cpu_src2
);
3398 switch (xop
& ~0x10) {
3401 simm
= GET_FIELDs(insn
, 19, 31);
3403 gen_op_addi_cc(cpu_dst
, cpu_src1
, simm
);
3404 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3405 dc
->cc_op
= CC_OP_ADD
;
3407 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
3411 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3412 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3413 dc
->cc_op
= CC_OP_ADD
;
3415 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3421 simm
= GET_FIELDs(insn
, 19, 31);
3422 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, simm
);
3424 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3427 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3428 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3429 dc
->cc_op
= CC_OP_LOGIC
;
3434 simm
= GET_FIELDs(insn
, 19, 31);
3435 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
3437 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3440 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3441 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3442 dc
->cc_op
= CC_OP_LOGIC
;
3447 simm
= GET_FIELDs(insn
, 19, 31);
3448 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, simm
);
3450 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3453 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3454 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3455 dc
->cc_op
= CC_OP_LOGIC
;
3460 simm
= GET_FIELDs(insn
, 19, 31);
3462 gen_op_subi_cc(cpu_dst
, cpu_src1
, simm
, dc
);
3464 tcg_gen_subi_tl(cpu_dst
, cpu_src1
, simm
);
3468 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3469 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
3470 dc
->cc_op
= CC_OP_SUB
;
3472 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3476 case 0x5: /* andn */
3478 simm
= GET_FIELDs(insn
, 19, 31);
3479 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, ~simm
);
3481 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3484 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3485 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3486 dc
->cc_op
= CC_OP_LOGIC
;
3491 simm
= GET_FIELDs(insn
, 19, 31);
3492 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, ~simm
);
3494 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3497 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3498 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3499 dc
->cc_op
= CC_OP_LOGIC
;
3502 case 0x7: /* xorn */
3504 simm
= GET_FIELDs(insn
, 19, 31);
3505 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, ~simm
);
3507 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
3508 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3511 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3512 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3513 dc
->cc_op
= CC_OP_LOGIC
;
3516 case 0x8: /* addx, V9 addc */
3517 gen_op_addx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3520 #ifdef TARGET_SPARC64
3521 case 0x9: /* V9 mulx */
3523 simm
= GET_FIELDs(insn
, 19, 31);
3524 tcg_gen_muli_i64(cpu_dst
, cpu_src1
, simm
);
3526 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3530 case 0xa: /* umul */
3531 CHECK_IU_FEATURE(dc
, MUL
);
3532 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3534 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3535 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3536 dc
->cc_op
= CC_OP_LOGIC
;
3539 case 0xb: /* smul */
3540 CHECK_IU_FEATURE(dc
, MUL
);
3541 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3543 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3544 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3545 dc
->cc_op
= CC_OP_LOGIC
;
3548 case 0xc: /* subx, V9 subc */
3549 gen_op_subx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3552 #ifdef TARGET_SPARC64
3553 case 0xd: /* V9 udivx */
3555 TCGv r_temp1
, r_temp2
;
3556 r_temp1
= tcg_temp_local_new();
3557 r_temp2
= tcg_temp_local_new();
3558 tcg_gen_mov_tl(r_temp1
, cpu_src1
);
3559 tcg_gen_mov_tl(r_temp2
, cpu_src2
);
3560 gen_trap_ifdivzero_tl(r_temp2
);
3561 tcg_gen_divu_i64(cpu_dst
, r_temp1
, r_temp2
);
3562 tcg_temp_free(r_temp1
);
3563 tcg_temp_free(r_temp2
);
3567 case 0xe: /* udiv */
3568 CHECK_IU_FEATURE(dc
, DIV
);
3570 gen_helper_udiv_cc(cpu_dst
, cpu_env
, cpu_src1
,
3572 dc
->cc_op
= CC_OP_DIV
;
3574 gen_helper_udiv(cpu_dst
, cpu_env
, cpu_src1
,
3578 case 0xf: /* sdiv */
3579 CHECK_IU_FEATURE(dc
, DIV
);
3581 gen_helper_sdiv_cc(cpu_dst
, cpu_env
, cpu_src1
,
3583 dc
->cc_op
= CC_OP_DIV
;
3585 gen_helper_sdiv(cpu_dst
, cpu_env
, cpu_src1
,
3592 gen_movl_TN_reg(rd
, cpu_dst
);
3594 cpu_src1
= get_src1(insn
, cpu_src1
);
3595 cpu_src2
= get_src2(insn
, cpu_src2
);
3597 case 0x20: /* taddcc */
3598 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3599 gen_movl_TN_reg(rd
, cpu_dst
);
3600 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADD
);
3601 dc
->cc_op
= CC_OP_TADD
;
3603 case 0x21: /* tsubcc */
3604 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3605 gen_movl_TN_reg(rd
, cpu_dst
);
3606 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUB
);
3607 dc
->cc_op
= CC_OP_TSUB
;
3609 case 0x22: /* taddcctv */
3610 save_state(dc
, cpu_cond
);
3611 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3612 gen_movl_TN_reg(rd
, cpu_dst
);
3613 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADDTV
);
3614 dc
->cc_op
= CC_OP_TADDTV
;
3616 case 0x23: /* tsubcctv */
3617 save_state(dc
, cpu_cond
);
3618 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3619 gen_movl_TN_reg(rd
, cpu_dst
);
3620 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUBTV
);
3621 dc
->cc_op
= CC_OP_TSUBTV
;
3623 case 0x24: /* mulscc */
3624 gen_helper_compute_psr(cpu_env
);
3625 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3626 gen_movl_TN_reg(rd
, cpu_dst
);
3627 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3628 dc
->cc_op
= CC_OP_ADD
;
3630 #ifndef TARGET_SPARC64
3631 case 0x25: /* sll */
3632 if (IS_IMM
) { /* immediate */
3633 simm
= GET_FIELDs(insn
, 20, 31);
3634 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3635 } else { /* register */
3636 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3637 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3639 gen_movl_TN_reg(rd
, cpu_dst
);
3641 case 0x26: /* srl */
3642 if (IS_IMM
) { /* immediate */
3643 simm
= GET_FIELDs(insn
, 20, 31);
3644 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3645 } else { /* register */
3646 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3647 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3649 gen_movl_TN_reg(rd
, cpu_dst
);
3651 case 0x27: /* sra */
3652 if (IS_IMM
) { /* immediate */
3653 simm
= GET_FIELDs(insn
, 20, 31);
3654 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3655 } else { /* register */
3656 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3657 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3659 gen_movl_TN_reg(rd
, cpu_dst
);
3666 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3667 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3669 #ifndef TARGET_SPARC64
3670 case 0x01 ... 0x0f: /* undefined in the
3674 case 0x10 ... 0x1f: /* implementation-dependent
3680 case 0x2: /* V9 wrccr */
3681 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3682 gen_helper_wrccr(cpu_env
, cpu_dst
);
3683 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3684 dc
->cc_op
= CC_OP_FLAGS
;
3686 case 0x3: /* V9 wrasi */
3687 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3688 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, 0xff);
3689 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3691 case 0x6: /* V9 wrfprs */
3692 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3693 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3694 save_state(dc
, cpu_cond
);
3699 case 0xf: /* V9 sir, nop if user */
3700 #if !defined(CONFIG_USER_ONLY)
3701 if (supervisor(dc
)) {
3706 case 0x13: /* Graphics Status */
3707 if (gen_trap_ifnofpu(dc
)) {
3710 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3712 case 0x14: /* Softint set */
3713 if (!supervisor(dc
))
3715 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3716 gen_helper_set_softint(cpu_env
, cpu_tmp64
);
3718 case 0x15: /* Softint clear */
3719 if (!supervisor(dc
))
3721 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3722 gen_helper_clear_softint(cpu_env
, cpu_tmp64
);
3724 case 0x16: /* Softint write */
3725 if (!supervisor(dc
))
3727 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3728 gen_helper_write_softint(cpu_env
, cpu_tmp64
);
3730 case 0x17: /* Tick compare */
3731 #if !defined(CONFIG_USER_ONLY)
3732 if (!supervisor(dc
))
3738 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3740 r_tickptr
= tcg_temp_new_ptr();
3741 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3742 offsetof(CPUSPARCState
, tick
));
3743 gen_helper_tick_set_limit(r_tickptr
,
3745 tcg_temp_free_ptr(r_tickptr
);
3748 case 0x18: /* System tick */
3749 #if !defined(CONFIG_USER_ONLY)
3750 if (!supervisor(dc
))
3756 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3758 r_tickptr
= tcg_temp_new_ptr();
3759 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3760 offsetof(CPUSPARCState
, stick
));
3761 gen_helper_tick_set_count(r_tickptr
,
3763 tcg_temp_free_ptr(r_tickptr
);
3766 case 0x19: /* System tick compare */
3767 #if !defined(CONFIG_USER_ONLY)
3768 if (!supervisor(dc
))
3774 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3776 r_tickptr
= tcg_temp_new_ptr();
3777 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3778 offsetof(CPUSPARCState
, stick
));
3779 gen_helper_tick_set_limit(r_tickptr
,
3781 tcg_temp_free_ptr(r_tickptr
);
3785 case 0x10: /* Performance Control */
3786 case 0x11: /* Performance Instrumentation
3788 case 0x12: /* Dispatch Control */
3795 #if !defined(CONFIG_USER_ONLY)
3796 case 0x31: /* wrpsr, V9 saved, restored */
3798 if (!supervisor(dc
))
3800 #ifdef TARGET_SPARC64
3803 gen_helper_saved(cpu_env
);
3806 gen_helper_restored(cpu_env
);
3808 case 2: /* UA2005 allclean */
3809 case 3: /* UA2005 otherw */
3810 case 4: /* UA2005 normalw */
3811 case 5: /* UA2005 invalw */
3817 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3818 gen_helper_wrpsr(cpu_env
, cpu_dst
);
3819 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3820 dc
->cc_op
= CC_OP_FLAGS
;
3821 save_state(dc
, cpu_cond
);
3828 case 0x32: /* wrwim, V9 wrpr */
3830 if (!supervisor(dc
))
3832 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3833 #ifdef TARGET_SPARC64
3839 r_tsptr
= tcg_temp_new_ptr();
3840 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3841 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3842 offsetof(trap_state
, tpc
));
3843 tcg_temp_free_ptr(r_tsptr
);
3850 r_tsptr
= tcg_temp_new_ptr();
3851 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3852 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3853 offsetof(trap_state
, tnpc
));
3854 tcg_temp_free_ptr(r_tsptr
);
3861 r_tsptr
= tcg_temp_new_ptr();
3862 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3863 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3864 offsetof(trap_state
,
3866 tcg_temp_free_ptr(r_tsptr
);
3873 r_tsptr
= tcg_temp_new_ptr();
3874 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3875 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3876 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3877 offsetof(trap_state
, tt
));
3878 tcg_temp_free_ptr(r_tsptr
);
3885 r_tickptr
= tcg_temp_new_ptr();
3886 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3887 offsetof(CPUSPARCState
, tick
));
3888 gen_helper_tick_set_count(r_tickptr
,
3890 tcg_temp_free_ptr(r_tickptr
);
3894 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3898 TCGv r_tmp
= tcg_temp_local_new();
3900 tcg_gen_mov_tl(r_tmp
, cpu_tmp0
);
3901 save_state(dc
, cpu_cond
);
3902 gen_helper_wrpstate(cpu_env
, r_tmp
);
3903 tcg_temp_free(r_tmp
);
3904 dc
->npc
= DYNAMIC_PC
;
3909 TCGv r_tmp
= tcg_temp_local_new();
3911 tcg_gen_mov_tl(r_tmp
, cpu_tmp0
);
3912 save_state(dc
, cpu_cond
);
3913 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_tmp
);
3914 tcg_temp_free(r_tmp
);
3915 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3916 offsetof(CPUSPARCState
, tl
));
3917 dc
->npc
= DYNAMIC_PC
;
3921 gen_helper_wrpil(cpu_env
, cpu_tmp0
);
3924 gen_helper_wrcwp(cpu_env
, cpu_tmp0
);
3927 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3928 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3929 offsetof(CPUSPARCState
,
3932 case 11: // canrestore
3933 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3934 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3935 offsetof(CPUSPARCState
,
3938 case 12: // cleanwin
3939 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3940 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3941 offsetof(CPUSPARCState
,
3944 case 13: // otherwin
3945 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3946 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3947 offsetof(CPUSPARCState
,
3951 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3952 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3953 offsetof(CPUSPARCState
,
3956 case 16: // UA2005 gl
3957 CHECK_IU_FEATURE(dc
, GL
);
3958 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3959 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3960 offsetof(CPUSPARCState
, gl
));
3962 case 26: // UA2005 strand status
3963 CHECK_IU_FEATURE(dc
, HYPV
);
3964 if (!hypervisor(dc
))
3966 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3972 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3973 if (dc
->def
->nwindows
!= 32)
3974 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3975 (1 << dc
->def
->nwindows
) - 1);
3976 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3980 case 0x33: /* wrtbr, UA2005 wrhpr */
3982 #ifndef TARGET_SPARC64
3983 if (!supervisor(dc
))
3985 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3987 CHECK_IU_FEATURE(dc
, HYPV
);
3988 if (!hypervisor(dc
))
3990 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3993 // XXX gen_op_wrhpstate();
3994 save_state(dc
, cpu_cond
);
4000 // XXX gen_op_wrhtstate();
4003 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
4006 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
4008 case 31: // hstick_cmpr
4012 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
4013 r_tickptr
= tcg_temp_new_ptr();
4014 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
4015 offsetof(CPUSPARCState
, hstick
));
4016 gen_helper_tick_set_limit(r_tickptr
,
4018 tcg_temp_free_ptr(r_tickptr
);
4021 case 6: // hver readonly
4029 #ifdef TARGET_SPARC64
4030 case 0x2c: /* V9 movcc */
4032 int cc
= GET_FIELD_SP(insn
, 11, 12);
4033 int cond
= GET_FIELD_SP(insn
, 14, 17);
4037 r_cond
= tcg_temp_new();
4038 if (insn
& (1 << 18)) {
4040 gen_cond(r_cond
, 0, cond
, dc
);
4042 gen_cond(r_cond
, 1, cond
, dc
);
4046 gen_fcond(r_cond
, cc
, cond
);
4049 l1
= gen_new_label();
4051 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
4052 if (IS_IMM
) { /* immediate */
4055 simm
= GET_FIELD_SPs(insn
, 0, 10);
4056 r_const
= tcg_const_tl(simm
);
4057 gen_movl_TN_reg(rd
, r_const
);
4058 tcg_temp_free(r_const
);
4060 rs2
= GET_FIELD_SP(insn
, 0, 4);
4061 gen_movl_reg_TN(rs2
, cpu_tmp0
);
4062 gen_movl_TN_reg(rd
, cpu_tmp0
);
4065 tcg_temp_free(r_cond
);
4068 case 0x2d: /* V9 sdivx */
4069 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
4070 gen_movl_TN_reg(rd
, cpu_dst
);
4072 case 0x2e: /* V9 popc */
4074 cpu_src2
= get_src2(insn
, cpu_src2
);
4075 gen_helper_popc(cpu_dst
, cpu_src2
);
4076 gen_movl_TN_reg(rd
, cpu_dst
);
4078 case 0x2f: /* V9 movr */
4080 int cond
= GET_FIELD_SP(insn
, 10, 12);
4083 cpu_src1
= get_src1(insn
, cpu_src1
);
4085 l1
= gen_new_label();
4087 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
4089 if (IS_IMM
) { /* immediate */
4092 simm
= GET_FIELD_SPs(insn
, 0, 9);
4093 r_const
= tcg_const_tl(simm
);
4094 gen_movl_TN_reg(rd
, r_const
);
4095 tcg_temp_free(r_const
);
4097 rs2
= GET_FIELD_SP(insn
, 0, 4);
4098 gen_movl_reg_TN(rs2
, cpu_tmp0
);
4099 gen_movl_TN_reg(rd
, cpu_tmp0
);
4109 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4110 #ifdef TARGET_SPARC64
4111 int opf
= GET_FIELD_SP(insn
, 5, 13);
4112 rs1
= GET_FIELD(insn
, 13, 17);
4113 rs2
= GET_FIELD(insn
, 27, 31);
4114 if (gen_trap_ifnofpu(dc
)) {
4119 case 0x000: /* VIS I edge8cc */
4120 CHECK_FPU_FEATURE(dc
, VIS1
);
4121 gen_movl_reg_TN(rs1
, cpu_src1
);
4122 gen_movl_reg_TN(rs2
, cpu_src2
);
4123 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 8, 1, 0);
4124 gen_movl_TN_reg(rd
, cpu_dst
);
4126 case 0x001: /* VIS II edge8n */
4127 CHECK_FPU_FEATURE(dc
, VIS2
);
4128 gen_movl_reg_TN(rs1
, cpu_src1
);
4129 gen_movl_reg_TN(rs2
, cpu_src2
);
4130 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 8, 0, 0);
4131 gen_movl_TN_reg(rd
, cpu_dst
);
4133 case 0x002: /* VIS I edge8lcc */
4134 CHECK_FPU_FEATURE(dc
, VIS1
);
4135 gen_movl_reg_TN(rs1
, cpu_src1
);
4136 gen_movl_reg_TN(rs2
, cpu_src2
);
4137 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 8, 1, 1);
4138 gen_movl_TN_reg(rd
, cpu_dst
);
4140 case 0x003: /* VIS II edge8ln */
4141 CHECK_FPU_FEATURE(dc
, VIS2
);
4142 gen_movl_reg_TN(rs1
, cpu_src1
);
4143 gen_movl_reg_TN(rs2
, cpu_src2
);
4144 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 8, 0, 1);
4145 gen_movl_TN_reg(rd
, cpu_dst
);
4147 case 0x004: /* VIS I edge16cc */
4148 CHECK_FPU_FEATURE(dc
, VIS1
);
4149 gen_movl_reg_TN(rs1
, cpu_src1
);
4150 gen_movl_reg_TN(rs2
, cpu_src2
);
4151 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 16, 1, 0);
4152 gen_movl_TN_reg(rd
, cpu_dst
);
4154 case 0x005: /* VIS II edge16n */
4155 CHECK_FPU_FEATURE(dc
, VIS2
);
4156 gen_movl_reg_TN(rs1
, cpu_src1
);
4157 gen_movl_reg_TN(rs2
, cpu_src2
);
4158 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 16, 0, 0);
4159 gen_movl_TN_reg(rd
, cpu_dst
);
4161 case 0x006: /* VIS I edge16lcc */
4162 CHECK_FPU_FEATURE(dc
, VIS1
);
4163 gen_movl_reg_TN(rs1
, cpu_src1
);
4164 gen_movl_reg_TN(rs2
, cpu_src2
);
4165 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 16, 1, 1);
4166 gen_movl_TN_reg(rd
, cpu_dst
);
4168 case 0x007: /* VIS II edge16ln */
4169 CHECK_FPU_FEATURE(dc
, VIS2
);
4170 gen_movl_reg_TN(rs1
, cpu_src1
);
4171 gen_movl_reg_TN(rs2
, cpu_src2
);
4172 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 16, 0, 1);
4173 gen_movl_TN_reg(rd
, cpu_dst
);
4175 case 0x008: /* VIS I edge32cc */
4176 CHECK_FPU_FEATURE(dc
, VIS1
);
4177 gen_movl_reg_TN(rs1
, cpu_src1
);
4178 gen_movl_reg_TN(rs2
, cpu_src2
);
4179 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 32, 1, 0);
4180 gen_movl_TN_reg(rd
, cpu_dst
);
4182 case 0x009: /* VIS II edge32n */
4183 CHECK_FPU_FEATURE(dc
, VIS2
);
4184 gen_movl_reg_TN(rs1
, cpu_src1
);
4185 gen_movl_reg_TN(rs2
, cpu_src2
);
4186 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 32, 0, 0);
4187 gen_movl_TN_reg(rd
, cpu_dst
);
4189 case 0x00a: /* VIS I edge32lcc */
4190 CHECK_FPU_FEATURE(dc
, VIS1
);
4191 gen_movl_reg_TN(rs1
, cpu_src1
);
4192 gen_movl_reg_TN(rs2
, cpu_src2
);
4193 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 32, 1, 1);
4194 gen_movl_TN_reg(rd
, cpu_dst
);
4196 case 0x00b: /* VIS II edge32ln */
4197 CHECK_FPU_FEATURE(dc
, VIS2
);
4198 gen_movl_reg_TN(rs1
, cpu_src1
);
4199 gen_movl_reg_TN(rs2
, cpu_src2
);
4200 gen_edge(dc
, cpu_dst
, cpu_src1
, cpu_src2
, 32, 0, 1);
4201 gen_movl_TN_reg(rd
, cpu_dst
);
4203 case 0x010: /* VIS I array8 */
4204 CHECK_FPU_FEATURE(dc
, VIS1
);
4205 cpu_src1
= get_src1(insn
, cpu_src1
);
4206 gen_movl_reg_TN(rs2
, cpu_src2
);
4207 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
4208 gen_movl_TN_reg(rd
, cpu_dst
);
4210 case 0x012: /* VIS I array16 */
4211 CHECK_FPU_FEATURE(dc
, VIS1
);
4212 cpu_src1
= get_src1(insn
, cpu_src1
);
4213 gen_movl_reg_TN(rs2
, cpu_src2
);
4214 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
4215 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
4216 gen_movl_TN_reg(rd
, cpu_dst
);
4218 case 0x014: /* VIS I array32 */
4219 CHECK_FPU_FEATURE(dc
, VIS1
);
4220 cpu_src1
= get_src1(insn
, cpu_src1
);
4221 gen_movl_reg_TN(rs2
, cpu_src2
);
4222 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
4223 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
4224 gen_movl_TN_reg(rd
, cpu_dst
);
4226 case 0x018: /* VIS I alignaddr */
4227 CHECK_FPU_FEATURE(dc
, VIS1
);
4228 cpu_src1
= get_src1(insn
, cpu_src1
);
4229 gen_movl_reg_TN(rs2
, cpu_src2
);
4230 gen_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
, 0);
4231 gen_movl_TN_reg(rd
, cpu_dst
);
4233 case 0x01a: /* VIS I alignaddrl */
4234 CHECK_FPU_FEATURE(dc
, VIS1
);
4235 cpu_src1
= get_src1(insn
, cpu_src1
);
4236 gen_movl_reg_TN(rs2
, cpu_src2
);
4237 gen_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
, 1);
4238 gen_movl_TN_reg(rd
, cpu_dst
);
4240 case 0x019: /* VIS II bmask */
4241 CHECK_FPU_FEATURE(dc
, VIS2
);
4242 cpu_src1
= get_src1(insn
, cpu_src1
);
4243 cpu_src2
= get_src1(insn
, cpu_src2
);
4244 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4245 tcg_gen_deposit_tl(cpu_gsr
, cpu_gsr
, cpu_dst
, 32, 32);
4246 gen_movl_TN_reg(rd
, cpu_dst
);
4248 case 0x020: /* VIS I fcmple16 */
4249 CHECK_FPU_FEATURE(dc
, VIS1
);
4250 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4251 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4252 gen_helper_fcmple16(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4253 gen_movl_TN_reg(rd
, cpu_dst
);
4255 case 0x022: /* VIS I fcmpne16 */
4256 CHECK_FPU_FEATURE(dc
, VIS1
);
4257 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4258 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4259 gen_helper_fcmpne16(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4260 gen_movl_TN_reg(rd
, cpu_dst
);
4262 case 0x024: /* VIS I fcmple32 */
4263 CHECK_FPU_FEATURE(dc
, VIS1
);
4264 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4265 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4266 gen_helper_fcmple32(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4267 gen_movl_TN_reg(rd
, cpu_dst
);
4269 case 0x026: /* VIS I fcmpne32 */
4270 CHECK_FPU_FEATURE(dc
, VIS1
);
4271 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4272 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4273 gen_helper_fcmpne32(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4274 gen_movl_TN_reg(rd
, cpu_dst
);
4276 case 0x028: /* VIS I fcmpgt16 */
4277 CHECK_FPU_FEATURE(dc
, VIS1
);
4278 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4279 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4280 gen_helper_fcmpgt16(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4281 gen_movl_TN_reg(rd
, cpu_dst
);
4283 case 0x02a: /* VIS I fcmpeq16 */
4284 CHECK_FPU_FEATURE(dc
, VIS1
);
4285 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4286 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4287 gen_helper_fcmpeq16(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4288 gen_movl_TN_reg(rd
, cpu_dst
);
4290 case 0x02c: /* VIS I fcmpgt32 */
4291 CHECK_FPU_FEATURE(dc
, VIS1
);
4292 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4293 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4294 gen_helper_fcmpgt32(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4295 gen_movl_TN_reg(rd
, cpu_dst
);
4297 case 0x02e: /* VIS I fcmpeq32 */
4298 CHECK_FPU_FEATURE(dc
, VIS1
);
4299 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4300 cpu_src2_64
= gen_load_fpr_D(dc
, rs2
);
4301 gen_helper_fcmpeq32(cpu_dst
, cpu_src1_64
, cpu_src2_64
);
4302 gen_movl_TN_reg(rd
, cpu_dst
);
4304 case 0x031: /* VIS I fmul8x16 */
4305 CHECK_FPU_FEATURE(dc
, VIS1
);
4306 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmul8x16
);
4308 case 0x033: /* VIS I fmul8x16au */
4309 CHECK_FPU_FEATURE(dc
, VIS1
);
4310 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmul8x16au
);
4312 case 0x035: /* VIS I fmul8x16al */
4313 CHECK_FPU_FEATURE(dc
, VIS1
);
4314 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmul8x16al
);
4316 case 0x036: /* VIS I fmul8sux16 */
4317 CHECK_FPU_FEATURE(dc
, VIS1
);
4318 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmul8sux16
);
4320 case 0x037: /* VIS I fmul8ulx16 */
4321 CHECK_FPU_FEATURE(dc
, VIS1
);
4322 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmul8ulx16
);
4324 case 0x038: /* VIS I fmuld8sux16 */
4325 CHECK_FPU_FEATURE(dc
, VIS1
);
4326 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmuld8sux16
);
4328 case 0x039: /* VIS I fmuld8ulx16 */
4329 CHECK_FPU_FEATURE(dc
, VIS1
);
4330 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fmuld8ulx16
);
4332 case 0x03a: /* VIS I fpack32 */
4333 CHECK_FPU_FEATURE(dc
, VIS1
);
4334 gen_gsr_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fpack32
);
4336 case 0x03b: /* VIS I fpack16 */
4337 CHECK_FPU_FEATURE(dc
, VIS1
);
4338 cpu_src1_64
= gen_load_fpr_D(dc
, rs2
);
4339 cpu_dst_32
= gen_dest_fpr_F();
4340 gen_helper_fpack16(cpu_dst_32
, cpu_gsr
, cpu_src1_64
);
4341 gen_store_fpr_F(dc
, rd
, cpu_dst_32
);
4343 case 0x03d: /* VIS I fpackfix */
4344 CHECK_FPU_FEATURE(dc
, VIS1
);
4345 cpu_src1_64
= gen_load_fpr_D(dc
, rs2
);
4346 cpu_dst_32
= gen_dest_fpr_F();
4347 gen_helper_fpackfix(cpu_dst_32
, cpu_gsr
, cpu_src1_64
);
4348 gen_store_fpr_F(dc
, rd
, cpu_dst_32
);
4350 case 0x03e: /* VIS I pdist */
4351 CHECK_FPU_FEATURE(dc
, VIS1
);
4352 gen_ne_fop_DDDD(dc
, rd
, rs1
, rs2
, gen_helper_pdist
);
4354 case 0x048: /* VIS I faligndata */
4355 CHECK_FPU_FEATURE(dc
, VIS1
);
4356 gen_gsr_fop_DDD(dc
, rd
, rs1
, rs2
, gen_faligndata
);
4358 case 0x04b: /* VIS I fpmerge */
4359 CHECK_FPU_FEATURE(dc
, VIS1
);
4360 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fpmerge
);
4362 case 0x04c: /* VIS II bshuffle */
4363 CHECK_FPU_FEATURE(dc
, VIS2
);
4364 gen_gsr_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_bshuffle
);
4366 case 0x04d: /* VIS I fexpand */
4367 CHECK_FPU_FEATURE(dc
, VIS1
);
4368 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fexpand
);
4370 case 0x050: /* VIS I fpadd16 */
4371 CHECK_FPU_FEATURE(dc
, VIS1
);
4372 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fpadd16
);
4374 case 0x051: /* VIS I fpadd16s */
4375 CHECK_FPU_FEATURE(dc
, VIS1
);
4376 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, gen_helper_fpadd16s
);
4378 case 0x052: /* VIS I fpadd32 */
4379 CHECK_FPU_FEATURE(dc
, VIS1
);
4380 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fpadd32
);
4382 case 0x053: /* VIS I fpadd32s */
4383 CHECK_FPU_FEATURE(dc
, VIS1
);
4384 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_add_i32
);
4386 case 0x054: /* VIS I fpsub16 */
4387 CHECK_FPU_FEATURE(dc
, VIS1
);
4388 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fpsub16
);
4390 case 0x055: /* VIS I fpsub16s */
4391 CHECK_FPU_FEATURE(dc
, VIS1
);
4392 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, gen_helper_fpsub16s
);
4394 case 0x056: /* VIS I fpsub32 */
4395 CHECK_FPU_FEATURE(dc
, VIS1
);
4396 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, gen_helper_fpsub32
);
4398 case 0x057: /* VIS I fpsub32s */
4399 CHECK_FPU_FEATURE(dc
, VIS1
);
4400 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_sub_i32
);
4402 case 0x060: /* VIS I fzero */
4403 CHECK_FPU_FEATURE(dc
, VIS1
);
4404 cpu_dst_64
= gen_dest_fpr_D();
4405 tcg_gen_movi_i64(cpu_dst_64
, 0);
4406 gen_store_fpr_D(dc
, rd
, cpu_dst_64
);
4408 case 0x061: /* VIS I fzeros */
4409 CHECK_FPU_FEATURE(dc
, VIS1
);
4410 cpu_dst_32
= gen_dest_fpr_F();
4411 tcg_gen_movi_i32(cpu_dst_32
, 0);
4412 gen_store_fpr_F(dc
, rd
, cpu_dst_32
);
4414 case 0x062: /* VIS I fnor */
4415 CHECK_FPU_FEATURE(dc
, VIS1
);
4416 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_nor_i64
);
4418 case 0x063: /* VIS I fnors */
4419 CHECK_FPU_FEATURE(dc
, VIS1
);
4420 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_nor_i32
);
4422 case 0x064: /* VIS I fandnot2 */
4423 CHECK_FPU_FEATURE(dc
, VIS1
);
4424 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_andc_i64
);
4426 case 0x065: /* VIS I fandnot2s */
4427 CHECK_FPU_FEATURE(dc
, VIS1
);
4428 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_andc_i32
);
4430 case 0x066: /* VIS I fnot2 */
4431 CHECK_FPU_FEATURE(dc
, VIS1
);
4432 gen_ne_fop_DD(dc
, rd
, rs2
, tcg_gen_not_i64
);
4434 case 0x067: /* VIS I fnot2s */
4435 CHECK_FPU_FEATURE(dc
, VIS1
);
4436 gen_ne_fop_FF(dc
, rd
, rs2
, tcg_gen_not_i32
);
4438 case 0x068: /* VIS I fandnot1 */
4439 CHECK_FPU_FEATURE(dc
, VIS1
);
4440 gen_ne_fop_DDD(dc
, rd
, rs2
, rs1
, tcg_gen_andc_i64
);
4442 case 0x069: /* VIS I fandnot1s */
4443 CHECK_FPU_FEATURE(dc
, VIS1
);
4444 gen_ne_fop_FFF(dc
, rd
, rs2
, rs1
, tcg_gen_andc_i32
);
4446 case 0x06a: /* VIS I fnot1 */
4447 CHECK_FPU_FEATURE(dc
, VIS1
);
4448 gen_ne_fop_DD(dc
, rd
, rs1
, tcg_gen_not_i64
);
4450 case 0x06b: /* VIS I fnot1s */
4451 CHECK_FPU_FEATURE(dc
, VIS1
);
4452 gen_ne_fop_FF(dc
, rd
, rs1
, tcg_gen_not_i32
);
4454 case 0x06c: /* VIS I fxor */
4455 CHECK_FPU_FEATURE(dc
, VIS1
);
4456 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_xor_i64
);
4458 case 0x06d: /* VIS I fxors */
4459 CHECK_FPU_FEATURE(dc
, VIS1
);
4460 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_xor_i32
);
4462 case 0x06e: /* VIS I fnand */
4463 CHECK_FPU_FEATURE(dc
, VIS1
);
4464 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_nand_i64
);
4466 case 0x06f: /* VIS I fnands */
4467 CHECK_FPU_FEATURE(dc
, VIS1
);
4468 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_nand_i32
);
4470 case 0x070: /* VIS I fand */
4471 CHECK_FPU_FEATURE(dc
, VIS1
);
4472 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_and_i64
);
4474 case 0x071: /* VIS I fands */
4475 CHECK_FPU_FEATURE(dc
, VIS1
);
4476 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_and_i32
);
4478 case 0x072: /* VIS I fxnor */
4479 CHECK_FPU_FEATURE(dc
, VIS1
);
4480 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_eqv_i64
);
4482 case 0x073: /* VIS I fxnors */
4483 CHECK_FPU_FEATURE(dc
, VIS1
);
4484 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_eqv_i32
);
4486 case 0x074: /* VIS I fsrc1 */
4487 CHECK_FPU_FEATURE(dc
, VIS1
);
4488 cpu_src1_64
= gen_load_fpr_D(dc
, rs1
);
4489 gen_store_fpr_D(dc
, rd
, cpu_src1_64
);
4491 case 0x075: /* VIS I fsrc1s */
4492 CHECK_FPU_FEATURE(dc
, VIS1
);
4493 cpu_src1_32
= gen_load_fpr_F(dc
, rs1
);
4494 gen_store_fpr_F(dc
, rd
, cpu_src1_32
);
4496 case 0x076: /* VIS I fornot2 */
4497 CHECK_FPU_FEATURE(dc
, VIS1
);
4498 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_orc_i64
);
4500 case 0x077: /* VIS I fornot2s */
4501 CHECK_FPU_FEATURE(dc
, VIS1
);
4502 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_orc_i32
);
4504 case 0x078: /* VIS I fsrc2 */
4505 CHECK_FPU_FEATURE(dc
, VIS1
);
4506 cpu_src1_64
= gen_load_fpr_D(dc
, rs2
);
4507 gen_store_fpr_D(dc
, rd
, cpu_src1_64
);
4509 case 0x079: /* VIS I fsrc2s */
4510 CHECK_FPU_FEATURE(dc
, VIS1
);
4511 cpu_src1_32
= gen_load_fpr_F(dc
, rs2
);
4512 gen_store_fpr_F(dc
, rd
, cpu_src1_32
);
4514 case 0x07a: /* VIS I fornot1 */
4515 CHECK_FPU_FEATURE(dc
, VIS1
);
4516 gen_ne_fop_DDD(dc
, rd
, rs2
, rs1
, tcg_gen_orc_i64
);
4518 case 0x07b: /* VIS I fornot1s */
4519 CHECK_FPU_FEATURE(dc
, VIS1
);
4520 gen_ne_fop_FFF(dc
, rd
, rs2
, rs1
, tcg_gen_orc_i32
);
4522 case 0x07c: /* VIS I for */
4523 CHECK_FPU_FEATURE(dc
, VIS1
);
4524 gen_ne_fop_DDD(dc
, rd
, rs1
, rs2
, tcg_gen_or_i64
);
4526 case 0x07d: /* VIS I fors */
4527 CHECK_FPU_FEATURE(dc
, VIS1
);
4528 gen_ne_fop_FFF(dc
, rd
, rs1
, rs2
, tcg_gen_or_i32
);
4530 case 0x07e: /* VIS I fone */
4531 CHECK_FPU_FEATURE(dc
, VIS1
);
4532 cpu_dst_64
= gen_dest_fpr_D();
4533 tcg_gen_movi_i64(cpu_dst_64
, -1);
4534 gen_store_fpr_D(dc
, rd
, cpu_dst_64
);
4536 case 0x07f: /* VIS I fones */
4537 CHECK_FPU_FEATURE(dc
, VIS1
);
4538 cpu_dst_32
= gen_dest_fpr_F();
4539 tcg_gen_movi_i32(cpu_dst_32
, -1);
4540 gen_store_fpr_F(dc
, rd
, cpu_dst_32
);
4542 case 0x080: /* VIS I shutdown */
4543 case 0x081: /* VIS II siam */
4552 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4553 #ifdef TARGET_SPARC64
4558 #ifdef TARGET_SPARC64
4559 } else if (xop
== 0x39) { /* V9 return */
4562 save_state(dc
, cpu_cond
);
4563 cpu_src1
= get_src1(insn
, cpu_src1
);
4564 if (IS_IMM
) { /* immediate */
4565 simm
= GET_FIELDs(insn
, 19, 31);
4566 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4567 } else { /* register */
4568 rs2
= GET_FIELD(insn
, 27, 31);
4570 gen_movl_reg_TN(rs2
, cpu_src2
);
4571 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4573 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4575 gen_helper_restore(cpu_env
);
4576 gen_mov_pc_npc(dc
, cpu_cond
);
4577 r_const
= tcg_const_i32(3);
4578 gen_helper_check_align(cpu_env
, cpu_dst
, r_const
);
4579 tcg_temp_free_i32(r_const
);
4580 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4581 dc
->npc
= DYNAMIC_PC
;
4585 cpu_src1
= get_src1(insn
, cpu_src1
);
4586 if (IS_IMM
) { /* immediate */
4587 simm
= GET_FIELDs(insn
, 19, 31);
4588 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4589 } else { /* register */
4590 rs2
= GET_FIELD(insn
, 27, 31);
4592 gen_movl_reg_TN(rs2
, cpu_src2
);
4593 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4595 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4598 case 0x38: /* jmpl */
4603 r_pc
= tcg_const_tl(dc
->pc
);
4604 gen_movl_TN_reg(rd
, r_pc
);
4605 tcg_temp_free(r_pc
);
4606 gen_mov_pc_npc(dc
, cpu_cond
);
4607 r_const
= tcg_const_i32(3);
4608 gen_helper_check_align(cpu_env
, cpu_dst
, r_const
);
4609 tcg_temp_free_i32(r_const
);
4610 gen_address_mask(dc
, cpu_dst
);
4611 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4612 dc
->npc
= DYNAMIC_PC
;
4615 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4616 case 0x39: /* rett, V9 return */
4620 if (!supervisor(dc
))
4622 gen_mov_pc_npc(dc
, cpu_cond
);
4623 r_const
= tcg_const_i32(3);
4624 gen_helper_check_align(cpu_env
, cpu_dst
, r_const
);
4625 tcg_temp_free_i32(r_const
);
4626 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4627 dc
->npc
= DYNAMIC_PC
;
4628 gen_helper_rett(cpu_env
);
4632 case 0x3b: /* flush */
4633 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4637 case 0x3c: /* save */
4638 save_state(dc
, cpu_cond
);
4639 gen_helper_save(cpu_env
);
4640 gen_movl_TN_reg(rd
, cpu_dst
);
4642 case 0x3d: /* restore */
4643 save_state(dc
, cpu_cond
);
4644 gen_helper_restore(cpu_env
);
4645 gen_movl_TN_reg(rd
, cpu_dst
);
4647 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4648 case 0x3e: /* V9 done/retry */
4652 if (!supervisor(dc
))
4654 dc
->npc
= DYNAMIC_PC
;
4655 dc
->pc
= DYNAMIC_PC
;
4656 gen_helper_done(cpu_env
);
4659 if (!supervisor(dc
))
4661 dc
->npc
= DYNAMIC_PC
;
4662 dc
->pc
= DYNAMIC_PC
;
4663 gen_helper_retry(cpu_env
);
4678 case 3: /* load/store instructions */
4680 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4682 /* flush pending conditional evaluations before exposing
4684 if (dc
->cc_op
!= CC_OP_FLAGS
) {
4685 dc
->cc_op
= CC_OP_FLAGS
;
4686 gen_helper_compute_psr(cpu_env
);
4688 cpu_src1
= get_src1(insn
, cpu_src1
);
4689 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4690 rs2
= GET_FIELD(insn
, 27, 31);
4691 gen_movl_reg_TN(rs2
, cpu_src2
);
4692 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4693 } else if (IS_IMM
) { /* immediate */
4694 simm
= GET_FIELDs(insn
, 19, 31);
4695 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, simm
);
4696 } else { /* register */
4697 rs2
= GET_FIELD(insn
, 27, 31);
4699 gen_movl_reg_TN(rs2
, cpu_src2
);
4700 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4702 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4704 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4705 (xop
> 0x17 && xop
<= 0x1d ) ||
4706 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4708 case 0x0: /* ld, V9 lduw, load unsigned word */
4709 gen_address_mask(dc
, cpu_addr
);
4710 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4712 case 0x1: /* ldub, load unsigned byte */
4713 gen_address_mask(dc
, cpu_addr
);
4714 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4716 case 0x2: /* lduh, load unsigned halfword */
4717 gen_address_mask(dc
, cpu_addr
);
4718 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4720 case 0x3: /* ldd, load double word */
4726 save_state(dc
, cpu_cond
);
4727 r_const
= tcg_const_i32(7);
4728 /* XXX remove alignment check */
4729 gen_helper_check_align(cpu_env
, cpu_addr
, r_const
);
4730 tcg_temp_free_i32(r_const
);
4731 gen_address_mask(dc
, cpu_addr
);
4732 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4733 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4734 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4735 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4736 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4737 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4738 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4741 case 0x9: /* ldsb, load signed byte */
4742 gen_address_mask(dc
, cpu_addr
);
4743 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4745 case 0xa: /* ldsh, load signed halfword */
4746 gen_address_mask(dc
, cpu_addr
);
4747 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4749 case 0xd: /* ldstub -- XXX: should be atomically */
4753 gen_address_mask(dc
, cpu_addr
);
4754 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4755 r_const
= tcg_const_tl(0xff);
4756 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4757 tcg_temp_free(r_const
);
4760 case 0x0f: /* swap, swap register with memory. Also
4762 CHECK_IU_FEATURE(dc
, SWAP
);
4763 gen_movl_reg_TN(rd
, cpu_val
);
4764 gen_address_mask(dc
, cpu_addr
);
4765 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4766 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4767 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4769 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4770 case 0x10: /* lda, V9 lduwa, load word alternate */
4771 #ifndef TARGET_SPARC64
4774 if (!supervisor(dc
))
4777 save_state(dc
, cpu_cond
);
4778 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4780 case 0x11: /* lduba, load unsigned byte alternate */
4781 #ifndef TARGET_SPARC64
4784 if (!supervisor(dc
))
4787 save_state(dc
, cpu_cond
);
4788 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4790 case 0x12: /* lduha, load unsigned halfword alternate */
4791 #ifndef TARGET_SPARC64
4794 if (!supervisor(dc
))
4797 save_state(dc
, cpu_cond
);
4798 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4800 case 0x13: /* ldda, load double word alternate */
4801 #ifndef TARGET_SPARC64
4804 if (!supervisor(dc
))
4809 save_state(dc
, cpu_cond
);
4810 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4812 case 0x19: /* ldsba, load signed byte alternate */
4813 #ifndef TARGET_SPARC64
4816 if (!supervisor(dc
))
4819 save_state(dc
, cpu_cond
);
4820 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4822 case 0x1a: /* ldsha, load signed halfword alternate */
4823 #ifndef TARGET_SPARC64
4826 if (!supervisor(dc
))
4829 save_state(dc
, cpu_cond
);
4830 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4832 case 0x1d: /* ldstuba -- XXX: should be atomically */
4833 #ifndef TARGET_SPARC64
4836 if (!supervisor(dc
))
4839 save_state(dc
, cpu_cond
);
4840 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4842 case 0x1f: /* swapa, swap reg with alt. memory. Also
4844 CHECK_IU_FEATURE(dc
, SWAP
);
4845 #ifndef TARGET_SPARC64
4848 if (!supervisor(dc
))
4851 save_state(dc
, cpu_cond
);
4852 gen_movl_reg_TN(rd
, cpu_val
);
4853 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4856 #ifndef TARGET_SPARC64
4857 case 0x30: /* ldc */
4858 case 0x31: /* ldcsr */
4859 case 0x33: /* lddc */
4863 #ifdef TARGET_SPARC64
4864 case 0x08: /* V9 ldsw */
4865 gen_address_mask(dc
, cpu_addr
);
4866 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4868 case 0x0b: /* V9 ldx */
4869 gen_address_mask(dc
, cpu_addr
);
4870 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4872 case 0x18: /* V9 ldswa */
4873 save_state(dc
, cpu_cond
);
4874 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4876 case 0x1b: /* V9 ldxa */
4877 save_state(dc
, cpu_cond
);
4878 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4880 case 0x2d: /* V9 prefetch, no effect */
4882 case 0x30: /* V9 ldfa */
4883 if (gen_trap_ifnofpu(dc
)) {
4886 save_state(dc
, cpu_cond
);
4887 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4888 gen_update_fprs_dirty(rd
);
4890 case 0x33: /* V9 lddfa */
4891 if (gen_trap_ifnofpu(dc
)) {
4894 save_state(dc
, cpu_cond
);
4895 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4896 gen_update_fprs_dirty(DFPREG(rd
));
4898 case 0x3d: /* V9 prefetcha, no effect */
4900 case 0x32: /* V9 ldqfa */
4901 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4902 if (gen_trap_ifnofpu(dc
)) {
4905 save_state(dc
, cpu_cond
);
4906 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4907 gen_update_fprs_dirty(QFPREG(rd
));
4913 gen_movl_TN_reg(rd
, cpu_val
);
4914 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4917 } else if (xop
>= 0x20 && xop
< 0x24) {
4918 if (gen_trap_ifnofpu(dc
)) {
4921 save_state(dc
, cpu_cond
);
4923 case 0x20: /* ldf, load fpreg */
4924 gen_address_mask(dc
, cpu_addr
);
4925 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4926 cpu_dst_32
= gen_dest_fpr_F();
4927 tcg_gen_trunc_tl_i32(cpu_dst_32
, cpu_tmp0
);
4928 gen_store_fpr_F(dc
, rd
, cpu_dst_32
);
4930 case 0x21: /* ldfsr, V9 ldxfsr */
4931 #ifdef TARGET_SPARC64
4932 gen_address_mask(dc
, cpu_addr
);
4934 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4935 gen_helper_ldxfsr(cpu_env
, cpu_tmp64
);
4937 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4938 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
4939 gen_helper_ldfsr(cpu_env
, cpu_tmp32
);
4943 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4944 gen_helper_ldfsr(cpu_env
, cpu_tmp32
);
4948 case 0x22: /* ldqf, load quad fpreg */
4952 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4953 r_const
= tcg_const_i32(dc
->mem_idx
);
4954 gen_address_mask(dc
, cpu_addr
);
4955 gen_helper_ldqf(cpu_env
, cpu_addr
, r_const
);
4956 tcg_temp_free_i32(r_const
);
4957 gen_op_store_QT0_fpr(QFPREG(rd
));
4958 gen_update_fprs_dirty(QFPREG(rd
));
4961 case 0x23: /* lddf, load double fpreg */
4962 gen_address_mask(dc
, cpu_addr
);
4963 cpu_dst_64
= gen_dest_fpr_D();
4964 tcg_gen_qemu_ld64(cpu_dst_64
, cpu_addr
, dc
->mem_idx
);
4965 gen_store_fpr_D(dc
, rd
, cpu_dst_64
);
4970 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) ||
4971 xop
== 0xe || xop
== 0x1e) {
4972 gen_movl_reg_TN(rd
, cpu_val
);
4974 case 0x4: /* st, store word */
4975 gen_address_mask(dc
, cpu_addr
);
4976 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4978 case 0x5: /* stb, store byte */
4979 gen_address_mask(dc
, cpu_addr
);
4980 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4982 case 0x6: /* sth, store halfword */
4983 gen_address_mask(dc
, cpu_addr
);
4984 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4986 case 0x7: /* std, store double word */
4992 save_state(dc
, cpu_cond
);
4993 gen_address_mask(dc
, cpu_addr
);
4994 r_const
= tcg_const_i32(7);
4995 /* XXX remove alignment check */
4996 gen_helper_check_align(cpu_env
, cpu_addr
, r_const
);
4997 tcg_temp_free_i32(r_const
);
4998 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4999 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
5000 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
5003 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5004 case 0x14: /* sta, V9 stwa, store word alternate */
5005 #ifndef TARGET_SPARC64
5008 if (!supervisor(dc
))
5011 save_state(dc
, cpu_cond
);
5012 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
5013 dc
->npc
= DYNAMIC_PC
;
5015 case 0x15: /* stba, store byte alternate */
5016 #ifndef TARGET_SPARC64
5019 if (!supervisor(dc
))
5022 save_state(dc
, cpu_cond
);
5023 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
5024 dc
->npc
= DYNAMIC_PC
;
5026 case 0x16: /* stha, store halfword alternate */
5027 #ifndef TARGET_SPARC64
5030 if (!supervisor(dc
))
5033 save_state(dc
, cpu_cond
);
5034 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
5035 dc
->npc
= DYNAMIC_PC
;
5037 case 0x17: /* stda, store double word alternate */
5038 #ifndef TARGET_SPARC64
5041 if (!supervisor(dc
))
5047 save_state(dc
, cpu_cond
);
5048 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
5052 #ifdef TARGET_SPARC64
5053 case 0x0e: /* V9 stx */
5054 gen_address_mask(dc
, cpu_addr
);
5055 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
5057 case 0x1e: /* V9 stxa */
5058 save_state(dc
, cpu_cond
);
5059 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
5060 dc
->npc
= DYNAMIC_PC
;
5066 } else if (xop
> 0x23 && xop
< 0x28) {
5067 if (gen_trap_ifnofpu(dc
)) {
5070 save_state(dc
, cpu_cond
);
5072 case 0x24: /* stf, store fpreg */
5073 gen_address_mask(dc
, cpu_addr
);
5074 cpu_src1_32
= gen_load_fpr_F(dc
, rd
);
5075 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_src1_32
);
5076 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
5078 case 0x25: /* stfsr, V9 stxfsr */
5079 #ifdef TARGET_SPARC64
5080 gen_address_mask(dc
, cpu_addr
);
5081 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
5083 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
5085 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
5087 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
5088 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
5092 #ifdef TARGET_SPARC64
5093 /* V9 stqf, store quad fpreg */
5097 CHECK_FPU_FEATURE(dc
, FLOAT128
);
5098 gen_op_load_fpr_QT0(QFPREG(rd
));
5099 r_const
= tcg_const_i32(dc
->mem_idx
);
5100 gen_address_mask(dc
, cpu_addr
);
5101 gen_helper_stqf(cpu_env
, cpu_addr
, r_const
);
5102 tcg_temp_free_i32(r_const
);
5105 #else /* !TARGET_SPARC64 */
5106 /* stdfq, store floating point queue */
5107 #if defined(CONFIG_USER_ONLY)
5110 if (!supervisor(dc
))
5112 if (gen_trap_ifnofpu(dc
)) {
5118 case 0x27: /* stdf, store double fpreg */
5119 gen_address_mask(dc
, cpu_addr
);
5120 cpu_src1_64
= gen_load_fpr_D(dc
, rd
);
5121 tcg_gen_qemu_st64(cpu_src1_64
, cpu_addr
, dc
->mem_idx
);
5126 } else if (xop
> 0x33 && xop
< 0x3f) {
5127 save_state(dc
, cpu_cond
);
5129 #ifdef TARGET_SPARC64
5130 case 0x34: /* V9 stfa */
5131 if (gen_trap_ifnofpu(dc
)) {
5134 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
5136 case 0x36: /* V9 stqfa */
5140 CHECK_FPU_FEATURE(dc
, FLOAT128
);
5141 if (gen_trap_ifnofpu(dc
)) {
5144 r_const
= tcg_const_i32(7);
5145 gen_helper_check_align(cpu_env
, cpu_addr
, r_const
);
5146 tcg_temp_free_i32(r_const
);
5147 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
5150 case 0x37: /* V9 stdfa */
5151 if (gen_trap_ifnofpu(dc
)) {
5154 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
5156 case 0x3c: /* V9 casa */
5157 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
5158 gen_movl_TN_reg(rd
, cpu_val
);
5160 case 0x3e: /* V9 casxa */
5161 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
5162 gen_movl_TN_reg(rd
, cpu_val
);
5165 case 0x34: /* stc */
5166 case 0x35: /* stcsr */
5167 case 0x36: /* stdcq */
5168 case 0x37: /* stdc */
5179 /* default case for non jump instructions */
5180 if (dc
->npc
== DYNAMIC_PC
) {
5181 dc
->pc
= DYNAMIC_PC
;
5183 } else if (dc
->npc
== JUMP_PC
) {
5184 /* we can do a static jump */
5185 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
5189 dc
->npc
= dc
->npc
+ 4;
5197 save_state(dc
, cpu_cond
);
5198 r_const
= tcg_const_i32(TT_ILL_INSN
);
5199 gen_helper_raise_exception(cpu_env
, r_const
);
5200 tcg_temp_free_i32(r_const
);
5208 save_state(dc
, cpu_cond
);
5209 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
5210 gen_helper_raise_exception(cpu_env
, r_const
);
5211 tcg_temp_free_i32(r_const
);
5215 #if !defined(CONFIG_USER_ONLY)
5220 save_state(dc
, cpu_cond
);
5221 r_const
= tcg_const_i32(TT_PRIV_INSN
);
5222 gen_helper_raise_exception(cpu_env
, r_const
);
5223 tcg_temp_free_i32(r_const
);
5229 save_state(dc
, cpu_cond
);
5230 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
5233 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5235 save_state(dc
, cpu_cond
);
5236 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
5240 #ifndef TARGET_SPARC64
5245 save_state(dc
, cpu_cond
);
5246 r_const
= tcg_const_i32(TT_NCP_INSN
);
5247 gen_helper_raise_exception(cpu_env
, r_const
);
5248 tcg_temp_free(r_const
);
5254 tcg_temp_free(cpu_tmp1
);
5255 tcg_temp_free(cpu_tmp2
);
5256 if (dc
->n_t32
!= 0) {
5258 for (i
= dc
->n_t32
- 1; i
>= 0; --i
) {
5259 tcg_temp_free_i32(dc
->t32
[i
]);
5265 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
5266 int spc
, CPUSPARCState
*env
)
5268 target_ulong pc_start
, last_pc
;
5269 uint16_t *gen_opc_end
;
5270 DisasContext dc1
, *dc
= &dc1
;
5277 memset(dc
, 0, sizeof(DisasContext
));
5282 dc
->npc
= (target_ulong
) tb
->cs_base
;
5283 dc
->cc_op
= CC_OP_DYNAMIC
;
5284 dc
->mem_idx
= cpu_mmu_index(env
);
5286 dc
->fpu_enabled
= tb_fpu_enabled(tb
->flags
);
5287 dc
->address_mask_32bit
= tb_am_enabled(tb
->flags
);
5288 dc
->singlestep
= (env
->singlestep_enabled
|| singlestep
);
5289 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
5291 cpu_tmp0
= tcg_temp_new();
5292 cpu_tmp32
= tcg_temp_new_i32();
5293 cpu_tmp64
= tcg_temp_new_i64();
5295 cpu_dst
= tcg_temp_local_new();
5298 cpu_val
= tcg_temp_local_new();
5299 cpu_addr
= tcg_temp_local_new();
5302 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
5304 max_insns
= CF_COUNT_MASK
;
5307 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
5308 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
5309 if (bp
->pc
== dc
->pc
) {
5310 if (dc
->pc
!= pc_start
)
5311 save_state(dc
, cpu_cond
);
5312 gen_helper_debug(cpu_env
);
5320 qemu_log("Search PC...\n");
5321 j
= gen_opc_ptr
- gen_opc_buf
;
5325 gen_opc_instr_start
[lj
++] = 0;
5326 gen_opc_pc
[lj
] = dc
->pc
;
5327 gen_opc_npc
[lj
] = dc
->npc
;
5328 gen_opc_instr_start
[lj
] = 1;
5329 gen_opc_icount
[lj
] = num_insns
;
5332 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
5335 insn
= cpu_ldl_code(env
, dc
->pc
);
5336 disas_sparc_insn(dc
, insn
);
5341 /* if the next PC is different, we abort now */
5342 if (dc
->pc
!= (last_pc
+ 4))
5344 /* if we reach a page boundary, we stop generation so that the
5345 PC of a TT_TFAULT exception is always in the right page */
5346 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
5348 /* if single step mode, we generate only one instruction and
5349 generate an exception */
5350 if (dc
->singlestep
) {
5353 } while ((gen_opc_ptr
< gen_opc_end
) &&
5354 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
5355 num_insns
< max_insns
);
5358 tcg_temp_free(cpu_addr
);
5359 tcg_temp_free(cpu_val
);
5360 tcg_temp_free(cpu_dst
);
5361 tcg_temp_free_i64(cpu_tmp64
);
5362 tcg_temp_free_i32(cpu_tmp32
);
5363 tcg_temp_free(cpu_tmp0
);
5365 if (tb
->cflags
& CF_LAST_IO
)
5368 if (dc
->pc
!= DYNAMIC_PC
&&
5369 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
5370 /* static PC and NPC: we can use direct chaining */
5371 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
5373 if (dc
->pc
!= DYNAMIC_PC
)
5374 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
5375 save_npc(dc
, cpu_cond
);
5379 gen_icount_end(tb
, num_insns
);
5380 *gen_opc_ptr
= INDEX_op_end
;
5382 j
= gen_opc_ptr
- gen_opc_buf
;
5385 gen_opc_instr_start
[lj
++] = 0;
5389 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
5390 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
5392 tb
->size
= last_pc
+ 4 - pc_start
;
5393 tb
->icount
= num_insns
;
5396 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
5397 qemu_log("--------------\n");
5398 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
5399 log_target_disas(pc_start
, last_pc
+ 4 - pc_start
, 0);
5405 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
5407 gen_intermediate_code_internal(tb
, 0, env
);
5410 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
5412 gen_intermediate_code_internal(tb
, 1, env
);
5415 void gen_intermediate_code_init(CPUSPARCState
*env
)
5419 static const char * const gregnames
[8] = {
5420 NULL
, // g0 not used
5429 static const char * const fregnames
[32] = {
5430 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5431 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5432 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5433 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5436 /* init various static tables */
5440 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
5441 cpu_regwptr
= tcg_global_mem_new_ptr(TCG_AREG0
,
5442 offsetof(CPUSPARCState
, regwptr
),
5444 #ifdef TARGET_SPARC64
5445 cpu_xcc
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUSPARCState
, xcc
),
5447 cpu_asi
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUSPARCState
, asi
),
5449 cpu_fprs
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUSPARCState
, fprs
),
5451 cpu_gsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, gsr
),
5453 cpu_tick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5454 offsetof(CPUSPARCState
, tick_cmpr
),
5456 cpu_stick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5457 offsetof(CPUSPARCState
, stick_cmpr
),
5459 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5460 offsetof(CPUSPARCState
, hstick_cmpr
),
5462 cpu_hintp
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, hintp
),
5464 cpu_htba
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, htba
),
5466 cpu_hver
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, hver
),
5468 cpu_ssr
= tcg_global_mem_new(TCG_AREG0
,
5469 offsetof(CPUSPARCState
, ssr
), "ssr");
5470 cpu_ver
= tcg_global_mem_new(TCG_AREG0
,
5471 offsetof(CPUSPARCState
, version
), "ver");
5472 cpu_softint
= tcg_global_mem_new_i32(TCG_AREG0
,
5473 offsetof(CPUSPARCState
, softint
),
5476 cpu_wim
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, wim
),
5479 cpu_cond
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, cond
),
5481 cpu_cc_src
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, cc_src
),
5483 cpu_cc_src2
= tcg_global_mem_new(TCG_AREG0
,
5484 offsetof(CPUSPARCState
, cc_src2
),
5486 cpu_cc_dst
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, cc_dst
),
5488 cpu_cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUSPARCState
, cc_op
),
5490 cpu_psr
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUSPARCState
, psr
),
5492 cpu_fsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, fsr
),
5494 cpu_pc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, pc
),
5496 cpu_npc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, npc
),
5498 cpu_y
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, y
), "y");
5499 #ifndef CONFIG_USER_ONLY
5500 cpu_tbr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUSPARCState
, tbr
),
5503 for (i
= 1; i
< 8; i
++) {
5504 cpu_gregs
[i
] = tcg_global_mem_new(TCG_AREG0
,
5505 offsetof(CPUSPARCState
, gregs
[i
]),
5508 for (i
= 0; i
< TARGET_DPREGS
; i
++) {
5509 cpu_fpr
[i
] = tcg_global_mem_new_i64(TCG_AREG0
,
5510 offsetof(CPUSPARCState
, fpr
[i
]),
5514 /* register helpers */
5516 #define GEN_HELPER 2
5521 void restore_state_to_opc(CPUSPARCState
*env
, TranslationBlock
*tb
, int pc_pos
)
5524 env
->pc
= gen_opc_pc
[pc_pos
];
5525 npc
= gen_opc_npc
[pc_pos
];
5527 /* dynamic NPC: already stored */
5528 } else if (npc
== 2) {
5529 /* jump PC: use 'cond' and the jump targets of the translation */
5531 env
->npc
= gen_opc_jump_pc
[0];
5533 env
->npc
= gen_opc_jump_pc
[1];
5539 /* flush pending conditional evaluations before exposing cpu state */
5540 if (CC_OP
!= CC_OP_FLAGS
) {
5541 helper_compute_psr(env
);