/*
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];
#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    const sparc_def_t *def;
    TCGv_i32 t32[3];
    TCGv ttl[5];
    int n_t32;
    int n_ttl;
} DisasContext;

typedef struct {
    TCGCond cond;
    bool is_bool;
    bool g1, g2;
    TCGv c1, c2;
} DisasCompare;
// This macro uses non-native bit order
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
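/* Example: GET_FIELD(insn, 0, 1) is (insn >> 30) & 3, the two most
   significant bits of the word (the SPARC "op" field), while
   GET_FIELD_SP(insn, 0, 1) selects the two least significant bits. */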
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
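/* On SPARC64 the 5-bit field encodes the even double-register numbers
   0..62, with field bit 0 supplying bit 5 of the register number: e.g.
   DFPREG(3) == 34, so an rd field of 3 names %f34. */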
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
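/* e.g. sign_extend(0x40000, 19): bit 18 is the sign bit of the 19-bit
   field, so the value is shifted up by 13 bits and back down
   arithmetically, yielding 0xfffc0000 (-262144). */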
#define IS_IMM (insn & (1<<13))
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
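/* FPRS bit 0 is DL ("dirty lower", %f0-%f31) and bit 1 is DU ("dirty
   upper", %f32-%f63), hence the choice of mask based on rd < 32. */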
/* floating point registers moves */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        TCGv_i32 ret = tcg_temp_new_i32();
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        dc->t32[dc->n_t32++] = ret;
        assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));

        return ret;
    }
#endif
}
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}

static TCGv_i32 gen_dest_fpr_F(void)
{
    return cpu_tmp32;
}

static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    return cpu_fpr[src / 2];
}

static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}

static TCGv_i64 gen_dest_fpr_D(void)
{
    return cpu_tmp64;
}
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
#ifdef TARGET_SPARC64
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif

#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc)) {
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
    }
#endif
}
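/* With PSTATE.AM set, a V9 CPU truncates generated addresses to 32 bits;
   AM_CHECK mirrors that state for the block being translated. */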
static inline TCGv get_temp_tl(DisasContext *dc)
{
    TCGv t;
    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
    return t;
}

static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
{
    if (reg == 0 || reg >= 8) {
        TCGv t = get_temp_tl(dc);
        if (reg == 0) {
            tcg_gen_movi_tl(t, 0);
        } else {
            tcg_gen_ld_tl(t, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
        }
        return t;
    } else {
        return cpu_gregs[reg];
    }
}
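/* %g0 always reads as zero, %g1-%g7 live in the cpu_gregs globals, and
   the windowed registers (reg >= 8) are reached through cpu_regwptr,
   which tracks the current register window. */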
static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
{
    if (reg > 0) {
        if (reg < 8) {
            tcg_gen_mov_tl(cpu_gregs[reg], v);
        } else {
            tcg_gen_st_tl(v, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
        }
    }
}

static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
{
    if (reg == 0 || reg >= 8) {
        return get_temp_tl(dc);
    } else {
        return cpu_gregs[reg];
    }
}
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
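/* Returning (tb + tb_num) from the exit identifies which of the two
   direct-jump slots of this TB to patch when chaining; returning 0
   requests an ordinary lookup of the next TB instead. */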
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
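/* Each helper above isolates one icc flag (N, Z, V or C) from a 32-bit
   PSR image as a target-long 0/1 value, so the condition evaluators
   below can combine flags with plain and/or/xor. */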
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
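/* Condition codes are tracked lazily: only the operands and result are
   saved here, and the flag bits themselves are materialized on demand
   via gen_helper_compute_psr. */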
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
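/* This uses the unsigned-compare trick: after dst = a + b, the addition
   carried out of bit 31 exactly when (uint32_t)dst < (uint32_t)a. */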
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
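/* ADDX consumes the carry produced by a *previous* flag-setting
   instruction, so the cheapest way to recover that carry depends on
   which CC_OP computed it; the switch above picks the recovery path. */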
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero;

    r_temp = tcg_temp_new();

    /* if (!(env->y & 1)) T1 = 0; */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    gen_helper_compute_psr(cpu_env);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
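/* MULScc performs one step of the V8 multiply-step algorithm: the addend
   is zeroed unless the low bit of Y is set, Y shifts right one bit taking
   the low bit of the partial product, (N ^ V) shifts into the top of the
   running sum, and the final add updates the flags. */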
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
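/* Both UMUL and SMUL also deposit the high 32 bits of the full 64-bit
   product in the Y register, which is why gen_op_multiply widens the
   operands to 64 bits before multiplying. */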
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
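/* Each evaluator computes one SPARC integer condition as a 0/1 value,
   e.g. "le" is Z | (N ^ V) and "gu" is !(C | Z); the inverted conditions
   are obtained with the trailing xori by 1. */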
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
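/* gen_branch_a implements a conditional branch with the annul bit set:
   if the condition holds, the delay slot at pc2 executes and control then
   reaches pc1; otherwise the slot is annulled and execution resumes at
   pc2 + 4. */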
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}

/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}

static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}

static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
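/* The architectural pc/npc pair models the delay slot: stepping an
   instruction copies npc to pc and advances npc by 4, while a branch only
   replaces npc, so the slot instruction at pc still executes first. */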
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        cmp->is_bool = false;
        cmp->g2 = false;
        cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            cmp->g1 = false;
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->g1 = true;
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7:  /* overflow */
        case 15: /* !overflow */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
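/* Under CC_OP_LOGIC the flags all derive from a single result value, so
   most conditions collapse to a signed comparison of cc_dst with zero;
   e.g. "leu" (C | Z) reduces to Z alone because logical ops clear C. */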
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}

static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                     DisasContext *dc)
{
    DisasCompare cmp;
    gen_compare(&cmp, cc, cond, dc);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}

static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    DisasCompare cmp;
    gen_fcompare(&cmp, cc, cond);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
#endif
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                tcg_gen_movi_tl(cpu_npc, dc->jump_pc[1]);
            }
            dc->npc = JUMP_PC;
        }
    }
}

static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                tcg_gen_movi_tl(cpu_npc, dc->jump_pc[1]);
            }
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            tcg_gen_movi_tl(cpu_npc, dc->jump_pc[1]);
        }
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}
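/* The gen_[ne_]fop_* wrappers factor out the common load-operands,
   call-helper, store-result pattern; the "ne" (no exception) variants
   take helpers that cannot trap and so do not need cpu_env. */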
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif

static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D();

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif

static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
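/* For load/store-alternate instructions the i bit selects the ASI source:
   with i=1 the %asi register is used, with i=0 the immediate ASI comes
   from instruction bits 5..12 (GET_FIELD(insn, 19, 26)). */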
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);

    tcg_gen_concat_tl_i64(cpu_tmp64, lo, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(DisasContext *dc, TCGv addr,
                               TCGv val2, int insn, int rd)
{
    TCGv val1 = gen_load_gpr(dc, rd);
    TCGv dst = gen_dest_gpr(dc, rd);
    TCGv_i32 r_asi = gen_get_asi(insn, addr);

    gen_helper_cas_asi(dst, cpu_env, addr, val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    gen_store_gpr(dc, rd, dst);
}

static inline void gen_casx_asi(DisasContext *dc, TCGv addr,
                                TCGv val2, int insn, int rd)
{
    TCGv val1 = gen_load_gpr(dc, rd);
    TCGv dst = gen_dest_gpr(dc, rd);
    TCGv_i32 r_asi = gen_get_asi(insn, addr);

    gen_helper_casx_asi(dst, cpu_env, addr, val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    gen_store_gpr(dc, rd, dst);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, src);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv t;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);

    t = gen_dest_gpr(dc, rd + 1);
    tcg_gen_trunc_i64_tl(t, cpu_tmp64);
    gen_store_gpr(dc, rd + 1, t);

    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_store_gpr(dc, rd, hi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);

    tcg_gen_concat_tl_i64(cpu_tmp64, lo, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
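/* LDSTUB atomically loads a byte and writes 0xff back to the same
   location (SPARC's spinlock primitive); it is modelled here as an ASI
   load followed by an ASI store of the constant. */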
static TCGv get_src1(DisasContext *dc, unsigned int insn)
{
    unsigned int rs1 = GET_FIELD(insn, 13, 17);
    return gen_load_gpr(dc, rs1);
}

static TCGv get_src2(DisasContext *dc, unsigned int insn)
{
    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        TCGv t = get_temp_tl(dc);
        tcg_gen_movi_tl(t, simm);
        return t;
    } else { /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        return gen_load_gpr(dc, rs2);
    }
}
#ifdef TARGET_SPARC64
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the latter.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        tcg_gen_trunc_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_trunc_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F();
    zero = tcg_const_i32(0);

    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}

static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i64 dst = gen_dest_fpr_D();
    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
                        gen_load_fpr_D(dc, rs),
                        gen_load_fpr_D(dc, rd));
    gen_store_fpr_D(dc, rd, dst);
}

static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    gen_update_fprs_dirty(qd);
}
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    default:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    }

    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
{
    TCGv tmp = tcg_temp_new();

    tcg_gen_add_tl(tmp, s1, s2);
    tcg_gen_andi_tl(dst, tmp, -8);
    if (left) {
        tcg_gen_neg_tl(tmp, tmp);
    }
    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);

    tcg_temp_free(tmp);
}
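/* ALIGNADDRESS produces the 8-byte-aligned sum and records the three
   discarded low bits in GSR.align, where a following FALIGNDATA reads
   them as its byte-shift amount. */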
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);

    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(shift);
}
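/* Worked example: with GSR.align == 3, shift is 24, so dst becomes
   (s1 << 24) | (s2 >> 40): the first three bytes of the result come
   from s1 and the remaining five from s2. */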
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext *dc, unsigned int insn)
{
    unsigned int opc, rs1, rs2, rd;
    TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
    TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
    TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
        tcg_gen_debug_insn_start(dc->pc);
    }

    opc = GET_FIELD(insn, 0, 1);
    rd = GET_FIELD(insn, 2, 6);

    cpu_tmp1 = cpu_src1 = tcg_temp_new();
    cpu_tmp2 = cpu_src2 = tcg_temp_new();

    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(dc, insn);
                    do_branch_reg(dc, target, insn, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                /* Special-case %g0 because that's the canonical nop.  */
                if (rd) {
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv t = gen_dest_gpr(dc, rd);
                    tcg_gen_movi_tl(t, value << 10);
                    gen_store_gpr(dc, rd, t);
                }
                break;
2590 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
2591 TCGv o7
= gen_dest_gpr(dc
, 15);
2593 tcg_gen_movi_tl(o7
, dc
->pc
);
2594 gen_store_gpr(dc
, 15, o7
);
2597 #ifdef TARGET_SPARC64
2598 if (unlikely(AM_CHECK(dc
))) {
2599 target
&= 0xffffffffULL
;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond = GET_FIELD(insn, 3, 6);
                TCGv_i32 trap;
                int l1 = -1, mask;

                if (cond == 0) {
                    /* Trap never.  */
                    break;
                }

                save_state(dc);

                if (cond != 8) {
                    /* Conditional trap.  */
                    DisasCompare cmp;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);
                    if (cc == 0) {
                        gen_compare(&cmp, 0, cond, dc);
                    } else if (cc == 2) {
                        gen_compare(&cmp, 1, cond, dc);
                    } else {
                        goto illegal_insn;
                    }
#else
                    gen_compare(&cmp, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
                                      cmp.c1, cmp.c2, l1);
                    free_compare(&cmp);
                }

                mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
                        ? UA2005_HTRAP_MASK : V8_TRAP_MASK);

                /* Don't use the normal temporaries, as they may well have
                   gone out of scope with the branch above.  While we're
                   doing that we might as well pre-truncate to 32-bit.  */
                trap = tcg_temp_new_i32();

                rs1 = GET_FIELD_SP(insn, 14, 18);
                if (IS_IMM) {
                    rs2 = GET_FIELD_SP(insn, 0, 6);
                    if (rs1 == 0) {
                        tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
                        /* Signal that the trap value is fully constant.  */
                        mask = 0;
                    } else {
                        TCGv t1 = gen_load_gpr(dc, rs1);
                        tcg_gen_trunc_tl_i32(trap, t1);
                        tcg_gen_addi_i32(trap, trap, rs2);
                    }
                } else {
                    TCGv t1, t2;
                    rs2 = GET_FIELD_SP(insn, 0, 4);
                    t1 = gen_load_gpr(dc, rs1);
                    t2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(t1, t1, t2);
                    tcg_gen_trunc_tl_i32(trap, t1);
                }
                if (mask != 0) {
                    tcg_gen_andi_i32(trap, trap, mask);
                    tcg_gen_addi_i32(trap, trap, TT_TRAP);
                }

                gen_helper_raise_exception(cpu_env, trap);
                tcg_temp_free_i32(trap);

                if (cond == 8) {
                    /* An unconditional trap ends the TB.  */
                    dc->is_br = 1;
                    goto jmp_insn;
                } else {
                    /* A conditional trap falls through to the next insn.  */
                    gen_set_label(l1);
                    break;
                }
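/* Illustrative sketch, not part of the original file: the software trap
   number computed above for "ta 0x6d" with rs1 == %g0, assuming the
   conventional 0x80 Tcc base for TT_TRAP and the V8 7-bit mask. */
#if 0
#include <assert.h>

int main(void)
{
    unsigned imm = 0x6d;
    unsigned tt = (imm & 0x7f) + 0x80;   /* (rs2 & V8_TRAP_MASK) + TT_TRAP */
    assert(tt == 0xed);
    return 0;
}
#endif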
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    /* Read Asr17 */
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                        TCGv t = gen_dest_gpr(dc, rd);
                        /* Read Asr17 for a Leon3 monoprocessor */
                        tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
                        gen_store_gpr(dc, rd, t);
                        break;
                    }
#endif
                    gen_store_gpr(dc, rd, cpu_y);
                    break;
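/* Illustrative sketch, not part of the original file: the constant the
   Leon3 ASR17 read above produces for an invented 8-window config --
   bit 8 flags a monoprocessor, bits 4:0 hold NWINDOWS - 1. */
#if 0
#include <assert.h>

int main(void)
{
    int nwindows = 8;
    int asr17 = (1 << 8) | (nwindows - 1);
    assert(asr17 == 0x107);
    return 0;
}
#endif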
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    update_psr(dc);
                    gen_helper_rdccr(cpu_dst, cpu_env);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUSPARCState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_store_gpr(dc, rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv t = gen_dest_gpr(dc, rd);
                        if (unlikely(AM_CHECK(dc))) {
                            tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
                        } else {
                            tcg_gen_movi_tl(t, dc->pc);
                        }
                        gen_store_gpr(dc, rd, t);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    gen_store_gpr(dc, rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_store_gpr(dc, rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUSPARCState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_store_gpr(dc, rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_store_gpr(dc, rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc)) {
                    goto priv_insn;
                }
                update_psr(dc);
                gen_helper_rdpsr(cpu_dst, cpu_env);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
                                       offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                        tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUSPARCState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0, cpu_env);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_store_gpr(dc, rd, cpu_tmp0);
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc);
                gen_helper_flushw(cpu_env);
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_store_gpr(dc, rd, cpu_tbr);
#endif
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                switch (xop) {
                case 0x1: /* fmovs */
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x5: /* fnegs */
                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
                    break;
                case 0x9: /* fabss */
                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
                    break;
                case 0x41: /* fadds */
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
                    break;
                case 0x42: /* faddd */
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
                    break;
                case 0x45: /* fsubs */
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
                    break;
                case 0x46: /* fsubd */
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
                    break;
                case 0x4d: /* fdivs */
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
                    break;
                case 0x4e: /* fdivd */
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
                    break;
                case 0xc4: /* fitos */
                    gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
                    break;
                case 0xc6: /* fdtos */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
                    break;
                case 0xc8: /* fitod */
                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
                    break;
                case 0xc9: /* fstod */
                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
                    break;
                case 0xd1: /* fstoi */
                    gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
                    break;
                case 0xd2: /* fdtoi */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_move_Q(rd, rs2);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
                    break;
                case 0xa: /* V9 fabsd */
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
                    break;
                case 0x81: /* V9 fstox */
                    gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
                    break;
                case 0x82: /* V9 fdtox */
                    gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
                    break;
                case 0x84: /* V9 fxtos */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);

#ifdef TARGET_SPARC64
#define FMOVR(sz)                                                  \
                do {                                               \
                    DisasCompare cmp;                              \
                    cond = GET_FIELD_SP(insn, 14, 17);             \
                    cpu_src1 = get_src1(dc, insn);                 \
                    gen_compare_reg(&cmp, cond, cpu_src1);         \
                    gen_fmov##sz(dc, &cmp, rd, rs2);               \
                    free_compare(&cmp);                            \
                } while (0)

                if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
                    FMOVR(s);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    FMOVR(d);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVR(q);
                    break;
                }
#undef FMOVR
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
#define FMOVCC(fcc, sz)                                            \
                do {                                               \
                    DisasCompare cmp;                              \
                    cond = GET_FIELD_SP(insn, 14, 17);             \
                    gen_fcompare(&cmp, fcc, cond);                 \
                    gen_fmov##sz(dc, &cmp, rd, rs2);               \
                    free_compare(&cmp);                            \
                } while (0)

                case 0x001: /* V9 fmovscc %fcc0 */
                    FMOVCC(0, s);
                    break;
                case 0x002: /* V9 fmovdcc %fcc0 */
                    FMOVCC(0, d);
                    break;
                case 0x003: /* V9 fmovqcc %fcc0 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVCC(0, q);
                    break;
                case 0x041: /* V9 fmovscc %fcc1 */
                    FMOVCC(1, s);
                    break;
                case 0x042: /* V9 fmovdcc %fcc1 */
                    FMOVCC(1, d);
                    break;
                case 0x043: /* V9 fmovqcc %fcc1 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVCC(1, q);
                    break;
                case 0x081: /* V9 fmovscc %fcc2 */
                    FMOVCC(2, s);
                    break;
                case 0x082: /* V9 fmovdcc %fcc2 */
                    FMOVCC(2, d);
                    break;
                case 0x083: /* V9 fmovqcc %fcc2 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVCC(2, q);
                    break;
                case 0x0c1: /* V9 fmovscc %fcc3 */
                    FMOVCC(3, s);
                    break;
                case 0x0c2: /* V9 fmovdcc %fcc3 */
                    FMOVCC(3, d);
                    break;
                case 0x0c3: /* V9 fmovqcc %fcc3 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVCC(3, q);
                    break;
#undef FMOVCC
#define FMOVCC(xcc, sz)                                            \
                do {                                               \
                    DisasCompare cmp;                              \
                    cond = GET_FIELD_SP(insn, 14, 17);             \
                    gen_compare(&cmp, xcc, cond, dc);              \
                    gen_fmov##sz(dc, &cmp, rd, rs2);               \
                    free_compare(&cmp);                            \
                } while (0)

                case 0x101: /* V9 fmovscc %icc */
                    FMOVCC(0, s);
                    break;
                case 0x102: /* V9 fmovdcc %icc */
                    FMOVCC(0, d);
                    break;
                case 0x103: /* V9 fmovqcc %icc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVCC(0, q);
                    break;
                case 0x181: /* V9 fmovscc %xcc */
                    FMOVCC(1, s);
                    break;
                case 0x182: /* V9 fmovdcc %xcc */
                    FMOVCC(1, d);
                    break;
                case 0x183: /* V9 fmovqcc %xcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVCC(1, q);
                    break;
#undef FMOVCC
#endif
                case 0x51: /* fcmps, V9 %fcc */
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                    break;
                case 0x52: /* fcmpd, V9 %fcc */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
                    break;
                case 0x53: /* fcmpq, V9 %fcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_op_fcmpq(rd & 3);
                    break;
                case 0x55: /* fcmpes, V9 %fcc */
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                    break;
                case 0x56: /* fcmped, V9 %fcc */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
                    break;
                case 0x57: /* fcmpeq, V9 %fcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_op_fcmpeq(rd & 3);
                    break;
                default:
                    goto illegal_insn;
                }
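/* Illustrative sketch, not part of the original file: one way to read
   the fmovcc sub-opcode pattern spelled out case by case above --
   consecutive %fcc variants differ by 0x40, so bits 7:6 pick the set. */
#if 0
#include <assert.h>

int main(void)
{
    int xop = 0x081;             /* fmovscc %fcc2 */
    assert(((xop >> 6) & 3) == 2);
    return 0;
}
#endif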
            } else if (xop == 0x2) {
                TCGv dst = gen_dest_gpr(dc, rd);
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    /* clr/mov shortcut : or %g0, x, y -> mov x, y */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_movi_tl(dst, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            tcg_gen_movi_tl(dst, 0);
                            gen_store_gpr(dc, rd, dst);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            gen_store_gpr(dc, rd, cpu_src2);
                        }
                    }
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(dst, cpu_src1, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* mov shortcut:  or x, %g0, y -> mov x, y */
                            gen_store_gpr(dc, rd, cpu_src1);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
                            gen_store_gpr(dc, rd, dst);
                        }
                    }
                }
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
#endif
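/* Illustrative sketch, not part of the original file: why the 32-bit
   and 64-bit shift forms above mask the count differently (0x1f vs
   0x3f) and pre-extend the source for the 32-bit variants. */
#if 0
#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint64_t x = 0xffffffff00000010ull;
    uint64_t srl  = (x & 0xffffffffull) >> (33 & 0x1f); /* count 33 -> 1 */
    uint64_t srlx = x >> (33 & 0x3f);                   /* full 6 bits  */
    assert(srl == 0x8 && srlx == 0x7fffffffull);
    return 0;
}
#endif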
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (xop & 0x10) {
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x1: /* and */
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (xop & 0x10) {
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                            dc->cc_op = CC_OP_SUB;
                        } else {
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x5: /* andn */
                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
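/* Illustrative sketch, not part of the original file: a toy model of
   the lazy condition codes used above.  The translator only records
   which op last set the flags (cpu_cc_op / dc->cc_op) plus its result;
   a Z/N/C/V bit is derived later, on demand.  All names here are
   invented; this is not QEMU's actual helper. */
#if 0
#include <assert.h>
#include <stdint.h>

enum { DEMO_CC_LOGIC, DEMO_CC_ADD };

static int demo_cc_op;
static uint32_t demo_cc_dst;

static int demo_flag_Z(void)        /* evaluated only when flags are read */
{
    return demo_cc_dst == 0;
}

int main(void)
{
    /* "andcc" with a zero result: record the op, defer the flags. */
    demo_cc_op = DEMO_CC_LOGIC;
    demo_cc_dst = 0xff00u & 0x00ffu;
    assert(demo_flag_Z() == 1);
    return 0;
}
#endif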
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        gen_helper_taddcctv(cpu_dst, cpu_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        gen_helper_tsubcctv(cpu_dst, cpu_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        update_psr(dc);
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25: /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x26: /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x27: /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
#endif
                    case 0x30:
                        {
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
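/* Illustrative sketch, not part of the original file: wr takes the XOR
   of its two sources, which is why the cases here XOR before masking;
   "mov x, %y" is encoded as "wr %g0, x, %y" since x ^ 0 == x. */
#if 0
#include <assert.h>

int main(void)
{
    unsigned rs1 = 0, imm = 0x1234;
    assert((rs1 ^ imm) == 0x1234);
    return 0;
}
#endif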
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8 manual, nop
                                                   on the microSPARC II */
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_env, cpu_dst);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                                tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc)) {
                                    goto jmp_insn;
                                }
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_env, cpu_tmp64);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_dst);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved(cpu_env);
                                break;
                            case 1:
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                    tcg_gen_st_i32(cpu_tmp32, r_tsptr,
                                                   offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                save_state(dc);
                                gen_helper_wrpstate(cpu_env, cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                save_state(dc);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                                tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                               offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                            if (dc->def->nwindows != 32)
                                tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
                                                (1 << dc->def->nwindows) - 1);
                            tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
#endif
                        }
                        break;
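/* Illustrative sketch, not part of the original file: the %wim write
   above keeps only one valid bit per register window. */
#if 0
#include <assert.h>

int main(void)
{
    int nwindows = 8;                       /* invented configuration */
    unsigned wim = 0xffffffffu & ((1u << nwindows) - 1);
    assert(wim == 0xff);
    return 0;
}
#endif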
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            free_compare(&cmp);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        gen_helper_popc(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            free_compare(&cmp);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
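/* Illustrative sketch, not part of the original file: movcc's 11-bit
   immediate, re-extended above with GET_FIELD_SPs, is two's complement,
   so the raw field 0x7ff means -1 rather than 2047. */
#if 0
#include <assert.h>

int main(void)
{
    int raw = 0x7ff;
    int simm = (raw << 21) >> 21;    /* sign-extend from 11 bits */
    assert(simm == -1);
    return 0;
}
#endif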
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }

                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
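/* Illustrative sketch, not part of the original file: the architectural
   effect of alignaddr -- the 8-byte-aligned sum goes to rd while the
   dropped low bits land in GSR.align for a later faligndata.  Addresses
   are invented; this models the semantics, not gen_alignaddr itself. */
#if 0
#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint64_t rs1 = 0x1003, rs2 = 0x2;
    uint64_t sum = rs1 + rs2;
    assert((sum & ~7ull) == 0x1000);    /* value written to rd */
    assert((sum & 7) == 5);             /* stashed in GSR.align */
    return 0;
}
#endif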
                case 0x019: /* VIS II bmask */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F();
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D();
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D();
                    tcg_gen_movi_i64(cpu_dst_64, -1);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_movi_i32(cpu_dst_32, -1);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                TCGv_i32 r_const;

                save_state(dc);
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        cpu_src2 = gen_load_gpr(dc, rs2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else {
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                    }
                }
                gen_helper_restore(cpu_env);
                gen_mov_pc_npc(dc);
                r_const = tcg_const_i32(3);
                gen_helper_check_align(cpu_env, cpu_dst, r_const);
                tcg_temp_free_i32(r_const);
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
#endif
            } else {
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        cpu_src2 = gen_load_gpr(dc, rs2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else {
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                    }
                }
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        TCGv t;
                        TCGv_i32 r_const;

                        t = gen_dest_gpr(dc, rd);
                        tcg_gen_movi_tl(t, dc->pc);
                        gen_store_gpr(dc, rd, t);
                        gen_mov_pc_npc(dc);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_env, cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_dst);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
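/* Illustrative sketch, not part of the original file: what the mask-3
   check_align above tests -- a jmpl target with either low bit set
   would raise a mem_address_not_aligned trap. */
#if 0
#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint32_t target = 0x40001002u;       /* invented jump target */
    assert((target & 3) != 0);           /* -> the helper would trap */
    return 0;
}
#endif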
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_env, cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett(cpu_env);
                    }
                    goto jmp_insn;
#endif
                case 0x3b: /* flush */
                    if (!((dc)->def->features & CPU_FEATURE_FLUSH))
                        goto unimp_flush;
                    /* nop */
                    break;
                case 0x3c:      /* save */
                    save_state(dc);
                    gen_helper_save(cpu_env);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x3d:      /* restore */
                    save_state(dc);
                    gen_helper_restore(cpu_env);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
                case 0x3e:      /* V9 done/retry */
                    {
                        switch (rd) {
                        case 0:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_done(cpu_env);
                            goto jmp_insn;
                        case 1:
                            if (!supervisor(dc))
                                goto priv_insn;
                            dc->npc = DYNAMIC_PC;
                            dc->pc = DYNAMIC_PC;
                            gen_helper_retry(cpu_env);
                            goto jmp_insn;
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            }
            break;
        }
        break;
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            cpu_src1 = get_src1(dc, insn);
            if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
                rs2 = GET_FIELD(insn, 27, 31);
                cpu_src2 = gen_load_gpr(dc, rs2);
                tcg_gen_mov_tl(cpu_addr, cpu_src1);
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else {
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
                }
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                TCGv cpu_val = gen_dest_gpr(dc, rd);

                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        save_state(dc);
                        r_const = tcg_const_i32(7);
                        /* XXX remove alignment check */
                        gen_helper_check_align(cpu_env, cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_store_gpr(dc, rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
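/* Illustrative sketch, not part of the original file: ldd performs one
   64-bit fetch and splits it across an even/odd register pair exactly
   as the shifts and masks above do -- high word to rd, low to rd+1. */
#if 0
#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint64_t mem = 0x1122334455667788ull;     /* invented memory word */
    uint32_t rd_even = mem >> 32;
    uint32_t rd_odd  = (uint32_t)mem;
    assert(rd_even == 0x11223344u && rd_odd == 0x55667788u);
    return 0;
}
#endif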
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    cpu_src1 = gen_load_gpr(dc, rd);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    save_state(dc);
                    gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc);
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc);
                    cpu_src1 = gen_load_gpr(dc, rd);
                    gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
                    break;

#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    save_state(dc);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    gen_update_fprs_dirty(rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    save_state(dc);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    save_state(dc);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_store_gpr(dc, rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                save_state(dc);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    cpu_dst_32 = gen_dest_fpr_F();
                    tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_env, cpu_tmp64);
                    } else {
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_env, cpu_tmp32);
                    }
#else
                    tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                    gen_helper_ldfsr(cpu_env, cpu_tmp32);
#endif
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_ldqf(cpu_env, cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                        gen_update_fprs_dirty(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    gen_address_mask(dc, cpu_addr);
                    cpu_dst_64 = gen_dest_fpr_D();
                    tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                TCGv cpu_val = gen_load_gpr(dc, rd);

                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;
                        TCGv lo;

                        save_state(dc);
                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        /* XXX remove alignment check */
                        gen_helper_check_align(cpu_env, cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        lo = gen_load_gpr(dc, rd + 1);
                        tcg_gen_concat_tl_i64(cpu_tmp64, lo, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
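/* Illustrative sketch, not part of the original file: std is the mirror
   image of ldd -- rd supplies the high word and rd+1 the low word of
   the single 64-bit store built by tcg_gen_concat_tl_i64 above (second
   operand in the upper half). */
#if 0
#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint32_t rd_even = 0x11223344u, rd_odd = 0x55667788u;
    uint64_t mem = ((uint64_t)rd_even << 32) | rd_odd;
    assert(mem == 0x1122334455667788ull);
    return 0;
}
#endif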
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
#endif
                gen_st_asi(cpu_val, cpu_addr, insn, 4);
                dc->npc = DYNAMIC_PC;
                break;
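                /* Stores through an ASI can reach side-effecting system
                   state (MMU and cache-control registers, ...), so the next
                   PC is marked dynamic and the TB ends without static
                   chaining; the alternate-space stores below follow the
                   same pattern. */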
            case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
#endif
                gen_st_asi(cpu_val, cpu_addr, insn, 1);
                dc->npc = DYNAMIC_PC;
                break;
            case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
#endif
                gen_st_asi(cpu_val, cpu_addr, insn, 2);
                dc->npc = DYNAMIC_PC;
                break;
            case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
#endif
                gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
                break;
#ifdef TARGET_SPARC64
            case 0x0e: /* V9 stx */
                gen_address_mask(dc, cpu_addr);
                tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                break;
            case 0x1e: /* V9 stxa */
                gen_st_asi(cpu_val, cpu_addr, insn, 8);
                dc->npc = DYNAMIC_PC;
                break;
#endif
            default:
                goto illegal_insn;
            }
        } else if (xop > 0x23 && xop < 0x28) {
            if (gen_trap_ifnofpu(dc)) {
                goto jmp_insn;
            }
            switch (xop) {
            case 0x24: /* stf, store fpreg */
                gen_address_mask(dc, cpu_addr);
                cpu_src1_32 = gen_load_fpr_F(dc, rd);
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
                tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                break;
            case 0x25: /* stfsr, V9 stxfsr */
#ifdef TARGET_SPARC64
                gen_address_mask(dc, cpu_addr);
                tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
                if (rd == 1) {
                    tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                } else {
                    tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
                }
#else
                tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
                tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
#endif
                break;
            case 0x26:
#ifdef TARGET_SPARC64
                /* V9 stqf, store quad fpreg */
                {
                    TCGv_i32 r_const;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rd));
                    r_const = tcg_const_i32(dc->mem_idx);
                    gen_address_mask(dc, cpu_addr);
                    gen_helper_stqf(cpu_env, cpu_addr, r_const);
                    tcg_temp_free_i32(r_const);
                }
                break;
#else /* !TARGET_SPARC64 */
                /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                goto illegal_insn;
#else
                if (!supervisor(dc))
                    goto priv_insn;
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                goto nfq_insn;
#endif
#endif
            case 0x27: /* stdf, store double fpreg */
                gen_address_mask(dc, cpu_addr);
                cpu_src1_64 = gen_load_fpr_D(dc, rd);
                tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
                break;
            default:
                goto illegal_insn;
            }
        } else if (xop > 0x33 && xop < 0x3f) {
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x34: /* V9 stfa */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                gen_stf_asi(cpu_addr, insn, 4, rd);
                break;
            case 0x36: /* V9 stqfa */
                {
                    TCGv_i32 r_const;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    r_const = tcg_const_i32(7);
                    gen_helper_check_align(cpu_env, cpu_addr, r_const);
                    tcg_temp_free_i32(r_const);
                    gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                }
                break;
            case 0x37: /* V9 stdfa */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                break;
            case 0x3c: /* V9 casa */
                gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
                break;
            case 0x3e: /* V9 casxa */
                gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
                break;
#else
            case 0x34: /* stc */
            case 0x35: /* stcsr */
            case 0x36: /* stdcq */
            case 0x37: /* stdc */
                goto ncp_insn;
#endif
            default:
                goto illegal_insn;
            }
        } else {
            goto illegal_insn;
        }
        }
        break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    goto egress;
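/* npc models the SPARC branch delay slot: DYNAMIC_PC means the next PC is
   only known at run time (already held in cpu_npc), while JUMP_PC means it
   is one of the two static jump_pc[] targets, selected at run time by the
   condition value in cpu_cond. */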
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        /* r_const is a TCGv_i32, so it must be freed with the _i32 variant */
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    tcg_temp_free(cpu_tmp1);
    tcg_temp_free(cpu_tmp2);
    if (dc->n_t32 != 0) {
        int i;
        for (i = dc->n_t32 - 1; i >= 0; --i) {
            tcg_temp_free_i32(dc->t32[i]);
        }
        dc->n_t32 = 0;
    }
    if (dc->n_ttl != 0) {
        int i;
        for (i = dc->n_ttl - 1; i >= 0; --i) {
            tcg_temp_free(dc->ttl[i]);
        }
        dc->n_ttl = 0;
    }
}
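/* dc->t32[]/dc->ttl[] collect temporaries allocated while translating one
   instruction; freeing them at egress means every exit path above, including
   the exception gotos, releases them without duplicated cleanup code. */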
static inline void gen_intermediate_code_internal(TranslationBlock *tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;
    unsigned int insn;

    pc_start = tb->pc;
    memset(dc, 0, sizeof(DisasContext));
    last_pc = dc->pc = pc_start;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    dc->tb = tb;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    gen_icount_start();
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start) {
                        save_state(dc);
                    }
                    gen_helper_debug(cpu_env);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
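        /* The breakpoint path above ends the TB at once: state is synced
           unless we are still at the first instruction, and
           gen_helper_debug() raises EXCP_DEBUG when the TB executes. */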
5235 qemu_log("Search PC...\n");
5236 j
= gen_opc_ptr
- gen_opc_buf
;
5240 gen_opc_instr_start
[lj
++] = 0;
5241 gen_opc_pc
[lj
] = dc
->pc
;
5242 gen_opc_npc
[lj
] = dc
->npc
;
5243 gen_opc_instr_start
[lj
] = 1;
5244 gen_opc_icount
[lj
] = num_insns
;
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }
        last_pc = dc->pc;
        insn = cpu_ldl_code(env, dc->pc);

        cpu_tmp0 = tcg_temp_new();
        cpu_tmp32 = tcg_temp_new_i32();
        cpu_tmp64 = tcg_temp_new_i64();
        cpu_dst = tcg_temp_new();
        cpu_addr = tcg_temp_new();

        disas_sparc_insn(dc, insn);
        num_insns++;

        tcg_temp_free(cpu_addr);
        tcg_temp_free(cpu_dst);
        tcg_temp_free_i64(cpu_tmp64);
        tcg_temp_free_i32(cpu_tmp32);
        tcg_temp_free(cpu_tmp0);
        if (dc->is_br) {
            break;
        }
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4)) {
            break;
        }
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0) {
            break;
        }
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
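    /* Translation stops early on: an instruction that closed the block
       (is_br), a non-sequential PC, a page crossing (keeps a TT_TFAULT PC
       in the right page), or single-stepping; otherwise it runs until the
       opcode buffer, the TB size limit or the icount budget is exhausted. */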
 exit_gen_loop:
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
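    /* With both pc and npc static, the TB can chain directly to its
       successor via goto_tb; any dynamic component forces a slow exit
       through tcg_gen_exit_tb(0) so the main loop looks up the next TB. */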
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j) {
            gen_opc_instr_start[lj++] = 0;
        }
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
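    /* In search-PC mode the per-op tables filled above, plus the jump_pc
       pair saved here, are what restore_state_to_opc() consults to rebuild
       a precise guest pc/npc for a faulting instruction. */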
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    static const char * const fregnames[32] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };
    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUSPARCState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUSPARCState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUSPARCState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUSPARCState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUSPARCState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUSPARCState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUSPARCState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUSPARCState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++) {
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUSPARCState, gregs[i]),
                                              gregnames[i]);
        }
        for (i = 0; i < TARGET_DPREGS; i++) {
            cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                                offsetof(CPUSPARCState, fpr[i]),
                                                fregnames[i]);
        }
        /* register helpers */
#define GEN_HELPER 2
#include "helper.h"
    }
}
void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }
}
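/* The values 1 and 2 tested above are the DYNAMIC_PC and JUMP_PC markers
   stored into gen_opc_npc[] during translation; any other value is the real
   next PC. */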