/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];
    TCGv ttl[5];
    int n_t32;
    int n_ttl;
} DisasContext;

typedef struct {
    TCGCond cond;
    bool is_bool;
    bool g1, g2;
    TCGv c1, c2;
} DisasCompare;
// This macro uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the bit order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
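
/* Worked example (added for illustration): GET_FIELD(insn, 2, 6)
   extracts the rd field from bits 29..25 of the instruction word
   (big-endian bit numbering, where bit 0 is 2^31), while
   GET_FIELD_SP(X, 0, 21) expands to GET_FIELD(X, 10, 31) and extracts
   the same 22-bit field counted from the manuals' bit 0 = 2^0.  */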
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

#define IS_IMM (insn & (1<<13))
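
/* Example (added for illustration): a 13-bit simm13 field holding
   0x1fff sign-extends to -1: sign_extend(0x1fff, 13) shifts the value
   up by 32 - 13 = 19 bits and arithmetic-shifts it back down.  */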
static inline TCGv_i32 get_temp_i32(DisasContext *dc)
{
    TCGv_i32 t;
    assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
    dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
    return t;
}

static inline TCGv get_temp_tl(DisasContext *dc)
{
    TCGv t;
    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
    return t;
}
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
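
/* Note (added): in the V9 FPRS register, bit 0 (DL) marks the lower
   half of the FP file (%f0-%f31) dirty and bit 1 (DU) the upper half,
   which is what the (rd < 32) ? 1 : 2 constant above selects.  */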
/* floating point registers moves */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        TCGv_i32 ret = get_temp_i32(dc);
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        return ret;
    }
#endif
}
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}

static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}

static TCGv_i64 gen_dest_fpr_D(void)
{
    return tcg_temp_new_i64();
}
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
#ifdef TARGET_SPARC64
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc)) {
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
    }
#endif
}
static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
{
    if (reg == 0 || reg >= 8) {
        TCGv t = get_temp_tl(dc);
        if (reg == 0) {
            tcg_gen_movi_tl(t, 0);
        } else {
            tcg_gen_ld_tl(t, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
        }
        return t;
    } else {
        return cpu_gregs[reg];
    }
}
static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
{
    if (reg > 0) {
        if (reg < 8) {
            tcg_gen_mov_tl(cpu_gregs[reg], v);
        } else {
            tcg_gen_st_tl(v, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
        }
    }
}
static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
{
    if (reg == 0 || reg >= 8) {
        return get_temp_tl(dc);
    } else {
        return cpu_gregs[reg];
    }
}
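
/* Note (added): %g0 always reads as zero and discards writes, and
   registers %r8..%r31 live in the memory-backed register window
   pointed to by cpu_regwptr, so only %g1-%g7 are held in globally
   allocated TCG values -- hence the reg == 0 || reg >= 8 tests in the
   gpr helpers above.  */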
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
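
/* Note (added, to the best of my understanding of the TCG convention
   this file relies on): the value passed to tcg_gen_exit_tb() above is
   the TranslationBlock pointer with the jump-slot index (tb_num, 0 or
   1) encoded in its low bits; the execution loop uses that value to
   patch the direct jump once the destination TB has been translated.  */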
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
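
/* Worked example (added): if the previous add was 0xffffffff + 1, the
   truncated 32-bit result is 0, and 0 < 0xffffffff unsigned, so the
   setcond above recovers carry = 1 without any flags register.  */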
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero;

    r_temp = tcg_temp_new();

    /* If the Y register's LSB is clear, the second operand is zero.  */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}
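
/* Worked example (added): for UMUL with both operands 0x80000000, the
   full 64-bit product is 0x4000000000000000, so %y receives the high
   word 0x40000000 and the destination register the low word 0.  */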
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
// 1:
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0:
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
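
/* Illustrative scenario (added): after a conditional branch with npc ==
   JUMP_PC, the architectural npc is jump_pc[0] if the branch was taken
   and jump_pc[1] otherwise.  The movcond above materializes that
   choice into cpu_npc, using cpu_cond != 0 as the "taken" predicate.  */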
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
    save_npc(dc);
}
static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        cmp->is_bool = false;
        cmp->g2 = false;
        cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            cmp->g1 = false;
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->g1 = true;
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7: /* overflow */
        case 15: /* !overflow */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                     DisasContext *dc)
{
    DisasCompare cmp;
    gen_compare(&cmp, cc, cond, dc);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}

static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    DisasCompare cmp;
    gen_fcompare(&cmp, cc, cond);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
#endif
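
/* Note (added): entries 0 and 4 of gen_tcg_cond_reg above are -1
   because those rcond encodings are reserved in the SPARC V9 ISA; the
   decoder is expected never to pass them here.  */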
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                tcg_gen_movi_tl(cpu_npc, JUMP_PC);
            }
            dc->npc = JUMP_PC;
        }
    }
}
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                tcg_gen_movi_tl(cpu_npc, JUMP_PC);
            }
            dc->npc = JUMP_PC;
        }
    }
}
#ifdef TARGET_SPARC64
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }

    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            tcg_gen_movi_tl(cpu_npc, JUMP_PC);
        }
        dc->npc = JUMP_PC;
    }
}
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
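
/* Overview (added): the gen_fop_* wrappers below factor out the
   load/operate/store boilerplate shared by the FP instructions.  The
   suffix letters encode operand widths: F = 32-bit single, D = 64-bit
   double, Q = 128-bit quad staged through the qt0/qt1 scratch slots in
   CPUSPARCState.  Each wrapper receives a "gen" callback that emits
   the actual operation.  */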
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif

static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D();

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif

static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);

    tcg_gen_concat_tl_i64(cpu_tmp64, lo, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(DisasContext *dc, TCGv addr,
                               TCGv val2, int insn, int rd)
{
    TCGv val1 = gen_load_gpr(dc, rd);
    TCGv dst = gen_dest_gpr(dc, rd);
    TCGv_i32 r_asi = gen_get_asi(insn, addr);

    gen_helper_cas_asi(dst, cpu_env, addr, val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    gen_store_gpr(dc, rd, dst);
}

static inline void gen_casx_asi(DisasContext *dc, TCGv addr,
                                TCGv val2, int insn, int rd)
{
    TCGv val1 = gen_load_gpr(dc, rd);
    TCGv dst = gen_dest_gpr(dc, rd);
    TCGv_i32 r_asi = gen_get_asi(insn, addr);

    gen_helper_casx_asi(dst, cpu_env, addr, val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    gen_store_gpr(dc, rd, dst);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv src, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, src);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv t;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);

    t = gen_dest_gpr(dc, rd + 1);
    tcg_gen_trunc_i64_tl(t, cpu_tmp64);
    gen_store_gpr(dc, rd + 1, t);

    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_store_gpr(dc, rd, hi);
}

static inline void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                                int insn, int rd)
{
    TCGv_i32 r_asi, r_size;
    TCGv lo = gen_load_gpr(dc, rd + 1);

    tcg_gen_concat_tl_i64(cpu_tmp64, lo, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
#endif
static TCGv get_src1(DisasContext *dc, unsigned int insn)
{
    unsigned int rs1 = GET_FIELD(insn, 13, 17);
    return gen_load_gpr(dc, rs1);
}

static TCGv get_src2(DisasContext *dc, unsigned int insn)
{
    if (IS_IMM) { /* immediate */
        target_long simm = GET_FIELDs(insn, 19, 31);
        TCGv t = get_temp_tl(dc);
        tcg_gen_movi_tl(t, simm);
        return t;
    } else {     /* register */
        unsigned int rs2 = GET_FIELD(insn, 27, 31);
        return gen_load_gpr(dc, rs2);
    }
}
#ifdef TARGET_SPARC64
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the latter.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        tcg_gen_trunc_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_trunc_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}
static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i64 dst = gen_dest_fpr_D();
    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
                        gen_load_fpr_D(dc, rs),
                        gen_load_fpr_D(dc, rd));
    gen_store_fpr_D(dc, rd, dst);
}

static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    gen_update_fprs_dirty(qd);
}
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    }
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);
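
    /* Worked example (added): for edge8 (width 8, big-endian, left),
       an address with low bits 2 gives index = 2 << 3 = 16, and
       (0x80c0e0f0f8fcfeff >> 16) & 0xff = 0xfc, i.e. bytes 2..7 of the
       doubleword participate in the edge mask.  */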
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
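
/* Note (added): the setcond/neg/or sequence at the end of gen_edge
   builds an all-ones or all-zero mask without a branch: (s1 == s2)
   yields 0 or 1, negation turns 1 into -1 (all ones), and OR-ing that
   into lo2 leaves lo2 = -1 exactly when the two addresses match.  */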
static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
{
    TCGv tmp = tcg_temp_new();

    tcg_gen_add_tl(tmp, s1, s2);
    tcg_gen_andi_tl(dst, tmp, -8);
    if (left) {
        tcg_gen_neg_tl(tmp, tmp);
    }
    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);

    tcg_temp_free(tmp);
}
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);

    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(shift);
}
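
/* Worked example (added): with GSR.align = 3, shift = 24, so the
   result is (s1 << 24) | (s2 >> 40).  The xori with 63 plus the extra
   constant shift by 1 computes s2 >> (64 - shift) without ever
   shifting by a full 64 bits.  */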
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
2487 static void disas_sparc_insn(DisasContext
* dc
, unsigned int insn
)
2489 unsigned int opc
, rs1
, rs2
, rd
;
2490 TCGv cpu_src1
, cpu_src2
;
2491 TCGv_i32 cpu_src1_32
, cpu_src2_32
, cpu_dst_32
;
2492 TCGv_i64 cpu_src1_64
, cpu_src2_64
, cpu_dst_64
;
2495 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
| CPU_LOG_TB_OP_OPT
))) {
2496 tcg_gen_debug_insn_start(dc
->pc
);
2499 opc
= GET_FIELD(insn
, 0, 1);
2501 rd
= GET_FIELD(insn
, 2, 6);
    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(dc, insn);
                    do_branch_reg(dc, target, insn, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                /* Special-case %g0 because that's the canonical nop.  */
                if (rd) {
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    TCGv t = gen_dest_gpr(dc, rd);
                    tcg_gen_movi_tl(t, value << 10);
                    gen_store_gpr(dc, rd, t);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
        break;
    case 1:                     /* CALL */
        {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;
            TCGv o7 = gen_dest_gpr(dc, 15);

            tcg_gen_movi_tl(o7, dc->pc);
            gen_store_gpr(dc, 15, o7);

            target += dc->pc;
            gen_mov_pc_npc(dc);
#ifdef TARGET_SPARC64
            if (unlikely(AM_CHECK(dc))) {
                target &= 0xffffffffULL;
            }
#endif
            dc->npc = target;
        }
        goto jmp_insn;
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            if (xop == 0x3a) {  /* generate trap */
                int cond = GET_FIELD(insn, 3, 6);
                TCGv_i32 trap;
                int l1 = -1, mask;

                if (cond == 0) {
                    /* Trap never.  */
                    break;
                }

                save_state(dc);

                if (cond != 8) {
                    /* Conditional trap.  */
                    DisasCompare cmp;
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);
                    if (cc == 0) {
                        gen_compare(&cmp, 0, cond, dc);
                    } else if (cc == 2) {
                        gen_compare(&cmp, 1, cond, dc);
                    } else {
                        goto illegal_insn;
                    }
#else
                    gen_compare(&cmp, 0, cond, dc);
#endif
                    l1 = gen_new_label();
                    tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
                                      cmp.c1, cmp.c2, l1);
                    free_compare(&cmp);
                }

                mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
                        ? UA2005_HTRAP_MASK : V8_TRAP_MASK);

                /* Don't use the normal temporaries, as they may well have
                   gone out of scope with the branch above.  While we're
                   doing that we might as well pre-truncate to 32-bit.  */
                trap = tcg_temp_new_i32();

                rs1 = GET_FIELD_SP(insn, 14, 18);
                if (IS_IMM) {
                    rs2 = GET_FIELD_SP(insn, 0, 6);
                    if (rs1 == 0) {
                        tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
                        /* Signal that the trap value is fully constant.  */
                        mask = 0;
                    } else {
                        TCGv t1 = gen_load_gpr(dc, rs1);
                        tcg_gen_trunc_tl_i32(trap, t1);
                        tcg_gen_addi_i32(trap, trap, rs2);
                    }
                } else {
                    TCGv t1, t2;
                    rs2 = GET_FIELD_SP(insn, 0, 4);
                    t1 = gen_load_gpr(dc, rs1);
                    t2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(t1, t1, t2);
                    tcg_gen_trunc_tl_i32(trap, t1);
                }
                if (mask != 0) {
                    tcg_gen_andi_i32(trap, trap, mask);
                    tcg_gen_addi_i32(trap, trap, TT_TRAP);
                }

                gen_helper_raise_exception(cpu_env, trap);
                tcg_temp_free_i32(trap);

                if (cond == 8) {
                    /* An unconditional trap ends the TB.  */
                    dc->is_br = 1;
                    goto jmp_insn;
                } else {
                    /* A conditional trap falls through to the next insn.  */
                    gen_set_label(l1);
                    break;
                }
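                /* Tcc recap: the trap number is ((rs1 + rs2/imm) & mask)
                   + TT_TRAP, with a 7-bit mask on V8 and an 8-bit mask
                   when the hypervisor feature is active in privileged
                   mode. */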
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
                    if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
                        TCGv t = gen_dest_gpr(dc, rd);
                        /* Read Asr17 for a Leon3 monoprocessor */
                        tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
                        gen_store_gpr(dc, rd, t);
                        break;
                    }
#endif
                    gen_store_gpr(dc, rd, cpu_y);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    gen_helper_rdccr(cpu_dst, cpu_env);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUSPARCState, tick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_store_gpr(dc, rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    {
                        TCGv t = gen_dest_gpr(dc, rd);
                        if (unlikely(AM_CHECK(dc))) {
                            tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
                        } else {
                            tcg_gen_movi_tl(t, dc->pc);
                        }
                        gen_store_gpr(dc, rd, t);
                    }
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    gen_store_gpr(dc, rd, cpu_gsr);
                    break;
                case 0x16: /* Softint */
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    gen_store_gpr(dc, rd, cpu_tick_cmpr);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUSPARCState, stick));
                        gen_helper_tick_get_count(cpu_dst, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                        gen_store_gpr(dc, rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    gen_store_gpr(dc, rd, cpu_stick_cmpr);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc)) {
                    goto priv_insn;
                }
                gen_helper_rdpsr(cpu_dst, cpu_env);
#else
                CHECK_IU_FEATURE(dc, HYPV);
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_mov_tl(cpu_dst, cpu_hintp);
                    break;
                case 5: // htba
                    tcg_gen_mov_tl(cpu_dst, cpu_htba);
                    break;
                case 6: // hver
                    tcg_gen_mov_tl(cpu_dst, cpu_hver);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tnpc));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv_ptr r_tsptr;

                        r_tsptr = tcg_temp_new_ptr();
                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
                                      offsetof(trap_state, tstate));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 3: // tt
                    {
                        TCGv_ptr r_tsptr = tcg_temp_new_ptr();

                        gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                        tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
                                         offsetof(trap_state, tt));
                        tcg_temp_free_ptr(r_tsptr);
                    }
                    break;
                case 4: // tick
                    {
                        TCGv_ptr r_tickptr;

                        r_tickptr = tcg_temp_new_ptr();
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUSPARCState, tick));
                        gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
                        tcg_temp_free_ptr(r_tickptr);
                    }
                    break;
                case 5: // tba
                    tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
                    break;
                case 6: // pstate
                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
                                     offsetof(CPUSPARCState, pstate));
                    break;
                case 7: // tl
                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
                                     offsetof(CPUSPARCState, tl));
                    break;
                case 8: // pil
                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
                                     offsetof(CPUSPARCState, psrpil));
                    break;
                case 9: // cwp
                    gen_helper_rdcwp(cpu_tmp0, cpu_env);
                    break;
                case 10: // cansave
                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
                                     offsetof(CPUSPARCState, cansave));
                    break;
                case 11: // canrestore
                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
                                     offsetof(CPUSPARCState, canrestore));
                    break;
                case 12: // cleanwin
                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
                                     offsetof(CPUSPARCState, cleanwin));
                    break;
                case 13: // otherwin
                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
                                     offsetof(CPUSPARCState, otherwin));
                    break;
                case 14: // wstate
                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
                                     offsetof(CPUSPARCState, wstate));
                    break;
                case 16: // UA2005 gl
                    CHECK_IU_FEATURE(dc, GL);
                    tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
                                     offsetof(CPUSPARCState, gl));
                    break;
                case 26: // UA2005 strand status
                    CHECK_IU_FEATURE(dc, HYPV);
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
                    break;
                case 31: // ver
                    tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
                    break;
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
#endif
                gen_store_gpr(dc, rd, cpu_tmp0);
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                save_state(dc);
                gen_helper_flushw(cpu_env);
#else
                if (!supervisor(dc))
                    goto priv_insn;
                gen_store_gpr(dc, rd, cpu_tbr);
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc);
                switch (xop) {
                case 0x1: /* fmovs */
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x5: /* fnegs */
                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
                    break;
                case 0x9: /* fabss */
                    gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
                    break;
                case 0x29: /* fsqrts */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
                    break;
                case 0x2a: /* fsqrtd */
                    CHECK_FPU_FEATURE(dc, FSQRT);
                    gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
                    break;
                case 0x2b: /* fsqrtq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
                    break;
                case 0x41: /* fadds */
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
                    break;
                case 0x42: /* faddd */
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
                    break;
                case 0x43: /* faddq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
                    break;
                case 0x45: /* fsubs */
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
                    break;
                case 0x46: /* fsubd */
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
                    break;
                case 0x47: /* fsubq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
                    break;
                case 0x49: /* fmuls */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
                    break;
                case 0x4a: /* fmuld */
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
                    break;
                case 0x4b: /* fmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    CHECK_FPU_FEATURE(dc, FMUL);
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
                    break;
                case 0x4d: /* fdivs */
                    gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
                    break;
                case 0x4e: /* fdivd */
                    gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
                    break;
                case 0x4f: /* fdivq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
                    break;
                case 0xc4: /* fitos */
                    gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
                    break;
                case 0xc6: /* fdtos */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
                    break;
                case 0xc8: /* fitod */
                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
                    break;
                case 0xc9: /* fstod */
                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
                    break;
                case 0xd1: /* fstoi */
                    gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
                    break;
                case 0xd2: /* fdtoi */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 fmovd */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_move_Q(rd, rs2);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
                    break;
                case 0xa: /* V9 fabsd */
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
                    break;
                case 0x81: /* V9 fstox */
                    gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
                    break;
                case 0x82: /* V9 fdtox */
                    gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
                    break;
                case 0x84: /* V9 fxtos */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                save_state(dc);

#ifdef TARGET_SPARC64
#define FMOVR(sz)                                          \
                do {                                       \
                    DisasCompare cmp;                      \
                    cond = GET_FIELD_SP(insn, 14, 17);     \
                    cpu_src1 = get_src1(dc, insn);         \
                    gen_compare_reg(&cmp, cond, cpu_src1); \
                    gen_fmov##sz(dc, &cmp, rd, rs2);       \
                    free_compare(&cmp);                    \
                } while (0)

                if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
                    FMOVR(s);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    FMOVR(d);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVR(q);
                    break;
                }
#undef FMOVR
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
#define FMOVCC(fcc, sz)                                    \
                do {                                       \
                    DisasCompare cmp;                      \
                    cond = GET_FIELD_SP(insn, 14, 17);     \
                    gen_fcompare(&cmp, fcc, cond);         \
                    gen_fmov##sz(dc, &cmp, rd, rs2);       \
                    free_compare(&cmp);                    \
                } while (0)

                case 0x001: /* V9 fmovscc %fcc0 */
                    FMOVCC(0, s);
                    break;
                case 0x002: /* V9 fmovdcc %fcc0 */
                    FMOVCC(0, d);
                    break;
                case 0x003: /* V9 fmovqcc %fcc0 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVCC(0, q);
                    break;
                case 0x041: /* V9 fmovscc %fcc1 */
                    FMOVCC(1, s);
                    break;
                case 0x042: /* V9 fmovdcc %fcc1 */
                    FMOVCC(1, d);
                    break;
                case 0x043: /* V9 fmovqcc %fcc1 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVCC(1, q);
                    break;
                case 0x081: /* V9 fmovscc %fcc2 */
                    FMOVCC(2, s);
                    break;
                case 0x082: /* V9 fmovdcc %fcc2 */
                    FMOVCC(2, d);
                    break;
                case 0x083: /* V9 fmovqcc %fcc2 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVCC(2, q);
                    break;
                case 0x0c1: /* V9 fmovscc %fcc3 */
                    FMOVCC(3, s);
                    break;
                case 0x0c2: /* V9 fmovdcc %fcc3 */
                    FMOVCC(3, d);
                    break;
                case 0x0c3: /* V9 fmovqcc %fcc3 */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVCC(3, q);
                    break;
#undef FMOVCC
#define FMOVCC(xcc, sz)                                    \
                do {                                       \
                    DisasCompare cmp;                      \
                    cond = GET_FIELD_SP(insn, 14, 17);     \
                    gen_compare(&cmp, xcc, cond, dc);      \
                    gen_fmov##sz(dc, &cmp, rd, rs2);       \
                    free_compare(&cmp);                    \
                } while (0)

                case 0x101: /* V9 fmovscc %icc */
                    FMOVCC(0, s);
                    break;
                case 0x102: /* V9 fmovdcc %icc */
                    FMOVCC(0, d);
                    break;
                case 0x103: /* V9 fmovqcc %icc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVCC(0, q);
                    break;
                case 0x181: /* V9 fmovscc %xcc */
                    FMOVCC(1, s);
                    break;
                case 0x182: /* V9 fmovdcc %xcc */
                    FMOVCC(1, d);
                    break;
                case 0x183: /* V9 fmovqcc %xcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVCC(1, q);
                    break;
#undef FMOVCC
#endif
                case 0x51: /* fcmps, V9 %fcc */
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                    break;
                case 0x52: /* fcmpd, V9 %fcc */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
                    break;
                case 0x53: /* fcmpq, V9 %fcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_op_fcmpq(rd & 3);
                    break;
                case 0x55: /* fcmpes, V9 %fcc */
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                    gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                    break;
                case 0x56: /* fcmped, V9 %fcc */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
                    break;
                case 0x57: /* fcmpeq, V9 %fcc */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_op_fcmpeq(rd & 3);
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x2) {
                TCGv dst = gen_dest_gpr(dc, rd);
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    /* clr/mov shortcut : or %g0, x, y -> mov x, y */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_movi_tl(dst, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            tcg_gen_movi_tl(dst, 0);
                            gen_store_gpr(dc, rd, dst);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            gen_store_gpr(dc, rd, cpu_src2);
                        }
                    }
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(dst, cpu_src1, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* mov shortcut:  or x, %g0, y -> mov x, y */
                            gen_store_gpr(dc, rd, cpu_src1);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
                            gen_store_gpr(dc, rd, dst);
                        }
                    }
                }
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
#endif
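            /* For these V9 forms, insn bit 12 selects the 64-bit shift
               with a 6-bit count; without it the count is masked to 5
               bits and srl/sra operate on the low 32 bits of the
               source. */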
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (xop & 0x10) {
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
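                    /* Lazy condition codes: the -cc variants stash the
                       operands/result plus a CC_OP_* tag instead of
                       computing the PSR flags here; the flags are only
                       materialized when something later reads them. */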
                    case 0x1: /* and */
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (xop & 0x10) {
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                            dc->cc_op = CC_OP_SUB;
                        } else {
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x5: /* andn */
                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        gen_helper_taddcctv(cpu_dst, cpu_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        gen_helper_tsubcctv(cpu_dst, cpu_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    case 0x25: /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x26: /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x27: /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
#endif
                    case 0x30:
                        {
                        switch (rd) {
                        case 0: /* wry */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                            break;
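                        /* All wr%asr forms compute rs1 ^ rs2 (or
                           rs1 ^ simm13), which is why every case in this
                           switch starts with an xor. */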
#ifndef TARGET_SPARC64
                        case 0x01 ... 0x0f: /* undefined in the
                                               SPARCv8 manual, nop
                                               on the microSPARC II */
                        case 0x10 ... 0x1f: /* implementation-dependent
                                               in the SPARCv8 manual, nop
                                               on the microSPARC II */
                            break;
#else
                        case 0x2: /* V9 wrccr */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrccr(cpu_env, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            break;
                        case 0x3: /* V9 wrasi */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
                            tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
                            break;
                        case 0x6: /* V9 wrfprs */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
                            break;
                        case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                            if (supervisor(dc)) {
                                ; // XXX
                            }
#endif
                            break;
                        case 0x13: /* Graphics Status */
                            if (gen_trap_ifnofpu(dc)) {
                                goto jmp_insn;
                            }
                            tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                            break;
                        case 0x14: /* Softint set */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_set_softint(cpu_env, cpu_tmp64);
                            break;
                        case 0x15: /* Softint clear */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_clear_softint(cpu_env, cpu_tmp64);
                            break;
                        case 0x16: /* Softint write */
                            if (!supervisor(dc))
                                goto illegal_insn;
                            tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
                            gen_helper_write_softint(cpu_env, cpu_tmp64);
                            break;
                        case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUSPARCState, tick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_tick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUSPARCState, stick));
                                gen_helper_tick_set_count(r_tickptr,
                                                          cpu_dst);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv_ptr r_tickptr;

                                tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new_ptr();
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUSPARCState, stick));
                                gen_helper_tick_set_limit(r_tickptr,
                                                          cpu_stick_cmpr);
                                tcg_temp_free_ptr(r_tickptr);
                            }
                            break;
                        case 0x10: /* Performance Control */
                        case 0x11: /* Performance Instrumentation
                                      Counter */
                        case 0x12: /* Dispatch Control */
                            break; // XXX not implemented
#endif
                        default:
                            goto illegal_insn;
                        }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved(cpu_env);
                                break;
                            case 1:
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                    tcg_temp_free_ptr(r_tsptr);
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                gen_helper_wrpstate(cpu_env, cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState, gl));
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                // XXX gen_op_wrhpstate();
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            free_compare(&cmp);
                            gen_store_gpr(dc, rd, dst);
                        }
                        break;
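                    /* tcg_gen_movcond_tl implements the conditional move
                       branchlessly: dst receives cpu_src2 when the
                       comparison holds and keeps its old value
                       otherwise. */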
                    case 0x2d: /* V9 sdivx */
                        gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        gen_helper_popc(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            free_compare(&cmp);
                            gen_store_gpr(dc, rd, dst);
                        }
                        break;
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }

                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
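                /* For the whole edge family below, the three trailing
                   gen_edge arguments appear to select the element count
                   (8/16/32), whether condition codes are written, and the
                   little-endian variant, matching the case labels. */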
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
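                /* bmask deposits the low bits of the sum into GSR bits
                   63:32, where a later bshuffle picks up its byte-shuffle
                   mask. */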
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D();
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
                    break;
                case 0x07b: /* VIS I fornot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
                    break;
                case 0x07c: /* VIS I for */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
                    break;
                case 0x07d: /* VIS I fors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
                    break;
                case 0x07e: /* VIS I fone */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D();
                    tcg_gen_movi_i64(cpu_dst_64, -1);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x07f: /* VIS I fones */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, -1);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x080: /* VIS I shutdown */
                case 0x081: /* VIS II siam */
                    // XXX
                    goto illegal_insn;
                default:
                    goto illegal_insn;
                }
#else
                goto ncp_insn;
#endif
            } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
                goto illegal_insn;
#else
                goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                {
                    TCGv_i32 r_const;

                    save_state(dc);
                    cpu_src1 = get_src1(dc, insn);
                    if (IS_IMM) {   /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                    } else {        /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2) {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        } else {
                            tcg_gen_mov_tl(cpu_dst, cpu_src1);
                        }
                    }
                    gen_helper_restore(cpu_env);
                    gen_mov_pc_npc(dc);
                    r_const = tcg_const_i32(3);
                    gen_helper_check_align(cpu_env, cpu_dst, r_const);
                    tcg_temp_free_i32(r_const);
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                }
                goto jmp_insn;
#endif
            } else {
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
                } else {        /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        cpu_src2 = gen_load_gpr(dc, rs2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else {
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                    }
                }
                switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        TCGv t;
                        TCGv_i32 r_const;

                        t = gen_dest_gpr(dc, rd);
                        tcg_gen_movi_tl(t, dc->pc);
                        gen_store_gpr(dc, rd, t);
                        gen_mov_pc_npc(dc);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_env, cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_dst);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
                case 0x39:      /* rett, V9 return */
                    {
                        TCGv_i32 r_const;

                        if (!supervisor(dc))
                            goto priv_insn;
                        gen_mov_pc_npc(dc);
                        r_const = tcg_const_i32(3);
                        gen_helper_check_align(cpu_env, cpu_dst, r_const);
                        tcg_temp_free_i32(r_const);
                        tcg_gen_mov_tl(cpu_npc, cpu_dst);
                        dc->npc = DYNAMIC_PC;
                        gen_helper_rett(cpu_env);
                    }
                    goto jmp_insn;
#endif
4536 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4540 case 0x3c: /* save */
4542 gen_helper_save(cpu_env
);
4543 gen_store_gpr(dc
, rd
, cpu_dst
);
4545 case 0x3d: /* restore */
4547 gen_helper_restore(cpu_env
);
4548 gen_store_gpr(dc
, rd
, cpu_dst
);
4550 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4551 case 0x3e: /* V9 done/retry */
4555 if (!supervisor(dc
))
4557 dc
->npc
= DYNAMIC_PC
;
4558 dc
->pc
= DYNAMIC_PC
;
4559 gen_helper_done(cpu_env
);
4562 if (!supervisor(dc
))
4564 dc
->npc
= DYNAMIC_PC
;
4565 dc
->pc
= DYNAMIC_PC
;
4566 gen_helper_retry(cpu_env
);
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            /* ??? gen_address_mask prevents us from using a source
               register directly.  Always generate a temporary.  */
            TCGv cpu_addr = get_temp_tl(dc);

            tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
            if (xop == 0x3c || xop == 0x3e) {
                /* V9 casa/casxa : no offset */
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                if (simm != 0) {
                    tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
                }
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                TCGv cpu_val = gen_dest_gpr(dc, rd);

                switch (xop) {
                case 0x0:       /* ld, V9 lduw, load unsigned word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1:       /* ldub, load unsigned byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2:       /* lduh, load unsigned halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3:       /* ldd, load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;

                        r_const = tcg_const_i32(7);
                        /* XXX remove alignment check */
                        gen_helper_check_align(cpu_env, cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_store_gpr(dc, rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
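                /* ldd above splits the 64-bit doubleword across an
                   even/odd register pair: the low word goes to rd + 1,
                   the high word to rd. */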
                case 0x9:       /* ldsb, load signed byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa:       /* ldsh, load signed halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd:       /* ldstub -- XXX: should be atomically */
                    {
                        TCGv r_const;

                        gen_address_mask(dc, cpu_addr);
                        tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                        r_const = tcg_const_tl(0xff);
                        tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
                        tcg_temp_free(r_const);
                    }
                    break;
                case 0x0f:      /* swap, swap register with memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
                    cpu_src1 = gen_load_gpr(dc, rd);
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_src1, cpu_addr, dc->mem_idx);
                    tcg_gen_mov_tl(cpu_val, cpu_tmp0);
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10:      /* lda, V9 lduwa, load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11:      /* lduba, load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12:      /* lduha, load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13:      /* ldda, load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    gen_ldda_asi(dc, cpu_val, cpu_addr, insn, rd);
                    goto skip_move;
                case 0x19:      /* ldsba, load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a:      /* ldsha, load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d:      /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f:      /* swapa, swap reg with alt. memory. Also
                                   atomically */
                    CHECK_IU_FEATURE(dc, SWAP);
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    cpu_src1 = gen_load_gpr(dc, rd);
                    gen_swap_asi(cpu_val, cpu_src1, cpu_addr, insn);
                    break;
#endif
#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    save_state(dc);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    save_state(dc);
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    save_state(dc);
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    gen_update_fprs_dirty(rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    save_state(dc);
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    gen_update_fprs_dirty(DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    save_state(dc);
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    gen_update_fprs_dirty(QFPREG(rd));
                    goto skip_move;
#endif
                default:
                    goto illegal_insn;
                }
                gen_store_gpr(dc, rd, cpu_val);
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                save_state(dc);
                switch (xop) {
                case 0x20:      /* ldf, load fpreg */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x21:      /* ldfsr, V9 ldxfsr */
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        gen_helper_ldxfsr(cpu_env, cpu_tmp64);
                        break;
                    }
#endif
                    {
                        TCGv_i32 t32 = get_temp_i32(dc);
                        tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_tl_i32(t32, cpu_tmp0);
                        gen_helper_ldfsr(cpu_env, t32);
                    }
                    break;
                case 0x22:      /* ldqf, load quad fpreg */
                    {
                        TCGv_i32 r_const;

                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        r_const = tcg_const_i32(dc->mem_idx);
                        gen_address_mask(dc, cpu_addr);
                        gen_helper_ldqf(cpu_env, cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        gen_op_store_QT0_fpr(QFPREG(rd));
                        gen_update_fprs_dirty(QFPREG(rd));
                    }
                    break;
                case 0x23:      /* lddf, load double fpreg */
                    gen_address_mask(dc, cpu_addr);
                    cpu_dst_64 = gen_dest_fpr_D();
                    tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                TCGv cpu_val = gen_load_gpr(dc, rd);

                switch (xop) {
                case 0x4: /* st, store word */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* stb, store byte */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* sth, store halfword */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* std, store double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i32 r_const;
                        TCGv lo;

                        gen_address_mask(dc, cpu_addr);
                        r_const = tcg_const_i32(7);
                        /* XXX remove alignment check */
                        gen_helper_check_align(cpu_env, cpu_addr, r_const);
                        tcg_temp_free_i32(r_const);
                        lo = gen_load_gpr(dc, rd + 1);
                        tcg_gen_concat_tl_i64(cpu_tmp64, lo, cpu_val);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
                    break;
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* sta, V9 stwa, store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc);
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x15: /* stba, store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc);
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x16: /* stha, store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    save_state(dc);
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    dc->npc = DYNAMIC_PC;
                    break;
                case 0x17: /* stda, store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        save_state(dc);
                        gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    gen_address_mask(dc, cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    save_state(dc);
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    dc->npc = DYNAMIC_PC;
                    break;
#endif
                default:
                    goto illegal_insn;
                }
        } else if (xop > 0x23 && xop < 0x28) {
            if (gen_trap_ifnofpu(dc)) {
                goto jmp_insn;
            }
            save_state(dc);
            switch (xop) {
            case 0x24:          /* stf, store fpreg */
                gen_address_mask(dc, cpu_addr);
                cpu_src1_32 = gen_load_fpr_F(dc, rd);
                tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
                tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
                break;
            case 0x25:          /* stfsr, V9 stxfsr */
                {
                    TCGv t = get_temp_tl(dc);

                    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUSPARCState, fsr));
#ifdef TARGET_SPARC64
                    gen_address_mask(dc, cpu_addr);
                    if (rd == 1) {
                        tcg_gen_qemu_st64(t, cpu_addr, dc->mem_idx);
                        break;
                    }
#endif
                    tcg_gen_qemu_st32(t, cpu_addr, dc->mem_idx);
                }
                break;
            case 0x26:
#ifdef TARGET_SPARC64
                /* V9 stqf, store quad fpreg */
                {
                    TCGv_i32 r_const;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_op_load_fpr_QT0(QFPREG(rd));
                    r_const = tcg_const_i32(dc->mem_idx);
                    gen_address_mask(dc, cpu_addr);
                    gen_helper_stqf(cpu_env, cpu_addr, r_const);
                    tcg_temp_free_i32(r_const);
                }
                break;
#else /* !TARGET_SPARC64 */
                /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                goto illegal_insn;
#else
                if (!supervisor(dc))
                    goto priv_insn;
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                goto nfq_insn;
#endif
#endif
            case 0x27:          /* stdf, store double fpreg */
                gen_address_mask(dc, cpu_addr);
                cpu_src1_64 = gen_load_fpr_D(dc, rd);
                tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
                break;
            default:
                goto illegal_insn;
            }
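        /* V9 alternate-space FP stores and compare-and-swap. The
           casa/casxa cases fetch rs2 (bits 27..31 of the insn) as the
           comparison value and leave the atomic operation itself to the
           ASI helpers. */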
        } else if (xop > 0x33 && xop < 0x3f) {
            save_state(dc);
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x34:          /* V9 stfa */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                gen_stf_asi(cpu_addr, insn, 4, rd);
                break;
            case 0x36:          /* V9 stqfa */
                {
                    TCGv_i32 r_const;

                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    if (gen_trap_ifnofpu(dc)) {
                        goto jmp_insn;
                    }
                    r_const = tcg_const_i32(7);
                    gen_helper_check_align(cpu_env, cpu_addr, r_const);
                    tcg_temp_free_i32(r_const);
                    gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                }
                break;
            case 0x37:          /* V9 stdfa */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                break;
            case 0x3c:          /* V9 casa */
                rs2 = GET_FIELD(insn, 27, 31);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
                break;
            case 0x3e:          /* V9 casxa */
                rs2 = GET_FIELD(insn, 27, 31);
                cpu_src2 = gen_load_gpr(dc, rs2);
                gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
                break;
#else
            case 0x34:          /* stc */
            case 0x35:          /* stcsr */
            case 0x36:          /* stdcq */
            case 0x37:          /* stdc */
                goto ncp_insn;
#endif
            default:
                goto illegal_insn;
            }
        } else {
            goto illegal_insn;
        }
        break;
    }
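    /* For anything that did not branch, advance the delay-slot pair:
       pc takes the old npc and npc moves on by 4. A run-time npc
       (DYNAMIC_PC) means the same update has to be emitted as TCG ops,
       while a two-valued npc (JUMP_PC) can still be resolved here as a
       static conditional branch. */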
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    goto egress;
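    /* Exception exits. Each label below synchronizes the translation-time
       pc/npc into the CPU state, raises the corresponding trap through a
       helper, and sets dc->is_br so that no fall-through code is
       generated after the trap. */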
 illegal_insn:
    {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_ILL_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
 unimp_flush:
    {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_UNIMP_FLUSH);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_PRIV_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 nfpu_insn:
    save_state(dc);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    goto egress;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
 nfq_insn:
    save_state(dc);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    goto egress;
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_NCP_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
    }
    goto egress;
#endif
 egress:
    if (dc->n_t32 != 0) {
        int i;
        for (i = dc->n_t32 - 1; i >= 0; --i) {
            tcg_temp_free_i32(dc->t32[i]);
        }
        dc->n_t32 = 0;
    }
    if (dc->n_ttl != 0) {
        int i;
        for (i = dc->n_ttl - 1; i >= 0; --i) {
            tcg_temp_free(dc->ttl[i]);
        }
        dc->n_ttl = 0;
    }
}
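/* Translate one basic block: decode instructions starting at tb->pc until
   a branch ends the block, a page boundary is crossed, the opcode buffer
   fills up, or the per-TB instruction budget (max_insns) runs out. */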
static inline void gen_intermediate_code_internal(TranslationBlock *tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;
    unsigned int insn;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc);
                    gen_helper_debug(cpu_env);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        insn = cpu_ldl_code(env, dc->pc);

        cpu_tmp0 = tcg_temp_new();
        cpu_tmp64 = tcg_temp_new_i64();
        cpu_dst = tcg_temp_new();

        disas_sparc_insn(dc, insn);
        num_insns++;

        tcg_temp_free(cpu_dst);
        tcg_temp_free_i64(cpu_tmp64);
        tcg_temp_free(cpu_tmp0);

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);
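    /* Epilogue: when both pc and npc are known at translation time the TB
       can be chained directly to its successor; otherwise the values are
       written back to cpu_pc/cpu_npc and control returns to the main
       execution loop. */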
 exit_gen_loop:
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
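/* Two entry points into the translator: normal code generation, and the
   "search pc" mode (spc != 0) used when an exception needs to map a spot
   in the generated code back to a guest pc/npc pair; see
   restore_state_to_opc below. */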
void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}

void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
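/* Register the fixed TCG globals. Every named value here is backed by a
   field of CPUSPARCState, so generated code reads and writes the CPU
   state structure directly through these handles. */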
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[32] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUSPARCState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUSPARCState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUSPARCState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUSPARCState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUSPARCState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUSPARCState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUSPARCState, softint),
                                             "softint");
#else
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUSPARCState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
                                     "tbr");
#endif
        for (i = 1; i < 8; i++) {
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUSPARCState, gregs[i]),
                                              gregnames[i]);
        }
        for (i = 0; i < TARGET_DPREGS; i++) {
            cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                                offsetof(CPUSPARCState, fpr[i]),
                                                fregnames[i]);
        }

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
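/* Restore pc/npc after an exception, from the values recorded by the
   search-pc pass. The npc sentinels 1 and 2 match DYNAMIC_PC and JUMP_PC:
   1 means npc was already stored to the CPU state, 2 means it has to be
   picked from the recorded jump targets according to 'cond'. */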
void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        /* jump PC: use 'cond' and the jump targets of the translation */
        if (env->cond) {
            env->npc = gen_opc_jump_pc[0];
        } else {
            env->npc = gen_opc_jump_pc[1];
        }
    } else {
        env->npc = npc;
    }
}