4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
44 #define DYNAMIC_PC 1 /* dynamic pc value */
45 #define JUMP_PC 2 /* dynamic pc value which takes only two values
46 according to jump_pc[T2] */
48 /* global register indexes */
49 static TCGv cpu_env
, cpu_T
[3], cpu_regwptr
, cpu_cc_src
, cpu_cc_dst
, cpu_psr
;
50 static TCGv cpu_gregs
[8];
54 /* local register indexes (only used inside old micro ops) */
57 typedef struct DisasContext
{
58 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
59 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
60 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
64 struct TranslationBlock
*tb
;
67 typedef struct sparc_def_t sparc_def_t
;
70 const unsigned char *name
;
71 target_ulong iu_version
;
75 uint32_t mmu_ctpr_mask
;
76 uint32_t mmu_cxr_mask
;
77 uint32_t mmu_sfsr_mask
;
78 uint32_t mmu_trcr_mask
;
81 static const sparc_def_t
*cpu_sparc_find_by_name(const unsigned char *name
);
86 // This function uses non-native bit order
87 #define GET_FIELD(X, FROM, TO) \
88 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
90 // This function uses the order in the manuals, i.e. bit 0 is 2^0
91 #define GET_FIELD_SP(X, FROM, TO) \
92 GET_FIELD(X, 31 - (TO), 31 - (FROM))
94 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
95 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
99 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
100 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
102 #define FFPREG(r) (r)
103 #define DFPREG(r) (r & 0x1e)
104 #define QFPREG(r) (r & 0x1c)
/*
 * Sign-extend the low "len" bits of "x" to a full 32-bit signed value,
 * e.g. sign_extend(0xff, 8) == -1, sign_extend(0x7f, 8) == 127.
 *
 * The visible code shifted by "len" directly, which both fails to
 * sign-extend a len-bit field and left-shifts a signed value (UB on
 * overflow); shift by the complementary width in an unsigned type.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    /* Unsigned left shift avoids UB; arithmetic right shift extends. */
    return ((int)((uint32_t)x << len)) >> len;
}
113 #define IS_IMM (insn & (1<<13))
115 static void disas_sparc_insn(DisasContext
* dc
);
117 #ifdef TARGET_SPARC64
118 #define GEN32(func, NAME) \
119 static GenOpFunc * const NAME ## _table [64] = { \
120 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
121 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
122 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
123 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
124 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
125 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
126 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
127 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
128 NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0, \
129 NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0, \
130 NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0, \
131 NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0, \
133 static inline void func(int n) \
135 NAME ## _table[n](); \
138 #define GEN32(func, NAME) \
139 static GenOpFunc *const NAME ## _table [32] = { \
140 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
141 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
142 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
143 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
144 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
145 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
146 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
147 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
149 static inline void func(int n) \
151 NAME ## _table[n](); \
155 /* floating point registers moves */
156 GEN32(gen_op_load_fpr_FT0
, gen_op_load_fpr_FT0_fprf
);
157 GEN32(gen_op_load_fpr_FT1
, gen_op_load_fpr_FT1_fprf
);
158 GEN32(gen_op_store_FT0_fpr
, gen_op_store_FT0_fpr_fprf
);
159 GEN32(gen_op_store_FT1_fpr
, gen_op_store_FT1_fpr_fprf
);
161 GEN32(gen_op_load_fpr_DT0
, gen_op_load_fpr_DT0_fprf
);
162 GEN32(gen_op_load_fpr_DT1
, gen_op_load_fpr_DT1_fprf
);
163 GEN32(gen_op_store_DT0_fpr
, gen_op_store_DT0_fpr_fprf
);
164 GEN32(gen_op_store_DT1_fpr
, gen_op_store_DT1_fpr_fprf
);
166 #if defined(CONFIG_USER_ONLY)
167 GEN32(gen_op_load_fpr_QT0
, gen_op_load_fpr_QT0_fprf
);
168 GEN32(gen_op_load_fpr_QT1
, gen_op_load_fpr_QT1_fprf
);
169 GEN32(gen_op_store_QT0_fpr
, gen_op_store_QT0_fpr_fprf
);
170 GEN32(gen_op_store_QT1_fpr
, gen_op_store_QT1_fpr_fprf
);
174 #ifdef CONFIG_USER_ONLY
175 #define supervisor(dc) 0
176 #ifdef TARGET_SPARC64
177 #define hypervisor(dc) 0
179 #define gen_op_ldst(name) gen_op_##name##_raw()
181 #define supervisor(dc) (dc->mem_idx >= 1)
182 #ifdef TARGET_SPARC64
183 #define hypervisor(dc) (dc->mem_idx == 2)
184 #define OP_LD_TABLE(width) \
185 static GenOpFunc * const gen_op_##width[] = { \
186 &gen_op_##width##_user, \
187 &gen_op_##width##_kernel, \
188 &gen_op_##width##_hypv, \
191 #define OP_LD_TABLE(width) \
192 static GenOpFunc * const gen_op_##width[] = { \
193 &gen_op_##width##_user, \
194 &gen_op_##width##_kernel, \
197 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
200 #ifndef CONFIG_USER_ONLY
203 #endif /* __i386__ */
211 #define ABI32_MASK(addr) tcg_gen_andi_i64(addr, addr, 0xffffffffULL);
213 #define ABI32_MASK(addr)
/* Move the immediate "val" into the T1 pseudo register (cpu_T[1]). */
static inline void gen_movl_simm_T1(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
221 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
224 tcg_gen_movi_tl(tn
, 0);
226 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
228 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
/* Load SPARC register "reg" into the T0 pseudo register (cpu_T[0]). */
static inline void gen_movl_reg_T0(int reg)
{
    gen_movl_reg_TN(reg, cpu_T[0]);
}
/* Load SPARC register "reg" into the T1 pseudo register (cpu_T[1]). */
static inline void gen_movl_reg_T1(int reg)
{
    gen_movl_reg_TN(reg, cpu_T[1]);
}
/* Load SPARC register "reg" into the T2 pseudo register (cpu_T[2]). */
static inline void gen_movl_reg_T2(int reg)
{
    gen_movl_reg_TN(reg, cpu_T[2]);
}
248 #endif /* __i386__ */
249 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
254 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
256 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
/* Store the T0 pseudo register (cpu_T[0]) into SPARC register "reg". */
static inline void gen_movl_T0_reg(int reg)
{
    gen_movl_TN_reg(reg, cpu_T[0]);
}
/* Store the T1 pseudo register (cpu_T[1]) into SPARC register "reg". */
static inline void gen_movl_T1_reg(int reg)
{
    gen_movl_TN_reg(reg, cpu_T[1]);
}
/* Load a 32-bit CPU state field at byte "offset" into cpu_T[0]. */
static inline void gen_op_movl_T0_env(size_t offset)
{
    tcg_gen_ld_i32(cpu_T[0], cpu_env, offset);
}
/* Store cpu_T[0] into a 32-bit CPU state field at byte "offset". */
static inline void gen_op_movl_env_T0(size_t offset)
{
    tcg_gen_st_i32(cpu_T[0], cpu_env, offset);
}
/* Load a target_long-sized CPU state field at "offset" into cpu_T[0]. */
static inline void gen_op_movtl_T0_env(size_t offset)
{
    tcg_gen_ld_tl(cpu_T[0], cpu_env, offset);
}
/* Store cpu_T[0] into a target_long-sized CPU state field at "offset". */
static inline void gen_op_movtl_env_T0(size_t offset)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offset);
}
/* T0 = T0 + T1 (no condition codes updated). */
static inline void gen_op_add_T1_T0(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
/* T0 = T0 | T1 (no condition codes updated). */
static inline void gen_op_or_T1_T0(void)
{
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
/* T0 = T0 ^ T1 (no condition codes updated). */
static inline void gen_op_xor_T1_T0(void)
{
    tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
/* Store the immediate "pc" into env->pc through the cpu_tmp0 temporary. */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, pc));
}
/* Store the immediate "npc" into env->npc through the cpu_tmp0 temporary. */
static inline void gen_movl_npc_im(target_ulong npc)
{
    tcg_gen_movi_tl(cpu_tmp0, npc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, npc));
}
317 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
318 target_ulong pc
, target_ulong npc
)
320 TranslationBlock
*tb
;
323 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
324 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
)) {
325 /* jump to same page: we can use a direct jump */
326 tcg_gen_goto_tb(tb_num
);
328 gen_movl_npc_im(npc
);
329 tcg_gen_exit_tb((long)tb
+ tb_num
);
331 /* jump to another page: currently not optimized */
333 gen_movl_npc_im(npc
);
/* Extract the N (negative) flag — bit 23 of the PSR in "src" — into "reg". */
static inline void gen_mov_reg_N(TCGv reg, TCGv src)
{
    tcg_gen_shri_i32(reg, src, 23);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Extract the Z (zero) flag — bit 22 of the PSR in "src" — into "reg". */
static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
{
    tcg_gen_shri_i32(reg, src, 22);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Extract the V (overflow) flag — bit 21 of the PSR in "src" — into "reg". */
static inline void gen_mov_reg_V(TCGv reg, TCGv src)
{
    tcg_gen_shri_i32(reg, src, 21);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Extract the C (carry) flag — bit 20 of the PSR in "src" — into "reg". */
static inline void gen_mov_reg_C(TCGv reg, TCGv src)
{
    tcg_gen_shri_i32(reg, src, 20);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
363 static inline void gen_op_exception(int exception
)
367 r_except
= tcg_temp_new(TCG_TYPE_I32
);
368 tcg_gen_movi_i32(r_except
, exception
);
369 tcg_gen_helper_0_1(raise_exception
, r_except
);
/* Reset all computed condition-code flags (psr, plus xcc on sparc64). */
static inline void gen_cc_clear(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
#ifdef TARGET_SPARC64
    tcg_gen_movi_i32(cpu_xcc, 0);
#endif
}
382 env->psr |= PSR_ZERO;
383 if ((int32_t) T0 < 0)
386 static inline void gen_cc_NZ(TCGv dst
)
391 l1
= gen_new_label();
392 l2
= gen_new_label();
393 r_zero
= tcg_const_tl(0);
394 tcg_gen_brcond_i32(TCG_COND_NE
, dst
, r_zero
, l1
);
395 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_ZERO
);
397 tcg_gen_brcond_i32(TCG_COND_GE
, dst
, r_zero
, l2
);
398 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_NEG
);
400 #ifdef TARGET_SPARC64
404 l3
= gen_new_label();
405 l4
= gen_new_label();
406 tcg_gen_brcond_tl(TCG_COND_NE
, dst
, r_zero
, l3
);
407 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_ZERO
);
409 tcg_gen_brcond_tl(TCG_COND_GE
, dst
, r_zero
, l4
);
410 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_NEG
);
418 env->psr |= PSR_CARRY;
420 static inline void gen_cc_C_add(TCGv dst
, TCGv src1
)
424 l1
= gen_new_label();
425 tcg_gen_brcond_i32(TCG_COND_GEU
, dst
, src1
, l1
);
426 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
428 #ifdef TARGET_SPARC64
432 l2
= gen_new_label();
433 tcg_gen_brcond_tl(TCG_COND_GEU
, dst
, src1
, l2
);
434 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
441 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
444 static inline void gen_cc_V_add(TCGv dst
, TCGv src1
, TCGv src2
)
446 TCGv r_temp
, r_temp2
, r_temp3
, r_zero
;
449 l1
= gen_new_label();
451 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
452 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
453 r_temp3
= tcg_temp_new(TCG_TYPE_TL
);
454 r_zero
= tcg_const_tl(0);
455 tcg_gen_xor_tl(r_temp
, src1
, src2
);
456 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
457 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
458 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
459 tcg_gen_andi_tl(r_temp3
, r_temp
, (1 << 31));
460 tcg_gen_brcond_i32(TCG_COND_EQ
, r_temp3
, r_zero
, l1
);
461 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
463 #ifdef TARGET_SPARC64
467 l2
= gen_new_label();
468 tcg_gen_xor_tl(r_temp
, src1
, src2
);
469 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
470 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
471 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
472 tcg_gen_andi_tl(r_temp3
, r_temp
, (1ULL << 63));
473 tcg_gen_brcond_tl(TCG_COND_EQ
, r_temp3
, r_zero
, l2
);
474 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_OVF
);
480 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
482 TCGv r_temp
, r_temp2
, r_temp3
, r_zero
;
485 l1
= gen_new_label();
487 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
488 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
489 r_temp3
= tcg_temp_new(TCG_TYPE_TL
);
490 r_zero
= tcg_const_tl(0);
491 tcg_gen_xor_tl(r_temp
, src1
, src2
);
492 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
493 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
494 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
495 tcg_gen_andi_tl(r_temp3
, r_temp
, (1 << 31));
496 tcg_gen_brcond_i32(TCG_COND_EQ
, r_temp3
, r_zero
, l1
);
497 gen_op_exception(TT_TOVF
);
499 #ifdef TARGET_SPARC64
503 l2
= gen_new_label();
504 tcg_gen_xor_tl(r_temp
, src1
, src2
);
505 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
506 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
507 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
508 tcg_gen_andi_tl(r_temp3
, r_temp
, (1ULL << 63));
509 tcg_gen_brcond_tl(TCG_COND_EQ
, r_temp3
, r_zero
, l2
);
510 gen_op_exception(TT_TOVF
);
516 static inline void gen_cc_V_tag(TCGv src1
, TCGv src2
)
521 l1
= gen_new_label();
522 r_zero
= tcg_const_tl(0);
523 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
524 tcg_gen_or_tl(r_temp
, src1
, src2
);
525 tcg_gen_andi_tl(r_temp
, r_temp
, 0x3);
526 tcg_gen_brcond_tl(TCG_COND_EQ
, r_temp
, r_zero
, l1
);
527 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
531 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
536 l1
= gen_new_label();
537 r_zero
= tcg_const_tl(0);
538 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
539 tcg_gen_or_tl(r_temp
, src1
, src2
);
540 tcg_gen_andi_tl(r_temp
, r_temp
, 0x3);
541 tcg_gen_brcond_tl(TCG_COND_EQ
, r_temp
, r_zero
, l1
);
542 gen_op_exception(TT_TOVF
);
546 static inline void gen_op_add_T1_T0_cc(void)
548 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
549 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
552 gen_cc_C_add(cpu_T
[0], cpu_cc_src
);
553 gen_cc_V_add(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
556 static inline void gen_op_addx_T1_T0_cc(void)
558 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
559 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
560 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_tmp0
);
562 gen_cc_C_add(cpu_T
[0], cpu_cc_src
);
563 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
564 gen_cc_C_add(cpu_T
[0], cpu_cc_src
);
566 gen_cc_V_add(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
569 static inline void gen_op_tadd_T1_T0_cc(void)
571 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
572 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
575 gen_cc_C_add(cpu_T
[0], cpu_cc_src
);
576 gen_cc_V_add(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
577 gen_cc_V_tag(cpu_cc_src
, cpu_T
[1]);
580 static inline void gen_op_tadd_T1_T0_ccTV(void)
582 gen_tag_tv(cpu_T
[0], cpu_T
[1]);
583 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
584 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
585 gen_add_tv(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
588 gen_cc_C_add(cpu_T
[0], cpu_cc_src
);
593 env->psr |= PSR_CARRY;
595 static inline void gen_cc_C_sub(TCGv src1
, TCGv src2
)
599 l1
= gen_new_label();
600 tcg_gen_brcond_i32(TCG_COND_GEU
, src1
, src2
, l1
);
601 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
603 #ifdef TARGET_SPARC64
607 l2
= gen_new_label();
608 tcg_gen_brcond_tl(TCG_COND_GEU
, src1
, src2
, l2
);
609 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
616 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
619 static inline void gen_cc_V_sub(TCGv dst
, TCGv src1
, TCGv src2
)
621 TCGv r_temp
, r_temp2
, r_temp3
, r_zero
;
624 l1
= gen_new_label();
626 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
627 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
628 r_temp3
= tcg_temp_new(TCG_TYPE_TL
);
629 r_zero
= tcg_const_tl(0);
630 tcg_gen_xor_tl(r_temp
, src1
, src2
);
631 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
632 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
633 tcg_gen_andi_tl(r_temp3
, r_temp
, (1 << 31));
634 tcg_gen_brcond_i32(TCG_COND_EQ
, r_temp3
, r_zero
, l1
);
635 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
637 #ifdef TARGET_SPARC64
641 l2
= gen_new_label();
642 tcg_gen_xor_tl(r_temp
, src1
, src2
);
643 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
644 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
645 tcg_gen_andi_tl(r_temp3
, r_temp
, (1ULL << 63));
646 tcg_gen_brcond_tl(TCG_COND_EQ
, r_temp3
, r_zero
, l2
);
647 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_OVF
);
653 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
655 TCGv r_temp
, r_temp2
, r_temp3
, r_zero
;
658 l1
= gen_new_label();
660 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
661 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
662 r_temp3
= tcg_temp_new(TCG_TYPE_TL
);
663 r_zero
= tcg_const_tl(0);
664 tcg_gen_xor_tl(r_temp
, src1
, src2
);
665 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
666 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
667 tcg_gen_andi_tl(r_temp3
, r_temp
, (1 << 31));
668 tcg_gen_brcond_i32(TCG_COND_EQ
, r_temp3
, r_zero
, l1
);
669 gen_op_exception(TT_TOVF
);
671 #ifdef TARGET_SPARC64
675 l2
= gen_new_label();
676 tcg_gen_xor_tl(r_temp
, src1
, src2
);
677 tcg_gen_xor_tl(r_temp2
, src1
, dst
);
678 tcg_gen_and_tl(r_temp
, r_temp
, r_temp2
);
679 tcg_gen_andi_tl(r_temp3
, r_temp
, (1ULL << 63));
680 tcg_gen_brcond_tl(TCG_COND_EQ
, r_temp3
, r_zero
, l2
);
681 gen_op_exception(TT_TOVF
);
687 static inline void gen_op_sub_T1_T0_cc(void)
689 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
690 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
693 gen_cc_C_sub(cpu_cc_src
, cpu_T
[1]);
694 gen_cc_V_sub(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
697 static inline void gen_op_subx_T1_T0_cc(void)
699 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
700 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
701 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_tmp0
);
703 gen_cc_C_sub(cpu_T
[0], cpu_cc_src
);
704 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
705 gen_cc_C_sub(cpu_T
[0], cpu_cc_src
);
707 gen_cc_V_sub(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
710 static inline void gen_op_tsub_T1_T0_cc(void)
712 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
713 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
716 gen_cc_C_sub(cpu_cc_src
, cpu_T
[1]);
717 gen_cc_V_sub(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
718 gen_cc_V_tag(cpu_cc_src
, cpu_T
[1]);
721 static inline void gen_op_tsub_T1_T0_ccTV(void)
723 gen_tag_tv(cpu_T
[0], cpu_T
[1]);
724 tcg_gen_mov_tl(cpu_cc_src
, cpu_T
[0]);
725 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
726 gen_sub_tv(cpu_T
[0], cpu_cc_src
, cpu_T
[1]);
729 gen_cc_C_sub(cpu_cc_src
, cpu_T
[1]);
732 #ifdef TARGET_SPARC64
733 static inline void gen_trap_ifdivzero_i64(TCGv divisor
)
737 l1
= gen_new_label();
738 tcg_gen_brcond_i64(TCG_COND_NE
, divisor
, tcg_const_tl(0), l1
);
739 gen_op_exception(TT_DIV_ZERO
);
743 static inline void gen_op_sdivx_T1_T0(void)
747 l1
= gen_new_label();
748 l2
= gen_new_label();
749 gen_trap_ifdivzero_i64(cpu_T
[1]);
750 tcg_gen_brcond_i64(TCG_COND_NE
, cpu_T
[0], tcg_const_i64(INT64_MIN
), l1
);
751 tcg_gen_brcond_i64(TCG_COND_NE
, cpu_T
[1], tcg_const_i64(-1), l1
);
752 tcg_gen_movi_i64(cpu_T
[0], INT64_MIN
);
753 gen_op_jmp_label(l2
);
755 tcg_gen_div_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
760 static inline void gen_op_div_cc(void)
767 l1
= gen_new_label();
768 r_zero
= tcg_const_tl(0);
769 tcg_gen_brcond_i32(TCG_COND_EQ
, cpu_T
[1], r_zero
, l1
);
770 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
774 static inline void gen_op_logic_T0_cc(void)
/* Branch always: dst = 1. */
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
/* Branch on equal: dst = Z. */
static inline void gen_op_eval_be(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
}
793 static inline void gen_op_eval_ble(TCGv dst
, TCGv src
)
797 r_flag
= tcg_temp_new(TCG_TYPE_TL
);
798 gen_mov_reg_N(r_flag
, src
);
799 gen_mov_reg_V(dst
, src
);
800 tcg_gen_xor_tl(dst
, dst
, r_flag
);
801 gen_mov_reg_Z(r_flag
, src
);
802 tcg_gen_or_tl(dst
, dst
, r_flag
);
806 static inline void gen_op_eval_bl(TCGv dst
, TCGv src
)
810 r_V
= tcg_temp_new(TCG_TYPE_TL
);
811 gen_mov_reg_V(r_V
, src
);
812 gen_mov_reg_N(dst
, src
);
813 tcg_gen_xor_tl(dst
, dst
, r_V
);
817 static inline void gen_op_eval_bleu(TCGv dst
, TCGv src
)
821 r_Z
= tcg_temp_new(TCG_TYPE_TL
);
822 gen_mov_reg_Z(r_Z
, src
);
823 gen_mov_reg_C(dst
, src
);
824 tcg_gen_or_tl(dst
, dst
, r_Z
);
/* Branch on carry set: dst = C. */
static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
}
/* Branch on overflow set: dst = V. */
static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
}
/* Branch never: dst = 0. */
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
/* Branch on negative: dst = N. */
static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
}
/* Branch on not equal: dst = !Z. */
static inline void gen_op_eval_bne(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
859 static inline void gen_op_eval_bg(TCGv dst
, TCGv src
)
863 r_flag
= tcg_temp_new(TCG_TYPE_TL
);
864 gen_mov_reg_N(r_flag
, src
);
865 gen_mov_reg_V(dst
, src
);
866 tcg_gen_xor_tl(dst
, dst
, r_flag
);
867 gen_mov_reg_Z(r_flag
, src
);
868 tcg_gen_or_tl(dst
, dst
, r_flag
);
869 tcg_gen_xori_tl(dst
, dst
, 0x1);
873 static inline void gen_op_eval_bge(TCGv dst
, TCGv src
)
877 r_V
= tcg_temp_new(TCG_TYPE_TL
);
878 gen_mov_reg_V(r_V
, src
);
879 gen_mov_reg_N(dst
, src
);
880 tcg_gen_xor_tl(dst
, dst
, r_V
);
881 tcg_gen_xori_tl(dst
, dst
, 0x1);
885 static inline void gen_op_eval_bgu(TCGv dst
, TCGv src
)
889 r_Z
= tcg_temp_new(TCG_TYPE_TL
);
890 gen_mov_reg_Z(r_Z
, src
);
891 gen_mov_reg_C(dst
, src
);
892 tcg_gen_or_tl(dst
, dst
, r_Z
);
893 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* Branch on carry clear: dst = !C. */
static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/* Branch on positive: dst = !N. */
static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/* Branch on overflow clear: dst = !V. */
static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
918 FPSR bit field FCC1 | FCC0:
/* Extract FCC0 — FSR bit (10 + fcc_offset) — from "src" into "reg". */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_i32(reg, src, 10 + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
/* Extract FCC1 — FSR bit (11 + fcc_offset) — from "src" into "reg". */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_i32(reg, src, 11 + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
939 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
940 unsigned int fcc_offset
)
944 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
945 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
946 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
947 tcg_gen_or_tl(dst
, dst
, r_fcc1
);
950 // 1 or 2: FCC0 ^ FCC1
951 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
952 unsigned int fcc_offset
)
956 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
957 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
958 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
959 tcg_gen_xor_tl(dst
, dst
, r_fcc1
);
/* FP branch on unordered or less (cond 1 or 3): dst = FCC0. */
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
970 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
971 unsigned int fcc_offset
)
975 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
976 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
977 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
978 tcg_gen_xori_tl(r_fcc1
, r_fcc1
, 0x1);
979 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
/* FP branch on unordered or greater (cond 2 or 3): dst = FCC1. */
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
990 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
991 unsigned int fcc_offset
)
995 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
996 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
997 tcg_gen_xori_tl(dst
, dst
, 0x1);
998 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
999 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
1003 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
1004 unsigned int fcc_offset
)
1008 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
1009 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1010 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
1011 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
1014 // 0: !(FCC0 | FCC1)
1015 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
1016 unsigned int fcc_offset
)
1020 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
1021 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1022 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
1023 tcg_gen_or_tl(dst
, dst
, r_fcc1
);
1024 tcg_gen_xori_tl(dst
, dst
, 0x1);
1027 // 0 or 3: !(FCC0 ^ FCC1)
1028 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
1029 unsigned int fcc_offset
)
1033 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
1034 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1035 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
1036 tcg_gen_xor_tl(dst
, dst
, r_fcc1
);
1037 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* FP branch on greater or equal (cond 0 or 2): dst = !FCC0. */
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1048 // !1: !(FCC0 & !FCC1)
1049 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
1050 unsigned int fcc_offset
)
1054 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
1055 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1056 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
1057 tcg_gen_xori_tl(r_fcc1
, r_fcc1
, 0x1);
1058 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
1059 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* FP branch on less or equal (cond 0 or 1): dst = !FCC1. */
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1070 // !2: !(!FCC0 & FCC1)
1071 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
1072 unsigned int fcc_offset
)
1076 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
1077 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1078 tcg_gen_xori_tl(dst
, dst
, 0x1);
1079 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
1080 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
1081 tcg_gen_xori_tl(dst
, dst
, 0x1);
1084 // !3: !(FCC0 & FCC1)
1085 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1086 unsigned int fcc_offset
)
1090 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
1091 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1092 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
1093 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
1094 tcg_gen_xori_tl(dst
, dst
, 0x1);
1097 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1098 target_ulong pc2
, TCGv r_cond
)
1103 l1
= gen_new_label();
1104 r_zero
= tcg_const_tl(0);
1106 tcg_gen_brcond_tl(TCG_COND_EQ
, r_cond
, r_zero
, l1
);
1108 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1111 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1114 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1115 target_ulong pc2
, TCGv r_cond
)
1120 l1
= gen_new_label();
1121 r_zero
= tcg_const_tl(0);
1123 tcg_gen_brcond_tl(TCG_COND_EQ
, r_cond
, r_zero
, l1
);
1125 gen_goto_tb(dc
, 0, pc2
, pc1
);
1128 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1131 static inline void gen_branch(DisasContext
*dc
, target_ulong pc
,
1134 gen_goto_tb(dc
, 0, pc
, npc
);
1137 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1143 l1
= gen_new_label();
1144 l2
= gen_new_label();
1145 r_zero
= tcg_const_tl(0);
1147 tcg_gen_brcond_tl(TCG_COND_EQ
, r_cond
, r_zero
, l1
);
1149 gen_movl_npc_im(npc1
);
1150 gen_op_jmp_label(l2
);
1153 gen_movl_npc_im(npc2
);
1157 /* call this function before using T2 as it may have been set for a jump */
/*
 * If npc is in the JUMP_PC state, cpu_T[2] still holds the pending
 * branch condition; materialize the branch into env->npc now so T2
 * can be reused, and downgrade npc to DYNAMIC_PC.
 */
static inline void flush_T2(DisasContext * dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
        dc->npc = DYNAMIC_PC;
    }
}
/*
 * Make env->npc valid: resolve a pending JUMP_PC through the generic
 * branch, or store a statically-known npc value.  If npc is already
 * DYNAMIC_PC, env->npc is up to date and nothing is emitted.
 */
static inline void save_npc(DisasContext * dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        gen_movl_npc_im(dc->npc);
    }
}
1176 static inline void save_state(DisasContext
* dc
)
1182 static inline void gen_mov_pc_npc(DisasContext
* dc
)
1184 if (dc
->npc
== JUMP_PC
) {
1185 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_T
[2]);
1186 tcg_gen_ld_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, npc
));
1187 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, pc
));
1188 dc
->pc
= DYNAMIC_PC
;
1189 } else if (dc
->npc
== DYNAMIC_PC
) {
1190 tcg_gen_ld_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, npc
));
1191 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, pc
));
1192 dc
->pc
= DYNAMIC_PC
;
/* Advance to the next instruction: env->pc = env->npc; env->npc += 4. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, 4);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
}
1206 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1210 #ifdef TARGET_SPARC64
1220 gen_op_eval_bn(r_dst
);
1223 gen_op_eval_be(r_dst
, r_src
);
1226 gen_op_eval_ble(r_dst
, r_src
);
1229 gen_op_eval_bl(r_dst
, r_src
);
1232 gen_op_eval_bleu(r_dst
, r_src
);
1235 gen_op_eval_bcs(r_dst
, r_src
);
1238 gen_op_eval_bneg(r_dst
, r_src
);
1241 gen_op_eval_bvs(r_dst
, r_src
);
1244 gen_op_eval_ba(r_dst
);
1247 gen_op_eval_bne(r_dst
, r_src
);
1250 gen_op_eval_bg(r_dst
, r_src
);
1253 gen_op_eval_bge(r_dst
, r_src
);
1256 gen_op_eval_bgu(r_dst
, r_src
);
1259 gen_op_eval_bcc(r_dst
, r_src
);
1262 gen_op_eval_bpos(r_dst
, r_src
);
1265 gen_op_eval_bvc(r_dst
, r_src
);
1270 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1273 unsigned int offset
;
1275 r_src
= tcg_temp_new(TCG_TYPE_TL
);
1276 tcg_gen_ld_tl(r_src
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
1296 gen_op_eval_bn(r_dst
);
1299 gen_op_eval_fbne(r_dst
, r_src
, offset
);
1302 gen_op_eval_fblg(r_dst
, r_src
, offset
);
1305 gen_op_eval_fbul(r_dst
, r_src
, offset
);
1308 gen_op_eval_fbl(r_dst
, r_src
, offset
);
1311 gen_op_eval_fbug(r_dst
, r_src
, offset
);
1314 gen_op_eval_fbg(r_dst
, r_src
, offset
);
1317 gen_op_eval_fbu(r_dst
, r_src
, offset
);
1320 gen_op_eval_ba(r_dst
);
1323 gen_op_eval_fbe(r_dst
, r_src
, offset
);
1326 gen_op_eval_fbue(r_dst
, r_src
, offset
);
1329 gen_op_eval_fbge(r_dst
, r_src
, offset
);
1332 gen_op_eval_fbuge(r_dst
, r_src
, offset
);
1335 gen_op_eval_fble(r_dst
, r_src
, offset
);
1338 gen_op_eval_fbule(r_dst
, r_src
, offset
);
1341 gen_op_eval_fbo(r_dst
, r_src
, offset
);
1346 #ifdef TARGET_SPARC64
1348 static const int gen_tcg_cond_reg
[8] = {
1359 static inline void gen_cond_reg(TCGv r_dst
, int cond
)
1364 l1
= gen_new_label();
1365 r_zero
= tcg_const_tl(0);
1366 tcg_gen_mov_tl(r_dst
, r_zero
);
1367 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
1368 tcg_gen_movi_tl(r_dst
, 1);
1373 /* XXX: potentially incorrect if dynamic npc */
1374 static void do_branch(DisasContext
* dc
, int32_t offset
, uint32_t insn
, int cc
)
1376 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1377 target_ulong target
= dc
->pc
+ offset
;
1380 /* unconditional not taken */
1382 dc
->pc
= dc
->npc
+ 4;
1383 dc
->npc
= dc
->pc
+ 4;
1386 dc
->npc
= dc
->pc
+ 4;
1388 } else if (cond
== 0x8) {
1389 /* unconditional taken */
1392 dc
->npc
= dc
->pc
+ 4;
1399 gen_cond(cpu_T
[2], cc
, cond
);
1401 gen_branch_a(dc
, target
, dc
->npc
, cpu_T
[2]);
1405 dc
->jump_pc
[0] = target
;
1406 dc
->jump_pc
[1] = dc
->npc
+ 4;
1412 /* XXX: potentially incorrect if dynamic npc */
1413 static void do_fbranch(DisasContext
* dc
, int32_t offset
, uint32_t insn
, int cc
)
1415 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1416 target_ulong target
= dc
->pc
+ offset
;
1419 /* unconditional not taken */
1421 dc
->pc
= dc
->npc
+ 4;
1422 dc
->npc
= dc
->pc
+ 4;
1425 dc
->npc
= dc
->pc
+ 4;
1427 } else if (cond
== 0x8) {
1428 /* unconditional taken */
1431 dc
->npc
= dc
->pc
+ 4;
1438 gen_fcond(cpu_T
[2], cc
, cond
);
1440 gen_branch_a(dc
, target
, dc
->npc
, cpu_T
[2]);
1444 dc
->jump_pc
[0] = target
;
1445 dc
->jump_pc
[1] = dc
->npc
+ 4;
1451 #ifdef TARGET_SPARC64
1452 /* XXX: potentially incorrect if dynamic npc */
1453 static void do_branch_reg(DisasContext
* dc
, int32_t offset
, uint32_t insn
)
1455 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1456 target_ulong target
= dc
->pc
+ offset
;
1459 gen_cond_reg(cpu_T
[2], cond
);
1461 gen_branch_a(dc
, target
, dc
->npc
, cpu_T
[2]);
1465 dc
->jump_pc
[0] = target
;
1466 dc
->jump_pc
[1] = dc
->npc
+ 4;
1471 static GenOpFunc
* const gen_fcmps
[4] = {
1478 static GenOpFunc
* const gen_fcmpd
[4] = {
1485 #if defined(CONFIG_USER_ONLY)
1486 static GenOpFunc
* const gen_fcmpq
[4] = {
1494 static GenOpFunc
* const gen_fcmpes
[4] = {
1501 static GenOpFunc
* const gen_fcmped
[4] = {
1508 #if defined(CONFIG_USER_ONLY)
1509 static GenOpFunc
* const gen_fcmpeq
[4] = {
/* Single-precision FP compare, dispatched by condition-code set "fccno". */
static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmps[fccno]);
}
/* Emit an fcmpd call dispatched on the V9 %fcc unit number (0-3). */
static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpd[fccno]);
}
1527 #if defined(CONFIG_USER_ONLY)
/* Emit an fcmpq (quad) call dispatched on the V9 %fcc unit number;
   only built for user mode (quad FP unsupported in system emulation). */
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpq[fccno]);
}
/* Emit an fcmpes (signaling compare) call dispatched on the %fcc unit. */
static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpes[fccno]);
}
/* Emit an fcmped (signaling compare, double) call dispatched on the %fcc unit. */
static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmped[fccno]);
}
1544 #if defined(CONFIG_USER_ONLY)
/* Emit an fcmpeq (signaling compare, quad) call dispatched on the %fcc unit;
   user mode only. */
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
}
/* Pre-V9 variant: a single %fcc exists, so fccno is ignored. */
static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmps);
}
/* Pre-V9 variant: a single %fcc exists, so fccno is ignored. */
static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpd);
}
1563 #if defined(CONFIG_USER_ONLY)
/* Pre-V9 quad compare; fccno is ignored.  User mode only. */
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpq);
}
/* Pre-V9 signaling single compare; fccno is ignored. */
static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpes);
}
/* Pre-V9 signaling double compare; fccno is ignored. */
static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmped);
}
1580 #if defined(CONFIG_USER_ONLY)
/* Pre-V9 signaling quad compare; fccno is ignored.  User mode only. */
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpeq);
}
/* Raise a floating-point exception: set the FTT field of env->fsr to
   fsr_flags (clearing the old FTT bits first) and generate TT_FP_EXCP. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, ~FSR_FTT_MASK);
    tcg_gen_ori_tl(cpu_tmp0, cpu_tmp0, fsr_flags);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
    gen_op_exception(TT_FP_EXCP);
}
/* Generate an fp-disabled trap when the FPU is off; returns nonzero if a
   trap was emitted so the caller can stop translating this insn.  In user
   mode the FPU is always considered enabled.
   NOTE(review): one or more lines inside the if-body (e.g. a state-save
   call and the return values) were lost in extraction and are
   reconstructed here — verify against the original. */
static int gen_trap_ifnofpu(DisasContext * dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_op_exception(TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
/* Clear the FTT and current-exception (cexc) fields of env->fsr before
   executing a new FP instruction. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, ~(FSR_FTT_MASK | FSR_CEXC_MASK));
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
}
/* Reset the softfloat accumulated exception flags via a helper call. */
static inline void gen_clear_float_exceptions(void)
{
    tcg_gen_helper_0_0(helper_clear_float_exceptions);
}
1624 #ifdef TARGET_SPARC64
/* V9: emit a load through an alternate address space.  Address in cpu_T[0];
   result in cpu_T[1].  Immediate form uses the %asi register from env,
   register form takes the asi field from the instruction.
   NOTE(review): local declarations and the IS_IMM branch/brace lines were
   lost in extraction and are reconstructed — verify against original. */
static inline void gen_ld_asi(int insn, int size, int sign)
{
    int asi, offset;
    TCGv r_size, r_sign;

    r_size = tcg_temp_new(TCG_TYPE_I32);
    r_sign = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_movi_i32(r_size, size);
    tcg_gen_movi_i32(r_sign, sign);
    if (IS_IMM) {
        /* immediate displacement: asi comes from the %asi register */
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
        tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        /* asi is encoded in the instruction */
        asi = GET_FIELD(insn, 19, 26);
        tcg_gen_movi_i32(cpu_T[1], asi);
    }
    tcg_gen_helper_1_4(helper_ld_asi, cpu_T[1], cpu_T[0], cpu_T[1], r_size,
                       r_sign);
}
/* V9: emit a store through an alternate address space.  Address in
   cpu_T[0], value in cpu_T[1].
   NOTE(review): declarations and IS_IMM branch lines reconstructed after
   extraction loss — verify against original. */
static inline void gen_st_asi(int insn, int size)
{
    int asi, offset;
    TCGv r_asi, r_size;

    r_asi = tcg_temp_new(TCG_TYPE_I32);
    r_size = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_movi_i32(r_size, size);
    if (IS_IMM) {
        /* immediate displacement: asi comes from the %asi register */
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
        tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        tcg_gen_movi_i32(r_asi, asi);
    }
    tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_asi, r_size);
}
/* V9: FP load through an alternate address space; rd selects the target
   FP register inside the helper.
   NOTE(review): declarations and IS_IMM branch lines reconstructed after
   extraction loss — verify against original. */
static inline void gen_ldf_asi(int insn, int size, int rd)
{
    int asi, offset;
    TCGv r_asi, r_size, r_rd;

    r_asi = tcg_temp_new(TCG_TYPE_I32);
    r_size = tcg_temp_new(TCG_TYPE_I32);
    r_rd = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_movi_i32(r_size, size);
    tcg_gen_movi_i32(r_rd, rd);
    if (IS_IMM) {
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
        tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        tcg_gen_movi_i32(r_asi, asi);
    }
    tcg_gen_helper_0_4(helper_ldf_asi, cpu_T[0], r_asi, r_size, r_rd);
}
/* V9: FP store through an alternate address space; rd selects the source
   FP register inside the helper.
   NOTE(review): declarations and IS_IMM branch lines reconstructed after
   extraction loss — verify against original. */
static inline void gen_stf_asi(int insn, int size, int rd)
{
    int asi, offset;
    TCGv r_asi, r_size, r_rd;

    r_asi = tcg_temp_new(TCG_TYPE_I32);
    r_size = tcg_temp_new(TCG_TYPE_I32);
    r_rd = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_movi_i32(r_size, size);
    tcg_gen_movi_i32(r_rd, rd);
    if (IS_IMM) {
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
        tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        tcg_gen_movi_i32(r_asi, asi);
    }
    tcg_gen_helper_0_4(helper_stf_asi, cpu_T[0], r_asi, r_size, r_rd);
}
/* V9: SWAPA — atomic-style 32-bit load+store through an alternate ASI.
   Loads the old memory word into r_temp, stores cpu_T[1], then moves the
   old value into cpu_T[1].
   NOTE(review): declarations and IS_IMM branch lines reconstructed after
   extraction loss.  Also note the store passes (r_size, r_sign) as its
   last arguments where the ld takes (asi, size, sign) — looks suspicious;
   confirm helper_st_asi's expected (addr, val, asi, size) order. */
static inline void gen_swap_asi(int insn)
{
    int asi, offset;
    TCGv r_size, r_sign, r_temp;

    r_size = tcg_temp_new(TCG_TYPE_I32);
    r_sign = tcg_temp_new(TCG_TYPE_I32);
    r_temp = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_movi_i32(r_size, 4);
    tcg_gen_movi_i32(r_sign, 0);
    if (IS_IMM) {
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
        tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        tcg_gen_movi_i32(cpu_T[1], asi);
    }
    tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
                       r_sign);
    tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
    tcg_gen_mov_i32(cpu_T[1], r_temp);
}
/* V9: LDDA — 64-bit alternate-space load split into two 32-bit halves:
   high word in cpu_T[0], low word in cpu_T[1].
   NOTE(review): declarations and IS_IMM branch lines reconstructed after
   extraction loss — verify against original. */
static inline void gen_ldda_asi(int insn)
{
    int asi, offset;
    TCGv r_size, r_sign, r_dword;

    r_size = tcg_temp_new(TCG_TYPE_I32);
    r_sign = tcg_temp_new(TCG_TYPE_I32);
    r_dword = tcg_temp_new(TCG_TYPE_I64);
    tcg_gen_movi_i32(r_size, 8);
    tcg_gen_movi_i32(r_sign, 0);
    if (IS_IMM) {
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
        tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        tcg_gen_movi_i32(cpu_T[1], asi);
    }
    tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
                       r_sign);
    /* split the 64-bit result into the T0/T1 register pair */
    tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
    tcg_gen_shri_i64(r_dword, r_dword, 32);
    tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
}
/* V9: CASA — 32-bit compare-and-swap through an alternate ASI.  Compare
   value comes from register rd (r_val1), swap value from cpu_T[1], address
   from cpu_T[0]; the old memory value lands in cpu_T[1].
   NOTE(review): declarations, IS_IMM branch lines and the tail of the
   helper call were lost in extraction and reconstructed — verify. */
static inline void gen_cas_asi(int insn, int rd)
{
    int asi, offset;
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_I32);
    r_asi = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd, r_val1);
    if (IS_IMM) {
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
        tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        tcg_gen_movi_i32(r_asi, asi);
    }
    tcg_gen_helper_1_4(helper_cas_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
                       r_asi);
}
/* V9: CASXA — 64-bit compare-and-swap through an alternate ASI; same
   shape as gen_cas_asi but with a 64-bit compare value and helper.
   NOTE(review): declarations, IS_IMM branch lines and the tail of the
   helper call were lost in extraction and reconstructed — verify. */
static inline void gen_casx_asi(int insn, int rd)
{
    int asi, offset;
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_I64);
    r_asi = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd, r_val1);
    if (IS_IMM) {
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
        tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        tcg_gen_movi_i32(r_asi, asi);
    }
    tcg_gen_helper_1_4(helper_casx_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
                       r_asi);
}
1796 #elif !defined(CONFIG_USER_ONLY)
/* Pre-V9 system-mode variant: alternate-space load; asi always comes from
   the instruction (no %asi register on SPARCv8).  Result in cpu_T[1].
   NOTE(review): declarations/braces reconstructed after extraction loss. */
static inline void gen_ld_asi(int insn, int size, int sign)
{
    int asi;
    TCGv r_size, r_sign, r_dword;

    r_size = tcg_temp_new(TCG_TYPE_I32);
    r_sign = tcg_temp_new(TCG_TYPE_I32);
    r_dword = tcg_temp_new(TCG_TYPE_I64);
    tcg_gen_movi_i32(r_size, size);
    tcg_gen_movi_i32(r_sign, sign);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_movi_i32(cpu_T[1], asi);
    tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
                       r_sign);
    tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
}
/* Pre-V9 system-mode variant: alternate-space store of cpu_T[1] (zero-
   extended to 64 bits for the helper) at address cpu_T[0].
   NOTE(review): declarations/braces reconstructed after extraction loss. */
static inline void gen_st_asi(int insn, int size)
{
    int asi;
    TCGv r_dword, r_asi, r_size;

    r_dword = tcg_temp_new(TCG_TYPE_I64);
    tcg_gen_extu_i32_i64(r_dword, cpu_T[1]);
    r_asi = tcg_temp_new(TCG_TYPE_I32);
    r_size = tcg_temp_new(TCG_TYPE_I32);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_movi_i32(r_asi, asi);
    tcg_gen_movi_i32(r_size, size);
    tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
}
/* Pre-V9 system-mode SWAPA: load old word into r_temp, store cpu_T[1],
   return old value in cpu_T[1].
   NOTE(review): declarations/braces reconstructed after extraction loss;
   the store's (r_size, r_sign) trailing arguments look suspicious versus
   an (asi, size) pair — confirm helper_st_asi's signature. */
static inline void gen_swap_asi(int insn)
{
    int asi;
    TCGv r_size, r_sign, r_temp;

    r_size = tcg_temp_new(TCG_TYPE_I32);
    r_sign = tcg_temp_new(TCG_TYPE_I32);
    r_temp = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_movi_i32(r_size, 4);
    tcg_gen_movi_i32(r_sign, 0);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_movi_i32(cpu_T[1], asi);
    tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
                       r_sign);
    tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
    tcg_gen_mov_i32(cpu_T[1], r_temp);
}
/* Pre-V9 system-mode LDDA: 64-bit alternate-space load split into the
   cpu_T[0]/cpu_T[1] pair (high word first).
   NOTE(review): declarations/braces reconstructed after extraction loss. */
static inline void gen_ldda_asi(int insn)
{
    int asi;
    TCGv r_size, r_sign, r_dword;

    r_size = tcg_temp_new(TCG_TYPE_I32);
    r_sign = tcg_temp_new(TCG_TYPE_I32);
    r_dword = tcg_temp_new(TCG_TYPE_I64);
    tcg_gen_movi_i32(r_size, 8);
    tcg_gen_movi_i32(r_sign, 0);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_movi_i32(cpu_T[1], asi);
    tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
                       r_sign);
    /* split the 64-bit result into the T0/T1 register pair */
    tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
    tcg_gen_shri_i64(r_dword, r_dword, 32);
    tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
}
1868 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load-store unsigned byte through an alternate ASI — load the
   old byte (via gen_ld_asi) then store 0xff at the same address.
   NOTE(review): declarations/braces reconstructed after extraction loss. */
static inline void gen_ldstub_asi(int insn)
{
    int asi;
    TCGv r_dword, r_asi, r_size;

    /* load the old byte into cpu_T[1] */
    gen_ld_asi(insn, 1, 0);

    r_dword = tcg_temp_new(TCG_TYPE_I64);
    r_asi = tcg_temp_new(TCG_TYPE_I32);
    r_size = tcg_temp_new(TCG_TYPE_I32);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_movi_i32(r_dword, 0xff);
    tcg_gen_movi_i32(r_asi, asi);
    tcg_gen_movi_i32(r_size, 1);
    tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
}
1887 /* before an instruction, dc->pc must be static */
1888 static void disas_sparc_insn(DisasContext
* dc
)
1890 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1892 insn
= ldl_code(dc
->pc
);
1893 opc
= GET_FIELD(insn
, 0, 1);
1895 rd
= GET_FIELD(insn
, 2, 6);
1897 case 0: /* branches/sethi */
1899 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1902 #ifdef TARGET_SPARC64
1903 case 0x1: /* V9 BPcc */
1907 target
= GET_FIELD_SP(insn
, 0, 18);
1908 target
= sign_extend(target
, 18);
1910 cc
= GET_FIELD_SP(insn
, 20, 21);
1912 do_branch(dc
, target
, insn
, 0);
1914 do_branch(dc
, target
, insn
, 1);
1919 case 0x3: /* V9 BPr */
1921 target
= GET_FIELD_SP(insn
, 0, 13) |
1922 (GET_FIELD_SP(insn
, 20, 21) << 14);
1923 target
= sign_extend(target
, 16);
1925 rs1
= GET_FIELD(insn
, 13, 17);
1926 gen_movl_reg_T0(rs1
);
1927 do_branch_reg(dc
, target
, insn
);
1930 case 0x5: /* V9 FBPcc */
1932 int cc
= GET_FIELD_SP(insn
, 20, 21);
1933 if (gen_trap_ifnofpu(dc
))
1935 target
= GET_FIELD_SP(insn
, 0, 18);
1936 target
= sign_extend(target
, 19);
1938 do_fbranch(dc
, target
, insn
, cc
);
1942 case 0x7: /* CBN+x */
1947 case 0x2: /* BN+x */
1949 target
= GET_FIELD(insn
, 10, 31);
1950 target
= sign_extend(target
, 22);
1952 do_branch(dc
, target
, insn
, 0);
1955 case 0x6: /* FBN+x */
1957 if (gen_trap_ifnofpu(dc
))
1959 target
= GET_FIELD(insn
, 10, 31);
1960 target
= sign_extend(target
, 22);
1962 do_fbranch(dc
, target
, insn
, 0);
1965 case 0x4: /* SETHI */
1970 uint32_t value
= GET_FIELD(insn
, 10, 31);
1971 tcg_gen_movi_tl(cpu_T
[0], value
<< 10);
1972 gen_movl_T0_reg(rd
);
1977 case 0x0: /* UNIMPL */
1986 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
1988 tcg_gen_movi_tl(cpu_T
[0], dc
->pc
);
1989 gen_movl_T0_reg(15);
1995 case 2: /* FPU & Logical Operations */
1997 unsigned int xop
= GET_FIELD(insn
, 7, 12);
1998 if (xop
== 0x3a) { /* generate trap */
2001 rs1
= GET_FIELD(insn
, 13, 17);
2002 gen_movl_reg_T0(rs1
);
2004 rs2
= GET_FIELD(insn
, 25, 31);
2005 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], rs2
);
2007 rs2
= GET_FIELD(insn
, 27, 31);
2011 gen_movl_reg_T1(rs2
);
2017 cond
= GET_FIELD(insn
, 3, 6);
2020 tcg_gen_helper_0_1(helper_trap
, cpu_T
[0]);
2021 } else if (cond
!= 0) {
2022 TCGv r_cond
= tcg_temp_new(TCG_TYPE_TL
);
2023 #ifdef TARGET_SPARC64
2025 int cc
= GET_FIELD_SP(insn
, 11, 12);
2029 gen_cond(r_cond
, 0, cond
);
2031 gen_cond(r_cond
, 1, cond
);
2036 gen_cond(r_cond
, 0, cond
);
2038 tcg_gen_helper_0_2(helper_trapcc
, cpu_T
[0], r_cond
);
2044 } else if (xop
== 0x28) {
2045 rs1
= GET_FIELD(insn
, 13, 17);
2048 #ifndef TARGET_SPARC64
2049 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2050 manual, rdy on the microSPARC
2052 case 0x0f: /* stbar in the SPARCv8 manual,
2053 rdy on the microSPARC II */
2054 case 0x10 ... 0x1f: /* implementation-dependent in the
2055 SPARCv8 manual, rdy on the
2058 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, y
));
2059 gen_movl_T0_reg(rd
);
2061 #ifdef TARGET_SPARC64
2062 case 0x2: /* V9 rdccr */
2064 gen_movl_T0_reg(rd
);
2066 case 0x3: /* V9 rdasi */
2067 gen_op_movl_T0_env(offsetof(CPUSPARCState
, asi
));
2068 gen_movl_T0_reg(rd
);
2070 case 0x4: /* V9 rdtick */
2074 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2075 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2076 offsetof(CPUState
, tick
));
2077 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_T
[0],
2079 gen_movl_T0_reg(rd
);
2082 case 0x5: /* V9 rdpc */
2083 tcg_gen_movi_tl(cpu_T
[0], dc
->pc
);
2084 gen_movl_T0_reg(rd
);
2086 case 0x6: /* V9 rdfprs */
2087 gen_op_movl_T0_env(offsetof(CPUSPARCState
, fprs
));
2088 gen_movl_T0_reg(rd
);
2090 case 0xf: /* V9 membar */
2091 break; /* no effect */
2092 case 0x13: /* Graphics Status */
2093 if (gen_trap_ifnofpu(dc
))
2095 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, gsr
));
2096 gen_movl_T0_reg(rd
);
2098 case 0x17: /* Tick compare */
2099 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, tick_cmpr
));
2100 gen_movl_T0_reg(rd
);
2102 case 0x18: /* System tick */
2106 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2107 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2108 offsetof(CPUState
, stick
));
2109 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_T
[0],
2111 gen_movl_T0_reg(rd
);
2114 case 0x19: /* System tick compare */
2115 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, stick_cmpr
));
2116 gen_movl_T0_reg(rd
);
2118 case 0x10: /* Performance Control */
2119 case 0x11: /* Performance Instrumentation Counter */
2120 case 0x12: /* Dispatch Control */
2121 case 0x14: /* Softint set, WO */
2122 case 0x15: /* Softint clear, WO */
2123 case 0x16: /* Softint write */
2128 #if !defined(CONFIG_USER_ONLY)
2129 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2130 #ifndef TARGET_SPARC64
2131 if (!supervisor(dc
))
2133 tcg_gen_helper_1_0(helper_rdpsr
, cpu_T
[0]);
2135 if (!hypervisor(dc
))
2137 rs1
= GET_FIELD(insn
, 13, 17);
2140 // gen_op_rdhpstate();
2143 // gen_op_rdhtstate();
2146 gen_op_movl_T0_env(offsetof(CPUSPARCState
, hintp
));
2149 gen_op_movl_T0_env(offsetof(CPUSPARCState
, htba
));
2152 gen_op_movl_T0_env(offsetof(CPUSPARCState
, hver
));
2154 case 31: // hstick_cmpr
2155 gen_op_movl_env_T0(offsetof(CPUSPARCState
, hstick_cmpr
));
2161 gen_movl_T0_reg(rd
);
2163 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2164 if (!supervisor(dc
))
2166 #ifdef TARGET_SPARC64
2167 rs1
= GET_FIELD(insn
, 13, 17);
2173 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2174 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2175 offsetof(CPUState
, tsptr
));
2176 tcg_gen_ld_tl(cpu_T
[0], r_tsptr
,
2177 offsetof(trap_state
, tpc
));
2184 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2185 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2186 offsetof(CPUState
, tsptr
));
2187 tcg_gen_ld_tl(cpu_T
[0], r_tsptr
,
2188 offsetof(trap_state
, tnpc
));
2195 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2196 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2197 offsetof(CPUState
, tsptr
));
2198 tcg_gen_ld_tl(cpu_T
[0], r_tsptr
,
2199 offsetof(trap_state
, tstate
));
2206 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2207 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2208 offsetof(CPUState
, tsptr
));
2209 tcg_gen_ld_i32(cpu_T
[0], r_tsptr
,
2210 offsetof(trap_state
, tt
));
2217 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2218 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2219 offsetof(CPUState
, tick
));
2220 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_T
[0],
2222 gen_movl_T0_reg(rd
);
2226 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, tbr
));
2229 gen_op_movl_T0_env(offsetof(CPUSPARCState
, pstate
));
2232 gen_op_movl_T0_env(offsetof(CPUSPARCState
, tl
));
2235 gen_op_movl_T0_env(offsetof(CPUSPARCState
, psrpil
));
2241 gen_op_movl_T0_env(offsetof(CPUSPARCState
, cansave
));
2243 case 11: // canrestore
2244 gen_op_movl_T0_env(offsetof(CPUSPARCState
, canrestore
));
2246 case 12: // cleanwin
2247 gen_op_movl_T0_env(offsetof(CPUSPARCState
, cleanwin
));
2249 case 13: // otherwin
2250 gen_op_movl_T0_env(offsetof(CPUSPARCState
, otherwin
));
2253 gen_op_movl_T0_env(offsetof(CPUSPARCState
, wstate
));
2255 case 16: // UA2005 gl
2256 gen_op_movl_T0_env(offsetof(CPUSPARCState
, gl
));
2258 case 26: // UA2005 strand status
2259 if (!hypervisor(dc
))
2261 gen_op_movl_T0_env(offsetof(CPUSPARCState
, ssr
));
2264 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, version
));
2271 gen_op_movl_T0_env(offsetof(CPUSPARCState
, wim
));
2273 gen_movl_T0_reg(rd
);
2275 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2276 #ifdef TARGET_SPARC64
2279 if (!supervisor(dc
))
2281 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, tbr
));
2282 gen_movl_T0_reg(rd
);
2286 } else if (xop
== 0x34) { /* FPU Operations */
2287 if (gen_trap_ifnofpu(dc
))
2289 gen_op_clear_ieee_excp_and_FTT();
2290 rs1
= GET_FIELD(insn
, 13, 17);
2291 rs2
= GET_FIELD(insn
, 27, 31);
2292 xop
= GET_FIELD(insn
, 18, 26);
2294 case 0x1: /* fmovs */
2295 gen_op_load_fpr_FT0(rs2
);
2296 gen_op_store_FT0_fpr(rd
);
2298 case 0x5: /* fnegs */
2299 gen_op_load_fpr_FT1(rs2
);
2301 gen_op_store_FT0_fpr(rd
);
2303 case 0x9: /* fabss */
2304 gen_op_load_fpr_FT1(rs2
);
2305 tcg_gen_helper_0_0(helper_fabss
);
2306 gen_op_store_FT0_fpr(rd
);
2308 case 0x29: /* fsqrts */
2309 gen_op_load_fpr_FT1(rs2
);
2310 gen_clear_float_exceptions();
2311 tcg_gen_helper_0_0(helper_fsqrts
);
2312 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2313 gen_op_store_FT0_fpr(rd
);
2315 case 0x2a: /* fsqrtd */
2316 gen_op_load_fpr_DT1(DFPREG(rs2
));
2317 gen_clear_float_exceptions();
2318 tcg_gen_helper_0_0(helper_fsqrtd
);
2319 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2320 gen_op_store_DT0_fpr(DFPREG(rd
));
2322 case 0x2b: /* fsqrtq */
2323 #if defined(CONFIG_USER_ONLY)
2324 gen_op_load_fpr_QT1(QFPREG(rs2
));
2325 gen_clear_float_exceptions();
2326 tcg_gen_helper_0_0(helper_fsqrtq
);
2327 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2328 gen_op_store_QT0_fpr(QFPREG(rd
));
2334 gen_op_load_fpr_FT0(rs1
);
2335 gen_op_load_fpr_FT1(rs2
);
2336 gen_clear_float_exceptions();
2338 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2339 gen_op_store_FT0_fpr(rd
);
2342 gen_op_load_fpr_DT0(DFPREG(rs1
));
2343 gen_op_load_fpr_DT1(DFPREG(rs2
));
2344 gen_clear_float_exceptions();
2346 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2347 gen_op_store_DT0_fpr(DFPREG(rd
));
2349 case 0x43: /* faddq */
2350 #if defined(CONFIG_USER_ONLY)
2351 gen_op_load_fpr_QT0(QFPREG(rs1
));
2352 gen_op_load_fpr_QT1(QFPREG(rs2
));
2353 gen_clear_float_exceptions();
2355 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2356 gen_op_store_QT0_fpr(QFPREG(rd
));
2362 gen_op_load_fpr_FT0(rs1
);
2363 gen_op_load_fpr_FT1(rs2
);
2364 gen_clear_float_exceptions();
2366 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2367 gen_op_store_FT0_fpr(rd
);
2370 gen_op_load_fpr_DT0(DFPREG(rs1
));
2371 gen_op_load_fpr_DT1(DFPREG(rs2
));
2372 gen_clear_float_exceptions();
2374 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2375 gen_op_store_DT0_fpr(DFPREG(rd
));
2377 case 0x47: /* fsubq */
2378 #if defined(CONFIG_USER_ONLY)
2379 gen_op_load_fpr_QT0(QFPREG(rs1
));
2380 gen_op_load_fpr_QT1(QFPREG(rs2
));
2381 gen_clear_float_exceptions();
2383 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2384 gen_op_store_QT0_fpr(QFPREG(rd
));
2390 gen_op_load_fpr_FT0(rs1
);
2391 gen_op_load_fpr_FT1(rs2
);
2392 gen_clear_float_exceptions();
2394 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2395 gen_op_store_FT0_fpr(rd
);
2398 gen_op_load_fpr_DT0(DFPREG(rs1
));
2399 gen_op_load_fpr_DT1(DFPREG(rs2
));
2400 gen_clear_float_exceptions();
2402 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2403 gen_op_store_DT0_fpr(DFPREG(rd
));
2405 case 0x4b: /* fmulq */
2406 #if defined(CONFIG_USER_ONLY)
2407 gen_op_load_fpr_QT0(QFPREG(rs1
));
2408 gen_op_load_fpr_QT1(QFPREG(rs2
));
2409 gen_clear_float_exceptions();
2411 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2412 gen_op_store_QT0_fpr(QFPREG(rd
));
2418 gen_op_load_fpr_FT0(rs1
);
2419 gen_op_load_fpr_FT1(rs2
);
2420 gen_clear_float_exceptions();
2422 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2423 gen_op_store_FT0_fpr(rd
);
2426 gen_op_load_fpr_DT0(DFPREG(rs1
));
2427 gen_op_load_fpr_DT1(DFPREG(rs2
));
2428 gen_clear_float_exceptions();
2430 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2431 gen_op_store_DT0_fpr(DFPREG(rd
));
2433 case 0x4f: /* fdivq */
2434 #if defined(CONFIG_USER_ONLY)
2435 gen_op_load_fpr_QT0(QFPREG(rs1
));
2436 gen_op_load_fpr_QT1(QFPREG(rs2
));
2437 gen_clear_float_exceptions();
2439 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2440 gen_op_store_QT0_fpr(QFPREG(rd
));
2446 gen_op_load_fpr_FT0(rs1
);
2447 gen_op_load_fpr_FT1(rs2
);
2448 gen_clear_float_exceptions();
2450 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2451 gen_op_store_DT0_fpr(DFPREG(rd
));
2453 case 0x6e: /* fdmulq */
2454 #if defined(CONFIG_USER_ONLY)
2455 gen_op_load_fpr_DT0(DFPREG(rs1
));
2456 gen_op_load_fpr_DT1(DFPREG(rs2
));
2457 gen_clear_float_exceptions();
2459 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2460 gen_op_store_QT0_fpr(QFPREG(rd
));
2466 gen_op_load_fpr_FT1(rs2
);
2467 gen_clear_float_exceptions();
2469 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2470 gen_op_store_FT0_fpr(rd
);
2473 gen_op_load_fpr_DT1(DFPREG(rs2
));
2474 gen_clear_float_exceptions();
2476 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2477 gen_op_store_FT0_fpr(rd
);
2479 case 0xc7: /* fqtos */
2480 #if defined(CONFIG_USER_ONLY)
2481 gen_op_load_fpr_QT1(QFPREG(rs2
));
2482 gen_clear_float_exceptions();
2484 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2485 gen_op_store_FT0_fpr(rd
);
2491 gen_op_load_fpr_FT1(rs2
);
2493 gen_op_store_DT0_fpr(DFPREG(rd
));
2496 gen_op_load_fpr_FT1(rs2
);
2498 gen_op_store_DT0_fpr(DFPREG(rd
));
2500 case 0xcb: /* fqtod */
2501 #if defined(CONFIG_USER_ONLY)
2502 gen_op_load_fpr_QT1(QFPREG(rs2
));
2503 gen_clear_float_exceptions();
2505 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2506 gen_op_store_DT0_fpr(DFPREG(rd
));
2511 case 0xcc: /* fitoq */
2512 #if defined(CONFIG_USER_ONLY)
2513 gen_op_load_fpr_FT1(rs2
);
2515 gen_op_store_QT0_fpr(QFPREG(rd
));
2520 case 0xcd: /* fstoq */
2521 #if defined(CONFIG_USER_ONLY)
2522 gen_op_load_fpr_FT1(rs2
);
2524 gen_op_store_QT0_fpr(QFPREG(rd
));
2529 case 0xce: /* fdtoq */
2530 #if defined(CONFIG_USER_ONLY)
2531 gen_op_load_fpr_DT1(DFPREG(rs2
));
2533 gen_op_store_QT0_fpr(QFPREG(rd
));
2539 gen_op_load_fpr_FT1(rs2
);
2540 gen_clear_float_exceptions();
2542 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2543 gen_op_store_FT0_fpr(rd
);
2546 gen_op_load_fpr_DT1(DFPREG(rs2
));
2547 gen_clear_float_exceptions();
2549 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2550 gen_op_store_FT0_fpr(rd
);
2552 case 0xd3: /* fqtoi */
2553 #if defined(CONFIG_USER_ONLY)
2554 gen_op_load_fpr_QT1(QFPREG(rs2
));
2555 gen_clear_float_exceptions();
2557 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2558 gen_op_store_FT0_fpr(rd
);
2563 #ifdef TARGET_SPARC64
2564 case 0x2: /* V9 fmovd */
2565 gen_op_load_fpr_DT0(DFPREG(rs2
));
2566 gen_op_store_DT0_fpr(DFPREG(rd
));
2568 case 0x3: /* V9 fmovq */
2569 #if defined(CONFIG_USER_ONLY)
2570 gen_op_load_fpr_QT0(QFPREG(rs2
));
2571 gen_op_store_QT0_fpr(QFPREG(rd
));
2576 case 0x6: /* V9 fnegd */
2577 gen_op_load_fpr_DT1(DFPREG(rs2
));
2579 gen_op_store_DT0_fpr(DFPREG(rd
));
2581 case 0x7: /* V9 fnegq */
2582 #if defined(CONFIG_USER_ONLY)
2583 gen_op_load_fpr_QT1(QFPREG(rs2
));
2585 gen_op_store_QT0_fpr(QFPREG(rd
));
2590 case 0xa: /* V9 fabsd */
2591 gen_op_load_fpr_DT1(DFPREG(rs2
));
2592 tcg_gen_helper_0_0(helper_fabsd
);
2593 gen_op_store_DT0_fpr(DFPREG(rd
));
2595 case 0xb: /* V9 fabsq */
2596 #if defined(CONFIG_USER_ONLY)
2597 gen_op_load_fpr_QT1(QFPREG(rs2
));
2598 tcg_gen_helper_0_0(helper_fabsq
);
2599 gen_op_store_QT0_fpr(QFPREG(rd
));
2604 case 0x81: /* V9 fstox */
2605 gen_op_load_fpr_FT1(rs2
);
2606 gen_clear_float_exceptions();
2608 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2609 gen_op_store_DT0_fpr(DFPREG(rd
));
2611 case 0x82: /* V9 fdtox */
2612 gen_op_load_fpr_DT1(DFPREG(rs2
));
2613 gen_clear_float_exceptions();
2615 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2616 gen_op_store_DT0_fpr(DFPREG(rd
));
2618 case 0x83: /* V9 fqtox */
2619 #if defined(CONFIG_USER_ONLY)
2620 gen_op_load_fpr_QT1(QFPREG(rs2
));
2621 gen_clear_float_exceptions();
2623 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2624 gen_op_store_DT0_fpr(DFPREG(rd
));
2629 case 0x84: /* V9 fxtos */
2630 gen_op_load_fpr_DT1(DFPREG(rs2
));
2631 gen_clear_float_exceptions();
2633 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2634 gen_op_store_FT0_fpr(rd
);
2636 case 0x88: /* V9 fxtod */
2637 gen_op_load_fpr_DT1(DFPREG(rs2
));
2638 gen_clear_float_exceptions();
2640 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2641 gen_op_store_DT0_fpr(DFPREG(rd
));
2643 case 0x8c: /* V9 fxtoq */
2644 #if defined(CONFIG_USER_ONLY)
2645 gen_op_load_fpr_DT1(DFPREG(rs2
));
2646 gen_clear_float_exceptions();
2648 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2649 gen_op_store_QT0_fpr(QFPREG(rd
));
2658 } else if (xop
== 0x35) { /* FPU Operations */
2659 #ifdef TARGET_SPARC64
2662 if (gen_trap_ifnofpu(dc
))
2664 gen_op_clear_ieee_excp_and_FTT();
2665 rs1
= GET_FIELD(insn
, 13, 17);
2666 rs2
= GET_FIELD(insn
, 27, 31);
2667 xop
= GET_FIELD(insn
, 18, 26);
2668 #ifdef TARGET_SPARC64
2669 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2673 l1
= gen_new_label();
2674 r_zero
= tcg_const_tl(0);
2675 cond
= GET_FIELD_SP(insn
, 14, 17);
2676 rs1
= GET_FIELD(insn
, 13, 17);
2677 gen_movl_reg_T0(rs1
);
2678 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
2679 gen_op_load_fpr_FT0(rs2
);
2680 gen_op_store_FT0_fpr(rd
);
2683 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2687 l1
= gen_new_label();
2688 r_zero
= tcg_const_tl(0);
2689 cond
= GET_FIELD_SP(insn
, 14, 17);
2690 rs1
= GET_FIELD(insn
, 13, 17);
2691 gen_movl_reg_T0(rs1
);
2692 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
2693 gen_op_load_fpr_DT0(DFPREG(rs2
));
2694 gen_op_store_DT0_fpr(DFPREG(rd
));
2697 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2698 #if defined(CONFIG_USER_ONLY)
2702 l1
= gen_new_label();
2703 r_zero
= tcg_const_tl(0);
2704 cond
= GET_FIELD_SP(insn
, 14, 17);
2705 rs1
= GET_FIELD(insn
, 13, 17);
2706 gen_movl_reg_T0(rs1
);
2707 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
2708 gen_op_load_fpr_QT0(QFPREG(rs2
));
2709 gen_op_store_QT0_fpr(QFPREG(rd
));
2718 #ifdef TARGET_SPARC64
2719 #define FMOVCC(size_FDQ, fcc) \
2721 TCGv r_zero, r_cond; \
2724 l1 = gen_new_label(); \
2725 r_zero = tcg_const_tl(0); \
2726 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2727 cond = GET_FIELD_SP(insn, 14, 17); \
2728 gen_fcond(r_cond, fcc, cond); \
2729 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1); \
2730 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2731 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2732 gen_set_label(l1); \
2734 case 0x001: /* V9 fmovscc %fcc0 */
2737 case 0x002: /* V9 fmovdcc %fcc0 */
2740 case 0x003: /* V9 fmovqcc %fcc0 */
2741 #if defined(CONFIG_USER_ONLY)
2747 case 0x041: /* V9 fmovscc %fcc1 */
2750 case 0x042: /* V9 fmovdcc %fcc1 */
2753 case 0x043: /* V9 fmovqcc %fcc1 */
2754 #if defined(CONFIG_USER_ONLY)
2760 case 0x081: /* V9 fmovscc %fcc2 */
2763 case 0x082: /* V9 fmovdcc %fcc2 */
2766 case 0x083: /* V9 fmovqcc %fcc2 */
2767 #if defined(CONFIG_USER_ONLY)
2773 case 0x0c1: /* V9 fmovscc %fcc3 */
2776 case 0x0c2: /* V9 fmovdcc %fcc3 */
2779 case 0x0c3: /* V9 fmovqcc %fcc3 */
2780 #if defined(CONFIG_USER_ONLY)
2787 #define FMOVCC(size_FDQ, icc) \
2789 TCGv r_zero, r_cond; \
2792 l1 = gen_new_label(); \
2793 r_zero = tcg_const_tl(0); \
2794 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2795 cond = GET_FIELD_SP(insn, 14, 17); \
2796 gen_cond(r_cond, icc, cond); \
2797 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1); \
2798 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2799 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2800 gen_set_label(l1); \
2803 case 0x101: /* V9 fmovscc %icc */
2806 case 0x102: /* V9 fmovdcc %icc */
2808 case 0x103: /* V9 fmovqcc %icc */
2809 #if defined(CONFIG_USER_ONLY)
2815 case 0x181: /* V9 fmovscc %xcc */
2818 case 0x182: /* V9 fmovdcc %xcc */
2821 case 0x183: /* V9 fmovqcc %xcc */
2822 #if defined(CONFIG_USER_ONLY)
2830 case 0x51: /* fcmps, V9 %fcc */
2831 gen_op_load_fpr_FT0(rs1
);
2832 gen_op_load_fpr_FT1(rs2
);
2833 gen_op_fcmps(rd
& 3);
2835 case 0x52: /* fcmpd, V9 %fcc */
2836 gen_op_load_fpr_DT0(DFPREG(rs1
));
2837 gen_op_load_fpr_DT1(DFPREG(rs2
));
2838 gen_op_fcmpd(rd
& 3);
2840 case 0x53: /* fcmpq, V9 %fcc */
2841 #if defined(CONFIG_USER_ONLY)
2842 gen_op_load_fpr_QT0(QFPREG(rs1
));
2843 gen_op_load_fpr_QT1(QFPREG(rs2
));
2844 gen_op_fcmpq(rd
& 3);
2846 #else /* !defined(CONFIG_USER_ONLY) */
2849 case 0x55: /* fcmpes, V9 %fcc */
2850 gen_op_load_fpr_FT0(rs1
);
2851 gen_op_load_fpr_FT1(rs2
);
2852 gen_op_fcmpes(rd
& 3);
2854 case 0x56: /* fcmped, V9 %fcc */
2855 gen_op_load_fpr_DT0(DFPREG(rs1
));
2856 gen_op_load_fpr_DT1(DFPREG(rs2
));
2857 gen_op_fcmped(rd
& 3);
2859 case 0x57: /* fcmpeq, V9 %fcc */
2860 #if defined(CONFIG_USER_ONLY)
2861 gen_op_load_fpr_QT0(QFPREG(rs1
));
2862 gen_op_load_fpr_QT1(QFPREG(rs2
));
2863 gen_op_fcmpeq(rd
& 3);
2865 #else/* !defined(CONFIG_USER_ONLY) */
2872 } else if (xop
== 0x2) {
2875 rs1
= GET_FIELD(insn
, 13, 17);
2877 // or %g0, x, y -> mov T0, x; mov y, T0
2878 if (IS_IMM
) { /* immediate */
2879 rs2
= GET_FIELDs(insn
, 19, 31);
2880 tcg_gen_movi_tl(cpu_T
[0], (int)rs2
);
2881 } else { /* register */
2882 rs2
= GET_FIELD(insn
, 27, 31);
2883 gen_movl_reg_T0(rs2
);
2886 gen_movl_reg_T0(rs1
);
2887 if (IS_IMM
) { /* immediate */
2888 rs2
= GET_FIELDs(insn
, 19, 31);
2889 tcg_gen_ori_tl(cpu_T
[0], cpu_T
[0], (int)rs2
);
2890 } else { /* register */
2891 // or x, %g0, y -> mov T1, x; mov y, T1
2892 rs2
= GET_FIELD(insn
, 27, 31);
2894 gen_movl_reg_T1(rs2
);
2899 gen_movl_T0_reg(rd
);
2901 #ifdef TARGET_SPARC64
2902 } else if (xop
== 0x25) { /* sll, V9 sllx */
2903 rs1
= GET_FIELD(insn
, 13, 17);
2904 gen_movl_reg_T0(rs1
);
2905 if (IS_IMM
) { /* immediate */
2906 rs2
= GET_FIELDs(insn
, 20, 31);
2907 if (insn
& (1 << 12)) {
2908 tcg_gen_shli_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x3f);
2910 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2911 tcg_gen_shli_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x1f);
2913 } else { /* register */
2914 rs2
= GET_FIELD(insn
, 27, 31);
2915 gen_movl_reg_T1(rs2
);
2916 if (insn
& (1 << 12)) {
2917 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x3f);
2918 tcg_gen_shl_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2920 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x1f);
2921 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2922 tcg_gen_shl_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2925 gen_movl_T0_reg(rd
);
2926 } else if (xop
== 0x26) { /* srl, V9 srlx */
2927 rs1
= GET_FIELD(insn
, 13, 17);
2928 gen_movl_reg_T0(rs1
);
2929 if (IS_IMM
) { /* immediate */
2930 rs2
= GET_FIELDs(insn
, 20, 31);
2931 if (insn
& (1 << 12)) {
2932 tcg_gen_shri_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x3f);
2934 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2935 tcg_gen_shri_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x1f);
2937 } else { /* register */
2938 rs2
= GET_FIELD(insn
, 27, 31);
2939 gen_movl_reg_T1(rs2
);
2940 if (insn
& (1 << 12)) {
2941 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x3f);
2942 tcg_gen_shr_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2944 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x1f);
2945 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2946 tcg_gen_shr_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2949 gen_movl_T0_reg(rd
);
2950 } else if (xop
== 0x27) { /* sra, V9 srax */
2951 rs1
= GET_FIELD(insn
, 13, 17);
2952 gen_movl_reg_T0(rs1
);
2953 if (IS_IMM
) { /* immediate */
2954 rs2
= GET_FIELDs(insn
, 20, 31);
2955 if (insn
& (1 << 12)) {
2956 tcg_gen_sari_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x3f);
2958 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2959 tcg_gen_ext_i32_i64(cpu_T
[0], cpu_T
[0]);
2960 tcg_gen_sari_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x1f);
2962 } else { /* register */
2963 rs2
= GET_FIELD(insn
, 27, 31);
2964 gen_movl_reg_T1(rs2
);
2965 if (insn
& (1 << 12)) {
2966 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x3f);
2967 tcg_gen_sar_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2969 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x1f);
2970 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2971 tcg_gen_sar_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2974 gen_movl_T0_reg(rd
);
2976 } else if (xop
< 0x36) {
2977 rs1
= GET_FIELD(insn
, 13, 17);
2978 gen_movl_reg_T0(rs1
);
2979 if (IS_IMM
) { /* immediate */
2980 rs2
= GET_FIELDs(insn
, 19, 31);
2981 gen_movl_simm_T1(rs2
);
2982 } else { /* register */
2983 rs2
= GET_FIELD(insn
, 27, 31);
2984 gen_movl_reg_T1(rs2
);
2987 switch (xop
& ~0x10) {
2990 gen_op_add_T1_T0_cc();
2995 tcg_gen_and_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2997 gen_op_logic_T0_cc();
3000 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3002 gen_op_logic_T0_cc();
3005 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3007 gen_op_logic_T0_cc();
3011 gen_op_sub_T1_T0_cc();
3013 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3016 tcg_gen_xori_tl(cpu_T
[1], cpu_T
[1], -1);
3017 tcg_gen_and_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3019 gen_op_logic_T0_cc();
3022 tcg_gen_xori_tl(cpu_T
[1], cpu_T
[1], -1);
3023 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3025 gen_op_logic_T0_cc();
3028 tcg_gen_xori_tl(cpu_T
[1], cpu_T
[1], -1);
3029 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3031 gen_op_logic_T0_cc();
3035 gen_op_addx_T1_T0_cc();
3037 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3038 tcg_gen_add_tl(cpu_T
[1], cpu_T
[1], cpu_tmp0
);
3039 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3042 #ifdef TARGET_SPARC64
3043 case 0x9: /* V9 mulx */
3044 tcg_gen_mul_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3048 gen_op_umul_T1_T0();
3050 gen_op_logic_T0_cc();
3053 gen_op_smul_T1_T0();
3055 gen_op_logic_T0_cc();
3059 gen_op_subx_T1_T0_cc();
3061 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3062 tcg_gen_add_tl(cpu_T
[1], cpu_T
[1], cpu_tmp0
);
3063 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3066 #ifdef TARGET_SPARC64
3067 case 0xd: /* V9 udivx */
3068 gen_trap_ifdivzero_i64(cpu_T
[1]);
3069 tcg_gen_divu_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3073 gen_op_udiv_T1_T0();
3078 gen_op_sdiv_T1_T0();
3085 gen_movl_T0_reg(rd
);
3088 case 0x20: /* taddcc */
3089 gen_op_tadd_T1_T0_cc();
3090 gen_movl_T0_reg(rd
);
3092 case 0x21: /* tsubcc */
3093 gen_op_tsub_T1_T0_cc();
3094 gen_movl_T0_reg(rd
);
3096 case 0x22: /* taddcctv */
3098 gen_op_tadd_T1_T0_ccTV();
3099 gen_movl_T0_reg(rd
);
3101 case 0x23: /* tsubcctv */
3103 gen_op_tsub_T1_T0_ccTV();
3104 gen_movl_T0_reg(rd
);
3106 case 0x24: /* mulscc */
3107 gen_op_mulscc_T1_T0();
3108 gen_movl_T0_reg(rd
);
3110 #ifndef TARGET_SPARC64
3111 case 0x25: /* sll */
3112 tcg_gen_andi_i32(cpu_T
[1], cpu_T
[1], 0x1f);
3113 tcg_gen_shl_i32(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3114 gen_movl_T0_reg(rd
);
3116 case 0x26: /* srl */
3117 tcg_gen_andi_i32(cpu_T
[1], cpu_T
[1], 0x1f);
3118 tcg_gen_shr_i32(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3119 gen_movl_T0_reg(rd
);
3121 case 0x27: /* sra */
3122 tcg_gen_andi_i32(cpu_T
[1], cpu_T
[1], 0x1f);
3123 tcg_gen_sar_i32(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
3124 gen_movl_T0_reg(rd
);
3132 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, y
));
3134 #ifndef TARGET_SPARC64
3135 case 0x01 ... 0x0f: /* undefined in the
3139 case 0x10 ... 0x1f: /* implementation-dependent
3145 case 0x2: /* V9 wrccr */
3149 case 0x3: /* V9 wrasi */
3151 gen_op_movl_env_T0(offsetof(CPUSPARCState
, asi
));
3153 case 0x6: /* V9 wrfprs */
3155 gen_op_movl_env_T0(offsetof(CPUSPARCState
, fprs
));
3161 case 0xf: /* V9 sir, nop if user */
3162 #if !defined(CONFIG_USER_ONLY)
3167 case 0x13: /* Graphics Status */
3168 if (gen_trap_ifnofpu(dc
))
3171 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, gsr
));
3173 case 0x17: /* Tick compare */
3174 #if !defined(CONFIG_USER_ONLY)
3175 if (!supervisor(dc
))
3182 gen_op_movtl_env_T0(offsetof(CPUSPARCState
,
3184 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3185 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3186 offsetof(CPUState
, tick
));
3187 tcg_gen_helper_0_2(helper_tick_set_limit
,
3188 r_tickptr
, cpu_T
[0]);
3191 case 0x18: /* System tick */
3192 #if !defined(CONFIG_USER_ONLY)
3193 if (!supervisor(dc
))
3200 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3201 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3202 offsetof(CPUState
, stick
));
3203 tcg_gen_helper_0_2(helper_tick_set_count
,
3204 r_tickptr
, cpu_T
[0]);
3207 case 0x19: /* System tick compare */
3208 #if !defined(CONFIG_USER_ONLY)
3209 if (!supervisor(dc
))
3216 gen_op_movtl_env_T0(offsetof(CPUSPARCState
,
3218 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3219 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3220 offsetof(CPUState
, stick
));
3221 tcg_gen_helper_0_2(helper_tick_set_limit
,
3222 r_tickptr
, cpu_T
[0]);
3226 case 0x10: /* Performance Control */
3227 case 0x11: /* Performance Instrumentation Counter */
3228 case 0x12: /* Dispatch Control */
3229 case 0x14: /* Softint set */
3230 case 0x15: /* Softint clear */
3231 case 0x16: /* Softint write */
3238 #if !defined(CONFIG_USER_ONLY)
3239 case 0x31: /* wrpsr, V9 saved, restored */
3241 if (!supervisor(dc
))
3243 #ifdef TARGET_SPARC64
3251 case 2: /* UA2005 allclean */
3252 case 3: /* UA2005 otherw */
3253 case 4: /* UA2005 normalw */
3254 case 5: /* UA2005 invalw */
3261 tcg_gen_helper_0_1(helper_wrpsr
, cpu_T
[0]);
3269 case 0x32: /* wrwim, V9 wrpr */
3271 if (!supervisor(dc
))
3274 #ifdef TARGET_SPARC64
3280 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3281 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3282 offsetof(CPUState
, tsptr
));
3283 tcg_gen_st_tl(cpu_T
[0], r_tsptr
,
3284 offsetof(trap_state
, tpc
));
3291 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3292 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3293 offsetof(CPUState
, tsptr
));
3294 tcg_gen_st_tl(cpu_T
[0], r_tsptr
,
3295 offsetof(trap_state
, tnpc
));
3302 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3303 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3304 offsetof(CPUState
, tsptr
));
3305 tcg_gen_st_tl(cpu_T
[0], r_tsptr
,
3306 offsetof(trap_state
, tstate
));
3313 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3314 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3315 offsetof(CPUState
, tsptr
));
3316 tcg_gen_st_i32(cpu_T
[0], r_tsptr
,
3317 offsetof(trap_state
, tt
));
3324 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3325 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3326 offsetof(CPUState
, tick
));
3327 tcg_gen_helper_0_2(helper_tick_set_count
,
3328 r_tickptr
, cpu_T
[0]);
3332 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, tbr
));
3336 tcg_gen_helper_0_1(helper_wrpstate
, cpu_T
[0]);
3342 gen_op_movl_env_T0(offsetof(CPUSPARCState
, tl
));
3345 gen_op_movl_env_T0(offsetof(CPUSPARCState
, psrpil
));
3351 gen_op_movl_env_T0(offsetof(CPUSPARCState
, cansave
));
3353 case 11: // canrestore
3354 gen_op_movl_env_T0(offsetof(CPUSPARCState
, canrestore
));
3356 case 12: // cleanwin
3357 gen_op_movl_env_T0(offsetof(CPUSPARCState
, cleanwin
));
3359 case 13: // otherwin
3360 gen_op_movl_env_T0(offsetof(CPUSPARCState
, otherwin
));
3363 gen_op_movl_env_T0(offsetof(CPUSPARCState
, wstate
));
3365 case 16: // UA2005 gl
3366 gen_op_movl_env_T0(offsetof(CPUSPARCState
, gl
));
3368 case 26: // UA2005 strand status
3369 if (!hypervisor(dc
))
3371 gen_op_movl_env_T0(offsetof(CPUSPARCState
, ssr
));
3377 tcg_gen_andi_i32(cpu_T
[0], cpu_T
[0], ((1 << NWINDOWS
) - 1));
3378 gen_op_movl_env_T0(offsetof(CPUSPARCState
, wim
));
3382 case 0x33: /* wrtbr, UA2005 wrhpr */
3384 #ifndef TARGET_SPARC64
3385 if (!supervisor(dc
))
3388 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, tbr
));
3390 if (!hypervisor(dc
))
3395 // XXX gen_op_wrhpstate();
3402 // XXX gen_op_wrhtstate();
3405 gen_op_movl_env_T0(offsetof(CPUSPARCState
, hintp
));
3408 gen_op_movl_env_T0(offsetof(CPUSPARCState
, htba
));
3410 case 31: // hstick_cmpr
3414 gen_op_movtl_env_T0(offsetof(CPUSPARCState
,
3416 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3417 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3418 offsetof(CPUState
, hstick
));
3419 tcg_gen_helper_0_2(helper_tick_set_limit
,
3420 r_tickptr
, cpu_T
[0]);
3423 case 6: // hver readonly
3431 #ifdef TARGET_SPARC64
3432 case 0x2c: /* V9 movcc */
3434 int cc
= GET_FIELD_SP(insn
, 11, 12);
3435 int cond
= GET_FIELD_SP(insn
, 14, 17);
3439 r_cond
= tcg_temp_new(TCG_TYPE_TL
);
3440 if (insn
& (1 << 18)) {
3442 gen_cond(r_cond
, 0, cond
);
3444 gen_cond(r_cond
, 1, cond
);
3448 gen_fcond(r_cond
, cc
, cond
);
3451 l1
= gen_new_label();
3453 tcg_gen_brcond_tl(TCG_COND_EQ
, r_cond
,
3454 tcg_const_tl(0), l1
);
3455 if (IS_IMM
) { /* immediate */
3456 rs2
= GET_FIELD_SPs(insn
, 0, 10);
3457 gen_movl_simm_T1(rs2
);
3459 rs2
= GET_FIELD_SP(insn
, 0, 4);
3460 gen_movl_reg_T1(rs2
);
3462 gen_movl_T1_reg(rd
);
3466 case 0x2d: /* V9 sdivx */
3467 gen_op_sdivx_T1_T0();
3468 gen_movl_T0_reg(rd
);
3470 case 0x2e: /* V9 popc */
3472 if (IS_IMM
) { /* immediate */
3473 rs2
= GET_FIELD_SPs(insn
, 0, 12);
3474 gen_movl_simm_T1(rs2
);
3475 // XXX optimize: popc(constant)
3478 rs2
= GET_FIELD_SP(insn
, 0, 4);
3479 gen_movl_reg_T1(rs2
);
3481 tcg_gen_helper_1_1(helper_popc
, cpu_T
[0],
3483 gen_movl_T0_reg(rd
);
3485 case 0x2f: /* V9 movr */
3487 int cond
= GET_FIELD_SP(insn
, 10, 12);
3491 rs1
= GET_FIELD(insn
, 13, 17);
3492 gen_movl_reg_T0(rs1
);
3494 l1
= gen_new_label();
3496 r_zero
= tcg_const_tl(0);
3497 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
3498 if (IS_IMM
) { /* immediate */
3499 rs2
= GET_FIELD_SPs(insn
, 0, 9);
3500 gen_movl_simm_T1(rs2
);
3502 rs2
= GET_FIELD_SP(insn
, 0, 4);
3503 gen_movl_reg_T1(rs2
);
3505 gen_movl_T1_reg(rd
);
3514 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3515 #ifdef TARGET_SPARC64
3516 int opf
= GET_FIELD_SP(insn
, 5, 13);
3517 rs1
= GET_FIELD(insn
, 13, 17);
3518 rs2
= GET_FIELD(insn
, 27, 31);
3519 if (gen_trap_ifnofpu(dc
))
3523 case 0x000: /* VIS I edge8cc */
3524 case 0x001: /* VIS II edge8n */
3525 case 0x002: /* VIS I edge8lcc */
3526 case 0x003: /* VIS II edge8ln */
3527 case 0x004: /* VIS I edge16cc */
3528 case 0x005: /* VIS II edge16n */
3529 case 0x006: /* VIS I edge16lcc */
3530 case 0x007: /* VIS II edge16ln */
3531 case 0x008: /* VIS I edge32cc */
3532 case 0x009: /* VIS II edge32n */
3533 case 0x00a: /* VIS I edge32lcc */
3534 case 0x00b: /* VIS II edge32ln */
3537 case 0x010: /* VIS I array8 */
3538 gen_movl_reg_T0(rs1
);
3539 gen_movl_reg_T1(rs2
);
3541 gen_movl_T0_reg(rd
);
3543 case 0x012: /* VIS I array16 */
3544 gen_movl_reg_T0(rs1
);
3545 gen_movl_reg_T1(rs2
);
3547 gen_movl_T0_reg(rd
);
3549 case 0x014: /* VIS I array32 */
3550 gen_movl_reg_T0(rs1
);
3551 gen_movl_reg_T1(rs2
);
3553 gen_movl_T0_reg(rd
);
3555 case 0x018: /* VIS I alignaddr */
3556 gen_movl_reg_T0(rs1
);
3557 gen_movl_reg_T1(rs2
);
3559 gen_movl_T0_reg(rd
);
3561 case 0x019: /* VIS II bmask */
3562 case 0x01a: /* VIS I alignaddrl */
3565 case 0x020: /* VIS I fcmple16 */
3566 gen_op_load_fpr_DT0(DFPREG(rs1
));
3567 gen_op_load_fpr_DT1(DFPREG(rs2
));
3569 gen_op_store_DT0_fpr(DFPREG(rd
));
3571 case 0x022: /* VIS I fcmpne16 */
3572 gen_op_load_fpr_DT0(DFPREG(rs1
));
3573 gen_op_load_fpr_DT1(DFPREG(rs2
));
3575 gen_op_store_DT0_fpr(DFPREG(rd
));
3577 case 0x024: /* VIS I fcmple32 */
3578 gen_op_load_fpr_DT0(DFPREG(rs1
));
3579 gen_op_load_fpr_DT1(DFPREG(rs2
));
3581 gen_op_store_DT0_fpr(DFPREG(rd
));
3583 case 0x026: /* VIS I fcmpne32 */
3584 gen_op_load_fpr_DT0(DFPREG(rs1
));
3585 gen_op_load_fpr_DT1(DFPREG(rs2
));
3587 gen_op_store_DT0_fpr(DFPREG(rd
));
3589 case 0x028: /* VIS I fcmpgt16 */
3590 gen_op_load_fpr_DT0(DFPREG(rs1
));
3591 gen_op_load_fpr_DT1(DFPREG(rs2
));
3593 gen_op_store_DT0_fpr(DFPREG(rd
));
3595 case 0x02a: /* VIS I fcmpeq16 */
3596 gen_op_load_fpr_DT0(DFPREG(rs1
));
3597 gen_op_load_fpr_DT1(DFPREG(rs2
));
3599 gen_op_store_DT0_fpr(DFPREG(rd
));
3601 case 0x02c: /* VIS I fcmpgt32 */
3602 gen_op_load_fpr_DT0(DFPREG(rs1
));
3603 gen_op_load_fpr_DT1(DFPREG(rs2
));
3605 gen_op_store_DT0_fpr(DFPREG(rd
));
3607 case 0x02e: /* VIS I fcmpeq32 */
3608 gen_op_load_fpr_DT0(DFPREG(rs1
));
3609 gen_op_load_fpr_DT1(DFPREG(rs2
));
3611 gen_op_store_DT0_fpr(DFPREG(rd
));
3613 case 0x031: /* VIS I fmul8x16 */
3614 gen_op_load_fpr_DT0(DFPREG(rs1
));
3615 gen_op_load_fpr_DT1(DFPREG(rs2
));
3617 gen_op_store_DT0_fpr(DFPREG(rd
));
3619 case 0x033: /* VIS I fmul8x16au */
3620 gen_op_load_fpr_DT0(DFPREG(rs1
));
3621 gen_op_load_fpr_DT1(DFPREG(rs2
));
3622 gen_op_fmul8x16au();
3623 gen_op_store_DT0_fpr(DFPREG(rd
));
3625 case 0x035: /* VIS I fmul8x16al */
3626 gen_op_load_fpr_DT0(DFPREG(rs1
));
3627 gen_op_load_fpr_DT1(DFPREG(rs2
));
3628 gen_op_fmul8x16al();
3629 gen_op_store_DT0_fpr(DFPREG(rd
));
3631 case 0x036: /* VIS I fmul8sux16 */
3632 gen_op_load_fpr_DT0(DFPREG(rs1
));
3633 gen_op_load_fpr_DT1(DFPREG(rs2
));
3634 gen_op_fmul8sux16();
3635 gen_op_store_DT0_fpr(DFPREG(rd
));
3637 case 0x037: /* VIS I fmul8ulx16 */
3638 gen_op_load_fpr_DT0(DFPREG(rs1
));
3639 gen_op_load_fpr_DT1(DFPREG(rs2
));
3640 gen_op_fmul8ulx16();
3641 gen_op_store_DT0_fpr(DFPREG(rd
));
3643 case 0x038: /* VIS I fmuld8sux16 */
3644 gen_op_load_fpr_DT0(DFPREG(rs1
));
3645 gen_op_load_fpr_DT1(DFPREG(rs2
));
3646 gen_op_fmuld8sux16();
3647 gen_op_store_DT0_fpr(DFPREG(rd
));
3649 case 0x039: /* VIS I fmuld8ulx16 */
3650 gen_op_load_fpr_DT0(DFPREG(rs1
));
3651 gen_op_load_fpr_DT1(DFPREG(rs2
));
3652 gen_op_fmuld8ulx16();
3653 gen_op_store_DT0_fpr(DFPREG(rd
));
3655 case 0x03a: /* VIS I fpack32 */
3656 case 0x03b: /* VIS I fpack16 */
3657 case 0x03d: /* VIS I fpackfix */
3658 case 0x03e: /* VIS I pdist */
3661 case 0x048: /* VIS I faligndata */
3662 gen_op_load_fpr_DT0(DFPREG(rs1
));
3663 gen_op_load_fpr_DT1(DFPREG(rs2
));
3664 gen_op_faligndata();
3665 gen_op_store_DT0_fpr(DFPREG(rd
));
3667 case 0x04b: /* VIS I fpmerge */
3668 gen_op_load_fpr_DT0(DFPREG(rs1
));
3669 gen_op_load_fpr_DT1(DFPREG(rs2
));
3671 gen_op_store_DT0_fpr(DFPREG(rd
));
3673 case 0x04c: /* VIS II bshuffle */
3676 case 0x04d: /* VIS I fexpand */
3677 gen_op_load_fpr_DT0(DFPREG(rs1
));
3678 gen_op_load_fpr_DT1(DFPREG(rs2
));
3680 gen_op_store_DT0_fpr(DFPREG(rd
));
3682 case 0x050: /* VIS I fpadd16 */
3683 gen_op_load_fpr_DT0(DFPREG(rs1
));
3684 gen_op_load_fpr_DT1(DFPREG(rs2
));
3686 gen_op_store_DT0_fpr(DFPREG(rd
));
3688 case 0x051: /* VIS I fpadd16s */
3689 gen_op_load_fpr_FT0(rs1
);
3690 gen_op_load_fpr_FT1(rs2
);
3692 gen_op_store_FT0_fpr(rd
);
3694 case 0x052: /* VIS I fpadd32 */
3695 gen_op_load_fpr_DT0(DFPREG(rs1
));
3696 gen_op_load_fpr_DT1(DFPREG(rs2
));
3698 gen_op_store_DT0_fpr(DFPREG(rd
));
3700 case 0x053: /* VIS I fpadd32s */
3701 gen_op_load_fpr_FT0(rs1
);
3702 gen_op_load_fpr_FT1(rs2
);
3704 gen_op_store_FT0_fpr(rd
);
3706 case 0x054: /* VIS I fpsub16 */
3707 gen_op_load_fpr_DT0(DFPREG(rs1
));
3708 gen_op_load_fpr_DT1(DFPREG(rs2
));
3710 gen_op_store_DT0_fpr(DFPREG(rd
));
3712 case 0x055: /* VIS I fpsub16s */
3713 gen_op_load_fpr_FT0(rs1
);
3714 gen_op_load_fpr_FT1(rs2
);
3716 gen_op_store_FT0_fpr(rd
);
3718 case 0x056: /* VIS I fpsub32 */
3719 gen_op_load_fpr_DT0(DFPREG(rs1
));
3720 gen_op_load_fpr_DT1(DFPREG(rs2
));
3722 gen_op_store_DT0_fpr(DFPREG(rd
));
3724 case 0x057: /* VIS I fpsub32s */
3725 gen_op_load_fpr_FT0(rs1
);
3726 gen_op_load_fpr_FT1(rs2
);
3728 gen_op_store_FT0_fpr(rd
);
3730 case 0x060: /* VIS I fzero */
3731 gen_op_movl_DT0_0();
3732 gen_op_store_DT0_fpr(DFPREG(rd
));
3734 case 0x061: /* VIS I fzeros */
3735 gen_op_movl_FT0_0();
3736 gen_op_store_FT0_fpr(rd
);
3738 case 0x062: /* VIS I fnor */
3739 gen_op_load_fpr_DT0(DFPREG(rs1
));
3740 gen_op_load_fpr_DT1(DFPREG(rs2
));
3742 gen_op_store_DT0_fpr(DFPREG(rd
));
3744 case 0x063: /* VIS I fnors */
3745 gen_op_load_fpr_FT0(rs1
);
3746 gen_op_load_fpr_FT1(rs2
);
3748 gen_op_store_FT0_fpr(rd
);
3750 case 0x064: /* VIS I fandnot2 */
3751 gen_op_load_fpr_DT1(DFPREG(rs1
));
3752 gen_op_load_fpr_DT0(DFPREG(rs2
));
3754 gen_op_store_DT0_fpr(DFPREG(rd
));
3756 case 0x065: /* VIS I fandnot2s */
3757 gen_op_load_fpr_FT1(rs1
);
3758 gen_op_load_fpr_FT0(rs2
);
3760 gen_op_store_FT0_fpr(rd
);
3762 case 0x066: /* VIS I fnot2 */
3763 gen_op_load_fpr_DT1(DFPREG(rs2
));
3765 gen_op_store_DT0_fpr(DFPREG(rd
));
3767 case 0x067: /* VIS I fnot2s */
3768 gen_op_load_fpr_FT1(rs2
);
3770 gen_op_store_FT0_fpr(rd
);
3772 case 0x068: /* VIS I fandnot1 */
3773 gen_op_load_fpr_DT0(DFPREG(rs1
));
3774 gen_op_load_fpr_DT1(DFPREG(rs2
));
3776 gen_op_store_DT0_fpr(DFPREG(rd
));
3778 case 0x069: /* VIS I fandnot1s */
3779 gen_op_load_fpr_FT0(rs1
);
3780 gen_op_load_fpr_FT1(rs2
);
3782 gen_op_store_FT0_fpr(rd
);
3784 case 0x06a: /* VIS I fnot1 */
3785 gen_op_load_fpr_DT1(DFPREG(rs1
));
3787 gen_op_store_DT0_fpr(DFPREG(rd
));
3789 case 0x06b: /* VIS I fnot1s */
3790 gen_op_load_fpr_FT1(rs1
);
3792 gen_op_store_FT0_fpr(rd
);
3794 case 0x06c: /* VIS I fxor */
3795 gen_op_load_fpr_DT0(DFPREG(rs1
));
3796 gen_op_load_fpr_DT1(DFPREG(rs2
));
3798 gen_op_store_DT0_fpr(DFPREG(rd
));
3800 case 0x06d: /* VIS I fxors */
3801 gen_op_load_fpr_FT0(rs1
);
3802 gen_op_load_fpr_FT1(rs2
);
3804 gen_op_store_FT0_fpr(rd
);
3806 case 0x06e: /* VIS I fnand */
3807 gen_op_load_fpr_DT0(DFPREG(rs1
));
3808 gen_op_load_fpr_DT1(DFPREG(rs2
));
3810 gen_op_store_DT0_fpr(DFPREG(rd
));
3812 case 0x06f: /* VIS I fnands */
3813 gen_op_load_fpr_FT0(rs1
);
3814 gen_op_load_fpr_FT1(rs2
);
3816 gen_op_store_FT0_fpr(rd
);
3818 case 0x070: /* VIS I fand */
3819 gen_op_load_fpr_DT0(DFPREG(rs1
));
3820 gen_op_load_fpr_DT1(DFPREG(rs2
));
3822 gen_op_store_DT0_fpr(DFPREG(rd
));
3824 case 0x071: /* VIS I fands */
3825 gen_op_load_fpr_FT0(rs1
);
3826 gen_op_load_fpr_FT1(rs2
);
3828 gen_op_store_FT0_fpr(rd
);
3830 case 0x072: /* VIS I fxnor */
3831 gen_op_load_fpr_DT0(DFPREG(rs1
));
3832 gen_op_load_fpr_DT1(DFPREG(rs2
));
3834 gen_op_store_DT0_fpr(DFPREG(rd
));
3836 case 0x073: /* VIS I fxnors */
3837 gen_op_load_fpr_FT0(rs1
);
3838 gen_op_load_fpr_FT1(rs2
);
3840 gen_op_store_FT0_fpr(rd
);
3842 case 0x074: /* VIS I fsrc1 */
3843 gen_op_load_fpr_DT0(DFPREG(rs1
));
3844 gen_op_store_DT0_fpr(DFPREG(rd
));
3846 case 0x075: /* VIS I fsrc1s */
3847 gen_op_load_fpr_FT0(rs1
);
3848 gen_op_store_FT0_fpr(rd
);
3850 case 0x076: /* VIS I fornot2 */
3851 gen_op_load_fpr_DT1(DFPREG(rs1
));
3852 gen_op_load_fpr_DT0(DFPREG(rs2
));
3854 gen_op_store_DT0_fpr(DFPREG(rd
));
3856 case 0x077: /* VIS I fornot2s */
3857 gen_op_load_fpr_FT1(rs1
);
3858 gen_op_load_fpr_FT0(rs2
);
3860 gen_op_store_FT0_fpr(rd
);
3862 case 0x078: /* VIS I fsrc2 */
3863 gen_op_load_fpr_DT0(DFPREG(rs2
));
3864 gen_op_store_DT0_fpr(DFPREG(rd
));
3866 case 0x079: /* VIS I fsrc2s */
3867 gen_op_load_fpr_FT0(rs2
);
3868 gen_op_store_FT0_fpr(rd
);
3870 case 0x07a: /* VIS I fornot1 */
3871 gen_op_load_fpr_DT0(DFPREG(rs1
));
3872 gen_op_load_fpr_DT1(DFPREG(rs2
));
3874 gen_op_store_DT0_fpr(DFPREG(rd
));
3876 case 0x07b: /* VIS I fornot1s */
3877 gen_op_load_fpr_FT0(rs1
);
3878 gen_op_load_fpr_FT1(rs2
);
3880 gen_op_store_FT0_fpr(rd
);
3882 case 0x07c: /* VIS I for */
3883 gen_op_load_fpr_DT0(DFPREG(rs1
));
3884 gen_op_load_fpr_DT1(DFPREG(rs2
));
3886 gen_op_store_DT0_fpr(DFPREG(rd
));
3888 case 0x07d: /* VIS I fors */
3889 gen_op_load_fpr_FT0(rs1
);
3890 gen_op_load_fpr_FT1(rs2
);
3892 gen_op_store_FT0_fpr(rd
);
3894 case 0x07e: /* VIS I fone */
3895 gen_op_movl_DT0_1();
3896 gen_op_store_DT0_fpr(DFPREG(rd
));
3898 case 0x07f: /* VIS I fones */
3899 gen_op_movl_FT0_1();
3900 gen_op_store_FT0_fpr(rd
);
3902 case 0x080: /* VIS I shutdown */
3903 case 0x081: /* VIS II siam */
3912 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
3913 #ifdef TARGET_SPARC64
3918 #ifdef TARGET_SPARC64
3919 } else if (xop
== 0x39) { /* V9 return */
3920 rs1
= GET_FIELD(insn
, 13, 17);
3922 gen_movl_reg_T0(rs1
);
3923 if (IS_IMM
) { /* immediate */
3924 rs2
= GET_FIELDs(insn
, 19, 31);
3925 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], (int)rs2
);
3926 } else { /* register */
3927 rs2
= GET_FIELD(insn
, 27, 31);
3931 gen_movl_reg_T1(rs2
);
3939 gen_op_check_align_T0_3();
3940 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUSPARCState
, npc
));
3941 dc
->npc
= DYNAMIC_PC
;
3945 rs1
= GET_FIELD(insn
, 13, 17);
3946 gen_movl_reg_T0(rs1
);
3947 if (IS_IMM
) { /* immediate */
3948 rs2
= GET_FIELDs(insn
, 19, 31);
3949 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], (int)rs2
);
3950 } else { /* register */
3951 rs2
= GET_FIELD(insn
, 27, 31);
3955 gen_movl_reg_T1(rs2
);
3962 case 0x38: /* jmpl */
3965 tcg_gen_movi_tl(cpu_T
[1], dc
->pc
);
3966 gen_movl_T1_reg(rd
);
3969 gen_op_check_align_T0_3();
3970 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUSPARCState
, npc
));
3971 dc
->npc
= DYNAMIC_PC
;
3974 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
3975 case 0x39: /* rett, V9 return */
3977 if (!supervisor(dc
))
3980 gen_op_check_align_T0_3();
3981 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUSPARCState
, npc
));
3982 dc
->npc
= DYNAMIC_PC
;
3983 tcg_gen_helper_0_0(helper_rett
);
3987 case 0x3b: /* flush */
3988 tcg_gen_helper_0_1(helper_flush
, cpu_T
[0]);
3990 case 0x3c: /* save */
3993 gen_movl_T0_reg(rd
);
3995 case 0x3d: /* restore */
3998 gen_movl_T0_reg(rd
);
4000 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4001 case 0x3e: /* V9 done/retry */
4005 if (!supervisor(dc
))
4007 dc
->npc
= DYNAMIC_PC
;
4008 dc
->pc
= DYNAMIC_PC
;
4009 tcg_gen_helper_0_0(helper_done
);
4012 if (!supervisor(dc
))
4014 dc
->npc
= DYNAMIC_PC
;
4015 dc
->pc
= DYNAMIC_PC
;
4016 tcg_gen_helper_0_0(helper_retry
);
4031 case 3: /* load/store instructions */
4033 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4034 rs1
= GET_FIELD(insn
, 13, 17);
4036 gen_movl_reg_T0(rs1
);
4037 if (xop
== 0x3c || xop
== 0x3e)
4039 rs2
= GET_FIELD(insn
, 27, 31);
4040 gen_movl_reg_T1(rs2
);
4042 else if (IS_IMM
) { /* immediate */
4043 rs2
= GET_FIELDs(insn
, 19, 31);
4044 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], (int)rs2
);
4045 } else { /* register */
4046 rs2
= GET_FIELD(insn
, 27, 31);
4050 gen_movl_reg_T1(rs2
);
4056 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4057 (xop
> 0x17 && xop
<= 0x1d ) ||
4058 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4060 case 0x0: /* load unsigned word */
4061 gen_op_check_align_T0_3();
4062 ABI32_MASK(cpu_T
[0]);
4063 tcg_gen_qemu_ld32u(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4065 case 0x1: /* load unsigned byte */
4066 ABI32_MASK(cpu_T
[0]);
4067 tcg_gen_qemu_ld8u(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4069 case 0x2: /* load unsigned halfword */
4070 gen_op_check_align_T0_1();
4071 ABI32_MASK(cpu_T
[0]);
4072 tcg_gen_qemu_ld16u(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4074 case 0x3: /* load double word */
4080 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
4081 gen_op_check_align_T0_7();
4082 ABI32_MASK(cpu_T
[0]);
4083 tcg_gen_qemu_ld64(r_dword
, cpu_T
[0], dc
->mem_idx
);
4084 tcg_gen_trunc_i64_i32(cpu_T
[0], r_dword
);
4085 gen_movl_T0_reg(rd
+ 1);
4086 tcg_gen_shri_i64(r_dword
, r_dword
, 32);
4087 tcg_gen_trunc_i64_i32(cpu_T
[1], r_dword
);
4090 case 0x9: /* load signed byte */
4091 ABI32_MASK(cpu_T
[0]);
4092 tcg_gen_qemu_ld8s(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4094 case 0xa: /* load signed halfword */
4095 gen_op_check_align_T0_1();
4096 ABI32_MASK(cpu_T
[0]);
4097 tcg_gen_qemu_ld16s(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4099 case 0xd: /* ldstub -- XXX: should be atomically */
4100 tcg_gen_movi_i32(cpu_tmp0
, 0xff);
4101 ABI32_MASK(cpu_T
[0]);
4102 tcg_gen_qemu_ld8s(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4103 tcg_gen_qemu_st8(cpu_tmp0
, cpu_T
[0], dc
->mem_idx
);
4105 case 0x0f: /* swap register with memory. Also atomically */
4106 gen_op_check_align_T0_3();
4107 gen_movl_reg_T1(rd
);
4108 ABI32_MASK(cpu_T
[0]);
4109 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_T
[0], dc
->mem_idx
);
4110 tcg_gen_qemu_st32(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4111 tcg_gen_mov_i32(cpu_T
[1], cpu_tmp0
);
4113 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4114 case 0x10: /* load word alternate */
4115 #ifndef TARGET_SPARC64
4118 if (!supervisor(dc
))
4121 gen_op_check_align_T0_3();
4122 gen_ld_asi(insn
, 4, 0);
4124 case 0x11: /* load unsigned byte alternate */
4125 #ifndef TARGET_SPARC64
4128 if (!supervisor(dc
))
4131 gen_ld_asi(insn
, 1, 0);
4133 case 0x12: /* load unsigned halfword alternate */
4134 #ifndef TARGET_SPARC64
4137 if (!supervisor(dc
))
4140 gen_op_check_align_T0_1();
4141 gen_ld_asi(insn
, 2, 0);
4143 case 0x13: /* load double word alternate */
4144 #ifndef TARGET_SPARC64
4147 if (!supervisor(dc
))
4152 gen_op_check_align_T0_7();
4154 gen_movl_T0_reg(rd
+ 1);
4156 case 0x19: /* load signed byte alternate */
4157 #ifndef TARGET_SPARC64
4160 if (!supervisor(dc
))
4163 gen_ld_asi(insn
, 1, 1);
4165 case 0x1a: /* load signed halfword alternate */
4166 #ifndef TARGET_SPARC64
4169 if (!supervisor(dc
))
4172 gen_op_check_align_T0_1();
4173 gen_ld_asi(insn
, 2, 1);
4175 case 0x1d: /* ldstuba -- XXX: should be atomically */
4176 #ifndef TARGET_SPARC64
4179 if (!supervisor(dc
))
4182 gen_ldstub_asi(insn
);
4184 case 0x1f: /* swap reg with alt. memory. Also atomically */
4185 #ifndef TARGET_SPARC64
4188 if (!supervisor(dc
))
4191 gen_op_check_align_T0_3();
4192 gen_movl_reg_T1(rd
);
4196 #ifndef TARGET_SPARC64
4197 case 0x30: /* ldc */
4198 case 0x31: /* ldcsr */
4199 case 0x33: /* lddc */
4203 #ifdef TARGET_SPARC64
4204 case 0x08: /* V9 ldsw */
4205 gen_op_check_align_T0_3();
4206 ABI32_MASK(cpu_T
[0]);
4207 tcg_gen_qemu_ld32s(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4209 case 0x0b: /* V9 ldx */
4210 gen_op_check_align_T0_7();
4211 ABI32_MASK(cpu_T
[0]);
4212 tcg_gen_qemu_ld64(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4214 case 0x18: /* V9 ldswa */
4215 gen_op_check_align_T0_3();
4216 gen_ld_asi(insn
, 4, 1);
4218 case 0x1b: /* V9 ldxa */
4219 gen_op_check_align_T0_7();
4220 gen_ld_asi(insn
, 8, 0);
4222 case 0x2d: /* V9 prefetch, no effect */
4224 case 0x30: /* V9 ldfa */
4225 gen_op_check_align_T0_3();
4226 gen_ldf_asi(insn
, 4, rd
);
4228 case 0x33: /* V9 lddfa */
4229 gen_op_check_align_T0_3();
4230 gen_ldf_asi(insn
, 8, DFPREG(rd
));
4232 case 0x3d: /* V9 prefetcha, no effect */
4234 case 0x32: /* V9 ldqfa */
4235 #if defined(CONFIG_USER_ONLY)
4236 gen_op_check_align_T0_3();
4237 gen_ldf_asi(insn
, 16, QFPREG(rd
));
4246 gen_movl_T1_reg(rd
);
4247 #ifdef TARGET_SPARC64
4250 } else if (xop
>= 0x20 && xop
< 0x24) {
4251 if (gen_trap_ifnofpu(dc
))
4254 case 0x20: /* load fpreg */
4255 gen_op_check_align_T0_3();
4257 gen_op_store_FT0_fpr(rd
);
4259 case 0x21: /* load fsr */
4260 gen_op_check_align_T0_3();
4262 tcg_gen_helper_0_0(helper_ldfsr
);
4264 case 0x22: /* load quad fpreg */
4265 #if defined(CONFIG_USER_ONLY)
4266 gen_op_check_align_T0_7();
4268 gen_op_store_QT0_fpr(QFPREG(rd
));
4273 case 0x23: /* load double fpreg */
4274 gen_op_check_align_T0_7();
4276 gen_op_store_DT0_fpr(DFPREG(rd
));
4281 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) || \
4282 xop
== 0xe || xop
== 0x1e) {
4283 gen_movl_reg_T1(rd
);
4285 case 0x4: /* store word */
4286 gen_op_check_align_T0_3();
4287 ABI32_MASK(cpu_T
[0]);
4288 tcg_gen_qemu_st32(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4290 case 0x5: /* store byte */
4291 ABI32_MASK(cpu_T
[0]);
4292 tcg_gen_qemu_st8(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4294 case 0x6: /* store halfword */
4295 gen_op_check_align_T0_1();
4296 ABI32_MASK(cpu_T
[0]);
4297 tcg_gen_qemu_st16(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4299 case 0x7: /* store double word */
4304 TCGv r_dword
, r_low
;
4306 gen_op_check_align_T0_7();
4307 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
4308 r_low
= tcg_temp_new(TCG_TYPE_I32
);
4309 gen_movl_reg_TN(rd
+ 1, r_low
);
4310 tcg_gen_helper_1_2(helper_pack64
, r_dword
, cpu_T
[1],
4312 tcg_gen_qemu_st64(r_dword
, cpu_T
[0], dc
->mem_idx
);
4314 #else /* __i386__ */
4315 gen_op_check_align_T0_7();
4317 gen_movl_reg_T2(rd
+ 1);
4319 #endif /* __i386__ */
4321 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4322 case 0x14: /* store word alternate */
4323 #ifndef TARGET_SPARC64
4326 if (!supervisor(dc
))
4329 gen_op_check_align_T0_3();
4330 gen_st_asi(insn
, 4);
4332 case 0x15: /* store byte alternate */
4333 #ifndef TARGET_SPARC64
4336 if (!supervisor(dc
))
4339 gen_st_asi(insn
, 1);
4341 case 0x16: /* store halfword alternate */
4342 #ifndef TARGET_SPARC64
4345 if (!supervisor(dc
))
4348 gen_op_check_align_T0_1();
4349 gen_st_asi(insn
, 2);
4351 case 0x17: /* store double word alternate */
4352 #ifndef TARGET_SPARC64
4355 if (!supervisor(dc
))
4362 TCGv r_dword
, r_temp
, r_size
;
4364 gen_op_check_align_T0_7();
4365 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
4366 r_temp
= tcg_temp_new(TCG_TYPE_I32
);
4367 r_size
= tcg_temp_new(TCG_TYPE_I32
);
4368 gen_movl_reg_TN(rd
+ 1, r_temp
);
4369 tcg_gen_helper_1_2(helper_pack64
, r_dword
, cpu_T
[1],
4371 #ifdef TARGET_SPARC64
4375 offset
= GET_FIELD(insn
, 25, 31);
4376 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
4377 tcg_gen_ld_i32(r_dword
, cpu_env
, offsetof(CPUSPARCState
, asi
));
4380 asi
= GET_FIELD(insn
, 19, 26);
4381 tcg_gen_movi_i32(r_temp
, asi
);
4382 #ifdef TARGET_SPARC64
4385 tcg_gen_movi_i32(r_size
, 8);
4386 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], r_dword
, r_temp
, r_size
);
4390 #ifdef TARGET_SPARC64
4391 case 0x0e: /* V9 stx */
4392 gen_op_check_align_T0_7();
4393 ABI32_MASK(cpu_T
[0]);
4394 tcg_gen_qemu_st64(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4396 case 0x1e: /* V9 stxa */
4397 gen_op_check_align_T0_7();
4398 gen_st_asi(insn
, 8);
4404 } else if (xop
> 0x23 && xop
< 0x28) {
4405 if (gen_trap_ifnofpu(dc
))
4409 gen_op_check_align_T0_3();
4410 gen_op_load_fpr_FT0(rd
);
4413 case 0x25: /* stfsr, V9 stxfsr */
4414 #ifdef CONFIG_USER_ONLY
4415 gen_op_check_align_T0_3();
4417 tcg_gen_helper_0_0(helper_stfsr
);
4421 #ifdef TARGET_SPARC64
4422 #if defined(CONFIG_USER_ONLY)
4423 /* V9 stqf, store quad fpreg */
4424 gen_op_check_align_T0_7();
4425 gen_op_load_fpr_QT0(QFPREG(rd
));
4431 #else /* !TARGET_SPARC64 */
4432 /* stdfq, store floating point queue */
4433 #if defined(CONFIG_USER_ONLY)
4436 if (!supervisor(dc
))
4438 if (gen_trap_ifnofpu(dc
))
4444 gen_op_check_align_T0_7();
4445 gen_op_load_fpr_DT0(DFPREG(rd
));
4451 } else if (xop
> 0x33 && xop
< 0x3f) {
4453 #ifdef TARGET_SPARC64
4454 case 0x34: /* V9 stfa */
4455 gen_op_check_align_T0_3();
4456 gen_op_load_fpr_FT0(rd
);
4457 gen_stf_asi(insn
, 4, rd
);
4459 case 0x36: /* V9 stqfa */
4460 #if defined(CONFIG_USER_ONLY)
4461 gen_op_check_align_T0_7();
4462 gen_op_load_fpr_QT0(QFPREG(rd
));
4463 gen_stf_asi(insn
, 16, QFPREG(rd
));
4468 case 0x37: /* V9 stdfa */
4469 gen_op_check_align_T0_3();
4470 gen_op_load_fpr_DT0(DFPREG(rd
));
4471 gen_stf_asi(insn
, 8, DFPREG(rd
));
4473 case 0x3c: /* V9 casa */
4474 gen_op_check_align_T0_3();
4475 gen_cas_asi(insn
, rd
);
4476 gen_movl_T1_reg(rd
);
4478 case 0x3e: /* V9 casxa */
4479 gen_op_check_align_T0_7();
4480 gen_casx_asi(insn
, rd
);
4481 gen_movl_T1_reg(rd
);
4484 case 0x34: /* stc */
4485 case 0x35: /* stcsr */
4486 case 0x36: /* stdcq */
4487 case 0x37: /* stdc */
4499 /* default case for non jump instructions */
4500 if (dc
->npc
== DYNAMIC_PC
) {
4501 dc
->pc
= DYNAMIC_PC
;
4503 } else if (dc
->npc
== JUMP_PC
) {
4504 /* we can do a static jump */
4505 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_T
[2]);
4509 dc
->npc
= dc
->npc
+ 4;
4515 gen_op_exception(TT_ILL_INSN
);
4518 #if !defined(CONFIG_USER_ONLY)
4521 gen_op_exception(TT_PRIV_INSN
);
4526 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4529 #ifndef TARGET_SPARC64
4532 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4537 #ifndef TARGET_SPARC64
4540 gen_op_exception(TT_NCP_INSN
);
4546 static void tcg_macro_func(TCGContext
*s
, int macro_id
, const int *dead_args
)
/*
 * Core translation loop: disassemble guest SPARC instructions into TCG
 * micro-ops, stopping when the opcode buffer is nearly full, a page
 * boundary is reached, the flow is no longer sequential, or
 * single-stepping is enabled.
 *
 * spc != 0 selects "search PC" mode: per-instruction PC/NPC values are
 * recorded (gen_opc_pc/gen_opc_npc) so a host PC can later be mapped
 * back to a guest PC.
 *
 * NOTE(review): several source lines of this function are missing from
 * this excerpt (pc_start/last_pc initialisation, the loop head, and the
 * epilogue/return); comments below describe only the visible code.
 */
4550 static inline int gen_intermediate_code_internal(TranslationBlock
* tb
,
4551 int spc
, CPUSPARCState
*env
)
4553 target_ulong pc_start
, last_pc
;
4554 uint16_t *gen_opc_end
;
4555 DisasContext dc1
, *dc
= &dc1
;
/* Start from a zeroed DisasContext; the next-PC comes from the TB. */
4558 memset(dc
, 0, sizeof(DisasContext
));
4563 dc
->npc
= (target_ulong
) tb
->cs_base
;
4564 dc
->mem_idx
= cpu_mmu_index(env
);
4565 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
/* Leave room in the micro-op buffer; the loop test below enforces it. */
4566 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4568 cpu_tmp0
= tcg_temp_new(TCG_TYPE_TL
);
/* Emit a debug trap when translation reaches a configured breakpoint
   (skipped when the breakpoint is at the very start of the TB). */
4571 if (env
->nb_breakpoints
> 0) {
4572 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
4573 if (env
->breakpoints
[j
] == dc
->pc
) {
4574 if (dc
->pc
!= pc_start
)
4576 tcg_gen_helper_0_0(helper_debug
);
/* spc mode: record PC/NPC for every generated micro-op position. */
4585 fprintf(logfile
, "Search PC...\n");
4586 j
= gen_opc_ptr
- gen_opc_buf
;
4590 gen_opc_instr_start
[lj
++] = 0;
4591 gen_opc_pc
[lj
] = dc
->pc
;
4592 gen_opc_npc
[lj
] = dc
->npc
;
4593 gen_opc_instr_start
[lj
] = 1;
/* Translate one guest instruction; dc->pc/dc->npc advance, or become
   DYNAMIC_PC / JUMP_PC when the next PC is not statically known. */
4597 disas_sparc_insn(dc
);
4601 /* if the next PC is different, we abort now */
4602 if (dc
->pc
!= (last_pc
+ 4))
4604 /* if we reach a page boundary, we stop generation so that the
4605 PC of a TT_TFAULT exception is always in the right page */
4606 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4608 /* if single step mode, we generate only one instruction and
4609 generate an exception */
4610 if (env
->singlestep_enabled
) {
/* Keep translating while micro-op space remains and we stay well
   inside one guest page (32-byte safety margin). */
4615 } while ((gen_opc_ptr
< gen_opc_end
) &&
4616 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32));
/* Epilogue: when both PC and NPC are statically known we can chain
   directly to the successor TB; otherwise the generated code has
   already computed them dynamically. */
4620 if (dc
->pc
!= DYNAMIC_PC
&&
4621 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
4622 /* static PC and NPC: we can use direct chaining */
4623 gen_branch(dc
, dc
->pc
, dc
->npc
);
4625 if (dc
->pc
!= DYNAMIC_PC
)
4631 *gen_opc_ptr
= INDEX_op_end
;
4633 j
= gen_opc_ptr
- gen_opc_buf
;
4636 gen_opc_instr_start
[lj
++] = 0;
/* Preserve the two possible static targets of a conditional delay
   slot (JUMP_PC) for the search-PC machinery. */
4642 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
4643 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
/* Bytes of guest code covered by this TB. */
4645 tb
->size
= last_pc
+ 4 - pc_start
;
/* Optional logging: disassemble the guest code just translated. */
4648 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
4649 fprintf(logfile
, "--------------\n");
4650 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
4651 target_disas(logfile
, pc_start
, last_pc
+ 4 - pc_start
, 0);
4652 fprintf(logfile
, "\n");
/* Translate a TB for execution: spc = 0, so no per-instruction
   search-PC bookkeeping is recorded. */
4658 int gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
4660 return gen_intermediate_code_internal(tb
, 0, env
);
/* Re-translate a TB to recover guest PC state: spc = 1 makes the
   translator record per-instruction PC/NPC values. */
4663 int gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
4665 return gen_intermediate_code_internal(tb
, 1, env
);
/*
 * Reset the CPU to its architectural power-on state.
 * NOTE(review): some lines of this function (and the #else/#endif
 * structure of its conditional blocks) are missing from this excerpt.
 */
4668 void cpu_reset(CPUSPARCState
*env
)
/* Point the window pointer at the current register window
   (16 registers per window inside regbase). */
4673 env
->regwptr
= env
->regbase
+ (env
->cwp
* 16);
/* User-mode emulation: start in a permissive, ready-to-run state. */
4674 #if defined(CONFIG_USER_ONLY)
4675 env
->user_mode_only
= 1;
4676 #ifdef TARGET_SPARC64
4677 env
->cleanwin
= NWINDOWS
- 2;
4678 env
->cansave
= NWINDOWS
- 2;
4679 env
->pstate
= PS_RMO
| PS_PEF
| PS_IE
;
4680 env
->asi
= 0x82; // Primary no-fault
/* System emulation (sparc64): privileged state, boot PC in ROM. */
4686 #ifdef TARGET_SPARC64
4687 env
->pstate
= PS_PRIV
;
4688 env
->hpstate
= HS_PRIV
;
4689 env
->pc
= 0x1fff0000000ULL
;
4690 env
->tsptr
= &env
->ts
[env
->tl
];
/* sparc32: clear the MMU enable / no-fault bits, then apply the
   model-specific mmu_bm bits from the CPU descriptor. */
4693 env
->mmuregs
[0] &= ~(MMU_E
| MMU_NF
);
4694 env
->mmuregs
[0] |= env
->mmu_bm
;
/* SPARC executes with a PC/NPC pair; after reset NPC is the next word. */
4696 env
->npc
= env
->pc
+ 4;
/*
 * Allocate and initialise a CPUSPARCState for the named CPU model:
 * copy the model's IU/FPU version words and (sparc32) MMU masks into
 * the new state, then register the TCG global variables used by the
 * translator (env pointer, register-window pointer, T0-T2, condition
 * codes, psr, and globals g1-g7).
 * NOTE(review): error handling, the reset call and the return
 * statement are missing from this excerpt.
 */
4700 CPUSPARCState
*cpu_sparc_init(const char *cpu_model
)
4703 const sparc_def_t
*def
;
4706 static const char * const gregnames
[8] = {
4707 NULL
, // g0 not used
/* Case-insensitive lookup of the model descriptor by name. */
4717 def
= cpu_sparc_find_by_name(cpu_model
);
4721 env
= qemu_mallocz(sizeof(CPUSPARCState
));
4725 env
->cpu_model_str
= cpu_model
;
4726 env
->version
= def
->iu_version
;
4727 env
->fsr
= def
->fpu_version
;
/* sparc32 only: per-model MMU register masks and version bits. */
4728 #if !defined(TARGET_SPARC64)
4729 env
->mmu_bm
= def
->mmu_bm
;
4730 env
->mmu_ctpr_mask
= def
->mmu_ctpr_mask
;
4731 env
->mmu_cxr_mask
= def
->mmu_cxr_mask
;
4732 env
->mmu_sfsr_mask
= def
->mmu_sfsr_mask
;
4733 env
->mmu_trcr_mask
= def
->mmu_trcr_mask
;
4734 env
->mmuregs
[0] |= def
->mmu_version
;
4735 cpu_sparc_set_id(env
, 0);
4738 /* init various static tables */
/* TCG global registration; presumably guarded to run only once —
   the guard lines are not visible in this excerpt. */
4742 tcg_set_macro_func(&tcg_ctx
, tcg_macro_func
);
4743 cpu_env
= tcg_global_reg_new(TCG_TYPE_PTR
, TCG_AREG0
, "env");
4744 cpu_regwptr
= tcg_global_mem_new(TCG_TYPE_PTR
, TCG_AREG0
,
4745 offsetof(CPUState
, regwptr
),
4747 //#if TARGET_LONG_BITS > HOST_LONG_BITS
/* sparc64 target: T0-T2 live in CPUState memory slots (plus xcc);
   otherwise they are mapped to host registers AREG1-AREG3. */
4748 #ifdef TARGET_SPARC64
4749 cpu_T
[0] = tcg_global_mem_new(TCG_TYPE_TL
,
4750 TCG_AREG0
, offsetof(CPUState
, t0
), "T0");
4751 cpu_T
[1] = tcg_global_mem_new(TCG_TYPE_TL
,
4752 TCG_AREG0
, offsetof(CPUState
, t1
), "T1");
4753 cpu_T
[2] = tcg_global_mem_new(TCG_TYPE_TL
,
4754 TCG_AREG0
, offsetof(CPUState
, t2
), "T2");
4755 cpu_xcc
= tcg_global_mem_new(TCG_TYPE_I32
,
4756 TCG_AREG0
, offsetof(CPUState
, xcc
),
4759 cpu_T
[0] = tcg_global_reg_new(TCG_TYPE_TL
, TCG_AREG1
, "T0");
4760 cpu_T
[1] = tcg_global_reg_new(TCG_TYPE_TL
, TCG_AREG2
, "T1");
4761 cpu_T
[2] = tcg_global_reg_new(TCG_TYPE_TL
, TCG_AREG3
, "T2");
4763 cpu_cc_src
= tcg_global_mem_new(TCG_TYPE_TL
,
4764 TCG_AREG0
, offsetof(CPUState
, cc_src
),
4766 cpu_cc_dst
= tcg_global_mem_new(TCG_TYPE_TL
,
4767 TCG_AREG0
, offsetof(CPUState
, cc_dst
),
4769 cpu_psr
= tcg_global_mem_new(TCG_TYPE_I32
,
4770 TCG_AREG0
, offsetof(CPUState
, psr
),
/* g0 is the hardwired zero register, so only g1..g7 get a TCG global
   (gregnames[0] is NULL above). */
4772 for (i
= 1; i
< 8; i
++)
4773 cpu_gregs
[i
] = tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4774 offsetof(CPUState
, gregs
[i
]),
/* Record this CPU's id for sparc32 SMP: bits 27:24 of MXCC register 7
   are set to (cpu + 8) & 0xf.  The +8 bias presumably matches MBus
   module numbering — TODO confirm.  No-op for sparc64 (the #else side
   is not visible in this excerpt). */
4783 void cpu_sparc_set_id(CPUSPARCState
*env
, unsigned int cpu
)
4785 #if !defined(TARGET_SPARC64)
4786 env
->mxccregs
[7] = ((cpu
+ 8) & 0xf) << 24;
/* Table of known CPU models: name plus IU/FPU/MMU version words and,
   for sparc32 models, the per-model MMU register masks.  Searched by
   name in cpu_sparc_find_by_name() and listed by sparc_cpu_list(). */
4790 static const sparc_def_t sparc_defs
[] = {
4791 #ifdef TARGET_SPARC64
4793 .name
= "Fujitsu Sparc64",
4794 .iu_version
= ((0x04ULL
<< 48) | (0x02ULL
<< 32) | (0ULL << 24)
4795 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4796 .fpu_version
= 0x00000000,
4800 .name
= "Fujitsu Sparc64 III",
4801 .iu_version
= ((0x04ULL
<< 48) | (0x03ULL
<< 32) | (0ULL << 24)
4802 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4803 .fpu_version
= 0x00000000,
4807 .name
= "Fujitsu Sparc64 IV",
4808 .iu_version
= ((0x04ULL
<< 48) | (0x04ULL
<< 32) | (0ULL << 24)
4809 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4810 .fpu_version
= 0x00000000,
4814 .name
= "Fujitsu Sparc64 V",
4815 .iu_version
= ((0x04ULL
<< 48) | (0x05ULL
<< 32) | (0x51ULL
<< 24)
4816 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4817 .fpu_version
= 0x00000000,
4821 .name
= "TI UltraSparc I",
4822 .iu_version
= ((0x17ULL
<< 48) | (0x10ULL
<< 32) | (0x40ULL
<< 24)
4823 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4824 .fpu_version
= 0x00000000,
4828 .name
= "TI UltraSparc II",
4829 .iu_version
= ((0x17ULL
<< 48) | (0x11ULL
<< 32) | (0x20ULL
<< 24)
4830 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4831 .fpu_version
= 0x00000000,
4835 .name
= "TI UltraSparc IIi",
4836 .iu_version
= ((0x17ULL
<< 48) | (0x12ULL
<< 32) | (0x91ULL
<< 24)
4837 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4838 .fpu_version
= 0x00000000,
4842 .name
= "TI UltraSparc IIe",
4843 .iu_version
= ((0x17ULL
<< 48) | (0x13ULL
<< 32) | (0x14ULL
<< 24)
4844 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4845 .fpu_version
= 0x00000000,
4849 .name
= "Sun UltraSparc III",
4850 .iu_version
= ((0x3eULL
<< 48) | (0x14ULL
<< 32) | (0x34ULL
<< 24)
4851 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4852 .fpu_version
= 0x00000000,
4856 .name
= "Sun UltraSparc III Cu",
4857 .iu_version
= ((0x3eULL
<< 48) | (0x15ULL
<< 32) | (0x41ULL
<< 24)
4858 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4859 .fpu_version
= 0x00000000,
4863 .name
= "Sun UltraSparc IIIi",
4864 .iu_version
= ((0x3eULL
<< 48) | (0x16ULL
<< 32) | (0x34ULL
<< 24)
4865 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4866 .fpu_version
= 0x00000000,
4870 .name
= "Sun UltraSparc IV",
4871 .iu_version
= ((0x3eULL
<< 48) | (0x18ULL
<< 32) | (0x31ULL
<< 24)
4872 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4873 .fpu_version
= 0x00000000,
4877 .name
= "Sun UltraSparc IV+",
4878 .iu_version
= ((0x3eULL
<< 48) | (0x19ULL
<< 32) | (0x22ULL
<< 24)
4879 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4880 .fpu_version
= 0x00000000,
4884 .name
= "Sun UltraSparc IIIi+",
4885 .iu_version
= ((0x3eULL
<< 48) | (0x22ULL
<< 32) | (0ULL << 24)
4886 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4887 .fpu_version
= 0x00000000,
4891 .name
= "NEC UltraSparc I",
4892 .iu_version
= ((0x22ULL
<< 48) | (0x10ULL
<< 32) | (0x40ULL
<< 24)
4893 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4894 .fpu_version
= 0x00000000,
4899 .name
= "Fujitsu MB86900",
4900 .iu_version
= 0x00 << 24, /* Impl 0, ver 0 */
4901 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
4902 .mmu_version
= 0x00 << 24, /* Impl 0, ver 0 */
4903 .mmu_bm
= 0x00004000,
4904 .mmu_ctpr_mask
= 0x007ffff0,
4905 .mmu_cxr_mask
= 0x0000003f,
4906 .mmu_sfsr_mask
= 0xffffffff,
4907 .mmu_trcr_mask
= 0xffffffff,
4910 .name
= "Fujitsu MB86904",
4911 .iu_version
= 0x04 << 24, /* Impl 0, ver 4 */
4912 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
4913 .mmu_version
= 0x04 << 24, /* Impl 0, ver 4 */
4914 .mmu_bm
= 0x00004000,
4915 .mmu_ctpr_mask
= 0x00ffffc0,
4916 .mmu_cxr_mask
= 0x000000ff,
4917 .mmu_sfsr_mask
= 0x00016fff,
4918 .mmu_trcr_mask
= 0x00ffffff,
4921 .name
= "Fujitsu MB86907",
4922 .iu_version
= 0x05 << 24, /* Impl 0, ver 5 */
4923 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
4924 .mmu_version
= 0x05 << 24, /* Impl 0, ver 5 */
4925 .mmu_bm
= 0x00004000,
4926 .mmu_ctpr_mask
= 0xffffffc0,
4927 .mmu_cxr_mask
= 0x000000ff,
4928 .mmu_sfsr_mask
= 0x00016fff,
4929 .mmu_trcr_mask
= 0xffffffff,
4932 .name
= "LSI L64811",
4933 .iu_version
= 0x10 << 24, /* Impl 1, ver 0 */
4934 .fpu_version
= 1 << 17, /* FPU version 1 (LSI L64814) */
4935 .mmu_version
= 0x10 << 24,
4936 .mmu_bm
= 0x00004000,
4937 .mmu_ctpr_mask
= 0x007ffff0,
4938 .mmu_cxr_mask
= 0x0000003f,
4939 .mmu_sfsr_mask
= 0xffffffff,
4940 .mmu_trcr_mask
= 0xffffffff,
4943 .name
= "Cypress CY7C601",
4944 .iu_version
= 0x11 << 24, /* Impl 1, ver 1 */
4945 .fpu_version
= 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4946 .mmu_version
= 0x10 << 24,
4947 .mmu_bm
= 0x00004000,
4948 .mmu_ctpr_mask
= 0x007ffff0,
4949 .mmu_cxr_mask
= 0x0000003f,
4950 .mmu_sfsr_mask
= 0xffffffff,
4951 .mmu_trcr_mask
= 0xffffffff,
4954 .name
= "Cypress CY7C611",
4955 .iu_version
= 0x13 << 24, /* Impl 1, ver 3 */
4956 .fpu_version
= 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4957 .mmu_version
= 0x10 << 24,
4958 .mmu_bm
= 0x00004000,
4959 .mmu_ctpr_mask
= 0x007ffff0,
4960 .mmu_cxr_mask
= 0x0000003f,
4961 .mmu_sfsr_mask
= 0xffffffff,
4962 .mmu_trcr_mask
= 0xffffffff,
4965 .name
= "TI SuperSparc II",
4966 .iu_version
= 0x40000000,
4967 .fpu_version
= 0 << 17,
4968 .mmu_version
= 0x04000000,
4969 .mmu_bm
= 0x00002000,
4970 .mmu_ctpr_mask
= 0xffffffc0,
4971 .mmu_cxr_mask
= 0x0000ffff,
4972 .mmu_sfsr_mask
= 0xffffffff,
4973 .mmu_trcr_mask
= 0xffffffff,
4976 .name
= "TI MicroSparc I",
4977 .iu_version
= 0x41000000,
4978 .fpu_version
= 4 << 17,
4979 .mmu_version
= 0x41000000,
4980 .mmu_bm
= 0x00004000,
4981 .mmu_ctpr_mask
= 0x007ffff0,
4982 .mmu_cxr_mask
= 0x0000003f,
4983 .mmu_sfsr_mask
= 0x00016fff,
4984 .mmu_trcr_mask
= 0x0000003f,
4987 .name
= "TI MicroSparc II",
4988 .iu_version
= 0x42000000,
4989 .fpu_version
= 4 << 17,
4990 .mmu_version
= 0x02000000,
4991 .mmu_bm
= 0x00004000,
4992 .mmu_ctpr_mask
= 0x00ffffc0,
4993 .mmu_cxr_mask
= 0x000000ff,
4994 .mmu_sfsr_mask
= 0x00016fff,
4995 .mmu_trcr_mask
= 0x00ffffff,
4998 .name
= "TI MicroSparc IIep",
4999 .iu_version
= 0x42000000,
5000 .fpu_version
= 4 << 17,
5001 .mmu_version
= 0x04000000,
5002 .mmu_bm
= 0x00004000,
5003 .mmu_ctpr_mask
= 0x00ffffc0,
5004 .mmu_cxr_mask
= 0x000000ff,
5005 .mmu_sfsr_mask
= 0x00016bff,
5006 .mmu_trcr_mask
= 0x00ffffff,
5009 .name
= "TI SuperSparc 51",
5010 .iu_version
= 0x43000000,
5011 .fpu_version
= 0 << 17,
5012 .mmu_version
= 0x04000000,
5013 .mmu_bm
= 0x00002000,
5014 .mmu_ctpr_mask
= 0xffffffc0,
5015 .mmu_cxr_mask
= 0x0000ffff,
5016 .mmu_sfsr_mask
= 0xffffffff,
5017 .mmu_trcr_mask
= 0xffffffff,
5020 .name
= "TI SuperSparc 61",
5021 .iu_version
= 0x44000000,
5022 .fpu_version
= 0 << 17,
5023 .mmu_version
= 0x04000000,
5024 .mmu_bm
= 0x00002000,
5025 .mmu_ctpr_mask
= 0xffffffc0,
5026 .mmu_cxr_mask
= 0x0000ffff,
5027 .mmu_sfsr_mask
= 0xffffffff,
5028 .mmu_trcr_mask
= 0xffffffff,
5031 .name
= "Ross RT625",
5032 .iu_version
= 0x1e000000,
5033 .fpu_version
= 1 << 17,
5034 .mmu_version
= 0x1e000000,
5035 .mmu_bm
= 0x00004000,
5036 .mmu_ctpr_mask
= 0x007ffff0,
5037 .mmu_cxr_mask
= 0x0000003f,
5038 .mmu_sfsr_mask
= 0xffffffff,
5039 .mmu_trcr_mask
= 0xffffffff,
5042 .name
= "Ross RT620",
5043 .iu_version
= 0x1f000000,
5044 .fpu_version
= 1 << 17,
5045 .mmu_version
= 0x1f000000,
5046 .mmu_bm
= 0x00004000,
5047 .mmu_ctpr_mask
= 0x007ffff0,
5048 .mmu_cxr_mask
= 0x0000003f,
5049 .mmu_sfsr_mask
= 0xffffffff,
5050 .mmu_trcr_mask
= 0xffffffff,
5053 .name
= "BIT B5010",
5054 .iu_version
= 0x20000000,
5055 .fpu_version
= 0 << 17, /* B5010/B5110/B5120/B5210 */
5056 .mmu_version
= 0x20000000,
5057 .mmu_bm
= 0x00004000,
5058 .mmu_ctpr_mask
= 0x007ffff0,
5059 .mmu_cxr_mask
= 0x0000003f,
5060 .mmu_sfsr_mask
= 0xffffffff,
5061 .mmu_trcr_mask
= 0xffffffff,
5064 .name
= "Matsushita MN10501",
5065 .iu_version
= 0x50000000,
5066 .fpu_version
= 0 << 17,
5067 .mmu_version
= 0x50000000,
5068 .mmu_bm
= 0x00004000,
5069 .mmu_ctpr_mask
= 0x007ffff0,
5070 .mmu_cxr_mask
= 0x0000003f,
5071 .mmu_sfsr_mask
= 0xffffffff,
5072 .mmu_trcr_mask
= 0xffffffff,
5075 .name
= "Weitek W8601",
5076 .iu_version
= 0x90 << 24, /* Impl 9, ver 0 */
5077 .fpu_version
= 3 << 17, /* FPU version 3 (Weitek WTL3170/2) */
5078 .mmu_version
= 0x10 << 24,
5079 .mmu_bm
= 0x00004000,
5080 .mmu_ctpr_mask
= 0x007ffff0,
5081 .mmu_cxr_mask
= 0x0000003f,
5082 .mmu_sfsr_mask
= 0xffffffff,
5083 .mmu_trcr_mask
= 0xffffffff,
5087 .iu_version
= 0xf2000000,
5088 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
5089 .mmu_version
= 0xf2000000,
5090 .mmu_bm
= 0x00004000,
5091 .mmu_ctpr_mask
= 0x007ffff0,
5092 .mmu_cxr_mask
= 0x0000003f,
5093 .mmu_sfsr_mask
= 0xffffffff,
5094 .mmu_trcr_mask
= 0xffffffff,
5098 .iu_version
= 0xf3000000,
5099 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
5100 .mmu_version
= 0xf3000000,
5101 .mmu_bm
= 0x00004000,
5102 .mmu_ctpr_mask
= 0x007ffff0,
5103 .mmu_cxr_mask
= 0x0000003f,
5104 .mmu_sfsr_mask
= 0xffffffff,
5105 .mmu_trcr_mask
= 0xffffffff,
/* Linear, case-insensitive lookup of a CPU model in sparc_defs[].
   Returns the matching descriptor; the not-found return path is not
   visible in this excerpt. */
5110 static const sparc_def_t
*cpu_sparc_find_by_name(const unsigned char *name
)
5114 for (i
= 0; i
< sizeof(sparc_defs
) / sizeof(sparc_def_t
); i
++) {
5115 if (strcasecmp(name
, sparc_defs
[i
].name
) == 0) {
5116 return &sparc_defs
[i
];
/* Print one line per supported CPU model — name and IU/FPU/MMU
   version words — through the caller-supplied fprintf-like callback. */
5122 void sparc_cpu_list (FILE *f
, int (*cpu_fprintf
)(FILE *f
, const char *fmt
, ...))
5126 for (i
= 0; i
< sizeof(sparc_defs
) / sizeof(sparc_def_t
); i
++) {
5127 (*cpu_fprintf
)(f
, "Sparc %16s IU " TARGET_FMT_lx
" FPU %08x MMU %08x\n",
5129 sparc_defs
[i
].iu_version
,
5130 sparc_defs
[i
].fpu_version
,
5131 sparc_defs
[i
].mmu_version
);
/* GET_FLAG: render PSR condition bit `a` as character `b` when the bit
   is set in env->psr, '-' otherwise.  Relies on a variable `env` being
   in scope at the use site (see cpu_dump_state).  Arguments are
   parenthesized so compound expressions (e.g. GET_FLAG(X | Y, c))
   expand correctly (CERT PRE01-C). */
#define GET_FLAG(a, b) ((env->psr & (a)) ? (b) : '-')
/*
 * Dump CPU state to `f` for debugging: PC/NPC, global registers, the
 * current register window (%o/%l/%i), floating-point registers, and
 * control/status state (sparc64: pstate/ccr/asi/tl/fprs and window
 * counters; sparc32: psr flags and wim).
 * NOTE(review): the loop headers for the second %g row and parts of
 * the %f loop are missing from this excerpt.
 */
5137 void cpu_dump_state(CPUState
*env
, FILE *f
,
5138 int (*cpu_fprintf
)(FILE *f
, const char *fmt
, ...),
5143 cpu_fprintf(f
, "pc: " TARGET_FMT_lx
" npc: " TARGET_FMT_lx
"\n", env
->pc
, env
->npc
);
/* Globals are printed in two rows of four. */
5144 cpu_fprintf(f
, "General Registers:\n");
5145 for (i
= 0; i
< 4; i
++)
5146 cpu_fprintf(f
, "%%g%c: " TARGET_FMT_lx
"\t", i
+ '0', env
->gregs
[i
]);
5147 cpu_fprintf(f
, "\n");
5149 cpu_fprintf(f
, "%%g%c: " TARGET_FMT_lx
"\t", i
+ '0', env
->gregs
[i
]);
/* x indexes the window register groups: 0 = %o, 1 = %l, 2 = %i. */
5150 cpu_fprintf(f
, "\nCurrent Register Window:\n");
5151 for (x
= 0; x
< 3; x
++) {
5152 for (i
= 0; i
< 4; i
++)
5153 cpu_fprintf(f
, "%%%c%d: " TARGET_FMT_lx
"\t",
5154 (x
== 0 ? 'o' : (x
== 1 ? 'l' : 'i')), i
,
5155 env
->regwptr
[i
+ x
* 8]);
5156 cpu_fprintf(f
, "\n");
5158 cpu_fprintf(f
, "%%%c%d: " TARGET_FMT_lx
"\t",
5159 (x
== 0 ? 'o' : x
== 1 ? 'l' : 'i'), i
,
5160 env
->regwptr
[i
+ x
* 8]);
5161 cpu_fprintf(f
, "\n");
5163 cpu_fprintf(f
, "\nFloating Point Registers:\n");
5164 for (i
= 0; i
< 32; i
++) {
5166 cpu_fprintf(f
, "%%f%02d:", i
);
/* NOTE(review): "%016lf" is a dubious printf length/format pairing for
   an fpr element — verify the element type; "%016f" would be the
   conventional spelling for a double. */
5167 cpu_fprintf(f
, " %016lf", env
->fpr
[i
]);
5169 cpu_fprintf(f
, "\n");
5171 #ifdef TARGET_SPARC64
5172 cpu_fprintf(f
, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d fprs: %d\n",
5173 env
->pstate
, GET_CCR(env
), env
->asi
, env
->tl
, env
->fprs
);
5174 cpu_fprintf(f
, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
5175 env
->cansave
, env
->canrestore
, env
->otherwin
, env
->wstate
,
5176 env
->cleanwin
, NWINDOWS
- 1 - env
->cwp
);
/* sparc32: decode the PSR flag bits into single characters. */
5178 cpu_fprintf(f
, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env
),
5179 GET_FLAG(PSR_ZERO
, 'Z'), GET_FLAG(PSR_OVF
, 'V'),
5180 GET_FLAG(PSR_NEG
, 'N'), GET_FLAG(PSR_CARRY
, 'C'),
5181 env
->psrs
?'S':'-', env
->psrps
?'P':'-',
5182 env
->psret
?'E':'-', env
->wim
);
5184 cpu_fprintf(f
, "fsr: 0x%08x\n", GET_FSR32(env
));
/* User-mode emulation: guest addresses are host addresses, so the
   debug translation is trivial (body not visible in this excerpt). */
5187 #if defined(CONFIG_USER_ONLY)
5188 target_phys_addr_t
cpu_get_phys_page_debug(CPUState
*env
, target_ulong addr
)
/* MMU helper implemented elsewhere; declared locally instead of via a
   header. */
5194 extern int get_physical_address (CPUState
*env
, target_phys_addr_t
*physical
, int *prot
,
5195 int *access_index
, target_ulong address
, int rw
,
/* System emulation: translate a guest virtual address for the
   debugger, in kernel MMU context; tries rw = 2 first (presumably an
   instruction-fetch access — verify), then rw = 0 (data read). */
5198 target_phys_addr_t
cpu_get_phys_page_debug(CPUState
*env
, target_ulong addr
)
5200 target_phys_addr_t phys_addr
;
5201 int prot
, access_index
;
5203 if (get_physical_address(env
, &phys_addr
, &prot
, &access_index
, addr
, 2,
5204 MMU_KERNEL_IDX
) != 0)
5205 if (get_physical_address(env
, &phys_addr
, &prot
, &access_index
, addr
,
5206 0, MMU_KERNEL_IDX
) != 0)
/* Addresses mapping to unassigned I/O are rejected (the return path
   itself is not visible in this excerpt). */
5208 if (cpu_get_physical_page_desc(phys_addr
) == IO_MEM_UNASSIGNED
)
/* Runtime helper for the SPARC FLUSH instruction: discard any
   translated code derived from guest memory at addr.  NOTE(review):
   the 8-byte range presumably matches FLUSH's doubleword granularity
   — confirm against the architecture manual. */
5214 void helper_flush(target_ulong addr
)
5217 tb_invalidate_page_range(addr
, addr
+ 8);