4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
44 #define DYNAMIC_PC 1 /* dynamic pc value */
45 #define JUMP_PC 2 /* dynamic pc value which takes only two values
46 according to jump_pc[T2] */
48 /* global register indexes */
/* TCG globals shared by all generated code in this translator:
 * cpu_env = CPU state pointer, cpu_T[3] = the three scratch "T" values,
 * cpu_regwptr = pointer to the current register window. */
49 static TCGv cpu_env
, cpu_T
[3], cpu_regwptr
;
50 /* local register indexes (only used inside old micro ops) */
53 typedef struct DisasContext
{
54 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
55 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
56 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
60 struct TranslationBlock
*tb
;
63 typedef struct sparc_def_t sparc_def_t
;
66 const unsigned char *name
;
67 target_ulong iu_version
;
71 uint32_t mmu_ctpr_mask
;
72 uint32_t mmu_cxr_mask
;
73 uint32_t mmu_sfsr_mask
;
74 uint32_t mmu_trcr_mask
;
77 static const sparc_def_t
*cpu_sparc_find_by_name(const unsigned char *name
);
/* GET_FIELD(X, FROM, TO): extract bits FROM..TO of a 32-bit word, where
 * bit 0 is the MOST-significant bit (inverted numbering, per the note). */
82 // This function uses non-native bit order
83 #define GET_FIELD(X, FROM, TO) \
84 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
/* GET_FIELD_SP: same extraction but with SPARC-manual numbering,
 * i.e. bit 0 is the least-significant bit (2^0); delegates to GET_FIELD
 * with the endpoints mirrored. */
86 // This function uses the order in the manuals, i.e. bit 0 is 2^0
87 #define GET_FIELD_SP(X, FROM, TO) \
88 GET_FIELD(X, 31 - (TO), 31 - (FROM))
/* Signed variants: extract the field, then sign-extend it.  The second
 * argument passed to sign_extend is the field WIDTH in bits. */
90 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
91 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
/* Map an FP register number to the double (DFPREG) / quad (QFPREG)
 * register-file index.
 * NOTE(review): two alternative definition pairs appear back to back; the
 * surrounding #ifdef TARGET_SPARC64 / #else / #endif lines appear to have
 * been lost in extraction -- verify against the original file. */
95 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
96 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
99 #define DFPREG(r) (r & 0x1e)
100 #define QFPREG(r) (r & 0x1c)
/* Sign-extend x using a shift-up/shift-down pair.
 * NOTE(review): callers (GET_FIELDs) pass the field WIDTH, but the visible
 * body shifts by 'len' directly; a "len = 32 - len;" rescaling line was
 * likely dropped by extraction -- confirm.  Also note that left-shifting a
 * negative int is undefined behavior in strict C (works on the compilers
 * this code targets). */
103 static int sign_extend(int x
, int len
)
106 return (x
<< len
) >> len
;
/* True when the current instruction's immediate bit (bit 13) is set;
 * relies on a variable named 'insn' being in scope at the use site. */
109 #define IS_IMM (insn & (1<<13))
111 static void disas_sparc_insn(DisasContext
* dc
);
113 #ifdef TARGET_SPARC64
114 #define GEN32(func, NAME) \
115 static GenOpFunc * const NAME ## _table [64] = { \
116 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
117 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
118 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
119 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
120 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
121 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
122 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
123 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
124 NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0, \
125 NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0, \
126 NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0, \
127 NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0, \
129 static inline void func(int n) \
131 NAME ## _table[n](); \
134 #define GEN32(func, NAME) \
135 static GenOpFunc *const NAME ## _table [32] = { \
136 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
137 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
138 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
139 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
140 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
141 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
142 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
143 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
145 static inline void func(int n) \
147 NAME ## _table[n](); \
151 /* floating point registers moves */
152 GEN32(gen_op_load_fpr_FT0
, gen_op_load_fpr_FT0_fprf
);
153 GEN32(gen_op_load_fpr_FT1
, gen_op_load_fpr_FT1_fprf
);
154 GEN32(gen_op_store_FT0_fpr
, gen_op_store_FT0_fpr_fprf
);
155 GEN32(gen_op_store_FT1_fpr
, gen_op_store_FT1_fpr_fprf
);
157 GEN32(gen_op_load_fpr_DT0
, gen_op_load_fpr_DT0_fprf
);
158 GEN32(gen_op_load_fpr_DT1
, gen_op_load_fpr_DT1_fprf
);
159 GEN32(gen_op_store_DT0_fpr
, gen_op_store_DT0_fpr_fprf
);
160 GEN32(gen_op_store_DT1_fpr
, gen_op_store_DT1_fpr_fprf
);
162 #if defined(CONFIG_USER_ONLY)
163 GEN32(gen_op_load_fpr_QT0
, gen_op_load_fpr_QT0_fprf
);
164 GEN32(gen_op_load_fpr_QT1
, gen_op_load_fpr_QT1_fprf
);
165 GEN32(gen_op_store_QT0_fpr
, gen_op_store_QT0_fpr_fprf
);
166 GEN32(gen_op_store_QT1_fpr
, gen_op_store_QT1_fpr_fprf
);
170 #ifdef CONFIG_USER_ONLY
171 #define supervisor(dc) 0
172 #ifdef TARGET_SPARC64
173 #define hypervisor(dc) 0
175 #define gen_op_ldst(name) gen_op_##name##_raw()
177 #define supervisor(dc) (dc->mem_idx >= 1)
178 #ifdef TARGET_SPARC64
179 #define hypervisor(dc) (dc->mem_idx == 2)
180 #define OP_LD_TABLE(width) \
181 static GenOpFunc * const gen_op_##width[] = { \
182 &gen_op_##width##_user, \
183 &gen_op_##width##_kernel, \
184 &gen_op_##width##_hypv, \
187 #define OP_LD_TABLE(width) \
188 static GenOpFunc * const gen_op_##width[] = { \
189 &gen_op_##width##_user, \
190 &gen_op_##width##_kernel, \
193 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
196 #ifndef CONFIG_USER_ONLY
199 #endif /* __i386__ */
207 #define ABI32_MASK(addr) tcg_gen_andi_i64(addr, addr, 0xffffffffULL);
209 #define ABI32_MASK(addr)
/* Load the 32-bit immediate 'val' into scratch value T1. */
212 static inline void gen_movl_simm_T1(int32_t val
)
214 tcg_gen_movi_tl(cpu_T
[1], val
);
/* Copy SPARC register 'reg' into TCG value 'tn': %g0 reads as zero,
 * other globals come from CPUState.gregs[], and windowed registers
 * (reg >= 8) are loaded indirectly through regwptr.
 * NOTE(review): the if/else lines selecting between these three cases
 * appear to have been dropped by extraction -- only the three bodies are
 * visible here. */
217 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
220 tcg_gen_movi_tl(tn
, 0);
222 tcg_gen_ld_tl(tn
, cpu_env
, offsetof(CPUState
, gregs
[reg
]));
224 tcg_gen_ld_ptr(cpu_regwptr
, cpu_env
, offsetof(CPUState
, regwptr
)); // XXX
225 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
/* Convenience wrapper: load SPARC register 'reg' into T0. */
229 static inline void gen_movl_reg_T0(int reg
)
231 gen_movl_reg_TN(reg
, cpu_T
[0]);
/* Convenience wrapper: load SPARC register 'reg' into T1. */
234 static inline void gen_movl_reg_T1(int reg
)
236 gen_movl_reg_TN(reg
, cpu_T
[1]);
/* Convenience wrapper: load SPARC register 'reg' into T2. */
240 static inline void gen_movl_reg_T2(int reg
)
242 gen_movl_reg_TN(reg
, cpu_T
[2]);
245 #endif /* __i386__ */
/* Store TCG value 'tn' into SPARC register 'reg': globals go to
 * CPUState.gregs[], windowed registers (reg >= 8) through regwptr.
 * NOTE(review): the guard lines (e.g. the %g0 discard case and the
 * if/else between globals and windowed registers) appear to have been
 * dropped by extraction -- only the store bodies are visible. */
246 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
251 tcg_gen_st_tl(tn
, cpu_env
, offsetof(CPUState
, gregs
[reg
]));
253 tcg_gen_ld_ptr(cpu_regwptr
, cpu_env
, offsetof(CPUState
, regwptr
)); // XXX
254 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
/* Convenience wrapper: store T0 into SPARC register 'reg'. */
258 static inline void gen_movl_T0_reg(int reg
)
260 gen_movl_TN_reg(reg
, cpu_T
[0]);
/* Convenience wrapper: store T1 into SPARC register 'reg'. */
263 static inline void gen_movl_T1_reg(int reg
)
265 gen_movl_TN_reg(reg
, cpu_T
[1]);
/* Load a 32-bit field of the CPU state at 'offset' into T0. */
268 static inline void gen_op_movl_T0_env(size_t offset
)
270 tcg_gen_ld_i32(cpu_T
[0], cpu_env
, offset
);
/* Store T0 into a 32-bit field of the CPU state at 'offset'. */
273 static inline void gen_op_movl_env_T0(size_t offset
)
275 tcg_gen_st_i32(cpu_T
[0], cpu_env
, offset
);
/* Load a target_ulong-sized field of the CPU state at 'offset' into T0. */
278 static inline void gen_op_movtl_T0_env(size_t offset
)
280 tcg_gen_ld_tl(cpu_T
[0], cpu_env
, offset
);
/* Store T0 into a target_ulong-sized field of the CPU state at 'offset'. */
283 static inline void gen_op_movtl_env_T0(size_t offset
)
285 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offset
);
/* T0 = T0 + T1. */
288 static inline void gen_op_add_T1_T0(void)
290 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
/* T0 = T0 | T1. */
293 static inline void gen_op_or_T1_T0(void)
295 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
/* T0 = T0 ^ T1. */
298 static inline void gen_op_xor_T1_T0(void)
300 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
/* Emit code storing the immediate 'pc' into CPUState.pc. */
303 static inline void gen_jmp_im(target_ulong pc
)
305 tcg_gen_movi_tl(cpu_tmp0
, pc
);
306 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUState
, pc
));
/* Emit code storing the immediate 'npc' into CPUState.npc. */
309 static inline void gen_movl_npc_im(target_ulong npc
)
311 tcg_gen_movi_tl(cpu_tmp0
, npc
);
312 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUState
, npc
));
/* End the TB jumping to (pc, npc): if both land on the same page as the
 * current TB, chain directly with goto_tb/exit_tb; otherwise fall back to
 * storing npc and exiting unchained.
 * NOTE(review): 'tb' is used before any visible assignment -- a
 * "tb = s->tb;" line (and the gen_jmp_im(pc) calls plus the else-branch
 * body) appear to have been dropped by extraction; verify against the
 * original before relying on this text. */
315 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
316 target_ulong pc
, target_ulong npc
)
318 TranslationBlock
*tb
;
321 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
322 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
)) {
323 /* jump to same page: we can use a direct jump */
324 tcg_gen_goto_tb(tb_num
);
326 gen_movl_npc_im(npc
);
327 tcg_gen_exit_tb((long)tb
+ tb_num
);
329 /* jump to another page: currently not optimized */
331 gen_movl_npc_im(npc
);
/* reg = N (negative) flag, extracted from PSR/CCR bit 23 of 'src'. */
337 static inline void gen_mov_reg_N(TCGv reg
, TCGv src
)
339 tcg_gen_shri_i32(reg
, src
, 23);
340 tcg_gen_andi_tl(reg
, reg
, 0x1);
/* reg = Z (zero) flag, extracted from PSR/CCR bit 22 of 'src'. */
343 static inline void gen_mov_reg_Z(TCGv reg
, TCGv src
)
345 tcg_gen_shri_i32(reg
, src
, 22);
346 tcg_gen_andi_tl(reg
, reg
, 0x1);
/* reg = V (overflow) flag, extracted from PSR/CCR bit 21 of 'src'. */
349 static inline void gen_mov_reg_V(TCGv reg
, TCGv src
)
351 tcg_gen_shri_i32(reg
, src
, 21);
352 tcg_gen_andi_tl(reg
, reg
, 0x1);
/* reg = C (carry) flag, extracted from PSR/CCR bit 20 of 'src'. */
355 static inline void gen_mov_reg_C(TCGv reg
, TCGv src
)
357 tcg_gen_shri_i32(reg
, src
, 20);
358 tcg_gen_andi_tl(reg
, reg
, 0x1);
/* Branch-always: dst = 1. */
362 static inline void gen_op_eval_ba(TCGv dst
)
364 tcg_gen_movi_tl(dst
, 1);
/* Branch-equal: dst = Z. */
368 static inline void gen_op_eval_be(TCGv dst
, TCGv src
)
370 gen_mov_reg_Z(dst
, src
);
/* Branch-less-or-equal: dst = Z | (N ^ V).
 * NOTE(review): the "TCGv r_flag;" declaration line appears to have been
 * dropped by extraction. */
374 static inline void gen_op_eval_ble(TCGv dst
, TCGv src
)
378 r_flag
= tcg_temp_new(TCG_TYPE_TL
);
379 gen_mov_reg_N(r_flag
, src
);
380 gen_mov_reg_V(dst
, src
);
381 tcg_gen_xor_tl(dst
, dst
, r_flag
);
382 gen_mov_reg_Z(r_flag
, src
);
383 tcg_gen_or_tl(dst
, dst
, r_flag
);
/* Branch-less: dst = N ^ V.
 * NOTE(review): the "TCGv r_V;" declaration line appears to be missing. */
387 static inline void gen_op_eval_bl(TCGv dst
, TCGv src
)
391 r_V
= tcg_temp_new(TCG_TYPE_TL
);
392 gen_mov_reg_V(r_V
, src
);
393 gen_mov_reg_N(dst
, src
);
394 tcg_gen_xor_tl(dst
, dst
, r_V
);
/* Branch-less-or-equal-unsigned: dst = C | Z.
 * NOTE(review): the "TCGv r_Z;" declaration line appears to be missing. */
398 static inline void gen_op_eval_bleu(TCGv dst
, TCGv src
)
402 r_Z
= tcg_temp_new(TCG_TYPE_TL
);
403 gen_mov_reg_Z(r_Z
, src
);
404 gen_mov_reg_C(dst
, src
);
405 tcg_gen_or_tl(dst
, dst
, r_Z
);
/* Branch-carry-set: dst = C. */
409 static inline void gen_op_eval_bcs(TCGv dst
, TCGv src
)
411 gen_mov_reg_C(dst
, src
);
/* Branch-overflow-set: dst = V. */
415 static inline void gen_op_eval_bvs(TCGv dst
, TCGv src
)
417 gen_mov_reg_V(dst
, src
);
/* Branch-never: dst = 0. */
421 static inline void gen_op_eval_bn(TCGv dst
)
423 tcg_gen_movi_tl(dst
, 0);
/* Branch-negative: dst = N. */
427 static inline void gen_op_eval_bneg(TCGv dst
, TCGv src
)
429 gen_mov_reg_N(dst
, src
);
/* Branch-not-equal: dst = !Z. */
433 static inline void gen_op_eval_bne(TCGv dst
, TCGv src
)
435 gen_mov_reg_Z(dst
, src
);
436 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* Branch-greater: dst = !(Z | (N ^ V)).
 * NOTE(review): the "TCGv r_flag;" declaration line appears to be missing. */
440 static inline void gen_op_eval_bg(TCGv dst
, TCGv src
)
444 r_flag
= tcg_temp_new(TCG_TYPE_TL
);
445 gen_mov_reg_N(r_flag
, src
);
446 gen_mov_reg_V(dst
, src
);
447 tcg_gen_xor_tl(dst
, dst
, r_flag
);
448 gen_mov_reg_Z(r_flag
, src
);
449 tcg_gen_or_tl(dst
, dst
, r_flag
);
450 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* Branch-greater-or-equal: dst = !(N ^ V).
 * NOTE(review): the "TCGv r_V;" declaration line appears to be missing. */
454 static inline void gen_op_eval_bge(TCGv dst
, TCGv src
)
458 r_V
= tcg_temp_new(TCG_TYPE_TL
);
459 gen_mov_reg_V(r_V
, src
);
460 gen_mov_reg_N(dst
, src
);
461 tcg_gen_xor_tl(dst
, dst
, r_V
);
462 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* Branch-greater-unsigned: dst = !(C | Z).
 * NOTE(review): the "TCGv r_Z;" declaration line appears to be missing. */
466 static inline void gen_op_eval_bgu(TCGv dst
, TCGv src
)
470 r_Z
= tcg_temp_new(TCG_TYPE_TL
);
471 gen_mov_reg_Z(r_Z
, src
);
472 gen_mov_reg_C(dst
, src
);
473 tcg_gen_or_tl(dst
, dst
, r_Z
);
474 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* Branch-carry-clear: dst = !C. */
478 static inline void gen_op_eval_bcc(TCGv dst
, TCGv src
)
480 gen_mov_reg_C(dst
, src
);
481 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* Branch-positive: dst = !N. */
485 static inline void gen_op_eval_bpos(TCGv dst
, TCGv src
)
487 gen_mov_reg_N(dst
, src
);
488 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* Branch-overflow-clear: dst = !V. */
492 static inline void gen_op_eval_bvc(TCGv dst
, TCGv src
)
494 gen_mov_reg_V(dst
, src
);
495 tcg_gen_xori_tl(dst
, dst
, 0x1);
499 FPSR bit field FCC1 | FCC0:
/* reg = FCC0 bit of the selected FSR condition-code field; fcc_offset
 * selects which fcc set (FCC0 sits at FSR bit 10 + fcc_offset). */
505 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
506 unsigned int fcc_offset
)
508 tcg_gen_shri_i32(reg
, src
, 10 + fcc_offset
);
509 tcg_gen_andi_tl(reg
, reg
, 0x1);
/* reg = FCC1 bit of the selected FSR condition-code field (FSR bit
 * 11 + fcc_offset). */
512 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
513 unsigned int fcc_offset
)
515 tcg_gen_shri_i32(reg
, src
, 11 + fcc_offset
);
516 tcg_gen_andi_tl(reg
, reg
, 0x1);
/* FP branch not-equal: dst = FCC0 | FCC1 (fcc != 0).
 * NOTE(review): the "TCGv r_fcc1;" declaration line appears to be
 * missing here and in the sibling fcc evaluators below. */
520 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
521 unsigned int fcc_offset
)
525 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
526 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
527 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
528 tcg_gen_or_tl(dst
, dst
, r_fcc1
);
/* FP branch less-or-greater: dst = FCC0 ^ FCC1 (fcc = 1 or 2). */
531 // 1 or 2: FCC0 ^ FCC1
532 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
533 unsigned int fcc_offset
)
537 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
538 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
539 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
540 tcg_gen_xor_tl(dst
, dst
, r_fcc1
);
/* FP branch unordered-or-less: dst = FCC0 (fcc = 1 or 3). */
544 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
545 unsigned int fcc_offset
)
547 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
/* FP branch less: dst = FCC0 & !FCC1 (fcc = 1). */
551 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
552 unsigned int fcc_offset
)
556 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
557 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
558 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
559 tcg_gen_xori_tl(r_fcc1
, r_fcc1
, 0x1);
560 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
/* FP branch unordered-or-greater: dst = FCC1 (fcc = 2 or 3). */
564 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
565 unsigned int fcc_offset
)
567 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
/* FP branch greater: dst = !FCC0 & FCC1 (fcc = 2). */
571 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
572 unsigned int fcc_offset
)
576 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
577 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
578 tcg_gen_xori_tl(dst
, dst
, 0x1);
579 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
580 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
/* FP branch unordered: dst = FCC0 & FCC1 (fcc = 3). */
584 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
585 unsigned int fcc_offset
)
589 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
590 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
591 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
592 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
/* FP branch equal: dst = !(FCC0 | FCC1) (fcc = 0). */
596 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
597 unsigned int fcc_offset
)
601 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
602 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
603 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
604 tcg_gen_or_tl(dst
, dst
, r_fcc1
);
605 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* FP branch unordered-or-equal: dst = !(FCC0 ^ FCC1) (fcc = 0 or 3). */
608 // 0 or 3: !(FCC0 ^ FCC1)
609 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
610 unsigned int fcc_offset
)
614 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
615 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
616 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
617 tcg_gen_xor_tl(dst
, dst
, r_fcc1
);
618 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* FP branch greater-or-equal: dst = !FCC0 (fcc = 0 or 2). */
622 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
623 unsigned int fcc_offset
)
625 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
626 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* FP branch unordered-or-greater-or-equal: dst = !(FCC0 & !FCC1). */
629 // !1: !(FCC0 & !FCC1)
630 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
631 unsigned int fcc_offset
)
635 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
636 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
637 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
638 tcg_gen_xori_tl(r_fcc1
, r_fcc1
, 0x1);
639 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
640 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* FP branch less-or-equal: dst = !FCC1 (fcc = 0 or 1). */
644 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
645 unsigned int fcc_offset
)
647 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
648 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* FP branch unordered-or-less-or-equal: dst = !(!FCC0 & FCC1). */
651 // !2: !(!FCC0 & FCC1)
652 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
653 unsigned int fcc_offset
)
657 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
658 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
659 tcg_gen_xori_tl(dst
, dst
, 0x1);
660 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
661 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
662 tcg_gen_xori_tl(dst
, dst
, 0x1);
/* FP branch ordered: dst = !(FCC0 & FCC1) (fcc != 3). */
665 // !3: !(FCC0 & FCC1)
666 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
667 unsigned int fcc_offset
)
671 r_fcc1
= tcg_temp_new(TCG_TYPE_TL
);
672 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
673 gen_mov_reg_FCC1(r_fcc1
, src
, fcc_offset
);
674 tcg_gen_and_tl(dst
, dst
, r_fcc1
);
675 tcg_gen_xori_tl(dst
, dst
, 0x1);
678 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
679 target_ulong pc2
, TCGv r_cond
)
684 l1
= gen_new_label();
685 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
686 tcg_gen_movi_tl(r_zero
, 0);
688 tcg_gen_brcond_tl(TCG_COND_EQ
, r_cond
, r_zero
, l1
);
690 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
693 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
696 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
697 target_ulong pc2
, TCGv r_cond
)
702 l1
= gen_new_label();
703 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
704 tcg_gen_movi_tl(r_zero
, 0);
706 tcg_gen_brcond_tl(TCG_COND_EQ
, r_cond
, r_zero
, l1
);
708 gen_goto_tb(dc
, 0, pc2
, pc1
);
711 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
714 static inline void gen_branch(DisasContext
*dc
, target_ulong pc
,
717 gen_goto_tb(dc
, 0, pc
, npc
);
720 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
726 l1
= gen_new_label();
727 l2
= gen_new_label();
728 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
729 tcg_gen_movi_tl(r_zero
, 0);
731 tcg_gen_brcond_tl(TCG_COND_EQ
, r_cond
, r_zero
, l1
);
733 gen_movl_npc_im(npc1
);
734 gen_op_jmp_label(l2
);
737 gen_movl_npc_im(npc2
);
741 /* call this function before using T2 as it may have been set for a jump */
/* If npc is the two-valued JUMP_PC state (condition held in T2),
 * materialize the branch now so T2 can be reused, and downgrade npc to
 * DYNAMIC_PC. */
741 /* call this function before using T2 as it may have been set for a jump */
742 static inline void flush_T2(DisasContext
* dc
)
744 if (dc
->npc
== JUMP_PC
) {
745 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_T
[2]);
746 dc
->npc
= DYNAMIC_PC
;
/* Make CPUState.npc match the translation-time npc: resolve a pending
 * JUMP_PC via the generic branch (npc becomes DYNAMIC_PC), or store the
 * static npc value; a npc already DYNAMIC_PC needs no store. */
750 static inline void save_npc(DisasContext
* dc
)
752 if (dc
->npc
== JUMP_PC
) {
753 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_T
[2]);
754 dc
->npc
= DYNAMIC_PC
;
755 } else if (dc
->npc
!= DYNAMIC_PC
) {
756 gen_movl_npc_im(dc
->npc
);
760 static inline void save_state(DisasContext
* dc
)
766 static inline void gen_mov_pc_npc(DisasContext
* dc
)
768 if (dc
->npc
== JUMP_PC
) {
769 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_T
[2]);
770 tcg_gen_ld_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, npc
));
771 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, pc
));
773 } else if (dc
->npc
== DYNAMIC_PC
) {
774 tcg_gen_ld_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, npc
));
775 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, pc
));
/* Advance to the next instruction at runtime: pc = npc; npc += 4. */
782 static inline void gen_op_next_insn(void)
784 tcg_gen_ld_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, npc
));
785 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, pc
));
786 tcg_gen_addi_tl(cpu_tmp0
, cpu_tmp0
, 4);
787 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, npc
));
790 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
794 r_src
= tcg_temp_new(TCG_TYPE_TL
);
795 #ifdef TARGET_SPARC64
797 tcg_gen_ld_i32(r_src
, cpu_env
, offsetof(CPUSPARCState
, xcc
));
799 tcg_gen_ld_i32(r_src
, cpu_env
, offsetof(CPUSPARCState
, psr
));
801 tcg_gen_ld_i32(r_src
, cpu_env
, offsetof(CPUSPARCState
, psr
));
805 gen_op_eval_bn(r_dst
);
808 gen_op_eval_be(r_dst
, r_src
);
811 gen_op_eval_ble(r_dst
, r_src
);
814 gen_op_eval_bl(r_dst
, r_src
);
817 gen_op_eval_bleu(r_dst
, r_src
);
820 gen_op_eval_bcs(r_dst
, r_src
);
823 gen_op_eval_bneg(r_dst
, r_src
);
826 gen_op_eval_bvs(r_dst
, r_src
);
829 gen_op_eval_ba(r_dst
);
832 gen_op_eval_bne(r_dst
, r_src
);
835 gen_op_eval_bg(r_dst
, r_src
);
838 gen_op_eval_bge(r_dst
, r_src
);
841 gen_op_eval_bgu(r_dst
, r_src
);
844 gen_op_eval_bcc(r_dst
, r_src
);
847 gen_op_eval_bpos(r_dst
, r_src
);
850 gen_op_eval_bvc(r_dst
, r_src
);
855 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
860 r_src
= tcg_temp_new(TCG_TYPE_TL
);
861 tcg_gen_ld_tl(r_src
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
881 gen_op_eval_bn(r_dst
);
884 gen_op_eval_fbne(r_dst
, r_src
, offset
);
887 gen_op_eval_fblg(r_dst
, r_src
, offset
);
890 gen_op_eval_fbul(r_dst
, r_src
, offset
);
893 gen_op_eval_fbl(r_dst
, r_src
, offset
);
896 gen_op_eval_fbug(r_dst
, r_src
, offset
);
899 gen_op_eval_fbg(r_dst
, r_src
, offset
);
902 gen_op_eval_fbu(r_dst
, r_src
, offset
);
905 gen_op_eval_ba(r_dst
);
908 gen_op_eval_fbe(r_dst
, r_src
, offset
);
911 gen_op_eval_fbue(r_dst
, r_src
, offset
);
914 gen_op_eval_fbge(r_dst
, r_src
, offset
);
917 gen_op_eval_fbuge(r_dst
, r_src
, offset
);
920 gen_op_eval_fble(r_dst
, r_src
, offset
);
923 gen_op_eval_fbule(r_dst
, r_src
, offset
);
926 gen_op_eval_fbo(r_dst
, r_src
, offset
);
931 #ifdef TARGET_SPARC64
933 static const int gen_tcg_cond_reg
[8] = {
944 static inline void gen_cond_reg(TCGv r_dst
, int cond
)
949 l1
= gen_new_label();
950 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
951 tcg_gen_movi_tl(r_zero
, 0);
952 tcg_gen_mov_tl(r_dst
, r_zero
);
953 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
954 tcg_gen_movi_tl(r_dst
, 1);
959 /* XXX: potentially incorrect if dynamic npc */
960 static void do_branch(DisasContext
* dc
, int32_t offset
, uint32_t insn
, int cc
)
962 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
963 target_ulong target
= dc
->pc
+ offset
;
966 /* unconditional not taken */
968 dc
->pc
= dc
->npc
+ 4;
969 dc
->npc
= dc
->pc
+ 4;
972 dc
->npc
= dc
->pc
+ 4;
974 } else if (cond
== 0x8) {
975 /* unconditional taken */
978 dc
->npc
= dc
->pc
+ 4;
985 gen_cond(cpu_T
[2], cc
, cond
);
987 gen_branch_a(dc
, target
, dc
->npc
, cpu_T
[2]);
991 dc
->jump_pc
[0] = target
;
992 dc
->jump_pc
[1] = dc
->npc
+ 4;
998 /* XXX: potentially incorrect if dynamic npc */
999 static void do_fbranch(DisasContext
* dc
, int32_t offset
, uint32_t insn
, int cc
)
1001 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1002 target_ulong target
= dc
->pc
+ offset
;
1005 /* unconditional not taken */
1007 dc
->pc
= dc
->npc
+ 4;
1008 dc
->npc
= dc
->pc
+ 4;
1011 dc
->npc
= dc
->pc
+ 4;
1013 } else if (cond
== 0x8) {
1014 /* unconditional taken */
1017 dc
->npc
= dc
->pc
+ 4;
1024 gen_fcond(cpu_T
[2], cc
, cond
);
1026 gen_branch_a(dc
, target
, dc
->npc
, cpu_T
[2]);
1030 dc
->jump_pc
[0] = target
;
1031 dc
->jump_pc
[1] = dc
->npc
+ 4;
1037 #ifdef TARGET_SPARC64
1038 /* XXX: potentially incorrect if dynamic npc */
1039 static void do_branch_reg(DisasContext
* dc
, int32_t offset
, uint32_t insn
)
1041 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1042 target_ulong target
= dc
->pc
+ offset
;
1045 gen_cond_reg(cpu_T
[2], cond
);
1047 gen_branch_a(dc
, target
, dc
->npc
, cpu_T
[2]);
1051 dc
->jump_pc
[0] = target
;
1052 dc
->jump_pc
[1] = dc
->npc
+ 4;
1057 static GenOpFunc
* const gen_fcmps
[4] = {
1064 static GenOpFunc
* const gen_fcmpd
[4] = {
1071 #if defined(CONFIG_USER_ONLY)
1072 static GenOpFunc
* const gen_fcmpq
[4] = {
1080 static GenOpFunc
* const gen_fcmpes
[4] = {
1087 static GenOpFunc
* const gen_fcmped
[4] = {
1094 #if defined(CONFIG_USER_ONLY)
1095 static GenOpFunc
* const gen_fcmpeq
[4] = {
1103 static inline void gen_op_fcmps(int fccno
)
1105 tcg_gen_helper_0_0(gen_fcmps
[fccno
]);
1108 static inline void gen_op_fcmpd(int fccno
)
1110 tcg_gen_helper_0_0(gen_fcmpd
[fccno
]);
1113 #if defined(CONFIG_USER_ONLY)
1114 static inline void gen_op_fcmpq(int fccno
)
1116 tcg_gen_helper_0_0(gen_fcmpq
[fccno
]);
1120 static inline void gen_op_fcmpes(int fccno
)
1122 tcg_gen_helper_0_0(gen_fcmpes
[fccno
]);
1125 static inline void gen_op_fcmped(int fccno
)
1127 tcg_gen_helper_0_0(gen_fcmped
[fccno
]);
1130 #if defined(CONFIG_USER_ONLY)
1131 static inline void gen_op_fcmpeq(int fccno
)
1133 tcg_gen_helper_0_0(gen_fcmpeq
[fccno
]);
1139 static inline void gen_op_fcmps(int fccno
)
1141 tcg_gen_helper_0_0(helper_fcmps
);
1144 static inline void gen_op_fcmpd(int fccno
)
1146 tcg_gen_helper_0_0(helper_fcmpd
);
1149 #if defined(CONFIG_USER_ONLY)
1150 static inline void gen_op_fcmpq(int fccno
)
1152 tcg_gen_helper_0_0(helper_fcmpq
);
1156 static inline void gen_op_fcmpes(int fccno
)
1158 tcg_gen_helper_0_0(helper_fcmpes
);
1161 static inline void gen_op_fcmped(int fccno
)
1163 tcg_gen_helper_0_0(helper_fcmped
);
1166 #if defined(CONFIG_USER_ONLY)
1167 static inline void gen_op_fcmpeq(int fccno
)
1169 tcg_gen_helper_0_0(helper_fcmpeq
);
/* Emit a call to the raise_exception helper with the given trap number.
 * NOTE(review): the "TCGv r_except;" declaration line appears to have
 * been dropped by extraction. */
1175 static inline void gen_op_exception(int exception
)
1179 r_except
= tcg_temp_new(TCG_TYPE_I32
);
1180 tcg_gen_movi_i32(r_except
, exception
);
1181 tcg_gen_helper_0_1(raise_exception
, r_except
);
/* Raise an FP exception: replace the FSR FTT field with 'fsr_flags',
 * then trap with TT_FP_EXCP. */
1184 static inline void gen_op_fpexception_im(int fsr_flags
)
1186 tcg_gen_ld_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
1187 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, ~FSR_FTT_MASK
);
1188 tcg_gen_ori_tl(cpu_tmp0
, cpu_tmp0
, fsr_flags
);
1189 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
1190 gen_op_exception(TT_FP_EXCP
);
1193 static int gen_trap_ifnofpu(DisasContext
* dc
)
1195 #if !defined(CONFIG_USER_ONLY)
1196 if (!dc
->fpu_enabled
) {
1198 gen_op_exception(TT_NFPU_INSN
);
/* Clear the FSR FTT and current-exception (CEXC) fields. */
1206 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1208 tcg_gen_ld_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
1209 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, ~(FSR_FTT_MASK
| FSR_CEXC_MASK
));
1210 tcg_gen_st_tl(cpu_tmp0
, cpu_env
, offsetof(CPUSPARCState
, fsr
));
/* Emit a call to the helper that clears host float-exception state
 * before an FP op. */
1213 static inline void gen_clear_float_exceptions(void)
1215 tcg_gen_helper_0_0(helper_clear_float_exceptions
);
1219 #ifdef TARGET_SPARC64
1220 static inline void gen_ld_asi(int insn
, int size
, int sign
)
1223 TCGv r_size
, r_sign
;
1225 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1226 r_sign
= tcg_temp_new(TCG_TYPE_I32
);
1227 tcg_gen_movi_i32(r_size
, size
);
1228 tcg_gen_movi_i32(r_sign
, sign
);
1230 offset
= GET_FIELD(insn
, 25, 31);
1231 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1232 tcg_gen_ld_i32(cpu_T
[1], cpu_env
, offsetof(CPUSPARCState
, asi
));
1234 asi
= GET_FIELD(insn
, 19, 26);
1235 tcg_gen_movi_i32(cpu_T
[1], asi
);
1237 tcg_gen_helper_1_4(helper_ld_asi
, cpu_T
[1], cpu_T
[0], cpu_T
[1], r_size
,
1241 static inline void gen_st_asi(int insn
, int size
)
1246 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1247 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1248 tcg_gen_movi_i32(r_size
, size
);
1250 offset
= GET_FIELD(insn
, 25, 31);
1251 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1252 tcg_gen_ld_i32(r_asi
, cpu_env
, offsetof(CPUSPARCState
, asi
));
1254 asi
= GET_FIELD(insn
, 19, 26);
1255 tcg_gen_movi_i32(r_asi
, asi
);
1257 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], cpu_T
[1], r_asi
, r_size
);
1260 static inline void gen_ldf_asi(int insn
, int size
, int rd
)
1263 TCGv r_asi
, r_size
, r_rd
;
1265 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1266 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1267 r_rd
= tcg_temp_new(TCG_TYPE_I32
);
1268 tcg_gen_movi_i32(r_size
, size
);
1269 tcg_gen_movi_i32(r_rd
, rd
);
1271 offset
= GET_FIELD(insn
, 25, 31);
1272 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1273 tcg_gen_ld_i32(r_asi
, cpu_env
, offsetof(CPUSPARCState
, asi
));
1275 asi
= GET_FIELD(insn
, 19, 26);
1276 tcg_gen_movi_i32(r_asi
, asi
);
1278 tcg_gen_helper_0_4(helper_ldf_asi
, cpu_T
[0], r_asi
, r_size
, r_rd
);
1281 static inline void gen_stf_asi(int insn
, int size
, int rd
)
1284 TCGv r_asi
, r_size
, r_rd
;
1286 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1287 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1288 r_rd
= tcg_temp_new(TCG_TYPE_I32
);
1289 tcg_gen_movi_i32(r_size
, size
);
1290 tcg_gen_movi_i32(r_rd
, rd
);
1292 offset
= GET_FIELD(insn
, 25, 31);
1293 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1294 tcg_gen_ld_i32(r_asi
, cpu_env
, offsetof(CPUSPARCState
, asi
));
1296 asi
= GET_FIELD(insn
, 19, 26);
1297 tcg_gen_movi_i32(r_asi
, asi
);
1299 tcg_gen_helper_0_4(helper_stf_asi
, cpu_T
[0], r_asi
, r_size
, r_rd
);
1302 static inline void gen_swap_asi(int insn
)
1305 TCGv r_size
, r_sign
, r_temp
;
1307 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1308 r_sign
= tcg_temp_new(TCG_TYPE_I32
);
1309 r_temp
= tcg_temp_new(TCG_TYPE_I32
);
1310 tcg_gen_movi_i32(r_size
, 4);
1311 tcg_gen_movi_i32(r_sign
, 0);
1313 offset
= GET_FIELD(insn
, 25, 31);
1314 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1315 tcg_gen_ld_i32(cpu_T
[1], cpu_env
, offsetof(CPUSPARCState
, asi
));
1317 asi
= GET_FIELD(insn
, 19, 26);
1318 tcg_gen_movi_i32(cpu_T
[1], asi
);
1320 tcg_gen_helper_1_4(helper_ld_asi
, r_temp
, cpu_T
[0], cpu_T
[1], r_size
,
1322 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], cpu_T
[1], r_size
, r_sign
);
1323 tcg_gen_mov_i32(cpu_T
[1], r_temp
);
1326 static inline void gen_ldda_asi(int insn
)
1329 TCGv r_size
, r_sign
, r_dword
;
1331 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1332 r_sign
= tcg_temp_new(TCG_TYPE_I32
);
1333 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
1334 tcg_gen_movi_i32(r_size
, 8);
1335 tcg_gen_movi_i32(r_sign
, 0);
1337 offset
= GET_FIELD(insn
, 25, 31);
1338 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1339 tcg_gen_ld_i32(cpu_T
[1], cpu_env
, offsetof(CPUSPARCState
, asi
));
1341 asi
= GET_FIELD(insn
, 19, 26);
1342 tcg_gen_movi_i32(cpu_T
[1], asi
);
1344 tcg_gen_helper_1_4(helper_ld_asi
, r_dword
, cpu_T
[0], cpu_T
[1], r_size
,
1346 tcg_gen_trunc_i64_i32(cpu_T
[0], r_dword
);
1347 tcg_gen_shri_i64(r_dword
, r_dword
, 32);
1348 tcg_gen_trunc_i64_i32(cpu_T
[1], r_dword
);
1351 static inline void gen_cas_asi(int insn
, int rd
)
1356 r_val1
= tcg_temp_new(TCG_TYPE_I32
);
1357 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1358 gen_movl_reg_TN(rd
, r_val1
);
1360 offset
= GET_FIELD(insn
, 25, 31);
1361 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1362 tcg_gen_ld_i32(r_asi
, cpu_env
, offsetof(CPUSPARCState
, asi
));
1364 asi
= GET_FIELD(insn
, 19, 26);
1365 tcg_gen_movi_i32(r_asi
, asi
);
1367 tcg_gen_helper_1_4(helper_cas_asi
, cpu_T
[1], cpu_T
[0], r_val1
, cpu_T
[1],
1371 static inline void gen_casx_asi(int insn
, int rd
)
1376 r_val1
= tcg_temp_new(TCG_TYPE_I64
);
1377 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1378 gen_movl_reg_TN(rd
, r_val1
);
1380 offset
= GET_FIELD(insn
, 25, 31);
1381 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
1382 tcg_gen_ld_i32(r_asi
, cpu_env
, offsetof(CPUSPARCState
, asi
));
1384 asi
= GET_FIELD(insn
, 19, 26);
1385 tcg_gen_movi_i32(r_asi
, asi
);
1387 tcg_gen_helper_1_4(helper_casx_asi
, cpu_T
[1], cpu_T
[0], r_val1
, cpu_T
[1],
1391 #elif !defined(CONFIG_USER_ONLY)
1393 static inline void gen_ld_asi(int insn
, int size
, int sign
)
1396 TCGv r_size
, r_sign
, r_dword
;
1398 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1399 r_sign
= tcg_temp_new(TCG_TYPE_I32
);
1400 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
1401 tcg_gen_movi_i32(r_size
, size
);
1402 tcg_gen_movi_i32(r_sign
, sign
);
1403 asi
= GET_FIELD(insn
, 19, 26);
1404 tcg_gen_movi_i32(cpu_T
[1], asi
);
1405 tcg_gen_helper_1_4(helper_ld_asi
, r_dword
, cpu_T
[0], cpu_T
[1], r_size
,
1407 tcg_gen_trunc_i64_i32(cpu_T
[1], r_dword
);
1410 static inline void gen_st_asi(int insn
, int size
)
1413 TCGv r_dword
, r_asi
, r_size
;
1415 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
1416 tcg_gen_extu_i32_i64(r_dword
, cpu_T
[1]);
1417 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1418 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1419 asi
= GET_FIELD(insn
, 19, 26);
1420 tcg_gen_movi_i32(r_asi
, asi
);
1421 tcg_gen_movi_i32(r_size
, size
);
1422 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], r_dword
, r_asi
, r_size
);
1425 static inline void gen_swap_asi(int insn
)
1428 TCGv r_size
, r_sign
, r_temp
;
1430 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1431 r_sign
= tcg_temp_new(TCG_TYPE_I32
);
1432 r_temp
= tcg_temp_new(TCG_TYPE_I32
);
1433 tcg_gen_movi_i32(r_size
, 4);
1434 tcg_gen_movi_i32(r_sign
, 0);
1435 asi
= GET_FIELD(insn
, 19, 26);
1436 tcg_gen_movi_i32(cpu_T
[1], asi
);
1437 tcg_gen_helper_1_4(helper_ld_asi
, r_temp
, cpu_T
[0], cpu_T
[1], r_size
,
1439 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], cpu_T
[1], r_size
, r_sign
);
1440 tcg_gen_mov_i32(cpu_T
[1], r_temp
);
1443 static inline void gen_ldda_asi(int insn
)
1446 TCGv r_size
, r_sign
, r_dword
;
1448 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1449 r_sign
= tcg_temp_new(TCG_TYPE_I32
);
1450 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
1451 tcg_gen_movi_i32(r_size
, 8);
1452 tcg_gen_movi_i32(r_sign
, 0);
1453 asi
= GET_FIELD(insn
, 19, 26);
1454 tcg_gen_movi_i32(cpu_T
[1], asi
);
1455 tcg_gen_helper_1_4(helper_ld_asi
, r_dword
, cpu_T
[0], cpu_T
[1], r_size
,
1457 tcg_gen_trunc_i64_i32(cpu_T
[0], r_dword
);
1458 tcg_gen_shri_i64(r_dword
, r_dword
, 32);
1459 tcg_gen_trunc_i64_i32(cpu_T
[1], r_dword
);
1463 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1464 static inline void gen_ldstub_asi(int insn
)
1467 TCGv r_dword
, r_asi
, r_size
;
1469 gen_ld_asi(insn
, 1, 0);
1471 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
1472 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1473 r_size
= tcg_temp_new(TCG_TYPE_I32
);
1474 asi
= GET_FIELD(insn
, 19, 26);
1475 tcg_gen_movi_i32(r_dword
, 0xff);
1476 tcg_gen_movi_i32(r_asi
, asi
);
1477 tcg_gen_movi_i32(r_size
, 1);
1478 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], r_dword
, r_asi
, r_size
);
1482 /* before an instruction, dc->pc must be static */
1483 static void disas_sparc_insn(DisasContext
* dc
)
1485 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1487 insn
= ldl_code(dc
->pc
);
1488 opc
= GET_FIELD(insn
, 0, 1);
1490 rd
= GET_FIELD(insn
, 2, 6);
1492 case 0: /* branches/sethi */
1494 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1497 #ifdef TARGET_SPARC64
1498 case 0x1: /* V9 BPcc */
1502 target
= GET_FIELD_SP(insn
, 0, 18);
1503 target
= sign_extend(target
, 18);
1505 cc
= GET_FIELD_SP(insn
, 20, 21);
1507 do_branch(dc
, target
, insn
, 0);
1509 do_branch(dc
, target
, insn
, 1);
1514 case 0x3: /* V9 BPr */
1516 target
= GET_FIELD_SP(insn
, 0, 13) |
1517 (GET_FIELD_SP(insn
, 20, 21) << 14);
1518 target
= sign_extend(target
, 16);
1520 rs1
= GET_FIELD(insn
, 13, 17);
1521 gen_movl_reg_T0(rs1
);
1522 do_branch_reg(dc
, target
, insn
);
1525 case 0x5: /* V9 FBPcc */
1527 int cc
= GET_FIELD_SP(insn
, 20, 21);
1528 if (gen_trap_ifnofpu(dc
))
1530 target
= GET_FIELD_SP(insn
, 0, 18);
1531 target
= sign_extend(target
, 19);
1533 do_fbranch(dc
, target
, insn
, cc
);
1537 case 0x7: /* CBN+x */
1542 case 0x2: /* BN+x */
1544 target
= GET_FIELD(insn
, 10, 31);
1545 target
= sign_extend(target
, 22);
1547 do_branch(dc
, target
, insn
, 0);
1550 case 0x6: /* FBN+x */
1552 if (gen_trap_ifnofpu(dc
))
1554 target
= GET_FIELD(insn
, 10, 31);
1555 target
= sign_extend(target
, 22);
1557 do_fbranch(dc
, target
, insn
, 0);
1560 case 0x4: /* SETHI */
1565 uint32_t value
= GET_FIELD(insn
, 10, 31);
1566 tcg_gen_movi_tl(cpu_T
[0], value
<< 10);
1567 gen_movl_T0_reg(rd
);
1572 case 0x0: /* UNIMPL */
1581 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
1583 tcg_gen_movi_tl(cpu_T
[0], dc
->pc
);
1584 gen_movl_T0_reg(15);
1590 case 2: /* FPU & Logical Operations */
1592 unsigned int xop
= GET_FIELD(insn
, 7, 12);
1593 if (xop
== 0x3a) { /* generate trap */
1596 rs1
= GET_FIELD(insn
, 13, 17);
1597 gen_movl_reg_T0(rs1
);
1599 rs2
= GET_FIELD(insn
, 25, 31);
1600 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], rs2
);
1602 rs2
= GET_FIELD(insn
, 27, 31);
1606 gen_movl_reg_T1(rs2
);
1612 cond
= GET_FIELD(insn
, 3, 6);
1615 tcg_gen_helper_0_1(helper_trap
, cpu_T
[0]);
1616 } else if (cond
!= 0) {
1617 #ifdef TARGET_SPARC64
1619 int cc
= GET_FIELD_SP(insn
, 11, 12);
1623 gen_cond(cpu_T
[2], 0, cond
);
1625 gen_cond(cpu_T
[2], 1, cond
);
1631 gen_cond(cpu_T
[2], 0, cond
);
1633 tcg_gen_helper_0_2(helper_trapcc
, cpu_T
[0], cpu_T
[2]);
1639 } else if (xop
== 0x28) {
1640 rs1
= GET_FIELD(insn
, 13, 17);
1643 #ifndef TARGET_SPARC64
1644 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1645 manual, rdy on the microSPARC
1647 case 0x0f: /* stbar in the SPARCv8 manual,
1648 rdy on the microSPARC II */
1649 case 0x10 ... 0x1f: /* implementation-dependent in the
1650 SPARCv8 manual, rdy on the
1653 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, y
));
1654 gen_movl_T0_reg(rd
);
1656 #ifdef TARGET_SPARC64
1657 case 0x2: /* V9 rdccr */
1659 gen_movl_T0_reg(rd
);
1661 case 0x3: /* V9 rdasi */
1662 gen_op_movl_T0_env(offsetof(CPUSPARCState
, asi
));
1663 gen_movl_T0_reg(rd
);
1665 case 0x4: /* V9 rdtick */
1669 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
1670 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
1671 offsetof(CPUState
, tick
));
1672 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_T
[0],
1674 gen_movl_T0_reg(rd
);
1677 case 0x5: /* V9 rdpc */
1678 tcg_gen_movi_tl(cpu_T
[0], dc
->pc
);
1679 gen_movl_T0_reg(rd
);
1681 case 0x6: /* V9 rdfprs */
1682 gen_op_movl_T0_env(offsetof(CPUSPARCState
, fprs
));
1683 gen_movl_T0_reg(rd
);
1685 case 0xf: /* V9 membar */
1686 break; /* no effect */
1687 case 0x13: /* Graphics Status */
1688 if (gen_trap_ifnofpu(dc
))
1690 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, gsr
));
1691 gen_movl_T0_reg(rd
);
1693 case 0x17: /* Tick compare */
1694 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, tick_cmpr
));
1695 gen_movl_T0_reg(rd
);
1697 case 0x18: /* System tick */
1701 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
1702 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
1703 offsetof(CPUState
, stick
));
1704 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_T
[0],
1706 gen_movl_T0_reg(rd
);
1709 case 0x19: /* System tick compare */
1710 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, stick_cmpr
));
1711 gen_movl_T0_reg(rd
);
1713 case 0x10: /* Performance Control */
1714 case 0x11: /* Performance Instrumentation Counter */
1715 case 0x12: /* Dispatch Control */
1716 case 0x14: /* Softint set, WO */
1717 case 0x15: /* Softint clear, WO */
1718 case 0x16: /* Softint write */
1723 #if !defined(CONFIG_USER_ONLY)
1724 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
1725 #ifndef TARGET_SPARC64
1726 if (!supervisor(dc
))
1728 tcg_gen_helper_1_0(helper_rdpsr
, cpu_T
[0]);
1730 if (!hypervisor(dc
))
1732 rs1
= GET_FIELD(insn
, 13, 17);
1735 // gen_op_rdhpstate();
1738 // gen_op_rdhtstate();
1741 gen_op_movl_T0_env(offsetof(CPUSPARCState
, hintp
));
1744 gen_op_movl_T0_env(offsetof(CPUSPARCState
, htba
));
1747 gen_op_movl_T0_env(offsetof(CPUSPARCState
, hver
));
1749 case 31: // hstick_cmpr
1750 gen_op_movl_env_T0(offsetof(CPUSPARCState
, hstick_cmpr
));
1756 gen_movl_T0_reg(rd
);
1758 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
1759 if (!supervisor(dc
))
1761 #ifdef TARGET_SPARC64
1762 rs1
= GET_FIELD(insn
, 13, 17);
1768 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
1769 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
1770 offsetof(CPUState
, tsptr
));
1771 tcg_gen_ld_tl(cpu_T
[0], r_tsptr
,
1772 offsetof(trap_state
, tpc
));
1779 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
1780 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
1781 offsetof(CPUState
, tsptr
));
1782 tcg_gen_ld_tl(cpu_T
[0], r_tsptr
,
1783 offsetof(trap_state
, tnpc
));
1790 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
1791 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
1792 offsetof(CPUState
, tsptr
));
1793 tcg_gen_ld_tl(cpu_T
[0], r_tsptr
,
1794 offsetof(trap_state
, tstate
));
1801 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
1802 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
1803 offsetof(CPUState
, tsptr
));
1804 tcg_gen_ld_i32(cpu_T
[0], r_tsptr
,
1805 offsetof(trap_state
, tt
));
1812 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
1813 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
1814 offsetof(CPUState
, tick
));
1815 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_T
[0],
1817 gen_movl_T0_reg(rd
);
1821 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, tbr
));
1824 gen_op_movl_T0_env(offsetof(CPUSPARCState
, pstate
));
1827 gen_op_movl_T0_env(offsetof(CPUSPARCState
, tl
));
1830 gen_op_movl_T0_env(offsetof(CPUSPARCState
, psrpil
));
1836 gen_op_movl_T0_env(offsetof(CPUSPARCState
, cansave
));
1838 case 11: // canrestore
1839 gen_op_movl_T0_env(offsetof(CPUSPARCState
, canrestore
));
1841 case 12: // cleanwin
1842 gen_op_movl_T0_env(offsetof(CPUSPARCState
, cleanwin
));
1844 case 13: // otherwin
1845 gen_op_movl_T0_env(offsetof(CPUSPARCState
, otherwin
));
1848 gen_op_movl_T0_env(offsetof(CPUSPARCState
, wstate
));
1850 case 16: // UA2005 gl
1851 gen_op_movl_T0_env(offsetof(CPUSPARCState
, gl
));
1853 case 26: // UA2005 strand status
1854 if (!hypervisor(dc
))
1856 gen_op_movl_T0_env(offsetof(CPUSPARCState
, ssr
));
1859 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, version
));
1866 gen_op_movl_T0_env(offsetof(CPUSPARCState
, wim
));
1868 gen_movl_T0_reg(rd
);
1870 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
1871 #ifdef TARGET_SPARC64
1874 if (!supervisor(dc
))
1876 gen_op_movtl_T0_env(offsetof(CPUSPARCState
, tbr
));
1877 gen_movl_T0_reg(rd
);
1881 } else if (xop
== 0x34) { /* FPU Operations */
1882 if (gen_trap_ifnofpu(dc
))
1884 gen_op_clear_ieee_excp_and_FTT();
1885 rs1
= GET_FIELD(insn
, 13, 17);
1886 rs2
= GET_FIELD(insn
, 27, 31);
1887 xop
= GET_FIELD(insn
, 18, 26);
1889 case 0x1: /* fmovs */
1890 gen_op_load_fpr_FT0(rs2
);
1891 gen_op_store_FT0_fpr(rd
);
1893 case 0x5: /* fnegs */
1894 gen_op_load_fpr_FT1(rs2
);
1896 gen_op_store_FT0_fpr(rd
);
1898 case 0x9: /* fabss */
1899 gen_op_load_fpr_FT1(rs2
);
1900 tcg_gen_helper_0_0(helper_fabss
);
1901 gen_op_store_FT0_fpr(rd
);
1903 case 0x29: /* fsqrts */
1904 gen_op_load_fpr_FT1(rs2
);
1905 gen_clear_float_exceptions();
1906 tcg_gen_helper_0_0(helper_fsqrts
);
1907 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
1908 gen_op_store_FT0_fpr(rd
);
1910 case 0x2a: /* fsqrtd */
1911 gen_op_load_fpr_DT1(DFPREG(rs2
));
1912 gen_clear_float_exceptions();
1913 tcg_gen_helper_0_0(helper_fsqrtd
);
1914 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
1915 gen_op_store_DT0_fpr(DFPREG(rd
));
1917 case 0x2b: /* fsqrtq */
1918 #if defined(CONFIG_USER_ONLY)
1919 gen_op_load_fpr_QT1(QFPREG(rs2
));
1920 gen_clear_float_exceptions();
1921 tcg_gen_helper_0_0(helper_fsqrtq
);
1922 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
1923 gen_op_store_QT0_fpr(QFPREG(rd
));
1929 gen_op_load_fpr_FT0(rs1
);
1930 gen_op_load_fpr_FT1(rs2
);
1931 gen_clear_float_exceptions();
1933 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
1934 gen_op_store_FT0_fpr(rd
);
1937 gen_op_load_fpr_DT0(DFPREG(rs1
));
1938 gen_op_load_fpr_DT1(DFPREG(rs2
));
1939 gen_clear_float_exceptions();
1941 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
1942 gen_op_store_DT0_fpr(DFPREG(rd
));
1944 case 0x43: /* faddq */
1945 #if defined(CONFIG_USER_ONLY)
1946 gen_op_load_fpr_QT0(QFPREG(rs1
));
1947 gen_op_load_fpr_QT1(QFPREG(rs2
));
1948 gen_clear_float_exceptions();
1950 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
1951 gen_op_store_QT0_fpr(QFPREG(rd
));
1957 gen_op_load_fpr_FT0(rs1
);
1958 gen_op_load_fpr_FT1(rs2
);
1959 gen_clear_float_exceptions();
1961 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
1962 gen_op_store_FT0_fpr(rd
);
1965 gen_op_load_fpr_DT0(DFPREG(rs1
));
1966 gen_op_load_fpr_DT1(DFPREG(rs2
));
1967 gen_clear_float_exceptions();
1969 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
1970 gen_op_store_DT0_fpr(DFPREG(rd
));
1972 case 0x47: /* fsubq */
1973 #if defined(CONFIG_USER_ONLY)
1974 gen_op_load_fpr_QT0(QFPREG(rs1
));
1975 gen_op_load_fpr_QT1(QFPREG(rs2
));
1976 gen_clear_float_exceptions();
1978 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
1979 gen_op_store_QT0_fpr(QFPREG(rd
));
1985 gen_op_load_fpr_FT0(rs1
);
1986 gen_op_load_fpr_FT1(rs2
);
1987 gen_clear_float_exceptions();
1989 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
1990 gen_op_store_FT0_fpr(rd
);
1993 gen_op_load_fpr_DT0(DFPREG(rs1
));
1994 gen_op_load_fpr_DT1(DFPREG(rs2
));
1995 gen_clear_float_exceptions();
1997 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
1998 gen_op_store_DT0_fpr(DFPREG(rd
));
2000 case 0x4b: /* fmulq */
2001 #if defined(CONFIG_USER_ONLY)
2002 gen_op_load_fpr_QT0(QFPREG(rs1
));
2003 gen_op_load_fpr_QT1(QFPREG(rs2
));
2004 gen_clear_float_exceptions();
2006 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2007 gen_op_store_QT0_fpr(QFPREG(rd
));
2013 gen_op_load_fpr_FT0(rs1
);
2014 gen_op_load_fpr_FT1(rs2
);
2015 gen_clear_float_exceptions();
2017 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2018 gen_op_store_FT0_fpr(rd
);
2021 gen_op_load_fpr_DT0(DFPREG(rs1
));
2022 gen_op_load_fpr_DT1(DFPREG(rs2
));
2023 gen_clear_float_exceptions();
2025 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2026 gen_op_store_DT0_fpr(DFPREG(rd
));
2028 case 0x4f: /* fdivq */
2029 #if defined(CONFIG_USER_ONLY)
2030 gen_op_load_fpr_QT0(QFPREG(rs1
));
2031 gen_op_load_fpr_QT1(QFPREG(rs2
));
2032 gen_clear_float_exceptions();
2034 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2035 gen_op_store_QT0_fpr(QFPREG(rd
));
2041 gen_op_load_fpr_FT0(rs1
);
2042 gen_op_load_fpr_FT1(rs2
);
2043 gen_clear_float_exceptions();
2045 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2046 gen_op_store_DT0_fpr(DFPREG(rd
));
2048 case 0x6e: /* fdmulq */
2049 #if defined(CONFIG_USER_ONLY)
2050 gen_op_load_fpr_DT0(DFPREG(rs1
));
2051 gen_op_load_fpr_DT1(DFPREG(rs2
));
2052 gen_clear_float_exceptions();
2054 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2055 gen_op_store_QT0_fpr(QFPREG(rd
));
2061 gen_op_load_fpr_FT1(rs2
);
2062 gen_clear_float_exceptions();
2064 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2065 gen_op_store_FT0_fpr(rd
);
2068 gen_op_load_fpr_DT1(DFPREG(rs2
));
2069 gen_clear_float_exceptions();
2071 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2072 gen_op_store_FT0_fpr(rd
);
2074 case 0xc7: /* fqtos */
2075 #if defined(CONFIG_USER_ONLY)
2076 gen_op_load_fpr_QT1(QFPREG(rs2
));
2077 gen_clear_float_exceptions();
2079 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2080 gen_op_store_FT0_fpr(rd
);
2086 gen_op_load_fpr_FT1(rs2
);
2088 gen_op_store_DT0_fpr(DFPREG(rd
));
2091 gen_op_load_fpr_FT1(rs2
);
2093 gen_op_store_DT0_fpr(DFPREG(rd
));
2095 case 0xcb: /* fqtod */
2096 #if defined(CONFIG_USER_ONLY)
2097 gen_op_load_fpr_QT1(QFPREG(rs2
));
2098 gen_clear_float_exceptions();
2100 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2101 gen_op_store_DT0_fpr(DFPREG(rd
));
2106 case 0xcc: /* fitoq */
2107 #if defined(CONFIG_USER_ONLY)
2108 gen_op_load_fpr_FT1(rs2
);
2110 gen_op_store_QT0_fpr(QFPREG(rd
));
2115 case 0xcd: /* fstoq */
2116 #if defined(CONFIG_USER_ONLY)
2117 gen_op_load_fpr_FT1(rs2
);
2119 gen_op_store_QT0_fpr(QFPREG(rd
));
2124 case 0xce: /* fdtoq */
2125 #if defined(CONFIG_USER_ONLY)
2126 gen_op_load_fpr_DT1(DFPREG(rs2
));
2128 gen_op_store_QT0_fpr(QFPREG(rd
));
2134 gen_op_load_fpr_FT1(rs2
);
2135 gen_clear_float_exceptions();
2137 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2138 gen_op_store_FT0_fpr(rd
);
2141 gen_op_load_fpr_DT1(DFPREG(rs2
));
2142 gen_clear_float_exceptions();
2144 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2145 gen_op_store_FT0_fpr(rd
);
2147 case 0xd3: /* fqtoi */
2148 #if defined(CONFIG_USER_ONLY)
2149 gen_op_load_fpr_QT1(QFPREG(rs2
));
2150 gen_clear_float_exceptions();
2152 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2153 gen_op_store_FT0_fpr(rd
);
2158 #ifdef TARGET_SPARC64
2159 case 0x2: /* V9 fmovd */
2160 gen_op_load_fpr_DT0(DFPREG(rs2
));
2161 gen_op_store_DT0_fpr(DFPREG(rd
));
2163 case 0x3: /* V9 fmovq */
2164 #if defined(CONFIG_USER_ONLY)
2165 gen_op_load_fpr_QT0(QFPREG(rs2
));
2166 gen_op_store_QT0_fpr(QFPREG(rd
));
2171 case 0x6: /* V9 fnegd */
2172 gen_op_load_fpr_DT1(DFPREG(rs2
));
2174 gen_op_store_DT0_fpr(DFPREG(rd
));
2176 case 0x7: /* V9 fnegq */
2177 #if defined(CONFIG_USER_ONLY)
2178 gen_op_load_fpr_QT1(QFPREG(rs2
));
2180 gen_op_store_QT0_fpr(QFPREG(rd
));
2185 case 0xa: /* V9 fabsd */
2186 gen_op_load_fpr_DT1(DFPREG(rs2
));
2187 tcg_gen_helper_0_0(helper_fabsd
);
2188 gen_op_store_DT0_fpr(DFPREG(rd
));
2190 case 0xb: /* V9 fabsq */
2191 #if defined(CONFIG_USER_ONLY)
2192 gen_op_load_fpr_QT1(QFPREG(rs2
));
2193 tcg_gen_helper_0_0(helper_fabsq
);
2194 gen_op_store_QT0_fpr(QFPREG(rd
));
2199 case 0x81: /* V9 fstox */
2200 gen_op_load_fpr_FT1(rs2
);
2201 gen_clear_float_exceptions();
2203 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2204 gen_op_store_DT0_fpr(DFPREG(rd
));
2206 case 0x82: /* V9 fdtox */
2207 gen_op_load_fpr_DT1(DFPREG(rs2
));
2208 gen_clear_float_exceptions();
2210 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2211 gen_op_store_DT0_fpr(DFPREG(rd
));
2213 case 0x83: /* V9 fqtox */
2214 #if defined(CONFIG_USER_ONLY)
2215 gen_op_load_fpr_QT1(QFPREG(rs2
));
2216 gen_clear_float_exceptions();
2218 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2219 gen_op_store_DT0_fpr(DFPREG(rd
));
2224 case 0x84: /* V9 fxtos */
2225 gen_op_load_fpr_DT1(DFPREG(rs2
));
2226 gen_clear_float_exceptions();
2228 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2229 gen_op_store_FT0_fpr(rd
);
2231 case 0x88: /* V9 fxtod */
2232 gen_op_load_fpr_DT1(DFPREG(rs2
));
2233 gen_clear_float_exceptions();
2235 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2236 gen_op_store_DT0_fpr(DFPREG(rd
));
2238 case 0x8c: /* V9 fxtoq */
2239 #if defined(CONFIG_USER_ONLY)
2240 gen_op_load_fpr_DT1(DFPREG(rs2
));
2241 gen_clear_float_exceptions();
2243 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2244 gen_op_store_QT0_fpr(QFPREG(rd
));
2253 } else if (xop
== 0x35) { /* FPU Operations */
2254 #ifdef TARGET_SPARC64
2257 if (gen_trap_ifnofpu(dc
))
2259 gen_op_clear_ieee_excp_and_FTT();
2260 rs1
= GET_FIELD(insn
, 13, 17);
2261 rs2
= GET_FIELD(insn
, 27, 31);
2262 xop
= GET_FIELD(insn
, 18, 26);
2263 #ifdef TARGET_SPARC64
2264 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2268 l1
= gen_new_label();
2269 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
2270 cond
= GET_FIELD_SP(insn
, 14, 17);
2271 rs1
= GET_FIELD(insn
, 13, 17);
2272 gen_movl_reg_T0(rs1
);
2273 tcg_gen_movi_tl(r_zero
, 0);
2274 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
2275 gen_op_load_fpr_FT0(rs2
);
2276 gen_op_store_FT0_fpr(rd
);
2279 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2283 l1
= gen_new_label();
2284 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
2285 cond
= GET_FIELD_SP(insn
, 14, 17);
2286 rs1
= GET_FIELD(insn
, 13, 17);
2287 gen_movl_reg_T0(rs1
);
2288 tcg_gen_movi_tl(r_zero
, 0);
2289 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
2290 gen_op_load_fpr_DT0(DFPREG(rs2
));
2291 gen_op_store_DT0_fpr(DFPREG(rd
));
2294 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2295 #if defined(CONFIG_USER_ONLY)
2299 l1
= gen_new_label();
2300 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
2301 cond
= GET_FIELD_SP(insn
, 14, 17);
2302 rs1
= GET_FIELD(insn
, 13, 17);
2303 gen_movl_reg_T0(rs1
);
2304 tcg_gen_movi_tl(r_zero
, 0);
2305 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
2306 gen_op_load_fpr_QT0(QFPREG(rs2
));
2307 gen_op_store_QT0_fpr(QFPREG(rd
));
2316 #ifdef TARGET_SPARC64
2317 #define FMOVCC(size_FDQ, fcc) \
2319 TCGv r_zero, r_cond; \
2322 l1 = gen_new_label(); \
2323 r_zero = tcg_temp_new(TCG_TYPE_TL); \
2324 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2325 tcg_gen_movi_tl(r_zero, 0); \
2326 cond = GET_FIELD_SP(insn, 14, 17); \
2327 gen_fcond(r_cond, fcc, cond); \
2328 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1); \
2329 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2330 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2331 gen_set_label(l1); \
2333 case 0x001: /* V9 fmovscc %fcc0 */
2336 case 0x002: /* V9 fmovdcc %fcc0 */
2339 case 0x003: /* V9 fmovqcc %fcc0 */
2340 #if defined(CONFIG_USER_ONLY)
2346 case 0x041: /* V9 fmovscc %fcc1 */
2349 case 0x042: /* V9 fmovdcc %fcc1 */
2352 case 0x043: /* V9 fmovqcc %fcc1 */
2353 #if defined(CONFIG_USER_ONLY)
2359 case 0x081: /* V9 fmovscc %fcc2 */
2362 case 0x082: /* V9 fmovdcc %fcc2 */
2365 case 0x083: /* V9 fmovqcc %fcc2 */
2366 #if defined(CONFIG_USER_ONLY)
2372 case 0x0c1: /* V9 fmovscc %fcc3 */
2375 case 0x0c2: /* V9 fmovdcc %fcc3 */
2378 case 0x0c3: /* V9 fmovqcc %fcc3 */
2379 #if defined(CONFIG_USER_ONLY)
2386 #define FMOVCC(size_FDQ, icc) \
2388 TCGv r_zero, r_cond; \
2391 l1 = gen_new_label(); \
2392 r_zero = tcg_temp_new(TCG_TYPE_TL); \
2393 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2394 tcg_gen_movi_tl(r_zero, 0); \
2395 cond = GET_FIELD_SP(insn, 14, 17); \
2396 gen_cond(r_cond, icc, cond); \
2397 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1); \
2398 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2399 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2400 gen_set_label(l1); \
2403 case 0x101: /* V9 fmovscc %icc */
2406 case 0x102: /* V9 fmovdcc %icc */
2408 case 0x103: /* V9 fmovqcc %icc */
2409 #if defined(CONFIG_USER_ONLY)
2415 case 0x181: /* V9 fmovscc %xcc */
2418 case 0x182: /* V9 fmovdcc %xcc */
2421 case 0x183: /* V9 fmovqcc %xcc */
2422 #if defined(CONFIG_USER_ONLY)
2430 case 0x51: /* fcmps, V9 %fcc */
2431 gen_op_load_fpr_FT0(rs1
);
2432 gen_op_load_fpr_FT1(rs2
);
2433 gen_op_fcmps(rd
& 3);
2435 case 0x52: /* fcmpd, V9 %fcc */
2436 gen_op_load_fpr_DT0(DFPREG(rs1
));
2437 gen_op_load_fpr_DT1(DFPREG(rs2
));
2438 gen_op_fcmpd(rd
& 3);
2440 case 0x53: /* fcmpq, V9 %fcc */
2441 #if defined(CONFIG_USER_ONLY)
2442 gen_op_load_fpr_QT0(QFPREG(rs1
));
2443 gen_op_load_fpr_QT1(QFPREG(rs2
));
2444 gen_op_fcmpq(rd
& 3);
2446 #else /* !defined(CONFIG_USER_ONLY) */
2449 case 0x55: /* fcmpes, V9 %fcc */
2450 gen_op_load_fpr_FT0(rs1
);
2451 gen_op_load_fpr_FT1(rs2
);
2452 gen_op_fcmpes(rd
& 3);
2454 case 0x56: /* fcmped, V9 %fcc */
2455 gen_op_load_fpr_DT0(DFPREG(rs1
));
2456 gen_op_load_fpr_DT1(DFPREG(rs2
));
2457 gen_op_fcmped(rd
& 3);
2459 case 0x57: /* fcmpeq, V9 %fcc */
2460 #if defined(CONFIG_USER_ONLY)
2461 gen_op_load_fpr_QT0(QFPREG(rs1
));
2462 gen_op_load_fpr_QT1(QFPREG(rs2
));
2463 gen_op_fcmpeq(rd
& 3);
2465 #else/* !defined(CONFIG_USER_ONLY) */
2472 } else if (xop
== 0x2) {
2475 rs1
= GET_FIELD(insn
, 13, 17);
2477 // or %g0, x, y -> mov T0, x; mov y, T0
2478 if (IS_IMM
) { /* immediate */
2479 rs2
= GET_FIELDs(insn
, 19, 31);
2480 tcg_gen_movi_tl(cpu_T
[0], (int)rs2
);
2481 } else { /* register */
2482 rs2
= GET_FIELD(insn
, 27, 31);
2483 gen_movl_reg_T0(rs2
);
2486 gen_movl_reg_T0(rs1
);
2487 if (IS_IMM
) { /* immediate */
2488 rs2
= GET_FIELDs(insn
, 19, 31);
2489 tcg_gen_ori_tl(cpu_T
[0], cpu_T
[0], (int)rs2
);
2490 } else { /* register */
2491 // or x, %g0, y -> mov T1, x; mov y, T1
2492 rs2
= GET_FIELD(insn
, 27, 31);
2494 gen_movl_reg_T1(rs2
);
2499 gen_movl_T0_reg(rd
);
2501 #ifdef TARGET_SPARC64
2502 } else if (xop
== 0x25) { /* sll, V9 sllx */
2503 rs1
= GET_FIELD(insn
, 13, 17);
2504 gen_movl_reg_T0(rs1
);
2505 if (IS_IMM
) { /* immediate */
2506 rs2
= GET_FIELDs(insn
, 20, 31);
2507 if (insn
& (1 << 12)) {
2508 tcg_gen_shli_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x3f);
2510 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2511 tcg_gen_shli_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x1f);
2513 } else { /* register */
2514 rs2
= GET_FIELD(insn
, 27, 31);
2515 gen_movl_reg_T1(rs2
);
2516 if (insn
& (1 << 12)) {
2517 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x3f);
2518 tcg_gen_shl_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2520 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x1f);
2521 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2522 tcg_gen_shl_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2525 gen_movl_T0_reg(rd
);
2526 } else if (xop
== 0x26) { /* srl, V9 srlx */
2527 rs1
= GET_FIELD(insn
, 13, 17);
2528 gen_movl_reg_T0(rs1
);
2529 if (IS_IMM
) { /* immediate */
2530 rs2
= GET_FIELDs(insn
, 20, 31);
2531 if (insn
& (1 << 12)) {
2532 tcg_gen_shri_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x3f);
2534 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2535 tcg_gen_shri_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x1f);
2537 } else { /* register */
2538 rs2
= GET_FIELD(insn
, 27, 31);
2539 gen_movl_reg_T1(rs2
);
2540 if (insn
& (1 << 12)) {
2541 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x3f);
2542 tcg_gen_shr_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2544 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x1f);
2545 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2546 tcg_gen_shr_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2549 gen_movl_T0_reg(rd
);
2550 } else if (xop
== 0x27) { /* sra, V9 srax */
2551 rs1
= GET_FIELD(insn
, 13, 17);
2552 gen_movl_reg_T0(rs1
);
2553 if (IS_IMM
) { /* immediate */
2554 rs2
= GET_FIELDs(insn
, 20, 31);
2555 if (insn
& (1 << 12)) {
2556 tcg_gen_sari_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x3f);
2558 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2559 tcg_gen_ext_i32_i64(cpu_T
[0], cpu_T
[0]);
2560 tcg_gen_sari_i64(cpu_T
[0], cpu_T
[0], rs2
& 0x1f);
2562 } else { /* register */
2563 rs2
= GET_FIELD(insn
, 27, 31);
2564 gen_movl_reg_T1(rs2
);
2565 if (insn
& (1 << 12)) {
2566 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x3f);
2567 tcg_gen_sar_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2569 tcg_gen_andi_i64(cpu_T
[1], cpu_T
[1], 0x1f);
2570 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], 0xffffffffULL
);
2571 tcg_gen_sar_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2574 gen_movl_T0_reg(rd
);
2576 } else if (xop
< 0x36) {
2577 rs1
= GET_FIELD(insn
, 13, 17);
2578 gen_movl_reg_T0(rs1
);
2579 if (IS_IMM
) { /* immediate */
2580 rs2
= GET_FIELDs(insn
, 19, 31);
2581 gen_movl_simm_T1(rs2
);
2582 } else { /* register */
2583 rs2
= GET_FIELD(insn
, 27, 31);
2584 gen_movl_reg_T1(rs2
);
2587 switch (xop
& ~0x10) {
2590 gen_op_add_T1_T0_cc();
2595 tcg_gen_and_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2597 gen_op_logic_T0_cc();
2600 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2602 gen_op_logic_T0_cc();
2605 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2607 gen_op_logic_T0_cc();
2611 gen_op_sub_T1_T0_cc();
2613 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2616 tcg_gen_xori_tl(cpu_T
[1], cpu_T
[1], -1);
2617 tcg_gen_and_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2619 gen_op_logic_T0_cc();
2622 tcg_gen_xori_tl(cpu_T
[1], cpu_T
[1], -1);
2623 tcg_gen_or_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2625 gen_op_logic_T0_cc();
2628 tcg_gen_xori_tl(cpu_T
[1], cpu_T
[1], -1);
2629 tcg_gen_xor_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2631 gen_op_logic_T0_cc();
2635 gen_op_addx_T1_T0_cc();
2637 tcg_gen_ld_i32(cpu_tmp0
, cpu_env
,
2638 offsetof(CPUSPARCState
, psr
));
2639 gen_mov_reg_C(cpu_tmp0
, cpu_tmp0
);
2640 tcg_gen_add_tl(cpu_T
[1], cpu_T
[1], cpu_tmp0
);
2641 tcg_gen_add_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2644 #ifdef TARGET_SPARC64
2645 case 0x9: /* V9 mulx */
2646 tcg_gen_mul_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2650 gen_op_umul_T1_T0();
2652 gen_op_logic_T0_cc();
2655 gen_op_smul_T1_T0();
2657 gen_op_logic_T0_cc();
2661 gen_op_subx_T1_T0_cc();
2663 tcg_gen_ld_i32(cpu_tmp0
, cpu_env
,
2664 offsetof(CPUSPARCState
, psr
));
2665 gen_mov_reg_C(cpu_tmp0
, cpu_tmp0
);
2666 tcg_gen_add_tl(cpu_T
[1], cpu_T
[1], cpu_tmp0
);
2667 tcg_gen_sub_tl(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2670 #ifdef TARGET_SPARC64
2671 case 0xd: /* V9 udivx */
2672 gen_op_udivx_T1_T0();
2676 gen_op_udiv_T1_T0();
2681 gen_op_sdiv_T1_T0();
2688 gen_movl_T0_reg(rd
);
2691 case 0x20: /* taddcc */
2692 gen_op_tadd_T1_T0_cc();
2693 gen_movl_T0_reg(rd
);
2695 case 0x21: /* tsubcc */
2696 gen_op_tsub_T1_T0_cc();
2697 gen_movl_T0_reg(rd
);
2699 case 0x22: /* taddcctv */
2701 gen_op_tadd_T1_T0_ccTV();
2702 gen_movl_T0_reg(rd
);
2704 case 0x23: /* tsubcctv */
2706 gen_op_tsub_T1_T0_ccTV();
2707 gen_movl_T0_reg(rd
);
2709 case 0x24: /* mulscc */
2710 gen_op_mulscc_T1_T0();
2711 gen_movl_T0_reg(rd
);
2713 #ifndef TARGET_SPARC64
2714 case 0x25: /* sll */
2715 tcg_gen_andi_i32(cpu_T
[1], cpu_T
[1], 0x1f);
2716 tcg_gen_shl_i32(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2717 gen_movl_T0_reg(rd
);
2719 case 0x26: /* srl */
2720 tcg_gen_andi_i32(cpu_T
[1], cpu_T
[1], 0x1f);
2721 tcg_gen_shr_i32(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2722 gen_movl_T0_reg(rd
);
2724 case 0x27: /* sra */
2725 tcg_gen_andi_i32(cpu_T
[1], cpu_T
[1], 0x1f);
2726 tcg_gen_sar_i32(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2727 gen_movl_T0_reg(rd
);
2735 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, y
));
2737 #ifndef TARGET_SPARC64
2738 case 0x01 ... 0x0f: /* undefined in the
2742 case 0x10 ... 0x1f: /* implementation-dependent
2748 case 0x2: /* V9 wrccr */
2752 case 0x3: /* V9 wrasi */
2754 gen_op_movl_env_T0(offsetof(CPUSPARCState
, asi
));
2756 case 0x6: /* V9 wrfprs */
2758 gen_op_movl_env_T0(offsetof(CPUSPARCState
, fprs
));
2764 case 0xf: /* V9 sir, nop if user */
2765 #if !defined(CONFIG_USER_ONLY)
2770 case 0x13: /* Graphics Status */
2771 if (gen_trap_ifnofpu(dc
))
2774 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, gsr
));
2776 case 0x17: /* Tick compare */
2777 #if !defined(CONFIG_USER_ONLY)
2778 if (!supervisor(dc
))
2785 gen_op_movtl_env_T0(offsetof(CPUSPARCState
,
2787 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2788 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2789 offsetof(CPUState
, tick
));
2790 tcg_gen_helper_0_2(helper_tick_set_limit
,
2791 r_tickptr
, cpu_T
[0]);
2794 case 0x18: /* System tick */
2795 #if !defined(CONFIG_USER_ONLY)
2796 if (!supervisor(dc
))
2803 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2804 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2805 offsetof(CPUState
, stick
));
2806 tcg_gen_helper_0_2(helper_tick_set_count
,
2807 r_tickptr
, cpu_T
[0]);
2810 case 0x19: /* System tick compare */
2811 #if !defined(CONFIG_USER_ONLY)
2812 if (!supervisor(dc
))
2819 gen_op_movtl_env_T0(offsetof(CPUSPARCState
,
2821 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2822 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2823 offsetof(CPUState
, stick
));
2824 tcg_gen_helper_0_2(helper_tick_set_limit
,
2825 r_tickptr
, cpu_T
[0]);
2829 case 0x10: /* Performance Control */
2830 case 0x11: /* Performance Instrumentation Counter */
2831 case 0x12: /* Dispatch Control */
2832 case 0x14: /* Softint set */
2833 case 0x15: /* Softint clear */
2834 case 0x16: /* Softint write */
2841 #if !defined(CONFIG_USER_ONLY)
2842 case 0x31: /* wrpsr, V9 saved, restored */
2844 if (!supervisor(dc
))
2846 #ifdef TARGET_SPARC64
2854 case 2: /* UA2005 allclean */
2855 case 3: /* UA2005 otherw */
2856 case 4: /* UA2005 normalw */
2857 case 5: /* UA2005 invalw */
2864 tcg_gen_helper_0_1(helper_wrpsr
, cpu_T
[0]);
2872 case 0x32: /* wrwim, V9 wrpr */
2874 if (!supervisor(dc
))
2877 #ifdef TARGET_SPARC64
2883 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2884 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2885 offsetof(CPUState
, tsptr
));
2886 tcg_gen_st_tl(cpu_T
[0], r_tsptr
,
2887 offsetof(trap_state
, tpc
));
2894 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2895 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2896 offsetof(CPUState
, tsptr
));
2897 tcg_gen_st_tl(cpu_T
[0], r_tsptr
,
2898 offsetof(trap_state
, tnpc
));
2905 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2906 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2907 offsetof(CPUState
, tsptr
));
2908 tcg_gen_st_tl(cpu_T
[0], r_tsptr
,
2909 offsetof(trap_state
, tstate
));
2916 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2917 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2918 offsetof(CPUState
, tsptr
));
2919 tcg_gen_st_i32(cpu_T
[0], r_tsptr
,
2920 offsetof(trap_state
, tt
));
2927 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2928 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2929 offsetof(CPUState
, tick
));
2930 tcg_gen_helper_0_2(helper_tick_set_count
,
2931 r_tickptr
, cpu_T
[0]);
2935 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, tbr
));
2939 tcg_gen_helper_0_1(helper_wrpstate
, cpu_T
[0]);
2945 gen_op_movl_env_T0(offsetof(CPUSPARCState
, tl
));
2948 gen_op_movl_env_T0(offsetof(CPUSPARCState
, psrpil
));
2954 gen_op_movl_env_T0(offsetof(CPUSPARCState
, cansave
));
2956 case 11: // canrestore
2957 gen_op_movl_env_T0(offsetof(CPUSPARCState
, canrestore
));
2959 case 12: // cleanwin
2960 gen_op_movl_env_T0(offsetof(CPUSPARCState
, cleanwin
));
2962 case 13: // otherwin
2963 gen_op_movl_env_T0(offsetof(CPUSPARCState
, otherwin
));
2966 gen_op_movl_env_T0(offsetof(CPUSPARCState
, wstate
));
2968 case 16: // UA2005 gl
2969 gen_op_movl_env_T0(offsetof(CPUSPARCState
, gl
));
2971 case 26: // UA2005 strand status
2972 if (!hypervisor(dc
))
2974 gen_op_movl_env_T0(offsetof(CPUSPARCState
, ssr
));
2980 tcg_gen_andi_i32(cpu_T
[0], cpu_T
[0], ((1 << NWINDOWS
) - 1));
2981 gen_op_movl_env_T0(offsetof(CPUSPARCState
, wim
));
2985 case 0x33: /* wrtbr, UA2005 wrhpr */
2987 #ifndef TARGET_SPARC64
2988 if (!supervisor(dc
))
2991 gen_op_movtl_env_T0(offsetof(CPUSPARCState
, tbr
));
2993 if (!hypervisor(dc
))
2998 // XXX gen_op_wrhpstate();
3005 // XXX gen_op_wrhtstate();
3008 gen_op_movl_env_T0(offsetof(CPUSPARCState
, hintp
));
3011 gen_op_movl_env_T0(offsetof(CPUSPARCState
, htba
));
3013 case 31: // hstick_cmpr
3017 gen_op_movtl_env_T0(offsetof(CPUSPARCState
,
3019 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3020 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3021 offsetof(CPUState
, hstick
));
3022 tcg_gen_helper_0_2(helper_tick_set_limit
,
3023 r_tickptr
, cpu_T
[0]);
3026 case 6: // hver readonly
3034 #ifdef TARGET_SPARC64
3035 case 0x2c: /* V9 movcc */
3037 int cc
= GET_FIELD_SP(insn
, 11, 12);
3038 int cond
= GET_FIELD_SP(insn
, 14, 17);
3043 if (insn
& (1 << 18)) {
3045 gen_cond(cpu_T
[2], 0, cond
);
3047 gen_cond(cpu_T
[2], 1, cond
);
3051 gen_fcond(cpu_T
[2], cc
, cond
);
3054 l1
= gen_new_label();
3056 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
3057 tcg_gen_movi_tl(r_zero
, 0);
3058 tcg_gen_brcond_tl(TCG_COND_EQ
, cpu_T
[2], r_zero
, l1
);
3059 if (IS_IMM
) { /* immediate */
3060 rs2
= GET_FIELD_SPs(insn
, 0, 10);
3061 gen_movl_simm_T1(rs2
);
3063 rs2
= GET_FIELD_SP(insn
, 0, 4);
3064 gen_movl_reg_T1(rs2
);
3066 gen_movl_T1_reg(rd
);
3070 case 0x2d: /* V9 sdivx */
3071 gen_op_sdivx_T1_T0();
3072 gen_movl_T0_reg(rd
);
3074 case 0x2e: /* V9 popc */
3076 if (IS_IMM
) { /* immediate */
3077 rs2
= GET_FIELD_SPs(insn
, 0, 12);
3078 gen_movl_simm_T1(rs2
);
3079 // XXX optimize: popc(constant)
3082 rs2
= GET_FIELD_SP(insn
, 0, 4);
3083 gen_movl_reg_T1(rs2
);
3085 tcg_gen_helper_1_1(helper_popc
, cpu_T
[0],
3087 gen_movl_T0_reg(rd
);
3089 case 0x2f: /* V9 movr */
3091 int cond
= GET_FIELD_SP(insn
, 10, 12);
3095 rs1
= GET_FIELD(insn
, 13, 17);
3096 gen_movl_reg_T0(rs1
);
3098 l1
= gen_new_label();
3100 r_zero
= tcg_temp_new(TCG_TYPE_TL
);
3101 tcg_gen_movi_tl(r_zero
, 0);
3102 tcg_gen_brcond_tl(gen_tcg_cond_reg
[cond
], cpu_T
[0], r_zero
, l1
);
3103 if (IS_IMM
) { /* immediate */
3104 rs2
= GET_FIELD_SPs(insn
, 0, 9);
3105 gen_movl_simm_T1(rs2
);
3107 rs2
= GET_FIELD_SP(insn
, 0, 4);
3108 gen_movl_reg_T1(rs2
);
3110 gen_movl_T1_reg(rd
);
3119 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3120 #ifdef TARGET_SPARC64
3121 int opf
= GET_FIELD_SP(insn
, 5, 13);
3122 rs1
= GET_FIELD(insn
, 13, 17);
3123 rs2
= GET_FIELD(insn
, 27, 31);
3124 if (gen_trap_ifnofpu(dc
))
3128 case 0x000: /* VIS I edge8cc */
3129 case 0x001: /* VIS II edge8n */
3130 case 0x002: /* VIS I edge8lcc */
3131 case 0x003: /* VIS II edge8ln */
3132 case 0x004: /* VIS I edge16cc */
3133 case 0x005: /* VIS II edge16n */
3134 case 0x006: /* VIS I edge16lcc */
3135 case 0x007: /* VIS II edge16ln */
3136 case 0x008: /* VIS I edge32cc */
3137 case 0x009: /* VIS II edge32n */
3138 case 0x00a: /* VIS I edge32lcc */
3139 case 0x00b: /* VIS II edge32ln */
3142 case 0x010: /* VIS I array8 */
3143 gen_movl_reg_T0(rs1
);
3144 gen_movl_reg_T1(rs2
);
3146 gen_movl_T0_reg(rd
);
3148 case 0x012: /* VIS I array16 */
3149 gen_movl_reg_T0(rs1
);
3150 gen_movl_reg_T1(rs2
);
3152 gen_movl_T0_reg(rd
);
3154 case 0x014: /* VIS I array32 */
3155 gen_movl_reg_T0(rs1
);
3156 gen_movl_reg_T1(rs2
);
3158 gen_movl_T0_reg(rd
);
3160 case 0x018: /* VIS I alignaddr */
3161 gen_movl_reg_T0(rs1
);
3162 gen_movl_reg_T1(rs2
);
3164 gen_movl_T0_reg(rd
);
3166 case 0x019: /* VIS II bmask */
3167 case 0x01a: /* VIS I alignaddrl */
3170 case 0x020: /* VIS I fcmple16 */
3171 gen_op_load_fpr_DT0(DFPREG(rs1
));
3172 gen_op_load_fpr_DT1(DFPREG(rs2
));
3174 gen_op_store_DT0_fpr(DFPREG(rd
));
3176 case 0x022: /* VIS I fcmpne16 */
3177 gen_op_load_fpr_DT0(DFPREG(rs1
));
3178 gen_op_load_fpr_DT1(DFPREG(rs2
));
3180 gen_op_store_DT0_fpr(DFPREG(rd
));
3182 case 0x024: /* VIS I fcmple32 */
3183 gen_op_load_fpr_DT0(DFPREG(rs1
));
3184 gen_op_load_fpr_DT1(DFPREG(rs2
));
3186 gen_op_store_DT0_fpr(DFPREG(rd
));
3188 case 0x026: /* VIS I fcmpne32 */
3189 gen_op_load_fpr_DT0(DFPREG(rs1
));
3190 gen_op_load_fpr_DT1(DFPREG(rs2
));
3192 gen_op_store_DT0_fpr(DFPREG(rd
));
3194 case 0x028: /* VIS I fcmpgt16 */
3195 gen_op_load_fpr_DT0(DFPREG(rs1
));
3196 gen_op_load_fpr_DT1(DFPREG(rs2
));
3198 gen_op_store_DT0_fpr(DFPREG(rd
));
3200 case 0x02a: /* VIS I fcmpeq16 */
3201 gen_op_load_fpr_DT0(DFPREG(rs1
));
3202 gen_op_load_fpr_DT1(DFPREG(rs2
));
3204 gen_op_store_DT0_fpr(DFPREG(rd
));
3206 case 0x02c: /* VIS I fcmpgt32 */
3207 gen_op_load_fpr_DT0(DFPREG(rs1
));
3208 gen_op_load_fpr_DT1(DFPREG(rs2
));
3210 gen_op_store_DT0_fpr(DFPREG(rd
));
3212 case 0x02e: /* VIS I fcmpeq32 */
3213 gen_op_load_fpr_DT0(DFPREG(rs1
));
3214 gen_op_load_fpr_DT1(DFPREG(rs2
));
3216 gen_op_store_DT0_fpr(DFPREG(rd
));
3218 case 0x031: /* VIS I fmul8x16 */
3219 gen_op_load_fpr_DT0(DFPREG(rs1
));
3220 gen_op_load_fpr_DT1(DFPREG(rs2
));
3222 gen_op_store_DT0_fpr(DFPREG(rd
));
3224 case 0x033: /* VIS I fmul8x16au */
3225 gen_op_load_fpr_DT0(DFPREG(rs1
));
3226 gen_op_load_fpr_DT1(DFPREG(rs2
));
3227 gen_op_fmul8x16au();
3228 gen_op_store_DT0_fpr(DFPREG(rd
));
3230 case 0x035: /* VIS I fmul8x16al */
3231 gen_op_load_fpr_DT0(DFPREG(rs1
));
3232 gen_op_load_fpr_DT1(DFPREG(rs2
));
3233 gen_op_fmul8x16al();
3234 gen_op_store_DT0_fpr(DFPREG(rd
));
3236 case 0x036: /* VIS I fmul8sux16 */
3237 gen_op_load_fpr_DT0(DFPREG(rs1
));
3238 gen_op_load_fpr_DT1(DFPREG(rs2
));
3239 gen_op_fmul8sux16();
3240 gen_op_store_DT0_fpr(DFPREG(rd
));
3242 case 0x037: /* VIS I fmul8ulx16 */
3243 gen_op_load_fpr_DT0(DFPREG(rs1
));
3244 gen_op_load_fpr_DT1(DFPREG(rs2
));
3245 gen_op_fmul8ulx16();
3246 gen_op_store_DT0_fpr(DFPREG(rd
));
3248 case 0x038: /* VIS I fmuld8sux16 */
3249 gen_op_load_fpr_DT0(DFPREG(rs1
));
3250 gen_op_load_fpr_DT1(DFPREG(rs2
));
3251 gen_op_fmuld8sux16();
3252 gen_op_store_DT0_fpr(DFPREG(rd
));
3254 case 0x039: /* VIS I fmuld8ulx16 */
3255 gen_op_load_fpr_DT0(DFPREG(rs1
));
3256 gen_op_load_fpr_DT1(DFPREG(rs2
));
3257 gen_op_fmuld8ulx16();
3258 gen_op_store_DT0_fpr(DFPREG(rd
));
3260 case 0x03a: /* VIS I fpack32 */
3261 case 0x03b: /* VIS I fpack16 */
3262 case 0x03d: /* VIS I fpackfix */
3263 case 0x03e: /* VIS I pdist */
3266 case 0x048: /* VIS I faligndata */
3267 gen_op_load_fpr_DT0(DFPREG(rs1
));
3268 gen_op_load_fpr_DT1(DFPREG(rs2
));
3269 gen_op_faligndata();
3270 gen_op_store_DT0_fpr(DFPREG(rd
));
3272 case 0x04b: /* VIS I fpmerge */
3273 gen_op_load_fpr_DT0(DFPREG(rs1
));
3274 gen_op_load_fpr_DT1(DFPREG(rs2
));
3276 gen_op_store_DT0_fpr(DFPREG(rd
));
3278 case 0x04c: /* VIS II bshuffle */
3281 case 0x04d: /* VIS I fexpand */
3282 gen_op_load_fpr_DT0(DFPREG(rs1
));
3283 gen_op_load_fpr_DT1(DFPREG(rs2
));
3285 gen_op_store_DT0_fpr(DFPREG(rd
));
3287 case 0x050: /* VIS I fpadd16 */
3288 gen_op_load_fpr_DT0(DFPREG(rs1
));
3289 gen_op_load_fpr_DT1(DFPREG(rs2
));
3291 gen_op_store_DT0_fpr(DFPREG(rd
));
3293 case 0x051: /* VIS I fpadd16s */
3294 gen_op_load_fpr_FT0(rs1
);
3295 gen_op_load_fpr_FT1(rs2
);
3297 gen_op_store_FT0_fpr(rd
);
3299 case 0x052: /* VIS I fpadd32 */
3300 gen_op_load_fpr_DT0(DFPREG(rs1
));
3301 gen_op_load_fpr_DT1(DFPREG(rs2
));
3303 gen_op_store_DT0_fpr(DFPREG(rd
));
3305 case 0x053: /* VIS I fpadd32s */
3306 gen_op_load_fpr_FT0(rs1
);
3307 gen_op_load_fpr_FT1(rs2
);
3309 gen_op_store_FT0_fpr(rd
);
3311 case 0x054: /* VIS I fpsub16 */
3312 gen_op_load_fpr_DT0(DFPREG(rs1
));
3313 gen_op_load_fpr_DT1(DFPREG(rs2
));
3315 gen_op_store_DT0_fpr(DFPREG(rd
));
3317 case 0x055: /* VIS I fpsub16s */
3318 gen_op_load_fpr_FT0(rs1
);
3319 gen_op_load_fpr_FT1(rs2
);
3321 gen_op_store_FT0_fpr(rd
);
3323 case 0x056: /* VIS I fpsub32 */
3324 gen_op_load_fpr_DT0(DFPREG(rs1
));
3325 gen_op_load_fpr_DT1(DFPREG(rs2
));
3327 gen_op_store_DT0_fpr(DFPREG(rd
));
3329 case 0x057: /* VIS I fpsub32s */
3330 gen_op_load_fpr_FT0(rs1
);
3331 gen_op_load_fpr_FT1(rs2
);
3333 gen_op_store_FT0_fpr(rd
);
3335 case 0x060: /* VIS I fzero */
3336 gen_op_movl_DT0_0();
3337 gen_op_store_DT0_fpr(DFPREG(rd
));
3339 case 0x061: /* VIS I fzeros */
3340 gen_op_movl_FT0_0();
3341 gen_op_store_FT0_fpr(rd
);
3343 case 0x062: /* VIS I fnor */
3344 gen_op_load_fpr_DT0(DFPREG(rs1
));
3345 gen_op_load_fpr_DT1(DFPREG(rs2
));
3347 gen_op_store_DT0_fpr(DFPREG(rd
));
3349 case 0x063: /* VIS I fnors */
3350 gen_op_load_fpr_FT0(rs1
);
3351 gen_op_load_fpr_FT1(rs2
);
3353 gen_op_store_FT0_fpr(rd
);
3355 case 0x064: /* VIS I fandnot2 */
3356 gen_op_load_fpr_DT1(DFPREG(rs1
));
3357 gen_op_load_fpr_DT0(DFPREG(rs2
));
3359 gen_op_store_DT0_fpr(DFPREG(rd
));
3361 case 0x065: /* VIS I fandnot2s */
3362 gen_op_load_fpr_FT1(rs1
);
3363 gen_op_load_fpr_FT0(rs2
);
3365 gen_op_store_FT0_fpr(rd
);
3367 case 0x066: /* VIS I fnot2 */
3368 gen_op_load_fpr_DT1(DFPREG(rs2
));
3370 gen_op_store_DT0_fpr(DFPREG(rd
));
3372 case 0x067: /* VIS I fnot2s */
3373 gen_op_load_fpr_FT1(rs2
);
3375 gen_op_store_FT0_fpr(rd
);
3377 case 0x068: /* VIS I fandnot1 */
3378 gen_op_load_fpr_DT0(DFPREG(rs1
));
3379 gen_op_load_fpr_DT1(DFPREG(rs2
));
3381 gen_op_store_DT0_fpr(DFPREG(rd
));
3383 case 0x069: /* VIS I fandnot1s */
3384 gen_op_load_fpr_FT0(rs1
);
3385 gen_op_load_fpr_FT1(rs2
);
3387 gen_op_store_FT0_fpr(rd
);
3389 case 0x06a: /* VIS I fnot1 */
3390 gen_op_load_fpr_DT1(DFPREG(rs1
));
3392 gen_op_store_DT0_fpr(DFPREG(rd
));
3394 case 0x06b: /* VIS I fnot1s */
3395 gen_op_load_fpr_FT1(rs1
);
3397 gen_op_store_FT0_fpr(rd
);
3399 case 0x06c: /* VIS I fxor */
3400 gen_op_load_fpr_DT0(DFPREG(rs1
));
3401 gen_op_load_fpr_DT1(DFPREG(rs2
));
3403 gen_op_store_DT0_fpr(DFPREG(rd
));
3405 case 0x06d: /* VIS I fxors */
3406 gen_op_load_fpr_FT0(rs1
);
3407 gen_op_load_fpr_FT1(rs2
);
3409 gen_op_store_FT0_fpr(rd
);
3411 case 0x06e: /* VIS I fnand */
3412 gen_op_load_fpr_DT0(DFPREG(rs1
));
3413 gen_op_load_fpr_DT1(DFPREG(rs2
));
3415 gen_op_store_DT0_fpr(DFPREG(rd
));
3417 case 0x06f: /* VIS I fnands */
3418 gen_op_load_fpr_FT0(rs1
);
3419 gen_op_load_fpr_FT1(rs2
);
3421 gen_op_store_FT0_fpr(rd
);
3423 case 0x070: /* VIS I fand */
3424 gen_op_load_fpr_DT0(DFPREG(rs1
));
3425 gen_op_load_fpr_DT1(DFPREG(rs2
));
3427 gen_op_store_DT0_fpr(DFPREG(rd
));
3429 case 0x071: /* VIS I fands */
3430 gen_op_load_fpr_FT0(rs1
);
3431 gen_op_load_fpr_FT1(rs2
);
3433 gen_op_store_FT0_fpr(rd
);
3435 case 0x072: /* VIS I fxnor */
3436 gen_op_load_fpr_DT0(DFPREG(rs1
));
3437 gen_op_load_fpr_DT1(DFPREG(rs2
));
3439 gen_op_store_DT0_fpr(DFPREG(rd
));
3441 case 0x073: /* VIS I fxnors */
3442 gen_op_load_fpr_FT0(rs1
);
3443 gen_op_load_fpr_FT1(rs2
);
3445 gen_op_store_FT0_fpr(rd
);
3447 case 0x074: /* VIS I fsrc1 */
3448 gen_op_load_fpr_DT0(DFPREG(rs1
));
3449 gen_op_store_DT0_fpr(DFPREG(rd
));
3451 case 0x075: /* VIS I fsrc1s */
3452 gen_op_load_fpr_FT0(rs1
);
3453 gen_op_store_FT0_fpr(rd
);
3455 case 0x076: /* VIS I fornot2 */
3456 gen_op_load_fpr_DT1(DFPREG(rs1
));
3457 gen_op_load_fpr_DT0(DFPREG(rs2
));
3459 gen_op_store_DT0_fpr(DFPREG(rd
));
3461 case 0x077: /* VIS I fornot2s */
3462 gen_op_load_fpr_FT1(rs1
);
3463 gen_op_load_fpr_FT0(rs2
);
3465 gen_op_store_FT0_fpr(rd
);
3467 case 0x078: /* VIS I fsrc2 */
3468 gen_op_load_fpr_DT0(DFPREG(rs2
));
3469 gen_op_store_DT0_fpr(DFPREG(rd
));
3471 case 0x079: /* VIS I fsrc2s */
3472 gen_op_load_fpr_FT0(rs2
);
3473 gen_op_store_FT0_fpr(rd
);
3475 case 0x07a: /* VIS I fornot1 */
3476 gen_op_load_fpr_DT0(DFPREG(rs1
));
3477 gen_op_load_fpr_DT1(DFPREG(rs2
));
3479 gen_op_store_DT0_fpr(DFPREG(rd
));
3481 case 0x07b: /* VIS I fornot1s */
3482 gen_op_load_fpr_FT0(rs1
);
3483 gen_op_load_fpr_FT1(rs2
);
3485 gen_op_store_FT0_fpr(rd
);
3487 case 0x07c: /* VIS I for */
3488 gen_op_load_fpr_DT0(DFPREG(rs1
));
3489 gen_op_load_fpr_DT1(DFPREG(rs2
));
3491 gen_op_store_DT0_fpr(DFPREG(rd
));
3493 case 0x07d: /* VIS I fors */
3494 gen_op_load_fpr_FT0(rs1
);
3495 gen_op_load_fpr_FT1(rs2
);
3497 gen_op_store_FT0_fpr(rd
);
3499 case 0x07e: /* VIS I fone */
3500 gen_op_movl_DT0_1();
3501 gen_op_store_DT0_fpr(DFPREG(rd
));
3503 case 0x07f: /* VIS I fones */
3504 gen_op_movl_FT0_1();
3505 gen_op_store_FT0_fpr(rd
);
3507 case 0x080: /* VIS I shutdown */
3508 case 0x081: /* VIS II siam */
3517 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
3518 #ifdef TARGET_SPARC64
3523 #ifdef TARGET_SPARC64
3524 } else if (xop
== 0x39) { /* V9 return */
3525 rs1
= GET_FIELD(insn
, 13, 17);
3527 gen_movl_reg_T0(rs1
);
3528 if (IS_IMM
) { /* immediate */
3529 rs2
= GET_FIELDs(insn
, 19, 31);
3530 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], (int)rs2
);
3531 } else { /* register */
3532 rs2
= GET_FIELD(insn
, 27, 31);
3536 gen_movl_reg_T1(rs2
);
3544 gen_op_check_align_T0_3();
3545 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUSPARCState
, npc
));
3546 dc
->npc
= DYNAMIC_PC
;
3550 rs1
= GET_FIELD(insn
, 13, 17);
3551 gen_movl_reg_T0(rs1
);
3552 if (IS_IMM
) { /* immediate */
3553 rs2
= GET_FIELDs(insn
, 19, 31);
3554 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], (int)rs2
);
3555 } else { /* register */
3556 rs2
= GET_FIELD(insn
, 27, 31);
3560 gen_movl_reg_T1(rs2
);
3567 case 0x38: /* jmpl */
3570 tcg_gen_movi_tl(cpu_T
[1], dc
->pc
);
3571 gen_movl_T1_reg(rd
);
3574 gen_op_check_align_T0_3();
3575 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUSPARCState
, npc
));
3576 dc
->npc
= DYNAMIC_PC
;
3579 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
3580 case 0x39: /* rett, V9 return */
3582 if (!supervisor(dc
))
3585 gen_op_check_align_T0_3();
3586 tcg_gen_st_tl(cpu_T
[0], cpu_env
, offsetof(CPUSPARCState
, npc
));
3587 dc
->npc
= DYNAMIC_PC
;
3588 tcg_gen_helper_0_0(helper_rett
);
3592 case 0x3b: /* flush */
3593 tcg_gen_helper_0_1(helper_flush
, cpu_T
[0]);
3595 case 0x3c: /* save */
3598 gen_movl_T0_reg(rd
);
3600 case 0x3d: /* restore */
3603 gen_movl_T0_reg(rd
);
3605 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
3606 case 0x3e: /* V9 done/retry */
3610 if (!supervisor(dc
))
3612 dc
->npc
= DYNAMIC_PC
;
3613 dc
->pc
= DYNAMIC_PC
;
3614 tcg_gen_helper_0_0(helper_done
);
3617 if (!supervisor(dc
))
3619 dc
->npc
= DYNAMIC_PC
;
3620 dc
->pc
= DYNAMIC_PC
;
3621 tcg_gen_helper_0_0(helper_retry
);
3636 case 3: /* load/store instructions */
3638 unsigned int xop
= GET_FIELD(insn
, 7, 12);
3639 rs1
= GET_FIELD(insn
, 13, 17);
3641 gen_movl_reg_T0(rs1
);
3642 if (xop
== 0x3c || xop
== 0x3e)
3644 rs2
= GET_FIELD(insn
, 27, 31);
3645 gen_movl_reg_T1(rs2
);
3647 else if (IS_IMM
) { /* immediate */
3648 rs2
= GET_FIELDs(insn
, 19, 31);
3649 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], (int)rs2
);
3650 } else { /* register */
3651 rs2
= GET_FIELD(insn
, 27, 31);
3655 gen_movl_reg_T1(rs2
);
3661 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
3662 (xop
> 0x17 && xop
<= 0x1d ) ||
3663 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
3665 case 0x0: /* load unsigned word */
3666 gen_op_check_align_T0_3();
3667 ABI32_MASK(cpu_T
[0]);
3668 tcg_gen_qemu_ld32u(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
3670 case 0x1: /* load unsigned byte */
3671 ABI32_MASK(cpu_T
[0]);
3672 tcg_gen_qemu_ld8u(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
3674 case 0x2: /* load unsigned halfword */
3675 gen_op_check_align_T0_1();
3676 ABI32_MASK(cpu_T
[0]);
3677 tcg_gen_qemu_ld16u(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
3679 case 0x3: /* load double word */
3685 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
3686 gen_op_check_align_T0_7();
3687 ABI32_MASK(cpu_T
[0]);
3688 tcg_gen_qemu_ld64(r_dword
, cpu_T
[0], dc
->mem_idx
);
3689 tcg_gen_trunc_i64_i32(cpu_T
[0], r_dword
);
3690 gen_movl_T0_reg(rd
+ 1);
3691 tcg_gen_shri_i64(r_dword
, r_dword
, 32);
3692 tcg_gen_trunc_i64_i32(cpu_T
[1], r_dword
);
3695 case 0x9: /* load signed byte */
3696 ABI32_MASK(cpu_T
[0]);
3697 tcg_gen_qemu_ld8s(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
3699 case 0xa: /* load signed halfword */
3700 gen_op_check_align_T0_1();
3701 ABI32_MASK(cpu_T
[0]);
3702 tcg_gen_qemu_ld16s(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
3704 case 0xd: /* ldstub -- XXX: should be atomically */
3705 tcg_gen_movi_i32(cpu_tmp0
, 0xff);
3706 ABI32_MASK(cpu_T
[0]);
3707 tcg_gen_qemu_ld8s(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
3708 tcg_gen_qemu_st8(cpu_tmp0
, cpu_T
[0], dc
->mem_idx
);
3710 case 0x0f: /* swap register with memory. Also atomically */
3711 gen_op_check_align_T0_3();
3712 gen_movl_reg_T1(rd
);
3713 ABI32_MASK(cpu_T
[0]);
3714 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_T
[0], dc
->mem_idx
);
3715 tcg_gen_qemu_st32(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
3716 tcg_gen_mov_i32(cpu_T
[1], cpu_tmp0
);
3718 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
3719 case 0x10: /* load word alternate */
3720 #ifndef TARGET_SPARC64
3723 if (!supervisor(dc
))
3726 gen_op_check_align_T0_3();
3727 gen_ld_asi(insn
, 4, 0);
3729 case 0x11: /* load unsigned byte alternate */
3730 #ifndef TARGET_SPARC64
3733 if (!supervisor(dc
))
3736 gen_ld_asi(insn
, 1, 0);
3738 case 0x12: /* load unsigned halfword alternate */
3739 #ifndef TARGET_SPARC64
3742 if (!supervisor(dc
))
3745 gen_op_check_align_T0_1();
3746 gen_ld_asi(insn
, 2, 0);
3748 case 0x13: /* load double word alternate */
3749 #ifndef TARGET_SPARC64
3752 if (!supervisor(dc
))
3757 gen_op_check_align_T0_7();
3759 gen_movl_T0_reg(rd
+ 1);
3761 case 0x19: /* load signed byte alternate */
3762 #ifndef TARGET_SPARC64
3765 if (!supervisor(dc
))
3768 gen_ld_asi(insn
, 1, 1);
3770 case 0x1a: /* load signed halfword alternate */
3771 #ifndef TARGET_SPARC64
3774 if (!supervisor(dc
))
3777 gen_op_check_align_T0_1();
3778 gen_ld_asi(insn
, 2, 1);
3780 case 0x1d: /* ldstuba -- XXX: should be atomically */
3781 #ifndef TARGET_SPARC64
3784 if (!supervisor(dc
))
3787 gen_ldstub_asi(insn
);
3789 case 0x1f: /* swap reg with alt. memory. Also atomically */
3790 #ifndef TARGET_SPARC64
3793 if (!supervisor(dc
))
3796 gen_op_check_align_T0_3();
3797 gen_movl_reg_T1(rd
);
3801 #ifndef TARGET_SPARC64
3802 case 0x30: /* ldc */
3803 case 0x31: /* ldcsr */
3804 case 0x33: /* lddc */
3808 #ifdef TARGET_SPARC64
3809 case 0x08: /* V9 ldsw */
3810 gen_op_check_align_T0_3();
3811 ABI32_MASK(cpu_T
[0]);
3812 tcg_gen_qemu_ld32s(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
3814 case 0x0b: /* V9 ldx */
3815 gen_op_check_align_T0_7();
3816 ABI32_MASK(cpu_T
[0]);
3817 tcg_gen_qemu_ld64(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
3819 case 0x18: /* V9 ldswa */
3820 gen_op_check_align_T0_3();
3821 gen_ld_asi(insn
, 4, 1);
3823 case 0x1b: /* V9 ldxa */
3824 gen_op_check_align_T0_7();
3825 gen_ld_asi(insn
, 8, 0);
3827 case 0x2d: /* V9 prefetch, no effect */
3829 case 0x30: /* V9 ldfa */
3830 gen_op_check_align_T0_3();
3831 gen_ldf_asi(insn
, 4, rd
);
3833 case 0x33: /* V9 lddfa */
3834 gen_op_check_align_T0_3();
3835 gen_ldf_asi(insn
, 8, DFPREG(rd
));
3837 case 0x3d: /* V9 prefetcha, no effect */
3839 case 0x32: /* V9 ldqfa */
3840 #if defined(CONFIG_USER_ONLY)
3841 gen_op_check_align_T0_3();
3842 gen_ldf_asi(insn
, 16, QFPREG(rd
));
3851 gen_movl_T1_reg(rd
);
3852 #ifdef TARGET_SPARC64
3855 } else if (xop
>= 0x20 && xop
< 0x24) {
3856 if (gen_trap_ifnofpu(dc
))
3859 case 0x20: /* load fpreg */
3860 gen_op_check_align_T0_3();
3862 gen_op_store_FT0_fpr(rd
);
3864 case 0x21: /* load fsr */
3865 gen_op_check_align_T0_3();
3868 tcg_gen_helper_0_0(helper_ldfsr
);
3870 case 0x22: /* load quad fpreg */
3871 #if defined(CONFIG_USER_ONLY)
3872 gen_op_check_align_T0_7();
3874 gen_op_store_QT0_fpr(QFPREG(rd
));
3879 case 0x23: /* load double fpreg */
3880 gen_op_check_align_T0_7();
3882 gen_op_store_DT0_fpr(DFPREG(rd
));
3887 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) || \
3888 xop
== 0xe || xop
== 0x1e) {
3889 gen_movl_reg_T1(rd
);
3891 case 0x4: /* store word */
3892 gen_op_check_align_T0_3();
3893 ABI32_MASK(cpu_T
[0]);
3894 tcg_gen_qemu_st32(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
3896 case 0x5: /* store byte */
3897 ABI32_MASK(cpu_T
[0]);
3898 tcg_gen_qemu_st8(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
3900 case 0x6: /* store halfword */
3901 gen_op_check_align_T0_1();
3902 ABI32_MASK(cpu_T
[0]);
3903 tcg_gen_qemu_st16(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
3905 case 0x7: /* store double word */
3910 TCGv r_dword
, r_low
;
3912 gen_op_check_align_T0_7();
3913 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
3914 r_low
= tcg_temp_new(TCG_TYPE_I32
);
3915 gen_movl_reg_TN(rd
+ 1, r_low
);
3916 tcg_gen_helper_1_2(helper_pack64
, r_dword
, cpu_T
[1],
3918 tcg_gen_qemu_st64(r_dword
, cpu_T
[0], dc
->mem_idx
);
3920 #else /* __i386__ */
3921 gen_op_check_align_T0_7();
3923 gen_movl_reg_T2(rd
+ 1);
3925 #endif /* __i386__ */
3927 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
3928 case 0x14: /* store word alternate */
3929 #ifndef TARGET_SPARC64
3932 if (!supervisor(dc
))
3935 gen_op_check_align_T0_3();
3936 gen_st_asi(insn
, 4);
3938 case 0x15: /* store byte alternate */
3939 #ifndef TARGET_SPARC64
3942 if (!supervisor(dc
))
3945 gen_st_asi(insn
, 1);
3947 case 0x16: /* store halfword alternate */
3948 #ifndef TARGET_SPARC64
3951 if (!supervisor(dc
))
3954 gen_op_check_align_T0_1();
3955 gen_st_asi(insn
, 2);
3957 case 0x17: /* store double word alternate */
3958 #ifndef TARGET_SPARC64
3961 if (!supervisor(dc
))
3968 TCGv r_dword
, r_temp
, r_size
;
3970 gen_op_check_align_T0_7();
3971 r_dword
= tcg_temp_new(TCG_TYPE_I64
);
3972 r_temp
= tcg_temp_new(TCG_TYPE_I32
);
3973 r_size
= tcg_temp_new(TCG_TYPE_I32
);
3974 gen_movl_reg_TN(rd
+ 1, r_temp
);
3975 tcg_gen_helper_1_2(helper_pack64
, r_dword
, cpu_T
[1],
3977 #ifdef TARGET_SPARC64
3981 offset
= GET_FIELD(insn
, 25, 31);
3982 tcg_gen_addi_tl(cpu_T
[0], cpu_T
[0], offset
);
3983 tcg_gen_ld_i32(r_dword
, cpu_env
, offsetof(CPUSPARCState
, asi
));
3986 asi
= GET_FIELD(insn
, 19, 26);
3987 tcg_gen_movi_i32(r_temp
, asi
);
3988 #ifdef TARGET_SPARC64
3991 tcg_gen_movi_i32(r_size
, 8);
3992 tcg_gen_helper_0_4(helper_st_asi
, cpu_T
[0], r_dword
, r_temp
, r_size
);
3996 #ifdef TARGET_SPARC64
3997 case 0x0e: /* V9 stx */
3998 gen_op_check_align_T0_7();
3999 ABI32_MASK(cpu_T
[0]);
4000 tcg_gen_qemu_st64(cpu_T
[1], cpu_T
[0], dc
->mem_idx
);
4002 case 0x1e: /* V9 stxa */
4003 gen_op_check_align_T0_7();
4004 gen_st_asi(insn
, 8);
4010 } else if (xop
> 0x23 && xop
< 0x28) {
4011 if (gen_trap_ifnofpu(dc
))
4015 gen_op_check_align_T0_3();
4016 gen_op_load_fpr_FT0(rd
);
4019 case 0x25: /* stfsr, V9 stxfsr */
4020 #ifdef CONFIG_USER_ONLY
4021 gen_op_check_align_T0_3();
4027 #ifdef TARGET_SPARC64
4028 #if defined(CONFIG_USER_ONLY)
4029 /* V9 stqf, store quad fpreg */
4030 gen_op_check_align_T0_7();
4031 gen_op_load_fpr_QT0(QFPREG(rd
));
4037 #else /* !TARGET_SPARC64 */
4038 /* stdfq, store floating point queue */
4039 #if defined(CONFIG_USER_ONLY)
4042 if (!supervisor(dc
))
4044 if (gen_trap_ifnofpu(dc
))
4050 gen_op_check_align_T0_7();
4051 gen_op_load_fpr_DT0(DFPREG(rd
));
4057 } else if (xop
> 0x33 && xop
< 0x3f) {
4059 #ifdef TARGET_SPARC64
4060 case 0x34: /* V9 stfa */
4061 gen_op_check_align_T0_3();
4062 gen_op_load_fpr_FT0(rd
);
4063 gen_stf_asi(insn
, 4, rd
);
4065 case 0x36: /* V9 stqfa */
4066 #if defined(CONFIG_USER_ONLY)
4067 gen_op_check_align_T0_7();
4068 gen_op_load_fpr_QT0(QFPREG(rd
));
4069 gen_stf_asi(insn
, 16, QFPREG(rd
));
4074 case 0x37: /* V9 stdfa */
4075 gen_op_check_align_T0_3();
4076 gen_op_load_fpr_DT0(DFPREG(rd
));
4077 gen_stf_asi(insn
, 8, DFPREG(rd
));
4079 case 0x3c: /* V9 casa */
4080 gen_op_check_align_T0_3();
4081 gen_cas_asi(insn
, rd
);
4082 gen_movl_T1_reg(rd
);
4084 case 0x3e: /* V9 casxa */
4085 gen_op_check_align_T0_7();
4086 gen_casx_asi(insn
, rd
);
4087 gen_movl_T1_reg(rd
);
4090 case 0x34: /* stc */
4091 case 0x35: /* stcsr */
4092 case 0x36: /* stdcq */
4093 case 0x37: /* stdc */
4105 /* default case for non jump instructions */
4106 if (dc
->npc
== DYNAMIC_PC
) {
4107 dc
->pc
= DYNAMIC_PC
;
4109 } else if (dc
->npc
== JUMP_PC
) {
4110 /* we can do a static jump */
4111 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_T
[2]);
4115 dc
->npc
= dc
->npc
+ 4;
4121 gen_op_exception(TT_ILL_INSN
);
4124 #if !defined(CONFIG_USER_ONLY)
4127 gen_op_exception(TT_PRIV_INSN
);
4132 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4135 #ifndef TARGET_SPARC64
4138 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4143 #ifndef TARGET_SPARC64
4146 gen_op_exception(TT_NCP_INSN
);
4152 static void tcg_macro_func(TCGContext
*s
, int macro_id
, const int *dead_args
)
4156 static inline int gen_intermediate_code_internal(TranslationBlock
* tb
,
4157 int spc
, CPUSPARCState
*env
)
4159 target_ulong pc_start
, last_pc
;
4160 uint16_t *gen_opc_end
;
4161 DisasContext dc1
, *dc
= &dc1
;
4164 memset(dc
, 0, sizeof(DisasContext
));
4169 dc
->npc
= (target_ulong
) tb
->cs_base
;
4170 dc
->mem_idx
= cpu_mmu_index(env
);
4171 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4172 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4174 cpu_tmp0
= tcg_temp_new(TCG_TYPE_TL
);
4175 cpu_regwptr
= tcg_temp_new(TCG_TYPE_PTR
); // XXX
4178 if (env
->nb_breakpoints
> 0) {
4179 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
4180 if (env
->breakpoints
[j
] == dc
->pc
) {
4181 if (dc
->pc
!= pc_start
)
4183 tcg_gen_helper_0_0(helper_debug
);
4192 fprintf(logfile
, "Search PC...\n");
4193 j
= gen_opc_ptr
- gen_opc_buf
;
4197 gen_opc_instr_start
[lj
++] = 0;
4198 gen_opc_pc
[lj
] = dc
->pc
;
4199 gen_opc_npc
[lj
] = dc
->npc
;
4200 gen_opc_instr_start
[lj
] = 1;
4204 disas_sparc_insn(dc
);
4208 /* if the next PC is different, we abort now */
4209 if (dc
->pc
!= (last_pc
+ 4))
4211 /* if we reach a page boundary, we stop generation so that the
4212 PC of a TT_TFAULT exception is always in the right page */
4213 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4215 /* if single step mode, we generate only one instruction and
4216 generate an exception */
4217 if (env
->singlestep_enabled
) {
4222 } while ((gen_opc_ptr
< gen_opc_end
) &&
4223 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32));
4227 if (dc
->pc
!= DYNAMIC_PC
&&
4228 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
4229 /* static PC and NPC: we can use direct chaining */
4230 gen_branch(dc
, dc
->pc
, dc
->npc
);
4232 if (dc
->pc
!= DYNAMIC_PC
)
4238 *gen_opc_ptr
= INDEX_op_end
;
4240 j
= gen_opc_ptr
- gen_opc_buf
;
4243 gen_opc_instr_start
[lj
++] = 0;
4249 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
4250 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
4252 tb
->size
= last_pc
+ 4 - pc_start
;
4255 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
4256 fprintf(logfile
, "--------------\n");
4257 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
4258 target_disas(logfile
, pc_start
, last_pc
+ 4 - pc_start
, 0);
4259 fprintf(logfile
, "\n");
4265 int gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
4267 return gen_intermediate_code_internal(tb
, 0, env
);
4270 int gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
4272 return gen_intermediate_code_internal(tb
, 1, env
);
4275 void cpu_reset(CPUSPARCState
*env
)
4280 env
->regwptr
= env
->regbase
+ (env
->cwp
* 16);
4281 #if defined(CONFIG_USER_ONLY)
4282 env
->user_mode_only
= 1;
4283 #ifdef TARGET_SPARC64
4284 env
->cleanwin
= NWINDOWS
- 2;
4285 env
->cansave
= NWINDOWS
- 2;
4286 env
->pstate
= PS_RMO
| PS_PEF
| PS_IE
;
4287 env
->asi
= 0x82; // Primary no-fault
4293 #ifdef TARGET_SPARC64
4294 env
->pstate
= PS_PRIV
;
4295 env
->hpstate
= HS_PRIV
;
4296 env
->pc
= 0x1fff0000000ULL
;
4297 env
->tsptr
= &env
->ts
[env
->tl
];
4300 env
->mmuregs
[0] &= ~(MMU_E
| MMU_NF
);
4301 env
->mmuregs
[0] |= env
->mmu_bm
;
4303 env
->npc
= env
->pc
+ 4;
4307 CPUSPARCState
*cpu_sparc_init(const char *cpu_model
)
4310 const sparc_def_t
*def
;
4313 def
= cpu_sparc_find_by_name(cpu_model
);
4317 env
= qemu_mallocz(sizeof(CPUSPARCState
));
4321 env
->cpu_model_str
= cpu_model
;
4322 env
->version
= def
->iu_version
;
4323 env
->fsr
= def
->fpu_version
;
4324 #if !defined(TARGET_SPARC64)
4325 env
->mmu_bm
= def
->mmu_bm
;
4326 env
->mmu_ctpr_mask
= def
->mmu_ctpr_mask
;
4327 env
->mmu_cxr_mask
= def
->mmu_cxr_mask
;
4328 env
->mmu_sfsr_mask
= def
->mmu_sfsr_mask
;
4329 env
->mmu_trcr_mask
= def
->mmu_trcr_mask
;
4330 env
->mmuregs
[0] |= def
->mmu_version
;
4331 cpu_sparc_set_id(env
, 0);
4334 /* init various static tables */
4338 tcg_set_macro_func(&tcg_ctx
, tcg_macro_func
);
4339 cpu_env
= tcg_global_reg_new(TCG_TYPE_PTR
, TCG_AREG0
, "env");
4340 //#if TARGET_LONG_BITS > HOST_LONG_BITS
4341 #ifdef TARGET_SPARC64
4342 cpu_T
[0] = tcg_global_mem_new(TCG_TYPE_TL
,
4343 TCG_AREG0
, offsetof(CPUState
, t0
), "T0");
4344 cpu_T
[1] = tcg_global_mem_new(TCG_TYPE_TL
,
4345 TCG_AREG0
, offsetof(CPUState
, t1
), "T1");
4346 cpu_T
[2] = tcg_global_mem_new(TCG_TYPE_TL
,
4347 TCG_AREG0
, offsetof(CPUState
, t2
), "T2");
4349 cpu_T
[0] = tcg_global_reg_new(TCG_TYPE_TL
, TCG_AREG1
, "T0");
4350 cpu_T
[1] = tcg_global_reg_new(TCG_TYPE_TL
, TCG_AREG2
, "T1");
4351 cpu_T
[2] = tcg_global_reg_new(TCG_TYPE_TL
, TCG_AREG3
, "T2");
4360 void cpu_sparc_set_id(CPUSPARCState
*env
, unsigned int cpu
)
4362 #if !defined(TARGET_SPARC64)
4363 env
->mxccregs
[7] = ((cpu
+ 8) & 0xf) << 24;
4367 static const sparc_def_t sparc_defs
[] = {
4368 #ifdef TARGET_SPARC64
4370 .name
= "Fujitsu Sparc64",
4371 .iu_version
= ((0x04ULL
<< 48) | (0x02ULL
<< 32) | (0ULL << 24)
4372 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4373 .fpu_version
= 0x00000000,
4377 .name
= "Fujitsu Sparc64 III",
4378 .iu_version
= ((0x04ULL
<< 48) | (0x03ULL
<< 32) | (0ULL << 24)
4379 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4380 .fpu_version
= 0x00000000,
4384 .name
= "Fujitsu Sparc64 IV",
4385 .iu_version
= ((0x04ULL
<< 48) | (0x04ULL
<< 32) | (0ULL << 24)
4386 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4387 .fpu_version
= 0x00000000,
4391 .name
= "Fujitsu Sparc64 V",
4392 .iu_version
= ((0x04ULL
<< 48) | (0x05ULL
<< 32) | (0x51ULL
<< 24)
4393 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4394 .fpu_version
= 0x00000000,
4398 .name
= "TI UltraSparc I",
4399 .iu_version
= ((0x17ULL
<< 48) | (0x10ULL
<< 32) | (0x40ULL
<< 24)
4400 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4401 .fpu_version
= 0x00000000,
4405 .name
= "TI UltraSparc II",
4406 .iu_version
= ((0x17ULL
<< 48) | (0x11ULL
<< 32) | (0x20ULL
<< 24)
4407 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4408 .fpu_version
= 0x00000000,
4412 .name
= "TI UltraSparc IIi",
4413 .iu_version
= ((0x17ULL
<< 48) | (0x12ULL
<< 32) | (0x91ULL
<< 24)
4414 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4415 .fpu_version
= 0x00000000,
4419 .name
= "TI UltraSparc IIe",
4420 .iu_version
= ((0x17ULL
<< 48) | (0x13ULL
<< 32) | (0x14ULL
<< 24)
4421 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4422 .fpu_version
= 0x00000000,
4426 .name
= "Sun UltraSparc III",
4427 .iu_version
= ((0x3eULL
<< 48) | (0x14ULL
<< 32) | (0x34ULL
<< 24)
4428 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4429 .fpu_version
= 0x00000000,
4433 .name
= "Sun UltraSparc III Cu",
4434 .iu_version
= ((0x3eULL
<< 48) | (0x15ULL
<< 32) | (0x41ULL
<< 24)
4435 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4436 .fpu_version
= 0x00000000,
4440 .name
= "Sun UltraSparc IIIi",
4441 .iu_version
= ((0x3eULL
<< 48) | (0x16ULL
<< 32) | (0x34ULL
<< 24)
4442 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4443 .fpu_version
= 0x00000000,
4447 .name
= "Sun UltraSparc IV",
4448 .iu_version
= ((0x3eULL
<< 48) | (0x18ULL
<< 32) | (0x31ULL
<< 24)
4449 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4450 .fpu_version
= 0x00000000,
4454 .name
= "Sun UltraSparc IV+",
4455 .iu_version
= ((0x3eULL
<< 48) | (0x19ULL
<< 32) | (0x22ULL
<< 24)
4456 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4457 .fpu_version
= 0x00000000,
4461 .name
= "Sun UltraSparc IIIi+",
4462 .iu_version
= ((0x3eULL
<< 48) | (0x22ULL
<< 32) | (0ULL << 24)
4463 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4464 .fpu_version
= 0x00000000,
4468 .name
= "NEC UltraSparc I",
4469 .iu_version
= ((0x22ULL
<< 48) | (0x10ULL
<< 32) | (0x40ULL
<< 24)
4470 | (MAXTL
<< 8) | (NWINDOWS
- 1)),
4471 .fpu_version
= 0x00000000,
4476 .name
= "Fujitsu MB86900",
4477 .iu_version
= 0x00 << 24, /* Impl 0, ver 0 */
4478 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
4479 .mmu_version
= 0x00 << 24, /* Impl 0, ver 0 */
4480 .mmu_bm
= 0x00004000,
4481 .mmu_ctpr_mask
= 0x007ffff0,
4482 .mmu_cxr_mask
= 0x0000003f,
4483 .mmu_sfsr_mask
= 0xffffffff,
4484 .mmu_trcr_mask
= 0xffffffff,
4487 .name
= "Fujitsu MB86904",
4488 .iu_version
= 0x04 << 24, /* Impl 0, ver 4 */
4489 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
4490 .mmu_version
= 0x04 << 24, /* Impl 0, ver 4 */
4491 .mmu_bm
= 0x00004000,
4492 .mmu_ctpr_mask
= 0x00ffffc0,
4493 .mmu_cxr_mask
= 0x000000ff,
4494 .mmu_sfsr_mask
= 0x00016fff,
4495 .mmu_trcr_mask
= 0x00ffffff,
4498 .name
= "Fujitsu MB86907",
4499 .iu_version
= 0x05 << 24, /* Impl 0, ver 5 */
4500 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
4501 .mmu_version
= 0x05 << 24, /* Impl 0, ver 5 */
4502 .mmu_bm
= 0x00004000,
4503 .mmu_ctpr_mask
= 0xffffffc0,
4504 .mmu_cxr_mask
= 0x000000ff,
4505 .mmu_sfsr_mask
= 0x00016fff,
4506 .mmu_trcr_mask
= 0xffffffff,
4509 .name
= "LSI L64811",
4510 .iu_version
= 0x10 << 24, /* Impl 1, ver 0 */
4511 .fpu_version
= 1 << 17, /* FPU version 1 (LSI L64814) */
4512 .mmu_version
= 0x10 << 24,
4513 .mmu_bm
= 0x00004000,
4514 .mmu_ctpr_mask
= 0x007ffff0,
4515 .mmu_cxr_mask
= 0x0000003f,
4516 .mmu_sfsr_mask
= 0xffffffff,
4517 .mmu_trcr_mask
= 0xffffffff,
4520 .name
= "Cypress CY7C601",
4521 .iu_version
= 0x11 << 24, /* Impl 1, ver 1 */
4522 .fpu_version
= 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4523 .mmu_version
= 0x10 << 24,
4524 .mmu_bm
= 0x00004000,
4525 .mmu_ctpr_mask
= 0x007ffff0,
4526 .mmu_cxr_mask
= 0x0000003f,
4527 .mmu_sfsr_mask
= 0xffffffff,
4528 .mmu_trcr_mask
= 0xffffffff,
4531 .name
= "Cypress CY7C611",
4532 .iu_version
= 0x13 << 24, /* Impl 1, ver 3 */
4533 .fpu_version
= 3 << 17, /* FPU version 3 (Cypress CY7C602) */
4534 .mmu_version
= 0x10 << 24,
4535 .mmu_bm
= 0x00004000,
4536 .mmu_ctpr_mask
= 0x007ffff0,
4537 .mmu_cxr_mask
= 0x0000003f,
4538 .mmu_sfsr_mask
= 0xffffffff,
4539 .mmu_trcr_mask
= 0xffffffff,
4542 .name
= "TI SuperSparc II",
4543 .iu_version
= 0x40000000,
4544 .fpu_version
= 0 << 17,
4545 .mmu_version
= 0x04000000,
4546 .mmu_bm
= 0x00002000,
4547 .mmu_ctpr_mask
= 0xffffffc0,
4548 .mmu_cxr_mask
= 0x0000ffff,
4549 .mmu_sfsr_mask
= 0xffffffff,
4550 .mmu_trcr_mask
= 0xffffffff,
4553 .name
= "TI MicroSparc I",
4554 .iu_version
= 0x41000000,
4555 .fpu_version
= 4 << 17,
4556 .mmu_version
= 0x41000000,
4557 .mmu_bm
= 0x00004000,
4558 .mmu_ctpr_mask
= 0x007ffff0,
4559 .mmu_cxr_mask
= 0x0000003f,
4560 .mmu_sfsr_mask
= 0x00016fff,
4561 .mmu_trcr_mask
= 0x0000003f,
4564 .name
= "TI MicroSparc II",
4565 .iu_version
= 0x42000000,
4566 .fpu_version
= 4 << 17,
4567 .mmu_version
= 0x02000000,
4568 .mmu_bm
= 0x00004000,
4569 .mmu_ctpr_mask
= 0x00ffffc0,
4570 .mmu_cxr_mask
= 0x000000ff,
4571 .mmu_sfsr_mask
= 0x00016fff,
4572 .mmu_trcr_mask
= 0x00ffffff,
4575 .name
= "TI MicroSparc IIep",
4576 .iu_version
= 0x42000000,
4577 .fpu_version
= 4 << 17,
4578 .mmu_version
= 0x04000000,
4579 .mmu_bm
= 0x00004000,
4580 .mmu_ctpr_mask
= 0x00ffffc0,
4581 .mmu_cxr_mask
= 0x000000ff,
4582 .mmu_sfsr_mask
= 0x00016bff,
4583 .mmu_trcr_mask
= 0x00ffffff,
4586 .name
= "TI SuperSparc 51",
4587 .iu_version
= 0x43000000,
4588 .fpu_version
= 0 << 17,
4589 .mmu_version
= 0x04000000,
4590 .mmu_bm
= 0x00002000,
4591 .mmu_ctpr_mask
= 0xffffffc0,
4592 .mmu_cxr_mask
= 0x0000ffff,
4593 .mmu_sfsr_mask
= 0xffffffff,
4594 .mmu_trcr_mask
= 0xffffffff,
4597 .name
= "TI SuperSparc 61",
4598 .iu_version
= 0x44000000,
4599 .fpu_version
= 0 << 17,
4600 .mmu_version
= 0x04000000,
4601 .mmu_bm
= 0x00002000,
4602 .mmu_ctpr_mask
= 0xffffffc0,
4603 .mmu_cxr_mask
= 0x0000ffff,
4604 .mmu_sfsr_mask
= 0xffffffff,
4605 .mmu_trcr_mask
= 0xffffffff,
4608 .name
= "Ross RT625",
4609 .iu_version
= 0x1e000000,
4610 .fpu_version
= 1 << 17,
4611 .mmu_version
= 0x1e000000,
4612 .mmu_bm
= 0x00004000,
4613 .mmu_ctpr_mask
= 0x007ffff0,
4614 .mmu_cxr_mask
= 0x0000003f,
4615 .mmu_sfsr_mask
= 0xffffffff,
4616 .mmu_trcr_mask
= 0xffffffff,
4619 .name
= "Ross RT620",
4620 .iu_version
= 0x1f000000,
4621 .fpu_version
= 1 << 17,
4622 .mmu_version
= 0x1f000000,
4623 .mmu_bm
= 0x00004000,
4624 .mmu_ctpr_mask
= 0x007ffff0,
4625 .mmu_cxr_mask
= 0x0000003f,
4626 .mmu_sfsr_mask
= 0xffffffff,
4627 .mmu_trcr_mask
= 0xffffffff,
4630 .name
= "BIT B5010",
4631 .iu_version
= 0x20000000,
4632 .fpu_version
= 0 << 17, /* B5010/B5110/B5120/B5210 */
4633 .mmu_version
= 0x20000000,
4634 .mmu_bm
= 0x00004000,
4635 .mmu_ctpr_mask
= 0x007ffff0,
4636 .mmu_cxr_mask
= 0x0000003f,
4637 .mmu_sfsr_mask
= 0xffffffff,
4638 .mmu_trcr_mask
= 0xffffffff,
4641 .name
= "Matsushita MN10501",
4642 .iu_version
= 0x50000000,
4643 .fpu_version
= 0 << 17,
4644 .mmu_version
= 0x50000000,
4645 .mmu_bm
= 0x00004000,
4646 .mmu_ctpr_mask
= 0x007ffff0,
4647 .mmu_cxr_mask
= 0x0000003f,
4648 .mmu_sfsr_mask
= 0xffffffff,
4649 .mmu_trcr_mask
= 0xffffffff,
4652 .name
= "Weitek W8601",
4653 .iu_version
= 0x90 << 24, /* Impl 9, ver 0 */
4654 .fpu_version
= 3 << 17, /* FPU version 3 (Weitek WTL3170/2) */
4655 .mmu_version
= 0x10 << 24,
4656 .mmu_bm
= 0x00004000,
4657 .mmu_ctpr_mask
= 0x007ffff0,
4658 .mmu_cxr_mask
= 0x0000003f,
4659 .mmu_sfsr_mask
= 0xffffffff,
4660 .mmu_trcr_mask
= 0xffffffff,
4664 .iu_version
= 0xf2000000,
4665 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
4666 .mmu_version
= 0xf2000000,
4667 .mmu_bm
= 0x00004000,
4668 .mmu_ctpr_mask
= 0x007ffff0,
4669 .mmu_cxr_mask
= 0x0000003f,
4670 .mmu_sfsr_mask
= 0xffffffff,
4671 .mmu_trcr_mask
= 0xffffffff,
4675 .iu_version
= 0xf3000000,
4676 .fpu_version
= 4 << 17, /* FPU version 4 (Meiko) */
4677 .mmu_version
= 0xf3000000,
4678 .mmu_bm
= 0x00004000,
4679 .mmu_ctpr_mask
= 0x007ffff0,
4680 .mmu_cxr_mask
= 0x0000003f,
4681 .mmu_sfsr_mask
= 0xffffffff,
4682 .mmu_trcr_mask
= 0xffffffff,
4687 static const sparc_def_t
*cpu_sparc_find_by_name(const unsigned char *name
)
4691 for (i
= 0; i
< sizeof(sparc_defs
) / sizeof(sparc_def_t
); i
++) {
4692 if (strcasecmp(name
, sparc_defs
[i
].name
) == 0) {
4693 return &sparc_defs
[i
];
4699 void sparc_cpu_list (FILE *f
, int (*cpu_fprintf
)(FILE *f
, const char *fmt
, ...))
4703 for (i
= 0; i
< sizeof(sparc_defs
) / sizeof(sparc_def_t
); i
++) {
4704 (*cpu_fprintf
)(f
, "Sparc %16s IU " TARGET_FMT_lx
" FPU %08x MMU %08x\n",
4706 sparc_defs
[i
].iu_version
,
4707 sparc_defs
[i
].fpu_version
,
4708 sparc_defs
[i
].mmu_version
);
4712 #define GET_FLAG(a,b) ((env->psr & a)?b:'-')
4714 void cpu_dump_state(CPUState
*env
, FILE *f
,
4715 int (*cpu_fprintf
)(FILE *f
, const char *fmt
, ...),
4720 cpu_fprintf(f
, "pc: " TARGET_FMT_lx
" npc: " TARGET_FMT_lx
"\n", env
->pc
, env
->npc
);
4721 cpu_fprintf(f
, "General Registers:\n");
4722 for (i
= 0; i
< 4; i
++)
4723 cpu_fprintf(f
, "%%g%c: " TARGET_FMT_lx
"\t", i
+ '0', env
->gregs
[i
]);
4724 cpu_fprintf(f
, "\n");
4726 cpu_fprintf(f
, "%%g%c: " TARGET_FMT_lx
"\t", i
+ '0', env
->gregs
[i
]);
4727 cpu_fprintf(f
, "\nCurrent Register Window:\n");
4728 for (x
= 0; x
< 3; x
++) {
4729 for (i
= 0; i
< 4; i
++)
4730 cpu_fprintf(f
, "%%%c%d: " TARGET_FMT_lx
"\t",
4731 (x
== 0 ? 'o' : (x
== 1 ? 'l' : 'i')), i
,
4732 env
->regwptr
[i
+ x
* 8]);
4733 cpu_fprintf(f
, "\n");
4735 cpu_fprintf(f
, "%%%c%d: " TARGET_FMT_lx
"\t",
4736 (x
== 0 ? 'o' : x
== 1 ? 'l' : 'i'), i
,
4737 env
->regwptr
[i
+ x
* 8]);
4738 cpu_fprintf(f
, "\n");
4740 cpu_fprintf(f
, "\nFloating Point Registers:\n");
4741 for (i
= 0; i
< 32; i
++) {
4743 cpu_fprintf(f
, "%%f%02d:", i
);
4744 cpu_fprintf(f
, " %016lf", env
->fpr
[i
]);
4746 cpu_fprintf(f
, "\n");
4748 #ifdef TARGET_SPARC64
4749 cpu_fprintf(f
, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d fprs: %d\n",
4750 env
->pstate
, GET_CCR(env
), env
->asi
, env
->tl
, env
->fprs
);
4751 cpu_fprintf(f
, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
4752 env
->cansave
, env
->canrestore
, env
->otherwin
, env
->wstate
,
4753 env
->cleanwin
, NWINDOWS
- 1 - env
->cwp
);
4755 cpu_fprintf(f
, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env
),
4756 GET_FLAG(PSR_ZERO
, 'Z'), GET_FLAG(PSR_OVF
, 'V'),
4757 GET_FLAG(PSR_NEG
, 'N'), GET_FLAG(PSR_CARRY
, 'C'),
4758 env
->psrs
?'S':'-', env
->psrps
?'P':'-',
4759 env
->psret
?'E':'-', env
->wim
);
4761 cpu_fprintf(f
, "fsr: 0x%08x\n", GET_FSR32(env
));
4764 #if defined(CONFIG_USER_ONLY)
4765 target_phys_addr_t
cpu_get_phys_page_debug(CPUState
*env
, target_ulong addr
)
4771 extern int get_physical_address (CPUState
*env
, target_phys_addr_t
*physical
, int *prot
,
4772 int *access_index
, target_ulong address
, int rw
,
4775 target_phys_addr_t
cpu_get_phys_page_debug(CPUState
*env
, target_ulong addr
)
4777 target_phys_addr_t phys_addr
;
4778 int prot
, access_index
;
4780 if (get_physical_address(env
, &phys_addr
, &prot
, &access_index
, addr
, 2,
4781 MMU_KERNEL_IDX
) != 0)
4782 if (get_physical_address(env
, &phys_addr
, &prot
, &access_index
, addr
,
4783 0, MMU_KERNEL_IDX
) != 0)
4785 if (cpu_get_physical_page_desc(phys_addr
) == IO_MEM_UNASSIGNED
)
4791 void helper_flush(target_ulong addr
)
4794 tb_invalidate_page_range(addr
, addr
+ 8);