/*
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

/*
   TODO-list:

   Rest of V9 instructions, VIS instructions
   NPC/PC static optimisations (use JUMP_TB when possible)
   Optimize synthetic instructions
*/

#include <stdarg.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */
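/*
 * SPARC exposes the branch delay slot architecturally, so the translator
 * tracks both pc and npc for every instruction; either may hold a real
 * address or one of the markers above.
 */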
/* global register indexes */
static TCGv cpu_env, cpu_T[3], cpu_regwptr, cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv cpu_xcc;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;

typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    struct TranslationBlock *tb;
} DisasContext;

// This macro uses non-native bit order, i.e. bit 0 is the MSB
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the bit order of the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
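/*
 * Worked example (illustrative): the 30-bit CALL displacement occupies
 * instruction bits <29:0>.  In GET_FIELD's MSB-first numbering that is
 * FROM=2, TO=31:
 *     GET_FIELD(insn, 2, 31) == (insn >> 0) & 0x3fffffff
 * The same field in GET_FIELD_SP's manual-style numbering would be
 * GET_FIELD_SP(insn, 0, 29); the *s variants additionally sign-extend
 * using the field width (b) - (a) + 1.
 */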
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

#define IS_IMM (insn & (1<<13))

/* floating point registers moves */
static void gen_op_load_fpr_FT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft0));
}

static void gen_op_load_fpr_FT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft1));
}

static void gen_op_store_FT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft0));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
}
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 1]));
}
#ifdef CONFIG_USER_ONLY
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 2]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 3]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 2]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 3]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 1]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 2]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 3]));
}
#endif
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#define gen_op_ldst(name)        gen_op_##name##_raw()
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#define OP_LD_TABLE(width)                                              \
    static GenOpFunc * const gen_op_##width[] = {                       \
        &gen_op_##width##_user,                                         \
        &gen_op_##width##_kernel,                                       \
        &gen_op_##width##_hypv,                                         \
    };
#else
#define OP_LD_TABLE(width)                                              \
    static GenOpFunc * const gen_op_##width[] = {                       \
        &gen_op_##width##_user,                                         \
        &gen_op_##width##_kernel,                                       \
    };
#endif
#define gen_op_ldst(name)        (*gen_op_##name[dc->mem_idx])()
#endif
#ifndef CONFIG_USER_ONLY
#ifdef __i386__
OP_LD_TABLE(std);
#endif /* __i386__ */
OP_LD_TABLE(stdf);
OP_LD_TABLE(lddf);
#endif
#ifdef TARGET_ABI32
#define ABI32_MASK(addr) tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#else
#define ABI32_MASK(addr)
#endif
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
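/*
 * Register-file note: %g0 reads as zero and writes to it are discarded;
 * the other globals live in the cpu_gregs TCG globals, while the
 * windowed registers (%o/%l/%i) are reached indirectly through
 * cpu_regwptr, which points into the current register window.
 */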
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
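/*
 * Chaining note: tcg_gen_exit_tb((long)tb + tb_num) hands the exec loop
 * both the TranslationBlock pointer and the jump slot index, allowing
 * the two blocks to be patched together so later executions skip the
 * lookup; exit_tb(0) disables chaining for the cross-page case.
 */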
static inline void gen_mov_reg_N(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
/* old op:
   if (!T0)
       env->psr |= PSR_ZERO;
   if ((int32_t) T0 < 0)
       env->psr |= PSR_NEG;
*/
static inline void gen_cc_NZ_icc(TCGv dst)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_NE, r_temp, tcg_const_tl(0), l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_ext_i32_tl(r_temp, dst);
    tcg_gen_brcond_tl(TCG_COND_GE, r_temp, tcg_const_tl(0), l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_NE, dst, tcg_const_tl(0), l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcond_tl(TCG_COND_GE, dst, tcg_const_tl(0), l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif
/* old op:
   if (T0 < src1)
       env->psr |= PSR_CARRY;
*/
static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
/* old op:
   if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
       env->psr |= PSR_OVF;
*/
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}
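/*
 * Overflow rationale: signed addition overflows exactly when both
 * operands have the same sign but the result's sign differs, i.e.
 *     ~(src1 ^ src2) & (src1 ^ dst)
 * tested at bit 31 (icc) or bit 63 (xcc) and shifted down onto PSR_OVF.
 */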
#ifdef TARGET_SPARC64
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif

static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
    tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l1);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_TOVF));
    gen_set_label(l1);
}
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_TOVF));
    gen_set_label(l1);
}
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(dst, src1, src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_C_add_icc(dst, cpu_cc_src);
    gen_cc_V_add_icc(dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
    gen_cc_C_add_xcc(dst, cpu_cc_src);
    gen_cc_V_add_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}

static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(dst, src1, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(dst, cpu_cc_src);
#endif
    tcg_gen_add_tl(dst, dst, cpu_cc_src2);
    gen_cc_NZ_icc(dst);
    gen_cc_C_add_icc(dst, cpu_cc_src);
    gen_cc_V_add_icc(dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(dst);
    gen_cc_C_add_xcc(dst, cpu_cc_src);
    gen_cc_V_add_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}

static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(dst, src1, src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_C_add_icc(dst, cpu_cc_src);
    gen_cc_V_add_icc(dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
    gen_cc_C_add_xcc(dst, cpu_cc_src);
    gen_cc_V_add_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(dst, src1, src2);
    gen_add_tv(dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_C_add_icc(dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
    gen_cc_C_add_xcc(dst, cpu_cc_src);
    gen_cc_V_add_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}
/* old op:
   if (src1 < T1)
       env->psr |= PSR_CARRY;
*/
static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new(TCG_TYPE_TL);
    r_temp2 = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
/* old op:
   if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
       env->psr |= PSR_OVF;
*/
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}

#ifdef TARGET_SPARC64
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
    tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l1);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_TOVF));
    gen_set_label(l1);
}
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(dst, src1, src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}

static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(dst, src1, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(dst, cpu_cc_src);
#endif
    tcg_gen_sub_tl(dst, dst, cpu_cc_src2);
    gen_cc_NZ_icc(dst);
    gen_cc_C_sub_icc(dst, cpu_cc_src);
    gen_cc_V_sub_icc(dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(dst);
    gen_cc_C_sub_xcc(dst, cpu_cc_src);
    gen_cc_V_sub_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(dst, src1, src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(dst, src1, src2);
    gen_sub_tv(dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    r_temp2 = tcg_temp_new(TCG_TYPE_I32);

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_ld32u_tl(r_temp, cpu_env, offsetof(CPUSPARCState, y));
    tcg_gen_trunc_tl_i32(r_temp2, r_temp);
    tcg_gen_andi_i32(r_temp2, r_temp2, 0x1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_brcond_i32(TCG_COND_NE, r_temp2, tcg_const_i32(0), l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_trunc_tl_i32(r_temp2, cpu_cc_src);
    tcg_gen_andi_i32(r_temp2, r_temp2, 0x1);
    tcg_gen_shli_i32(r_temp2, r_temp2, 31);
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, y));
    tcg_gen_shri_i32(cpu_tmp32, cpu_tmp32, 1);
    tcg_gen_or_i32(cpu_tmp32, cpu_tmp32, r_temp2);
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, y));

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_V_add_icc(dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(dst, cpu_cc_src);
}
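/*
 * MULScc is one step of the SPARCv8 iterative multiply: bit 0 of %y
 * decides whether src2 or 0 is added, %y shifts right taking bit 0 of
 * src1, and src1 is shifted right with (N ^ V) inserted at bit 31
 * before the flag-setting add above.
 */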
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_i32(r_temp, r_temp);
    tcg_gen_st_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_i32(r_temp, r_temp);
    tcg_gen_st_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
}
#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_NE, divisor, tcg_const_tl(0), l1);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_DIV_ZERO));
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(src2);
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_cc_src, tcg_const_tl(INT64_MIN), l1);
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_cc_src2, tcg_const_tl(-1), l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
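/*
 * The INT64_MIN / -1 quotient is the one operand pair whose result does
 * not fit in 64 bits, so it is filtered out before tcg_gen_div_i64 and
 * the result pinned to INT64_MIN, matching SDIVX overflow behaviour.
 */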
static inline void gen_op_div_cc(TCGv dst)
{
    int l1;

    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    l1 = gen_new_label();
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, cc_src2));
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

static inline void gen_op_logic_cc(TCGv dst)
{
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
#endif
}
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
/* FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
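/*
 * gen_branch_a implements a conditional branch with the annul bit set:
 * when taken, the delay slot at pc2 executes and control continues at
 * pc1; when not taken, the delay slot is annulled and execution resumes
 * at pc2 + 4.  gen_generic_branch only selects npc at run time and is
 * used to materialise the JUMP_PC state.
 */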
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}

static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
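/*
 * The offsets above locate the FCC fields inside the FSR (assuming the
 * usual V9 layout): fcc0 sits at bits 11:10, and fcc1..fcc3 at bits
 * 33:32, 35:34 and 37:36, so each offset is the distance from
 * FSR_FCC0_SHIFT/FSR_FCC1_SHIFT to the selected field.
 */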
#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], r_src, tcg_const_tl(0), l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
/* XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}

/* XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}

#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
static GenOpFunc * const gen_fcmps[4] = {
    helper_fcmps,
    helper_fcmps_fcc1,
    helper_fcmps_fcc2,
    helper_fcmps_fcc3,
};

static GenOpFunc * const gen_fcmpd[4] = {
    helper_fcmpd,
    helper_fcmpd_fcc1,
    helper_fcmpd_fcc2,
    helper_fcmpd_fcc3,
};

#if defined(CONFIG_USER_ONLY)
static GenOpFunc * const gen_fcmpq[4] = {
    helper_fcmpq,
    helper_fcmpq_fcc1,
    helper_fcmpq_fcc2,
    helper_fcmpq_fcc3,
};
#endif

static GenOpFunc * const gen_fcmpes[4] = {
    helper_fcmpes,
    helper_fcmpes_fcc1,
    helper_fcmpes_fcc2,
    helper_fcmpes_fcc3,
};

static GenOpFunc * const gen_fcmped[4] = {
    helper_fcmped,
    helper_fcmped_fcc1,
    helper_fcmped_fcc2,
    helper_fcmped_fcc3,
};

#if defined(CONFIG_USER_ONLY)
static GenOpFunc * const gen_fcmpeq[4] = {
    helper_fcmpeq,
    helper_fcmpeq_fcc1,
    helper_fcmpeq_fcc2,
    helper_fcmpeq_fcc3,
};
#endif

static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmps[fccno]);
}

static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpd[fccno]);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpq[fccno]);
}
#endif

static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpes[fccno]);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmped[fccno]);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
}
#endif

#else

static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmps);
}

static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpd);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpq);
}
#endif

static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpes);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmped);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpeq);
}
#endif
#endif
static inline void gen_op_fpexception_im(int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~FSR_FTT_MASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_FP_EXCP));
}

static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        save_state(dc, r_cond);
        tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_NFPU_INSN));
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~(FSR_FTT_MASK | FSR_CEXC_MASK));
}

static inline void gen_clear_float_exceptions(void)
{
    tcg_gen_helper_0_0(helper_clear_float_exceptions);
}
/* asi moves */
#ifdef TARGET_SPARC64
static inline TCGv gen_get_asi(int insn, TCGv r_addr)
{
    int asi, offset;
    TCGv r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new(TCG_TYPE_I32);
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(r_addr, r_addr, offset);
        tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
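/*
 * V9 alternate-space accesses come in two forms: with i=1 the ASI is
 * taken from the %asi register and the displacement field is folded
 * into the address here; with i=0 an 8-bit immediate ASI is encoded in
 * the instruction itself.
 */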
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size, int sign)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi,
                       tcg_const_i32(size), tcg_const_i32(sign));
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, tcg_const_i32(size));
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, tcg_const_i32(size),
                       tcg_const_i32(rd));
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, tcg_const_i32(size),
                       tcg_const_i32(rd));
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_temp, r_asi;

    r_temp = tcg_temp_new(TCG_TYPE_I32);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_ld_asi, r_temp, addr, r_asi,
                       tcg_const_i32(4), tcg_const_i32(0));
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi,
                       tcg_const_i32(4));
    tcg_gen_extu_i32_tl(dst, r_temp);
}

static inline void gen_ldda_asi(TCGv lo, TCGv hi, TCGv addr, int insn)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi,
                       tcg_const_i32(8), tcg_const_i32(0));
    tcg_gen_andi_i64(lo, cpu_tmp64, 0xffffffffULL);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_andi_i64(hi, cpu_tmp64, 0xffffffffULL);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_temp, r_asi;

    r_temp = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi,
                       r_temp);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi,
                       tcg_const_i32(8));
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn, int rd)
{
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn, int rd)
{
    TCGv r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size, int sign)
{
    int asi;

    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, tcg_const_i32(asi),
                       tcg_const_i32(size), tcg_const_i32(sign));
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    int asi;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, tcg_const_i32(asi),
                       tcg_const_i32(size));
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    int asi;
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_I32);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_1_4(helper_ld_asi, r_temp, addr, tcg_const_i32(asi),
                       tcg_const_i32(4), tcg_const_i32(0));
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, tcg_const_i32(asi),
                       tcg_const_i32(4));
    tcg_gen_extu_i32_tl(dst, r_temp);
}

static inline void gen_ldda_asi(TCGv lo, TCGv hi, TCGv addr, int insn)
{
    int asi;

    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, tcg_const_i32(asi),
                       tcg_const_i32(8), tcg_const_i32(0));
    tcg_gen_trunc_i64_tl(lo, cpu_tmp64);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    int asi;
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi, r_temp);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, tcg_const_i32(asi),
                       tcg_const_i32(8));
}
#endif

#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    int asi;

    gen_ld_asi(dst, addr, insn, 1, 0);

    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_0_4(helper_st_asi, addr, tcg_const_i64(0xffULL),
                       tcg_const_i32(asi), tcg_const_i32(1));
}
#endif
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        //r_rs1 = tcg_const_tl(0);
        tcg_gen_movi_tl(def, 0);
    else if (rs1 < 8)
        //r_rs1 = cpu_gregs[rs1];
        tcg_gen_mov_tl(def, cpu_gregs[rs1]);
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}

static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;
    unsigned int rs2;

    if (IS_IMM) { /* immediate */
        rs2 = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl((int)rs2);
    } else { /* register */
        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0);
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
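/*
 * Illustrative decode: "add %g1, -4, %g2" sets i=1, so IS_IMM is true
 * and GET_FIELDs(insn, 19, 31) sign-extends the 13-bit immediate to -4;
 * with i=0 the low bits select rs2 as the second register operand.
 */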
/* before an instruction, dc->pc must be static */
static void disas_sparc_insn(DisasContext * dc)
{
    unsigned int insn, opc, rs1, rs2, rd;

    insn = ldl_code(dc->pc);
    opc = GET_FIELD(insn, 0, 1);

    rd = GET_FIELD(insn, 2, 6);

    cpu_dst = cpu_T[0];
    cpu_src1 = cpu_T[0]; // const
    cpu_src2 = cpu_T[1]; // const

    cpu_addr = cpu_T[0];
    cpu_val = cpu_T[1];
    switch (opc) {
    case 0:                     /* branches/sethi */
        {
            unsigned int xop = GET_FIELD(insn, 7, 9);
            int32_t target;
            switch (xop) {
#ifdef TARGET_SPARC64
            case 0x1:           /* V9 BPcc */
                {
                    int cc;

                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    cc = GET_FIELD_SP(insn, 20, 21);
                    if (cc == 0)
                        do_branch(dc, target, insn, 0, cpu_cond);
                    else if (cc == 2)
                        do_branch(dc, target, insn, 1, cpu_cond);
                    else
                        goto illegal_insn;
                    goto jmp_insn;
                }
            case 0x3:           /* V9 BPr */
                {
                    target = GET_FIELD_SP(insn, 0, 13) |
                        (GET_FIELD_SP(insn, 20, 21) << 14);
                    target = sign_extend(target, 16);
                    target <<= 2;
                    cpu_src1 = get_src1(insn, cpu_src1);
                    do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
                    goto jmp_insn;
                }
            case 0x5:           /* V9 FBPcc */
                {
                    int cc = GET_FIELD_SP(insn, 20, 21);
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD_SP(insn, 0, 18);
                    target = sign_extend(target, 19);
                    target <<= 2;
                    do_fbranch(dc, target, insn, cc, cpu_cond);
                    goto jmp_insn;
                }
#else
            case 0x7:           /* CBN+x */
                {
                    goto ncp_insn;
                }
#endif
            case 0x2:           /* BN+x */
                {
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_branch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x6:           /* FBN+x */
                {
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    target = GET_FIELD(insn, 10, 31);
                    target = sign_extend(target, 22);
                    target <<= 2;
                    do_fbranch(dc, target, insn, 0, cpu_cond);
                    goto jmp_insn;
                }
            case 0x4:           /* SETHI */
                {
                    uint32_t value = GET_FIELD(insn, 10, 31);
                    tcg_gen_movi_tl(cpu_dst, value << 10);
                    gen_movl_TN_reg(rd, cpu_dst);
                }
                break;
            case 0x0:           /* UNIMPL */
            default:
                goto illegal_insn;
            }
            break;
        }
    case 1:
        /*CALL*/ {
            target_long target = GET_FIELDs(insn, 2, 31) << 2;

            gen_movl_TN_reg(15, tcg_const_tl(dc->pc));
            target += dc->pc;
            gen_mov_pc_npc(dc, cpu_cond);
            dc->npc = target;
        }
        goto jmp_insn;
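/*
 * CALL (above) is a delayed unconditional transfer: it writes its own
 * address into %o7 (r15), lets the delay slot execute via
 * gen_mov_pc_npc, and then steers npc to the PC-relative, word-aligned
 * 30-bit displacement.
 */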
    case 2:                     /* FPU & Logical Operations */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            if (xop == 0x3a) {  /* generate trap */
                int cond;

                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {
                    rs2 = GET_FIELD(insn, 25, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
                } else {
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                cond = GET_FIELD(insn, 3, 6);
                if (cond == 0x8) {
                    save_state(dc, cpu_cond);
                    tcg_gen_helper_0_1(helper_trap, cpu_dst);
                } else if (cond != 0) {
                    TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
#ifdef TARGET_SPARC64
                    /* V9 icc/xcc */
                    int cc = GET_FIELD_SP(insn, 11, 12);

                    save_state(dc, cpu_cond);
                    if (cc == 0)
                        gen_cond(r_cond, 0, cond);
                    else if (cc == 2)
                        gen_cond(r_cond, 1, cond);
                    else
                        goto illegal_insn;
#else
                    save_state(dc, cpu_cond);
                    gen_cond(r_cond, 0, cond);
#endif
                    tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
                }
                gen_op_next_insn();
                tcg_gen_exit_tb(0);
                dc->is_br = 1;
                goto jmp_insn;
            } else if (xop == 0x28) {
                rs1 = GET_FIELD(insn, 13, 17);
                switch(rs1) {
                case 0: /* rdy */
#ifndef TARGET_SPARC64
                case 0x01 ... 0x0e: /* undefined in the SPARCv8
                                       manual, rdy on the microSPARC
                                       II */
                case 0x0f:          /* stbar in the SPARCv8 manual,
                                       rdy on the microSPARC II */
                case 0x10 ... 0x1f: /* implementation-dependent in the
                                       SPARCv8 manual, rdy on the
                                       microSPARC II */
#endif
                    tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, y));
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#ifdef TARGET_SPARC64
                case 0x2: /* V9 rdccr */
                    tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x3: /* V9 rdasi */
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, asi));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x4: /* V9 rdtick */
                    {
                        TCGv r_tickptr;

                        r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
                                           r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x5: /* V9 rdpc */
                    tcg_gen_movi_tl(cpu_dst, dc->pc);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x6: /* V9 rdfprs */
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fprs));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0xf: /* V9 membar */
                    break; /* no effect */
                case 0x13: /* Graphics Status */
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, gsr));
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x17: /* Tick compare */
                    tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tick_cmpr));
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x18: /* System tick */
                    {
                        TCGv r_tickptr;

                        r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, stick));
                        tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
                                           r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x19: /* System tick compare */
                    tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, stick_cmpr));
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x10: /* Performance Control */
                case 0x11: /* Performance Instrumentation Counter */
                case 0x12: /* Dispatch Control */
                case 0x14: /* Softint set, WO */
                case 0x15: /* Softint clear, WO */
                case 0x16: /* Softint write */
#endif
                default:
                    goto illegal_insn;
                }
#if !defined(CONFIG_USER_ONLY)
            } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
#ifndef TARGET_SPARC64
                if (!supervisor(dc))
                    goto priv_insn;
                tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
#else
                if (!hypervisor(dc))
                    goto priv_insn;
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // hpstate
                    // gen_op_rdhpstate();
                    break;
                case 1: // htstate
                    // gen_op_rdhtstate();
                    break;
                case 3: // hintp
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hintp));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 5: // htba
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, htba));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 6: // hver
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hver));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 31: // hstick_cmpr
                    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hstick_cmpr));
                    break;
                default:
                    goto illegal_insn;
                }
#endif
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
                if (!supervisor(dc))
                    goto priv_insn;
#ifdef TARGET_SPARC64
                rs1 = GET_FIELD(insn, 13, 17);
                switch (rs1) {
                case 0: // tpc
                    {
                        TCGv r_tsptr;

                        r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_dst, r_tsptr,
                                      offsetof(trap_state, tpc));
                    }
                    break;
                case 1: // tnpc
                    {
                        TCGv r_tsptr;

                        r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_dst, r_tsptr,
                                      offsetof(trap_state, tnpc));
                    }
                    break;
                case 2: // tstate
                    {
                        TCGv r_tsptr;

                        r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_tl(cpu_dst, r_tsptr,
                                      offsetof(trap_state, tstate));
                    }
                    break;
                case 3: // tt
                    {
                        TCGv r_tsptr;

                        r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                       offsetof(CPUState, tsptr));
                        tcg_gen_ld_i32(cpu_dst, r_tsptr,
                                       offsetof(trap_state, tt));
                    }
                    break;
                case 4: // tick
                    {
                        TCGv r_tickptr;

                        r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                        tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                       offsetof(CPUState, tick));
                        tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
                                           r_tickptr);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 5: // tba
                    tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tbr));
                    break;
                case 6: // pstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, pstate));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 7: // tl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, tl));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 8: // pil
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, psrpil));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 9: // cwp
                    tcg_gen_helper_1_0(helper_rdcwp, cpu_dst);
                    break;
                case 10: // cansave
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cansave));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 11: // canrestore
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, canrestore));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 12: // cleanwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cleanwin));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 13: // otherwin
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, otherwin));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 14: // wstate
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wstate));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 16: // UA2005 gl
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, gl));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 26: // UA2005 strand status
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ssr));
                    tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
                    break;
                case 31: // ver
                    tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, version));
                    break;
                case 15: // fq
                default:
                    goto illegal_insn;
                }
#else
                tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wim));
                tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
#endif
                gen_movl_TN_reg(rd, cpu_dst);
            } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
#ifdef TARGET_SPARC64
                tcg_gen_helper_0_0(helper_flushw);
#else
                if (!supervisor(dc))
                    goto priv_insn;
                tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tbr));
                gen_movl_TN_reg(rd, cpu_dst);
#endif
#endif
            } else if (xop == 0x34) {   /* FPU Operations */
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);
                switch (xop) {
                case 0x1: /* fmovs */
                    gen_op_load_fpr_FT0(rs2);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x5: /* fnegs */
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fnegs);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x9: /* fabss */
                    gen_op_load_fpr_FT1(rs2);
                    tcg_gen_helper_0_0(helper_fabss);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x29: /* fsqrts */
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsqrts);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x2a: /* fsqrtd */
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsqrtd);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x2b: /* fsqrtq */
#if defined(CONFIG_USER_ONLY)
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsqrtq);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#else
                    goto nfpu_insn;
#endif
                case 0x41: /* fadds */
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fadds);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_faddd);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x43: /* faddq */
#if defined(CONFIG_USER_ONLY)
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_faddq);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#else
                    goto nfpu_insn;
#endif
                case 0x45: /* fsubs */
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsubs);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsubd);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x47: /* fsubq */
#if defined(CONFIG_USER_ONLY)
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsubq);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#else
                    goto nfpu_insn;
#endif
                case 0x49: /* fmuls */
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fmuls);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x4a: /* fmuld */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fmuld);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4b: /* fmulq */
#if defined(CONFIG_USER_ONLY)
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fmulq);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#else
                    goto nfpu_insn;
#endif
                case 0x4d: /* fdivs */
                    gen_op_load_fpr_FT0(rs1);
                    gen_op_load_fpr_FT1(rs2);
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fdivs);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_FT0_fpr(rd);
                    break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fdivd);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                case 0x4f: /* fdivq */
#if defined(CONFIG_USER_ONLY)
                    gen_op_load_fpr_QT0(QFPREG(rs1));
                    gen_op_load_fpr_QT1(QFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fdivq);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#else
                    goto nfpu_insn;
#endif
            case 0x69: /* fsmuld */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fsmuld);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x6e: /* fdmulq */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fdmulq);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#else
                goto nfpu_insn;
#endif
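            /* fsmuld multiplies two singles into a double result and
               fdmulq two doubles into a quad, so the cases above mix
               precisions on purpose: narrow loads but a wider DT0/QT0
               store through the DFPREG/QFPREG pair macros. */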
            case 0xc4: /* fitos */
                gen_op_load_fpr_FT1(rs2);
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fitos);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0xc6: /* fdtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fdtos);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0xc7: /* fqtos */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fqtos);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_FT0_fpr(rd);
                break;
#else
                goto nfpu_insn;
#endif
            case 0xc8: /* fitod */
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fitod);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xc9: /* fstod */
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fstod);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xcb: /* fqtod */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fqtod);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
#else
                goto nfpu_insn;
#endif
            case 0xcc: /* fitoq */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fitoq);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#else
                goto nfpu_insn;
#endif
            case 0xcd: /* fstoq */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fstoq);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#else
                goto nfpu_insn;
#endif
            case 0xce: /* fdtoq */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fdtoq);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#else
                goto nfpu_insn;
#endif
            case 0xd1: /* fstoi */
                gen_op_load_fpr_FT1(rs2);
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fstoi);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0xd2: /* fdtoi */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fdtoi);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0xd3: /* fqtoi */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fqtoi);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_FT0_fpr(rd);
                break;
#else
                goto nfpu_insn;
#endif
#ifdef TARGET_SPARC64
            case 0x2: /* V9 fmovd */
                gen_op_load_fpr_DT0(DFPREG(rs2));
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x3: /* V9 fmovq */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_QT0(QFPREG(rs2));
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#else
                goto nfpu_insn;
#endif
            case 0x6: /* V9 fnegd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fnegd);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x7: /* V9 fnegq */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_QT1(QFPREG(rs2));
                tcg_gen_helper_0_0(helper_fnegq);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#else
                goto nfpu_insn;
#endif
            case 0xa: /* V9 fabsd */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fabsd);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0xb: /* V9 fabsq */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_QT1(QFPREG(rs2));
                tcg_gen_helper_0_0(helper_fabsq);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#else
                goto nfpu_insn;
#endif
            case 0x81: /* V9 fstox */
                gen_op_load_fpr_FT1(rs2);
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fstox);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x82: /* V9 fdtox */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fdtox);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x83: /* V9 fqtox */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fqtox);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
#else
                goto nfpu_insn;
#endif
            case 0x84: /* V9 fxtos */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fxtos);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x88: /* V9 fxtod */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fxtod);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x8c: /* V9 fxtoq */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_clear_float_exceptions();
                tcg_gen_helper_0_0(helper_fxtoq);
                tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                gen_op_store_QT0_fpr(QFPREG(rd));
                break;
#else
                goto nfpu_insn;
#endif
#endif
            default:
                goto illegal_insn;
            }
        } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
            int cond;
#endif
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;
            gen_op_clear_ieee_excp_and_FTT();
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            xop = GET_FIELD(insn, 18, 26);
#ifdef TARGET_SPARC64
            if ((xop & 0x11f) == 0x005) { // V9 fmovsr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                  tcg_const_tl(0), l1);
                gen_op_load_fpr_FT0(rs2);
                gen_op_store_FT0_fpr(rd);
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                  tcg_const_tl(0), l1);
                gen_op_load_fpr_DT0(DFPREG(rs2));
                gen_op_store_DT0_fpr(DFPREG(rd));
                gen_set_label(l1);
                break;
            } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
#if defined(CONFIG_USER_ONLY)
                int l1;

                l1 = gen_new_label();
                cond = GET_FIELD_SP(insn, 14, 17);
                cpu_src1 = get_src1(insn, cpu_src1);
                tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                  tcg_const_tl(0), l1);
                gen_op_load_fpr_QT0(QFPREG(rs2));
                gen_op_store_QT0_fpr(QFPREG(rd));
                gen_set_label(l1);
                break;
#else
                goto nfpu_insn;
#endif
            }
#endif
            switch (xop) {
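            /* The fmov*r cases above (and the FMOVCC macros below) have no
               conditional-move TCG op to lean on; instead they emit a
               conditional branch that jumps over the register copy when the
               condition fails, roughly:
                   brcond(cond, src1, 0) -> l1;  fpr[rd] = fpr[rs2];  l1:
               so nothing ever has to be undone. */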
#ifdef TARGET_SPARC64
#define FMOVCC(size_FDQ, fcc)                                          \
            {                                                           \
                TCGv r_cond;                                            \
                int l1;                                                 \
                                                                        \
                l1 = gen_new_label();                                   \
                r_cond = tcg_temp_new(TCG_TYPE_TL);                     \
                cond = GET_FIELD_SP(insn, 14, 17);                      \
                gen_fcond(r_cond, fcc, cond);                           \
                tcg_gen_brcond_tl(TCG_COND_EQ, r_cond,                  \
                                  tcg_const_tl(0), l1);                 \
                glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
                glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
                gen_set_label(l1);                                      \
            }
            case 0x001: /* V9 fmovscc %fcc0 */
                FMOVCC(F, 0);
                break;
            case 0x002: /* V9 fmovdcc %fcc0 */
                FMOVCC(D, 0);
                break;
            case 0x003: /* V9 fmovqcc %fcc0 */
#if defined(CONFIG_USER_ONLY)
                FMOVCC(Q, 0);
                break;
#else
                goto nfpu_insn;
#endif
            case 0x041: /* V9 fmovscc %fcc1 */
                FMOVCC(F, 1);
                break;
            case 0x042: /* V9 fmovdcc %fcc1 */
                FMOVCC(D, 1);
                break;
            case 0x043: /* V9 fmovqcc %fcc1 */
#if defined(CONFIG_USER_ONLY)
                FMOVCC(Q, 1);
                break;
#else
                goto nfpu_insn;
#endif
            case 0x081: /* V9 fmovscc %fcc2 */
                FMOVCC(F, 2);
                break;
            case 0x082: /* V9 fmovdcc %fcc2 */
                FMOVCC(D, 2);
                break;
            case 0x083: /* V9 fmovqcc %fcc2 */
#if defined(CONFIG_USER_ONLY)
                FMOVCC(Q, 2);
                break;
#else
                goto nfpu_insn;
#endif
            case 0x0c1: /* V9 fmovscc %fcc3 */
                FMOVCC(F, 3);
                break;
            case 0x0c2: /* V9 fmovdcc %fcc3 */
                FMOVCC(D, 3);
                break;
            case 0x0c3: /* V9 fmovqcc %fcc3 */
#if defined(CONFIG_USER_ONLY)
                FMOVCC(Q, 3);
                break;
#else
                goto nfpu_insn;
#endif
#undef FMOVCC
#define FMOVCC(size_FDQ, icc)                                           \
            {                                                           \
                TCGv r_cond;                                            \
                int l1;                                                 \
                                                                        \
                l1 = gen_new_label();                                   \
                r_cond = tcg_temp_new(TCG_TYPE_TL);                     \
                cond = GET_FIELD_SP(insn, 14, 17);                      \
                gen_cond(r_cond, icc, cond);                            \
                tcg_gen_brcond_tl(TCG_COND_EQ, r_cond,                  \
                                  tcg_const_tl(0), l1);                 \
                glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
                glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
                gen_set_label(l1);                                      \
            }

            case 0x101: /* V9 fmovscc %icc */
                FMOVCC(F, 0);
                break;
            case 0x102: /* V9 fmovdcc %icc */
                FMOVCC(D, 0);
                break;
            case 0x103: /* V9 fmovqcc %icc */
#if defined(CONFIG_USER_ONLY)
                FMOVCC(Q, 0);
                break;
#else
                goto nfpu_insn;
#endif
            case 0x181: /* V9 fmovscc %xcc */
                FMOVCC(F, 1);
                break;
            case 0x182: /* V9 fmovdcc %xcc */
                FMOVCC(D, 1);
                break;
            case 0x183: /* V9 fmovqcc %xcc */
#if defined(CONFIG_USER_ONLY)
                FMOVCC(Q, 1);
                break;
#else
                goto nfpu_insn;
#endif
#undef FMOVCC
#endif
            case 0x51: /* fcmps, V9 %fcc */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                gen_op_fcmps(rd & 3);
                break;
            case 0x52: /* fcmpd, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmpd(rd & 3);
                break;
            case 0x53: /* fcmpq, V9 %fcc */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpq(rd & 3);
                break;
#else /* !defined(CONFIG_USER_ONLY) */
                goto nfpu_insn;
#endif
            case 0x55: /* fcmpes, V9 %fcc */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                gen_op_fcmpes(rd & 3);
                break;
            case 0x56: /* fcmped, V9 %fcc */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                gen_op_fcmped(rd & 3);
                break;
            case 0x57: /* fcmpeq, V9 %fcc */
#if defined(CONFIG_USER_ONLY)
                gen_op_load_fpr_QT0(QFPREG(rs1));
                gen_op_load_fpr_QT1(QFPREG(rs2));
                gen_op_fcmpeq(rd & 3);
                break;
#else /* !defined(CONFIG_USER_ONLY) */
                goto nfpu_insn;
#endif
            default:
                goto illegal_insn;
            }
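            /* For the compares, `rd & 3` selects the V9 %fccN field that
               receives the result; pre-V9 code always encodes rd = 0, so
               the mask is harmless there and both ISAs share one path. */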
        } else if (xop == 0x2) {
            rs1 = GET_FIELD(insn, 13, 17);
            if (rs1 == 0) {
                // or %g0, x, y -> mov T0, x; mov y, T0
                if (IS_IMM) {       /* immediate */
                    rs2 = GET_FIELDs(insn, 19, 31);
                    tcg_gen_movi_tl(cpu_dst, (int)rs2);
                } else {            /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    gen_movl_reg_TN(rs2, cpu_dst);
                }
            } else {
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) {       /* immediate */
                    rs2 = GET_FIELDs(insn, 19, 31);
                    tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
                } else {            /* register */
                    // or x, %g0, y -> mov T1, x; mov y, T1
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2 != 0) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
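        /* xop 0x2 is `or`, which doubles as the canonical SPARC move:
           when either operand is %g0 (always zero) the OR degenerates to
           a plain register copy or immediate load, so those shapes are
           special-cased above to avoid emitting a dead OR. */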
#ifdef TARGET_SPARC64
        } else if (xop == 0x25) { /* sll, V9 sllx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                rs2 = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shl_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x26) { /* srl, V9 srlx */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                rs2 = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
        } else if (xop == 0x27) { /* sra, V9 srax */
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {       /* immediate */
                rs2 = GET_FIELDs(insn, 20, 31);
                if (insn & (1 << 12)) {
                    tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
                } else {
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
                    tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
                if (insn & (1 << 12)) {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                } else {
                    tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                    tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                }
            }
            gen_movl_TN_reg(rd, cpu_dst);
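        /* Bit 12 of the instruction distinguishes the V9 64-bit shifts
           (count masked to 6 bits) from the legacy 32-bit forms, which are
           emulated on the 64-bit register by masking the source to its low
           32 bits and the count to 5 bits; sra additionally sign-extends
           the 32-bit value before shifting. */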
        } else if (xop < 0x36) {
            cpu_src1 = get_src1(insn, cpu_src1);
            cpu_src2 = get_src2(insn, cpu_src2);
            if (xop < 0x20) {
                switch (xop & ~0x10) {
                case 0x0:
                    if (xop & 0x10)
                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                    else
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    break;
                case 0x1:
                    tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x2:
                    tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x3:
                    tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x4:
                    if (xop & 0x10)
                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                    else
                        tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                    break;
                case 0x5: /* andn */
                    tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
                    tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x6: /* orn */
                    tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
                    tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x7: /* xorn */
                    tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
                    tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0x8: /* addx, V9 addc */
                    if (xop & 0x10)
                        gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
                    else {
                        gen_mov_reg_C(cpu_tmp0, cpu_psr);
                        tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    break;
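                /* addx/subx fold the PSR carry bit into the second
                   operand: gen_mov_reg_C extracts C from cpu_psr, it is
                   added to src2, and then a plain add/sub follows.  The
                   flag-setting variants go through dedicated helpers,
                   since they must also recompute the condition codes. */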
#ifdef TARGET_SPARC64
                case 0x9: /* V9 mulx */
                    tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                    break;
#endif
                case 0xa: /* umul */
                    gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0xb: /* smul */
                    gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                    if (xop & 0x10)
                        gen_op_logic_cc(cpu_dst);
                    break;
                case 0xc: /* subx, V9 subc */
                    if (xop & 0x10)
                        gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
                    else {
                        gen_mov_reg_C(cpu_tmp0, cpu_psr);
                        tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
                        tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    }
                    break;
#ifdef TARGET_SPARC64
                case 0xd: /* V9 udivx */
                    gen_trap_ifdivzero_tl(cpu_src2);
                    tcg_gen_divu_i64(cpu_dst, cpu_src1, cpu_src2);
                    break;
#endif
                case 0xe: /* udiv */
                    tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
                                       cpu_src2);
                    if (xop & 0x10)
                        gen_op_div_cc(cpu_dst);
                    break;
                case 0xf: /* sdiv */
                    tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
                                       cpu_src2);
                    if (xop & 0x10)
                        gen_op_div_cc(cpu_dst);
                    break;
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_dst);
            } else {
                switch (xop) {
                case 0x20: /* taddcc */
                    gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x21: /* tsubcc */
                    gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x22: /* taddcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x23: /* tsubcctv */
                    save_state(dc, cpu_cond);
                    gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x24: /* mulscc */
                    gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#ifndef TARGET_SPARC64
                case 0x25: /* sll */
                    tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x26: /* srl */
                    tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x27: /* sra */
                    tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                    tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
#endif
                case 0x30:
                    {
                        switch(rd) {
                        case 0: /* wry */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_st_tl(cpu_dst, cpu_env,
                                          offsetof(CPUSPARCState, y));
                            break;
#ifndef TARGET_SPARC64
                        case 0x01 ... 0x0f: /* undefined in the
                                               SPARCv8 manual, nop
                                               on the microSPARC II */
                        case 0x10 ... 0x1f: /* implementation-dependent
                                               in the SPARCv8 manual,
                                               nop on the microSPARC II */
                            break;
#else
                        case 0x2: /* V9 wrccr */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
                            break;
                        case 0x3: /* V9 wrasi */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, asi));
                            break;
                        case 0x6: /* V9 wrfprs */
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, fprs));
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
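                        /* `wr` is architecturally defined as writing
                           rs1 XOR reg_or_imm, hence the tcg_gen_xor_tl
                           before every store here.  wrfprs also ends the
                           TB (save_state/exit_tb): changing FPRS can flip
                           FPU availability for the following code. */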
                        case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                            if (supervisor(dc))
                                ; // XXX
#endif
                            break;
                        case 0x13: /* Graphics Status */
                            if (gen_trap_ifnofpu(dc, cpu_cond))
                                goto jmp_insn;
                            tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_st_tl(cpu_dst, cpu_env,
                                          offsetof(CPUSPARCState, gsr));
                            break;
                        case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv r_tickptr;

                                tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                               cpu_src2);
                                tcg_gen_st_tl(cpu_dst, cpu_env,
                                              offsetof(CPUSPARCState,
                                                       tick_cmpr));
                                r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                tcg_gen_helper_0_2(helper_tick_set_limit,
                                                   r_tickptr, cpu_dst);
                            }
                            break;
                        case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv r_tickptr;

                                tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                               cpu_src2);
                                r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                tcg_gen_helper_0_2(helper_tick_set_count,
                                                   r_tickptr, cpu_dst);
                            }
                            break;
                        case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                            if (!supervisor(dc))
                                goto illegal_insn;
#endif
                            {
                                TCGv r_tickptr;

                                tcg_gen_xor_tl(cpu_dst, cpu_src1,
                                               cpu_src2);
                                tcg_gen_st_tl(cpu_dst, cpu_env,
                                              offsetof(CPUSPARCState,
                                                       stick_cmpr));
                                r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, stick));
                                tcg_gen_helper_0_2(helper_tick_set_limit,
                                                   r_tickptr, cpu_dst);
                            }
                            break;
                        case 0x10: /* Performance Control */
                        case 0x11: /* Performance Instrumentation Counter */
                        case 0x12: /* Dispatch Control */
                        case 0x14: /* Softint set */
                        case 0x15: /* Softint clear */
                        case 0x16: /* Softint write */
#endif
                        default:
                            goto illegal_insn;
                        }
                    }
                    break;
#if !defined(CONFIG_USER_ONLY)
                case 0x31: /* wrpsr, V9 saved, restored */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0:
                            tcg_gen_helper_0_0(helper_saved);
                            break;
                        case 1:
                            tcg_gen_helper_0_0(helper_restored);
                            break;
                        case 2: /* UA2005 allclean */
                        case 3: /* UA2005 otherw */
                        case 4: /* UA2005 normalw */
                        case 5: /* UA2005 invalw */
                            // XXX
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
                        save_state(dc, cpu_cond);
                        gen_op_next_insn();
                        tcg_gen_exit_tb(0);
                        dc->is_br = 1;
#endif
                    }
                    break;
                case 0x32: /* wrwim, V9 wrpr */
                    {
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                        switch (rd) {
                        case 0: // tpc
                            {
                                TCGv r_tsptr;

                                r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_tl(cpu_dst, r_tsptr,
                                              offsetof(trap_state, tpc));
                            }
                            break;
                        case 1: // tnpc
                            {
                                TCGv r_tsptr;

                                r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_tl(cpu_dst, r_tsptr,
                                              offsetof(trap_state, tnpc));
                            }
                            break;
                        case 2: // tstate
                            {
                                TCGv r_tsptr;

                                r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_tl(cpu_dst, r_tsptr,
                                              offsetof(trap_state, tstate));
                            }
                            break;
                        case 3: // tt
                            {
                                TCGv r_tsptr;

                                r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
                                tcg_gen_ld_ptr(r_tsptr, cpu_env,
                                               offsetof(CPUState, tsptr));
                                tcg_gen_st_i32(cpu_dst, r_tsptr,
                                               offsetof(trap_state, tt));
                            }
                            break;
                        case 4: // tick
                            {
                                TCGv r_tickptr;

                                r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, tick));
                                tcg_gen_helper_0_2(helper_tick_set_count,
                                                   r_tickptr, cpu_dst);
                            }
                            break;
                        case 5: // tba
                            tcg_gen_st_tl(cpu_dst, cpu_env,
                                          offsetof(CPUSPARCState, tbr));
                            break;
                        case 6: // pstate
                            save_state(dc, cpu_cond);
                            tcg_gen_helper_0_1(helper_wrpstate, cpu_dst);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 7: // tl
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, tl));
                            break;
                        case 8: // pil
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, psrpil));
                            break;
                        case 9: // cwp
                            tcg_gen_helper_0_1(helper_wrcwp, cpu_dst);
                            break;
                        case 10: // cansave
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, cansave));
                            break;
                        case 11: // canrestore
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState,
                                                    canrestore));
                            break;
                        case 12: // cleanwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, cleanwin));
                            break;
                        case 13: // otherwin
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, otherwin));
                            break;
                        case 14: // wstate
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, wstate));
                            break;
                        case 16: // UA2005 gl
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, gl));
                            break;
                        case 26: // UA2005 strand status
                            if (!hypervisor(dc))
                                goto priv_insn;
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, ssr));
                            break;
                        default:
                            goto illegal_insn;
                        }
#else
                        tcg_gen_andi_tl(cpu_dst, cpu_dst,
                                        ((1 << NWINDOWS) - 1));
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                        tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                       offsetof(CPUSPARCState, wim));
#endif
                    }
                    break;
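                    /* For wrpr the rd field picks the privileged register
                       (0=tpc, 1=tnpc, 2=tstate, 3=tt, 4=tick, 5=tba,
                       6=pstate, 7=tl, 8=pil, 9=cwp, 10..14 the window
                       registers, 16/26 UA2005 extras).  pstate writes end
                       the TB because they can change mode bits the
                       translator depends on. */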
                case 0x33: /* wrtbr, UA2005 wrhpr */
                    {
#ifndef TARGET_SPARC64
                        if (!supervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_dst, cpu_dst, cpu_src2);
                        tcg_gen_st_tl(cpu_dst, cpu_env,
                                      offsetof(CPUSPARCState, tbr));
#else
                        if (!hypervisor(dc))
                            goto priv_insn;
                        tcg_gen_xor_tl(cpu_dst, cpu_dst, cpu_src2);
                        switch (rd) {
                        case 0: // hpstate
                            // XXX gen_op_wrhpstate();
                            save_state(dc, cpu_cond);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(0);
                            dc->is_br = 1;
                            break;
                        case 1: // htstate
                            // XXX gen_op_wrhtstate();
                            break;
                        case 3: // hintp
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, hintp));
                            break;
                        case 5: // htba
                            tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                            tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                           offsetof(CPUSPARCState, htba));
                            break;
                        case 31: // hstick_cmpr
                            {
                                TCGv r_tickptr;

                                tcg_gen_st_tl(cpu_dst, cpu_env,
                                              offsetof(CPUSPARCState,
                                                       hstick_cmpr));
                                r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                                tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                               offsetof(CPUState, hstick));
                                tcg_gen_helper_0_2(helper_tick_set_limit,
                                                   r_tickptr, cpu_dst);
                            }
                            break;
                        case 6: // hver readonly
                        default:
                            goto illegal_insn;
                        }
#endif
                    }
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x2c: /* V9 movcc */
                    {
                        int cc = GET_FIELD_SP(insn, 11, 12);
                        int cond = GET_FIELD_SP(insn, 14, 17);
                        TCGv r_cond;
                        int l1;

                        r_cond = tcg_temp_new(TCG_TYPE_TL);
                        if (insn & (1 << 18)) {
                            if (cc == 0)
                                gen_cond(r_cond, 0, cond);
                            else if (cc == 2)
                                gen_cond(r_cond, 1, cond);
                            else
                                goto illegal_insn;
                        } else {
                            gen_fcond(r_cond, cc, cond);
                        }

                        l1 = gen_new_label();

                        tcg_gen_brcond_tl(TCG_COND_EQ, r_cond,
                                          tcg_const_tl(0), l1);
                        if (IS_IMM) {   /* immediate */
                            rs2 = GET_FIELD_SPs(insn, 0, 10);
                            tcg_gen_movi_tl(cpu_dst, (int)rs2);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_dst);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        gen_set_label(l1);
                        break;
                    }
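                /* movcc reuses the branch-over idiom: the condition is
                   materialized in r_cond, and a brcond against zero skips
                   the write to rd entirely when the move should not
                   happen. */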
                case 0x2d: /* V9 sdivx */
                    gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
                case 0x2e: /* V9 popc */
                    {
                        cpu_src2 = get_src2(insn, cpu_src2);
                        tcg_gen_helper_1_1(helper_popc, cpu_dst,
                                           cpu_src2);
                        gen_movl_TN_reg(rd, cpu_dst);
                    }
                    break;
                case 0x2f: /* V9 movr */
                    {
                        int cond = GET_FIELD_SP(insn, 10, 12);
                        int l1;

                        cpu_src1 = get_src1(insn, cpu_src1);

                        l1 = gen_new_label();

                        tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                          tcg_const_tl(0), l1);
                        if (IS_IMM) {   /* immediate */
                            rs2 = GET_FIELD_SPs(insn, 0, 9);
                            tcg_gen_movi_tl(cpu_dst, (int)rs2);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_dst);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        gen_set_label(l1);
                        break;
                    }
#endif
                default:
                    goto illegal_insn;
                }
            }
        } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
            int opf = GET_FIELD_SP(insn, 5, 13);
            rs1 = GET_FIELD(insn, 13, 17);
            rs2 = GET_FIELD(insn, 27, 31);
            if (gen_trap_ifnofpu(dc, cpu_cond))
                goto jmp_insn;

            switch (opf) {
            case 0x000: /* VIS I edge8cc */
            case 0x001: /* VIS II edge8n */
            case 0x002: /* VIS I edge8lcc */
            case 0x003: /* VIS II edge8ln */
            case 0x004: /* VIS I edge16cc */
            case 0x005: /* VIS II edge16n */
            case 0x006: /* VIS I edge16lcc */
            case 0x007: /* VIS II edge16ln */
            case 0x008: /* VIS I edge32cc */
            case 0x009: /* VIS II edge32n */
            case 0x00a: /* VIS I edge32lcc */
            case 0x00b: /* VIS II edge32ln */
                // XXX
                goto illegal_insn;
            case 0x010: /* VIS I array8 */
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
                                   cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x012: /* VIS I array16 */
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
                                   cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x014: /* VIS I array32 */
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
                                   cpu_src2);
                tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x018: /* VIS I alignaddr */
                cpu_src1 = get_src1(insn, cpu_src1);
                gen_movl_reg_TN(rs2, cpu_src2);
                tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
                                   cpu_src2);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
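            /* array16/array32 simply reuse the array8 helper and scale the
               resulting address by the element size (shift left by 1 or
               2); the three encodings differ in nothing else. */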
            case 0x019: /* VIS II bmask */
            case 0x01a: /* VIS I alignaddrl */
                // XXX
                goto illegal_insn;
            case 0x020: /* VIS I fcmple16 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fcmple16);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x022: /* VIS I fcmpne16 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fcmpne16);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x024: /* VIS I fcmple32 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fcmple32);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x026: /* VIS I fcmpne32 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fcmpne32);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x028: /* VIS I fcmpgt16 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fcmpgt16);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02a: /* VIS I fcmpeq16 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fcmpeq16);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02c: /* VIS I fcmpgt32 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fcmpgt32);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x02e: /* VIS I fcmpeq32 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fcmpeq32);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x031: /* VIS I fmul8x16 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fmul8x16);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x033: /* VIS I fmul8x16au */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fmul8x16au);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x035: /* VIS I fmul8x16al */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fmul8x16al);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x036: /* VIS I fmul8sux16 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fmul8sux16);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x037: /* VIS I fmul8ulx16 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fmul8ulx16);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x038: /* VIS I fmuld8sux16 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fmuld8sux16);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x039: /* VIS I fmuld8ulx16 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fmuld8ulx16);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x03a: /* VIS I fpack32 */
            case 0x03b: /* VIS I fpack16 */
            case 0x03d: /* VIS I fpackfix */
            case 0x03e: /* VIS I pdist */
                // XXX
                goto illegal_insn;
            case 0x048: /* VIS I faligndata */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_faligndata);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04b: /* VIS I fpmerge */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fpmerge);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x04c: /* VIS II bshuffle */
                // XXX
                goto illegal_insn;
            case 0x04d: /* VIS I fexpand */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fexpand);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
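            /* faligndata consumes the byte offset that a preceding
               alignaddr left in the GSR alignment field, so the pair
               implements unaligned 8-byte accesses; like the other VIS
               double ops it works through the DT0/DT1 staging slots. */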
            case 0x050: /* VIS I fpadd16 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fpadd16);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x051: /* VIS I fpadd16s */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fpadd16s);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x052: /* VIS I fpadd32 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fpadd32);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x053: /* VIS I fpadd32s */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fpadd32s);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x054: /* VIS I fpsub16 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fpsub16);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x055: /* VIS I fpsub16s */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fpsub16s);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x056: /* VIS I fpsub32 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fpsub32);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x057: /* VIS I fpsub32s */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fpsub32s);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x060: /* VIS I fzero */
                tcg_gen_helper_0_0(helper_movl_DT0_0);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x061: /* VIS I fzeros */
                tcg_gen_helper_0_0(helper_movl_FT0_0);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x062: /* VIS I fnor */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fnor);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x063: /* VIS I fnors */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fnors);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x064: /* VIS I fandnot2 */
                gen_op_load_fpr_DT1(DFPREG(rs1));
                gen_op_load_fpr_DT0(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fandnot);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x065: /* VIS I fandnot2s */
                gen_op_load_fpr_FT1(rs1);
                gen_op_load_fpr_FT0(rs2);
                tcg_gen_helper_0_0(helper_fandnots);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x066: /* VIS I fnot2 */
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fnot);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x067: /* VIS I fnot2s */
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fnot);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x068: /* VIS I fandnot1 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fandnot);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x069: /* VIS I fandnot1s */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fandnots);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x06a: /* VIS I fnot1 */
                gen_op_load_fpr_DT1(DFPREG(rs1));
                tcg_gen_helper_0_0(helper_fnot);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x06b: /* VIS I fnot1s */
                gen_op_load_fpr_FT1(rs1);
                tcg_gen_helper_0_0(helper_fnot);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x06c: /* VIS I fxor */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fxor);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x06d: /* VIS I fxors */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fxors);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x06e: /* VIS I fnand */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fnand);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x06f: /* VIS I fnands */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fnands);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x070: /* VIS I fand */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fand);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x071: /* VIS I fands */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fands);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x072: /* VIS I fxnor */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fxnor);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x073: /* VIS I fxnors */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fxnors);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x074: /* VIS I fsrc1 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x075: /* VIS I fsrc1s */
                gen_op_load_fpr_FT0(rs1);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x076: /* VIS I fornot2 */
                gen_op_load_fpr_DT1(DFPREG(rs1));
                gen_op_load_fpr_DT0(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fornot);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x077: /* VIS I fornot2s */
                gen_op_load_fpr_FT1(rs1);
                gen_op_load_fpr_FT0(rs2);
                tcg_gen_helper_0_0(helper_fornots);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x078: /* VIS I fsrc2 */
                gen_op_load_fpr_DT0(DFPREG(rs2));
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x079: /* VIS I fsrc2s */
                gen_op_load_fpr_FT0(rs2);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x07a: /* VIS I fornot1 */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_fornot);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x07b: /* VIS I fornot1s */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fornots);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x07c: /* VIS I for */
                gen_op_load_fpr_DT0(DFPREG(rs1));
                gen_op_load_fpr_DT1(DFPREG(rs2));
                tcg_gen_helper_0_0(helper_for);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x07d: /* VIS I fors */
                gen_op_load_fpr_FT0(rs1);
                gen_op_load_fpr_FT1(rs2);
                tcg_gen_helper_0_0(helper_fors);
                gen_op_store_FT0_fpr(rd);
                break;
            case 0x07e: /* VIS I fone */
                tcg_gen_helper_0_0(helper_movl_DT0_1);
                gen_op_store_DT0_fpr(DFPREG(rd));
                break;
            case 0x07f: /* VIS I fones */
                tcg_gen_helper_0_0(helper_movl_FT0_1);
                gen_op_store_FT0_fpr(rd);
                break;
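            /* fzero/fone and their single-word variants take no source
               operands at all: the movl_DT0/FT0 helpers materialize the
               all-zeros or all-ones pattern directly, which is why no
               loads precede the stores above. */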
            case 0x080: /* VIS I shutdown */
            case 0x081: /* VIS II siam */
                // XXX
                goto illegal_insn;
            default:
                goto illegal_insn;
            }
#else
            goto ncp_insn;
#endif
        } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
#ifdef TARGET_SPARC64
            goto illegal_insn;
#else
            goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
        } else if (xop == 0x39) { /* V9 return */
            save_state(dc, cpu_cond);
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                rs2 = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            tcg_gen_helper_0_0(helper_restore);
            gen_mov_pc_npc(dc, cpu_cond);
            tcg_gen_helper_0_2(helper_check_align, cpu_dst,
                               tcg_const_i32(3));
            tcg_gen_mov_tl(cpu_npc, cpu_dst);
            dc->npc = DYNAMIC_PC;
            goto jmp_insn;
#endif
        } else {
            cpu_src1 = get_src1(insn, cpu_src1);
            if (IS_IMM) {   /* immediate */
                rs2 = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
            } else {        /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_dst, cpu_src1);
            }
            switch (xop) {
            case 0x38:  /* jmpl */
                {
                    if (rd != 0) {
                        tcg_gen_movi_tl(cpu_tmp0, dc->pc);
                        gen_movl_TN_reg(rd, cpu_tmp0);
                    }
                    gen_mov_pc_npc(dc, cpu_cond);
                    tcg_gen_helper_0_2(helper_check_align, cpu_dst,
                                       tcg_const_i32(3));
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                }
                goto jmp_insn;
#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
            case 0x39:  /* rett, V9 return */
                {
                    if (!supervisor(dc))
                        goto priv_insn;
                    gen_mov_pc_npc(dc, cpu_cond);
                    tcg_gen_helper_0_2(helper_check_align, cpu_dst,
                                       tcg_const_i32(3));
                    tcg_gen_mov_tl(cpu_npc, cpu_dst);
                    dc->npc = DYNAMIC_PC;
                    tcg_gen_helper_0_0(helper_rett);
                }
                goto jmp_insn;
#endif
            case 0x3b: /* flush */
                tcg_gen_helper_0_1(helper_flush, cpu_dst);
                break;
            case 0x3c:  /* save */
                save_state(dc, cpu_cond);
                tcg_gen_helper_0_0(helper_save);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
            case 0x3d:  /* restore */
                save_state(dc, cpu_cond);
                tcg_gen_helper_0_0(helper_restore);
                gen_movl_TN_reg(rd, cpu_dst);
                break;
#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
            case 0x3e:  /* V9 done/retry */
                {
                    switch (rd) {
                    case 0:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        tcg_gen_helper_0_0(helper_done);
                        goto jmp_insn;
                    case 1:
                        if (!supervisor(dc))
                            goto priv_insn;
                        dc->npc = DYNAMIC_PC;
                        dc->pc = DYNAMIC_PC;
                        tcg_gen_helper_0_0(helper_retry);
                        goto jmp_insn;
                    default:
                        goto illegal_insn;
                    }
                }
                break;
#endif
            default:
                goto illegal_insn;
            }
        }
        break;
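    /* The control-transfer cases above all follow the delayed-branch
       discipline: gen_mov_pc_npc commits the (possibly dynamic) npc as the
       new pc, the computed target becomes cpu_npc, and dc->npc is set to
       DYNAMIC_PC so the translator stops assuming a static successor. */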
    case 3: /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);

            save_state(dc, cpu_cond);
            cpu_src1 = get_src1(insn, cpu_src1);
            if (xop == 0x3c || xop == 0x3e)
            {
                rs2 = GET_FIELD(insn, 27, 31);
                gen_movl_reg_TN(rs2, cpu_src2);
            }
            else if (IS_IMM) {  /* immediate */
                rs2 = GET_FIELDs(insn, 19, 31);
                tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    gen_movl_reg_TN(rs2, cpu_src2);
                    tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
                } else
                    tcg_gen_mov_tl(cpu_addr, cpu_src1);
            }
            if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
                (xop > 0x17 && xop <= 0x1d ) ||
                (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
                switch (xop) {
                case 0x0: /* load unsigned word */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1: /* load unsigned byte */
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x2: /* load unsigned halfword */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(1));
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x3: /* load double word */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                           tcg_const_i32(7));
                        ABI32_MASK(cpu_addr);
                        tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
                        tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
                        gen_movl_TN_reg(rd + 1, cpu_tmp0);
                        tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
                        tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
                    }
                    break;
                case 0x9: /* load signed byte */
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xa: /* load signed halfword */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(1));
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0xd: /* ldstub -- XXX: should be atomically */
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st8(tcg_const_tl(0xff), cpu_addr,
                                     dc->mem_idx);
                    break;
                case 0x0f: /* swap register with memory. Also atomically */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    gen_movl_reg_TN(rd, cpu_val);
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
                    break;
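                /* As the XXX notes above say, ldstub and swap are emitted
                   as a separate load and store, so they are not truly
                   atomic; with this translator's single-threaded execution
                   model that is tolerable, but it would be wrong for real
                   SMP. */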
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x10: /* load word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
                    break;
                case 0x11: /* load unsigned byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
                    break;
                case 0x12: /* load unsigned halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(1));
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
                    break;
                case 0x13: /* load double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    gen_ldda_asi(cpu_tmp0, cpu_val, cpu_addr, insn);
                    gen_movl_TN_reg(rd + 1, cpu_tmp0);
                    break;
                case 0x19: /* load signed byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
                    break;
                case 0x1a: /* load signed halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(1));
                    gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
                    break;
                case 0x1d: /* ldstuba -- XXX: should be atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    gen_ldstub_asi(cpu_val, cpu_addr, insn);
                    break;
                case 0x1f: /* swap reg with alt. memory. Also atomically */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    gen_movl_reg_TN(rd, cpu_val);
                    gen_swap_asi(cpu_val, cpu_addr, insn);
                    break;
#endif
#ifndef TARGET_SPARC64
                case 0x30: /* ldc */
                case 0x31: /* ldcsr */
                case 0x33: /* lddc */
                    goto ncp_insn;
#endif
#ifdef TARGET_SPARC64
                case 0x08: /* V9 ldsw */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x0b: /* V9 ldx */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x18: /* V9 ldswa */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
                    break;
                case 0x1b: /* V9 ldxa */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
                    break;
                case 0x2d: /* V9 prefetch, no effect */
                    goto skip_move;
                case 0x30: /* V9 ldfa */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    gen_ldf_asi(cpu_addr, insn, 4, rd);
                    goto skip_move;
                case 0x33: /* V9 lddfa */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    goto skip_move;
                case 0x3d: /* V9 prefetcha, no effect */
                    goto skip_move;
                case 0x32: /* V9 ldqfa */
#if defined(CONFIG_USER_ONLY)
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    goto skip_move;
#else
                    goto nfpu_insn;
#endif
#endif
                default:
                    goto illegal_insn;
                }
                gen_movl_TN_reg(rd, cpu_val);
#ifdef TARGET_SPARC64
            skip_move: ;
#endif
            } else if (xop >= 0x20 && xop < 0x24) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                switch (xop) {
                case 0x20: /* load fpreg */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUState, fpr[rd]));
                    break;
                case 0x21: /* load fsr */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
                    tcg_gen_st_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUState, ft0));
                    tcg_gen_helper_0_0(helper_ldfsr);
                    break;
                case 0x22: /* load quad fpreg */
#if defined(CONFIG_USER_ONLY)
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    gen_op_ldst(ldqf);
                    gen_op_store_QT0_fpr(QFPREG(rd));
                    break;
#else
                    goto nfpu_insn;
#endif
                case 0x23: /* load double fpreg */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    gen_op_ldst(lddf);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
                       xop == 0xe || xop == 0x1e) {
                gen_movl_reg_TN(rd, cpu_val);
                switch (xop) {
                case 0x4: /* store word */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x5: /* store byte */
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x6: /* store halfword */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(1));
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x7: /* store double word */
                    if (rd & 1)
                        goto illegal_insn;
#ifndef __i386__
                    else {
                        TCGv r_low;

                        tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                           tcg_const_i32(7));
                        r_low = tcg_temp_new(TCG_TYPE_I32);
                        gen_movl_reg_TN(rd + 1, r_low);
                        tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_val,
                                           r_low);
                        tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
                    }
#else /* __i386__ */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    flush_cond(dc, cpu_cond);
                    gen_movl_reg_TN(rd + 1, cpu_cond);
                    gen_op_ldst(std);
#endif /* __i386__ */
                    break;
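                /* On most hosts std packs the even/odd register pair into
                   one 64-bit value (helper_pack64) and issues a single
                   8-byte store; the __i386__ path apparently predates that
                   and still goes through the legacy two-word store op. */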
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
                case 0x14: /* store word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    gen_st_asi(cpu_val, cpu_addr, insn, 4);
                    break;
                case 0x15: /* store byte alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    gen_st_asi(cpu_val, cpu_addr, insn, 1);
                    break;
                case 0x16: /* store halfword alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(1));
                    gen_st_asi(cpu_val, cpu_addr, insn, 2);
                    break;
                case 0x17: /* store double word alternate */
#ifndef TARGET_SPARC64
                    if (IS_IMM)
                        goto illegal_insn;
                    if (!supervisor(dc))
                        goto priv_insn;
#endif
                    if (rd & 1)
                        goto illegal_insn;
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    gen_stda_asi(cpu_val, cpu_addr, insn, rd);
                    break;
#endif
#ifdef TARGET_SPARC64
                case 0x0e: /* V9 stx */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    ABI32_MASK(cpu_addr);
                    tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
                    break;
                case 0x1e: /* V9 stxa */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    gen_st_asi(cpu_val, cpu_addr, insn, 8);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x23 && xop < 0x28) {
                if (gen_trap_ifnofpu(dc, cpu_cond))
                    goto jmp_insn;
                switch (xop) {
                case 0x24: /* store fpreg */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUState, fpr[rd]));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
                    break;
                case 0x25: /* stfsr, V9 stxfsr */
#ifdef CONFIG_USER_ONLY
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
#endif
                    tcg_gen_helper_0_0(helper_stfsr);
                    tcg_gen_ld_i32(cpu_tmp32, cpu_env,
                                   offsetof(CPUState, ft0));
                    tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
                    break;
                case 0x26:
#ifdef TARGET_SPARC64
#if defined(CONFIG_USER_ONLY)
                    /* V9 stqf, store quad fpreg */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    gen_op_load_fpr_QT0(QFPREG(rd));
                    gen_op_ldst(stqf);
                    break;
#else
                    goto nfpu_insn;
#endif
#else /* !TARGET_SPARC64 */
                    /* stdfq, store floating point queue */
#if defined(CONFIG_USER_ONLY)
                    goto illegal_insn;
#else
                    if (!supervisor(dc))
                        goto priv_insn;
                    if (gen_trap_ifnofpu(dc, cpu_cond))
                        goto jmp_insn;
                    goto nfq_insn;
#endif
#endif
                case 0x27: /* store double fpreg */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_op_ldst(stdf);
                    break;
                default:
                    goto illegal_insn;
                }
            } else if (xop > 0x33 && xop < 0x3f) {
                switch (xop) {
#ifdef TARGET_SPARC64
                case 0x34: /* V9 stfa */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    gen_op_load_fpr_FT0(rd);
                    gen_stf_asi(cpu_addr, insn, 4, rd);
                    break;
                case 0x36: /* V9 stqfa */
#if defined(CONFIG_USER_ONLY)
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    gen_op_load_fpr_QT0(QFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
                    break;
#else
                    goto nfpu_insn;
#endif
                case 0x37: /* V9 stdfa */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    gen_op_load_fpr_DT0(DFPREG(rd));
                    gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
                    break;
                case 0x3c: /* V9 casa */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(3));
                    gen_cas_asi(cpu_val, cpu_addr, cpu_val, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
                case 0x3e: /* V9 casxa */
                    tcg_gen_helper_0_2(helper_check_align, cpu_addr,
                                       tcg_const_i32(7));
                    gen_casx_asi(cpu_val, cpu_addr, cpu_val, insn, rd);
                    gen_movl_TN_reg(rd, cpu_val);
                    break;
#else
                case 0x34: /* stc */
                case 0x35: /* stcsr */
                case 0x36: /* stdcq */
                case 0x37: /* stdc */
                    goto ncp_insn;
#endif
                default:
                    goto illegal_insn;
                }
            }
            else
                goto illegal_insn;
        }
        break;
    }
    /* default case for non jump instructions */
    if (dc->npc == DYNAMIC_PC) {
        dc->pc = DYNAMIC_PC;
        gen_op_next_insn();
    } else if (dc->npc == JUMP_PC) {
        /* we can do a static jump */
        gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
 jmp_insn:
    return;
 illegal_insn:
    save_state(dc, cpu_cond);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_ILL_INSN));
    dc->is_br = 1;
    return;
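    /* Every error label below funnels through save_state first so pc/npc
       are committed to the CPU state before the exception helper runs;
       setting dc->is_br stops the translation loop after this insn. */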
#if !defined(CONFIG_USER_ONLY)
 priv_insn:
    save_state(dc, cpu_cond);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_PRIV_INSN));
    dc->is_br = 1;
    return;
 nfpu_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
    dc->is_br = 1;
    return;
#ifndef TARGET_SPARC64
 nfq_insn:
    save_state(dc, cpu_cond);
    gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
    dc->is_br = 1;
    return;
#endif
#endif
#ifndef TARGET_SPARC64
 ncp_insn:
    save_state(dc, cpu_cond);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_NCP_INSN));
    dc->is_br = 1;
    return;
#endif
}
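/* The remaining functions drive translation: gen_intermediate_code_internal
   below repeatedly calls disas_sparc_insn over one basic block, and the
   small wrappers at the end select between normal translation and the
   PC-search replay used for exception unwinding. */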
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
}

static inline int gen_intermediate_code_internal(TranslationBlock * tb,
                                                 int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    int j, lj = -1;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->fpu_enabled = cpu_fpu_enabled(env);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);

    cpu_cond = cpu_T[2];

    do {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    tcg_gen_helper_0_0(helper_debug);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            if (loglevel > 0)
                fprintf(logfile, "Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
            }
        }
        last_pc = dc->pc;
        disas_sparc_insn(dc);

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32));
 exit_gen_loop:
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
    }
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "--------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
    return 0;
}
int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    return gen_intermediate_code_internal(tb, 0, env);
}

int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    return gen_intermediate_code_internal(tb, 1, env);
}
void gen_intermediate_code_init(CPUSPARCState *env)
{
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1", "g2", "g3", "g4", "g5", "g6", "g7",
    };
    int i;

    /* init various static tables */
    if (!inited) {
        inited = 1;

        tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
        cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
//#if TARGET_LONG_BITS > HOST_LONG_BITS
#ifdef TARGET_SPARC64
        cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t0), "T0");
        cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t1), "T1");
        cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t2), "T2");
        cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
#else
        cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
        cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
        cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
#endif
        cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
        cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
    }
}
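/* gen_pc_load recovers pc/npc for a given searched opcode position.  The
   npc slot reuses the DYNAMIC_PC/JUMP_PC encodings: 1 means generated code
   already stored the real npc, 2 means it must be chosen from the recorded
   jump targets using the condition value passed in through puc. */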
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    target_ulong npc;

    env->pc = gen_opc_pc[pc_pos];
    npc = gen_opc_npc[pc_pos];
    if (npc == 1) {
        /* dynamic NPC: already stored */
    } else if (npc == 2) {
        target_ulong t2 = (target_ulong)(unsigned long)puc;
        /* jump PC: use T2 and the jump targets of the translation */
        if (t2)
            env->npc = gen_opc_jump_pc[0];
        else
            env->npc = gen_opc_jump_pc[1];
    } else {
        env->npc = npc;
    }
}