4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values
38 according to jump_pc[T2] */
40 /* global register indexes */
41 static TCGv cpu_env
, cpu_regwptr
;
42 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
43 static TCGv cpu_psr
, cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
45 #ifndef CONFIG_USER_ONLY
48 static TCGv cpu_cond
, cpu_src1
, cpu_src2
, cpu_dst
, cpu_addr
, cpu_val
;
50 static TCGv cpu_xcc
, cpu_asi
, cpu_fprs
, cpu_gsr
;
51 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
52 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0
, cpu_tmp32
, cpu_tmp64
;
58 /* Floating point registers */
59 static TCGv cpu_fpr
[TARGET_FPREGS
];
61 #include "gen-icount.h"
63 typedef struct DisasContext
{
64 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
65 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
66 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
70 int address_mask_32bit
;
71 struct TranslationBlock
*tb
;
75 // This function uses non-native bit order
76 #define GET_FIELD(X, FROM, TO) \
77 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
79 // This function uses the order in the manuals, i.e. bit 0 is 2^0
80 #define GET_FIELD_SP(X, FROM, TO) \
81 GET_FIELD(X, 31 - (TO), 31 - (FROM))
83 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
84 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
88 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
89 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
92 #define DFPREG(r) (r & 0x1e)
93 #define QFPREG(r) (r & 0x1c)
/* Sign-extend the low 'len' bits of x to a full 32-bit signed value.
 * Used by GET_FIELDs/GET_FIELD_SPs to widen immediate instruction fields.
 * The value is shifted to the top of the word and arithmetically shifted
 * back down so the sign bit of the field is replicated.
 * Fix: the mangled source dropped the 'len = 32 - len' line, without which
 * no extension happens at all; the left shift is done on an unsigned copy
 * to avoid undefined behavior when the field's sign bit reaches bit 31. */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return ((int)((uint32_t)x << len)) >> len;
}
102 #define IS_IMM (insn & (1<<13))
104 /* floating point registers moves */
105 static void gen_op_load_fpr_FT0(unsigned int src
)
107 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, ft0
));
110 static void gen_op_load_fpr_FT1(unsigned int src
)
112 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, ft1
));
115 static void gen_op_store_FT0_fpr(unsigned int dst
)
117 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, ft0
));
120 static void gen_op_load_fpr_DT0(unsigned int src
)
122 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
123 offsetof(CPU_DoubleU
, l
.upper
));
124 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
125 offsetof(CPU_DoubleU
, l
.lower
));
128 static void gen_op_load_fpr_DT1(unsigned int src
)
130 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
131 offsetof(CPU_DoubleU
, l
.upper
));
132 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
133 offsetof(CPU_DoubleU
, l
.lower
));
136 static void gen_op_store_DT0_fpr(unsigned int dst
)
138 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
139 offsetof(CPU_DoubleU
, l
.upper
));
140 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
141 offsetof(CPU_DoubleU
, l
.lower
));
144 static void gen_op_load_fpr_QT0(unsigned int src
)
146 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
147 offsetof(CPU_QuadU
, l
.upmost
));
148 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
149 offsetof(CPU_QuadU
, l
.upper
));
150 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
151 offsetof(CPU_QuadU
, l
.lower
));
152 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
153 offsetof(CPU_QuadU
, l
.lowest
));
156 static void gen_op_load_fpr_QT1(unsigned int src
)
158 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
159 offsetof(CPU_QuadU
, l
.upmost
));
160 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
161 offsetof(CPU_QuadU
, l
.upper
));
162 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
163 offsetof(CPU_QuadU
, l
.lower
));
164 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
165 offsetof(CPU_QuadU
, l
.lowest
));
168 static void gen_op_store_QT0_fpr(unsigned int dst
)
170 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
171 offsetof(CPU_QuadU
, l
.upmost
));
172 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
173 offsetof(CPU_QuadU
, l
.upper
));
174 tcg_gen_ld_i32(cpu_fpr
[dst
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
175 offsetof(CPU_QuadU
, l
.lower
));
176 tcg_gen_ld_i32(cpu_fpr
[dst
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
177 offsetof(CPU_QuadU
, l
.lowest
));
181 #ifdef CONFIG_USER_ONLY
182 #define supervisor(dc) 0
183 #ifdef TARGET_SPARC64
184 #define hypervisor(dc) 0
187 #define supervisor(dc) (dc->mem_idx >= 1)
188 #ifdef TARGET_SPARC64
189 #define hypervisor(dc) (dc->mem_idx == 2)
194 #ifdef TARGET_SPARC64
196 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
198 #define AM_CHECK(dc) (1)
202 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
204 #ifdef TARGET_SPARC64
206 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
210 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
213 tcg_gen_movi_tl(tn
, 0);
215 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
217 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
221 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
226 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
228 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
232 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
233 target_ulong pc
, target_ulong npc
)
235 TranslationBlock
*tb
;
238 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
239 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
)) {
240 /* jump to same page: we can use a direct jump */
241 tcg_gen_goto_tb(tb_num
);
242 tcg_gen_movi_tl(cpu_pc
, pc
);
243 tcg_gen_movi_tl(cpu_npc
, npc
);
244 tcg_gen_exit_tb((long)tb
+ tb_num
);
246 /* jump to another page: currently not optimized */
247 tcg_gen_movi_tl(cpu_pc
, pc
);
248 tcg_gen_movi_tl(cpu_npc
, npc
);
254 static inline void gen_mov_reg_N(TCGv reg
, TCGv src
)
256 tcg_gen_extu_i32_tl(reg
, src
);
257 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
258 tcg_gen_andi_tl(reg
, reg
, 0x1);
261 static inline void gen_mov_reg_Z(TCGv reg
, TCGv src
)
263 tcg_gen_extu_i32_tl(reg
, src
);
264 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
265 tcg_gen_andi_tl(reg
, reg
, 0x1);
268 static inline void gen_mov_reg_V(TCGv reg
, TCGv src
)
270 tcg_gen_extu_i32_tl(reg
, src
);
271 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
272 tcg_gen_andi_tl(reg
, reg
, 0x1);
275 static inline void gen_mov_reg_C(TCGv reg
, TCGv src
)
277 tcg_gen_extu_i32_tl(reg
, src
);
278 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
279 tcg_gen_andi_tl(reg
, reg
, 0x1);
282 static inline void gen_cc_clear_icc(void)
284 tcg_gen_movi_i32(cpu_psr
, 0);
287 #ifdef TARGET_SPARC64
288 static inline void gen_cc_clear_xcc(void)
290 tcg_gen_movi_i32(cpu_xcc
, 0);
296 env->psr |= PSR_ZERO;
297 if ((int32_t) T0 < 0)
300 static inline void gen_cc_NZ_icc(TCGv dst
)
305 l1
= gen_new_label();
306 l2
= gen_new_label();
307 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
308 tcg_gen_andi_tl(r_temp
, dst
, 0xffffffffULL
);
309 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
310 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_ZERO
);
312 tcg_gen_ext_i32_tl(r_temp
, dst
);
313 tcg_gen_brcondi_tl(TCG_COND_GE
, r_temp
, 0, l2
);
314 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_NEG
);
316 tcg_temp_free(r_temp
);
319 #ifdef TARGET_SPARC64
320 static inline void gen_cc_NZ_xcc(TCGv dst
)
324 l1
= gen_new_label();
325 l2
= gen_new_label();
326 tcg_gen_brcondi_tl(TCG_COND_NE
, dst
, 0, l1
);
327 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_ZERO
);
329 tcg_gen_brcondi_tl(TCG_COND_GE
, dst
, 0, l2
);
330 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_NEG
);
337 env->psr |= PSR_CARRY;
339 static inline void gen_cc_C_add_icc(TCGv dst
, TCGv src1
)
341 TCGv r_temp1
, r_temp2
;
344 l1
= gen_new_label();
345 r_temp1
= tcg_temp_new(TCG_TYPE_TL
);
346 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
347 tcg_gen_andi_tl(r_temp1
, dst
, 0xffffffffULL
);
348 tcg_gen_andi_tl(r_temp2
, src1
, 0xffffffffULL
);
349 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
350 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
352 tcg_temp_free(r_temp1
);
353 tcg_temp_free(r_temp2
);
356 #ifdef TARGET_SPARC64
357 static inline void gen_cc_C_add_xcc(TCGv dst
, TCGv src1
)
361 l1
= gen_new_label();
362 tcg_gen_brcond_tl(TCG_COND_GEU
, dst
, src1
, l1
);
363 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
369 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
372 static inline void gen_cc_V_add_icc(TCGv dst
, TCGv src1
, TCGv src2
)
376 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
377 tcg_gen_xor_tl(r_temp
, src1
, src2
);
378 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
379 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
380 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
381 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
382 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
383 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
384 tcg_temp_free(r_temp
);
385 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
388 #ifdef TARGET_SPARC64
389 static inline void gen_cc_V_add_xcc(TCGv dst
, TCGv src1
, TCGv src2
)
393 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
394 tcg_gen_xor_tl(r_temp
, src1
, src2
);
395 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
396 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
397 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
398 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 63));
399 tcg_gen_shri_tl(r_temp
, r_temp
, 63 - PSR_OVF_SHIFT
);
400 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
401 tcg_temp_free(r_temp
);
402 tcg_gen_or_i32(cpu_xcc
, cpu_xcc
, cpu_tmp32
);
406 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
408 TCGv r_temp
, r_const
;
411 l1
= gen_new_label();
413 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
414 tcg_gen_xor_tl(r_temp
, src1
, src2
);
415 tcg_gen_xori_tl(r_temp
, r_temp
, -1);
416 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
417 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
418 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
419 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
420 r_const
= tcg_const_i32(TT_TOVF
);
421 tcg_gen_helper_0_1(raise_exception
, r_const
);
422 tcg_temp_free(r_const
);
424 tcg_temp_free(r_temp
);
427 static inline void gen_cc_V_tag(TCGv src1
, TCGv src2
)
431 l1
= gen_new_label();
432 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
433 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
434 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
435 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
439 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
444 l1
= gen_new_label();
445 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
446 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
447 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
448 r_const
= tcg_const_i32(TT_TOVF
);
449 tcg_gen_helper_0_1(raise_exception
, r_const
);
450 tcg_temp_free(r_const
);
454 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
456 tcg_gen_mov_tl(cpu_cc_src
, src1
);
457 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
458 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
460 gen_cc_NZ_icc(cpu_cc_dst
);
461 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
462 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
463 #ifdef TARGET_SPARC64
465 gen_cc_NZ_xcc(cpu_cc_dst
);
466 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
467 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
469 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
472 static inline void gen_op_addx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
474 tcg_gen_mov_tl(cpu_cc_src
, src1
);
475 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
476 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
477 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
479 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
480 #ifdef TARGET_SPARC64
482 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
484 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
485 gen_cc_NZ_icc(cpu_cc_dst
);
486 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
487 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
488 #ifdef TARGET_SPARC64
489 gen_cc_NZ_xcc(cpu_cc_dst
);
490 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
491 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
493 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
496 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
498 tcg_gen_mov_tl(cpu_cc_src
, src1
);
499 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
500 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
502 gen_cc_NZ_icc(cpu_cc_dst
);
503 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
504 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
505 gen_cc_V_tag(cpu_cc_src
, cpu_cc_src2
);
506 #ifdef TARGET_SPARC64
508 gen_cc_NZ_xcc(cpu_cc_dst
);
509 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
510 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
512 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
515 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
517 tcg_gen_mov_tl(cpu_cc_src
, src1
);
518 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
519 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
520 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
521 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
523 gen_cc_NZ_icc(cpu_cc_dst
);
524 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
525 #ifdef TARGET_SPARC64
527 gen_cc_NZ_xcc(cpu_cc_dst
);
528 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
529 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
531 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
536 env->psr |= PSR_CARRY;
538 static inline void gen_cc_C_sub_icc(TCGv src1
, TCGv src2
)
540 TCGv r_temp1
, r_temp2
;
543 l1
= gen_new_label();
544 r_temp1
= tcg_temp_new(TCG_TYPE_TL
);
545 r_temp2
= tcg_temp_new(TCG_TYPE_TL
);
546 tcg_gen_andi_tl(r_temp1
, src1
, 0xffffffffULL
);
547 tcg_gen_andi_tl(r_temp2
, src2
, 0xffffffffULL
);
548 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
549 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
551 tcg_temp_free(r_temp1
);
552 tcg_temp_free(r_temp2
);
555 #ifdef TARGET_SPARC64
556 static inline void gen_cc_C_sub_xcc(TCGv src1
, TCGv src2
)
560 l1
= gen_new_label();
561 tcg_gen_brcond_tl(TCG_COND_GEU
, src1
, src2
, l1
);
562 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
568 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
571 static inline void gen_cc_V_sub_icc(TCGv dst
, TCGv src1
, TCGv src2
)
575 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
576 tcg_gen_xor_tl(r_temp
, src1
, src2
);
577 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
578 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
579 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
580 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
581 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
582 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
583 tcg_temp_free(r_temp
);
586 #ifdef TARGET_SPARC64
587 static inline void gen_cc_V_sub_xcc(TCGv dst
, TCGv src1
, TCGv src2
)
591 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
592 tcg_gen_xor_tl(r_temp
, src1
, src2
);
593 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
594 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
595 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 63));
596 tcg_gen_shri_tl(r_temp
, r_temp
, 63 - PSR_OVF_SHIFT
);
597 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
598 tcg_gen_or_i32(cpu_xcc
, cpu_xcc
, cpu_tmp32
);
599 tcg_temp_free(r_temp
);
603 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
605 TCGv r_temp
, r_const
;
608 l1
= gen_new_label();
610 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
611 tcg_gen_xor_tl(r_temp
, src1
, src2
);
612 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
613 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
614 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
615 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
616 r_const
= tcg_const_i32(TT_TOVF
);
617 tcg_gen_helper_0_1(raise_exception
, r_const
);
618 tcg_temp_free(r_const
);
620 tcg_temp_free(r_temp
);
623 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
625 tcg_gen_mov_tl(cpu_cc_src
, src1
);
626 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
627 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
629 gen_cc_NZ_icc(cpu_cc_dst
);
630 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
631 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
632 #ifdef TARGET_SPARC64
634 gen_cc_NZ_xcc(cpu_cc_dst
);
635 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
636 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
638 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
641 static inline void gen_op_subx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
643 tcg_gen_mov_tl(cpu_cc_src
, src1
);
644 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
645 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
646 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
648 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
649 #ifdef TARGET_SPARC64
651 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
653 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
654 gen_cc_NZ_icc(cpu_cc_dst
);
655 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
656 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
657 #ifdef TARGET_SPARC64
658 gen_cc_NZ_xcc(cpu_cc_dst
);
659 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
660 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
662 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
665 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
667 tcg_gen_mov_tl(cpu_cc_src
, src1
);
668 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
669 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
671 gen_cc_NZ_icc(cpu_cc_dst
);
672 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
673 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
674 gen_cc_V_tag(cpu_cc_src
, cpu_cc_src2
);
675 #ifdef TARGET_SPARC64
677 gen_cc_NZ_xcc(cpu_cc_dst
);
678 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
679 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
681 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
684 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
686 tcg_gen_mov_tl(cpu_cc_src
, src1
);
687 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
688 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
689 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
690 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
692 gen_cc_NZ_icc(cpu_cc_dst
);
693 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
694 #ifdef TARGET_SPARC64
696 gen_cc_NZ_xcc(cpu_cc_dst
);
697 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
698 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
700 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
703 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
708 l1
= gen_new_label();
709 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
715 tcg_gen_mov_tl(cpu_cc_src
, src1
);
716 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
717 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
718 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
719 tcg_gen_movi_tl(cpu_cc_src2
, 0);
723 // env->y = (b2 << 31) | (env->y >> 1);
724 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
725 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
726 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
727 tcg_gen_or_tl(cpu_y
, cpu_tmp0
, r_temp
);
730 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
731 gen_mov_reg_V(r_temp
, cpu_psr
);
732 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
733 tcg_temp_free(r_temp
);
735 // T0 = (b1 << 31) | (T0 >> 1);
737 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
738 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
739 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
741 /* do addition and update flags */
742 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
745 gen_cc_NZ_icc(cpu_cc_dst
);
746 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
747 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
748 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
751 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
753 TCGv r_temp
, r_temp2
;
755 r_temp
= tcg_temp_new(TCG_TYPE_I64
);
756 r_temp2
= tcg_temp_new(TCG_TYPE_I64
);
758 tcg_gen_extu_i32_i64(r_temp
, src2
);
759 tcg_gen_extu_i32_i64(r_temp2
, src1
);
760 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
762 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
763 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
764 tcg_temp_free(r_temp
);
765 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
766 #ifdef TARGET_SPARC64
767 tcg_gen_mov_i64(dst
, r_temp2
);
769 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
771 tcg_temp_free(r_temp2
);
774 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
776 TCGv r_temp
, r_temp2
;
778 r_temp
= tcg_temp_new(TCG_TYPE_I64
);
779 r_temp2
= tcg_temp_new(TCG_TYPE_I64
);
781 tcg_gen_ext_i32_i64(r_temp
, src2
);
782 tcg_gen_ext_i32_i64(r_temp2
, src1
);
783 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
785 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
786 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
787 tcg_temp_free(r_temp
);
788 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
789 #ifdef TARGET_SPARC64
790 tcg_gen_mov_i64(dst
, r_temp2
);
792 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
794 tcg_temp_free(r_temp2
);
797 #ifdef TARGET_SPARC64
798 static inline void gen_trap_ifdivzero_tl(TCGv divisor
)
803 l1
= gen_new_label();
804 tcg_gen_brcondi_tl(TCG_COND_NE
, divisor
, 0, l1
);
805 r_const
= tcg_const_i32(TT_DIV_ZERO
);
806 tcg_gen_helper_0_1(raise_exception
, r_const
);
807 tcg_temp_free(r_const
);
811 static inline void gen_op_sdivx(TCGv dst
, TCGv src1
, TCGv src2
)
815 l1
= gen_new_label();
816 l2
= gen_new_label();
817 tcg_gen_mov_tl(cpu_cc_src
, src1
);
818 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
819 gen_trap_ifdivzero_tl(cpu_cc_src2
);
820 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src
, INT64_MIN
, l1
);
821 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src2
, -1, l1
);
822 tcg_gen_movi_i64(dst
, INT64_MIN
);
825 tcg_gen_div_i64(dst
, cpu_cc_src
, cpu_cc_src2
);
830 static inline void gen_op_div_cc(TCGv dst
)
834 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
836 gen_cc_NZ_icc(cpu_cc_dst
);
837 l1
= gen_new_label();
838 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_cc_src2
, 0, l1
);
839 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
843 static inline void gen_op_logic_cc(TCGv dst
)
845 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
848 gen_cc_NZ_icc(cpu_cc_dst
);
849 #ifdef TARGET_SPARC64
851 gen_cc_NZ_xcc(cpu_cc_dst
);
856 static inline void gen_op_eval_ba(TCGv dst
)
858 tcg_gen_movi_tl(dst
, 1);
862 static inline void gen_op_eval_be(TCGv dst
, TCGv src
)
864 gen_mov_reg_Z(dst
, src
);
868 static inline void gen_op_eval_ble(TCGv dst
, TCGv src
)
870 gen_mov_reg_N(cpu_tmp0
, src
);
871 gen_mov_reg_V(dst
, src
);
872 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
873 gen_mov_reg_Z(cpu_tmp0
, src
);
874 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
878 static inline void gen_op_eval_bl(TCGv dst
, TCGv src
)
880 gen_mov_reg_V(cpu_tmp0
, src
);
881 gen_mov_reg_N(dst
, src
);
882 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
886 static inline void gen_op_eval_bleu(TCGv dst
, TCGv src
)
888 gen_mov_reg_Z(cpu_tmp0
, src
);
889 gen_mov_reg_C(dst
, src
);
890 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
894 static inline void gen_op_eval_bcs(TCGv dst
, TCGv src
)
896 gen_mov_reg_C(dst
, src
);
900 static inline void gen_op_eval_bvs(TCGv dst
, TCGv src
)
902 gen_mov_reg_V(dst
, src
);
906 static inline void gen_op_eval_bn(TCGv dst
)
908 tcg_gen_movi_tl(dst
, 0);
912 static inline void gen_op_eval_bneg(TCGv dst
, TCGv src
)
914 gen_mov_reg_N(dst
, src
);
918 static inline void gen_op_eval_bne(TCGv dst
, TCGv src
)
920 gen_mov_reg_Z(dst
, src
);
921 tcg_gen_xori_tl(dst
, dst
, 0x1);
925 static inline void gen_op_eval_bg(TCGv dst
, TCGv src
)
927 gen_mov_reg_N(cpu_tmp0
, src
);
928 gen_mov_reg_V(dst
, src
);
929 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
930 gen_mov_reg_Z(cpu_tmp0
, src
);
931 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
932 tcg_gen_xori_tl(dst
, dst
, 0x1);
936 static inline void gen_op_eval_bge(TCGv dst
, TCGv src
)
938 gen_mov_reg_V(cpu_tmp0
, src
);
939 gen_mov_reg_N(dst
, src
);
940 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
941 tcg_gen_xori_tl(dst
, dst
, 0x1);
945 static inline void gen_op_eval_bgu(TCGv dst
, TCGv src
)
947 gen_mov_reg_Z(cpu_tmp0
, src
);
948 gen_mov_reg_C(dst
, src
);
949 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
950 tcg_gen_xori_tl(dst
, dst
, 0x1);
954 static inline void gen_op_eval_bcc(TCGv dst
, TCGv src
)
956 gen_mov_reg_C(dst
, src
);
957 tcg_gen_xori_tl(dst
, dst
, 0x1);
961 static inline void gen_op_eval_bpos(TCGv dst
, TCGv src
)
963 gen_mov_reg_N(dst
, src
);
964 tcg_gen_xori_tl(dst
, dst
, 0x1);
968 static inline void gen_op_eval_bvc(TCGv dst
, TCGv src
)
970 gen_mov_reg_V(dst
, src
);
971 tcg_gen_xori_tl(dst
, dst
, 0x1);
975 FPSR bit field FCC1 | FCC0:
981 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
982 unsigned int fcc_offset
)
984 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
985 tcg_gen_andi_tl(reg
, reg
, 0x1);
988 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
989 unsigned int fcc_offset
)
991 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
992 tcg_gen_andi_tl(reg
, reg
, 0x1);
996 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
997 unsigned int fcc_offset
)
999 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1000 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1001 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
1004 // 1 or 2: FCC0 ^ FCC1
1005 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
1006 unsigned int fcc_offset
)
1008 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1009 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1010 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1014 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
1015 unsigned int fcc_offset
)
1017 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1021 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
1022 unsigned int fcc_offset
)
1024 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1025 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1026 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1027 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1031 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
1032 unsigned int fcc_offset
)
1034 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1038 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
1039 unsigned int fcc_offset
)
1041 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1042 tcg_gen_xori_tl(dst
, dst
, 0x1);
1043 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1044 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1048 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
1049 unsigned int fcc_offset
)
1051 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1052 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1053 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1056 // 0: !(FCC0 | FCC1)
1057 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
1058 unsigned int fcc_offset
)
1060 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1061 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1062 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
1063 tcg_gen_xori_tl(dst
, dst
, 0x1);
1066 // 0 or 3: !(FCC0 ^ FCC1)
1067 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
1068 unsigned int fcc_offset
)
1070 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1071 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1072 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1073 tcg_gen_xori_tl(dst
, dst
, 0x1);
1077 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
1078 unsigned int fcc_offset
)
1080 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1081 tcg_gen_xori_tl(dst
, dst
, 0x1);
1084 // !1: !(FCC0 & !FCC1)
1085 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
1086 unsigned int fcc_offset
)
1088 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1089 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1090 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1091 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1092 tcg_gen_xori_tl(dst
, dst
, 0x1);
1096 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
1097 unsigned int fcc_offset
)
1099 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1100 tcg_gen_xori_tl(dst
, dst
, 0x1);
1103 // !2: !(!FCC0 & FCC1)
1104 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
1105 unsigned int fcc_offset
)
1107 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1108 tcg_gen_xori_tl(dst
, dst
, 0x1);
1109 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1110 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1111 tcg_gen_xori_tl(dst
, dst
, 0x1);
1114 // !3: !(FCC0 & FCC1)
1115 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1116 unsigned int fcc_offset
)
1118 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1119 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1120 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1121 tcg_gen_xori_tl(dst
, dst
, 0x1);
1124 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1125 target_ulong pc2
, TCGv r_cond
)
1129 l1
= gen_new_label();
1131 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1133 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1136 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1139 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1140 target_ulong pc2
, TCGv r_cond
)
1144 l1
= gen_new_label();
1146 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1148 gen_goto_tb(dc
, 0, pc2
, pc1
);
1151 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1154 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1159 l1
= gen_new_label();
1160 l2
= gen_new_label();
1162 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1164 tcg_gen_movi_tl(cpu_npc
, npc1
);
1168 tcg_gen_movi_tl(cpu_npc
, npc2
);
1172 /* call this function before using the condition register as it may
1173 have been set for a jump */
1174 static inline void flush_cond(DisasContext
*dc
, TCGv cond
)
1176 if (dc
->npc
== JUMP_PC
) {
1177 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1178 dc
->npc
= DYNAMIC_PC
;
1182 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
1184 if (dc
->npc
== JUMP_PC
) {
1185 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1186 dc
->npc
= DYNAMIC_PC
;
1187 } else if (dc
->npc
!= DYNAMIC_PC
) {
1188 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
1192 static inline void save_state(DisasContext
*dc
, TCGv cond
)
1194 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
1198 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
1200 if (dc
->npc
== JUMP_PC
) {
1201 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1202 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1203 dc
->pc
= DYNAMIC_PC
;
1204 } else if (dc
->npc
== DYNAMIC_PC
) {
1205 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1206 dc
->pc
= DYNAMIC_PC
;
1212 static inline void gen_op_next_insn(void)
1214 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1215 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
1218 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1222 #ifdef TARGET_SPARC64
1232 gen_op_eval_bn(r_dst
);
1235 gen_op_eval_be(r_dst
, r_src
);
1238 gen_op_eval_ble(r_dst
, r_src
);
1241 gen_op_eval_bl(r_dst
, r_src
);
1244 gen_op_eval_bleu(r_dst
, r_src
);
1247 gen_op_eval_bcs(r_dst
, r_src
);
1250 gen_op_eval_bneg(r_dst
, r_src
);
1253 gen_op_eval_bvs(r_dst
, r_src
);
1256 gen_op_eval_ba(r_dst
);
1259 gen_op_eval_bne(r_dst
, r_src
);
1262 gen_op_eval_bg(r_dst
, r_src
);
1265 gen_op_eval_bge(r_dst
, r_src
);
1268 gen_op_eval_bgu(r_dst
, r_src
);
1271 gen_op_eval_bcc(r_dst
, r_src
);
1274 gen_op_eval_bpos(r_dst
, r_src
);
1277 gen_op_eval_bvc(r_dst
, r_src
);
1282 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1284 unsigned int offset
;
1304 gen_op_eval_bn(r_dst
);
1307 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1310 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1313 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1316 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1319 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1322 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1325 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1328 gen_op_eval_ba(r_dst
);
1331 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1334 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1337 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1340 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1343 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1346 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1349 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1354 #ifdef TARGET_SPARC64
1356 static const int gen_tcg_cond_reg
[8] = {
1367 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1371 l1
= gen_new_label();
1372 tcg_gen_movi_tl(r_dst
, 0);
1373 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1374 tcg_gen_movi_tl(r_dst
, 1);
1379 /* XXX: potentially incorrect if dynamic npc */
1380 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1383 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1384 target_ulong target
= dc
->pc
+ offset
;
1387 /* unconditional not taken */
1389 dc
->pc
= dc
->npc
+ 4;
1390 dc
->npc
= dc
->pc
+ 4;
1393 dc
->npc
= dc
->pc
+ 4;
1395 } else if (cond
== 0x8) {
1396 /* unconditional taken */
1399 dc
->npc
= dc
->pc
+ 4;
1405 flush_cond(dc
, r_cond
);
1406 gen_cond(r_cond
, cc
, cond
);
1408 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1412 dc
->jump_pc
[0] = target
;
1413 dc
->jump_pc
[1] = dc
->npc
+ 4;
1419 /* XXX: potentially incorrect if dynamic npc */
1420 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1423 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1424 target_ulong target
= dc
->pc
+ offset
;
1427 /* unconditional not taken */
1429 dc
->pc
= dc
->npc
+ 4;
1430 dc
->npc
= dc
->pc
+ 4;
1433 dc
->npc
= dc
->pc
+ 4;
1435 } else if (cond
== 0x8) {
1436 /* unconditional taken */
1439 dc
->npc
= dc
->pc
+ 4;
1445 flush_cond(dc
, r_cond
);
1446 gen_fcond(r_cond
, cc
, cond
);
1448 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1452 dc
->jump_pc
[0] = target
;
1453 dc
->jump_pc
[1] = dc
->npc
+ 4;
1459 #ifdef TARGET_SPARC64
1460 /* XXX: potentially incorrect if dynamic npc */
1461 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1462 TCGv r_cond
, TCGv r_reg
)
1464 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1465 target_ulong target
= dc
->pc
+ offset
;
1467 flush_cond(dc
, r_cond
);
1468 gen_cond_reg(r_cond
, cond
, r_reg
);
1470 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1474 dc
->jump_pc
[0] = target
;
1475 dc
->jump_pc
[1] = dc
->npc
+ 4;
1480 static GenOpFunc
* const gen_fcmpd
[4] = {
1487 static GenOpFunc
* const gen_fcmpq
[4] = {
1494 static GenOpFunc
* const gen_fcmped
[4] = {
1501 static GenOpFunc
* const gen_fcmpeq
[4] = {
1508 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1512 tcg_gen_helper_0_2(helper_fcmps
, r_rs1
, r_rs2
);
1515 tcg_gen_helper_0_2(helper_fcmps_fcc1
, r_rs1
, r_rs2
);
1518 tcg_gen_helper_0_2(helper_fcmps_fcc2
, r_rs1
, r_rs2
);
1521 tcg_gen_helper_0_2(helper_fcmps_fcc3
, r_rs1
, r_rs2
);
1526 static inline void gen_op_fcmpd(int fccno
)
1528 tcg_gen_helper_0_0(gen_fcmpd
[fccno
]);
1531 static inline void gen_op_fcmpq(int fccno
)
1533 tcg_gen_helper_0_0(gen_fcmpq
[fccno
]);
1536 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1540 tcg_gen_helper_0_2(helper_fcmpes
, r_rs1
, r_rs2
);
1543 tcg_gen_helper_0_2(helper_fcmpes_fcc1
, r_rs1
, r_rs2
);
1546 tcg_gen_helper_0_2(helper_fcmpes_fcc2
, r_rs1
, r_rs2
);
1549 tcg_gen_helper_0_2(helper_fcmpes_fcc3
, r_rs1
, r_rs2
);
1554 static inline void gen_op_fcmped(int fccno
)
1556 tcg_gen_helper_0_0(gen_fcmped
[fccno
]);
1559 static inline void gen_op_fcmpeq(int fccno
)
1561 tcg_gen_helper_0_0(gen_fcmpeq
[fccno
]);
1566 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1568 tcg_gen_helper_0_2(helper_fcmps
, r_rs1
, r_rs2
);
1571 static inline void gen_op_fcmpd(int fccno
)
1573 tcg_gen_helper_0_0(helper_fcmpd
);
1576 static inline void gen_op_fcmpq(int fccno
)
1578 tcg_gen_helper_0_0(helper_fcmpq
);
1581 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1583 tcg_gen_helper_0_2(helper_fcmpes
, r_rs1
, r_rs2
);
1586 static inline void gen_op_fcmped(int fccno
)
1588 tcg_gen_helper_0_0(helper_fcmped
);
1591 static inline void gen_op_fcmpeq(int fccno
)
1593 tcg_gen_helper_0_0(helper_fcmpeq
);
1597 static inline void gen_op_fpexception_im(int fsr_flags
)
1601 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1602 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1603 r_const
= tcg_const_i32(TT_FP_EXCP
);
1604 tcg_gen_helper_0_1(raise_exception
, r_const
);
1605 tcg_temp_free(r_const
);
1608 static int gen_trap_ifnofpu(DisasContext
*dc
, TCGv r_cond
)
1610 #if !defined(CONFIG_USER_ONLY)
1611 if (!dc
->fpu_enabled
) {
1614 save_state(dc
, r_cond
);
1615 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1616 tcg_gen_helper_0_1(raise_exception
, r_const
);
1617 tcg_temp_free(r_const
);
1625 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1627 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
1630 static inline void gen_clear_float_exceptions(void)
1632 tcg_gen_helper_0_0(helper_clear_float_exceptions
);
1636 #ifdef TARGET_SPARC64
1637 static inline TCGv
gen_get_asi(int insn
, TCGv r_addr
)
1643 r_asi
= tcg_temp_new(TCG_TYPE_I32
);
1644 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1646 asi
= GET_FIELD(insn
, 19, 26);
1647 r_asi
= tcg_const_i32(asi
);
1652 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1655 TCGv r_asi
, r_size
, r_sign
;
1657 r_asi
= gen_get_asi(insn
, addr
);
1658 r_size
= tcg_const_i32(size
);
1659 r_sign
= tcg_const_i32(sign
);
1660 tcg_gen_helper_1_4(helper_ld_asi
, dst
, addr
, r_asi
, r_size
, r_sign
);
1661 tcg_temp_free(r_sign
);
1662 tcg_temp_free(r_size
);
1663 tcg_temp_free(r_asi
);
1666 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1670 r_asi
= gen_get_asi(insn
, addr
);
1671 r_size
= tcg_const_i32(size
);
1672 tcg_gen_helper_0_4(helper_st_asi
, addr
, src
, r_asi
, r_size
);
1673 tcg_temp_free(r_size
);
1674 tcg_temp_free(r_asi
);
1677 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1679 TCGv r_asi
, r_size
, r_rd
;
1681 r_asi
= gen_get_asi(insn
, addr
);
1682 r_size
= tcg_const_i32(size
);
1683 r_rd
= tcg_const_i32(rd
);
1684 tcg_gen_helper_0_4(helper_ldf_asi
, addr
, r_asi
, r_size
, r_rd
);
1685 tcg_temp_free(r_rd
);
1686 tcg_temp_free(r_size
);
1687 tcg_temp_free(r_asi
);
1690 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
1692 TCGv r_asi
, r_size
, r_rd
;
1694 r_asi
= gen_get_asi(insn
, addr
);
1695 r_size
= tcg_const_i32(size
);
1696 r_rd
= tcg_const_i32(rd
);
1697 tcg_gen_helper_0_4(helper_stf_asi
, addr
, r_asi
, r_size
, r_rd
);
1698 tcg_temp_free(r_rd
);
1699 tcg_temp_free(r_size
);
1700 tcg_temp_free(r_asi
);
1703 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1705 TCGv r_asi
, r_size
, r_sign
;
1707 r_asi
= gen_get_asi(insn
, addr
);
1708 r_size
= tcg_const_i32(4);
1709 r_sign
= tcg_const_i32(0);
1710 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1711 tcg_temp_free(r_sign
);
1712 tcg_gen_helper_0_4(helper_st_asi
, addr
, dst
, r_asi
, r_size
);
1713 tcg_temp_free(r_size
);
1714 tcg_temp_free(r_asi
);
1715 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1718 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1722 r_asi
= gen_get_asi(insn
, addr
);
1723 r_rd
= tcg_const_i32(rd
);
1724 tcg_gen_helper_0_3(helper_ldda_asi
, addr
, r_asi
, r_rd
);
1725 tcg_temp_free(r_rd
);
1726 tcg_temp_free(r_asi
);
1729 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1731 TCGv r_temp
, r_asi
, r_size
;
1733 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
1734 gen_movl_reg_TN(rd
+ 1, r_temp
);
1735 tcg_gen_helper_1_2(helper_pack64
, cpu_tmp64
, hi
,
1737 tcg_temp_free(r_temp
);
1738 r_asi
= gen_get_asi(insn
, addr
);
1739 r_size
= tcg_const_i32(8);
1740 tcg_gen_helper_0_4(helper_st_asi
, addr
, cpu_tmp64
, r_asi
, r_size
);
1741 tcg_temp_free(r_size
);
1742 tcg_temp_free(r_asi
);
1745 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1750 r_val1
= tcg_temp_new(TCG_TYPE_TL
);
1751 gen_movl_reg_TN(rd
, r_val1
);
1752 r_asi
= gen_get_asi(insn
, addr
);
1753 tcg_gen_helper_1_4(helper_cas_asi
, dst
, addr
, r_val1
, val2
, r_asi
);
1754 tcg_temp_free(r_asi
);
1755 tcg_temp_free(r_val1
);
1758 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1763 gen_movl_reg_TN(rd
, cpu_tmp64
);
1764 r_asi
= gen_get_asi(insn
, addr
);
1765 tcg_gen_helper_1_4(helper_casx_asi
, dst
, addr
, cpu_tmp64
, val2
, r_asi
);
1766 tcg_temp_free(r_asi
);
1769 #elif !defined(CONFIG_USER_ONLY)
1771 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1774 TCGv r_asi
, r_size
, r_sign
;
1776 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1777 r_size
= tcg_const_i32(size
);
1778 r_sign
= tcg_const_i32(sign
);
1779 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1780 tcg_temp_free(r_sign
);
1781 tcg_temp_free(r_size
);
1782 tcg_temp_free(r_asi
);
1783 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1786 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1790 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
1791 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1792 r_size
= tcg_const_i32(size
);
1793 tcg_gen_helper_0_4(helper_st_asi
, addr
, cpu_tmp64
, r_asi
, r_size
);
1794 tcg_temp_free(r_size
);
1795 tcg_temp_free(r_asi
);
1798 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1800 TCGv r_asi
, r_size
, r_sign
;
1802 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1803 r_size
= tcg_const_i32(4);
1804 r_sign
= tcg_const_i32(0);
1805 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1806 tcg_temp_free(r_sign
);
1807 tcg_gen_helper_0_4(helper_st_asi
, addr
, dst
, r_asi
, r_size
);
1808 tcg_temp_free(r_size
);
1809 tcg_temp_free(r_asi
);
1810 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1813 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1815 TCGv r_asi
, r_size
, r_sign
;
1817 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1818 r_size
= tcg_const_i32(8);
1819 r_sign
= tcg_const_i32(0);
1820 tcg_gen_helper_1_4(helper_ld_asi
, cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1821 tcg_temp_free(r_sign
);
1822 tcg_temp_free(r_size
);
1823 tcg_temp_free(r_asi
);
1824 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
1825 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
1826 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
1827 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
1828 gen_movl_TN_reg(rd
, hi
);
1831 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1833 TCGv r_temp
, r_asi
, r_size
;
1835 r_temp
= tcg_temp_new(TCG_TYPE_TL
);
1836 gen_movl_reg_TN(rd
+ 1, r_temp
);
1837 tcg_gen_helper_1_2(helper_pack64
, cpu_tmp64
, hi
, r_temp
);
1838 tcg_temp_free(r_temp
);
1839 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1840 r_size
= tcg_const_i32(8);
1841 tcg_gen_helper_0_4(helper_st_asi
, addr
, cpu_tmp64
, r_asi
, r_size
);
1842 tcg_temp_free(r_size
);
1843 tcg_temp_free(r_asi
);
1847 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1848 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
1850 TCGv r_val
, r_asi
, r_size
;
1852 gen_ld_asi(dst
, addr
, insn
, 1, 0);
1854 r_val
= tcg_const_i64(0xffULL
);
1855 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1856 r_size
= tcg_const_i32(1);
1857 tcg_gen_helper_0_4(helper_st_asi
, addr
, r_val
, r_asi
, r_size
);
1858 tcg_temp_free(r_size
);
1859 tcg_temp_free(r_asi
);
1860 tcg_temp_free(r_val
);
1864 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
1869 rs1
= GET_FIELD(insn
, 13, 17);
1871 r_rs1
= tcg_const_tl(0); // XXX how to free?
1873 r_rs1
= cpu_gregs
[rs1
];
1875 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
1879 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
1884 if (IS_IMM
) { /* immediate */
1885 rs2
= GET_FIELDs(insn
, 19, 31);
1886 r_rs2
= tcg_const_tl((int)rs2
); // XXX how to free?
1887 } else { /* register */
1888 rs2
= GET_FIELD(insn
, 27, 31);
1890 r_rs2
= tcg_const_tl(0); // XXX how to free?
1892 r_rs2
= cpu_gregs
[rs2
];
1894 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
1899 #define CHECK_IU_FEATURE(dc, FEATURE) \
1900 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1902 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1903 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1906 /* before an instruction, dc->pc must be static */
1907 static void disas_sparc_insn(DisasContext
* dc
)
1909 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1911 if (unlikely(loglevel
& CPU_LOG_TB_OP
))
1912 tcg_gen_debug_insn_start(dc
->pc
);
1913 insn
= ldl_code(dc
->pc
);
1914 opc
= GET_FIELD(insn
, 0, 1);
1916 rd
= GET_FIELD(insn
, 2, 6);
1918 cpu_src1
= tcg_temp_new(TCG_TYPE_TL
); // const
1919 cpu_src2
= tcg_temp_new(TCG_TYPE_TL
); // const
1922 case 0: /* branches/sethi */
1924 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1927 #ifdef TARGET_SPARC64
1928 case 0x1: /* V9 BPcc */
1932 target
= GET_FIELD_SP(insn
, 0, 18);
1933 target
= sign_extend(target
, 18);
1935 cc
= GET_FIELD_SP(insn
, 20, 21);
1937 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1939 do_branch(dc
, target
, insn
, 1, cpu_cond
);
1944 case 0x3: /* V9 BPr */
1946 target
= GET_FIELD_SP(insn
, 0, 13) |
1947 (GET_FIELD_SP(insn
, 20, 21) << 14);
1948 target
= sign_extend(target
, 16);
1950 cpu_src1
= get_src1(insn
, cpu_src1
);
1951 do_branch_reg(dc
, target
, insn
, cpu_cond
, cpu_src1
);
1954 case 0x5: /* V9 FBPcc */
1956 int cc
= GET_FIELD_SP(insn
, 20, 21);
1957 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1959 target
= GET_FIELD_SP(insn
, 0, 18);
1960 target
= sign_extend(target
, 19);
1962 do_fbranch(dc
, target
, insn
, cc
, cpu_cond
);
1966 case 0x7: /* CBN+x */
1971 case 0x2: /* BN+x */
1973 target
= GET_FIELD(insn
, 10, 31);
1974 target
= sign_extend(target
, 22);
1976 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1979 case 0x6: /* FBN+x */
1981 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1983 target
= GET_FIELD(insn
, 10, 31);
1984 target
= sign_extend(target
, 22);
1986 do_fbranch(dc
, target
, insn
, 0, cpu_cond
);
1989 case 0x4: /* SETHI */
1991 uint32_t value
= GET_FIELD(insn
, 10, 31);
1994 r_const
= tcg_const_tl(value
<< 10);
1995 gen_movl_TN_reg(rd
, r_const
);
1996 tcg_temp_free(r_const
);
1999 case 0x0: /* UNIMPL */
2008 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
2011 r_const
= tcg_const_tl(dc
->pc
);
2012 gen_movl_TN_reg(15, r_const
);
2013 tcg_temp_free(r_const
);
2015 gen_mov_pc_npc(dc
, cpu_cond
);
2019 case 2: /* FPU & Logical Operations */
2021 unsigned int xop
= GET_FIELD(insn
, 7, 12);
2022 if (xop
== 0x3a) { /* generate trap */
2025 cpu_src1
= get_src1(insn
, cpu_src1
);
2027 rs2
= GET_FIELD(insn
, 25, 31);
2028 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
2030 rs2
= GET_FIELD(insn
, 27, 31);
2032 gen_movl_reg_TN(rs2
, cpu_src2
);
2033 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2035 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
2037 cond
= GET_FIELD(insn
, 3, 6);
2039 save_state(dc
, cpu_cond
);
2040 tcg_gen_helper_0_1(helper_trap
, cpu_dst
);
2041 } else if (cond
!= 0) {
2042 TCGv r_cond
= tcg_temp_new(TCG_TYPE_TL
);
2043 #ifdef TARGET_SPARC64
2045 int cc
= GET_FIELD_SP(insn
, 11, 12);
2047 save_state(dc
, cpu_cond
);
2049 gen_cond(r_cond
, 0, cond
);
2051 gen_cond(r_cond
, 1, cond
);
2055 save_state(dc
, cpu_cond
);
2056 gen_cond(r_cond
, 0, cond
);
2058 tcg_gen_helper_0_2(helper_trapcc
, cpu_dst
, r_cond
);
2059 tcg_temp_free(r_cond
);
2065 } else if (xop
== 0x28) {
2066 rs1
= GET_FIELD(insn
, 13, 17);
2069 #ifndef TARGET_SPARC64
2070 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2071 manual, rdy on the microSPARC
2073 case 0x0f: /* stbar in the SPARCv8 manual,
2074 rdy on the microSPARC II */
2075 case 0x10 ... 0x1f: /* implementation-dependent in the
2076 SPARCv8 manual, rdy on the
2079 gen_movl_TN_reg(rd
, cpu_y
);
2081 #ifdef TARGET_SPARC64
2082 case 0x2: /* V9 rdccr */
2083 tcg_gen_helper_1_0(helper_rdccr
, cpu_dst
);
2084 gen_movl_TN_reg(rd
, cpu_dst
);
2086 case 0x3: /* V9 rdasi */
2087 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
2088 gen_movl_TN_reg(rd
, cpu_dst
);
2090 case 0x4: /* V9 rdtick */
2094 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2095 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2096 offsetof(CPUState
, tick
));
2097 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_dst
,
2099 tcg_temp_free(r_tickptr
);
2100 gen_movl_TN_reg(rd
, cpu_dst
);
2103 case 0x5: /* V9 rdpc */
2107 r_const
= tcg_const_tl(dc
->pc
);
2108 gen_movl_TN_reg(rd
, r_const
);
2109 tcg_temp_free(r_const
);
2112 case 0x6: /* V9 rdfprs */
2113 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2114 gen_movl_TN_reg(rd
, cpu_dst
);
2116 case 0xf: /* V9 membar */
2117 break; /* no effect */
2118 case 0x13: /* Graphics Status */
2119 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2121 gen_movl_TN_reg(rd
, cpu_gsr
);
2123 case 0x17: /* Tick compare */
2124 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2126 case 0x18: /* System tick */
2130 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2131 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2132 offsetof(CPUState
, stick
));
2133 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_dst
,
2135 tcg_temp_free(r_tickptr
);
2136 gen_movl_TN_reg(rd
, cpu_dst
);
2139 case 0x19: /* System tick compare */
2140 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2142 case 0x10: /* Performance Control */
2143 case 0x11: /* Performance Instrumentation Counter */
2144 case 0x12: /* Dispatch Control */
2145 case 0x14: /* Softint set, WO */
2146 case 0x15: /* Softint clear, WO */
2147 case 0x16: /* Softint write */
2152 #if !defined(CONFIG_USER_ONLY)
2153 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2154 #ifndef TARGET_SPARC64
2155 if (!supervisor(dc
))
2157 tcg_gen_helper_1_0(helper_rdpsr
, cpu_dst
);
2159 CHECK_IU_FEATURE(dc
, HYPV
);
2160 if (!hypervisor(dc
))
2162 rs1
= GET_FIELD(insn
, 13, 17);
2165 // gen_op_rdhpstate();
2168 // gen_op_rdhtstate();
2171 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
2174 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
2177 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
2179 case 31: // hstick_cmpr
2180 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2186 gen_movl_TN_reg(rd
, cpu_dst
);
2188 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2189 if (!supervisor(dc
))
2191 #ifdef TARGET_SPARC64
2192 rs1
= GET_FIELD(insn
, 13, 17);
2198 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2199 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2200 offsetof(CPUState
, tsptr
));
2201 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2202 offsetof(trap_state
, tpc
));
2203 tcg_temp_free(r_tsptr
);
2210 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2211 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2212 offsetof(CPUState
, tsptr
));
2213 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2214 offsetof(trap_state
, tnpc
));
2215 tcg_temp_free(r_tsptr
);
2222 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2223 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2224 offsetof(CPUState
, tsptr
));
2225 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2226 offsetof(trap_state
, tstate
));
2227 tcg_temp_free(r_tsptr
);
2234 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
2235 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2236 offsetof(CPUState
, tsptr
));
2237 tcg_gen_ld_i32(cpu_tmp0
, r_tsptr
,
2238 offsetof(trap_state
, tt
));
2239 tcg_temp_free(r_tsptr
);
2246 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
2247 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2248 offsetof(CPUState
, tick
));
2249 tcg_gen_helper_1_1(helper_tick_get_count
, cpu_tmp0
,
2251 gen_movl_TN_reg(rd
, cpu_tmp0
);
2252 tcg_temp_free(r_tickptr
);
2256 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2259 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2260 offsetof(CPUSPARCState
, pstate
));
2261 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2264 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2265 offsetof(CPUSPARCState
, tl
));
2266 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2269 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2270 offsetof(CPUSPARCState
, psrpil
));
2271 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2274 tcg_gen_helper_1_0(helper_rdcwp
, cpu_tmp0
);
2277 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2278 offsetof(CPUSPARCState
, cansave
));
2279 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2281 case 11: // canrestore
2282 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2283 offsetof(CPUSPARCState
, canrestore
));
2284 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2286 case 12: // cleanwin
2287 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2288 offsetof(CPUSPARCState
, cleanwin
));
2289 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2291 case 13: // otherwin
2292 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2293 offsetof(CPUSPARCState
, otherwin
));
2294 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2297 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2298 offsetof(CPUSPARCState
, wstate
));
2299 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2301 case 16: // UA2005 gl
2302 CHECK_IU_FEATURE(dc
, GL
);
2303 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2304 offsetof(CPUSPARCState
, gl
));
2305 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2307 case 26: // UA2005 strand status
2308 CHECK_IU_FEATURE(dc
, HYPV
);
2309 if (!hypervisor(dc
))
2311 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_ssr
);
2314 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2321 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2323 gen_movl_TN_reg(rd
, cpu_tmp0
);
2325 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2326 #ifdef TARGET_SPARC64
2327 save_state(dc
, cpu_cond
);
2328 tcg_gen_helper_0_0(helper_flushw
);
2330 if (!supervisor(dc
))
2332 gen_movl_TN_reg(rd
, cpu_tbr
);
2336 } else if (xop
== 0x34) { /* FPU Operations */
2337 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2339 gen_op_clear_ieee_excp_and_FTT();
2340 rs1
= GET_FIELD(insn
, 13, 17);
2341 rs2
= GET_FIELD(insn
, 27, 31);
2342 xop
= GET_FIELD(insn
, 18, 26);
2344 case 0x1: /* fmovs */
2345 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2347 case 0x5: /* fnegs */
2348 tcg_gen_helper_1_1(helper_fnegs
, cpu_fpr
[rd
],
2351 case 0x9: /* fabss */
2352 tcg_gen_helper_1_1(helper_fabss
, cpu_fpr
[rd
],
2355 case 0x29: /* fsqrts */
2356 CHECK_FPU_FEATURE(dc
, FSQRT
);
2357 gen_clear_float_exceptions();
2358 tcg_gen_helper_1_1(helper_fsqrts
, cpu_tmp32
,
2360 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2361 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2363 case 0x2a: /* fsqrtd */
2364 CHECK_FPU_FEATURE(dc
, FSQRT
);
2365 gen_op_load_fpr_DT1(DFPREG(rs2
));
2366 gen_clear_float_exceptions();
2367 tcg_gen_helper_0_0(helper_fsqrtd
);
2368 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2369 gen_op_store_DT0_fpr(DFPREG(rd
));
2371 case 0x2b: /* fsqrtq */
2372 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2373 gen_op_load_fpr_QT1(QFPREG(rs2
));
2374 gen_clear_float_exceptions();
2375 tcg_gen_helper_0_0(helper_fsqrtq
);
2376 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2377 gen_op_store_QT0_fpr(QFPREG(rd
));
2379 case 0x41: /* fadds */
2380 gen_clear_float_exceptions();
2381 tcg_gen_helper_1_2(helper_fadds
, cpu_tmp32
,
2382 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2383 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2384 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2387 gen_op_load_fpr_DT0(DFPREG(rs1
));
2388 gen_op_load_fpr_DT1(DFPREG(rs2
));
2389 gen_clear_float_exceptions();
2390 tcg_gen_helper_0_0(helper_faddd
);
2391 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2392 gen_op_store_DT0_fpr(DFPREG(rd
));
2394 case 0x43: /* faddq */
2395 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2396 gen_op_load_fpr_QT0(QFPREG(rs1
));
2397 gen_op_load_fpr_QT1(QFPREG(rs2
));
2398 gen_clear_float_exceptions();
2399 tcg_gen_helper_0_0(helper_faddq
);
2400 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2401 gen_op_store_QT0_fpr(QFPREG(rd
));
2403 case 0x45: /* fsubs */
2404 gen_clear_float_exceptions();
2405 tcg_gen_helper_1_2(helper_fsubs
, cpu_tmp32
,
2406 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2407 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2408 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2411 gen_op_load_fpr_DT0(DFPREG(rs1
));
2412 gen_op_load_fpr_DT1(DFPREG(rs2
));
2413 gen_clear_float_exceptions();
2414 tcg_gen_helper_0_0(helper_fsubd
);
2415 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2416 gen_op_store_DT0_fpr(DFPREG(rd
));
2418 case 0x47: /* fsubq */
2419 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2420 gen_op_load_fpr_QT0(QFPREG(rs1
));
2421 gen_op_load_fpr_QT1(QFPREG(rs2
));
2422 gen_clear_float_exceptions();
2423 tcg_gen_helper_0_0(helper_fsubq
);
2424 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2425 gen_op_store_QT0_fpr(QFPREG(rd
));
2427 case 0x49: /* fmuls */
2428 CHECK_FPU_FEATURE(dc
, FMUL
);
2429 gen_clear_float_exceptions();
2430 tcg_gen_helper_1_2(helper_fmuls
, cpu_tmp32
,
2431 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2432 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2433 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2435 case 0x4a: /* fmuld */
2436 CHECK_FPU_FEATURE(dc
, FMUL
);
2437 gen_op_load_fpr_DT0(DFPREG(rs1
));
2438 gen_op_load_fpr_DT1(DFPREG(rs2
));
2439 gen_clear_float_exceptions();
2440 tcg_gen_helper_0_0(helper_fmuld
);
2441 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2442 gen_op_store_DT0_fpr(DFPREG(rd
));
2444 case 0x4b: /* fmulq */
2445 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2446 CHECK_FPU_FEATURE(dc
, FMUL
);
2447 gen_op_load_fpr_QT0(QFPREG(rs1
));
2448 gen_op_load_fpr_QT1(QFPREG(rs2
));
2449 gen_clear_float_exceptions();
2450 tcg_gen_helper_0_0(helper_fmulq
);
2451 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2452 gen_op_store_QT0_fpr(QFPREG(rd
));
2454 case 0x4d: /* fdivs */
2455 gen_clear_float_exceptions();
2456 tcg_gen_helper_1_2(helper_fdivs
, cpu_tmp32
,
2457 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2458 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2459 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2462 gen_op_load_fpr_DT0(DFPREG(rs1
));
2463 gen_op_load_fpr_DT1(DFPREG(rs2
));
2464 gen_clear_float_exceptions();
2465 tcg_gen_helper_0_0(helper_fdivd
);
2466 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2467 gen_op_store_DT0_fpr(DFPREG(rd
));
2469 case 0x4f: /* fdivq */
2470 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2471 gen_op_load_fpr_QT0(QFPREG(rs1
));
2472 gen_op_load_fpr_QT1(QFPREG(rs2
));
2473 gen_clear_float_exceptions();
2474 tcg_gen_helper_0_0(helper_fdivq
);
2475 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2476 gen_op_store_QT0_fpr(QFPREG(rd
));
2479 CHECK_FPU_FEATURE(dc
, FSMULD
);
2480 gen_op_load_fpr_FT0(rs1
);
2481 gen_op_load_fpr_FT1(rs2
);
2482 gen_clear_float_exceptions();
2483 tcg_gen_helper_0_0(helper_fsmuld
);
2484 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2485 gen_op_store_DT0_fpr(DFPREG(rd
));
2487 case 0x6e: /* fdmulq */
2488 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2489 gen_op_load_fpr_DT0(DFPREG(rs1
));
2490 gen_op_load_fpr_DT1(DFPREG(rs2
));
2491 gen_clear_float_exceptions();
2492 tcg_gen_helper_0_0(helper_fdmulq
);
2493 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2494 gen_op_store_QT0_fpr(QFPREG(rd
));
2496 case 0xc4: /* fitos */
2497 gen_clear_float_exceptions();
2498 tcg_gen_helper_1_1(helper_fitos
, cpu_tmp32
,
2500 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2501 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2504 gen_op_load_fpr_DT1(DFPREG(rs2
));
2505 gen_clear_float_exceptions();
2506 tcg_gen_helper_0_0(helper_fdtos
);
2507 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2508 gen_op_store_FT0_fpr(rd
);
2510 case 0xc7: /* fqtos */
2511 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2512 gen_op_load_fpr_QT1(QFPREG(rs2
));
2513 gen_clear_float_exceptions();
2514 tcg_gen_helper_1_0(helper_fqtos
, cpu_tmp32
);
2515 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2516 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2519 gen_op_load_fpr_FT1(rs2
);
2520 tcg_gen_helper_0_0(helper_fitod
);
2521 gen_op_store_DT0_fpr(DFPREG(rd
));
2524 gen_op_load_fpr_FT1(rs2
);
2525 tcg_gen_helper_0_0(helper_fstod
);
2526 gen_op_store_DT0_fpr(DFPREG(rd
));
2528 case 0xcb: /* fqtod */
2529 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2530 gen_op_load_fpr_QT1(QFPREG(rs2
));
2531 gen_clear_float_exceptions();
2532 tcg_gen_helper_0_0(helper_fqtod
);
2533 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2534 gen_op_store_DT0_fpr(DFPREG(rd
));
2536 case 0xcc: /* fitoq */
2537 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2538 tcg_gen_helper_0_1(helper_fitoq
, cpu_fpr
[rs2
]);
2539 gen_op_store_QT0_fpr(QFPREG(rd
));
2541 case 0xcd: /* fstoq */
2542 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2543 tcg_gen_helper_0_1(helper_fstoq
, cpu_fpr
[rs2
]);
2544 gen_op_store_QT0_fpr(QFPREG(rd
));
2546 case 0xce: /* fdtoq */
2547 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2548 gen_op_load_fpr_DT1(DFPREG(rs2
));
2549 tcg_gen_helper_0_0(helper_fdtoq
);
2550 gen_op_store_QT0_fpr(QFPREG(rd
));
2552 case 0xd1: /* fstoi */
2553 gen_clear_float_exceptions();
2554 tcg_gen_helper_1_1(helper_fstoi
, cpu_tmp32
,
2556 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2557 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2560 gen_op_load_fpr_DT1(DFPREG(rs2
));
2561 gen_clear_float_exceptions();
2562 tcg_gen_helper_0_0(helper_fdtoi
);
2563 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2564 gen_op_store_FT0_fpr(rd
);
2566 case 0xd3: /* fqtoi */
2567 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2568 gen_op_load_fpr_QT1(QFPREG(rs2
));
2569 gen_clear_float_exceptions();
2570 tcg_gen_helper_1_0(helper_fqtoi
, cpu_tmp32
);
2571 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2572 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2574 #ifdef TARGET_SPARC64
2575 case 0x2: /* V9 fmovd */
2576 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)],
2577 cpu_fpr
[DFPREG(rs2
)]);
2578 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2579 cpu_fpr
[DFPREG(rs2
) + 1]);
2581 case 0x3: /* V9 fmovq */
2582 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2583 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)],
2584 cpu_fpr
[QFPREG(rs2
)]);
2585 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2586 cpu_fpr
[QFPREG(rs2
) + 1]);
2587 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2588 cpu_fpr
[QFPREG(rs2
) + 2]);
2589 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2590 cpu_fpr
[QFPREG(rs2
) + 3]);
2592 case 0x6: /* V9 fnegd */
2593 gen_op_load_fpr_DT1(DFPREG(rs2
));
2594 tcg_gen_helper_0_0(helper_fnegd
);
2595 gen_op_store_DT0_fpr(DFPREG(rd
));
2597 case 0x7: /* V9 fnegq */
2598 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2599 gen_op_load_fpr_QT1(QFPREG(rs2
));
2600 tcg_gen_helper_0_0(helper_fnegq
);
2601 gen_op_store_QT0_fpr(QFPREG(rd
));
2603 case 0xa: /* V9 fabsd */
2604 gen_op_load_fpr_DT1(DFPREG(rs2
));
2605 tcg_gen_helper_0_0(helper_fabsd
);
2606 gen_op_store_DT0_fpr(DFPREG(rd
));
2608 case 0xb: /* V9 fabsq */
2609 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2610 gen_op_load_fpr_QT1(QFPREG(rs2
));
2611 tcg_gen_helper_0_0(helper_fabsq
);
2612 gen_op_store_QT0_fpr(QFPREG(rd
));
2614 case 0x81: /* V9 fstox */
2615 gen_op_load_fpr_FT1(rs2
);
2616 gen_clear_float_exceptions();
2617 tcg_gen_helper_0_0(helper_fstox
);
2618 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2619 gen_op_store_DT0_fpr(DFPREG(rd
));
2621 case 0x82: /* V9 fdtox */
2622 gen_op_load_fpr_DT1(DFPREG(rs2
));
2623 gen_clear_float_exceptions();
2624 tcg_gen_helper_0_0(helper_fdtox
);
2625 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2626 gen_op_store_DT0_fpr(DFPREG(rd
));
2628 case 0x83: /* V9 fqtox */
2629 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2630 gen_op_load_fpr_QT1(QFPREG(rs2
));
2631 gen_clear_float_exceptions();
2632 tcg_gen_helper_0_0(helper_fqtox
);
2633 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2634 gen_op_store_DT0_fpr(DFPREG(rd
));
2636 case 0x84: /* V9 fxtos */
2637 gen_op_load_fpr_DT1(DFPREG(rs2
));
2638 gen_clear_float_exceptions();
2639 tcg_gen_helper_0_0(helper_fxtos
);
2640 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2641 gen_op_store_FT0_fpr(rd
);
2643 case 0x88: /* V9 fxtod */
2644 gen_op_load_fpr_DT1(DFPREG(rs2
));
2645 gen_clear_float_exceptions();
2646 tcg_gen_helper_0_0(helper_fxtod
);
2647 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2648 gen_op_store_DT0_fpr(DFPREG(rd
));
2650 case 0x8c: /* V9 fxtoq */
2651 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2652 gen_op_load_fpr_DT1(DFPREG(rs2
));
2653 gen_clear_float_exceptions();
2654 tcg_gen_helper_0_0(helper_fxtoq
);
2655 tcg_gen_helper_0_0(helper_check_ieee_exceptions
);
2656 gen_op_store_QT0_fpr(QFPREG(rd
));
2662 } else if (xop
== 0x35) { /* FPU Operations */
2663 #ifdef TARGET_SPARC64
2666 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2668 gen_op_clear_ieee_excp_and_FTT();
2669 rs1
= GET_FIELD(insn
, 13, 17);
2670 rs2
= GET_FIELD(insn
, 27, 31);
2671 xop
= GET_FIELD(insn
, 18, 26);
2672 #ifdef TARGET_SPARC64
2673 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2676 l1
= gen_new_label();
2677 cond
= GET_FIELD_SP(insn
, 14, 17);
2678 cpu_src1
= get_src1(insn
, cpu_src1
);
2679 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2681 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2684 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2687 l1
= gen_new_label();
2688 cond
= GET_FIELD_SP(insn
, 14, 17);
2689 cpu_src1
= get_src1(insn
, cpu_src1
);
2690 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2692 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2693 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2696 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2699 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2700 l1
= gen_new_label();
2701 cond
= GET_FIELD_SP(insn
, 14, 17);
2702 cpu_src1
= get_src1(insn
, cpu_src1
);
2703 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2705 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2706 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2707 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2708 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2714 #ifdef TARGET_SPARC64
2715 #define FMOVSCC(fcc) \
2720 l1 = gen_new_label(); \
2721 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2722 cond = GET_FIELD_SP(insn, 14, 17); \
2723 gen_fcond(r_cond, fcc, cond); \
2724 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2726 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2727 gen_set_label(l1); \
2728 tcg_temp_free(r_cond); \
2730 #define FMOVDCC(fcc) \
2735 l1 = gen_new_label(); \
2736 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2737 cond = GET_FIELD_SP(insn, 14, 17); \
2738 gen_fcond(r_cond, fcc, cond); \
2739 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2741 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2742 cpu_fpr[DFPREG(rs2)]); \
2743 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2744 cpu_fpr[DFPREG(rs2) + 1]); \
2745 gen_set_label(l1); \
2746 tcg_temp_free(r_cond); \
2748 #define FMOVQCC(fcc) \
2753 l1 = gen_new_label(); \
2754 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2755 cond = GET_FIELD_SP(insn, 14, 17); \
2756 gen_fcond(r_cond, fcc, cond); \
2757 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2759 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2760 cpu_fpr[QFPREG(rs2)]); \
2761 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2762 cpu_fpr[QFPREG(rs2) + 1]); \
2763 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2764 cpu_fpr[QFPREG(rs2) + 2]); \
2765 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2766 cpu_fpr[QFPREG(rs2) + 3]); \
2767 gen_set_label(l1); \
2768 tcg_temp_free(r_cond); \
2770 case 0x001: /* V9 fmovscc %fcc0 */
2773 case 0x002: /* V9 fmovdcc %fcc0 */
2776 case 0x003: /* V9 fmovqcc %fcc0 */
2777 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2780 case 0x041: /* V9 fmovscc %fcc1 */
2783 case 0x042: /* V9 fmovdcc %fcc1 */
2786 case 0x043: /* V9 fmovqcc %fcc1 */
2787 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2790 case 0x081: /* V9 fmovscc %fcc2 */
2793 case 0x082: /* V9 fmovdcc %fcc2 */
2796 case 0x083: /* V9 fmovqcc %fcc2 */
2797 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2800 case 0x0c1: /* V9 fmovscc %fcc3 */
2803 case 0x0c2: /* V9 fmovdcc %fcc3 */
2806 case 0x0c3: /* V9 fmovqcc %fcc3 */
2807 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2813 #define FMOVCC(size_FDQ, icc) \
2818 l1 = gen_new_label(); \
2819 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2820 cond = GET_FIELD_SP(insn, 14, 17); \
2821 gen_cond(r_cond, icc, cond); \
2822 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2824 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2825 (glue(size_FDQ, FPREG(rs2))); \
2826 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2827 (glue(size_FDQ, FPREG(rd))); \
2828 gen_set_label(l1); \
2829 tcg_temp_free(r_cond); \
2831 #define FMOVSCC(icc) \
2836 l1 = gen_new_label(); \
2837 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2838 cond = GET_FIELD_SP(insn, 14, 17); \
2839 gen_cond(r_cond, icc, cond); \
2840 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2842 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2843 gen_set_label(l1); \
2844 tcg_temp_free(r_cond); \
2846 #define FMOVDCC(icc) \
2851 l1 = gen_new_label(); \
2852 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2853 cond = GET_FIELD_SP(insn, 14, 17); \
2854 gen_cond(r_cond, icc, cond); \
2855 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2857 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2858 cpu_fpr[DFPREG(rs2)]); \
2859 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2860 cpu_fpr[DFPREG(rs2) + 1]); \
2861 gen_set_label(l1); \
2862 tcg_temp_free(r_cond); \
2864 #define FMOVQCC(icc) \
2869 l1 = gen_new_label(); \
2870 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2871 cond = GET_FIELD_SP(insn, 14, 17); \
2872 gen_cond(r_cond, icc, cond); \
2873 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2875 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2876 cpu_fpr[QFPREG(rs2)]); \
2877 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2878 cpu_fpr[QFPREG(rs2) + 1]); \
2879 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2880 cpu_fpr[QFPREG(rs2) + 2]); \
2881 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2882 cpu_fpr[QFPREG(rs2) + 3]); \
2883 gen_set_label(l1); \
2884 tcg_temp_free(r_cond); \
2887 case 0x101: /* V9 fmovscc %icc */
2890 case 0x102: /* V9 fmovdcc %icc */
2892 case 0x103: /* V9 fmovqcc %icc */
2893 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2896 case 0x181: /* V9 fmovscc %xcc */
2899 case 0x182: /* V9 fmovdcc %xcc */
2902 case 0x183: /* V9 fmovqcc %xcc */
2903 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2910 case 0x51: /* fcmps, V9 %fcc */
2911 gen_op_fcmps(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2913 case 0x52: /* fcmpd, V9 %fcc */
2914 gen_op_load_fpr_DT0(DFPREG(rs1
));
2915 gen_op_load_fpr_DT1(DFPREG(rs2
));
2916 gen_op_fcmpd(rd
& 3);
2918 case 0x53: /* fcmpq, V9 %fcc */
2919 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2920 gen_op_load_fpr_QT0(QFPREG(rs1
));
2921 gen_op_load_fpr_QT1(QFPREG(rs2
));
2922 gen_op_fcmpq(rd
& 3);
2924 case 0x55: /* fcmpes, V9 %fcc */
2925 gen_op_fcmpes(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2927 case 0x56: /* fcmped, V9 %fcc */
2928 gen_op_load_fpr_DT0(DFPREG(rs1
));
2929 gen_op_load_fpr_DT1(DFPREG(rs2
));
2930 gen_op_fcmped(rd
& 3);
2932 case 0x57: /* fcmpeq, V9 %fcc */
2933 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2934 gen_op_load_fpr_QT0(QFPREG(rs1
));
2935 gen_op_load_fpr_QT1(QFPREG(rs2
));
2936 gen_op_fcmpeq(rd
& 3);
2941 } else if (xop
== 0x2) {
2944 rs1
= GET_FIELD(insn
, 13, 17);
2946 // or %g0, x, y -> mov T0, x; mov y, T0
2947 if (IS_IMM
) { /* immediate */
2950 rs2
= GET_FIELDs(insn
, 19, 31);
2951 r_const
= tcg_const_tl((int)rs2
);
2952 gen_movl_TN_reg(rd
, r_const
);
2953 tcg_temp_free(r_const
);
2954 } else { /* register */
2955 rs2
= GET_FIELD(insn
, 27, 31);
2956 gen_movl_reg_TN(rs2
, cpu_dst
);
2957 gen_movl_TN_reg(rd
, cpu_dst
);
2960 cpu_src1
= get_src1(insn
, cpu_src1
);
2961 if (IS_IMM
) { /* immediate */
2962 rs2
= GET_FIELDs(insn
, 19, 31);
2963 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, (int)rs2
);
2964 gen_movl_TN_reg(rd
, cpu_dst
);
2965 } else { /* register */
2966 // or x, %g0, y -> mov T1, x; mov y, T1
2967 rs2
= GET_FIELD(insn
, 27, 31);
2969 gen_movl_reg_TN(rs2
, cpu_src2
);
2970 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2971 gen_movl_TN_reg(rd
, cpu_dst
);
2973 gen_movl_TN_reg(rd
, cpu_src1
);
2976 #ifdef TARGET_SPARC64
2977 } else if (xop
== 0x25) { /* sll, V9 sllx */
2978 cpu_src1
= get_src1(insn
, cpu_src1
);
2979 if (IS_IMM
) { /* immediate */
2980 rs2
= GET_FIELDs(insn
, 20, 31);
2981 if (insn
& (1 << 12)) {
2982 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
2984 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, rs2
& 0x1f);
2986 } else { /* register */
2987 rs2
= GET_FIELD(insn
, 27, 31);
2988 gen_movl_reg_TN(rs2
, cpu_src2
);
2989 if (insn
& (1 << 12)) {
2990 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
2992 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
2994 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
2996 gen_movl_TN_reg(rd
, cpu_dst
);
2997 } else if (xop
== 0x26) { /* srl, V9 srlx */
2998 cpu_src1
= get_src1(insn
, cpu_src1
);
2999 if (IS_IMM
) { /* immediate */
3000 rs2
= GET_FIELDs(insn
, 20, 31);
3001 if (insn
& (1 << 12)) {
3002 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
3004 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3005 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, rs2
& 0x1f);
3007 } else { /* register */
3008 rs2
= GET_FIELD(insn
, 27, 31);
3009 gen_movl_reg_TN(rs2
, cpu_src2
);
3010 if (insn
& (1 << 12)) {
3011 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3012 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3014 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3015 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3016 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3019 gen_movl_TN_reg(rd
, cpu_dst
);
3020 } else if (xop
== 0x27) { /* sra, V9 srax */
3021 cpu_src1
= get_src1(insn
, cpu_src1
);
3022 if (IS_IMM
) { /* immediate */
3023 rs2
= GET_FIELDs(insn
, 20, 31);
3024 if (insn
& (1 << 12)) {
3025 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, rs2
& 0x3f);
3027 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3028 tcg_gen_ext_i32_i64(cpu_dst
, cpu_dst
);
3029 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, rs2
& 0x1f);
3031 } else { /* register */
3032 rs2
= GET_FIELD(insn
, 27, 31);
3033 gen_movl_reg_TN(rs2
, cpu_src2
);
3034 if (insn
& (1 << 12)) {
3035 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3036 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3038 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3039 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3040 tcg_gen_ext_i32_i64(cpu_dst
, cpu_dst
);
3041 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3044 gen_movl_TN_reg(rd
, cpu_dst
);
3046 } else if (xop
< 0x36) {
3047 cpu_src1
= get_src1(insn
, cpu_src1
);
3048 cpu_src2
= get_src2(insn
, cpu_src2
);
3050 switch (xop
& ~0x10) {
3053 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3055 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3058 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3060 gen_op_logic_cc(cpu_dst
);
3063 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3065 gen_op_logic_cc(cpu_dst
);
3068 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3070 gen_op_logic_cc(cpu_dst
);
3074 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3076 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3079 tcg_gen_xori_tl(cpu_tmp0
, cpu_src2
, -1);
3080 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3082 gen_op_logic_cc(cpu_dst
);
3085 tcg_gen_xori_tl(cpu_tmp0
, cpu_src2
, -1);
3086 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3088 gen_op_logic_cc(cpu_dst
);
3091 tcg_gen_xori_tl(cpu_tmp0
, cpu_src2
, -1);
3092 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3094 gen_op_logic_cc(cpu_dst
);
3098 gen_op_addx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3100 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3101 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3102 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3105 #ifdef TARGET_SPARC64
3106 case 0x9: /* V9 mulx */
3107 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3111 CHECK_IU_FEATURE(dc
, MUL
);
3112 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3114 gen_op_logic_cc(cpu_dst
);
3117 CHECK_IU_FEATURE(dc
, MUL
);
3118 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3120 gen_op_logic_cc(cpu_dst
);
3124 gen_op_subx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3126 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3127 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3128 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3131 #ifdef TARGET_SPARC64
3132 case 0xd: /* V9 udivx */
3133 tcg_gen_mov_tl(cpu_cc_src
, cpu_src1
);
3134 tcg_gen_mov_tl(cpu_cc_src2
, cpu_src2
);
3135 gen_trap_ifdivzero_tl(cpu_cc_src2
);
3136 tcg_gen_divu_i64(cpu_dst
, cpu_cc_src
, cpu_cc_src2
);
3140 CHECK_IU_FEATURE(dc
, DIV
);
3141 tcg_gen_helper_1_2(helper_udiv
, cpu_dst
, cpu_src1
,
3144 gen_op_div_cc(cpu_dst
);
3147 CHECK_IU_FEATURE(dc
, DIV
);
3148 tcg_gen_helper_1_2(helper_sdiv
, cpu_dst
, cpu_src1
,
3151 gen_op_div_cc(cpu_dst
);
3156 gen_movl_TN_reg(rd
, cpu_dst
);
3159 case 0x20: /* taddcc */
3160 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3161 gen_movl_TN_reg(rd
, cpu_dst
);
3163 case 0x21: /* tsubcc */
3164 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3165 gen_movl_TN_reg(rd
, cpu_dst
);
3167 case 0x22: /* taddcctv */
3168 save_state(dc
, cpu_cond
);
3169 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3170 gen_movl_TN_reg(rd
, cpu_dst
);
3172 case 0x23: /* tsubcctv */
3173 save_state(dc
, cpu_cond
);
3174 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3175 gen_movl_TN_reg(rd
, cpu_dst
);
3177 case 0x24: /* mulscc */
3178 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3179 gen_movl_TN_reg(rd
, cpu_dst
);
3181 #ifndef TARGET_SPARC64
3182 case 0x25: /* sll */
3183 if (IS_IMM
) { /* immediate */
3184 rs2
= GET_FIELDs(insn
, 20, 31);
3185 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3186 } else { /* register */
3187 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3188 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3190 gen_movl_TN_reg(rd
, cpu_dst
);
3192 case 0x26: /* srl */
3193 if (IS_IMM
) { /* immediate */
3194 rs2
= GET_FIELDs(insn
, 20, 31);
3195 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3196 } else { /* register */
3197 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3198 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3200 gen_movl_TN_reg(rd
, cpu_dst
);
3202 case 0x27: /* sra */
3203 if (IS_IMM
) { /* immediate */
3204 rs2
= GET_FIELDs(insn
, 20, 31);
3205 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, rs2
& 0x1f);
3206 } else { /* register */
3207 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3208 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3210 gen_movl_TN_reg(rd
, cpu_dst
);
3217 tcg_gen_xor_tl(cpu_y
, cpu_src1
, cpu_src2
);
3219 #ifndef TARGET_SPARC64
3220 case 0x01 ... 0x0f: /* undefined in the
3224 case 0x10 ... 0x1f: /* implementation-dependent
3230 case 0x2: /* V9 wrccr */
3231 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3232 tcg_gen_helper_0_1(helper_wrccr
, cpu_dst
);
3234 case 0x3: /* V9 wrasi */
3235 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3236 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3238 case 0x6: /* V9 wrfprs */
3239 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3240 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3241 save_state(dc
, cpu_cond
);
3246 case 0xf: /* V9 sir, nop if user */
3247 #if !defined(CONFIG_USER_ONLY)
3252 case 0x13: /* Graphics Status */
3253 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3255 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3257 case 0x17: /* Tick compare */
3258 #if !defined(CONFIG_USER_ONLY)
3259 if (!supervisor(dc
))
3265 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3267 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3268 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3269 offsetof(CPUState
, tick
));
3270 tcg_gen_helper_0_2(helper_tick_set_limit
,
3271 r_tickptr
, cpu_tick_cmpr
);
3272 tcg_temp_free(r_tickptr
);
3275 case 0x18: /* System tick */
3276 #if !defined(CONFIG_USER_ONLY)
3277 if (!supervisor(dc
))
3283 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3285 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3286 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3287 offsetof(CPUState
, stick
));
3288 tcg_gen_helper_0_2(helper_tick_set_count
,
3289 r_tickptr
, cpu_dst
);
3290 tcg_temp_free(r_tickptr
);
3293 case 0x19: /* System tick compare */
3294 #if !defined(CONFIG_USER_ONLY)
3295 if (!supervisor(dc
))
3301 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3303 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3304 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3305 offsetof(CPUState
, stick
));
3306 tcg_gen_helper_0_2(helper_tick_set_limit
,
3307 r_tickptr
, cpu_stick_cmpr
);
3308 tcg_temp_free(r_tickptr
);
3312 case 0x10: /* Performance Control */
3313 case 0x11: /* Performance Instrumentation
3315 case 0x12: /* Dispatch Control */
3316 case 0x14: /* Softint set */
3317 case 0x15: /* Softint clear */
3318 case 0x16: /* Softint write */
3325 #if !defined(CONFIG_USER_ONLY)
3326 case 0x31: /* wrpsr, V9 saved, restored */
3328 if (!supervisor(dc
))
3330 #ifdef TARGET_SPARC64
3333 tcg_gen_helper_0_0(helper_saved
);
3336 tcg_gen_helper_0_0(helper_restored
);
3338 case 2: /* UA2005 allclean */
3339 case 3: /* UA2005 otherw */
3340 case 4: /* UA2005 normalw */
3341 case 5: /* UA2005 invalw */
3347 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3348 tcg_gen_helper_0_1(helper_wrpsr
, cpu_dst
);
3349 save_state(dc
, cpu_cond
);
3356 case 0x32: /* wrwim, V9 wrpr */
3358 if (!supervisor(dc
))
3360 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3361 #ifdef TARGET_SPARC64
3367 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3368 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3369 offsetof(CPUState
, tsptr
));
3370 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3371 offsetof(trap_state
, tpc
));
3372 tcg_temp_free(r_tsptr
);
3379 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3380 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3381 offsetof(CPUState
, tsptr
));
3382 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3383 offsetof(trap_state
, tnpc
));
3384 tcg_temp_free(r_tsptr
);
3391 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3392 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3393 offsetof(CPUState
, tsptr
));
3394 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3395 offsetof(trap_state
,
3397 tcg_temp_free(r_tsptr
);
3404 r_tsptr
= tcg_temp_new(TCG_TYPE_PTR
);
3405 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3406 offsetof(CPUState
, tsptr
));
3407 tcg_gen_st_i32(cpu_tmp0
, r_tsptr
,
3408 offsetof(trap_state
, tt
));
3409 tcg_temp_free(r_tsptr
);
3416 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3417 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3418 offsetof(CPUState
, tick
));
3419 tcg_gen_helper_0_2(helper_tick_set_count
,
3420 r_tickptr
, cpu_tmp0
);
3421 tcg_temp_free(r_tickptr
);
3425 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3428 save_state(dc
, cpu_cond
);
3429 tcg_gen_helper_0_1(helper_wrpstate
, cpu_tmp0
);
3435 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3436 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3437 offsetof(CPUSPARCState
, tl
));
3440 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3441 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3442 offsetof(CPUSPARCState
,
3446 tcg_gen_helper_0_1(helper_wrcwp
, cpu_tmp0
);
3449 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3450 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3451 offsetof(CPUSPARCState
,
3454 case 11: // canrestore
3455 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3456 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3457 offsetof(CPUSPARCState
,
3460 case 12: // cleanwin
3461 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3462 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3463 offsetof(CPUSPARCState
,
3466 case 13: // otherwin
3467 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3468 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3469 offsetof(CPUSPARCState
,
3473 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3474 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3475 offsetof(CPUSPARCState
,
3478 case 16: // UA2005 gl
3479 CHECK_IU_FEATURE(dc
, GL
);
3480 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3481 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3482 offsetof(CPUSPARCState
, gl
));
3484 case 26: // UA2005 strand status
3485 CHECK_IU_FEATURE(dc
, HYPV
);
3486 if (!hypervisor(dc
))
3488 tcg_gen_trunc_tl_i32(cpu_ssr
, cpu_tmp0
);
3494 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3495 if (dc
->def
->nwindows
!= 32)
3496 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3497 (1 << dc
->def
->nwindows
) - 1);
3498 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3502 case 0x33: /* wrtbr, UA2005 wrhpr */
3504 #ifndef TARGET_SPARC64
3505 if (!supervisor(dc
))
3507 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3509 CHECK_IU_FEATURE(dc
, HYPV
);
3510 if (!hypervisor(dc
))
3512 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3515 // XXX gen_op_wrhpstate();
3516 save_state(dc
, cpu_cond
);
3522 // XXX gen_op_wrhtstate();
3525 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3528 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3530 case 31: // hstick_cmpr
3534 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3535 r_tickptr
= tcg_temp_new(TCG_TYPE_PTR
);
3536 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3537 offsetof(CPUState
, hstick
));
3538 tcg_gen_helper_0_2(helper_tick_set_limit
,
3539 r_tickptr
, cpu_hstick_cmpr
);
3540 tcg_temp_free(r_tickptr
);
3543 case 6: // hver readonly
3551 #ifdef TARGET_SPARC64
3552 case 0x2c: /* V9 movcc */
3554 int cc
= GET_FIELD_SP(insn
, 11, 12);
3555 int cond
= GET_FIELD_SP(insn
, 14, 17);
3559 r_cond
= tcg_temp_new(TCG_TYPE_TL
);
3560 if (insn
& (1 << 18)) {
3562 gen_cond(r_cond
, 0, cond
);
3564 gen_cond(r_cond
, 1, cond
);
3568 gen_fcond(r_cond
, cc
, cond
);
3571 l1
= gen_new_label();
3573 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3574 if (IS_IMM
) { /* immediate */
3577 rs2
= GET_FIELD_SPs(insn
, 0, 10);
3578 r_const
= tcg_const_tl((int)rs2
);
3579 gen_movl_TN_reg(rd
, r_const
);
3580 tcg_temp_free(r_const
);
3582 rs2
= GET_FIELD_SP(insn
, 0, 4);
3583 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3584 gen_movl_TN_reg(rd
, cpu_tmp0
);
3587 tcg_temp_free(r_cond
);
3590 case 0x2d: /* V9 sdivx */
3591 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3592 gen_movl_TN_reg(rd
, cpu_dst
);
3594 case 0x2e: /* V9 popc */
3596 cpu_src2
= get_src2(insn
, cpu_src2
);
3597 tcg_gen_helper_1_1(helper_popc
, cpu_dst
,
3599 gen_movl_TN_reg(rd
, cpu_dst
);
3601 case 0x2f: /* V9 movr */
3603 int cond
= GET_FIELD_SP(insn
, 10, 12);
3606 cpu_src1
= get_src1(insn
, cpu_src1
);
3608 l1
= gen_new_label();
3610 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3612 if (IS_IMM
) { /* immediate */
3615 rs2
= GET_FIELD_SPs(insn
, 0, 9);
3616 r_const
= tcg_const_tl((int)rs2
);
3617 gen_movl_TN_reg(rd
, r_const
);
3618 tcg_temp_free(r_const
);
3620 rs2
= GET_FIELD_SP(insn
, 0, 4);
3621 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3622 gen_movl_TN_reg(rd
, cpu_tmp0
);
3632 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3633 #ifdef TARGET_SPARC64
3634 int opf
= GET_FIELD_SP(insn
, 5, 13);
3635 rs1
= GET_FIELD(insn
, 13, 17);
3636 rs2
= GET_FIELD(insn
, 27, 31);
3637 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3641 case 0x000: /* VIS I edge8cc */
3642 case 0x001: /* VIS II edge8n */
3643 case 0x002: /* VIS I edge8lcc */
3644 case 0x003: /* VIS II edge8ln */
3645 case 0x004: /* VIS I edge16cc */
3646 case 0x005: /* VIS II edge16n */
3647 case 0x006: /* VIS I edge16lcc */
3648 case 0x007: /* VIS II edge16ln */
3649 case 0x008: /* VIS I edge32cc */
3650 case 0x009: /* VIS II edge32n */
3651 case 0x00a: /* VIS I edge32lcc */
3652 case 0x00b: /* VIS II edge32ln */
3655 case 0x010: /* VIS I array8 */
3656 CHECK_FPU_FEATURE(dc
, VIS1
);
3657 cpu_src1
= get_src1(insn
, cpu_src1
);
3658 gen_movl_reg_TN(rs2
, cpu_src2
);
3659 tcg_gen_helper_1_2(helper_array8
, cpu_dst
, cpu_src1
,
3661 gen_movl_TN_reg(rd
, cpu_dst
);
3663 case 0x012: /* VIS I array16 */
3664 CHECK_FPU_FEATURE(dc
, VIS1
);
3665 cpu_src1
= get_src1(insn
, cpu_src1
);
3666 gen_movl_reg_TN(rs2
, cpu_src2
);
3667 tcg_gen_helper_1_2(helper_array8
, cpu_dst
, cpu_src1
,
3669 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3670 gen_movl_TN_reg(rd
, cpu_dst
);
3672 case 0x014: /* VIS I array32 */
3673 CHECK_FPU_FEATURE(dc
, VIS1
);
3674 cpu_src1
= get_src1(insn
, cpu_src1
);
3675 gen_movl_reg_TN(rs2
, cpu_src2
);
3676 tcg_gen_helper_1_2(helper_array8
, cpu_dst
, cpu_src1
,
3678 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3679 gen_movl_TN_reg(rd
, cpu_dst
);
3681 case 0x018: /* VIS I alignaddr */
3682 CHECK_FPU_FEATURE(dc
, VIS1
);
3683 cpu_src1
= get_src1(insn
, cpu_src1
);
3684 gen_movl_reg_TN(rs2
, cpu_src2
);
3685 tcg_gen_helper_1_2(helper_alignaddr
, cpu_dst
, cpu_src1
,
3687 gen_movl_TN_reg(rd
, cpu_dst
);
3689 case 0x019: /* VIS II bmask */
3690 case 0x01a: /* VIS I alignaddrl */
3693 case 0x020: /* VIS I fcmple16 */
3694 CHECK_FPU_FEATURE(dc
, VIS1
);
3695 gen_op_load_fpr_DT0(DFPREG(rs1
));
3696 gen_op_load_fpr_DT1(DFPREG(rs2
));
3697 tcg_gen_helper_0_0(helper_fcmple16
);
3698 gen_op_store_DT0_fpr(DFPREG(rd
));
3700 case 0x022: /* VIS I fcmpne16 */
3701 CHECK_FPU_FEATURE(dc
, VIS1
);
3702 gen_op_load_fpr_DT0(DFPREG(rs1
));
3703 gen_op_load_fpr_DT1(DFPREG(rs2
));
3704 tcg_gen_helper_0_0(helper_fcmpne16
);
3705 gen_op_store_DT0_fpr(DFPREG(rd
));
3707 case 0x024: /* VIS I fcmple32 */
3708 CHECK_FPU_FEATURE(dc
, VIS1
);
3709 gen_op_load_fpr_DT0(DFPREG(rs1
));
3710 gen_op_load_fpr_DT1(DFPREG(rs2
));
3711 tcg_gen_helper_0_0(helper_fcmple32
);
3712 gen_op_store_DT0_fpr(DFPREG(rd
));
3714 case 0x026: /* VIS I fcmpne32 */
3715 CHECK_FPU_FEATURE(dc
, VIS1
);
3716 gen_op_load_fpr_DT0(DFPREG(rs1
));
3717 gen_op_load_fpr_DT1(DFPREG(rs2
));
3718 tcg_gen_helper_0_0(helper_fcmpne32
);
3719 gen_op_store_DT0_fpr(DFPREG(rd
));
3721 case 0x028: /* VIS I fcmpgt16 */
3722 CHECK_FPU_FEATURE(dc
, VIS1
);
3723 gen_op_load_fpr_DT0(DFPREG(rs1
));
3724 gen_op_load_fpr_DT1(DFPREG(rs2
));
3725 tcg_gen_helper_0_0(helper_fcmpgt16
);
3726 gen_op_store_DT0_fpr(DFPREG(rd
));
3728 case 0x02a: /* VIS I fcmpeq16 */
3729 CHECK_FPU_FEATURE(dc
, VIS1
);
3730 gen_op_load_fpr_DT0(DFPREG(rs1
));
3731 gen_op_load_fpr_DT1(DFPREG(rs2
));
3732 tcg_gen_helper_0_0(helper_fcmpeq16
);
3733 gen_op_store_DT0_fpr(DFPREG(rd
));
3735 case 0x02c: /* VIS I fcmpgt32 */
3736 CHECK_FPU_FEATURE(dc
, VIS1
);
3737 gen_op_load_fpr_DT0(DFPREG(rs1
));
3738 gen_op_load_fpr_DT1(DFPREG(rs2
));
3739 tcg_gen_helper_0_0(helper_fcmpgt32
);
3740 gen_op_store_DT0_fpr(DFPREG(rd
));
3742 case 0x02e: /* VIS I fcmpeq32 */
3743 CHECK_FPU_FEATURE(dc
, VIS1
);
3744 gen_op_load_fpr_DT0(DFPREG(rs1
));
3745 gen_op_load_fpr_DT1(DFPREG(rs2
));
3746 tcg_gen_helper_0_0(helper_fcmpeq32
);
3747 gen_op_store_DT0_fpr(DFPREG(rd
));
3749 case 0x031: /* VIS I fmul8x16 */
3750 CHECK_FPU_FEATURE(dc
, VIS1
);
3751 gen_op_load_fpr_DT0(DFPREG(rs1
));
3752 gen_op_load_fpr_DT1(DFPREG(rs2
));
3753 tcg_gen_helper_0_0(helper_fmul8x16
);
3754 gen_op_store_DT0_fpr(DFPREG(rd
));
3756 case 0x033: /* VIS I fmul8x16au */
3757 CHECK_FPU_FEATURE(dc
, VIS1
);
3758 gen_op_load_fpr_DT0(DFPREG(rs1
));
3759 gen_op_load_fpr_DT1(DFPREG(rs2
));
3760 tcg_gen_helper_0_0(helper_fmul8x16au
);
3761 gen_op_store_DT0_fpr(DFPREG(rd
));
3763 case 0x035: /* VIS I fmul8x16al */
3764 CHECK_FPU_FEATURE(dc
, VIS1
);
3765 gen_op_load_fpr_DT0(DFPREG(rs1
));
3766 gen_op_load_fpr_DT1(DFPREG(rs2
));
3767 tcg_gen_helper_0_0(helper_fmul8x16al
);
3768 gen_op_store_DT0_fpr(DFPREG(rd
));
3770 case 0x036: /* VIS I fmul8sux16 */
3771 CHECK_FPU_FEATURE(dc
, VIS1
);
3772 gen_op_load_fpr_DT0(DFPREG(rs1
));
3773 gen_op_load_fpr_DT1(DFPREG(rs2
));
3774 tcg_gen_helper_0_0(helper_fmul8sux16
);
3775 gen_op_store_DT0_fpr(DFPREG(rd
));
3777 case 0x037: /* VIS I fmul8ulx16 */
3778 CHECK_FPU_FEATURE(dc
, VIS1
);
3779 gen_op_load_fpr_DT0(DFPREG(rs1
));
3780 gen_op_load_fpr_DT1(DFPREG(rs2
));
3781 tcg_gen_helper_0_0(helper_fmul8ulx16
);
3782 gen_op_store_DT0_fpr(DFPREG(rd
));
3784 case 0x038: /* VIS I fmuld8sux16 */
3785 CHECK_FPU_FEATURE(dc
, VIS1
);
3786 gen_op_load_fpr_DT0(DFPREG(rs1
));
3787 gen_op_load_fpr_DT1(DFPREG(rs2
));
3788 tcg_gen_helper_0_0(helper_fmuld8sux16
);
3789 gen_op_store_DT0_fpr(DFPREG(rd
));
3791 case 0x039: /* VIS I fmuld8ulx16 */
3792 CHECK_FPU_FEATURE(dc
, VIS1
);
3793 gen_op_load_fpr_DT0(DFPREG(rs1
));
3794 gen_op_load_fpr_DT1(DFPREG(rs2
));
3795 tcg_gen_helper_0_0(helper_fmuld8ulx16
);
3796 gen_op_store_DT0_fpr(DFPREG(rd
));
3798 case 0x03a: /* VIS I fpack32 */
3799 case 0x03b: /* VIS I fpack16 */
3800 case 0x03d: /* VIS I fpackfix */
3801 case 0x03e: /* VIS I pdist */
3804 case 0x048: /* VIS I faligndata */
3805 CHECK_FPU_FEATURE(dc
, VIS1
);
3806 gen_op_load_fpr_DT0(DFPREG(rs1
));
3807 gen_op_load_fpr_DT1(DFPREG(rs2
));
3808 tcg_gen_helper_0_0(helper_faligndata
);
3809 gen_op_store_DT0_fpr(DFPREG(rd
));
3811 case 0x04b: /* VIS I fpmerge */
3812 CHECK_FPU_FEATURE(dc
, VIS1
);
3813 gen_op_load_fpr_DT0(DFPREG(rs1
));
3814 gen_op_load_fpr_DT1(DFPREG(rs2
));
3815 tcg_gen_helper_0_0(helper_fpmerge
);
3816 gen_op_store_DT0_fpr(DFPREG(rd
));
3818 case 0x04c: /* VIS II bshuffle */
3821 case 0x04d: /* VIS I fexpand */
3822 CHECK_FPU_FEATURE(dc
, VIS1
);
3823 gen_op_load_fpr_DT0(DFPREG(rs1
));
3824 gen_op_load_fpr_DT1(DFPREG(rs2
));
3825 tcg_gen_helper_0_0(helper_fexpand
);
3826 gen_op_store_DT0_fpr(DFPREG(rd
));
3828 case 0x050: /* VIS I fpadd16 */
3829 CHECK_FPU_FEATURE(dc
, VIS1
);
3830 gen_op_load_fpr_DT0(DFPREG(rs1
));
3831 gen_op_load_fpr_DT1(DFPREG(rs2
));
3832 tcg_gen_helper_0_0(helper_fpadd16
);
3833 gen_op_store_DT0_fpr(DFPREG(rd
));
3835 case 0x051: /* VIS I fpadd16s */
3836 CHECK_FPU_FEATURE(dc
, VIS1
);
3837 tcg_gen_helper_1_2(helper_fpadd16s
, cpu_fpr
[rd
],
3838 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3840 case 0x052: /* VIS I fpadd32 */
3841 CHECK_FPU_FEATURE(dc
, VIS1
);
3842 gen_op_load_fpr_DT0(DFPREG(rs1
));
3843 gen_op_load_fpr_DT1(DFPREG(rs2
));
3844 tcg_gen_helper_0_0(helper_fpadd32
);
3845 gen_op_store_DT0_fpr(DFPREG(rd
));
3847 case 0x053: /* VIS I fpadd32s */
3848 CHECK_FPU_FEATURE(dc
, VIS1
);
3849 tcg_gen_helper_1_2(helper_fpadd32s
, cpu_fpr
[rd
],
3850 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3852 case 0x054: /* VIS I fpsub16 */
3853 CHECK_FPU_FEATURE(dc
, VIS1
);
3854 gen_op_load_fpr_DT0(DFPREG(rs1
));
3855 gen_op_load_fpr_DT1(DFPREG(rs2
));
3856 tcg_gen_helper_0_0(helper_fpsub16
);
3857 gen_op_store_DT0_fpr(DFPREG(rd
));
3859 case 0x055: /* VIS I fpsub16s */
3860 CHECK_FPU_FEATURE(dc
, VIS1
);
3861 tcg_gen_helper_1_2(helper_fpsub16s
, cpu_fpr
[rd
],
3862 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3864 case 0x056: /* VIS I fpsub32 */
3865 CHECK_FPU_FEATURE(dc
, VIS1
);
3866 gen_op_load_fpr_DT0(DFPREG(rs1
));
3867 gen_op_load_fpr_DT1(DFPREG(rs2
));
3868 tcg_gen_helper_0_0(helper_fpsub32
);
3869 gen_op_store_DT0_fpr(DFPREG(rd
));
3871 case 0x057: /* VIS I fpsub32s */
3872 CHECK_FPU_FEATURE(dc
, VIS1
);
3873 tcg_gen_helper_1_2(helper_fpsub32s
, cpu_fpr
[rd
],
3874 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3876 case 0x060: /* VIS I fzero */
3877 CHECK_FPU_FEATURE(dc
, VIS1
);
3878 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
3879 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
3881 case 0x061: /* VIS I fzeros */
3882 CHECK_FPU_FEATURE(dc
, VIS1
);
3883 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
3885 case 0x062: /* VIS I fnor */
3886 CHECK_FPU_FEATURE(dc
, VIS1
);
3887 tcg_gen_or_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3888 cpu_fpr
[DFPREG(rs2
)]);
3889 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
, -1);
3890 tcg_gen_or_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3891 cpu_fpr
[DFPREG(rs2
) + 1]);
3892 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
, -1);
3894 case 0x063: /* VIS I fnors */
3895 CHECK_FPU_FEATURE(dc
, VIS1
);
3896 tcg_gen_or_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3897 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_tmp32
, -1);
3899 case 0x064: /* VIS I fandnot2 */
3900 CHECK_FPU_FEATURE(dc
, VIS1
);
3901 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)], -1);
3902 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3903 cpu_fpr
[DFPREG(rs2
)]);
3904 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1], -1);
3905 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3906 cpu_fpr
[DFPREG(rs2
) + 1]);
3908 case 0x065: /* VIS I fandnot2s */
3909 CHECK_FPU_FEATURE(dc
, VIS1
);
3910 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs1
], -1);
3911 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs2
]);
3913 case 0x066: /* VIS I fnot2 */
3914 CHECK_FPU_FEATURE(dc
, VIS1
);
3915 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
3917 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1],
3918 cpu_fpr
[DFPREG(rs2
) + 1], -1);
3920 case 0x067: /* VIS I fnot2s */
3921 CHECK_FPU_FEATURE(dc
, VIS1
);
3922 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], -1);
3924 case 0x068: /* VIS I fandnot1 */
3925 CHECK_FPU_FEATURE(dc
, VIS1
);
3926 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3927 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3928 cpu_fpr
[DFPREG(rs1
)]);
3929 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3930 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3931 cpu_fpr
[DFPREG(rs1
) + 1]);
3933 case 0x069: /* VIS I fandnot1s */
3934 CHECK_FPU_FEATURE(dc
, VIS1
);
3935 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
3936 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
3938 case 0x06a: /* VIS I fnot1 */
3939 CHECK_FPU_FEATURE(dc
, VIS1
);
3940 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3942 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1],
3943 cpu_fpr
[DFPREG(rs1
) + 1], -1);
3945 case 0x06b: /* VIS I fnot1s */
3946 CHECK_FPU_FEATURE(dc
, VIS1
);
3947 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], -1);
3949 case 0x06c: /* VIS I fxor */
3950 CHECK_FPU_FEATURE(dc
, VIS1
);
3951 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3952 cpu_fpr
[DFPREG(rs2
)]);
3953 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
3954 cpu_fpr
[DFPREG(rs1
) + 1],
3955 cpu_fpr
[DFPREG(rs2
) + 1]);
3957 case 0x06d: /* VIS I fxors */
3958 CHECK_FPU_FEATURE(dc
, VIS1
);
3959 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3961 case 0x06e: /* VIS I fnand */
3962 CHECK_FPU_FEATURE(dc
, VIS1
);
3963 tcg_gen_and_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
3964 cpu_fpr
[DFPREG(rs2
)]);
3965 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
, -1);
3966 tcg_gen_and_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
3967 cpu_fpr
[DFPREG(rs2
) + 1]);
3968 tcg_gen_xori_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
, -1);
3970 case 0x06f: /* VIS I fnands */
3971 CHECK_FPU_FEATURE(dc
, VIS1
);
3972 tcg_gen_and_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3973 tcg_gen_xori_i32(cpu_fpr
[rd
], cpu_tmp32
, -1);
3975 case 0x070: /* VIS I fand */
3976 CHECK_FPU_FEATURE(dc
, VIS1
);
3977 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
3978 cpu_fpr
[DFPREG(rs2
)]);
3979 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
3980 cpu_fpr
[DFPREG(rs1
) + 1],
3981 cpu_fpr
[DFPREG(rs2
) + 1]);
3983 case 0x071: /* VIS I fands */
3984 CHECK_FPU_FEATURE(dc
, VIS1
);
3985 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3987 case 0x072: /* VIS I fxnor */
3988 CHECK_FPU_FEATURE(dc
, VIS1
);
3989 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
3990 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
3991 cpu_fpr
[DFPREG(rs1
)]);
3992 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
3993 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
3994 cpu_fpr
[DFPREG(rs1
) + 1]);
3996 case 0x073: /* VIS I fxnors */
3997 CHECK_FPU_FEATURE(dc
, VIS1
);
3998 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
3999 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4001 case 0x074: /* VIS I fsrc1 */
4002 CHECK_FPU_FEATURE(dc
, VIS1
);
4003 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4004 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
4005 cpu_fpr
[DFPREG(rs1
) + 1]);
4007 case 0x075: /* VIS I fsrc1s */
4008 CHECK_FPU_FEATURE(dc
, VIS1
);
4009 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4011 case 0x076: /* VIS I fornot2 */
4012 CHECK_FPU_FEATURE(dc
, VIS1
);
4013 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)], -1);
4014 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4015 cpu_fpr
[DFPREG(rs2
)]);
4016 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1], -1);
4017 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4018 cpu_fpr
[DFPREG(rs2
) + 1]);
4020 case 0x077: /* VIS I fornot2s */
4021 CHECK_FPU_FEATURE(dc
, VIS1
);
4022 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs1
], -1);
4023 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs2
]);
4025 case 0x078: /* VIS I fsrc2 */
4026 CHECK_FPU_FEATURE(dc
, VIS1
);
4027 gen_op_load_fpr_DT0(DFPREG(rs2
));
4028 gen_op_store_DT0_fpr(DFPREG(rd
));
4030 case 0x079: /* VIS I fsrc2s */
4031 CHECK_FPU_FEATURE(dc
, VIS1
);
4032 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4034 case 0x07a: /* VIS I fornot1 */
4035 CHECK_FPU_FEATURE(dc
, VIS1
);
4036 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
4037 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4038 cpu_fpr
[DFPREG(rs1
)]);
4039 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
4040 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4041 cpu_fpr
[DFPREG(rs1
) + 1]);
4043 case 0x07b: /* VIS I fornot1s */
4044 CHECK_FPU_FEATURE(dc
, VIS1
);
4045 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
4046 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4048 case 0x07c: /* VIS I for */
4049 CHECK_FPU_FEATURE(dc
, VIS1
);
4050 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4051 cpu_fpr
[DFPREG(rs2
)]);
4052 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
4053 cpu_fpr
[DFPREG(rs1
) + 1],
4054 cpu_fpr
[DFPREG(rs2
) + 1]);
4056 case 0x07d: /* VIS I fors */
4057 CHECK_FPU_FEATURE(dc
, VIS1
);
4058 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4060 case 0x07e: /* VIS I fone */
4061 CHECK_FPU_FEATURE(dc
, VIS1
);
4062 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
4063 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
4065 case 0x07f: /* VIS I fones */
4066 CHECK_FPU_FEATURE(dc
, VIS1
);
4067 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
4069 case 0x080: /* VIS I shutdown */
4070 case 0x081: /* VIS II siam */
4079 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4080 #ifdef TARGET_SPARC64
4085 #ifdef TARGET_SPARC64
4086 } else if (xop
== 0x39) { /* V9 return */
4089 save_state(dc
, cpu_cond
);
4090 cpu_src1
= get_src1(insn
, cpu_src1
);
4091 if (IS_IMM
) { /* immediate */
4092 rs2
= GET_FIELDs(insn
, 19, 31);
4093 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, (int)rs2
);
4094 } else { /* register */
4095 rs2
= GET_FIELD(insn
, 27, 31);
4097 gen_movl_reg_TN(rs2
, cpu_src2
);
4098 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4100 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4102 tcg_gen_helper_0_0(helper_restore
);
4103 gen_mov_pc_npc(dc
, cpu_cond
);
4104 r_const
= tcg_const_i32(3);
4105 tcg_gen_helper_0_2(helper_check_align
, cpu_dst
, r_const
);
4106 tcg_temp_free(r_const
);
4107 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4108 dc
->npc
= DYNAMIC_PC
;
4112 cpu_src1
= get_src1(insn
, cpu_src1
);
4113 if (IS_IMM
) { /* immediate */
4114 rs2
= GET_FIELDs(insn
, 19, 31);
4115 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, (int)rs2
);
4116 } else { /* register */
4117 rs2
= GET_FIELD(insn
, 27, 31);
4119 gen_movl_reg_TN(rs2
, cpu_src2
);
4120 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4122 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4125 case 0x38: /* jmpl */
4129 r_const
= tcg_const_tl(dc
->pc
);
4130 gen_movl_TN_reg(rd
, r_const
);
4131 tcg_temp_free(r_const
);
4132 gen_mov_pc_npc(dc
, cpu_cond
);
4133 r_const
= tcg_const_i32(3);
4134 tcg_gen_helper_0_2(helper_check_align
, cpu_dst
,
4136 tcg_temp_free(r_const
);
4137 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4138 dc
->npc
= DYNAMIC_PC
;
4141 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4142 case 0x39: /* rett, V9 return */
4146 if (!supervisor(dc
))
4148 gen_mov_pc_npc(dc
, cpu_cond
);
4149 r_const
= tcg_const_i32(3);
4150 tcg_gen_helper_0_2(helper_check_align
, cpu_dst
,
4152 tcg_temp_free(r_const
);
4153 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4154 dc
->npc
= DYNAMIC_PC
;
4155 tcg_gen_helper_0_0(helper_rett
);
4159 case 0x3b: /* flush */
4160 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4162 tcg_gen_helper_0_1(helper_flush
, cpu_dst
);
4164 case 0x3c: /* save */
4165 save_state(dc
, cpu_cond
);
4166 tcg_gen_helper_0_0(helper_save
);
4167 gen_movl_TN_reg(rd
, cpu_dst
);
4169 case 0x3d: /* restore */
4170 save_state(dc
, cpu_cond
);
4171 tcg_gen_helper_0_0(helper_restore
);
4172 gen_movl_TN_reg(rd
, cpu_dst
);
4174 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4175 case 0x3e: /* V9 done/retry */
4179 if (!supervisor(dc
))
4181 dc
->npc
= DYNAMIC_PC
;
4182 dc
->pc
= DYNAMIC_PC
;
4183 tcg_gen_helper_0_0(helper_done
);
4186 if (!supervisor(dc
))
4188 dc
->npc
= DYNAMIC_PC
;
4189 dc
->pc
= DYNAMIC_PC
;
4190 tcg_gen_helper_0_0(helper_retry
);
4205 case 3: /* load/store instructions */
4207 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4209 cpu_src1
= get_src1(insn
, cpu_src1
);
4210 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4211 rs2
= GET_FIELD(insn
, 27, 31);
4212 gen_movl_reg_TN(rs2
, cpu_src2
);
4213 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4214 } else if (IS_IMM
) { /* immediate */
4215 rs2
= GET_FIELDs(insn
, 19, 31);
4216 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, (int)rs2
);
4217 } else { /* register */
4218 rs2
= GET_FIELD(insn
, 27, 31);
4220 gen_movl_reg_TN(rs2
, cpu_src2
);
4221 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4223 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4225 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4226 (xop
> 0x17 && xop
<= 0x1d ) ||
4227 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4229 case 0x0: /* load unsigned word */
4230 gen_address_mask(dc
, cpu_addr
);
4231 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4233 case 0x1: /* load unsigned byte */
4234 gen_address_mask(dc
, cpu_addr
);
4235 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4237 case 0x2: /* load unsigned halfword */
4238 gen_address_mask(dc
, cpu_addr
);
4239 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4241 case 0x3: /* load double word */
4247 save_state(dc
, cpu_cond
);
4248 r_const
= tcg_const_i32(7);
4249 tcg_gen_helper_0_2(helper_check_align
, cpu_addr
,
4250 r_const
); // XXX remove
4251 tcg_temp_free(r_const
);
4252 gen_address_mask(dc
, cpu_addr
);
4253 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4254 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4255 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4256 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4257 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4258 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4259 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4262 case 0x9: /* load signed byte */
4263 gen_address_mask(dc
, cpu_addr
);
4264 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4266 case 0xa: /* load signed halfword */
4267 gen_address_mask(dc
, cpu_addr
);
4268 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4270 case 0xd: /* ldstub -- XXX: should be atomically */
4274 gen_address_mask(dc
, cpu_addr
);
4275 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4276 r_const
= tcg_const_tl(0xff);
4277 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4278 tcg_temp_free(r_const
);
4281 case 0x0f: /* swap register with memory. Also
4283 CHECK_IU_FEATURE(dc
, SWAP
);
4284 gen_movl_reg_TN(rd
, cpu_val
);
4285 gen_address_mask(dc
, cpu_addr
);
4286 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4287 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4288 tcg_gen_extu_i32_tl(cpu_val
, cpu_tmp32
);
4290 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4291 case 0x10: /* load word alternate */
4292 #ifndef TARGET_SPARC64
4295 if (!supervisor(dc
))
4298 save_state(dc
, cpu_cond
);
4299 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4301 case 0x11: /* load unsigned byte alternate */
4302 #ifndef TARGET_SPARC64
4305 if (!supervisor(dc
))
4308 save_state(dc
, cpu_cond
);
4309 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4311 case 0x12: /* load unsigned halfword alternate */
4312 #ifndef TARGET_SPARC64
4315 if (!supervisor(dc
))
4318 save_state(dc
, cpu_cond
);
4319 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4321 case 0x13: /* load double word alternate */
4322 #ifndef TARGET_SPARC64
4325 if (!supervisor(dc
))
4330 save_state(dc
, cpu_cond
);
4331 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4333 case 0x19: /* load signed byte alternate */
4334 #ifndef TARGET_SPARC64
4337 if (!supervisor(dc
))
4340 save_state(dc
, cpu_cond
);
4341 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4343 case 0x1a: /* load signed halfword alternate */
4344 #ifndef TARGET_SPARC64
4347 if (!supervisor(dc
))
4350 save_state(dc
, cpu_cond
);
4351 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4353 case 0x1d: /* ldstuba -- XXX: should be atomically */
4354 #ifndef TARGET_SPARC64
4357 if (!supervisor(dc
))
4360 save_state(dc
, cpu_cond
);
4361 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4363 case 0x1f: /* swap reg with alt. memory. Also
4365 CHECK_IU_FEATURE(dc
, SWAP
);
4366 #ifndef TARGET_SPARC64
4369 if (!supervisor(dc
))
4372 save_state(dc
, cpu_cond
);
4373 gen_movl_reg_TN(rd
, cpu_val
);
4374 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4377 #ifndef TARGET_SPARC64
4378 case 0x30: /* ldc */
4379 case 0x31: /* ldcsr */
4380 case 0x33: /* lddc */
4384 #ifdef TARGET_SPARC64
4385 case 0x08: /* V9 ldsw */
4386 gen_address_mask(dc
, cpu_addr
);
4387 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4389 case 0x0b: /* V9 ldx */
4390 gen_address_mask(dc
, cpu_addr
);
4391 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4393 case 0x18: /* V9 ldswa */
4394 save_state(dc
, cpu_cond
);
4395 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4397 case 0x1b: /* V9 ldxa */
4398 save_state(dc
, cpu_cond
);
4399 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4401 case 0x2d: /* V9 prefetch, no effect */
4403 case 0x30: /* V9 ldfa */
4404 save_state(dc
, cpu_cond
);
4405 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4407 case 0x33: /* V9 lddfa */
4408 save_state(dc
, cpu_cond
);
4409 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4411 case 0x3d: /* V9 prefetcha, no effect */
4413 case 0x32: /* V9 ldqfa */
4414 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4415 save_state(dc
, cpu_cond
);
4416 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4422 gen_movl_TN_reg(rd
, cpu_val
);
4423 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4426 } else if (xop
>= 0x20 && xop
< 0x24) {
4427 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4429 save_state(dc
, cpu_cond
);
4431 case 0x20: /* load fpreg */
4432 gen_address_mask(dc
, cpu_addr
);
4433 tcg_gen_qemu_ld32u(cpu_fpr
[rd
], cpu_addr
, dc
->mem_idx
);
4435 case 0x21: /* ldfsr, V9 ldxfsr */
4436 #ifdef TARGET_SPARC64
4437 gen_address_mask(dc
, cpu_addr
);
4439 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4440 tcg_gen_helper_0_1(helper_ldxfsr
, cpu_tmp64
);
4444 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4445 tcg_gen_helper_0_1(helper_ldfsr
, cpu_tmp32
);
4449 case 0x22: /* load quad fpreg */
4453 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4454 r_const
= tcg_const_i32(dc
->mem_idx
);
4455 tcg_gen_helper_0_2(helper_ldqf
, cpu_addr
, r_const
);
4456 tcg_temp_free(r_const
);
4457 gen_op_store_QT0_fpr(QFPREG(rd
));
4460 case 0x23: /* load double fpreg */
4464 r_const
= tcg_const_i32(dc
->mem_idx
);
4465 tcg_gen_helper_0_2(helper_lddf
, cpu_addr
, r_const
);
4466 tcg_temp_free(r_const
);
4467 gen_op_store_DT0_fpr(DFPREG(rd
));
4473 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) || \
4474 xop
== 0xe || xop
== 0x1e) {
4475 gen_movl_reg_TN(rd
, cpu_val
);
4477 case 0x4: /* store word */
4478 gen_address_mask(dc
, cpu_addr
);
4479 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4481 case 0x5: /* store byte */
4482 gen_address_mask(dc
, cpu_addr
);
4483 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4485 case 0x6: /* store halfword */
4486 gen_address_mask(dc
, cpu_addr
);
4487 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4489 case 0x7: /* store double word */
4493 TCGv r_low
, r_const
;
4495 save_state(dc
, cpu_cond
);
4496 gen_address_mask(dc
, cpu_addr
);
4497 r_const
= tcg_const_i32(7);
4498 tcg_gen_helper_0_2(helper_check_align
, cpu_addr
,
4499 r_const
); // XXX remove
4500 tcg_temp_free(r_const
);
4501 r_low
= tcg_temp_new(TCG_TYPE_TL
);
4502 gen_movl_reg_TN(rd
+ 1, r_low
);
4503 tcg_gen_helper_1_2(helper_pack64
, cpu_tmp64
, cpu_val
,
4505 tcg_temp_free(r_low
);
4506 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4509 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4510 case 0x14: /* store word alternate */
4511 #ifndef TARGET_SPARC64
4514 if (!supervisor(dc
))
4517 save_state(dc
, cpu_cond
);
4518 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4520 case 0x15: /* store byte alternate */
4521 #ifndef TARGET_SPARC64
4524 if (!supervisor(dc
))
4527 save_state(dc
, cpu_cond
);
4528 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4530 case 0x16: /* store halfword alternate */
4531 #ifndef TARGET_SPARC64
4534 if (!supervisor(dc
))
4537 save_state(dc
, cpu_cond
);
4538 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4540 case 0x17: /* store double word alternate */
4541 #ifndef TARGET_SPARC64
4544 if (!supervisor(dc
))
4550 save_state(dc
, cpu_cond
);
4551 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4555 #ifdef TARGET_SPARC64
4556 case 0x0e: /* V9 stx */
4557 gen_address_mask(dc
, cpu_addr
);
4558 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4560 case 0x1e: /* V9 stxa */
4561 save_state(dc
, cpu_cond
);
4562 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4568 } else if (xop
> 0x23 && xop
< 0x28) {
4569 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4571 save_state(dc
, cpu_cond
);
4573 case 0x24: /* store fpreg */
4574 gen_address_mask(dc
, cpu_addr
);
4575 tcg_gen_qemu_st32(cpu_fpr
[rd
], cpu_addr
, dc
->mem_idx
);
4577 case 0x25: /* stfsr, V9 stxfsr */
4578 #ifdef TARGET_SPARC64
4579 gen_address_mask(dc
, cpu_addr
);
4580 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4582 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4584 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp64
);
4585 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4588 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4589 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4593 #ifdef TARGET_SPARC64
4594 /* V9 stqf, store quad fpreg */
4598 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4599 gen_op_load_fpr_QT0(QFPREG(rd
));
4600 r_const
= tcg_const_i32(dc
->mem_idx
);
4601 tcg_gen_helper_0_2(helper_stqf
, cpu_addr
, r_const
);
4602 tcg_temp_free(r_const
);
4605 #else /* !TARGET_SPARC64 */
4606 /* stdfq, store floating point queue */
4607 #if defined(CONFIG_USER_ONLY)
4610 if (!supervisor(dc
))
4612 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4617 case 0x27: /* store double fpreg */
4621 gen_op_load_fpr_DT0(DFPREG(rd
));
4622 r_const
= tcg_const_i32(dc
->mem_idx
);
4623 tcg_gen_helper_0_2(helper_stdf
, cpu_addr
, r_const
);
4624 tcg_temp_free(r_const
);
4630 } else if (xop
> 0x33 && xop
< 0x3f) {
4631 save_state(dc
, cpu_cond
);
4633 #ifdef TARGET_SPARC64
4634 case 0x34: /* V9 stfa */
4635 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4637 case 0x36: /* V9 stqfa */
4641 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4642 r_const
= tcg_const_i32(7);
4643 tcg_gen_helper_0_2(helper_check_align
, cpu_addr
,
4645 tcg_temp_free(r_const
);
4646 gen_op_load_fpr_QT0(QFPREG(rd
));
4647 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4650 case 0x37: /* V9 stdfa */
4651 gen_op_load_fpr_DT0(DFPREG(rd
));
4652 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4654 case 0x3c: /* V9 casa */
4655 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4656 gen_movl_TN_reg(rd
, cpu_val
);
4658 case 0x3e: /* V9 casxa */
4659 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4660 gen_movl_TN_reg(rd
, cpu_val
);
4663 case 0x34: /* stc */
4664 case 0x35: /* stcsr */
4665 case 0x36: /* stdcq */
4666 case 0x37: /* stdc */
4678 /* default case for non jump instructions */
4679 if (dc
->npc
== DYNAMIC_PC
) {
4680 dc
->pc
= DYNAMIC_PC
;
4682 } else if (dc
->npc
== JUMP_PC
) {
4683 /* we can do a static jump */
4684 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4688 dc
->npc
= dc
->npc
+ 4;
4696 save_state(dc
, cpu_cond
);
4697 r_const
= tcg_const_i32(TT_ILL_INSN
);
4698 tcg_gen_helper_0_1(raise_exception
, r_const
);
4699 tcg_temp_free(r_const
);
4707 save_state(dc
, cpu_cond
);
4708 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4709 tcg_gen_helper_0_1(raise_exception
, r_const
);
4710 tcg_temp_free(r_const
);
4714 #if !defined(CONFIG_USER_ONLY)
4719 save_state(dc
, cpu_cond
);
4720 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4721 tcg_gen_helper_0_1(raise_exception
, r_const
);
4722 tcg_temp_free(r_const
);
4728 save_state(dc
, cpu_cond
);
4729 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4732 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4734 save_state(dc
, cpu_cond
);
4735 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4739 #ifndef TARGET_SPARC64
4744 save_state(dc
, cpu_cond
);
4745 r_const
= tcg_const_i32(TT_NCP_INSN
);
4746 tcg_gen_helper_0_1(raise_exception
, r_const
);
4747 tcg_temp_free(r_const
);
4754 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4755 int spc
, CPUSPARCState
*env
)
4757 target_ulong pc_start
, last_pc
;
4758 uint16_t *gen_opc_end
;
4759 DisasContext dc1
, *dc
= &dc1
;
4764 memset(dc
, 0, sizeof(DisasContext
));
4769 dc
->npc
= (target_ulong
) tb
->cs_base
;
4770 dc
->mem_idx
= cpu_mmu_index(env
);
4772 if ((dc
->def
->features
& CPU_FEATURE_FLOAT
))
4773 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4775 dc
->fpu_enabled
= 0;
4776 #ifdef TARGET_SPARC64
4777 dc
->address_mask_32bit
= env
->pstate
& PS_AM
;
4779 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4781 cpu_tmp0
= tcg_temp_new(TCG_TYPE_TL
);
4782 cpu_tmp32
= tcg_temp_new(TCG_TYPE_I32
);
4783 cpu_tmp64
= tcg_temp_new(TCG_TYPE_I64
);
4785 cpu_dst
= tcg_temp_local_new(TCG_TYPE_TL
);
4788 cpu_val
= tcg_temp_local_new(TCG_TYPE_TL
);
4789 cpu_addr
= tcg_temp_local_new(TCG_TYPE_TL
);
4792 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
4794 max_insns
= CF_COUNT_MASK
;
4797 if (env
->nb_breakpoints
> 0) {
4798 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
4799 if (env
->breakpoints
[j
] == dc
->pc
) {
4800 if (dc
->pc
!= pc_start
)
4801 save_state(dc
, cpu_cond
);
4802 tcg_gen_helper_0_0(helper_debug
);
4811 fprintf(logfile
, "Search PC...\n");
4812 j
= gen_opc_ptr
- gen_opc_buf
;
4816 gen_opc_instr_start
[lj
++] = 0;
4817 gen_opc_pc
[lj
] = dc
->pc
;
4818 gen_opc_npc
[lj
] = dc
->npc
;
4819 gen_opc_instr_start
[lj
] = 1;
4820 gen_opc_icount
[lj
] = num_insns
;
4823 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
4826 disas_sparc_insn(dc
);
4831 /* if the next PC is different, we abort now */
4832 if (dc
->pc
!= (last_pc
+ 4))
4834 /* if we reach a page boundary, we stop generation so that the
4835 PC of a TT_TFAULT exception is always in the right page */
4836 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4838 /* if single step mode, we generate only one instruction and
4839 generate an exception */
4840 if (env
->singlestep_enabled
) {
4841 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4845 } while ((gen_opc_ptr
< gen_opc_end
) &&
4846 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
4847 num_insns
< max_insns
);
4850 tcg_temp_free(cpu_addr
);
4851 tcg_temp_free(cpu_val
);
4852 tcg_temp_free(cpu_dst
);
4853 tcg_temp_free(cpu_tmp64
);
4854 tcg_temp_free(cpu_tmp32
);
4855 tcg_temp_free(cpu_tmp0
);
4856 if (tb
->cflags
& CF_LAST_IO
)
4859 if (dc
->pc
!= DYNAMIC_PC
&&
4860 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
4861 /* static PC and NPC: we can use direct chaining */
4862 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
4864 if (dc
->pc
!= DYNAMIC_PC
)
4865 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4866 save_npc(dc
, cpu_cond
);
4870 gen_icount_end(tb
, num_insns
);
4871 *gen_opc_ptr
= INDEX_op_end
;
4873 j
= gen_opc_ptr
- gen_opc_buf
;
4876 gen_opc_instr_start
[lj
++] = 0;
4882 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
4883 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
4885 tb
->size
= last_pc
+ 4 - pc_start
;
4886 tb
->icount
= num_insns
;
4889 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
4890 fprintf(logfile
, "--------------\n");
4891 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
4892 target_disas(logfile
, pc_start
, last_pc
+ 4 - pc_start
, 0);
4893 fprintf(logfile
, "\n");
4898 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
4900 gen_intermediate_code_internal(tb
, 0, env
);
4903 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
4905 gen_intermediate_code_internal(tb
, 1, env
);
4908 void gen_intermediate_code_init(CPUSPARCState
*env
)
4912 static const char * const gregnames
[8] = {
4913 NULL
, // g0 not used
4922 static const char * const fregnames
[64] = {
4923 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
4924 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
4925 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
4926 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
4927 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
4928 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
4929 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
4930 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
4933 /* init various static tables */
4937 cpu_env
= tcg_global_reg_new(TCG_TYPE_PTR
, TCG_AREG0
, "env");
4938 cpu_regwptr
= tcg_global_mem_new(TCG_TYPE_PTR
, TCG_AREG0
,
4939 offsetof(CPUState
, regwptr
),
4941 #ifdef TARGET_SPARC64
4942 cpu_xcc
= tcg_global_mem_new(TCG_TYPE_I32
,
4943 TCG_AREG0
, offsetof(CPUState
, xcc
),
4945 cpu_asi
= tcg_global_mem_new(TCG_TYPE_I32
,
4946 TCG_AREG0
, offsetof(CPUState
, asi
),
4948 cpu_fprs
= tcg_global_mem_new(TCG_TYPE_I32
,
4949 TCG_AREG0
, offsetof(CPUState
, fprs
),
4951 cpu_gsr
= tcg_global_mem_new(TCG_TYPE_TL
,
4952 TCG_AREG0
, offsetof(CPUState
, gsr
),
4954 cpu_tick_cmpr
= tcg_global_mem_new(TCG_TYPE_TL
,
4956 offsetof(CPUState
, tick_cmpr
),
4958 cpu_stick_cmpr
= tcg_global_mem_new(TCG_TYPE_TL
,
4960 offsetof(CPUState
, stick_cmpr
),
4962 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_TYPE_TL
,
4964 offsetof(CPUState
, hstick_cmpr
),
4966 cpu_hintp
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4967 offsetof(CPUState
, hintp
),
4969 cpu_htba
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4970 offsetof(CPUState
, htba
),
4972 cpu_hver
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4973 offsetof(CPUState
, hver
),
4975 cpu_ssr
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4976 offsetof(CPUState
, ssr
), "ssr");
4977 cpu_ver
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4978 offsetof(CPUState
, version
), "ver");
4980 cpu_wim
= tcg_global_mem_new(TCG_TYPE_I32
,
4981 TCG_AREG0
, offsetof(CPUState
, wim
),
4984 cpu_cond
= tcg_global_mem_new(TCG_TYPE_TL
,
4985 TCG_AREG0
, offsetof(CPUState
, cond
),
4987 cpu_cc_src
= tcg_global_mem_new(TCG_TYPE_TL
,
4988 TCG_AREG0
, offsetof(CPUState
, cc_src
),
4990 cpu_cc_src2
= tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
4991 offsetof(CPUState
, cc_src2
),
4993 cpu_cc_dst
= tcg_global_mem_new(TCG_TYPE_TL
,
4994 TCG_AREG0
, offsetof(CPUState
, cc_dst
),
4996 cpu_psr
= tcg_global_mem_new(TCG_TYPE_I32
,
4997 TCG_AREG0
, offsetof(CPUState
, psr
),
4999 cpu_fsr
= tcg_global_mem_new(TCG_TYPE_TL
,
5000 TCG_AREG0
, offsetof(CPUState
, fsr
),
5002 cpu_pc
= tcg_global_mem_new(TCG_TYPE_TL
,
5003 TCG_AREG0
, offsetof(CPUState
, pc
),
5005 cpu_npc
= tcg_global_mem_new(TCG_TYPE_TL
,
5006 TCG_AREG0
, offsetof(CPUState
, npc
),
5008 cpu_y
= tcg_global_mem_new(TCG_TYPE_TL
,
5009 TCG_AREG0
, offsetof(CPUState
, y
), "y");
5010 #ifndef CONFIG_USER_ONLY
5011 cpu_tbr
= tcg_global_mem_new(TCG_TYPE_TL
,
5012 TCG_AREG0
, offsetof(CPUState
, tbr
),
5015 for (i
= 1; i
< 8; i
++)
5016 cpu_gregs
[i
] = tcg_global_mem_new(TCG_TYPE_TL
, TCG_AREG0
,
5017 offsetof(CPUState
, gregs
[i
]),
5019 for (i
= 0; i
< TARGET_FPREGS
; i
++)
5020 cpu_fpr
[i
] = tcg_global_mem_new(TCG_TYPE_I32
, TCG_AREG0
,
5021 offsetof(CPUState
, fpr
[i
]),
5024 /* register helpers */
5027 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
5032 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
5033 unsigned long searched_pc
, int pc_pos
, void *puc
)
5036 env
->pc
= gen_opc_pc
[pc_pos
];
5037 npc
= gen_opc_npc
[pc_pos
];
5039 /* dynamic NPC: already stored */
5040 } else if (npc
== 2) {
5041 target_ulong t2
= (target_ulong
)(unsigned long)puc
;
5042 /* jump PC: use T2 and the jump targets of the translation */
5044 env
->npc
= gen_opc_jump_pc
[0];
5046 env
->npc
= gen_opc_jump_pc
[1];