4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
37 #define DYNAMIC_PC 1 /* dynamic pc value */
38 #define JUMP_PC 2 /* dynamic pc value which takes only two values
39 according to jump_pc[T2] */
41 /* global register indexes */
42 static TCGv_ptr cpu_env
, cpu_regwptr
;
43 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
44 static TCGv_i32 cpu_cc_op
;
45 static TCGv_i32 cpu_psr
;
46 static TCGv cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
48 #ifndef CONFIG_USER_ONLY
51 static TCGv cpu_cond
, cpu_dst
, cpu_addr
, cpu_val
;
53 static TCGv_i32 cpu_xcc
, cpu_asi
, cpu_fprs
;
55 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
56 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
57 static TCGv_i32 cpu_softint
;
61 /* local register indexes (only used inside old micro ops) */
63 static TCGv_i32 cpu_tmp32
;
64 static TCGv_i64 cpu_tmp64
;
65 /* Floating point registers */
66 static TCGv_i32 cpu_fpr
[TARGET_FPREGS
];
68 static target_ulong gen_opc_npc
[OPC_BUF_SIZE
];
69 static target_ulong gen_opc_jump_pc
[2];
71 #include "gen-icount.h"
73 typedef struct DisasContext
{
74 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
75 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
80 int address_mask_32bit
;
82 uint32_t cc_op
; /* current CC operation */
83 struct TranslationBlock
*tb
;
87 // This function uses non-native bit order
88 #define GET_FIELD(X, FROM, TO) \
89 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
91 // This function uses the order in the manuals, i.e. bit 0 is 2^0
92 #define GET_FIELD_SP(X, FROM, TO) \
93 GET_FIELD(X, 31 - (TO), 31 - (FROM))
95 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
96 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
99 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
100 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
102 #define DFPREG(r) (r & 0x1e)
103 #define QFPREG(r) (r & 0x1c)
106 #define UA2005_HTRAP_MASK 0xff
107 #define V8_TRAP_MASK 0x7f
/* Sign-extend the low LEN bits of X to a full int (1 <= len <= 32).
   The extraction dropped the "len = 32 - len;" line; restored here.
   The left shift is done on an unsigned value to avoid signed-overflow
   undefined behavior; the arithmetic right shift of a negative value is
   implementation-defined but is what the original relied on. */
static int sign_extend(int x, int len)
{
    int shift = 32 - len;
    return (int)((unsigned int)x << shift) >> shift;
}
115 #define IS_IMM (insn & (1<<13))
117 /* floating point registers moves */
118 static void gen_op_load_fpr_DT0(unsigned int src
)
120 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
121 offsetof(CPU_DoubleU
, l
.upper
));
122 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
123 offsetof(CPU_DoubleU
, l
.lower
));
126 static void gen_op_load_fpr_DT1(unsigned int src
)
128 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
129 offsetof(CPU_DoubleU
, l
.upper
));
130 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
131 offsetof(CPU_DoubleU
, l
.lower
));
134 static void gen_op_store_DT0_fpr(unsigned int dst
)
136 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
137 offsetof(CPU_DoubleU
, l
.upper
));
138 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
139 offsetof(CPU_DoubleU
, l
.lower
));
142 static void gen_op_load_fpr_QT0(unsigned int src
)
144 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
145 offsetof(CPU_QuadU
, l
.upmost
));
146 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
147 offsetof(CPU_QuadU
, l
.upper
));
148 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
149 offsetof(CPU_QuadU
, l
.lower
));
150 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
151 offsetof(CPU_QuadU
, l
.lowest
));
154 static void gen_op_load_fpr_QT1(unsigned int src
)
156 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
157 offsetof(CPU_QuadU
, l
.upmost
));
158 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
159 offsetof(CPU_QuadU
, l
.upper
));
160 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
161 offsetof(CPU_QuadU
, l
.lower
));
162 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
163 offsetof(CPU_QuadU
, l
.lowest
));
166 static void gen_op_store_QT0_fpr(unsigned int dst
)
168 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
169 offsetof(CPU_QuadU
, l
.upmost
));
170 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
171 offsetof(CPU_QuadU
, l
.upper
));
172 tcg_gen_ld_i32(cpu_fpr
[dst
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
173 offsetof(CPU_QuadU
, l
.lower
));
174 tcg_gen_ld_i32(cpu_fpr
[dst
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
175 offsetof(CPU_QuadU
, l
.lowest
));
179 #ifdef CONFIG_USER_ONLY
180 #define supervisor(dc) 0
181 #ifdef TARGET_SPARC64
182 #define hypervisor(dc) 0
185 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
186 #ifdef TARGET_SPARC64
187 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
192 #ifdef TARGET_SPARC64
194 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
196 #define AM_CHECK(dc) (1)
200 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
202 #ifdef TARGET_SPARC64
204 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
208 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
211 tcg_gen_movi_tl(tn
, 0);
213 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
215 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
219 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
224 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
226 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
230 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
231 target_ulong pc
, target_ulong npc
)
233 TranslationBlock
*tb
;
236 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
237 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
239 /* jump to same page: we can use a direct jump */
240 tcg_gen_goto_tb(tb_num
);
241 tcg_gen_movi_tl(cpu_pc
, pc
);
242 tcg_gen_movi_tl(cpu_npc
, npc
);
243 tcg_gen_exit_tb((tcg_target_long
)tb
+ tb_num
);
245 /* jump to another page: currently not optimized */
246 tcg_gen_movi_tl(cpu_pc
, pc
);
247 tcg_gen_movi_tl(cpu_npc
, npc
);
253 static inline void gen_mov_reg_N(TCGv reg
, TCGv_i32 src
)
255 tcg_gen_extu_i32_tl(reg
, src
);
256 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
257 tcg_gen_andi_tl(reg
, reg
, 0x1);
260 static inline void gen_mov_reg_Z(TCGv reg
, TCGv_i32 src
)
262 tcg_gen_extu_i32_tl(reg
, src
);
263 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
264 tcg_gen_andi_tl(reg
, reg
, 0x1);
267 static inline void gen_mov_reg_V(TCGv reg
, TCGv_i32 src
)
269 tcg_gen_extu_i32_tl(reg
, src
);
270 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
271 tcg_gen_andi_tl(reg
, reg
, 0x1);
274 static inline void gen_mov_reg_C(TCGv reg
, TCGv_i32 src
)
276 tcg_gen_extu_i32_tl(reg
, src
);
277 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
278 tcg_gen_andi_tl(reg
, reg
, 0x1);
281 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
287 l1
= gen_new_label();
289 r_temp
= tcg_temp_new();
290 tcg_gen_xor_tl(r_temp
, src1
, src2
);
291 tcg_gen_not_tl(r_temp
, r_temp
);
292 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
293 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
294 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
295 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
296 r_const
= tcg_const_i32(TT_TOVF
);
297 gen_helper_raise_exception(cpu_env
, r_const
);
298 tcg_temp_free_i32(r_const
);
300 tcg_temp_free(r_temp
);
303 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
308 l1
= gen_new_label();
309 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
310 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
311 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
312 r_const
= tcg_const_i32(TT_TOVF
);
313 gen_helper_raise_exception(cpu_env
, r_const
);
314 tcg_temp_free_i32(r_const
);
318 static inline void gen_op_addi_cc(TCGv dst
, TCGv src1
, target_long src2
)
320 tcg_gen_mov_tl(cpu_cc_src
, src1
);
321 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
322 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
323 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
326 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
328 tcg_gen_mov_tl(cpu_cc_src
, src1
);
329 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
330 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
331 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
334 static TCGv_i32
gen_add32_carry32(void)
336 TCGv_i32 carry_32
, cc_src1_32
, cc_src2_32
;
338 /* Carry is computed from a previous add: (dst < src) */
339 #if TARGET_LONG_BITS == 64
340 cc_src1_32
= tcg_temp_new_i32();
341 cc_src2_32
= tcg_temp_new_i32();
342 tcg_gen_trunc_i64_i32(cc_src1_32
, cpu_cc_dst
);
343 tcg_gen_trunc_i64_i32(cc_src2_32
, cpu_cc_src
);
345 cc_src1_32
= cpu_cc_dst
;
346 cc_src2_32
= cpu_cc_src
;
349 carry_32
= tcg_temp_new_i32();
350 tcg_gen_setcond_i32(TCG_COND_LTU
, carry_32
, cc_src1_32
, cc_src2_32
);
352 #if TARGET_LONG_BITS == 64
353 tcg_temp_free_i32(cc_src1_32
);
354 tcg_temp_free_i32(cc_src2_32
);
360 static TCGv_i32
gen_sub32_carry32(void)
362 TCGv_i32 carry_32
, cc_src1_32
, cc_src2_32
;
364 /* Carry is computed from a previous borrow: (src1 < src2) */
365 #if TARGET_LONG_BITS == 64
366 cc_src1_32
= tcg_temp_new_i32();
367 cc_src2_32
= tcg_temp_new_i32();
368 tcg_gen_trunc_i64_i32(cc_src1_32
, cpu_cc_src
);
369 tcg_gen_trunc_i64_i32(cc_src2_32
, cpu_cc_src2
);
371 cc_src1_32
= cpu_cc_src
;
372 cc_src2_32
= cpu_cc_src2
;
375 carry_32
= tcg_temp_new_i32();
376 tcg_gen_setcond_i32(TCG_COND_LTU
, carry_32
, cc_src1_32
, cc_src2_32
);
378 #if TARGET_LONG_BITS == 64
379 tcg_temp_free_i32(cc_src1_32
);
380 tcg_temp_free_i32(cc_src2_32
);
386 static void gen_op_addx_int(DisasContext
*dc
, TCGv dst
, TCGv src1
,
387 TCGv src2
, int update_cc
)
395 /* Carry is known to be zero. Fall back to plain ADD. */
397 gen_op_add_cc(dst
, src1
, src2
);
399 tcg_gen_add_tl(dst
, src1
, src2
);
406 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
408 /* For 32-bit hosts, we can re-use the host's hardware carry
409 generation by using an ADD2 opcode. We discard the low
410 part of the output. Ideally we'd combine this operation
411 with the add that generated the carry in the first place. */
412 TCGv dst_low
= tcg_temp_new();
413 tcg_gen_op6_i32(INDEX_op_add2_i32
, dst_low
, dst
,
414 cpu_cc_src
, src1
, cpu_cc_src2
, src2
);
415 tcg_temp_free(dst_low
);
419 carry_32
= gen_add32_carry32();
425 carry_32
= gen_sub32_carry32();
429 /* We need external help to produce the carry. */
430 carry_32
= tcg_temp_new_i32();
431 gen_helper_compute_C_icc(carry_32
);
435 #if TARGET_LONG_BITS == 64
436 carry
= tcg_temp_new();
437 tcg_gen_extu_i32_i64(carry
, carry_32
);
442 tcg_gen_add_tl(dst
, src1
, src2
);
443 tcg_gen_add_tl(dst
, dst
, carry
);
445 tcg_temp_free_i32(carry_32
);
446 #if TARGET_LONG_BITS == 64
447 tcg_temp_free(carry
);
450 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
454 tcg_gen_mov_tl(cpu_cc_src
, src1
);
455 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
456 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
457 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADDX
);
458 dc
->cc_op
= CC_OP_ADDX
;
462 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
464 tcg_gen_mov_tl(cpu_cc_src
, src1
);
465 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
466 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
467 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
470 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
472 tcg_gen_mov_tl(cpu_cc_src
, src1
);
473 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
474 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
475 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
476 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
477 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
480 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
486 l1
= gen_new_label();
488 r_temp
= tcg_temp_new();
489 tcg_gen_xor_tl(r_temp
, src1
, src2
);
490 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
491 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
492 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
493 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
494 r_const
= tcg_const_i32(TT_TOVF
);
495 gen_helper_raise_exception(cpu_env
, r_const
);
496 tcg_temp_free_i32(r_const
);
498 tcg_temp_free(r_temp
);
501 static inline void gen_op_subi_cc(TCGv dst
, TCGv src1
, target_long src2
, DisasContext
*dc
)
503 tcg_gen_mov_tl(cpu_cc_src
, src1
);
504 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
506 tcg_gen_mov_tl(cpu_cc_dst
, src1
);
507 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
508 dc
->cc_op
= CC_OP_LOGIC
;
510 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
511 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
512 dc
->cc_op
= CC_OP_SUB
;
514 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
517 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
519 tcg_gen_mov_tl(cpu_cc_src
, src1
);
520 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
521 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
522 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
525 static void gen_op_subx_int(DisasContext
*dc
, TCGv dst
, TCGv src1
,
526 TCGv src2
, int update_cc
)
534 /* Carry is known to be zero. Fall back to plain SUB. */
536 gen_op_sub_cc(dst
, src1
, src2
);
538 tcg_gen_sub_tl(dst
, src1
, src2
);
545 carry_32
= gen_add32_carry32();
551 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
553 /* For 32-bit hosts, we can re-use the host's hardware carry
554 generation by using a SUB2 opcode. We discard the low
555 part of the output. Ideally we'd combine this operation
556 with the add that generated the carry in the first place. */
557 TCGv dst_low
= tcg_temp_new();
558 tcg_gen_op6_i32(INDEX_op_sub2_i32
, dst_low
, dst
,
559 cpu_cc_src
, src1
, cpu_cc_src2
, src2
);
560 tcg_temp_free(dst_low
);
564 carry_32
= gen_sub32_carry32();
568 /* We need external help to produce the carry. */
569 carry_32
= tcg_temp_new_i32();
570 gen_helper_compute_C_icc(carry_32
);
574 #if TARGET_LONG_BITS == 64
575 carry
= tcg_temp_new();
576 tcg_gen_extu_i32_i64(carry
, carry_32
);
581 tcg_gen_sub_tl(dst
, src1
, src2
);
582 tcg_gen_sub_tl(dst
, dst
, carry
);
584 tcg_temp_free_i32(carry_32
);
585 #if TARGET_LONG_BITS == 64
586 tcg_temp_free(carry
);
589 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
593 tcg_gen_mov_tl(cpu_cc_src
, src1
);
594 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
595 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
596 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUBX
);
597 dc
->cc_op
= CC_OP_SUBX
;
601 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
603 tcg_gen_mov_tl(cpu_cc_src
, src1
);
604 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
605 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
606 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
609 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
611 tcg_gen_mov_tl(cpu_cc_src
, src1
);
612 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
613 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
614 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
615 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
616 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
619 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
624 l1
= gen_new_label();
625 r_temp
= tcg_temp_new();
631 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
632 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
633 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
634 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
635 tcg_gen_movi_tl(cpu_cc_src2
, 0);
639 // env->y = (b2 << 31) | (env->y >> 1);
640 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
641 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
642 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
643 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
644 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
645 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
648 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
649 gen_mov_reg_V(r_temp
, cpu_psr
);
650 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
651 tcg_temp_free(r_temp
);
653 // T0 = (b1 << 31) | (T0 >> 1);
655 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
656 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
657 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
659 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
661 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
664 static inline void gen_op_multiply(TCGv dst
, TCGv src1
, TCGv src2
, int sign_ext
)
666 TCGv_i32 r_src1
, r_src2
;
667 TCGv_i64 r_temp
, r_temp2
;
669 r_src1
= tcg_temp_new_i32();
670 r_src2
= tcg_temp_new_i32();
672 tcg_gen_trunc_tl_i32(r_src1
, src1
);
673 tcg_gen_trunc_tl_i32(r_src2
, src2
);
675 r_temp
= tcg_temp_new_i64();
676 r_temp2
= tcg_temp_new_i64();
679 tcg_gen_ext_i32_i64(r_temp
, r_src2
);
680 tcg_gen_ext_i32_i64(r_temp2
, r_src1
);
682 tcg_gen_extu_i32_i64(r_temp
, r_src2
);
683 tcg_gen_extu_i32_i64(r_temp2
, r_src1
);
686 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
688 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
689 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
690 tcg_temp_free_i64(r_temp
);
691 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
693 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
695 tcg_temp_free_i64(r_temp2
);
697 tcg_temp_free_i32(r_src1
);
698 tcg_temp_free_i32(r_src2
);
701 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
703 /* zero-extend truncated operands before multiplication */
704 gen_op_multiply(dst
, src1
, src2
, 0);
707 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
709 /* sign-extend truncated operands before multiplication */
710 gen_op_multiply(dst
, src1
, src2
, 1);
713 #ifdef TARGET_SPARC64
714 static inline void gen_trap_ifdivzero_tl(TCGv divisor
)
719 l1
= gen_new_label();
720 tcg_gen_brcondi_tl(TCG_COND_NE
, divisor
, 0, l1
);
721 r_const
= tcg_const_i32(TT_DIV_ZERO
);
722 gen_helper_raise_exception(cpu_env
, r_const
);
723 tcg_temp_free_i32(r_const
);
727 static inline void gen_op_sdivx(TCGv dst
, TCGv src1
, TCGv src2
)
730 TCGv r_temp1
, r_temp2
;
732 l1
= gen_new_label();
733 l2
= gen_new_label();
734 r_temp1
= tcg_temp_local_new();
735 r_temp2
= tcg_temp_local_new();
736 tcg_gen_mov_tl(r_temp1
, src1
);
737 tcg_gen_mov_tl(r_temp2
, src2
);
738 gen_trap_ifdivzero_tl(r_temp2
);
739 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp1
, INT64_MIN
, l1
);
740 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp2
, -1, l1
);
741 tcg_gen_movi_i64(dst
, INT64_MIN
);
744 tcg_gen_div_i64(dst
, r_temp1
, r_temp2
);
746 tcg_temp_free(r_temp1
);
747 tcg_temp_free(r_temp2
);
752 static inline void gen_op_eval_ba(TCGv dst
)
754 tcg_gen_movi_tl(dst
, 1);
758 static inline void gen_op_eval_be(TCGv dst
, TCGv_i32 src
)
760 gen_mov_reg_Z(dst
, src
);
764 static inline void gen_op_eval_ble(TCGv dst
, TCGv_i32 src
)
766 gen_mov_reg_N(cpu_tmp0
, src
);
767 gen_mov_reg_V(dst
, src
);
768 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
769 gen_mov_reg_Z(cpu_tmp0
, src
);
770 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
774 static inline void gen_op_eval_bl(TCGv dst
, TCGv_i32 src
)
776 gen_mov_reg_V(cpu_tmp0
, src
);
777 gen_mov_reg_N(dst
, src
);
778 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
782 static inline void gen_op_eval_bleu(TCGv dst
, TCGv_i32 src
)
784 gen_mov_reg_Z(cpu_tmp0
, src
);
785 gen_mov_reg_C(dst
, src
);
786 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
790 static inline void gen_op_eval_bcs(TCGv dst
, TCGv_i32 src
)
792 gen_mov_reg_C(dst
, src
);
796 static inline void gen_op_eval_bvs(TCGv dst
, TCGv_i32 src
)
798 gen_mov_reg_V(dst
, src
);
802 static inline void gen_op_eval_bn(TCGv dst
)
804 tcg_gen_movi_tl(dst
, 0);
808 static inline void gen_op_eval_bneg(TCGv dst
, TCGv_i32 src
)
810 gen_mov_reg_N(dst
, src
);
814 static inline void gen_op_eval_bne(TCGv dst
, TCGv_i32 src
)
816 gen_mov_reg_Z(dst
, src
);
817 tcg_gen_xori_tl(dst
, dst
, 0x1);
821 static inline void gen_op_eval_bg(TCGv dst
, TCGv_i32 src
)
823 gen_mov_reg_N(cpu_tmp0
, src
);
824 gen_mov_reg_V(dst
, src
);
825 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
826 gen_mov_reg_Z(cpu_tmp0
, src
);
827 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
828 tcg_gen_xori_tl(dst
, dst
, 0x1);
832 static inline void gen_op_eval_bge(TCGv dst
, TCGv_i32 src
)
834 gen_mov_reg_V(cpu_tmp0
, src
);
835 gen_mov_reg_N(dst
, src
);
836 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
837 tcg_gen_xori_tl(dst
, dst
, 0x1);
841 static inline void gen_op_eval_bgu(TCGv dst
, TCGv_i32 src
)
843 gen_mov_reg_Z(cpu_tmp0
, src
);
844 gen_mov_reg_C(dst
, src
);
845 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
846 tcg_gen_xori_tl(dst
, dst
, 0x1);
850 static inline void gen_op_eval_bcc(TCGv dst
, TCGv_i32 src
)
852 gen_mov_reg_C(dst
, src
);
853 tcg_gen_xori_tl(dst
, dst
, 0x1);
857 static inline void gen_op_eval_bpos(TCGv dst
, TCGv_i32 src
)
859 gen_mov_reg_N(dst
, src
);
860 tcg_gen_xori_tl(dst
, dst
, 0x1);
864 static inline void gen_op_eval_bvc(TCGv dst
, TCGv_i32 src
)
866 gen_mov_reg_V(dst
, src
);
867 tcg_gen_xori_tl(dst
, dst
, 0x1);
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
877 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
878 unsigned int fcc_offset
)
880 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
881 tcg_gen_andi_tl(reg
, reg
, 0x1);
884 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
885 unsigned int fcc_offset
)
887 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
888 tcg_gen_andi_tl(reg
, reg
, 0x1);
892 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
893 unsigned int fcc_offset
)
895 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
896 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
897 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
900 // 1 or 2: FCC0 ^ FCC1
901 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
902 unsigned int fcc_offset
)
904 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
905 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
906 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
910 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
911 unsigned int fcc_offset
)
913 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
917 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
918 unsigned int fcc_offset
)
920 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
921 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
922 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
923 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
927 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
928 unsigned int fcc_offset
)
930 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
934 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
935 unsigned int fcc_offset
)
937 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
938 tcg_gen_xori_tl(dst
, dst
, 0x1);
939 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
940 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
944 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
945 unsigned int fcc_offset
)
947 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
948 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
949 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
953 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
954 unsigned int fcc_offset
)
956 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
957 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
958 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
959 tcg_gen_xori_tl(dst
, dst
, 0x1);
962 // 0 or 3: !(FCC0 ^ FCC1)
963 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
964 unsigned int fcc_offset
)
966 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
967 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
968 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
969 tcg_gen_xori_tl(dst
, dst
, 0x1);
973 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
974 unsigned int fcc_offset
)
976 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
977 tcg_gen_xori_tl(dst
, dst
, 0x1);
980 // !1: !(FCC0 & !FCC1)
981 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
982 unsigned int fcc_offset
)
984 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
985 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
986 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
987 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
988 tcg_gen_xori_tl(dst
, dst
, 0x1);
992 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
993 unsigned int fcc_offset
)
995 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
996 tcg_gen_xori_tl(dst
, dst
, 0x1);
999 // !2: !(!FCC0 & FCC1)
1000 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
1001 unsigned int fcc_offset
)
1003 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1004 tcg_gen_xori_tl(dst
, dst
, 0x1);
1005 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1006 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1007 tcg_gen_xori_tl(dst
, dst
, 0x1);
1010 // !3: !(FCC0 & FCC1)
1011 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1012 unsigned int fcc_offset
)
1014 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1015 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1016 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1017 tcg_gen_xori_tl(dst
, dst
, 0x1);
1020 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1021 target_ulong pc2
, TCGv r_cond
)
1025 l1
= gen_new_label();
1027 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1029 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1032 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1035 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1036 target_ulong pc2
, TCGv r_cond
)
1040 l1
= gen_new_label();
1042 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1044 gen_goto_tb(dc
, 0, pc2
, pc1
);
1047 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1050 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1055 l1
= gen_new_label();
1056 l2
= gen_new_label();
1058 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1060 tcg_gen_movi_tl(cpu_npc
, npc1
);
1064 tcg_gen_movi_tl(cpu_npc
, npc2
);
1068 /* call this function before using the condition register as it may
1069 have been set for a jump */
1070 static inline void flush_cond(DisasContext
*dc
, TCGv cond
)
1072 if (dc
->npc
== JUMP_PC
) {
1073 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1074 dc
->npc
= DYNAMIC_PC
;
1078 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
1080 if (dc
->npc
== JUMP_PC
) {
1081 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1082 dc
->npc
= DYNAMIC_PC
;
1083 } else if (dc
->npc
!= DYNAMIC_PC
) {
1084 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
1088 static inline void save_state(DisasContext
*dc
, TCGv cond
)
1090 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
1091 /* flush pending conditional evaluations before exposing cpu state */
1092 if (dc
->cc_op
!= CC_OP_FLAGS
) {
1093 dc
->cc_op
= CC_OP_FLAGS
;
1094 gen_helper_compute_psr();
1099 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
1101 if (dc
->npc
== JUMP_PC
) {
1102 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1103 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1104 dc
->pc
= DYNAMIC_PC
;
1105 } else if (dc
->npc
== DYNAMIC_PC
) {
1106 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1107 dc
->pc
= DYNAMIC_PC
;
1113 static inline void gen_op_next_insn(void)
1115 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1116 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
1119 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
,
1124 #ifdef TARGET_SPARC64
1132 switch (dc
->cc_op
) {
1136 gen_helper_compute_psr();
1137 dc
->cc_op
= CC_OP_FLAGS
;
1142 gen_op_eval_bn(r_dst
);
1145 gen_op_eval_be(r_dst
, r_src
);
1148 gen_op_eval_ble(r_dst
, r_src
);
1151 gen_op_eval_bl(r_dst
, r_src
);
1154 gen_op_eval_bleu(r_dst
, r_src
);
1157 gen_op_eval_bcs(r_dst
, r_src
);
1160 gen_op_eval_bneg(r_dst
, r_src
);
1163 gen_op_eval_bvs(r_dst
, r_src
);
1166 gen_op_eval_ba(r_dst
);
1169 gen_op_eval_bne(r_dst
, r_src
);
1172 gen_op_eval_bg(r_dst
, r_src
);
1175 gen_op_eval_bge(r_dst
, r_src
);
1178 gen_op_eval_bgu(r_dst
, r_src
);
1181 gen_op_eval_bcc(r_dst
, r_src
);
1184 gen_op_eval_bpos(r_dst
, r_src
);
1187 gen_op_eval_bvc(r_dst
, r_src
);
1192 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1194 unsigned int offset
;
1214 gen_op_eval_bn(r_dst
);
1217 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1220 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1223 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1226 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1229 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1232 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1235 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1238 gen_op_eval_ba(r_dst
);
1241 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1244 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1247 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1250 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1253 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1256 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1259 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1264 #ifdef TARGET_SPARC64
1266 static const int gen_tcg_cond_reg
[8] = {
1277 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1281 l1
= gen_new_label();
1282 tcg_gen_movi_tl(r_dst
, 0);
1283 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1284 tcg_gen_movi_tl(r_dst
, 1);
1289 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1292 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1293 target_ulong target
= dc
->pc
+ offset
;
1296 /* unconditional not taken */
1298 dc
->pc
= dc
->npc
+ 4;
1299 dc
->npc
= dc
->pc
+ 4;
1302 dc
->npc
= dc
->pc
+ 4;
1304 } else if (cond
== 0x8) {
1305 /* unconditional taken */
1308 dc
->npc
= dc
->pc
+ 4;
1312 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1315 flush_cond(dc
, r_cond
);
1316 gen_cond(r_cond
, cc
, cond
, dc
);
1318 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1322 dc
->jump_pc
[0] = target
;
1323 if (unlikely(dc
->npc
== DYNAMIC_PC
)) {
1324 dc
->jump_pc
[1] = DYNAMIC_PC
;
1325 tcg_gen_addi_tl(cpu_pc
, cpu_npc
, 4);
1327 dc
->jump_pc
[1] = dc
->npc
+ 4;
1334 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1337 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1338 target_ulong target
= dc
->pc
+ offset
;
1341 /* unconditional not taken */
1343 dc
->pc
= dc
->npc
+ 4;
1344 dc
->npc
= dc
->pc
+ 4;
1347 dc
->npc
= dc
->pc
+ 4;
1349 } else if (cond
== 0x8) {
1350 /* unconditional taken */
1353 dc
->npc
= dc
->pc
+ 4;
1357 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1360 flush_cond(dc
, r_cond
);
1361 gen_fcond(r_cond
, cc
, cond
);
1363 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1367 dc
->jump_pc
[0] = target
;
1368 if (unlikely(dc
->npc
== DYNAMIC_PC
)) {
1369 dc
->jump_pc
[1] = DYNAMIC_PC
;
1370 tcg_gen_addi_tl(cpu_pc
, cpu_npc
, 4);
1372 dc
->jump_pc
[1] = dc
->npc
+ 4;
1379 #ifdef TARGET_SPARC64
1380 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1381 TCGv r_cond
, TCGv r_reg
)
1383 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1384 target_ulong target
= dc
->pc
+ offset
;
1386 flush_cond(dc
, r_cond
);
1387 gen_cond_reg(r_cond
, cond
, r_reg
);
1389 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1393 dc
->jump_pc
[0] = target
;
1394 if (unlikely(dc
->npc
== DYNAMIC_PC
)) {
1395 dc
->jump_pc
[1] = DYNAMIC_PC
;
1396 tcg_gen_addi_tl(cpu_pc
, cpu_npc
, 4);
1398 dc
->jump_pc
[1] = dc
->npc
+ 4;
1404 static inline void gen_op_fcmps(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1408 gen_helper_fcmps(cpu_env
, r_rs1
, r_rs2
);
1411 gen_helper_fcmps_fcc1(cpu_env
, r_rs1
, r_rs2
);
1414 gen_helper_fcmps_fcc2(cpu_env
, r_rs1
, r_rs2
);
1417 gen_helper_fcmps_fcc3(cpu_env
, r_rs1
, r_rs2
);
1422 static inline void gen_op_fcmpd(int fccno
)
1426 gen_helper_fcmpd(cpu_env
);
1429 gen_helper_fcmpd_fcc1(cpu_env
);
1432 gen_helper_fcmpd_fcc2(cpu_env
);
1435 gen_helper_fcmpd_fcc3(cpu_env
);
1440 static inline void gen_op_fcmpq(int fccno
)
1444 gen_helper_fcmpq(cpu_env
);
1447 gen_helper_fcmpq_fcc1(cpu_env
);
1450 gen_helper_fcmpq_fcc2(cpu_env
);
1453 gen_helper_fcmpq_fcc3(cpu_env
);
1458 static inline void gen_op_fcmpes(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1462 gen_helper_fcmpes(cpu_env
, r_rs1
, r_rs2
);
1465 gen_helper_fcmpes_fcc1(cpu_env
, r_rs1
, r_rs2
);
1468 gen_helper_fcmpes_fcc2(cpu_env
, r_rs1
, r_rs2
);
1471 gen_helper_fcmpes_fcc3(cpu_env
, r_rs1
, r_rs2
);
1476 static inline void gen_op_fcmped(int fccno
)
1480 gen_helper_fcmped(cpu_env
);
1483 gen_helper_fcmped_fcc1(cpu_env
);
1486 gen_helper_fcmped_fcc2(cpu_env
);
1489 gen_helper_fcmped_fcc3(cpu_env
);
1494 static inline void gen_op_fcmpeq(int fccno
)
1498 gen_helper_fcmpeq(cpu_env
);
1501 gen_helper_fcmpeq_fcc1(cpu_env
);
1504 gen_helper_fcmpeq_fcc2(cpu_env
);
1507 gen_helper_fcmpeq_fcc3(cpu_env
);
/* SPARCv8 variant: only one set of FP condition codes exists, so the
 * fccno selector is accepted for interface parity but ignored.
 */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}
/* SPARCv8 double compare; fccno ignored (single %fcc).  Operands come
 * from the DT0/DT1 staging registers loaded by the caller.
 */
static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd(cpu_env);
}
/* SPARCv8 quad compare; fccno ignored (single %fcc).  Operands come
 * from the QT0/QT1 staging registers loaded by the caller.
 */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}
/* SPARCv8 single compare-with-exception; fccno ignored (single %fcc).  */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}
/* SPARCv8 double compare-with-exception; fccno ignored (single %fcc).  */
static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped(cpu_env);
}
/* SPARCv8 quad compare-with-exception; fccno ignored (single %fcc).  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
1545 static inline void gen_op_fpexception_im(int fsr_flags
)
1549 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1550 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1551 r_const
= tcg_const_i32(TT_FP_EXCP
);
1552 gen_helper_raise_exception(cpu_env
, r_const
);
1553 tcg_temp_free_i32(r_const
);
1556 static int gen_trap_ifnofpu(DisasContext
*dc
, TCGv r_cond
)
1558 #if !defined(CONFIG_USER_ONLY)
1559 if (!dc
->fpu_enabled
) {
1562 save_state(dc
, r_cond
);
1563 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1564 gen_helper_raise_exception(cpu_env
, r_const
);
1565 tcg_temp_free_i32(r_const
);
/* Mark the half of the FP register file containing rd as dirty in FPRS.
 * Sets bit 0 for registers below %f32, bit 1 for the upper half.
 * No-op on non-SPARC64 targets, which have no FPRS register.
 */
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
/* Clear the current IEEE exception bits and the FTT field in the FSR,
 * leaving all other FSR state intact.
 */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
/* Reset the softfloat accumulated-exception state before an FP op, so a
 * later check sees only exceptions raised by that operation.
 */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions(cpu_env);
}
1591 #ifdef TARGET_SPARC64
1592 static inline TCGv_i32
gen_get_asi(int insn
, TCGv r_addr
)
1598 r_asi
= tcg_temp_new_i32();
1599 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1601 asi
= GET_FIELD(insn
, 19, 26);
1602 r_asi
= tcg_const_i32(asi
);
1607 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1610 TCGv_i32 r_asi
, r_size
, r_sign
;
1612 r_asi
= gen_get_asi(insn
, addr
);
1613 r_size
= tcg_const_i32(size
);
1614 r_sign
= tcg_const_i32(sign
);
1615 gen_helper_ld_asi(dst
, addr
, r_asi
, r_size
, r_sign
);
1616 tcg_temp_free_i32(r_sign
);
1617 tcg_temp_free_i32(r_size
);
1618 tcg_temp_free_i32(r_asi
);
1621 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1623 TCGv_i32 r_asi
, r_size
;
1625 r_asi
= gen_get_asi(insn
, addr
);
1626 r_size
= tcg_const_i32(size
);
1627 gen_helper_st_asi(addr
, src
, r_asi
, r_size
);
1628 tcg_temp_free_i32(r_size
);
1629 tcg_temp_free_i32(r_asi
);
1632 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1634 TCGv_i32 r_asi
, r_size
, r_rd
;
1636 r_asi
= gen_get_asi(insn
, addr
);
1637 r_size
= tcg_const_i32(size
);
1638 r_rd
= tcg_const_i32(rd
);
1639 gen_helper_ldf_asi(addr
, r_asi
, r_size
, r_rd
);
1640 tcg_temp_free_i32(r_rd
);
1641 tcg_temp_free_i32(r_size
);
1642 tcg_temp_free_i32(r_asi
);
1645 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
1647 TCGv_i32 r_asi
, r_size
, r_rd
;
1649 r_asi
= gen_get_asi(insn
, addr
);
1650 r_size
= tcg_const_i32(size
);
1651 r_rd
= tcg_const_i32(rd
);
1652 gen_helper_stf_asi(addr
, r_asi
, r_size
, r_rd
);
1653 tcg_temp_free_i32(r_rd
);
1654 tcg_temp_free_i32(r_size
);
1655 tcg_temp_free_i32(r_asi
);
1658 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1660 TCGv_i32 r_asi
, r_size
, r_sign
;
1662 r_asi
= gen_get_asi(insn
, addr
);
1663 r_size
= tcg_const_i32(4);
1664 r_sign
= tcg_const_i32(0);
1665 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1666 tcg_temp_free_i32(r_sign
);
1667 gen_helper_st_asi(addr
, dst
, r_asi
, r_size
);
1668 tcg_temp_free_i32(r_size
);
1669 tcg_temp_free_i32(r_asi
);
1670 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1673 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1675 TCGv_i32 r_asi
, r_rd
;
1677 r_asi
= gen_get_asi(insn
, addr
);
1678 r_rd
= tcg_const_i32(rd
);
1679 gen_helper_ldda_asi(addr
, r_asi
, r_rd
);
1680 tcg_temp_free_i32(r_rd
);
1681 tcg_temp_free_i32(r_asi
);
1684 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1686 TCGv_i32 r_asi
, r_size
;
1688 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1689 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1690 r_asi
= gen_get_asi(insn
, addr
);
1691 r_size
= tcg_const_i32(8);
1692 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1693 tcg_temp_free_i32(r_size
);
1694 tcg_temp_free_i32(r_asi
);
1697 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1703 r_val1
= tcg_temp_new();
1704 gen_movl_reg_TN(rd
, r_val1
);
1705 r_asi
= gen_get_asi(insn
, addr
);
1706 gen_helper_cas_asi(dst
, addr
, r_val1
, val2
, r_asi
);
1707 tcg_temp_free_i32(r_asi
);
1708 tcg_temp_free(r_val1
);
1711 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1716 gen_movl_reg_TN(rd
, cpu_tmp64
);
1717 r_asi
= gen_get_asi(insn
, addr
);
1718 gen_helper_casx_asi(dst
, addr
, cpu_tmp64
, val2
, r_asi
);
1719 tcg_temp_free_i32(r_asi
);
1722 #elif !defined(CONFIG_USER_ONLY)
1724 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1727 TCGv_i32 r_asi
, r_size
, r_sign
;
1729 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1730 r_size
= tcg_const_i32(size
);
1731 r_sign
= tcg_const_i32(sign
);
1732 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1733 tcg_temp_free(r_sign
);
1734 tcg_temp_free(r_size
);
1735 tcg_temp_free(r_asi
);
1736 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1739 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1741 TCGv_i32 r_asi
, r_size
;
1743 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
1744 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1745 r_size
= tcg_const_i32(size
);
1746 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1747 tcg_temp_free(r_size
);
1748 tcg_temp_free(r_asi
);
1751 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1753 TCGv_i32 r_asi
, r_size
, r_sign
;
1756 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1757 r_size
= tcg_const_i32(4);
1758 r_sign
= tcg_const_i32(0);
1759 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1760 tcg_temp_free(r_sign
);
1761 r_val
= tcg_temp_new_i64();
1762 tcg_gen_extu_tl_i64(r_val
, dst
);
1763 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1764 tcg_temp_free_i64(r_val
);
1765 tcg_temp_free(r_size
);
1766 tcg_temp_free(r_asi
);
1767 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1770 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1772 TCGv_i32 r_asi
, r_size
, r_sign
;
1774 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1775 r_size
= tcg_const_i32(8);
1776 r_sign
= tcg_const_i32(0);
1777 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1778 tcg_temp_free(r_sign
);
1779 tcg_temp_free(r_size
);
1780 tcg_temp_free(r_asi
);
1781 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
1782 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
1783 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
1784 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
1785 gen_movl_TN_reg(rd
, hi
);
1788 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1790 TCGv_i32 r_asi
, r_size
;
1792 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1793 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1794 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1795 r_size
= tcg_const_i32(8);
1796 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1797 tcg_temp_free(r_size
);
1798 tcg_temp_free(r_asi
);
1802 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1803 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
1806 TCGv_i32 r_asi
, r_size
;
1808 gen_ld_asi(dst
, addr
, insn
, 1, 0);
1810 r_val
= tcg_const_i64(0xffULL
);
1811 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1812 r_size
= tcg_const_i32(1);
1813 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1814 tcg_temp_free_i32(r_size
);
1815 tcg_temp_free_i32(r_asi
);
1816 tcg_temp_free_i64(r_val
);
1820 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
1825 rs1
= GET_FIELD(insn
, 13, 17);
1827 tcg_gen_movi_tl(def
, 0);
1828 } else if (rs1
< 8) {
1829 r_rs1
= cpu_gregs
[rs1
];
1831 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
1836 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
1840 if (IS_IMM
) { /* immediate */
1841 target_long simm
= GET_FIELDs(insn
, 19, 31);
1842 tcg_gen_movi_tl(def
, simm
);
1843 } else { /* register */
1844 unsigned int rs2
= GET_FIELD(insn
, 27, 31);
1846 tcg_gen_movi_tl(def
, 0);
1847 } else if (rs2
< 8) {
1848 r_rs2
= cpu_gregs
[rs2
];
1850 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
1856 #ifdef TARGET_SPARC64
1857 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr
, TCGv_ptr cpu_env
)
1859 TCGv_i32 r_tl
= tcg_temp_new_i32();
1861 /* load env->tl into r_tl */
1862 tcg_gen_ld_i32(r_tl
, cpu_env
, offsetof(CPUSPARCState
, tl
));
1864 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
1865 tcg_gen_andi_i32(r_tl
, r_tl
, MAXTL_MASK
);
1867 /* calculate offset to current trap state from env->ts, reuse r_tl */
1868 tcg_gen_muli_i32(r_tl
, r_tl
, sizeof (trap_state
));
1869 tcg_gen_addi_ptr(r_tsptr
, cpu_env
, offsetof(CPUState
, ts
));
1871 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
1873 TCGv_ptr r_tl_tmp
= tcg_temp_new_ptr();
1874 tcg_gen_ext_i32_ptr(r_tl_tmp
, r_tl
);
1875 tcg_gen_add_ptr(r_tsptr
, r_tsptr
, r_tl_tmp
);
1876 tcg_temp_free_ptr(r_tl_tmp
);
1879 tcg_temp_free_i32(r_tl
);
1883 #define CHECK_IU_FEATURE(dc, FEATURE) \
1884 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1886 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1887 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1890 /* before an instruction, dc->pc must be static */
1891 static void disas_sparc_insn(DisasContext
* dc
)
1893 unsigned int insn
, opc
, rs1
, rs2
, rd
;
1894 TCGv cpu_src1
, cpu_src2
, cpu_tmp1
, cpu_tmp2
;
1897 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
)))
1898 tcg_gen_debug_insn_start(dc
->pc
);
1899 insn
= ldl_code(dc
->pc
);
1900 opc
= GET_FIELD(insn
, 0, 1);
1902 rd
= GET_FIELD(insn
, 2, 6);
1904 cpu_tmp1
= cpu_src1
= tcg_temp_new();
1905 cpu_tmp2
= cpu_src2
= tcg_temp_new();
1908 case 0: /* branches/sethi */
1910 unsigned int xop
= GET_FIELD(insn
, 7, 9);
1913 #ifdef TARGET_SPARC64
1914 case 0x1: /* V9 BPcc */
1918 target
= GET_FIELD_SP(insn
, 0, 18);
1919 target
= sign_extend(target
, 19);
1921 cc
= GET_FIELD_SP(insn
, 20, 21);
1923 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1925 do_branch(dc
, target
, insn
, 1, cpu_cond
);
1930 case 0x3: /* V9 BPr */
1932 target
= GET_FIELD_SP(insn
, 0, 13) |
1933 (GET_FIELD_SP(insn
, 20, 21) << 14);
1934 target
= sign_extend(target
, 16);
1936 cpu_src1
= get_src1(insn
, cpu_src1
);
1937 do_branch_reg(dc
, target
, insn
, cpu_cond
, cpu_src1
);
1940 case 0x5: /* V9 FBPcc */
1942 int cc
= GET_FIELD_SP(insn
, 20, 21);
1943 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1945 target
= GET_FIELD_SP(insn
, 0, 18);
1946 target
= sign_extend(target
, 19);
1948 do_fbranch(dc
, target
, insn
, cc
, cpu_cond
);
1952 case 0x7: /* CBN+x */
1957 case 0x2: /* BN+x */
1959 target
= GET_FIELD(insn
, 10, 31);
1960 target
= sign_extend(target
, 22);
1962 do_branch(dc
, target
, insn
, 0, cpu_cond
);
1965 case 0x6: /* FBN+x */
1967 if (gen_trap_ifnofpu(dc
, cpu_cond
))
1969 target
= GET_FIELD(insn
, 10, 31);
1970 target
= sign_extend(target
, 22);
1972 do_fbranch(dc
, target
, insn
, 0, cpu_cond
);
1975 case 0x4: /* SETHI */
1977 uint32_t value
= GET_FIELD(insn
, 10, 31);
1980 r_const
= tcg_const_tl(value
<< 10);
1981 gen_movl_TN_reg(rd
, r_const
);
1982 tcg_temp_free(r_const
);
1985 case 0x0: /* UNIMPL */
1994 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
1997 r_const
= tcg_const_tl(dc
->pc
);
1998 gen_movl_TN_reg(15, r_const
);
1999 tcg_temp_free(r_const
);
2001 gen_mov_pc_npc(dc
, cpu_cond
);
2005 case 2: /* FPU & Logical Operations */
2007 unsigned int xop
= GET_FIELD(insn
, 7, 12);
2008 if (xop
== 0x3a) { /* generate trap */
2011 cpu_src1
= get_src1(insn
, cpu_src1
);
2013 rs2
= GET_FIELD(insn
, 25, 31);
2014 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
2016 rs2
= GET_FIELD(insn
, 27, 31);
2018 gen_movl_reg_TN(rs2
, cpu_src2
);
2019 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2021 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
2024 cond
= GET_FIELD(insn
, 3, 6);
2025 if (cond
== 0x8) { /* Trap Always */
2026 save_state(dc
, cpu_cond
);
2027 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2029 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2031 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2032 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2033 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2036 dc
->def
->features
& CPU_FEATURE_TA0_SHUTDOWN
) {
2038 gen_helper_shutdown();
2041 gen_helper_raise_exception(cpu_env
, cpu_tmp32
);
2043 } else if (cond
!= 0) {
2044 TCGv r_cond
= tcg_temp_new();
2046 #ifdef TARGET_SPARC64
2048 int cc
= GET_FIELD_SP(insn
, 11, 12);
2050 save_state(dc
, cpu_cond
);
2052 gen_cond(r_cond
, 0, cond
, dc
);
2054 gen_cond(r_cond
, 1, cond
, dc
);
2058 save_state(dc
, cpu_cond
);
2059 gen_cond(r_cond
, 0, cond
, dc
);
2061 l1
= gen_new_label();
2062 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
2064 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2066 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2068 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2069 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2070 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2071 gen_helper_raise_exception(cpu_env
, cpu_tmp32
);
2074 tcg_temp_free(r_cond
);
2080 } else if (xop
== 0x28) {
2081 rs1
= GET_FIELD(insn
, 13, 17);
2084 #ifndef TARGET_SPARC64
2085 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2086 manual, rdy on the microSPARC
2088 case 0x0f: /* stbar in the SPARCv8 manual,
2089 rdy on the microSPARC II */
2090 case 0x10 ... 0x1f: /* implementation-dependent in the
2091 SPARCv8 manual, rdy on the
2094 if (rs1
== 0x11 && dc
->def
->features
& CPU_FEATURE_ASR17
) {
2097 /* Read Asr17 for a Leon3 monoprocessor */
2098 r_const
= tcg_const_tl((1 << 8)
2099 | (dc
->def
->nwindows
- 1));
2100 gen_movl_TN_reg(rd
, r_const
);
2101 tcg_temp_free(r_const
);
2105 gen_movl_TN_reg(rd
, cpu_y
);
2107 #ifdef TARGET_SPARC64
2108 case 0x2: /* V9 rdccr */
2109 gen_helper_compute_psr();
2110 gen_helper_rdccr(cpu_dst
);
2111 gen_movl_TN_reg(rd
, cpu_dst
);
2113 case 0x3: /* V9 rdasi */
2114 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
2115 gen_movl_TN_reg(rd
, cpu_dst
);
2117 case 0x4: /* V9 rdtick */
2121 r_tickptr
= tcg_temp_new_ptr();
2122 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2123 offsetof(CPUState
, tick
));
2124 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2125 tcg_temp_free_ptr(r_tickptr
);
2126 gen_movl_TN_reg(rd
, cpu_dst
);
2129 case 0x5: /* V9 rdpc */
2133 r_const
= tcg_const_tl(dc
->pc
);
2134 gen_movl_TN_reg(rd
, r_const
);
2135 tcg_temp_free(r_const
);
2138 case 0x6: /* V9 rdfprs */
2139 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2140 gen_movl_TN_reg(rd
, cpu_dst
);
2142 case 0xf: /* V9 membar */
2143 break; /* no effect */
2144 case 0x13: /* Graphics Status */
2145 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2147 gen_movl_TN_reg(rd
, cpu_gsr
);
2149 case 0x16: /* Softint */
2150 tcg_gen_ext_i32_tl(cpu_dst
, cpu_softint
);
2151 gen_movl_TN_reg(rd
, cpu_dst
);
2153 case 0x17: /* Tick compare */
2154 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2156 case 0x18: /* System tick */
2160 r_tickptr
= tcg_temp_new_ptr();
2161 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2162 offsetof(CPUState
, stick
));
2163 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2164 tcg_temp_free_ptr(r_tickptr
);
2165 gen_movl_TN_reg(rd
, cpu_dst
);
2168 case 0x19: /* System tick compare */
2169 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2171 case 0x10: /* Performance Control */
2172 case 0x11: /* Performance Instrumentation Counter */
2173 case 0x12: /* Dispatch Control */
2174 case 0x14: /* Softint set, WO */
2175 case 0x15: /* Softint clear, WO */
2180 #if !defined(CONFIG_USER_ONLY)
2181 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2182 #ifndef TARGET_SPARC64
2183 if (!supervisor(dc
))
2185 gen_helper_compute_psr();
2186 dc
->cc_op
= CC_OP_FLAGS
;
2187 gen_helper_rdpsr(cpu_dst
);
2189 CHECK_IU_FEATURE(dc
, HYPV
);
2190 if (!hypervisor(dc
))
2192 rs1
= GET_FIELD(insn
, 13, 17);
2195 // gen_op_rdhpstate();
2198 // gen_op_rdhtstate();
2201 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
2204 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
2207 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
2209 case 31: // hstick_cmpr
2210 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2216 gen_movl_TN_reg(rd
, cpu_dst
);
2218 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2219 if (!supervisor(dc
))
2221 #ifdef TARGET_SPARC64
2222 rs1
= GET_FIELD(insn
, 13, 17);
2228 r_tsptr
= tcg_temp_new_ptr();
2229 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2230 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2231 offsetof(trap_state
, tpc
));
2232 tcg_temp_free_ptr(r_tsptr
);
2239 r_tsptr
= tcg_temp_new_ptr();
2240 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2241 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2242 offsetof(trap_state
, tnpc
));
2243 tcg_temp_free_ptr(r_tsptr
);
2250 r_tsptr
= tcg_temp_new_ptr();
2251 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2252 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2253 offsetof(trap_state
, tstate
));
2254 tcg_temp_free_ptr(r_tsptr
);
2261 r_tsptr
= tcg_temp_new_ptr();
2262 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
2263 tcg_gen_ld_i32(cpu_tmp32
, r_tsptr
,
2264 offsetof(trap_state
, tt
));
2265 tcg_temp_free_ptr(r_tsptr
);
2266 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2273 r_tickptr
= tcg_temp_new_ptr();
2274 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2275 offsetof(CPUState
, tick
));
2276 gen_helper_tick_get_count(cpu_tmp0
, r_tickptr
);
2277 gen_movl_TN_reg(rd
, cpu_tmp0
);
2278 tcg_temp_free_ptr(r_tickptr
);
2282 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2285 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2286 offsetof(CPUSPARCState
, pstate
));
2287 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2290 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2291 offsetof(CPUSPARCState
, tl
));
2292 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2295 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2296 offsetof(CPUSPARCState
, psrpil
));
2297 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2300 gen_helper_rdcwp(cpu_tmp0
);
2303 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2304 offsetof(CPUSPARCState
, cansave
));
2305 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2307 case 11: // canrestore
2308 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2309 offsetof(CPUSPARCState
, canrestore
));
2310 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2312 case 12: // cleanwin
2313 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2314 offsetof(CPUSPARCState
, cleanwin
));
2315 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2317 case 13: // otherwin
2318 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2319 offsetof(CPUSPARCState
, otherwin
));
2320 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2323 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2324 offsetof(CPUSPARCState
, wstate
));
2325 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2327 case 16: // UA2005 gl
2328 CHECK_IU_FEATURE(dc
, GL
);
2329 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2330 offsetof(CPUSPARCState
, gl
));
2331 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2333 case 26: // UA2005 strand status
2334 CHECK_IU_FEATURE(dc
, HYPV
);
2335 if (!hypervisor(dc
))
2337 tcg_gen_mov_tl(cpu_tmp0
, cpu_ssr
);
2340 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2347 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2349 gen_movl_TN_reg(rd
, cpu_tmp0
);
2351 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2352 #ifdef TARGET_SPARC64
2353 save_state(dc
, cpu_cond
);
2354 gen_helper_flushw();
2356 if (!supervisor(dc
))
2358 gen_movl_TN_reg(rd
, cpu_tbr
);
2362 } else if (xop
== 0x34) { /* FPU Operations */
2363 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2365 gen_op_clear_ieee_excp_and_FTT();
2366 rs1
= GET_FIELD(insn
, 13, 17);
2367 rs2
= GET_FIELD(insn
, 27, 31);
2368 xop
= GET_FIELD(insn
, 18, 26);
2369 save_state(dc
, cpu_cond
);
2371 case 0x1: /* fmovs */
2372 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2373 gen_update_fprs_dirty(rd
);
2375 case 0x5: /* fnegs */
2376 gen_helper_fnegs(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2377 gen_update_fprs_dirty(rd
);
2379 case 0x9: /* fabss */
2380 gen_helper_fabss(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2381 gen_update_fprs_dirty(rd
);
2383 case 0x29: /* fsqrts */
2384 CHECK_FPU_FEATURE(dc
, FSQRT
);
2385 gen_clear_float_exceptions();
2386 gen_helper_fsqrts(cpu_tmp32
, cpu_env
, cpu_fpr
[rs2
]);
2387 gen_helper_check_ieee_exceptions(cpu_env
);
2388 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2389 gen_update_fprs_dirty(rd
);
2391 case 0x2a: /* fsqrtd */
2392 CHECK_FPU_FEATURE(dc
, FSQRT
);
2393 gen_op_load_fpr_DT1(DFPREG(rs2
));
2394 gen_clear_float_exceptions();
2395 gen_helper_fsqrtd(cpu_env
);
2396 gen_helper_check_ieee_exceptions(cpu_env
);
2397 gen_op_store_DT0_fpr(DFPREG(rd
));
2398 gen_update_fprs_dirty(DFPREG(rd
));
2400 case 0x2b: /* fsqrtq */
2401 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2402 gen_op_load_fpr_QT1(QFPREG(rs2
));
2403 gen_clear_float_exceptions();
2404 gen_helper_fsqrtq(cpu_env
);
2405 gen_helper_check_ieee_exceptions(cpu_env
);
2406 gen_op_store_QT0_fpr(QFPREG(rd
));
2407 gen_update_fprs_dirty(QFPREG(rd
));
2409 case 0x41: /* fadds */
2410 gen_clear_float_exceptions();
2411 gen_helper_fadds(cpu_tmp32
, cpu_env
, cpu_fpr
[rs1
],
2413 gen_helper_check_ieee_exceptions(cpu_env
);
2414 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2415 gen_update_fprs_dirty(rd
);
2417 case 0x42: /* faddd */
2418 gen_op_load_fpr_DT0(DFPREG(rs1
));
2419 gen_op_load_fpr_DT1(DFPREG(rs2
));
2420 gen_clear_float_exceptions();
2421 gen_helper_faddd(cpu_env
);
2422 gen_helper_check_ieee_exceptions(cpu_env
);
2423 gen_op_store_DT0_fpr(DFPREG(rd
));
2424 gen_update_fprs_dirty(DFPREG(rd
));
2426 case 0x43: /* faddq */
2427 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2428 gen_op_load_fpr_QT0(QFPREG(rs1
));
2429 gen_op_load_fpr_QT1(QFPREG(rs2
));
2430 gen_clear_float_exceptions();
2431 gen_helper_faddq(cpu_env
);
2432 gen_helper_check_ieee_exceptions(cpu_env
);
2433 gen_op_store_QT0_fpr(QFPREG(rd
));
2434 gen_update_fprs_dirty(QFPREG(rd
));
2436 case 0x45: /* fsubs */
2437 gen_clear_float_exceptions();
2438 gen_helper_fsubs(cpu_tmp32
, cpu_env
, cpu_fpr
[rs1
],
2440 gen_helper_check_ieee_exceptions(cpu_env
);
2441 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2442 gen_update_fprs_dirty(rd
);
2444 case 0x46: /* fsubd */
2445 gen_op_load_fpr_DT0(DFPREG(rs1
));
2446 gen_op_load_fpr_DT1(DFPREG(rs2
));
2447 gen_clear_float_exceptions();
2448 gen_helper_fsubd(cpu_env
);
2449 gen_helper_check_ieee_exceptions(cpu_env
);
2450 gen_op_store_DT0_fpr(DFPREG(rd
));
2451 gen_update_fprs_dirty(DFPREG(rd
));
2453 case 0x47: /* fsubq */
2454 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2455 gen_op_load_fpr_QT0(QFPREG(rs1
));
2456 gen_op_load_fpr_QT1(QFPREG(rs2
));
2457 gen_clear_float_exceptions();
2458 gen_helper_fsubq(cpu_env
);
2459 gen_helper_check_ieee_exceptions(cpu_env
);
2460 gen_op_store_QT0_fpr(QFPREG(rd
));
2461 gen_update_fprs_dirty(QFPREG(rd
));
2463 case 0x49: /* fmuls */
2464 CHECK_FPU_FEATURE(dc
, FMUL
);
2465 gen_clear_float_exceptions();
2466 gen_helper_fmuls(cpu_tmp32
, cpu_env
, cpu_fpr
[rs1
],
2468 gen_helper_check_ieee_exceptions(cpu_env
);
2469 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2470 gen_update_fprs_dirty(rd
);
2472 case 0x4a: /* fmuld */
2473 CHECK_FPU_FEATURE(dc
, FMUL
);
2474 gen_op_load_fpr_DT0(DFPREG(rs1
));
2475 gen_op_load_fpr_DT1(DFPREG(rs2
));
2476 gen_clear_float_exceptions();
2477 gen_helper_fmuld(cpu_env
);
2478 gen_helper_check_ieee_exceptions(cpu_env
);
2479 gen_op_store_DT0_fpr(DFPREG(rd
));
2480 gen_update_fprs_dirty(DFPREG(rd
));
2482 case 0x4b: /* fmulq */
2483 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2484 CHECK_FPU_FEATURE(dc
, FMUL
);
2485 gen_op_load_fpr_QT0(QFPREG(rs1
));
2486 gen_op_load_fpr_QT1(QFPREG(rs2
));
2487 gen_clear_float_exceptions();
2488 gen_helper_fmulq(cpu_env
);
2489 gen_helper_check_ieee_exceptions(cpu_env
);
2490 gen_op_store_QT0_fpr(QFPREG(rd
));
2491 gen_update_fprs_dirty(QFPREG(rd
));
2493 case 0x4d: /* fdivs */
2494 gen_clear_float_exceptions();
2495 gen_helper_fdivs(cpu_tmp32
, cpu_env
, cpu_fpr
[rs1
],
2497 gen_helper_check_ieee_exceptions(cpu_env
);
2498 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2499 gen_update_fprs_dirty(rd
);
2501 case 0x4e: /* fdivd */
2502 gen_op_load_fpr_DT0(DFPREG(rs1
));
2503 gen_op_load_fpr_DT1(DFPREG(rs2
));
2504 gen_clear_float_exceptions();
2505 gen_helper_fdivd(cpu_env
);
2506 gen_helper_check_ieee_exceptions(cpu_env
);
2507 gen_op_store_DT0_fpr(DFPREG(rd
));
2508 gen_update_fprs_dirty(DFPREG(rd
));
2510 case 0x4f: /* fdivq */
2511 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2512 gen_op_load_fpr_QT0(QFPREG(rs1
));
2513 gen_op_load_fpr_QT1(QFPREG(rs2
));
2514 gen_clear_float_exceptions();
2515 gen_helper_fdivq(cpu_env
);
2516 gen_helper_check_ieee_exceptions(cpu_env
);
2517 gen_op_store_QT0_fpr(QFPREG(rd
));
2518 gen_update_fprs_dirty(QFPREG(rd
));
2520 case 0x69: /* fsmuld */
2521 CHECK_FPU_FEATURE(dc
, FSMULD
);
2522 gen_clear_float_exceptions();
2523 gen_helper_fsmuld(cpu_env
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2524 gen_helper_check_ieee_exceptions(cpu_env
);
2525 gen_op_store_DT0_fpr(DFPREG(rd
));
2526 gen_update_fprs_dirty(DFPREG(rd
));
2528 case 0x6e: /* fdmulq */
2529 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2530 gen_op_load_fpr_DT0(DFPREG(rs1
));
2531 gen_op_load_fpr_DT1(DFPREG(rs2
));
2532 gen_clear_float_exceptions();
2533 gen_helper_fdmulq(cpu_env
);
2534 gen_helper_check_ieee_exceptions(cpu_env
);
2535 gen_op_store_QT0_fpr(QFPREG(rd
));
2536 gen_update_fprs_dirty(QFPREG(rd
));
2538 case 0xc4: /* fitos */
2539 gen_clear_float_exceptions();
2540 gen_helper_fitos(cpu_tmp32
, cpu_env
, cpu_fpr
[rs2
]);
2541 gen_helper_check_ieee_exceptions(cpu_env
);
2542 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2543 gen_update_fprs_dirty(rd
);
2545 case 0xc6: /* fdtos */
2546 gen_op_load_fpr_DT1(DFPREG(rs2
));
2547 gen_clear_float_exceptions();
2548 gen_helper_fdtos(cpu_tmp32
, cpu_env
);
2549 gen_helper_check_ieee_exceptions(cpu_env
);
2550 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2551 gen_update_fprs_dirty(rd
);
2553 case 0xc7: /* fqtos */
2554 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2555 gen_op_load_fpr_QT1(QFPREG(rs2
));
2556 gen_clear_float_exceptions();
2557 gen_helper_fqtos(cpu_tmp32
, cpu_env
);
2558 gen_helper_check_ieee_exceptions(cpu_env
);
2559 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2560 gen_update_fprs_dirty(rd
);
2562 case 0xc8: /* fitod */
2563 gen_helper_fitod(cpu_env
, cpu_fpr
[rs2
]);
2564 gen_op_store_DT0_fpr(DFPREG(rd
));
2565 gen_update_fprs_dirty(DFPREG(rd
));
2567 case 0xc9: /* fstod */
2568 gen_helper_fstod(cpu_env
, cpu_fpr
[rs2
]);
2569 gen_op_store_DT0_fpr(DFPREG(rd
));
2570 gen_update_fprs_dirty(DFPREG(rd
));
2572 case 0xcb: /* fqtod */
2573 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2574 gen_op_load_fpr_QT1(QFPREG(rs2
));
2575 gen_clear_float_exceptions();
2576 gen_helper_fqtod(cpu_env
);
2577 gen_helper_check_ieee_exceptions(cpu_env
);
2578 gen_op_store_DT0_fpr(DFPREG(rd
));
2579 gen_update_fprs_dirty(DFPREG(rd
));
2581 case 0xcc: /* fitoq */
2582 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2583 gen_helper_fitoq(cpu_env
, cpu_fpr
[rs2
]);
2584 gen_op_store_QT0_fpr(QFPREG(rd
));
2585 gen_update_fprs_dirty(QFPREG(rd
));
2587 case 0xcd: /* fstoq */
2588 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2589 gen_helper_fstoq(cpu_env
, cpu_fpr
[rs2
]);
2590 gen_op_store_QT0_fpr(QFPREG(rd
));
2591 gen_update_fprs_dirty(QFPREG(rd
));
2593 case 0xce: /* fdtoq */
2594 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2595 gen_op_load_fpr_DT1(DFPREG(rs2
));
2596 gen_helper_fdtoq(cpu_env
);
2597 gen_op_store_QT0_fpr(QFPREG(rd
));
2598 gen_update_fprs_dirty(QFPREG(rd
));
2600 case 0xd1: /* fstoi */
2601 gen_clear_float_exceptions();
2602 gen_helper_fstoi(cpu_tmp32
, cpu_env
, cpu_fpr
[rs2
]);
2603 gen_helper_check_ieee_exceptions(cpu_env
);
2604 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2605 gen_update_fprs_dirty(rd
);
2607 case 0xd2: /* fdtoi */
2608 gen_op_load_fpr_DT1(DFPREG(rs2
));
2609 gen_clear_float_exceptions();
2610 gen_helper_fdtoi(cpu_tmp32
, cpu_env
);
2611 gen_helper_check_ieee_exceptions(cpu_env
);
2612 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2613 gen_update_fprs_dirty(rd
);
2615 case 0xd3: /* fqtoi */
2616 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2617 gen_op_load_fpr_QT1(QFPREG(rs2
));
2618 gen_clear_float_exceptions();
2619 gen_helper_fqtoi(cpu_tmp32
, cpu_env
);
2620 gen_helper_check_ieee_exceptions(cpu_env
);
2621 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2622 gen_update_fprs_dirty(rd
);
2624 #ifdef TARGET_SPARC64
2625 case 0x2: /* V9 fmovd */
2626 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2627 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2628 cpu_fpr
[DFPREG(rs2
) + 1]);
2629 gen_update_fprs_dirty(DFPREG(rd
));
2631 case 0x3: /* V9 fmovq */
2632 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2633 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2634 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2635 cpu_fpr
[QFPREG(rs2
) + 1]);
2636 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2637 cpu_fpr
[QFPREG(rs2
) + 2]);
2638 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2639 cpu_fpr
[QFPREG(rs2
) + 3]);
2640 gen_update_fprs_dirty(QFPREG(rd
));
2642 case 0x6: /* V9 fnegd */
2643 gen_op_load_fpr_DT1(DFPREG(rs2
));
2644 gen_helper_fnegd(cpu_env
);
2645 gen_op_store_DT0_fpr(DFPREG(rd
));
2646 gen_update_fprs_dirty(DFPREG(rd
));
2648 case 0x7: /* V9 fnegq */
2649 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2650 gen_op_load_fpr_QT1(QFPREG(rs2
));
2651 gen_helper_fnegq(cpu_env
);
2652 gen_op_store_QT0_fpr(QFPREG(rd
));
2653 gen_update_fprs_dirty(QFPREG(rd
));
2655 case 0xa: /* V9 fabsd */
2656 gen_op_load_fpr_DT1(DFPREG(rs2
));
2657 gen_helper_fabsd(cpu_env
);
2658 gen_op_store_DT0_fpr(DFPREG(rd
));
2659 gen_update_fprs_dirty(DFPREG(rd
));
2661 case 0xb: /* V9 fabsq */
2662 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2663 gen_op_load_fpr_QT1(QFPREG(rs2
));
2664 gen_helper_fabsq(cpu_env
);
2665 gen_op_store_QT0_fpr(QFPREG(rd
));
2666 gen_update_fprs_dirty(QFPREG(rd
));
2668 case 0x81: /* V9 fstox */
2669 gen_clear_float_exceptions();
2670 gen_helper_fstox(cpu_env
, cpu_fpr
[rs2
]);
2671 gen_helper_check_ieee_exceptions(cpu_env
);
2672 gen_op_store_DT0_fpr(DFPREG(rd
));
2673 gen_update_fprs_dirty(DFPREG(rd
));
2675 case 0x82: /* V9 fdtox */
2676 gen_op_load_fpr_DT1(DFPREG(rs2
));
2677 gen_clear_float_exceptions();
2678 gen_helper_fdtox(cpu_env
);
2679 gen_helper_check_ieee_exceptions(cpu_env
);
2680 gen_op_store_DT0_fpr(DFPREG(rd
));
2681 gen_update_fprs_dirty(DFPREG(rd
));
2683 case 0x83: /* V9 fqtox */
2684 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2685 gen_op_load_fpr_QT1(QFPREG(rs2
));
2686 gen_clear_float_exceptions();
2687 gen_helper_fqtox(cpu_env
);
2688 gen_helper_check_ieee_exceptions(cpu_env
);
2689 gen_op_store_DT0_fpr(DFPREG(rd
));
2690 gen_update_fprs_dirty(DFPREG(rd
));
2692 case 0x84: /* V9 fxtos */
2693 gen_op_load_fpr_DT1(DFPREG(rs2
));
2694 gen_clear_float_exceptions();
2695 gen_helper_fxtos(cpu_tmp32
, cpu_env
);
2696 gen_helper_check_ieee_exceptions(cpu_env
);
2697 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2698 gen_update_fprs_dirty(rd
);
2700 case 0x88: /* V9 fxtod */
2701 gen_op_load_fpr_DT1(DFPREG(rs2
));
2702 gen_clear_float_exceptions();
2703 gen_helper_fxtod(cpu_env
);
2704 gen_helper_check_ieee_exceptions(cpu_env
);
2705 gen_op_store_DT0_fpr(DFPREG(rd
));
2706 gen_update_fprs_dirty(DFPREG(rd
));
2708 case 0x8c: /* V9 fxtoq */
2709 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2710 gen_op_load_fpr_DT1(DFPREG(rs2
));
2711 gen_clear_float_exceptions();
2712 gen_helper_fxtoq(cpu_env
);
2713 gen_helper_check_ieee_exceptions(cpu_env
);
2714 gen_op_store_QT0_fpr(QFPREG(rd
));
2715 gen_update_fprs_dirty(QFPREG(rd
));
2721 } else if (xop
== 0x35) { /* FPU Operations */
2722 #ifdef TARGET_SPARC64
2725 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2727 gen_op_clear_ieee_excp_and_FTT();
2728 rs1
= GET_FIELD(insn
, 13, 17);
2729 rs2
= GET_FIELD(insn
, 27, 31);
2730 xop
= GET_FIELD(insn
, 18, 26);
2731 save_state(dc
, cpu_cond
);
2732 #ifdef TARGET_SPARC64
2733 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2736 l1
= gen_new_label();
2737 cond
= GET_FIELD_SP(insn
, 14, 17);
2738 cpu_src1
= get_src1(insn
, cpu_src1
);
2739 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2741 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2742 gen_update_fprs_dirty(rd
);
2745 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2748 l1
= gen_new_label();
2749 cond
= GET_FIELD_SP(insn
, 14, 17);
2750 cpu_src1
= get_src1(insn
, cpu_src1
);
2751 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2753 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2754 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2755 gen_update_fprs_dirty(DFPREG(rd
));
2758 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2761 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2762 l1
= gen_new_label();
2763 cond
= GET_FIELD_SP(insn
, 14, 17);
2764 cpu_src1
= get_src1(insn
, cpu_src1
);
2765 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2767 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2768 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2769 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2770 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2771 gen_update_fprs_dirty(QFPREG(rd
));
2777 #ifdef TARGET_SPARC64
2778 #define FMOVSCC(fcc) \
2783 l1 = gen_new_label(); \
2784 r_cond = tcg_temp_new(); \
2785 cond = GET_FIELD_SP(insn, 14, 17); \
2786 gen_fcond(r_cond, fcc, cond); \
2787 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2789 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2790 gen_update_fprs_dirty(rd); \
2791 gen_set_label(l1); \
2792 tcg_temp_free(r_cond); \
2794 #define FMOVDCC(fcc) \
2799 l1 = gen_new_label(); \
2800 r_cond = tcg_temp_new(); \
2801 cond = GET_FIELD_SP(insn, 14, 17); \
2802 gen_fcond(r_cond, fcc, cond); \
2803 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2805 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2806 cpu_fpr[DFPREG(rs2)]); \
2807 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2808 cpu_fpr[DFPREG(rs2) + 1]); \
2809 gen_update_fprs_dirty(DFPREG(rd)); \
2810 gen_set_label(l1); \
2811 tcg_temp_free(r_cond); \
2813 #define FMOVQCC(fcc) \
2818 l1 = gen_new_label(); \
2819 r_cond = tcg_temp_new(); \
2820 cond = GET_FIELD_SP(insn, 14, 17); \
2821 gen_fcond(r_cond, fcc, cond); \
2822 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2824 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2825 cpu_fpr[QFPREG(rs2)]); \
2826 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2827 cpu_fpr[QFPREG(rs2) + 1]); \
2828 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2829 cpu_fpr[QFPREG(rs2) + 2]); \
2830 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2831 cpu_fpr[QFPREG(rs2) + 3]); \
2832 gen_update_fprs_dirty(QFPREG(rd)); \
2833 gen_set_label(l1); \
2834 tcg_temp_free(r_cond); \
2836 case 0x001: /* V9 fmovscc %fcc0 */
2839 case 0x002: /* V9 fmovdcc %fcc0 */
2842 case 0x003: /* V9 fmovqcc %fcc0 */
2843 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2846 case 0x041: /* V9 fmovscc %fcc1 */
2849 case 0x042: /* V9 fmovdcc %fcc1 */
2852 case 0x043: /* V9 fmovqcc %fcc1 */
2853 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2856 case 0x081: /* V9 fmovscc %fcc2 */
2859 case 0x082: /* V9 fmovdcc %fcc2 */
2862 case 0x083: /* V9 fmovqcc %fcc2 */
2863 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2866 case 0x0c1: /* V9 fmovscc %fcc3 */
2869 case 0x0c2: /* V9 fmovdcc %fcc3 */
2872 case 0x0c3: /* V9 fmovqcc %fcc3 */
2873 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2879 #define FMOVSCC(icc) \
2884 l1 = gen_new_label(); \
2885 r_cond = tcg_temp_new(); \
2886 cond = GET_FIELD_SP(insn, 14, 17); \
2887 gen_cond(r_cond, icc, cond, dc); \
2888 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2890 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2891 gen_update_fprs_dirty(rd); \
2892 gen_set_label(l1); \
2893 tcg_temp_free(r_cond); \
2895 #define FMOVDCC(icc) \
2900 l1 = gen_new_label(); \
2901 r_cond = tcg_temp_new(); \
2902 cond = GET_FIELD_SP(insn, 14, 17); \
2903 gen_cond(r_cond, icc, cond, dc); \
2904 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2906 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2907 cpu_fpr[DFPREG(rs2)]); \
2908 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2909 cpu_fpr[DFPREG(rs2) + 1]); \
2910 gen_update_fprs_dirty(DFPREG(rd)); \
2911 gen_set_label(l1); \
2912 tcg_temp_free(r_cond); \
2914 #define FMOVQCC(icc) \
2919 l1 = gen_new_label(); \
2920 r_cond = tcg_temp_new(); \
2921 cond = GET_FIELD_SP(insn, 14, 17); \
2922 gen_cond(r_cond, icc, cond, dc); \
2923 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2925 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2926 cpu_fpr[QFPREG(rs2)]); \
2927 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2928 cpu_fpr[QFPREG(rs2) + 1]); \
2929 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2930 cpu_fpr[QFPREG(rs2) + 2]); \
2931 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2932 cpu_fpr[QFPREG(rs2) + 3]); \
2933 gen_update_fprs_dirty(QFPREG(rd)); \
2934 gen_set_label(l1); \
2935 tcg_temp_free(r_cond); \
2938 case 0x101: /* V9 fmovscc %icc */
2941 case 0x102: /* V9 fmovdcc %icc */
2944 case 0x103: /* V9 fmovqcc %icc */
2945 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2948 case 0x181: /* V9 fmovscc %xcc */
2951 case 0x182: /* V9 fmovdcc %xcc */
2954 case 0x183: /* V9 fmovqcc %xcc */
2955 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2962 case 0x51: /* fcmps, V9 %fcc */
2963 gen_op_fcmps(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2965 case 0x52: /* fcmpd, V9 %fcc */
2966 gen_op_load_fpr_DT0(DFPREG(rs1
));
2967 gen_op_load_fpr_DT1(DFPREG(rs2
));
2968 gen_op_fcmpd(rd
& 3);
2970 case 0x53: /* fcmpq, V9 %fcc */
2971 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2972 gen_op_load_fpr_QT0(QFPREG(rs1
));
2973 gen_op_load_fpr_QT1(QFPREG(rs2
));
2974 gen_op_fcmpq(rd
& 3);
2976 case 0x55: /* fcmpes, V9 %fcc */
2977 gen_op_fcmpes(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2979 case 0x56: /* fcmped, V9 %fcc */
2980 gen_op_load_fpr_DT0(DFPREG(rs1
));
2981 gen_op_load_fpr_DT1(DFPREG(rs2
));
2982 gen_op_fcmped(rd
& 3);
2984 case 0x57: /* fcmpeq, V9 %fcc */
2985 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2986 gen_op_load_fpr_QT0(QFPREG(rs1
));
2987 gen_op_load_fpr_QT1(QFPREG(rs2
));
2988 gen_op_fcmpeq(rd
& 3);
2993 } else if (xop
== 0x2) {
2996 rs1
= GET_FIELD(insn
, 13, 17);
2998 // or %g0, x, y -> mov T0, x; mov y, T0
2999 if (IS_IMM
) { /* immediate */
3002 simm
= GET_FIELDs(insn
, 19, 31);
3003 r_const
= tcg_const_tl(simm
);
3004 gen_movl_TN_reg(rd
, r_const
);
3005 tcg_temp_free(r_const
);
3006 } else { /* register */
3007 rs2
= GET_FIELD(insn
, 27, 31);
3008 gen_movl_reg_TN(rs2
, cpu_dst
);
3009 gen_movl_TN_reg(rd
, cpu_dst
);
3012 cpu_src1
= get_src1(insn
, cpu_src1
);
3013 if (IS_IMM
) { /* immediate */
3014 simm
= GET_FIELDs(insn
, 19, 31);
3015 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
3016 gen_movl_TN_reg(rd
, cpu_dst
);
3017 } else { /* register */
3018 // or x, %g0, y -> mov T1, x; mov y, T1
3019 rs2
= GET_FIELD(insn
, 27, 31);
3021 gen_movl_reg_TN(rs2
, cpu_src2
);
3022 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3023 gen_movl_TN_reg(rd
, cpu_dst
);
3025 gen_movl_TN_reg(rd
, cpu_src1
);
3028 #ifdef TARGET_SPARC64
3029 } else if (xop
== 0x25) { /* sll, V9 sllx */
3030 cpu_src1
= get_src1(insn
, cpu_src1
);
3031 if (IS_IMM
) { /* immediate */
3032 simm
= GET_FIELDs(insn
, 20, 31);
3033 if (insn
& (1 << 12)) {
3034 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3036 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x1f);
3038 } else { /* register */
3039 rs2
= GET_FIELD(insn
, 27, 31);
3040 gen_movl_reg_TN(rs2
, cpu_src2
);
3041 if (insn
& (1 << 12)) {
3042 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3044 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3046 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3048 gen_movl_TN_reg(rd
, cpu_dst
);
3049 } else if (xop
== 0x26) { /* srl, V9 srlx */
3050 cpu_src1
= get_src1(insn
, cpu_src1
);
3051 if (IS_IMM
) { /* immediate */
3052 simm
= GET_FIELDs(insn
, 20, 31);
3053 if (insn
& (1 << 12)) {
3054 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3056 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3057 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
3059 } else { /* register */
3060 rs2
= GET_FIELD(insn
, 27, 31);
3061 gen_movl_reg_TN(rs2
, cpu_src2
);
3062 if (insn
& (1 << 12)) {
3063 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3064 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3066 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3067 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3068 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3071 gen_movl_TN_reg(rd
, cpu_dst
);
3072 } else if (xop
== 0x27) { /* sra, V9 srax */
3073 cpu_src1
= get_src1(insn
, cpu_src1
);
3074 if (IS_IMM
) { /* immediate */
3075 simm
= GET_FIELDs(insn
, 20, 31);
3076 if (insn
& (1 << 12)) {
3077 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3079 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3080 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3081 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
3083 } else { /* register */
3084 rs2
= GET_FIELD(insn
, 27, 31);
3085 gen_movl_reg_TN(rs2
, cpu_src2
);
3086 if (insn
& (1 << 12)) {
3087 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3088 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3090 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3091 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3092 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3093 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3096 gen_movl_TN_reg(rd
, cpu_dst
);
3098 } else if (xop
< 0x36) {
3100 cpu_src1
= get_src1(insn
, cpu_src1
);
3101 cpu_src2
= get_src2(insn
, cpu_src2
);
3102 switch (xop
& ~0x10) {
3105 simm
= GET_FIELDs(insn
, 19, 31);
3107 gen_op_addi_cc(cpu_dst
, cpu_src1
, simm
);
3108 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3109 dc
->cc_op
= CC_OP_ADD
;
3111 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
3115 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3116 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3117 dc
->cc_op
= CC_OP_ADD
;
3119 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3125 simm
= GET_FIELDs(insn
, 19, 31);
3126 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, simm
);
3128 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3131 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3132 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3133 dc
->cc_op
= CC_OP_LOGIC
;
3138 simm
= GET_FIELDs(insn
, 19, 31);
3139 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
3141 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3144 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3145 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3146 dc
->cc_op
= CC_OP_LOGIC
;
3151 simm
= GET_FIELDs(insn
, 19, 31);
3152 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, simm
);
3154 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3157 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3158 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3159 dc
->cc_op
= CC_OP_LOGIC
;
3164 simm
= GET_FIELDs(insn
, 19, 31);
3166 gen_op_subi_cc(cpu_dst
, cpu_src1
, simm
, dc
);
3168 tcg_gen_subi_tl(cpu_dst
, cpu_src1
, simm
);
3172 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3173 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_SUB
);
3174 dc
->cc_op
= CC_OP_SUB
;
3176 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3180 case 0x5: /* andn */
3182 simm
= GET_FIELDs(insn
, 19, 31);
3183 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, ~simm
);
3185 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3188 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3189 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3190 dc
->cc_op
= CC_OP_LOGIC
;
3195 simm
= GET_FIELDs(insn
, 19, 31);
3196 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, ~simm
);
3198 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3201 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3202 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3203 dc
->cc_op
= CC_OP_LOGIC
;
3206 case 0x7: /* xorn */
3208 simm
= GET_FIELDs(insn
, 19, 31);
3209 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, ~simm
);
3211 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
3212 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3215 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3216 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3217 dc
->cc_op
= CC_OP_LOGIC
;
3220 case 0x8: /* addx, V9 addc */
3221 gen_op_addx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3224 #ifdef TARGET_SPARC64
3225 case 0x9: /* V9 mulx */
3227 simm
= GET_FIELDs(insn
, 19, 31);
3228 tcg_gen_muli_i64(cpu_dst
, cpu_src1
, simm
);
3230 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3234 case 0xa: /* umul */
3235 CHECK_IU_FEATURE(dc
, MUL
);
3236 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3238 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3239 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3240 dc
->cc_op
= CC_OP_LOGIC
;
3243 case 0xb: /* smul */
3244 CHECK_IU_FEATURE(dc
, MUL
);
3245 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3247 tcg_gen_mov_tl(cpu_cc_dst
, cpu_dst
);
3248 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_LOGIC
);
3249 dc
->cc_op
= CC_OP_LOGIC
;
3252 case 0xc: /* subx, V9 subc */
3253 gen_op_subx_int(dc
, cpu_dst
, cpu_src1
, cpu_src2
,
3256 #ifdef TARGET_SPARC64
3257 case 0xd: /* V9 udivx */
3259 TCGv r_temp1
, r_temp2
;
3260 r_temp1
= tcg_temp_local_new();
3261 r_temp2
= tcg_temp_local_new();
3262 tcg_gen_mov_tl(r_temp1
, cpu_src1
);
3263 tcg_gen_mov_tl(r_temp2
, cpu_src2
);
3264 gen_trap_ifdivzero_tl(r_temp2
);
3265 tcg_gen_divu_i64(cpu_dst
, r_temp1
, r_temp2
);
3266 tcg_temp_free(r_temp1
);
3267 tcg_temp_free(r_temp2
);
3271 case 0xe: /* udiv */
3272 CHECK_IU_FEATURE(dc
, DIV
);
3274 gen_helper_udiv_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3275 dc
->cc_op
= CC_OP_DIV
;
3277 gen_helper_udiv(cpu_dst
, cpu_src1
, cpu_src2
);
3280 case 0xf: /* sdiv */
3281 CHECK_IU_FEATURE(dc
, DIV
);
3283 gen_helper_sdiv_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3284 dc
->cc_op
= CC_OP_DIV
;
3286 gen_helper_sdiv(cpu_dst
, cpu_src1
, cpu_src2
);
3292 gen_movl_TN_reg(rd
, cpu_dst
);
3294 cpu_src1
= get_src1(insn
, cpu_src1
);
3295 cpu_src2
= get_src2(insn
, cpu_src2
);
3297 case 0x20: /* taddcc */
3298 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3299 gen_movl_TN_reg(rd
, cpu_dst
);
3300 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADD
);
3301 dc
->cc_op
= CC_OP_TADD
;
3303 case 0x21: /* tsubcc */
3304 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3305 gen_movl_TN_reg(rd
, cpu_dst
);
3306 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUB
);
3307 dc
->cc_op
= CC_OP_TSUB
;
3309 case 0x22: /* taddcctv */
3310 save_state(dc
, cpu_cond
);
3311 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3312 gen_movl_TN_reg(rd
, cpu_dst
);
3313 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TADDTV
);
3314 dc
->cc_op
= CC_OP_TADDTV
;
3316 case 0x23: /* tsubcctv */
3317 save_state(dc
, cpu_cond
);
3318 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3319 gen_movl_TN_reg(rd
, cpu_dst
);
3320 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_TSUBTV
);
3321 dc
->cc_op
= CC_OP_TSUBTV
;
3323 case 0x24: /* mulscc */
3324 gen_helper_compute_psr();
3325 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3326 gen_movl_TN_reg(rd
, cpu_dst
);
3327 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_ADD
);
3328 dc
->cc_op
= CC_OP_ADD
;
3330 #ifndef TARGET_SPARC64
3331 case 0x25: /* sll */
3332 if (IS_IMM
) { /* immediate */
3333 simm
= GET_FIELDs(insn
, 20, 31);
3334 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3335 } else { /* register */
3336 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3337 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3339 gen_movl_TN_reg(rd
, cpu_dst
);
3341 case 0x26: /* srl */
3342 if (IS_IMM
) { /* immediate */
3343 simm
= GET_FIELDs(insn
, 20, 31);
3344 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3345 } else { /* register */
3346 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3347 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3349 gen_movl_TN_reg(rd
, cpu_dst
);
3351 case 0x27: /* sra */
3352 if (IS_IMM
) { /* immediate */
3353 simm
= GET_FIELDs(insn
, 20, 31);
3354 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3355 } else { /* register */
3356 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3357 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3359 gen_movl_TN_reg(rd
, cpu_dst
);
3366 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3367 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3369 #ifndef TARGET_SPARC64
3370 case 0x01 ... 0x0f: /* undefined in the
3374 case 0x10 ... 0x1f: /* implementation-dependent
3380 case 0x2: /* V9 wrccr */
3381 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3382 gen_helper_wrccr(cpu_dst
);
3383 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3384 dc
->cc_op
= CC_OP_FLAGS
;
3386 case 0x3: /* V9 wrasi */
3387 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3388 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, 0xff);
3389 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3391 case 0x6: /* V9 wrfprs */
3392 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3393 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3394 save_state(dc
, cpu_cond
);
3399 case 0xf: /* V9 sir, nop if user */
3400 #if !defined(CONFIG_USER_ONLY)
3401 if (supervisor(dc
)) {
3406 case 0x13: /* Graphics Status */
3407 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3409 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3411 case 0x14: /* Softint set */
3412 if (!supervisor(dc
))
3414 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3415 gen_helper_set_softint(cpu_tmp64
);
3417 case 0x15: /* Softint clear */
3418 if (!supervisor(dc
))
3420 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3421 gen_helper_clear_softint(cpu_tmp64
);
3423 case 0x16: /* Softint write */
3424 if (!supervisor(dc
))
3426 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3427 gen_helper_write_softint(cpu_tmp64
);
3429 case 0x17: /* Tick compare */
3430 #if !defined(CONFIG_USER_ONLY)
3431 if (!supervisor(dc
))
3437 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3439 r_tickptr
= tcg_temp_new_ptr();
3440 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3441 offsetof(CPUState
, tick
));
3442 gen_helper_tick_set_limit(r_tickptr
,
3444 tcg_temp_free_ptr(r_tickptr
);
3447 case 0x18: /* System tick */
3448 #if !defined(CONFIG_USER_ONLY)
3449 if (!supervisor(dc
))
3455 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3457 r_tickptr
= tcg_temp_new_ptr();
3458 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3459 offsetof(CPUState
, stick
));
3460 gen_helper_tick_set_count(r_tickptr
,
3462 tcg_temp_free_ptr(r_tickptr
);
3465 case 0x19: /* System tick compare */
3466 #if !defined(CONFIG_USER_ONLY)
3467 if (!supervisor(dc
))
3473 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3475 r_tickptr
= tcg_temp_new_ptr();
3476 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3477 offsetof(CPUState
, stick
));
3478 gen_helper_tick_set_limit(r_tickptr
,
3480 tcg_temp_free_ptr(r_tickptr
);
3484 case 0x10: /* Performance Control */
3485 case 0x11: /* Performance Instrumentation
3487 case 0x12: /* Dispatch Control */
3494 #if !defined(CONFIG_USER_ONLY)
3495 case 0x31: /* wrpsr, V9 saved, restored */
3497 if (!supervisor(dc
))
3499 #ifdef TARGET_SPARC64
3505 gen_helper_restored();
3507 case 2: /* UA2005 allclean */
3508 case 3: /* UA2005 otherw */
3509 case 4: /* UA2005 normalw */
3510 case 5: /* UA2005 invalw */
3516 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3517 gen_helper_wrpsr(cpu_dst
);
3518 tcg_gen_movi_i32(cpu_cc_op
, CC_OP_FLAGS
);
3519 dc
->cc_op
= CC_OP_FLAGS
;
3520 save_state(dc
, cpu_cond
);
3527 case 0x32: /* wrwim, V9 wrpr */
3529 if (!supervisor(dc
))
3531 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3532 #ifdef TARGET_SPARC64
3538 r_tsptr
= tcg_temp_new_ptr();
3539 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3540 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3541 offsetof(trap_state
, tpc
));
3542 tcg_temp_free_ptr(r_tsptr
);
3549 r_tsptr
= tcg_temp_new_ptr();
3550 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3551 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3552 offsetof(trap_state
, tnpc
));
3553 tcg_temp_free_ptr(r_tsptr
);
3560 r_tsptr
= tcg_temp_new_ptr();
3561 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3562 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3563 offsetof(trap_state
,
3565 tcg_temp_free_ptr(r_tsptr
);
3572 r_tsptr
= tcg_temp_new_ptr();
3573 gen_load_trap_state_at_tl(r_tsptr
, cpu_env
);
3574 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3575 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3576 offsetof(trap_state
, tt
));
3577 tcg_temp_free_ptr(r_tsptr
);
3584 r_tickptr
= tcg_temp_new_ptr();
3585 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3586 offsetof(CPUState
, tick
));
3587 gen_helper_tick_set_count(r_tickptr
,
3589 tcg_temp_free_ptr(r_tickptr
);
3593 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3597 TCGv r_tmp
= tcg_temp_local_new();
3599 tcg_gen_mov_tl(r_tmp
, cpu_tmp0
);
3600 save_state(dc
, cpu_cond
);
3601 gen_helper_wrpstate(r_tmp
);
3602 tcg_temp_free(r_tmp
);
3603 dc
->npc
= DYNAMIC_PC
;
3608 TCGv r_tmp
= tcg_temp_local_new();
3610 tcg_gen_mov_tl(r_tmp
, cpu_tmp0
);
3611 save_state(dc
, cpu_cond
);
3612 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_tmp
);
3613 tcg_temp_free(r_tmp
);
3614 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3615 offsetof(CPUSPARCState
, tl
));
3616 dc
->npc
= DYNAMIC_PC
;
3620 gen_helper_wrpil(cpu_tmp0
);
3623 gen_helper_wrcwp(cpu_tmp0
);
3626 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3627 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3628 offsetof(CPUSPARCState
,
3631 case 11: // canrestore
3632 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3633 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3634 offsetof(CPUSPARCState
,
3637 case 12: // cleanwin
3638 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3639 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3640 offsetof(CPUSPARCState
,
3643 case 13: // otherwin
3644 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3645 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3646 offsetof(CPUSPARCState
,
3650 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3651 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3652 offsetof(CPUSPARCState
,
3655 case 16: // UA2005 gl
3656 CHECK_IU_FEATURE(dc
, GL
);
3657 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3658 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3659 offsetof(CPUSPARCState
, gl
));
3661 case 26: // UA2005 strand status
3662 CHECK_IU_FEATURE(dc
, HYPV
);
3663 if (!hypervisor(dc
))
3665 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3671 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3672 if (dc
->def
->nwindows
!= 32)
3673 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3674 (1 << dc
->def
->nwindows
) - 1);
3675 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3679 case 0x33: /* wrtbr, UA2005 wrhpr */
3681 #ifndef TARGET_SPARC64
3682 if (!supervisor(dc
))
3684 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3686 CHECK_IU_FEATURE(dc
, HYPV
);
3687 if (!hypervisor(dc
))
3689 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3692 // XXX gen_op_wrhpstate();
3693 save_state(dc
, cpu_cond
);
3699 // XXX gen_op_wrhtstate();
3702 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3705 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3707 case 31: // hstick_cmpr
3711 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3712 r_tickptr
= tcg_temp_new_ptr();
3713 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3714 offsetof(CPUState
, hstick
));
3715 gen_helper_tick_set_limit(r_tickptr
,
3717 tcg_temp_free_ptr(r_tickptr
);
3720 case 6: // hver readonly
3728 #ifdef TARGET_SPARC64
3729 case 0x2c: /* V9 movcc */
3731 int cc
= GET_FIELD_SP(insn
, 11, 12);
3732 int cond
= GET_FIELD_SP(insn
, 14, 17);
3736 r_cond
= tcg_temp_new();
3737 if (insn
& (1 << 18)) {
3739 gen_cond(r_cond
, 0, cond
, dc
);
3741 gen_cond(r_cond
, 1, cond
, dc
);
3745 gen_fcond(r_cond
, cc
, cond
);
3748 l1
= gen_new_label();
3750 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3751 if (IS_IMM
) { /* immediate */
3754 simm
= GET_FIELD_SPs(insn
, 0, 10);
3755 r_const
= tcg_const_tl(simm
);
3756 gen_movl_TN_reg(rd
, r_const
);
3757 tcg_temp_free(r_const
);
3759 rs2
= GET_FIELD_SP(insn
, 0, 4);
3760 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3761 gen_movl_TN_reg(rd
, cpu_tmp0
);
3764 tcg_temp_free(r_cond
);
3767 case 0x2d: /* V9 sdivx */
3768 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3769 gen_movl_TN_reg(rd
, cpu_dst
);
3771 case 0x2e: /* V9 popc */
3773 cpu_src2
= get_src2(insn
, cpu_src2
);
3774 gen_helper_popc(cpu_dst
, cpu_src2
);
3775 gen_movl_TN_reg(rd
, cpu_dst
);
3777 case 0x2f: /* V9 movr */
3779 int cond
= GET_FIELD_SP(insn
, 10, 12);
3782 cpu_src1
= get_src1(insn
, cpu_src1
);
3784 l1
= gen_new_label();
3786 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3788 if (IS_IMM
) { /* immediate */
3791 simm
= GET_FIELD_SPs(insn
, 0, 9);
3792 r_const
= tcg_const_tl(simm
);
3793 gen_movl_TN_reg(rd
, r_const
);
3794 tcg_temp_free(r_const
);
3796 rs2
= GET_FIELD_SP(insn
, 0, 4);
3797 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3798 gen_movl_TN_reg(rd
, cpu_tmp0
);
3808 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3809 #ifdef TARGET_SPARC64
3810 int opf
= GET_FIELD_SP(insn
, 5, 13);
3811 rs1
= GET_FIELD(insn
, 13, 17);
3812 rs2
= GET_FIELD(insn
, 27, 31);
3813 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3817 case 0x000: /* VIS I edge8cc */
3818 case 0x001: /* VIS II edge8n */
3819 case 0x002: /* VIS I edge8lcc */
3820 case 0x003: /* VIS II edge8ln */
3821 case 0x004: /* VIS I edge16cc */
3822 case 0x005: /* VIS II edge16n */
3823 case 0x006: /* VIS I edge16lcc */
3824 case 0x007: /* VIS II edge16ln */
3825 case 0x008: /* VIS I edge32cc */
3826 case 0x009: /* VIS II edge32n */
3827 case 0x00a: /* VIS I edge32lcc */
3828 case 0x00b: /* VIS II edge32ln */
3831 case 0x010: /* VIS I array8 */
3832 CHECK_FPU_FEATURE(dc
, VIS1
);
3833 cpu_src1
= get_src1(insn
, cpu_src1
);
3834 gen_movl_reg_TN(rs2
, cpu_src2
);
3835 gen_helper_array8(cpu_dst
, cpu_env
, cpu_src1
, cpu_src2
);
3836 gen_movl_TN_reg(rd
, cpu_dst
);
3838 case 0x012: /* VIS I array16 */
3839 CHECK_FPU_FEATURE(dc
, VIS1
);
3840 cpu_src1
= get_src1(insn
, cpu_src1
);
3841 gen_movl_reg_TN(rs2
, cpu_src2
);
3842 gen_helper_array8(cpu_dst
, cpu_env
, cpu_src1
, cpu_src2
);
3843 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3844 gen_movl_TN_reg(rd
, cpu_dst
);
3846 case 0x014: /* VIS I array32 */
3847 CHECK_FPU_FEATURE(dc
, VIS1
);
3848 cpu_src1
= get_src1(insn
, cpu_src1
);
3849 gen_movl_reg_TN(rs2
, cpu_src2
);
3850 gen_helper_array8(cpu_dst
, cpu_env
, cpu_src1
, cpu_src2
);
3851 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3852 gen_movl_TN_reg(rd
, cpu_dst
);
3854 case 0x018: /* VIS I alignaddr */
3855 CHECK_FPU_FEATURE(dc
, VIS1
);
3856 cpu_src1
= get_src1(insn
, cpu_src1
);
3857 gen_movl_reg_TN(rs2
, cpu_src2
);
3858 gen_helper_alignaddr(cpu_dst
, cpu_env
, cpu_src1
, cpu_src2
);
3859 gen_movl_TN_reg(rd
, cpu_dst
);
3861 case 0x019: /* VIS II bmask */
3862 case 0x01a: /* VIS I alignaddrl */
3865 case 0x020: /* VIS I fcmple16 */
3866 CHECK_FPU_FEATURE(dc
, VIS1
);
3867 gen_op_load_fpr_DT0(DFPREG(rs1
));
3868 gen_op_load_fpr_DT1(DFPREG(rs2
));
3869 gen_helper_fcmple16(cpu_dst
, cpu_env
);
3870 gen_movl_TN_reg(rd
, cpu_dst
);
3872 case 0x022: /* VIS I fcmpne16 */
3873 CHECK_FPU_FEATURE(dc
, VIS1
);
3874 gen_op_load_fpr_DT0(DFPREG(rs1
));
3875 gen_op_load_fpr_DT1(DFPREG(rs2
));
3876 gen_helper_fcmpne16(cpu_dst
, cpu_env
);
3877 gen_movl_TN_reg(rd
, cpu_dst
);
3879 case 0x024: /* VIS I fcmple32 */
3880 CHECK_FPU_FEATURE(dc
, VIS1
);
3881 gen_op_load_fpr_DT0(DFPREG(rs1
));
3882 gen_op_load_fpr_DT1(DFPREG(rs2
));
3883 gen_helper_fcmple32(cpu_dst
, cpu_env
);
3884 gen_movl_TN_reg(rd
, cpu_dst
);
3886 case 0x026: /* VIS I fcmpne32 */
3887 CHECK_FPU_FEATURE(dc
, VIS1
);
3888 gen_op_load_fpr_DT0(DFPREG(rs1
));
3889 gen_op_load_fpr_DT1(DFPREG(rs2
));
3890 gen_helper_fcmpne32(cpu_dst
, cpu_env
);
3891 gen_movl_TN_reg(rd
, cpu_dst
);
3893 case 0x028: /* VIS I fcmpgt16 */
3894 CHECK_FPU_FEATURE(dc
, VIS1
);
3895 gen_op_load_fpr_DT0(DFPREG(rs1
));
3896 gen_op_load_fpr_DT1(DFPREG(rs2
));
3897 gen_helper_fcmpgt16(cpu_dst
, cpu_env
);
3898 gen_movl_TN_reg(rd
, cpu_dst
);
3900 case 0x02a: /* VIS I fcmpeq16 */
3901 CHECK_FPU_FEATURE(dc
, VIS1
);
3902 gen_op_load_fpr_DT0(DFPREG(rs1
));
3903 gen_op_load_fpr_DT1(DFPREG(rs2
));
3904 gen_helper_fcmpeq16(cpu_dst
, cpu_env
);
3905 gen_movl_TN_reg(rd
, cpu_dst
);
3907 case 0x02c: /* VIS I fcmpgt32 */
3908 CHECK_FPU_FEATURE(dc
, VIS1
);
3909 gen_op_load_fpr_DT0(DFPREG(rs1
));
3910 gen_op_load_fpr_DT1(DFPREG(rs2
));
3911 gen_helper_fcmpgt32(cpu_dst
, cpu_env
);
3912 gen_movl_TN_reg(rd
, cpu_dst
);
3914 case 0x02e: /* VIS I fcmpeq32 */
3915 CHECK_FPU_FEATURE(dc
, VIS1
);
3916 gen_op_load_fpr_DT0(DFPREG(rs1
));
3917 gen_op_load_fpr_DT1(DFPREG(rs2
));
3918 gen_helper_fcmpeq32(cpu_dst
, cpu_env
);
3919 gen_movl_TN_reg(rd
, cpu_dst
);
3921 case 0x031: /* VIS I fmul8x16 */
3922 CHECK_FPU_FEATURE(dc
, VIS1
);
3923 gen_op_load_fpr_DT0(DFPREG(rs1
));
3924 gen_op_load_fpr_DT1(DFPREG(rs2
));
3925 gen_helper_fmul8x16(cpu_env
);
3926 gen_op_store_DT0_fpr(DFPREG(rd
));
3927 gen_update_fprs_dirty(DFPREG(rd
));
3929 case 0x033: /* VIS I fmul8x16au */
3930 CHECK_FPU_FEATURE(dc
, VIS1
);
3931 gen_op_load_fpr_DT0(DFPREG(rs1
));
3932 gen_op_load_fpr_DT1(DFPREG(rs2
));
3933 gen_helper_fmul8x16au(cpu_env
);
3934 gen_op_store_DT0_fpr(DFPREG(rd
));
3935 gen_update_fprs_dirty(DFPREG(rd
));
3937 case 0x035: /* VIS I fmul8x16al */
3938 CHECK_FPU_FEATURE(dc
, VIS1
);
3939 gen_op_load_fpr_DT0(DFPREG(rs1
));
3940 gen_op_load_fpr_DT1(DFPREG(rs2
));
3941 gen_helper_fmul8x16al(cpu_env
);
3942 gen_op_store_DT0_fpr(DFPREG(rd
));
3943 gen_update_fprs_dirty(DFPREG(rd
));
3945 case 0x036: /* VIS I fmul8sux16 */
3946 CHECK_FPU_FEATURE(dc
, VIS1
);
3947 gen_op_load_fpr_DT0(DFPREG(rs1
));
3948 gen_op_load_fpr_DT1(DFPREG(rs2
));
3949 gen_helper_fmul8sux16(cpu_env
);
3950 gen_op_store_DT0_fpr(DFPREG(rd
));
3951 gen_update_fprs_dirty(DFPREG(rd
));
3953 case 0x037: /* VIS I fmul8ulx16 */
3954 CHECK_FPU_FEATURE(dc
, VIS1
);
3955 gen_op_load_fpr_DT0(DFPREG(rs1
));
3956 gen_op_load_fpr_DT1(DFPREG(rs2
));
3957 gen_helper_fmul8ulx16(cpu_env
);
3958 gen_op_store_DT0_fpr(DFPREG(rd
));
3959 gen_update_fprs_dirty(DFPREG(rd
));
3961 case 0x038: /* VIS I fmuld8sux16 */
3962 CHECK_FPU_FEATURE(dc
, VIS1
);
3963 gen_op_load_fpr_DT0(DFPREG(rs1
));
3964 gen_op_load_fpr_DT1(DFPREG(rs2
));
3965 gen_helper_fmuld8sux16(cpu_env
);
3966 gen_op_store_DT0_fpr(DFPREG(rd
));
3967 gen_update_fprs_dirty(DFPREG(rd
));
3969 case 0x039: /* VIS I fmuld8ulx16 */
3970 CHECK_FPU_FEATURE(dc
, VIS1
);
3971 gen_op_load_fpr_DT0(DFPREG(rs1
));
3972 gen_op_load_fpr_DT1(DFPREG(rs2
));
3973 gen_helper_fmuld8ulx16(cpu_env
);
3974 gen_op_store_DT0_fpr(DFPREG(rd
));
3975 gen_update_fprs_dirty(DFPREG(rd
));
3977 case 0x03a: /* VIS I fpack32 */
3978 case 0x03b: /* VIS I fpack16 */
3979 case 0x03d: /* VIS I fpackfix */
3980 case 0x03e: /* VIS I pdist */
3983 case 0x048: /* VIS I faligndata */
3984 CHECK_FPU_FEATURE(dc
, VIS1
);
3985 gen_op_load_fpr_DT0(DFPREG(rs1
));
3986 gen_op_load_fpr_DT1(DFPREG(rs2
));
3987 gen_helper_faligndata(cpu_env
);
3988 gen_op_store_DT0_fpr(DFPREG(rd
));
3989 gen_update_fprs_dirty(DFPREG(rd
));
3991 case 0x04b: /* VIS I fpmerge */
3992 CHECK_FPU_FEATURE(dc
, VIS1
);
3993 gen_op_load_fpr_DT0(DFPREG(rs1
));
3994 gen_op_load_fpr_DT1(DFPREG(rs2
));
3995 gen_helper_fpmerge(cpu_env
);
3996 gen_op_store_DT0_fpr(DFPREG(rd
));
3997 gen_update_fprs_dirty(DFPREG(rd
));
3999 case 0x04c: /* VIS II bshuffle */
4002 case 0x04d: /* VIS I fexpand */
4003 CHECK_FPU_FEATURE(dc
, VIS1
);
4004 gen_op_load_fpr_DT0(DFPREG(rs1
));
4005 gen_op_load_fpr_DT1(DFPREG(rs2
));
4006 gen_helper_fexpand(cpu_env
);
4007 gen_op_store_DT0_fpr(DFPREG(rd
));
4008 gen_update_fprs_dirty(DFPREG(rd
));
4010 case 0x050: /* VIS I fpadd16 */
4011 CHECK_FPU_FEATURE(dc
, VIS1
);
4012 gen_op_load_fpr_DT0(DFPREG(rs1
));
4013 gen_op_load_fpr_DT1(DFPREG(rs2
));
4014 gen_helper_fpadd16(cpu_env
);
4015 gen_op_store_DT0_fpr(DFPREG(rd
));
4016 gen_update_fprs_dirty(DFPREG(rd
));
4018 case 0x051: /* VIS I fpadd16s */
4019 CHECK_FPU_FEATURE(dc
, VIS1
);
4020 gen_helper_fpadd16s(cpu_env
, cpu_fpr
[rd
],
4021 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4022 gen_update_fprs_dirty(rd
);
4024 case 0x052: /* VIS I fpadd32 */
4025 CHECK_FPU_FEATURE(dc
, VIS1
);
4026 gen_op_load_fpr_DT0(DFPREG(rs1
));
4027 gen_op_load_fpr_DT1(DFPREG(rs2
));
4028 gen_helper_fpadd32(cpu_env
);
4029 gen_op_store_DT0_fpr(DFPREG(rd
));
4030 gen_update_fprs_dirty(DFPREG(rd
));
4032 case 0x053: /* VIS I fpadd32s */
4033 CHECK_FPU_FEATURE(dc
, VIS1
);
4034 gen_helper_fpadd32s(cpu_env
, cpu_fpr
[rd
],
4035 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4036 gen_update_fprs_dirty(rd
);
4038 case 0x054: /* VIS I fpsub16 */
4039 CHECK_FPU_FEATURE(dc
, VIS1
);
4040 gen_op_load_fpr_DT0(DFPREG(rs1
));
4041 gen_op_load_fpr_DT1(DFPREG(rs2
));
4042 gen_helper_fpsub16(cpu_env
);
4043 gen_op_store_DT0_fpr(DFPREG(rd
));
4044 gen_update_fprs_dirty(DFPREG(rd
));
4046 case 0x055: /* VIS I fpsub16s */
4047 CHECK_FPU_FEATURE(dc
, VIS1
);
4048 gen_helper_fpsub16s(cpu_env
, cpu_fpr
[rd
],
4049 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4050 gen_update_fprs_dirty(rd
);
4052 case 0x056: /* VIS I fpsub32 */
4053 CHECK_FPU_FEATURE(dc
, VIS1
);
4054 gen_op_load_fpr_DT0(DFPREG(rs1
));
4055 gen_op_load_fpr_DT1(DFPREG(rs2
));
4056 gen_helper_fpsub32(cpu_env
);
4057 gen_op_store_DT0_fpr(DFPREG(rd
));
4058 gen_update_fprs_dirty(DFPREG(rd
));
4060 case 0x057: /* VIS I fpsub32s */
4061 CHECK_FPU_FEATURE(dc
, VIS1
);
4062 gen_helper_fpsub32s(cpu_env
, cpu_fpr
[rd
],
4063 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4064 gen_update_fprs_dirty(rd
);
4066 case 0x060: /* VIS I fzero */
4067 CHECK_FPU_FEATURE(dc
, VIS1
);
4068 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
4069 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
4070 gen_update_fprs_dirty(DFPREG(rd
));
4072 case 0x061: /* VIS I fzeros */
4073 CHECK_FPU_FEATURE(dc
, VIS1
);
4074 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
4075 gen_update_fprs_dirty(rd
);
4077 case 0x062: /* VIS I fnor */
4078 CHECK_FPU_FEATURE(dc
, VIS1
);
4079 tcg_gen_nor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4080 cpu_fpr
[DFPREG(rs2
)]);
4081 tcg_gen_nor_i32(cpu_fpr
[DFPREG(rd
) + 1],
4082 cpu_fpr
[DFPREG(rs1
) + 1],
4083 cpu_fpr
[DFPREG(rs2
) + 1]);
4084 gen_update_fprs_dirty(DFPREG(rd
));
4086 case 0x063: /* VIS I fnors */
4087 CHECK_FPU_FEATURE(dc
, VIS1
);
4088 tcg_gen_nor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4089 gen_update_fprs_dirty(rd
);
4091 case 0x064: /* VIS I fandnot2 */
4092 CHECK_FPU_FEATURE(dc
, VIS1
);
4093 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4094 cpu_fpr
[DFPREG(rs2
)]);
4095 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4096 cpu_fpr
[DFPREG(rs1
) + 1],
4097 cpu_fpr
[DFPREG(rs2
) + 1]);
4098 gen_update_fprs_dirty(DFPREG(rd
));
4100 case 0x065: /* VIS I fandnot2s */
4101 CHECK_FPU_FEATURE(dc
, VIS1
);
4102 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4103 gen_update_fprs_dirty(rd
);
4105 case 0x066: /* VIS I fnot2 */
4106 CHECK_FPU_FEATURE(dc
, VIS1
);
4107 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
4108 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
4109 cpu_fpr
[DFPREG(rs2
) + 1]);
4110 gen_update_fprs_dirty(DFPREG(rd
));
4112 case 0x067: /* VIS I fnot2s */
4113 CHECK_FPU_FEATURE(dc
, VIS1
);
4114 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4115 gen_update_fprs_dirty(rd
);
4117 case 0x068: /* VIS I fandnot1 */
4118 CHECK_FPU_FEATURE(dc
, VIS1
);
4119 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4120 cpu_fpr
[DFPREG(rs1
)]);
4121 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4122 cpu_fpr
[DFPREG(rs2
) + 1],
4123 cpu_fpr
[DFPREG(rs1
) + 1]);
4124 gen_update_fprs_dirty(DFPREG(rd
));
4126 case 0x069: /* VIS I fandnot1s */
4127 CHECK_FPU_FEATURE(dc
, VIS1
);
4128 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4129 gen_update_fprs_dirty(rd
);
4131 case 0x06a: /* VIS I fnot1 */
4132 CHECK_FPU_FEATURE(dc
, VIS1
);
4133 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4134 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
4135 cpu_fpr
[DFPREG(rs1
) + 1]);
4136 gen_update_fprs_dirty(DFPREG(rd
));
4138 case 0x06b: /* VIS I fnot1s */
4139 CHECK_FPU_FEATURE(dc
, VIS1
);
4140 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4141 gen_update_fprs_dirty(rd
);
4143 case 0x06c: /* VIS I fxor */
4144 CHECK_FPU_FEATURE(dc
, VIS1
);
4145 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4146 cpu_fpr
[DFPREG(rs2
)]);
4147 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
4148 cpu_fpr
[DFPREG(rs1
) + 1],
4149 cpu_fpr
[DFPREG(rs2
) + 1]);
4150 gen_update_fprs_dirty(DFPREG(rd
));
4152 case 0x06d: /* VIS I fxors */
4153 CHECK_FPU_FEATURE(dc
, VIS1
);
4154 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4155 gen_update_fprs_dirty(rd
);
4157 case 0x06e: /* VIS I fnand */
4158 CHECK_FPU_FEATURE(dc
, VIS1
);
4159 tcg_gen_nand_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4160 cpu_fpr
[DFPREG(rs2
)]);
4161 tcg_gen_nand_i32(cpu_fpr
[DFPREG(rd
) + 1],
4162 cpu_fpr
[DFPREG(rs1
) + 1],
4163 cpu_fpr
[DFPREG(rs2
) + 1]);
4164 gen_update_fprs_dirty(DFPREG(rd
));
4166 case 0x06f: /* VIS I fnands */
4167 CHECK_FPU_FEATURE(dc
, VIS1
);
4168 tcg_gen_nand_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4169 gen_update_fprs_dirty(rd
);
4171 case 0x070: /* VIS I fand */
4172 CHECK_FPU_FEATURE(dc
, VIS1
);
4173 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4174 cpu_fpr
[DFPREG(rs2
)]);
4175 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
4176 cpu_fpr
[DFPREG(rs1
) + 1],
4177 cpu_fpr
[DFPREG(rs2
) + 1]);
4178 gen_update_fprs_dirty(DFPREG(rd
));
4180 case 0x071: /* VIS I fands */
4181 CHECK_FPU_FEATURE(dc
, VIS1
);
4182 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4183 gen_update_fprs_dirty(rd
);
4185 case 0x072: /* VIS I fxnor */
4186 CHECK_FPU_FEATURE(dc
, VIS1
);
4187 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
4188 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4189 cpu_fpr
[DFPREG(rs1
)]);
4190 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
4191 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4192 cpu_fpr
[DFPREG(rs1
) + 1]);
4193 gen_update_fprs_dirty(DFPREG(rd
));
4195 case 0x073: /* VIS I fxnors */
4196 CHECK_FPU_FEATURE(dc
, VIS1
);
4197 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
4198 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4199 gen_update_fprs_dirty(rd
);
4201 case 0x074: /* VIS I fsrc1 */
4202 CHECK_FPU_FEATURE(dc
, VIS1
);
4203 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4204 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
4205 cpu_fpr
[DFPREG(rs1
) + 1]);
4206 gen_update_fprs_dirty(DFPREG(rd
));
4208 case 0x075: /* VIS I fsrc1s */
4209 CHECK_FPU_FEATURE(dc
, VIS1
);
4210 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4211 gen_update_fprs_dirty(rd
);
4213 case 0x076: /* VIS I fornot2 */
4214 CHECK_FPU_FEATURE(dc
, VIS1
);
4215 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4216 cpu_fpr
[DFPREG(rs2
)]);
4217 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4218 cpu_fpr
[DFPREG(rs1
) + 1],
4219 cpu_fpr
[DFPREG(rs2
) + 1]);
4220 gen_update_fprs_dirty(DFPREG(rd
));
4222 case 0x077: /* VIS I fornot2s */
4223 CHECK_FPU_FEATURE(dc
, VIS1
);
4224 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4225 gen_update_fprs_dirty(rd
);
4227 case 0x078: /* VIS I fsrc2 */
4228 CHECK_FPU_FEATURE(dc
, VIS1
);
4229 gen_op_load_fpr_DT0(DFPREG(rs2
));
4230 gen_op_store_DT0_fpr(DFPREG(rd
));
4231 gen_update_fprs_dirty(DFPREG(rd
));
4233 case 0x079: /* VIS I fsrc2s */
4234 CHECK_FPU_FEATURE(dc
, VIS1
);
4235 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4236 gen_update_fprs_dirty(rd
);
4238 case 0x07a: /* VIS I fornot1 */
4239 CHECK_FPU_FEATURE(dc
, VIS1
);
4240 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4241 cpu_fpr
[DFPREG(rs1
)]);
4242 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4243 cpu_fpr
[DFPREG(rs2
) + 1],
4244 cpu_fpr
[DFPREG(rs1
) + 1]);
4245 gen_update_fprs_dirty(DFPREG(rd
));
4247 case 0x07b: /* VIS I fornot1s */
4248 CHECK_FPU_FEATURE(dc
, VIS1
);
4249 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4250 gen_update_fprs_dirty(rd
);
4252 case 0x07c: /* VIS I for */
4253 CHECK_FPU_FEATURE(dc
, VIS1
);
4254 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4255 cpu_fpr
[DFPREG(rs2
)]);
4256 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
4257 cpu_fpr
[DFPREG(rs1
) + 1],
4258 cpu_fpr
[DFPREG(rs2
) + 1]);
4259 gen_update_fprs_dirty(DFPREG(rd
));
4261 case 0x07d: /* VIS I fors */
4262 CHECK_FPU_FEATURE(dc
, VIS1
);
4263 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4264 gen_update_fprs_dirty(rd
);
4266 case 0x07e: /* VIS I fone */
4267 CHECK_FPU_FEATURE(dc
, VIS1
);
4268 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
4269 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
4270 gen_update_fprs_dirty(DFPREG(rd
));
4272 case 0x07f: /* VIS I fones */
4273 CHECK_FPU_FEATURE(dc
, VIS1
);
4274 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
4275 gen_update_fprs_dirty(rd
);
4277 case 0x080: /* VIS I shutdown */
4278 case 0x081: /* VIS II siam */
4287 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4288 #ifdef TARGET_SPARC64
4293 #ifdef TARGET_SPARC64
4294 } else if (xop
== 0x39) { /* V9 return */
4297 save_state(dc
, cpu_cond
);
4298 cpu_src1
= get_src1(insn
, cpu_src1
);
4299 if (IS_IMM
) { /* immediate */
4300 simm
= GET_FIELDs(insn
, 19, 31);
4301 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4302 } else { /* register */
4303 rs2
= GET_FIELD(insn
, 27, 31);
4305 gen_movl_reg_TN(rs2
, cpu_src2
);
4306 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4308 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4310 gen_helper_restore();
4311 gen_mov_pc_npc(dc
, cpu_cond
);
4312 r_const
= tcg_const_i32(3);
4313 gen_helper_check_align(cpu_dst
, r_const
);
4314 tcg_temp_free_i32(r_const
);
4315 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4316 dc
->npc
= DYNAMIC_PC
;
4320 cpu_src1
= get_src1(insn
, cpu_src1
);
4321 if (IS_IMM
) { /* immediate */
4322 simm
= GET_FIELDs(insn
, 19, 31);
4323 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4324 } else { /* register */
4325 rs2
= GET_FIELD(insn
, 27, 31);
4327 gen_movl_reg_TN(rs2
, cpu_src2
);
4328 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4330 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4333 case 0x38: /* jmpl */
4338 r_pc
= tcg_const_tl(dc
->pc
);
4339 gen_movl_TN_reg(rd
, r_pc
);
4340 tcg_temp_free(r_pc
);
4341 gen_mov_pc_npc(dc
, cpu_cond
);
4342 r_const
= tcg_const_i32(3);
4343 gen_helper_check_align(cpu_dst
, r_const
);
4344 tcg_temp_free_i32(r_const
);
4345 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4346 dc
->npc
= DYNAMIC_PC
;
4349 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4350 case 0x39: /* rett, V9 return */
4354 if (!supervisor(dc
))
4356 gen_mov_pc_npc(dc
, cpu_cond
);
4357 r_const
= tcg_const_i32(3);
4358 gen_helper_check_align(cpu_dst
, r_const
);
4359 tcg_temp_free_i32(r_const
);
4360 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4361 dc
->npc
= DYNAMIC_PC
;
4366 case 0x3b: /* flush */
4367 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4371 case 0x3c: /* save */
4372 save_state(dc
, cpu_cond
);
4374 gen_movl_TN_reg(rd
, cpu_dst
);
4376 case 0x3d: /* restore */
4377 save_state(dc
, cpu_cond
);
4378 gen_helper_restore();
4379 gen_movl_TN_reg(rd
, cpu_dst
);
4381 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4382 case 0x3e: /* V9 done/retry */
4386 if (!supervisor(dc
))
4388 dc
->npc
= DYNAMIC_PC
;
4389 dc
->pc
= DYNAMIC_PC
;
4393 if (!supervisor(dc
))
4395 dc
->npc
= DYNAMIC_PC
;
4396 dc
->pc
= DYNAMIC_PC
;
4412 case 3: /* load/store instructions */
4414 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4416 /* flush pending conditional evaluations before exposing
4418 if (dc
->cc_op
!= CC_OP_FLAGS
) {
4419 dc
->cc_op
= CC_OP_FLAGS
;
4420 gen_helper_compute_psr();
4422 cpu_src1
= get_src1(insn
, cpu_src1
);
4423 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4424 rs2
= GET_FIELD(insn
, 27, 31);
4425 gen_movl_reg_TN(rs2
, cpu_src2
);
4426 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4427 } else if (IS_IMM
) { /* immediate */
4428 simm
= GET_FIELDs(insn
, 19, 31);
4429 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, simm
);
4430 } else { /* register */
4431 rs2
= GET_FIELD(insn
, 27, 31);
4433 gen_movl_reg_TN(rs2
, cpu_src2
);
4434 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4436 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4438 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4439 (xop
> 0x17 && xop
<= 0x1d ) ||
4440 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4442 case 0x0: /* ld, V9 lduw, load unsigned word */
4443 gen_address_mask(dc
, cpu_addr
);
4444 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4446 case 0x1: /* ldub, load unsigned byte */
4447 gen_address_mask(dc
, cpu_addr
);
4448 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4450 case 0x2: /* lduh, load unsigned halfword */
4451 gen_address_mask(dc
, cpu_addr
);
4452 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4454 case 0x3: /* ldd, load double word */
4460 save_state(dc
, cpu_cond
);
4461 r_const
= tcg_const_i32(7);
4462 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4463 tcg_temp_free_i32(r_const
);
4464 gen_address_mask(dc
, cpu_addr
);
4465 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4466 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4467 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4468 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4469 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4470 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4471 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4474 case 0x9: /* ldsb, load signed byte */
4475 gen_address_mask(dc
, cpu_addr
);
4476 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4478 case 0xa: /* ldsh, load signed halfword */
4479 gen_address_mask(dc
, cpu_addr
);
4480 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4482 case 0xd: /* ldstub -- XXX: should be atomically */
4486 gen_address_mask(dc
, cpu_addr
);
4487 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4488 r_const
= tcg_const_tl(0xff);
4489 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4490 tcg_temp_free(r_const
);
4493 case 0x0f: /* swap, swap register with memory. Also
4495 CHECK_IU_FEATURE(dc
, SWAP
);
4496 gen_movl_reg_TN(rd
, cpu_val
);
4497 gen_address_mask(dc
, cpu_addr
);
4498 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4499 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4500 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4502 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4503 case 0x10: /* lda, V9 lduwa, load word alternate */
4504 #ifndef TARGET_SPARC64
4507 if (!supervisor(dc
))
4510 save_state(dc
, cpu_cond
);
4511 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4513 case 0x11: /* lduba, load unsigned byte alternate */
4514 #ifndef TARGET_SPARC64
4517 if (!supervisor(dc
))
4520 save_state(dc
, cpu_cond
);
4521 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4523 case 0x12: /* lduha, load unsigned halfword alternate */
4524 #ifndef TARGET_SPARC64
4527 if (!supervisor(dc
))
4530 save_state(dc
, cpu_cond
);
4531 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4533 case 0x13: /* ldda, load double word alternate */
4534 #ifndef TARGET_SPARC64
4537 if (!supervisor(dc
))
4542 save_state(dc
, cpu_cond
);
4543 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4545 case 0x19: /* ldsba, load signed byte alternate */
4546 #ifndef TARGET_SPARC64
4549 if (!supervisor(dc
))
4552 save_state(dc
, cpu_cond
);
4553 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4555 case 0x1a: /* ldsha, load signed halfword alternate */
4556 #ifndef TARGET_SPARC64
4559 if (!supervisor(dc
))
4562 save_state(dc
, cpu_cond
);
4563 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4565 case 0x1d: /* ldstuba -- XXX: should be atomically */
4566 #ifndef TARGET_SPARC64
4569 if (!supervisor(dc
))
4572 save_state(dc
, cpu_cond
);
4573 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4575 case 0x1f: /* swapa, swap reg with alt. memory. Also
4577 CHECK_IU_FEATURE(dc
, SWAP
);
4578 #ifndef TARGET_SPARC64
4581 if (!supervisor(dc
))
4584 save_state(dc
, cpu_cond
);
4585 gen_movl_reg_TN(rd
, cpu_val
);
4586 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4589 #ifndef TARGET_SPARC64
4590 case 0x30: /* ldc */
4591 case 0x31: /* ldcsr */
4592 case 0x33: /* lddc */
4596 #ifdef TARGET_SPARC64
4597 case 0x08: /* V9 ldsw */
4598 gen_address_mask(dc
, cpu_addr
);
4599 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4601 case 0x0b: /* V9 ldx */
4602 gen_address_mask(dc
, cpu_addr
);
4603 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4605 case 0x18: /* V9 ldswa */
4606 save_state(dc
, cpu_cond
);
4607 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4609 case 0x1b: /* V9 ldxa */
4610 save_state(dc
, cpu_cond
);
4611 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4613 case 0x2d: /* V9 prefetch, no effect */
4615 case 0x30: /* V9 ldfa */
4616 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4619 save_state(dc
, cpu_cond
);
4620 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4621 gen_update_fprs_dirty(rd
);
4623 case 0x33: /* V9 lddfa */
4624 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4627 save_state(dc
, cpu_cond
);
4628 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4629 gen_update_fprs_dirty(DFPREG(rd
));
4631 case 0x3d: /* V9 prefetcha, no effect */
4633 case 0x32: /* V9 ldqfa */
4634 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4635 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4638 save_state(dc
, cpu_cond
);
4639 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4640 gen_update_fprs_dirty(QFPREG(rd
));
4646 gen_movl_TN_reg(rd
, cpu_val
);
4647 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4650 } else if (xop
>= 0x20 && xop
< 0x24) {
4651 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4653 save_state(dc
, cpu_cond
);
4655 case 0x20: /* ldf, load fpreg */
4656 gen_address_mask(dc
, cpu_addr
);
4657 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4658 tcg_gen_trunc_tl_i32(cpu_fpr
[rd
], cpu_tmp0
);
4659 gen_update_fprs_dirty(rd
);
4661 case 0x21: /* ldfsr, V9 ldxfsr */
4662 #ifdef TARGET_SPARC64
4663 gen_address_mask(dc
, cpu_addr
);
4665 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4666 gen_helper_ldxfsr(cpu_env
, cpu_tmp64
);
4668 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4669 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
4670 gen_helper_ldfsr(cpu_env
, cpu_tmp32
);
4674 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4675 gen_helper_ldfsr(cpu_env
, cpu_tmp32
);
4679 case 0x22: /* ldqf, load quad fpreg */
4683 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4684 r_const
= tcg_const_i32(dc
->mem_idx
);
4685 gen_address_mask(dc
, cpu_addr
);
4686 gen_helper_ldqf(cpu_addr
, r_const
);
4687 tcg_temp_free_i32(r_const
);
4688 gen_op_store_QT0_fpr(QFPREG(rd
));
4689 gen_update_fprs_dirty(QFPREG(rd
));
4692 case 0x23: /* lddf, load double fpreg */
4696 r_const
= tcg_const_i32(dc
->mem_idx
);
4697 gen_address_mask(dc
, cpu_addr
);
4698 gen_helper_lddf(cpu_addr
, r_const
);
4699 tcg_temp_free_i32(r_const
);
4700 gen_op_store_DT0_fpr(DFPREG(rd
));
4701 gen_update_fprs_dirty(DFPREG(rd
));
4707 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) ||
4708 xop
== 0xe || xop
== 0x1e) {
4709 gen_movl_reg_TN(rd
, cpu_val
);
4711 case 0x4: /* st, store word */
4712 gen_address_mask(dc
, cpu_addr
);
4713 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4715 case 0x5: /* stb, store byte */
4716 gen_address_mask(dc
, cpu_addr
);
4717 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4719 case 0x6: /* sth, store halfword */
4720 gen_address_mask(dc
, cpu_addr
);
4721 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4723 case 0x7: /* std, store double word */
4729 save_state(dc
, cpu_cond
);
4730 gen_address_mask(dc
, cpu_addr
);
4731 r_const
= tcg_const_i32(7);
4732 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4733 tcg_temp_free_i32(r_const
);
4734 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4735 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4736 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4739 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4740 case 0x14: /* sta, V9 stwa, store word alternate */
4741 #ifndef TARGET_SPARC64
4744 if (!supervisor(dc
))
4747 save_state(dc
, cpu_cond
);
4748 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4749 dc
->npc
= DYNAMIC_PC
;
4751 case 0x15: /* stba, store byte alternate */
4752 #ifndef TARGET_SPARC64
4755 if (!supervisor(dc
))
4758 save_state(dc
, cpu_cond
);
4759 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4760 dc
->npc
= DYNAMIC_PC
;
4762 case 0x16: /* stha, store halfword alternate */
4763 #ifndef TARGET_SPARC64
4766 if (!supervisor(dc
))
4769 save_state(dc
, cpu_cond
);
4770 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4771 dc
->npc
= DYNAMIC_PC
;
4773 case 0x17: /* stda, store double word alternate */
4774 #ifndef TARGET_SPARC64
4777 if (!supervisor(dc
))
4783 save_state(dc
, cpu_cond
);
4784 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4788 #ifdef TARGET_SPARC64
4789 case 0x0e: /* V9 stx */
4790 gen_address_mask(dc
, cpu_addr
);
4791 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4793 case 0x1e: /* V9 stxa */
4794 save_state(dc
, cpu_cond
);
4795 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4796 dc
->npc
= DYNAMIC_PC
;
4802 } else if (xop
> 0x23 && xop
< 0x28) {
4803 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4805 save_state(dc
, cpu_cond
);
4807 case 0x24: /* stf, store fpreg */
4808 gen_address_mask(dc
, cpu_addr
);
4809 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4810 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4812 case 0x25: /* stfsr, V9 stxfsr */
4813 #ifdef TARGET_SPARC64
4814 gen_address_mask(dc
, cpu_addr
);
4815 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4817 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4819 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4821 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4822 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4826 #ifdef TARGET_SPARC64
4827 /* V9 stqf, store quad fpreg */
4831 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4832 gen_op_load_fpr_QT0(QFPREG(rd
));
4833 r_const
= tcg_const_i32(dc
->mem_idx
);
4834 gen_address_mask(dc
, cpu_addr
);
4835 gen_helper_stqf(cpu_addr
, r_const
);
4836 tcg_temp_free_i32(r_const
);
4839 #else /* !TARGET_SPARC64 */
4840 /* stdfq, store floating point queue */
4841 #if defined(CONFIG_USER_ONLY)
4844 if (!supervisor(dc
))
4846 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4851 case 0x27: /* stdf, store double fpreg */
4855 gen_op_load_fpr_DT0(DFPREG(rd
));
4856 r_const
= tcg_const_i32(dc
->mem_idx
);
4857 gen_address_mask(dc
, cpu_addr
);
4858 gen_helper_stdf(cpu_addr
, r_const
);
4859 tcg_temp_free_i32(r_const
);
4865 } else if (xop
> 0x33 && xop
< 0x3f) {
4866 save_state(dc
, cpu_cond
);
4868 #ifdef TARGET_SPARC64
4869 case 0x34: /* V9 stfa */
4870 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4873 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4875 case 0x36: /* V9 stqfa */
4879 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4880 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4883 r_const
= tcg_const_i32(7);
4884 gen_helper_check_align(cpu_addr
, r_const
);
4885 tcg_temp_free_i32(r_const
);
4886 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4889 case 0x37: /* V9 stdfa */
4890 if (gen_trap_ifnofpu(dc
, cpu_cond
)) {
4893 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4895 case 0x3c: /* V9 casa */
4896 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4897 gen_movl_TN_reg(rd
, cpu_val
);
4899 case 0x3e: /* V9 casxa */
4900 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4901 gen_movl_TN_reg(rd
, cpu_val
);
4904 case 0x34: /* stc */
4905 case 0x35: /* stcsr */
4906 case 0x36: /* stdcq */
4907 case 0x37: /* stdc */
4918 /* default case for non jump instructions */
4919 if (dc
->npc
== DYNAMIC_PC
) {
4920 dc
->pc
= DYNAMIC_PC
;
4922 } else if (dc
->npc
== JUMP_PC
) {
4923 /* we can do a static jump */
4924 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4928 dc
->npc
= dc
->npc
+ 4;
4936 save_state(dc
, cpu_cond
);
4937 r_const
= tcg_const_i32(TT_ILL_INSN
);
4938 gen_helper_raise_exception(cpu_env
, r_const
);
4939 tcg_temp_free_i32(r_const
);
4947 save_state(dc
, cpu_cond
);
4948 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4949 gen_helper_raise_exception(cpu_env
, r_const
);
4950 tcg_temp_free_i32(r_const
);
4954 #if !defined(CONFIG_USER_ONLY)
4959 save_state(dc
, cpu_cond
);
4960 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4961 gen_helper_raise_exception(cpu_env
, r_const
);
4962 tcg_temp_free_i32(r_const
);
4968 save_state(dc
, cpu_cond
);
4969 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4972 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4974 save_state(dc
, cpu_cond
);
4975 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4979 #ifndef TARGET_SPARC64
4984 save_state(dc
, cpu_cond
);
4985 r_const
= tcg_const_i32(TT_NCP_INSN
);
4986 gen_helper_raise_exception(cpu_env
, r_const
);
4987 tcg_temp_free(r_const
);
4993 tcg_temp_free(cpu_tmp1
);
4994 tcg_temp_free(cpu_tmp2
);
4997 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4998 int spc
, CPUSPARCState
*env
)
5000 target_ulong pc_start
, last_pc
;
5001 uint16_t *gen_opc_end
;
5002 DisasContext dc1
, *dc
= &dc1
;
5008 memset(dc
, 0, sizeof(DisasContext
));
5013 dc
->npc
= (target_ulong
) tb
->cs_base
;
5014 dc
->cc_op
= CC_OP_DYNAMIC
;
5015 dc
->mem_idx
= cpu_mmu_index(env
);
5017 dc
->fpu_enabled
= tb_fpu_enabled(tb
->flags
);
5018 dc
->address_mask_32bit
= tb_am_enabled(tb
->flags
);
5019 dc
->singlestep
= (env
->singlestep_enabled
|| singlestep
);
5020 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
5022 cpu_tmp0
= tcg_temp_new();
5023 cpu_tmp32
= tcg_temp_new_i32();
5024 cpu_tmp64
= tcg_temp_new_i64();
5026 cpu_dst
= tcg_temp_local_new();
5029 cpu_val
= tcg_temp_local_new();
5030 cpu_addr
= tcg_temp_local_new();
5033 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
5035 max_insns
= CF_COUNT_MASK
;
5038 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
5039 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
5040 if (bp
->pc
== dc
->pc
) {
5041 if (dc
->pc
!= pc_start
)
5042 save_state(dc
, cpu_cond
);
5043 gen_helper_debug(cpu_env
);
5051 qemu_log("Search PC...\n");
5052 j
= gen_opc_ptr
- gen_opc_buf
;
5056 gen_opc_instr_start
[lj
++] = 0;
5057 gen_opc_pc
[lj
] = dc
->pc
;
5058 gen_opc_npc
[lj
] = dc
->npc
;
5059 gen_opc_instr_start
[lj
] = 1;
5060 gen_opc_icount
[lj
] = num_insns
;
5063 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
5066 disas_sparc_insn(dc
);
5071 /* if the next PC is different, we abort now */
5072 if (dc
->pc
!= (last_pc
+ 4))
5074 /* if we reach a page boundary, we stop generation so that the
5075 PC of a TT_TFAULT exception is always in the right page */
5076 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
5078 /* if single step mode, we generate only one instruction and
5079 generate an exception */
5080 if (dc
->singlestep
) {
5083 } while ((gen_opc_ptr
< gen_opc_end
) &&
5084 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
5085 num_insns
< max_insns
);
5088 tcg_temp_free(cpu_addr
);
5089 tcg_temp_free(cpu_val
);
5090 tcg_temp_free(cpu_dst
);
5091 tcg_temp_free_i64(cpu_tmp64
);
5092 tcg_temp_free_i32(cpu_tmp32
);
5093 tcg_temp_free(cpu_tmp0
);
5094 if (tb
->cflags
& CF_LAST_IO
)
5097 if (dc
->pc
!= DYNAMIC_PC
&&
5098 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
5099 /* static PC and NPC: we can use direct chaining */
5100 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
5102 if (dc
->pc
!= DYNAMIC_PC
)
5103 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
5104 save_npc(dc
, cpu_cond
);
5108 gen_icount_end(tb
, num_insns
);
5109 *gen_opc_ptr
= INDEX_op_end
;
5111 j
= gen_opc_ptr
- gen_opc_buf
;
5114 gen_opc_instr_start
[lj
++] = 0;
5118 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
5119 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
5121 tb
->size
= last_pc
+ 4 - pc_start
;
5122 tb
->icount
= num_insns
;
5125 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
5126 qemu_log("--------------\n");
5127 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
5128 log_target_disas(pc_start
, last_pc
+ 4 - pc_start
, 0);
5134 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
5136 gen_intermediate_code_internal(tb
, 0, env
);
5139 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
5141 gen_intermediate_code_internal(tb
, 1, env
);
/* One-time translator initialization: binds every TCG global used by this
 * front end (cpu_env, cpu_regwptr, condition codes, pc/npc, g1-g7, the FP
 * register file, and the SPARC64-only control registers) to its slot in
 * CPUState, then registers the helper functions.
 * NOTE(review): this extraction is elided — the name-string arguments of
 * most tcg_global_mem_new* calls, several #endif lines, and most gregnames
 * entries are missing from this view. */
5144 void gen_intermediate_code_init(CPUSPARCState
*env
)
/* Static name tables for the TCG globals; g0 gets no TCG global because
 * it is hardwired to zero. */
5148 static const char * const gregnames
[8] = {
5149 NULL
, // g0 not used
5158 static const char * const fregnames
[64] = {
5159 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5160 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5161 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5162 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5163 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5164 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5165 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5166 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5169 /* init various static tables */
/* cpu_env lives in the fixed host register TCG_AREG0; everything else
 * is a memory-backed global at offsetof(CPUState, <field>). */
5173 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
5174 cpu_regwptr
= tcg_global_mem_new_ptr(TCG_AREG0
,
5175 offsetof(CPUState
, regwptr
),
/* SPARC64-only globals: extended condition codes, ASI, FPRS, GSR,
 * tick comparators and hypervisor registers. */
5177 #ifdef TARGET_SPARC64
5178 cpu_xcc
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, xcc
),
5180 cpu_asi
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, asi
),
5182 cpu_fprs
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, fprs
),
5184 cpu_gsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, gsr
),
5186 cpu_tick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5187 offsetof(CPUState
, tick_cmpr
),
5189 cpu_stick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5190 offsetof(CPUState
, stick_cmpr
),
5192 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5193 offsetof(CPUState
, hstick_cmpr
),
5195 cpu_hintp
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hintp
),
5197 cpu_htba
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, htba
),
5199 cpu_hver
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hver
),
5201 cpu_ssr
= tcg_global_mem_new(TCG_AREG0
,
5202 offsetof(CPUState
, ssr
), "ssr");
5203 cpu_ver
= tcg_global_mem_new(TCG_AREG0
,
5204 offsetof(CPUState
, version
), "ver");
5205 cpu_softint
= tcg_global_mem_new_i32(TCG_AREG0
,
5206 offsetof(CPUState
, softint
),
/* SPARC32 window-invalid mask. */
5209 cpu_wim
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, wim
),
/* Globals common to both targets: branch condition, CC sources/dest,
 * CC op, PSR, FSR, pc/npc and the Y register. */
5212 cpu_cond
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cond
),
5214 cpu_cc_src
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_src
),
5216 cpu_cc_src2
= tcg_global_mem_new(TCG_AREG0
,
5217 offsetof(CPUState
, cc_src2
),
5219 cpu_cc_dst
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_dst
),
5221 cpu_cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, cc_op
),
5223 cpu_psr
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, psr
),
5225 cpu_fsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, fsr
),
5227 cpu_pc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, pc
),
5229 cpu_npc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, npc
),
5231 cpu_y
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, y
), "y");
5232 #ifndef CONFIG_USER_ONLY
5233 cpu_tbr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, tbr
),
/* g1-g7 (g0 is skipped — hardwired zero) and all FP registers. */
5236 for (i
= 1; i
< 8; i
++)
5237 cpu_gregs
[i
] = tcg_global_mem_new(TCG_AREG0
,
5238 offsetof(CPUState
, gregs
[i
]),
5240 for (i
= 0; i
< TARGET_FPREGS
; i
++)
5241 cpu_fpr
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
5242 offsetof(CPUState
, fpr
[i
]),
5245 /* register helpers */
5247 #define GEN_HELPER 2
5252 void restore_state_to_opc(CPUState
*env
, TranslationBlock
*tb
, int pc_pos
)
5255 env
->pc
= gen_opc_pc
[pc_pos
];
5256 npc
= gen_opc_npc
[pc_pos
];
5258 /* dynamic NPC: already stored */
5259 } else if (npc
== 2) {
5260 /* jump PC: use 'cond' and the jump targets of the translation */
5262 env
->npc
= gen_opc_jump_pc
[0];
5264 env
->npc
= gen_opc_jump_pc
[1];
5270 /* flush pending conditional evaluations before exposing cpu state */
5271 if (CC_OP
!= CC_OP_FLAGS
) {
5272 helper_compute_psr();