/* SPARC translation (TCG front end for target-sparc). */
/*
   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
43 /* global register indexes */
44 static TCGv_ptr cpu_env
, cpu_regwptr
;
45 static TCGv cpu_cc_src
, cpu_cc_src2
, cpu_cc_dst
;
46 static TCGv_i32 cpu_psr
;
47 static TCGv cpu_fsr
, cpu_pc
, cpu_npc
, cpu_gregs
[8];
49 #ifndef CONFIG_USER_ONLY
52 static TCGv cpu_cond
, cpu_src1
, cpu_src2
, cpu_dst
, cpu_addr
, cpu_val
;
54 static TCGv_i32 cpu_xcc
, cpu_asi
, cpu_fprs
;
56 static TCGv cpu_tick_cmpr
, cpu_stick_cmpr
, cpu_hstick_cmpr
;
57 static TCGv cpu_hintp
, cpu_htba
, cpu_hver
, cpu_ssr
, cpu_ver
;
58 static TCGv_i32 cpu_softint
;
62 /* local register indexes (only used inside old micro ops) */
64 static TCGv_i32 cpu_tmp32
;
65 static TCGv_i64 cpu_tmp64
;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr
[TARGET_FPREGS
];
69 #include "gen-icount.h"
71 typedef struct DisasContext
{
72 target_ulong pc
; /* current Program Counter: integer or DYNAMIC_PC */
73 target_ulong npc
; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
74 target_ulong jump_pc
[2]; /* used when JUMP_PC pc value is used */
78 int address_mask_32bit
;
79 struct TranslationBlock
*tb
;
// This function uses non-native bit order: FROM/TO count from the MSB
// (bit 31) downwards, as instruction fields are drawn in the manuals.
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the field extractors. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

/* Map an architectural double/quad FP register number onto an index
   into cpu_fpr[].  On V9 bit 0 of the register number selects the
   upper bank of 32 single registers. */
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
/* Sign-extend the low 'len' bits of x to a full int.
   The shift is done on an unsigned value to avoid the undefined
   behaviour of left-shifting into/past the sign bit; the arithmetic
   right shift then replicates the sign bit (implementation-defined
   but universal on the platforms QEMU targets). */
static int sign_extend(int x, int len)
{
    int shift = 32 - len;
    return (int)((unsigned int)x << shift) >> shift;
}

/* Bit 13 of an instruction word selects the immediate form. */
#define IS_IMM (insn & (1<<13))

/* floating point registers moves */
114 static void gen_op_load_fpr_DT0(unsigned int src
)
116 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
117 offsetof(CPU_DoubleU
, l
.upper
));
118 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
119 offsetof(CPU_DoubleU
, l
.lower
));
122 static void gen_op_load_fpr_DT1(unsigned int src
)
124 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
125 offsetof(CPU_DoubleU
, l
.upper
));
126 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt1
) +
127 offsetof(CPU_DoubleU
, l
.lower
));
130 static void gen_op_store_DT0_fpr(unsigned int dst
)
132 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
133 offsetof(CPU_DoubleU
, l
.upper
));
134 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, dt0
) +
135 offsetof(CPU_DoubleU
, l
.lower
));
138 static void gen_op_load_fpr_QT0(unsigned int src
)
140 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
141 offsetof(CPU_QuadU
, l
.upmost
));
142 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
143 offsetof(CPU_QuadU
, l
.upper
));
144 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
145 offsetof(CPU_QuadU
, l
.lower
));
146 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
147 offsetof(CPU_QuadU
, l
.lowest
));
150 static void gen_op_load_fpr_QT1(unsigned int src
)
152 tcg_gen_st_i32(cpu_fpr
[src
], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
153 offsetof(CPU_QuadU
, l
.upmost
));
154 tcg_gen_st_i32(cpu_fpr
[src
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
155 offsetof(CPU_QuadU
, l
.upper
));
156 tcg_gen_st_i32(cpu_fpr
[src
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
157 offsetof(CPU_QuadU
, l
.lower
));
158 tcg_gen_st_i32(cpu_fpr
[src
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt1
) +
159 offsetof(CPU_QuadU
, l
.lowest
));
162 static void gen_op_store_QT0_fpr(unsigned int dst
)
164 tcg_gen_ld_i32(cpu_fpr
[dst
], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
165 offsetof(CPU_QuadU
, l
.upmost
));
166 tcg_gen_ld_i32(cpu_fpr
[dst
+ 1], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
167 offsetof(CPU_QuadU
, l
.upper
));
168 tcg_gen_ld_i32(cpu_fpr
[dst
+ 2], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
169 offsetof(CPU_QuadU
, l
.lower
));
170 tcg_gen_ld_i32(cpu_fpr
[dst
+ 3], cpu_env
, offsetof(CPUSPARCState
, qt0
) +
171 offsetof(CPU_QuadU
, l
.lowest
));
175 #ifdef CONFIG_USER_ONLY
176 #define supervisor(dc) 0
177 #ifdef TARGET_SPARC64
178 #define hypervisor(dc) 0
181 #define supervisor(dc) (dc->mem_idx >= 1)
182 #ifdef TARGET_SPARC64
183 #define hypervisor(dc) (dc->mem_idx == 2)
188 #ifdef TARGET_SPARC64
190 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
192 #define AM_CHECK(dc) (1)
196 static inline void gen_address_mask(DisasContext
*dc
, TCGv addr
)
198 #ifdef TARGET_SPARC64
200 tcg_gen_andi_tl(addr
, addr
, 0xffffffffULL
);
204 static inline void gen_movl_reg_TN(int reg
, TCGv tn
)
207 tcg_gen_movi_tl(tn
, 0);
209 tcg_gen_mov_tl(tn
, cpu_gregs
[reg
]);
211 tcg_gen_ld_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
215 static inline void gen_movl_TN_reg(int reg
, TCGv tn
)
220 tcg_gen_mov_tl(cpu_gregs
[reg
], tn
);
222 tcg_gen_st_tl(tn
, cpu_regwptr
, (reg
- 8) * sizeof(target_ulong
));
226 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
,
227 target_ulong pc
, target_ulong npc
)
229 TranslationBlock
*tb
;
232 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) &&
233 (npc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
)) {
234 /* jump to same page: we can use a direct jump */
235 tcg_gen_goto_tb(tb_num
);
236 tcg_gen_movi_tl(cpu_pc
, pc
);
237 tcg_gen_movi_tl(cpu_npc
, npc
);
238 tcg_gen_exit_tb((long)tb
+ tb_num
);
240 /* jump to another page: currently not optimized */
241 tcg_gen_movi_tl(cpu_pc
, pc
);
242 tcg_gen_movi_tl(cpu_npc
, npc
);
248 static inline void gen_mov_reg_N(TCGv reg
, TCGv_i32 src
)
250 tcg_gen_extu_i32_tl(reg
, src
);
251 tcg_gen_shri_tl(reg
, reg
, PSR_NEG_SHIFT
);
252 tcg_gen_andi_tl(reg
, reg
, 0x1);
255 static inline void gen_mov_reg_Z(TCGv reg
, TCGv_i32 src
)
257 tcg_gen_extu_i32_tl(reg
, src
);
258 tcg_gen_shri_tl(reg
, reg
, PSR_ZERO_SHIFT
);
259 tcg_gen_andi_tl(reg
, reg
, 0x1);
262 static inline void gen_mov_reg_V(TCGv reg
, TCGv_i32 src
)
264 tcg_gen_extu_i32_tl(reg
, src
);
265 tcg_gen_shri_tl(reg
, reg
, PSR_OVF_SHIFT
);
266 tcg_gen_andi_tl(reg
, reg
, 0x1);
269 static inline void gen_mov_reg_C(TCGv reg
, TCGv_i32 src
)
271 tcg_gen_extu_i32_tl(reg
, src
);
272 tcg_gen_shri_tl(reg
, reg
, PSR_CARRY_SHIFT
);
273 tcg_gen_andi_tl(reg
, reg
, 0x1);
276 static inline void gen_cc_clear_icc(void)
278 tcg_gen_movi_i32(cpu_psr
, 0);
281 #ifdef TARGET_SPARC64
282 static inline void gen_cc_clear_xcc(void)
284 tcg_gen_movi_i32(cpu_xcc
, 0);
290 env->psr |= PSR_ZERO;
291 if ((int32_t) T0 < 0)
294 static inline void gen_cc_NZ_icc(TCGv dst
)
299 l1
= gen_new_label();
300 l2
= gen_new_label();
301 r_temp
= tcg_temp_new();
302 tcg_gen_andi_tl(r_temp
, dst
, 0xffffffffULL
);
303 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
304 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_ZERO
);
306 tcg_gen_ext32s_tl(r_temp
, dst
);
307 tcg_gen_brcondi_tl(TCG_COND_GE
, r_temp
, 0, l2
);
308 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_NEG
);
310 tcg_temp_free(r_temp
);
313 #ifdef TARGET_SPARC64
314 static inline void gen_cc_NZ_xcc(TCGv dst
)
318 l1
= gen_new_label();
319 l2
= gen_new_label();
320 tcg_gen_brcondi_tl(TCG_COND_NE
, dst
, 0, l1
);
321 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_ZERO
);
323 tcg_gen_brcondi_tl(TCG_COND_GE
, dst
, 0, l2
);
324 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_NEG
);
331 env->psr |= PSR_CARRY;
333 static inline void gen_cc_C_add_icc(TCGv dst
, TCGv src1
)
335 TCGv r_temp1
, r_temp2
;
338 l1
= gen_new_label();
339 r_temp1
= tcg_temp_new();
340 r_temp2
= tcg_temp_new();
341 tcg_gen_andi_tl(r_temp1
, dst
, 0xffffffffULL
);
342 tcg_gen_andi_tl(r_temp2
, src1
, 0xffffffffULL
);
343 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
344 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
346 tcg_temp_free(r_temp1
);
347 tcg_temp_free(r_temp2
);
350 #ifdef TARGET_SPARC64
351 static inline void gen_cc_C_add_xcc(TCGv dst
, TCGv src1
)
355 l1
= gen_new_label();
356 tcg_gen_brcond_tl(TCG_COND_GEU
, dst
, src1
, l1
);
357 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
363 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
366 static inline void gen_cc_V_add_icc(TCGv dst
, TCGv src1
, TCGv src2
)
370 r_temp
= tcg_temp_new();
371 tcg_gen_xor_tl(r_temp
, src1
, src2
);
372 tcg_gen_not_tl(r_temp
, r_temp
);
373 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
374 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
375 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
376 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
377 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
378 tcg_temp_free(r_temp
);
379 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
382 #ifdef TARGET_SPARC64
383 static inline void gen_cc_V_add_xcc(TCGv dst
, TCGv src1
, TCGv src2
)
387 r_temp
= tcg_temp_new();
388 tcg_gen_xor_tl(r_temp
, src1
, src2
);
389 tcg_gen_not_tl(r_temp
, r_temp
);
390 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
391 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
392 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 63));
393 tcg_gen_shri_tl(r_temp
, r_temp
, 63 - PSR_OVF_SHIFT
);
394 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
395 tcg_temp_free(r_temp
);
396 tcg_gen_or_i32(cpu_xcc
, cpu_xcc
, cpu_tmp32
);
400 static inline void gen_add_tv(TCGv dst
, TCGv src1
, TCGv src2
)
406 l1
= gen_new_label();
408 r_temp
= tcg_temp_new();
409 tcg_gen_xor_tl(r_temp
, src1
, src2
);
410 tcg_gen_not_tl(r_temp
, r_temp
);
411 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
412 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
413 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
414 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
415 r_const
= tcg_const_i32(TT_TOVF
);
416 gen_helper_raise_exception(r_const
);
417 tcg_temp_free_i32(r_const
);
419 tcg_temp_free(r_temp
);
422 static inline void gen_cc_V_tag(TCGv src1
, TCGv src2
)
426 l1
= gen_new_label();
427 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
428 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
429 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
430 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
434 static inline void gen_tag_tv(TCGv src1
, TCGv src2
)
439 l1
= gen_new_label();
440 tcg_gen_or_tl(cpu_tmp0
, src1
, src2
);
441 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x3);
442 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_tmp0
, 0, l1
);
443 r_const
= tcg_const_i32(TT_TOVF
);
444 gen_helper_raise_exception(r_const
);
445 tcg_temp_free_i32(r_const
);
449 static inline void gen_op_add_cc2(TCGv dst
)
452 gen_cc_NZ_icc(cpu_cc_dst
);
453 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
454 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
455 #ifdef TARGET_SPARC64
457 gen_cc_NZ_xcc(cpu_cc_dst
);
458 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
459 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
461 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
464 static inline void gen_op_addi_cc(TCGv dst
, TCGv src1
, target_long src2
)
466 tcg_gen_mov_tl(cpu_cc_src
, src1
);
467 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
468 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
472 static inline void gen_op_add_cc(TCGv dst
, TCGv src1
, TCGv src2
)
474 tcg_gen_mov_tl(cpu_cc_src
, src1
);
475 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
476 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
480 static inline void gen_op_addx_cc2(TCGv dst
)
482 gen_cc_NZ_icc(cpu_cc_dst
);
483 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
484 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
485 #ifdef TARGET_SPARC64
486 gen_cc_NZ_xcc(cpu_cc_dst
);
487 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
488 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
490 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
493 static inline void gen_op_addxi_cc(TCGv dst
, TCGv src1
, target_long src2
)
495 tcg_gen_mov_tl(cpu_cc_src
, src1
);
496 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
497 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
498 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
500 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
501 #ifdef TARGET_SPARC64
503 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
505 tcg_gen_addi_tl(cpu_cc_dst
, cpu_cc_dst
, src2
);
506 gen_op_addx_cc2(dst
);
509 static inline void gen_op_addx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
511 tcg_gen_mov_tl(cpu_cc_src
, src1
);
512 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
513 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
514 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
516 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
517 #ifdef TARGET_SPARC64
519 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
521 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
522 gen_op_addx_cc2(dst
);
525 static inline void gen_op_tadd_cc(TCGv dst
, TCGv src1
, TCGv src2
)
527 tcg_gen_mov_tl(cpu_cc_src
, src1
);
528 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
529 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
531 gen_cc_NZ_icc(cpu_cc_dst
);
532 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
533 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
534 gen_cc_V_tag(cpu_cc_src
, cpu_cc_src2
);
535 #ifdef TARGET_SPARC64
537 gen_cc_NZ_xcc(cpu_cc_dst
);
538 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
539 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
541 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
544 static inline void gen_op_tadd_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
546 tcg_gen_mov_tl(cpu_cc_src
, src1
);
547 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
548 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
549 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
550 gen_add_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
552 gen_cc_NZ_icc(cpu_cc_dst
);
553 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
554 #ifdef TARGET_SPARC64
556 gen_cc_NZ_xcc(cpu_cc_dst
);
557 gen_cc_C_add_xcc(cpu_cc_dst
, cpu_cc_src
);
558 gen_cc_V_add_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
560 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
565 env->psr |= PSR_CARRY;
567 static inline void gen_cc_C_sub_icc(TCGv src1
, TCGv src2
)
569 TCGv r_temp1
, r_temp2
;
572 l1
= gen_new_label();
573 r_temp1
= tcg_temp_new();
574 r_temp2
= tcg_temp_new();
575 tcg_gen_andi_tl(r_temp1
, src1
, 0xffffffffULL
);
576 tcg_gen_andi_tl(r_temp2
, src2
, 0xffffffffULL
);
577 tcg_gen_brcond_tl(TCG_COND_GEU
, r_temp1
, r_temp2
, l1
);
578 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_CARRY
);
580 tcg_temp_free(r_temp1
);
581 tcg_temp_free(r_temp2
);
584 #ifdef TARGET_SPARC64
585 static inline void gen_cc_C_sub_xcc(TCGv src1
, TCGv src2
)
589 l1
= gen_new_label();
590 tcg_gen_brcond_tl(TCG_COND_GEU
, src1
, src2
, l1
);
591 tcg_gen_ori_i32(cpu_xcc
, cpu_xcc
, PSR_CARRY
);
597 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
600 static inline void gen_cc_V_sub_icc(TCGv dst
, TCGv src1
, TCGv src2
)
604 r_temp
= tcg_temp_new();
605 tcg_gen_xor_tl(r_temp
, src1
, src2
);
606 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
607 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
608 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
609 tcg_gen_shri_tl(r_temp
, r_temp
, 31 - PSR_OVF_SHIFT
);
610 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
611 tcg_gen_or_i32(cpu_psr
, cpu_psr
, cpu_tmp32
);
612 tcg_temp_free(r_temp
);
615 #ifdef TARGET_SPARC64
616 static inline void gen_cc_V_sub_xcc(TCGv dst
, TCGv src1
, TCGv src2
)
620 r_temp
= tcg_temp_new();
621 tcg_gen_xor_tl(r_temp
, src1
, src2
);
622 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
623 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
624 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 63));
625 tcg_gen_shri_tl(r_temp
, r_temp
, 63 - PSR_OVF_SHIFT
);
626 tcg_gen_trunc_tl_i32(cpu_tmp32
, r_temp
);
627 tcg_gen_or_i32(cpu_xcc
, cpu_xcc
, cpu_tmp32
);
628 tcg_temp_free(r_temp
);
632 static inline void gen_sub_tv(TCGv dst
, TCGv src1
, TCGv src2
)
638 l1
= gen_new_label();
640 r_temp
= tcg_temp_new();
641 tcg_gen_xor_tl(r_temp
, src1
, src2
);
642 tcg_gen_xor_tl(cpu_tmp0
, src1
, dst
);
643 tcg_gen_and_tl(r_temp
, r_temp
, cpu_tmp0
);
644 tcg_gen_andi_tl(r_temp
, r_temp
, (1ULL << 31));
645 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_temp
, 0, l1
);
646 r_const
= tcg_const_i32(TT_TOVF
);
647 gen_helper_raise_exception(r_const
);
648 tcg_temp_free_i32(r_const
);
650 tcg_temp_free(r_temp
);
653 static inline void gen_op_sub_cc2(TCGv dst
)
656 gen_cc_NZ_icc(cpu_cc_dst
);
657 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
658 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
659 #ifdef TARGET_SPARC64
661 gen_cc_NZ_xcc(cpu_cc_dst
);
662 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
663 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
665 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
668 static inline void gen_op_subi_cc(TCGv dst
, TCGv src1
, target_long src2
)
670 tcg_gen_mov_tl(cpu_cc_src
, src1
);
671 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
672 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_src
, src2
);
676 static inline void gen_op_sub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
678 tcg_gen_mov_tl(cpu_cc_src
, src1
);
679 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
680 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
684 static inline void gen_op_subx_cc2(TCGv dst
)
686 gen_cc_NZ_icc(cpu_cc_dst
);
687 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
688 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
689 #ifdef TARGET_SPARC64
690 gen_cc_NZ_xcc(cpu_cc_dst
);
691 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
692 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
694 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
697 static inline void gen_op_subxi_cc(TCGv dst
, TCGv src1
, target_long src2
)
699 tcg_gen_mov_tl(cpu_cc_src
, src1
);
700 tcg_gen_movi_tl(cpu_cc_src2
, src2
);
701 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
702 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
704 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
705 #ifdef TARGET_SPARC64
707 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
709 tcg_gen_subi_tl(cpu_cc_dst
, cpu_cc_dst
, src2
);
710 gen_op_subx_cc2(dst
);
713 static inline void gen_op_subx_cc(TCGv dst
, TCGv src1
, TCGv src2
)
715 tcg_gen_mov_tl(cpu_cc_src
, src1
);
716 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
717 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
718 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_tmp0
);
720 gen_cc_C_sub_icc(cpu_cc_dst
, cpu_cc_src
);
721 #ifdef TARGET_SPARC64
723 gen_cc_C_sub_xcc(cpu_cc_dst
, cpu_cc_src
);
725 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_dst
, cpu_cc_src2
);
726 gen_op_subx_cc2(dst
);
729 static inline void gen_op_tsub_cc(TCGv dst
, TCGv src1
, TCGv src2
)
731 tcg_gen_mov_tl(cpu_cc_src
, src1
);
732 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
733 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
735 gen_cc_NZ_icc(cpu_cc_dst
);
736 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
737 gen_cc_V_sub_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
738 gen_cc_V_tag(cpu_cc_src
, cpu_cc_src2
);
739 #ifdef TARGET_SPARC64
741 gen_cc_NZ_xcc(cpu_cc_dst
);
742 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
743 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
745 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
748 static inline void gen_op_tsub_ccTV(TCGv dst
, TCGv src1
, TCGv src2
)
750 tcg_gen_mov_tl(cpu_cc_src
, src1
);
751 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
752 gen_tag_tv(cpu_cc_src
, cpu_cc_src2
);
753 tcg_gen_sub_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
754 gen_sub_tv(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
756 gen_cc_NZ_icc(cpu_cc_dst
);
757 gen_cc_C_sub_icc(cpu_cc_src
, cpu_cc_src2
);
758 #ifdef TARGET_SPARC64
760 gen_cc_NZ_xcc(cpu_cc_dst
);
761 gen_cc_C_sub_xcc(cpu_cc_src
, cpu_cc_src2
);
762 gen_cc_V_sub_xcc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
764 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
767 static inline void gen_op_mulscc(TCGv dst
, TCGv src1
, TCGv src2
)
772 l1
= gen_new_label();
773 r_temp
= tcg_temp_new();
779 tcg_gen_andi_tl(cpu_cc_src
, src1
, 0xffffffff);
780 tcg_gen_andi_tl(r_temp
, cpu_y
, 0x1);
781 tcg_gen_andi_tl(cpu_cc_src2
, src2
, 0xffffffff);
782 tcg_gen_brcondi_tl(TCG_COND_NE
, r_temp
, 0, l1
);
783 tcg_gen_movi_tl(cpu_cc_src2
, 0);
787 // env->y = (b2 << 31) | (env->y >> 1);
788 tcg_gen_andi_tl(r_temp
, cpu_cc_src
, 0x1);
789 tcg_gen_shli_tl(r_temp
, r_temp
, 31);
790 tcg_gen_shri_tl(cpu_tmp0
, cpu_y
, 1);
791 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0x7fffffff);
792 tcg_gen_or_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
793 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
796 gen_mov_reg_N(cpu_tmp0
, cpu_psr
);
797 gen_mov_reg_V(r_temp
, cpu_psr
);
798 tcg_gen_xor_tl(cpu_tmp0
, cpu_tmp0
, r_temp
);
799 tcg_temp_free(r_temp
);
801 // T0 = (b1 << 31) | (T0 >> 1);
803 tcg_gen_shli_tl(cpu_tmp0
, cpu_tmp0
, 31);
804 tcg_gen_shri_tl(cpu_cc_src
, cpu_cc_src
, 1);
805 tcg_gen_or_tl(cpu_cc_src
, cpu_cc_src
, cpu_tmp0
);
807 /* do addition and update flags */
808 tcg_gen_add_tl(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
811 gen_cc_NZ_icc(cpu_cc_dst
);
812 gen_cc_V_add_icc(cpu_cc_dst
, cpu_cc_src
, cpu_cc_src2
);
813 gen_cc_C_add_icc(cpu_cc_dst
, cpu_cc_src
);
814 tcg_gen_mov_tl(dst
, cpu_cc_dst
);
817 static inline void gen_op_umul(TCGv dst
, TCGv src1
, TCGv src2
)
819 TCGv_i64 r_temp
, r_temp2
;
821 r_temp
= tcg_temp_new_i64();
822 r_temp2
= tcg_temp_new_i64();
824 tcg_gen_extu_tl_i64(r_temp
, src2
);
825 tcg_gen_extu_tl_i64(r_temp2
, src1
);
826 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
828 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
829 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
830 tcg_temp_free_i64(r_temp
);
831 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
832 #ifdef TARGET_SPARC64
833 tcg_gen_mov_i64(dst
, r_temp2
);
835 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
837 tcg_temp_free_i64(r_temp2
);
840 static inline void gen_op_smul(TCGv dst
, TCGv src1
, TCGv src2
)
842 TCGv_i64 r_temp
, r_temp2
;
844 r_temp
= tcg_temp_new_i64();
845 r_temp2
= tcg_temp_new_i64();
847 tcg_gen_ext_tl_i64(r_temp
, src2
);
848 tcg_gen_ext_tl_i64(r_temp2
, src1
);
849 tcg_gen_mul_i64(r_temp2
, r_temp
, r_temp2
);
851 tcg_gen_shri_i64(r_temp
, r_temp2
, 32);
852 tcg_gen_trunc_i64_tl(cpu_tmp0
, r_temp
);
853 tcg_temp_free_i64(r_temp
);
854 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
855 #ifdef TARGET_SPARC64
856 tcg_gen_mov_i64(dst
, r_temp2
);
858 tcg_gen_trunc_i64_tl(dst
, r_temp2
);
860 tcg_temp_free_i64(r_temp2
);
863 #ifdef TARGET_SPARC64
864 static inline void gen_trap_ifdivzero_tl(TCGv divisor
)
869 l1
= gen_new_label();
870 tcg_gen_brcondi_tl(TCG_COND_NE
, divisor
, 0, l1
);
871 r_const
= tcg_const_i32(TT_DIV_ZERO
);
872 gen_helper_raise_exception(r_const
);
873 tcg_temp_free_i32(r_const
);
877 static inline void gen_op_sdivx(TCGv dst
, TCGv src1
, TCGv src2
)
881 l1
= gen_new_label();
882 l2
= gen_new_label();
883 tcg_gen_mov_tl(cpu_cc_src
, src1
);
884 tcg_gen_mov_tl(cpu_cc_src2
, src2
);
885 gen_trap_ifdivzero_tl(cpu_cc_src2
);
886 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src
, INT64_MIN
, l1
);
887 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_cc_src2
, -1, l1
);
888 tcg_gen_movi_i64(dst
, INT64_MIN
);
891 tcg_gen_div_i64(dst
, cpu_cc_src
, cpu_cc_src2
);
896 static inline void gen_op_div_cc(TCGv dst
)
900 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
902 gen_cc_NZ_icc(cpu_cc_dst
);
903 l1
= gen_new_label();
904 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_cc_src2
, 0, l1
);
905 tcg_gen_ori_i32(cpu_psr
, cpu_psr
, PSR_OVF
);
909 static inline void gen_op_logic_cc(TCGv dst
)
911 tcg_gen_mov_tl(cpu_cc_dst
, dst
);
914 gen_cc_NZ_icc(cpu_cc_dst
);
915 #ifdef TARGET_SPARC64
917 gen_cc_NZ_xcc(cpu_cc_dst
);
922 static inline void gen_op_eval_ba(TCGv dst
)
924 tcg_gen_movi_tl(dst
, 1);
928 static inline void gen_op_eval_be(TCGv dst
, TCGv_i32 src
)
930 gen_mov_reg_Z(dst
, src
);
934 static inline void gen_op_eval_ble(TCGv dst
, TCGv_i32 src
)
936 gen_mov_reg_N(cpu_tmp0
, src
);
937 gen_mov_reg_V(dst
, src
);
938 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
939 gen_mov_reg_Z(cpu_tmp0
, src
);
940 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
944 static inline void gen_op_eval_bl(TCGv dst
, TCGv_i32 src
)
946 gen_mov_reg_V(cpu_tmp0
, src
);
947 gen_mov_reg_N(dst
, src
);
948 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
952 static inline void gen_op_eval_bleu(TCGv dst
, TCGv_i32 src
)
954 gen_mov_reg_Z(cpu_tmp0
, src
);
955 gen_mov_reg_C(dst
, src
);
956 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
960 static inline void gen_op_eval_bcs(TCGv dst
, TCGv_i32 src
)
962 gen_mov_reg_C(dst
, src
);
966 static inline void gen_op_eval_bvs(TCGv dst
, TCGv_i32 src
)
968 gen_mov_reg_V(dst
, src
);
972 static inline void gen_op_eval_bn(TCGv dst
)
974 tcg_gen_movi_tl(dst
, 0);
978 static inline void gen_op_eval_bneg(TCGv dst
, TCGv_i32 src
)
980 gen_mov_reg_N(dst
, src
);
984 static inline void gen_op_eval_bne(TCGv dst
, TCGv_i32 src
)
986 gen_mov_reg_Z(dst
, src
);
987 tcg_gen_xori_tl(dst
, dst
, 0x1);
991 static inline void gen_op_eval_bg(TCGv dst
, TCGv_i32 src
)
993 gen_mov_reg_N(cpu_tmp0
, src
);
994 gen_mov_reg_V(dst
, src
);
995 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
996 gen_mov_reg_Z(cpu_tmp0
, src
);
997 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
998 tcg_gen_xori_tl(dst
, dst
, 0x1);
1002 static inline void gen_op_eval_bge(TCGv dst
, TCGv_i32 src
)
1004 gen_mov_reg_V(cpu_tmp0
, src
);
1005 gen_mov_reg_N(dst
, src
);
1006 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1007 tcg_gen_xori_tl(dst
, dst
, 0x1);
1011 static inline void gen_op_eval_bgu(TCGv dst
, TCGv_i32 src
)
1013 gen_mov_reg_Z(cpu_tmp0
, src
);
1014 gen_mov_reg_C(dst
, src
);
1015 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
1016 tcg_gen_xori_tl(dst
, dst
, 0x1);
1020 static inline void gen_op_eval_bcc(TCGv dst
, TCGv_i32 src
)
1022 gen_mov_reg_C(dst
, src
);
1023 tcg_gen_xori_tl(dst
, dst
, 0x1);
1027 static inline void gen_op_eval_bpos(TCGv dst
, TCGv_i32 src
)
1029 gen_mov_reg_N(dst
, src
);
1030 tcg_gen_xori_tl(dst
, dst
, 0x1);
1034 static inline void gen_op_eval_bvc(TCGv dst
, TCGv_i32 src
)
1036 gen_mov_reg_V(dst
, src
);
1037 tcg_gen_xori_tl(dst
, dst
, 0x1);
1041 FPSR bit field FCC1 | FCC0:
1047 static inline void gen_mov_reg_FCC0(TCGv reg
, TCGv src
,
1048 unsigned int fcc_offset
)
1050 tcg_gen_shri_tl(reg
, src
, FSR_FCC0_SHIFT
+ fcc_offset
);
1051 tcg_gen_andi_tl(reg
, reg
, 0x1);
1054 static inline void gen_mov_reg_FCC1(TCGv reg
, TCGv src
,
1055 unsigned int fcc_offset
)
1057 tcg_gen_shri_tl(reg
, src
, FSR_FCC1_SHIFT
+ fcc_offset
);
1058 tcg_gen_andi_tl(reg
, reg
, 0x1);
1062 static inline void gen_op_eval_fbne(TCGv dst
, TCGv src
,
1063 unsigned int fcc_offset
)
1065 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1066 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1067 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
1070 // 1 or 2: FCC0 ^ FCC1
1071 static inline void gen_op_eval_fblg(TCGv dst
, TCGv src
,
1072 unsigned int fcc_offset
)
1074 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1075 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1076 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1080 static inline void gen_op_eval_fbul(TCGv dst
, TCGv src
,
1081 unsigned int fcc_offset
)
1083 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1087 static inline void gen_op_eval_fbl(TCGv dst
, TCGv src
,
1088 unsigned int fcc_offset
)
1090 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1091 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1092 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1093 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1097 static inline void gen_op_eval_fbug(TCGv dst
, TCGv src
,
1098 unsigned int fcc_offset
)
1100 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1104 static inline void gen_op_eval_fbg(TCGv dst
, TCGv src
,
1105 unsigned int fcc_offset
)
1107 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1108 tcg_gen_xori_tl(dst
, dst
, 0x1);
1109 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1110 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1114 static inline void gen_op_eval_fbu(TCGv dst
, TCGv src
,
1115 unsigned int fcc_offset
)
1117 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1118 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1119 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1122 // 0: !(FCC0 | FCC1)
1123 static inline void gen_op_eval_fbe(TCGv dst
, TCGv src
,
1124 unsigned int fcc_offset
)
1126 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1127 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1128 tcg_gen_or_tl(dst
, dst
, cpu_tmp0
);
1129 tcg_gen_xori_tl(dst
, dst
, 0x1);
1132 // 0 or 3: !(FCC0 ^ FCC1)
1133 static inline void gen_op_eval_fbue(TCGv dst
, TCGv src
,
1134 unsigned int fcc_offset
)
1136 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1137 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1138 tcg_gen_xor_tl(dst
, dst
, cpu_tmp0
);
1139 tcg_gen_xori_tl(dst
, dst
, 0x1);
1143 static inline void gen_op_eval_fbge(TCGv dst
, TCGv src
,
1144 unsigned int fcc_offset
)
1146 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1147 tcg_gen_xori_tl(dst
, dst
, 0x1);
1150 // !1: !(FCC0 & !FCC1)
1151 static inline void gen_op_eval_fbuge(TCGv dst
, TCGv src
,
1152 unsigned int fcc_offset
)
1154 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1155 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1156 tcg_gen_xori_tl(cpu_tmp0
, cpu_tmp0
, 0x1);
1157 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1158 tcg_gen_xori_tl(dst
, dst
, 0x1);
1162 static inline void gen_op_eval_fble(TCGv dst
, TCGv src
,
1163 unsigned int fcc_offset
)
1165 gen_mov_reg_FCC1(dst
, src
, fcc_offset
);
1166 tcg_gen_xori_tl(dst
, dst
, 0x1);
1169 // !2: !(!FCC0 & FCC1)
1170 static inline void gen_op_eval_fbule(TCGv dst
, TCGv src
,
1171 unsigned int fcc_offset
)
1173 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1174 tcg_gen_xori_tl(dst
, dst
, 0x1);
1175 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1176 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1177 tcg_gen_xori_tl(dst
, dst
, 0x1);
1180 // !3: !(FCC0 & FCC1)
1181 static inline void gen_op_eval_fbo(TCGv dst
, TCGv src
,
1182 unsigned int fcc_offset
)
1184 gen_mov_reg_FCC0(dst
, src
, fcc_offset
);
1185 gen_mov_reg_FCC1(cpu_tmp0
, src
, fcc_offset
);
1186 tcg_gen_and_tl(dst
, dst
, cpu_tmp0
);
1187 tcg_gen_xori_tl(dst
, dst
, 0x1);
1190 static inline void gen_branch2(DisasContext
*dc
, target_ulong pc1
,
1191 target_ulong pc2
, TCGv r_cond
)
1195 l1
= gen_new_label();
1197 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1199 gen_goto_tb(dc
, 0, pc1
, pc1
+ 4);
1202 gen_goto_tb(dc
, 1, pc2
, pc2
+ 4);
1205 static inline void gen_branch_a(DisasContext
*dc
, target_ulong pc1
,
1206 target_ulong pc2
, TCGv r_cond
)
1210 l1
= gen_new_label();
1212 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1214 gen_goto_tb(dc
, 0, pc2
, pc1
);
1217 gen_goto_tb(dc
, 1, pc2
+ 4, pc2
+ 8);
1220 static inline void gen_generic_branch(target_ulong npc1
, target_ulong npc2
,
1225 l1
= gen_new_label();
1226 l2
= gen_new_label();
1228 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
1230 tcg_gen_movi_tl(cpu_npc
, npc1
);
1234 tcg_gen_movi_tl(cpu_npc
, npc2
);
1238 /* call this function before using the condition register as it may
1239 have been set for a jump */
1240 static inline void flush_cond(DisasContext
*dc
, TCGv cond
)
1242 if (dc
->npc
== JUMP_PC
) {
1243 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1244 dc
->npc
= DYNAMIC_PC
;
1248 static inline void save_npc(DisasContext
*dc
, TCGv cond
)
1250 if (dc
->npc
== JUMP_PC
) {
1251 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1252 dc
->npc
= DYNAMIC_PC
;
1253 } else if (dc
->npc
!= DYNAMIC_PC
) {
1254 tcg_gen_movi_tl(cpu_npc
, dc
->npc
);
1258 static inline void save_state(DisasContext
*dc
, TCGv cond
)
1260 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
1264 static inline void gen_mov_pc_npc(DisasContext
*dc
, TCGv cond
)
1266 if (dc
->npc
== JUMP_PC
) {
1267 gen_generic_branch(dc
->jump_pc
[0], dc
->jump_pc
[1], cond
);
1268 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1269 dc
->pc
= DYNAMIC_PC
;
1270 } else if (dc
->npc
== DYNAMIC_PC
) {
1271 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1272 dc
->pc
= DYNAMIC_PC
;
1278 static inline void gen_op_next_insn(void)
1280 tcg_gen_mov_tl(cpu_pc
, cpu_npc
);
1281 tcg_gen_addi_tl(cpu_npc
, cpu_npc
, 4);
1284 static inline void gen_cond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1288 #ifdef TARGET_SPARC64
1298 gen_op_eval_bn(r_dst
);
1301 gen_op_eval_be(r_dst
, r_src
);
1304 gen_op_eval_ble(r_dst
, r_src
);
1307 gen_op_eval_bl(r_dst
, r_src
);
1310 gen_op_eval_bleu(r_dst
, r_src
);
1313 gen_op_eval_bcs(r_dst
, r_src
);
1316 gen_op_eval_bneg(r_dst
, r_src
);
1319 gen_op_eval_bvs(r_dst
, r_src
);
1322 gen_op_eval_ba(r_dst
);
1325 gen_op_eval_bne(r_dst
, r_src
);
1328 gen_op_eval_bg(r_dst
, r_src
);
1331 gen_op_eval_bge(r_dst
, r_src
);
1334 gen_op_eval_bgu(r_dst
, r_src
);
1337 gen_op_eval_bcc(r_dst
, r_src
);
1340 gen_op_eval_bpos(r_dst
, r_src
);
1343 gen_op_eval_bvc(r_dst
, r_src
);
1348 static inline void gen_fcond(TCGv r_dst
, unsigned int cc
, unsigned int cond
)
1350 unsigned int offset
;
1370 gen_op_eval_bn(r_dst
);
1373 gen_op_eval_fbne(r_dst
, cpu_fsr
, offset
);
1376 gen_op_eval_fblg(r_dst
, cpu_fsr
, offset
);
1379 gen_op_eval_fbul(r_dst
, cpu_fsr
, offset
);
1382 gen_op_eval_fbl(r_dst
, cpu_fsr
, offset
);
1385 gen_op_eval_fbug(r_dst
, cpu_fsr
, offset
);
1388 gen_op_eval_fbg(r_dst
, cpu_fsr
, offset
);
1391 gen_op_eval_fbu(r_dst
, cpu_fsr
, offset
);
1394 gen_op_eval_ba(r_dst
);
1397 gen_op_eval_fbe(r_dst
, cpu_fsr
, offset
);
1400 gen_op_eval_fbue(r_dst
, cpu_fsr
, offset
);
1403 gen_op_eval_fbge(r_dst
, cpu_fsr
, offset
);
1406 gen_op_eval_fbuge(r_dst
, cpu_fsr
, offset
);
1409 gen_op_eval_fble(r_dst
, cpu_fsr
, offset
);
1412 gen_op_eval_fbule(r_dst
, cpu_fsr
, offset
);
1415 gen_op_eval_fbo(r_dst
, cpu_fsr
, offset
);
1420 #ifdef TARGET_SPARC64
1422 static const int gen_tcg_cond_reg
[8] = {
1433 static inline void gen_cond_reg(TCGv r_dst
, int cond
, TCGv r_src
)
1437 l1
= gen_new_label();
1438 tcg_gen_movi_tl(r_dst
, 0);
1439 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], r_src
, 0, l1
);
1440 tcg_gen_movi_tl(r_dst
, 1);
1445 /* XXX: potentially incorrect if dynamic npc */
1446 static void do_branch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1449 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1450 target_ulong target
= dc
->pc
+ offset
;
1453 /* unconditional not taken */
1455 dc
->pc
= dc
->npc
+ 4;
1456 dc
->npc
= dc
->pc
+ 4;
1459 dc
->npc
= dc
->pc
+ 4;
1461 } else if (cond
== 0x8) {
1462 /* unconditional taken */
1465 dc
->npc
= dc
->pc
+ 4;
1471 flush_cond(dc
, r_cond
);
1472 gen_cond(r_cond
, cc
, cond
);
1474 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1478 dc
->jump_pc
[0] = target
;
1479 dc
->jump_pc
[1] = dc
->npc
+ 4;
1485 /* XXX: potentially incorrect if dynamic npc */
1486 static void do_fbranch(DisasContext
*dc
, int32_t offset
, uint32_t insn
, int cc
,
1489 unsigned int cond
= GET_FIELD(insn
, 3, 6), a
= (insn
& (1 << 29));
1490 target_ulong target
= dc
->pc
+ offset
;
1493 /* unconditional not taken */
1495 dc
->pc
= dc
->npc
+ 4;
1496 dc
->npc
= dc
->pc
+ 4;
1499 dc
->npc
= dc
->pc
+ 4;
1501 } else if (cond
== 0x8) {
1502 /* unconditional taken */
1505 dc
->npc
= dc
->pc
+ 4;
1511 flush_cond(dc
, r_cond
);
1512 gen_fcond(r_cond
, cc
, cond
);
1514 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1518 dc
->jump_pc
[0] = target
;
1519 dc
->jump_pc
[1] = dc
->npc
+ 4;
1525 #ifdef TARGET_SPARC64
1526 /* XXX: potentially incorrect if dynamic npc */
1527 static void do_branch_reg(DisasContext
*dc
, int32_t offset
, uint32_t insn
,
1528 TCGv r_cond
, TCGv r_reg
)
1530 unsigned int cond
= GET_FIELD_SP(insn
, 25, 27), a
= (insn
& (1 << 29));
1531 target_ulong target
= dc
->pc
+ offset
;
1533 flush_cond(dc
, r_cond
);
1534 gen_cond_reg(r_cond
, cond
, r_reg
);
1536 gen_branch_a(dc
, target
, dc
->npc
, r_cond
);
1540 dc
->jump_pc
[0] = target
;
1541 dc
->jump_pc
[1] = dc
->npc
+ 4;
1546 static inline void gen_op_fcmps(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1550 gen_helper_fcmps(r_rs1
, r_rs2
);
1553 gen_helper_fcmps_fcc1(r_rs1
, r_rs2
);
1556 gen_helper_fcmps_fcc2(r_rs1
, r_rs2
);
1559 gen_helper_fcmps_fcc3(r_rs1
, r_rs2
);
/* Double-precision FP compare (operands in DT0/DT1), selected fcc field. */
static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}
/* Quad-precision FP compare (operands in QT0/QT1), selected fcc field. */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}
1600 static inline void gen_op_fcmpes(int fccno
, TCGv_i32 r_rs1
, TCGv_i32 r_rs2
)
1604 gen_helper_fcmpes(r_rs1
, r_rs2
);
1607 gen_helper_fcmpes_fcc1(r_rs1
, r_rs2
);
1610 gen_helper_fcmpes_fcc2(r_rs1
, r_rs2
);
1613 gen_helper_fcmpes_fcc3(r_rs1
, r_rs2
);
/* Double-precision FP compare raising an exception on unordered. */
static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}
/* Quad-precision FP compare raising an exception on unordered. */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
1656 static inline void gen_op_fcmps(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1658 gen_helper_fcmps(r_rs1
, r_rs2
);
/* Non-SPARC64 double compare; fccno ignored (single fcc field). */
static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}
/* Non-SPARC64 quad compare; fccno ignored (single fcc field). */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}
1671 static inline void gen_op_fcmpes(int fccno
, TCGv r_rs1
, TCGv r_rs2
)
1673 gen_helper_fcmpes(r_rs1
, r_rs2
);
/* Non-SPARC64 double compare with exception on unordered; fccno ignored. */
static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}
/* Non-SPARC64 quad compare with exception on unordered; fccno ignored. */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1687 static inline void gen_op_fpexception_im(int fsr_flags
)
1691 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_NMASK
);
1692 tcg_gen_ori_tl(cpu_fsr
, cpu_fsr
, fsr_flags
);
1693 r_const
= tcg_const_i32(TT_FP_EXCP
);
1694 gen_helper_raise_exception(r_const
);
1695 tcg_temp_free_i32(r_const
);
1698 static int gen_trap_ifnofpu(DisasContext
*dc
, TCGv r_cond
)
1700 #if !defined(CONFIG_USER_ONLY)
1701 if (!dc
->fpu_enabled
) {
1704 save_state(dc
, r_cond
);
1705 r_const
= tcg_const_i32(TT_NFPU_INSN
);
1706 gen_helper_raise_exception(r_const
);
1707 tcg_temp_free_i32(r_const
);
1715 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1717 tcg_gen_andi_tl(cpu_fsr
, cpu_fsr
, FSR_FTT_CEXC_NMASK
);
/* Reset the softfloat accrued-exception state before an FP operation. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1726 #ifdef TARGET_SPARC64
1727 static inline TCGv_i32
gen_get_asi(int insn
, TCGv r_addr
)
1733 r_asi
= tcg_temp_new_i32();
1734 tcg_gen_mov_i32(r_asi
, cpu_asi
);
1736 asi
= GET_FIELD(insn
, 19, 26);
1737 r_asi
= tcg_const_i32(asi
);
1742 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1745 TCGv_i32 r_asi
, r_size
, r_sign
;
1747 r_asi
= gen_get_asi(insn
, addr
);
1748 r_size
= tcg_const_i32(size
);
1749 r_sign
= tcg_const_i32(sign
);
1750 gen_helper_ld_asi(dst
, addr
, r_asi
, r_size
, r_sign
);
1751 tcg_temp_free_i32(r_sign
);
1752 tcg_temp_free_i32(r_size
);
1753 tcg_temp_free_i32(r_asi
);
1756 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1758 TCGv_i32 r_asi
, r_size
;
1760 r_asi
= gen_get_asi(insn
, addr
);
1761 r_size
= tcg_const_i32(size
);
1762 gen_helper_st_asi(addr
, src
, r_asi
, r_size
);
1763 tcg_temp_free_i32(r_size
);
1764 tcg_temp_free_i32(r_asi
);
1767 static inline void gen_ldf_asi(TCGv addr
, int insn
, int size
, int rd
)
1769 TCGv_i32 r_asi
, r_size
, r_rd
;
1771 r_asi
= gen_get_asi(insn
, addr
);
1772 r_size
= tcg_const_i32(size
);
1773 r_rd
= tcg_const_i32(rd
);
1774 gen_helper_ldf_asi(addr
, r_asi
, r_size
, r_rd
);
1775 tcg_temp_free_i32(r_rd
);
1776 tcg_temp_free_i32(r_size
);
1777 tcg_temp_free_i32(r_asi
);
1780 static inline void gen_stf_asi(TCGv addr
, int insn
, int size
, int rd
)
1782 TCGv_i32 r_asi
, r_size
, r_rd
;
1784 r_asi
= gen_get_asi(insn
, addr
);
1785 r_size
= tcg_const_i32(size
);
1786 r_rd
= tcg_const_i32(rd
);
1787 gen_helper_stf_asi(addr
, r_asi
, r_size
, r_rd
);
1788 tcg_temp_free_i32(r_rd
);
1789 tcg_temp_free_i32(r_size
);
1790 tcg_temp_free_i32(r_asi
);
1793 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1795 TCGv_i32 r_asi
, r_size
, r_sign
;
1797 r_asi
= gen_get_asi(insn
, addr
);
1798 r_size
= tcg_const_i32(4);
1799 r_sign
= tcg_const_i32(0);
1800 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1801 tcg_temp_free_i32(r_sign
);
1802 gen_helper_st_asi(addr
, dst
, r_asi
, r_size
);
1803 tcg_temp_free_i32(r_size
);
1804 tcg_temp_free_i32(r_asi
);
1805 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1808 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1810 TCGv_i32 r_asi
, r_rd
;
1812 r_asi
= gen_get_asi(insn
, addr
);
1813 r_rd
= tcg_const_i32(rd
);
1814 gen_helper_ldda_asi(addr
, r_asi
, r_rd
);
1815 tcg_temp_free_i32(r_rd
);
1816 tcg_temp_free_i32(r_asi
);
1819 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1821 TCGv_i32 r_asi
, r_size
;
1823 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1824 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1825 r_asi
= gen_get_asi(insn
, addr
);
1826 r_size
= tcg_const_i32(8);
1827 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1828 tcg_temp_free_i32(r_size
);
1829 tcg_temp_free_i32(r_asi
);
1832 static inline void gen_cas_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1838 r_val1
= tcg_temp_new();
1839 gen_movl_reg_TN(rd
, r_val1
);
1840 r_asi
= gen_get_asi(insn
, addr
);
1841 gen_helper_cas_asi(dst
, addr
, r_val1
, val2
, r_asi
);
1842 tcg_temp_free_i32(r_asi
);
1843 tcg_temp_free(r_val1
);
1846 static inline void gen_casx_asi(TCGv dst
, TCGv addr
, TCGv val2
, int insn
,
1851 gen_movl_reg_TN(rd
, cpu_tmp64
);
1852 r_asi
= gen_get_asi(insn
, addr
);
1853 gen_helper_casx_asi(dst
, addr
, cpu_tmp64
, val2
, r_asi
);
1854 tcg_temp_free_i32(r_asi
);
1857 #elif !defined(CONFIG_USER_ONLY)
1859 static inline void gen_ld_asi(TCGv dst
, TCGv addr
, int insn
, int size
,
1862 TCGv_i32 r_asi
, r_size
, r_sign
;
1864 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1865 r_size
= tcg_const_i32(size
);
1866 r_sign
= tcg_const_i32(sign
);
1867 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1868 tcg_temp_free(r_sign
);
1869 tcg_temp_free(r_size
);
1870 tcg_temp_free(r_asi
);
1871 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1874 static inline void gen_st_asi(TCGv src
, TCGv addr
, int insn
, int size
)
1876 TCGv_i32 r_asi
, r_size
;
1878 tcg_gen_extu_tl_i64(cpu_tmp64
, src
);
1879 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1880 r_size
= tcg_const_i32(size
);
1881 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1882 tcg_temp_free(r_size
);
1883 tcg_temp_free(r_asi
);
1886 static inline void gen_swap_asi(TCGv dst
, TCGv addr
, int insn
)
1888 TCGv_i32 r_asi
, r_size
, r_sign
;
1891 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1892 r_size
= tcg_const_i32(4);
1893 r_sign
= tcg_const_i32(0);
1894 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1895 tcg_temp_free(r_sign
);
1896 r_val
= tcg_temp_new_i64();
1897 tcg_gen_extu_tl_i64(r_val
, dst
);
1898 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1899 tcg_temp_free_i64(r_val
);
1900 tcg_temp_free(r_size
);
1901 tcg_temp_free(r_asi
);
1902 tcg_gen_trunc_i64_tl(dst
, cpu_tmp64
);
1905 static inline void gen_ldda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1907 TCGv_i32 r_asi
, r_size
, r_sign
;
1909 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1910 r_size
= tcg_const_i32(8);
1911 r_sign
= tcg_const_i32(0);
1912 gen_helper_ld_asi(cpu_tmp64
, addr
, r_asi
, r_size
, r_sign
);
1913 tcg_temp_free(r_sign
);
1914 tcg_temp_free(r_size
);
1915 tcg_temp_free(r_asi
);
1916 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
1917 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
1918 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
1919 tcg_gen_trunc_i64_tl(hi
, cpu_tmp64
);
1920 gen_movl_TN_reg(rd
, hi
);
1923 static inline void gen_stda_asi(TCGv hi
, TCGv addr
, int insn
, int rd
)
1925 TCGv_i32 r_asi
, r_size
;
1927 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
1928 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, hi
);
1929 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1930 r_size
= tcg_const_i32(8);
1931 gen_helper_st_asi(addr
, cpu_tmp64
, r_asi
, r_size
);
1932 tcg_temp_free(r_size
);
1933 tcg_temp_free(r_asi
);
1937 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1938 static inline void gen_ldstub_asi(TCGv dst
, TCGv addr
, int insn
)
1941 TCGv_i32 r_asi
, r_size
;
1943 gen_ld_asi(dst
, addr
, insn
, 1, 0);
1945 r_val
= tcg_const_i64(0xffULL
);
1946 r_asi
= tcg_const_i32(GET_FIELD(insn
, 19, 26));
1947 r_size
= tcg_const_i32(1);
1948 gen_helper_st_asi(addr
, r_val
, r_asi
, r_size
);
1949 tcg_temp_free_i32(r_size
);
1950 tcg_temp_free_i32(r_asi
);
1951 tcg_temp_free_i64(r_val
);
1955 static inline TCGv
get_src1(unsigned int insn
, TCGv def
)
1960 rs1
= GET_FIELD(insn
, 13, 17);
1962 r_rs1
= tcg_const_tl(0); // XXX how to free?
1964 r_rs1
= cpu_gregs
[rs1
];
1966 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs1
- 8) * sizeof(target_ulong
));
1970 static inline TCGv
get_src2(unsigned int insn
, TCGv def
)
1974 if (IS_IMM
) { /* immediate */
1977 simm
= GET_FIELDs(insn
, 19, 31);
1978 r_rs2
= tcg_const_tl(simm
); // XXX how to free?
1979 } else { /* register */
1982 rs2
= GET_FIELD(insn
, 27, 31);
1984 r_rs2
= tcg_const_tl(0); // XXX how to free?
1986 r_rs2
= cpu_gregs
[rs2
];
1988 tcg_gen_ld_tl(def
, cpu_regwptr
, (rs2
- 8) * sizeof(target_ulong
));
/* Bail out of disas_sparc_insn when the CPU model lacks a feature:
   integer-unit features raise illegal_insn, FPU features raise nfpu_insn.
   NOTE(review): goto targets reconstructed from the canonical source. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
2000 /* before an instruction, dc->pc must be static */
2001 static void disas_sparc_insn(DisasContext
* dc
)
2003 unsigned int insn
, opc
, rs1
, rs2
, rd
;
2006 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
)))
2007 tcg_gen_debug_insn_start(dc
->pc
);
2008 insn
= ldl_code(dc
->pc
);
2009 opc
= GET_FIELD(insn
, 0, 1);
2011 rd
= GET_FIELD(insn
, 2, 6);
2013 cpu_src1
= tcg_temp_new(); // const
2014 cpu_src2
= tcg_temp_new(); // const
2017 case 0: /* branches/sethi */
2019 unsigned int xop
= GET_FIELD(insn
, 7, 9);
2022 #ifdef TARGET_SPARC64
2023 case 0x1: /* V9 BPcc */
2027 target
= GET_FIELD_SP(insn
, 0, 18);
2028 target
= sign_extend(target
, 18);
2030 cc
= GET_FIELD_SP(insn
, 20, 21);
2032 do_branch(dc
, target
, insn
, 0, cpu_cond
);
2034 do_branch(dc
, target
, insn
, 1, cpu_cond
);
2039 case 0x3: /* V9 BPr */
2041 target
= GET_FIELD_SP(insn
, 0, 13) |
2042 (GET_FIELD_SP(insn
, 20, 21) << 14);
2043 target
= sign_extend(target
, 16);
2045 cpu_src1
= get_src1(insn
, cpu_src1
);
2046 do_branch_reg(dc
, target
, insn
, cpu_cond
, cpu_src1
);
2049 case 0x5: /* V9 FBPcc */
2051 int cc
= GET_FIELD_SP(insn
, 20, 21);
2052 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2054 target
= GET_FIELD_SP(insn
, 0, 18);
2055 target
= sign_extend(target
, 19);
2057 do_fbranch(dc
, target
, insn
, cc
, cpu_cond
);
2061 case 0x7: /* CBN+x */
2066 case 0x2: /* BN+x */
2068 target
= GET_FIELD(insn
, 10, 31);
2069 target
= sign_extend(target
, 22);
2071 do_branch(dc
, target
, insn
, 0, cpu_cond
);
2074 case 0x6: /* FBN+x */
2076 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2078 target
= GET_FIELD(insn
, 10, 31);
2079 target
= sign_extend(target
, 22);
2081 do_fbranch(dc
, target
, insn
, 0, cpu_cond
);
2084 case 0x4: /* SETHI */
2086 uint32_t value
= GET_FIELD(insn
, 10, 31);
2089 r_const
= tcg_const_tl(value
<< 10);
2090 gen_movl_TN_reg(rd
, r_const
);
2091 tcg_temp_free(r_const
);
2094 case 0x0: /* UNIMPL */
2103 target_long target
= GET_FIELDs(insn
, 2, 31) << 2;
2106 r_const
= tcg_const_tl(dc
->pc
);
2107 gen_movl_TN_reg(15, r_const
);
2108 tcg_temp_free(r_const
);
2110 gen_mov_pc_npc(dc
, cpu_cond
);
2114 case 2: /* FPU & Logical Operations */
2116 unsigned int xop
= GET_FIELD(insn
, 7, 12);
2117 if (xop
== 0x3a) { /* generate trap */
2120 cpu_src1
= get_src1(insn
, cpu_src1
);
2122 rs2
= GET_FIELD(insn
, 25, 31);
2123 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, rs2
);
2125 rs2
= GET_FIELD(insn
, 27, 31);
2127 gen_movl_reg_TN(rs2
, cpu_src2
);
2128 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
2130 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
2132 cond
= GET_FIELD(insn
, 3, 6);
2134 save_state(dc
, cpu_cond
);
2135 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2137 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2139 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2140 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2141 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2142 gen_helper_raise_exception(cpu_tmp32
);
2143 } else if (cond
!= 0) {
2144 TCGv r_cond
= tcg_temp_new();
2146 #ifdef TARGET_SPARC64
2148 int cc
= GET_FIELD_SP(insn
, 11, 12);
2150 save_state(dc
, cpu_cond
);
2152 gen_cond(r_cond
, 0, cond
);
2154 gen_cond(r_cond
, 1, cond
);
2158 save_state(dc
, cpu_cond
);
2159 gen_cond(r_cond
, 0, cond
);
2161 l1
= gen_new_label();
2162 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
2164 if ((dc
->def
->features
& CPU_FEATURE_HYPV
) &&
2166 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, UA2005_HTRAP_MASK
);
2168 tcg_gen_andi_tl(cpu_dst
, cpu_dst
, V8_TRAP_MASK
);
2169 tcg_gen_addi_tl(cpu_dst
, cpu_dst
, TT_TRAP
);
2170 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_dst
);
2171 gen_helper_raise_exception(cpu_tmp32
);
2174 tcg_temp_free(r_cond
);
2180 } else if (xop
== 0x28) {
2181 rs1
= GET_FIELD(insn
, 13, 17);
2184 #ifndef TARGET_SPARC64
2185 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2186 manual, rdy on the microSPARC
2188 case 0x0f: /* stbar in the SPARCv8 manual,
2189 rdy on the microSPARC II */
2190 case 0x10 ... 0x1f: /* implementation-dependent in the
2191 SPARCv8 manual, rdy on the
2194 gen_movl_TN_reg(rd
, cpu_y
);
2196 #ifdef TARGET_SPARC64
2197 case 0x2: /* V9 rdccr */
2198 gen_helper_rdccr(cpu_dst
);
2199 gen_movl_TN_reg(rd
, cpu_dst
);
2201 case 0x3: /* V9 rdasi */
2202 tcg_gen_ext_i32_tl(cpu_dst
, cpu_asi
);
2203 gen_movl_TN_reg(rd
, cpu_dst
);
2205 case 0x4: /* V9 rdtick */
2209 r_tickptr
= tcg_temp_new_ptr();
2210 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2211 offsetof(CPUState
, tick
));
2212 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2213 tcg_temp_free_ptr(r_tickptr
);
2214 gen_movl_TN_reg(rd
, cpu_dst
);
2217 case 0x5: /* V9 rdpc */
2221 r_const
= tcg_const_tl(dc
->pc
);
2222 gen_movl_TN_reg(rd
, r_const
);
2223 tcg_temp_free(r_const
);
2226 case 0x6: /* V9 rdfprs */
2227 tcg_gen_ext_i32_tl(cpu_dst
, cpu_fprs
);
2228 gen_movl_TN_reg(rd
, cpu_dst
);
2230 case 0xf: /* V9 membar */
2231 break; /* no effect */
2232 case 0x13: /* Graphics Status */
2233 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2235 gen_movl_TN_reg(rd
, cpu_gsr
);
2237 case 0x16: /* Softint */
2238 tcg_gen_ext_i32_tl(cpu_dst
, cpu_softint
);
2239 gen_movl_TN_reg(rd
, cpu_dst
);
2241 case 0x17: /* Tick compare */
2242 gen_movl_TN_reg(rd
, cpu_tick_cmpr
);
2244 case 0x18: /* System tick */
2248 r_tickptr
= tcg_temp_new_ptr();
2249 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2250 offsetof(CPUState
, stick
));
2251 gen_helper_tick_get_count(cpu_dst
, r_tickptr
);
2252 tcg_temp_free_ptr(r_tickptr
);
2253 gen_movl_TN_reg(rd
, cpu_dst
);
2256 case 0x19: /* System tick compare */
2257 gen_movl_TN_reg(rd
, cpu_stick_cmpr
);
2259 case 0x10: /* Performance Control */
2260 case 0x11: /* Performance Instrumentation Counter */
2261 case 0x12: /* Dispatch Control */
2262 case 0x14: /* Softint set, WO */
2263 case 0x15: /* Softint clear, WO */
2268 #if !defined(CONFIG_USER_ONLY)
2269 } else if (xop
== 0x29) { /* rdpsr / UA2005 rdhpr */
2270 #ifndef TARGET_SPARC64
2271 if (!supervisor(dc
))
2273 gen_helper_rdpsr(cpu_dst
);
2275 CHECK_IU_FEATURE(dc
, HYPV
);
2276 if (!hypervisor(dc
))
2278 rs1
= GET_FIELD(insn
, 13, 17);
2281 // gen_op_rdhpstate();
2284 // gen_op_rdhtstate();
2287 tcg_gen_mov_tl(cpu_dst
, cpu_hintp
);
2290 tcg_gen_mov_tl(cpu_dst
, cpu_htba
);
2293 tcg_gen_mov_tl(cpu_dst
, cpu_hver
);
2295 case 31: // hstick_cmpr
2296 tcg_gen_mov_tl(cpu_dst
, cpu_hstick_cmpr
);
2302 gen_movl_TN_reg(rd
, cpu_dst
);
2304 } else if (xop
== 0x2a) { /* rdwim / V9 rdpr */
2305 if (!supervisor(dc
))
2307 #ifdef TARGET_SPARC64
2308 rs1
= GET_FIELD(insn
, 13, 17);
2314 r_tsptr
= tcg_temp_new_ptr();
2315 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2316 offsetof(CPUState
, tsptr
));
2317 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2318 offsetof(trap_state
, tpc
));
2319 tcg_temp_free_ptr(r_tsptr
);
2326 r_tsptr
= tcg_temp_new_ptr();
2327 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2328 offsetof(CPUState
, tsptr
));
2329 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2330 offsetof(trap_state
, tnpc
));
2331 tcg_temp_free_ptr(r_tsptr
);
2338 r_tsptr
= tcg_temp_new_ptr();
2339 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2340 offsetof(CPUState
, tsptr
));
2341 tcg_gen_ld_tl(cpu_tmp0
, r_tsptr
,
2342 offsetof(trap_state
, tstate
));
2343 tcg_temp_free_ptr(r_tsptr
);
2350 r_tsptr
= tcg_temp_new_ptr();
2351 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
2352 offsetof(CPUState
, tsptr
));
2353 tcg_gen_ld_i32(cpu_tmp32
, r_tsptr
,
2354 offsetof(trap_state
, tt
));
2355 tcg_temp_free_ptr(r_tsptr
);
2356 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2363 r_tickptr
= tcg_temp_new_ptr();
2364 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
2365 offsetof(CPUState
, tick
));
2366 gen_helper_tick_get_count(cpu_tmp0
, r_tickptr
);
2367 gen_movl_TN_reg(rd
, cpu_tmp0
);
2368 tcg_temp_free_ptr(r_tickptr
);
2372 tcg_gen_mov_tl(cpu_tmp0
, cpu_tbr
);
2375 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2376 offsetof(CPUSPARCState
, pstate
));
2377 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2380 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2381 offsetof(CPUSPARCState
, tl
));
2382 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2385 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2386 offsetof(CPUSPARCState
, psrpil
));
2387 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2390 gen_helper_rdcwp(cpu_tmp0
);
2393 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2394 offsetof(CPUSPARCState
, cansave
));
2395 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2397 case 11: // canrestore
2398 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2399 offsetof(CPUSPARCState
, canrestore
));
2400 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2402 case 12: // cleanwin
2403 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2404 offsetof(CPUSPARCState
, cleanwin
));
2405 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2407 case 13: // otherwin
2408 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2409 offsetof(CPUSPARCState
, otherwin
));
2410 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2413 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2414 offsetof(CPUSPARCState
, wstate
));
2415 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2417 case 16: // UA2005 gl
2418 CHECK_IU_FEATURE(dc
, GL
);
2419 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
,
2420 offsetof(CPUSPARCState
, gl
));
2421 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_tmp32
);
2423 case 26: // UA2005 strand status
2424 CHECK_IU_FEATURE(dc
, HYPV
);
2425 if (!hypervisor(dc
))
2427 tcg_gen_mov_tl(cpu_tmp0
, cpu_ssr
);
2430 tcg_gen_mov_tl(cpu_tmp0
, cpu_ver
);
2437 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_wim
);
2439 gen_movl_TN_reg(rd
, cpu_tmp0
);
2441 } else if (xop
== 0x2b) { /* rdtbr / V9 flushw */
2442 #ifdef TARGET_SPARC64
2443 save_state(dc
, cpu_cond
);
2444 gen_helper_flushw();
2446 if (!supervisor(dc
))
2448 gen_movl_TN_reg(rd
, cpu_tbr
);
2452 } else if (xop
== 0x34) { /* FPU Operations */
2453 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2455 gen_op_clear_ieee_excp_and_FTT();
2456 rs1
= GET_FIELD(insn
, 13, 17);
2457 rs2
= GET_FIELD(insn
, 27, 31);
2458 xop
= GET_FIELD(insn
, 18, 26);
2460 case 0x1: /* fmovs */
2461 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2463 case 0x5: /* fnegs */
2464 gen_helper_fnegs(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2466 case 0x9: /* fabss */
2467 gen_helper_fabss(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2469 case 0x29: /* fsqrts */
2470 CHECK_FPU_FEATURE(dc
, FSQRT
);
2471 gen_clear_float_exceptions();
2472 gen_helper_fsqrts(cpu_tmp32
, cpu_fpr
[rs2
]);
2473 gen_helper_check_ieee_exceptions();
2474 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2476 case 0x2a: /* fsqrtd */
2477 CHECK_FPU_FEATURE(dc
, FSQRT
);
2478 gen_op_load_fpr_DT1(DFPREG(rs2
));
2479 gen_clear_float_exceptions();
2480 gen_helper_fsqrtd();
2481 gen_helper_check_ieee_exceptions();
2482 gen_op_store_DT0_fpr(DFPREG(rd
));
2484 case 0x2b: /* fsqrtq */
2485 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2486 gen_op_load_fpr_QT1(QFPREG(rs2
));
2487 gen_clear_float_exceptions();
2488 gen_helper_fsqrtq();
2489 gen_helper_check_ieee_exceptions();
2490 gen_op_store_QT0_fpr(QFPREG(rd
));
2492 case 0x41: /* fadds */
2493 gen_clear_float_exceptions();
2494 gen_helper_fadds(cpu_tmp32
,
2495 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2496 gen_helper_check_ieee_exceptions();
2497 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2499 case 0x42: /* faddd */
2500 gen_op_load_fpr_DT0(DFPREG(rs1
));
2501 gen_op_load_fpr_DT1(DFPREG(rs2
));
2502 gen_clear_float_exceptions();
2504 gen_helper_check_ieee_exceptions();
2505 gen_op_store_DT0_fpr(DFPREG(rd
));
2507 case 0x43: /* faddq */
2508 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2509 gen_op_load_fpr_QT0(QFPREG(rs1
));
2510 gen_op_load_fpr_QT1(QFPREG(rs2
));
2511 gen_clear_float_exceptions();
2513 gen_helper_check_ieee_exceptions();
2514 gen_op_store_QT0_fpr(QFPREG(rd
));
2516 case 0x45: /* fsubs */
2517 gen_clear_float_exceptions();
2518 gen_helper_fsubs(cpu_tmp32
,
2519 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2520 gen_helper_check_ieee_exceptions();
2521 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2523 case 0x46: /* fsubd */
2524 gen_op_load_fpr_DT0(DFPREG(rs1
));
2525 gen_op_load_fpr_DT1(DFPREG(rs2
));
2526 gen_clear_float_exceptions();
2528 gen_helper_check_ieee_exceptions();
2529 gen_op_store_DT0_fpr(DFPREG(rd
));
2531 case 0x47: /* fsubq */
2532 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2533 gen_op_load_fpr_QT0(QFPREG(rs1
));
2534 gen_op_load_fpr_QT1(QFPREG(rs2
));
2535 gen_clear_float_exceptions();
2537 gen_helper_check_ieee_exceptions();
2538 gen_op_store_QT0_fpr(QFPREG(rd
));
2540 case 0x49: /* fmuls */
2541 CHECK_FPU_FEATURE(dc
, FMUL
);
2542 gen_clear_float_exceptions();
2543 gen_helper_fmuls(cpu_tmp32
,
2544 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2545 gen_helper_check_ieee_exceptions();
2546 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2548 case 0x4a: /* fmuld */
2549 CHECK_FPU_FEATURE(dc
, FMUL
);
2550 gen_op_load_fpr_DT0(DFPREG(rs1
));
2551 gen_op_load_fpr_DT1(DFPREG(rs2
));
2552 gen_clear_float_exceptions();
2554 gen_helper_check_ieee_exceptions();
2555 gen_op_store_DT0_fpr(DFPREG(rd
));
2557 case 0x4b: /* fmulq */
2558 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2559 CHECK_FPU_FEATURE(dc
, FMUL
);
2560 gen_op_load_fpr_QT0(QFPREG(rs1
));
2561 gen_op_load_fpr_QT1(QFPREG(rs2
));
2562 gen_clear_float_exceptions();
2564 gen_helper_check_ieee_exceptions();
2565 gen_op_store_QT0_fpr(QFPREG(rd
));
2567 case 0x4d: /* fdivs */
2568 gen_clear_float_exceptions();
2569 gen_helper_fdivs(cpu_tmp32
,
2570 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2571 gen_helper_check_ieee_exceptions();
2572 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2574 case 0x4e: /* fdivd */
2575 gen_op_load_fpr_DT0(DFPREG(rs1
));
2576 gen_op_load_fpr_DT1(DFPREG(rs2
));
2577 gen_clear_float_exceptions();
2579 gen_helper_check_ieee_exceptions();
2580 gen_op_store_DT0_fpr(DFPREG(rd
));
2582 case 0x4f: /* fdivq */
2583 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2584 gen_op_load_fpr_QT0(QFPREG(rs1
));
2585 gen_op_load_fpr_QT1(QFPREG(rs2
));
2586 gen_clear_float_exceptions();
2588 gen_helper_check_ieee_exceptions();
2589 gen_op_store_QT0_fpr(QFPREG(rd
));
2591 case 0x69: /* fsmuld */
2592 CHECK_FPU_FEATURE(dc
, FSMULD
);
2593 gen_clear_float_exceptions();
2594 gen_helper_fsmuld(cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
2595 gen_helper_check_ieee_exceptions();
2596 gen_op_store_DT0_fpr(DFPREG(rd
));
2598 case 0x6e: /* fdmulq */
2599 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2600 gen_op_load_fpr_DT0(DFPREG(rs1
));
2601 gen_op_load_fpr_DT1(DFPREG(rs2
));
2602 gen_clear_float_exceptions();
2603 gen_helper_fdmulq();
2604 gen_helper_check_ieee_exceptions();
2605 gen_op_store_QT0_fpr(QFPREG(rd
));
2607 case 0xc4: /* fitos */
2608 gen_clear_float_exceptions();
2609 gen_helper_fitos(cpu_tmp32
, cpu_fpr
[rs2
]);
2610 gen_helper_check_ieee_exceptions();
2611 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2613 case 0xc6: /* fdtos */
2614 gen_op_load_fpr_DT1(DFPREG(rs2
));
2615 gen_clear_float_exceptions();
2616 gen_helper_fdtos(cpu_tmp32
);
2617 gen_helper_check_ieee_exceptions();
2618 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2620 case 0xc7: /* fqtos */
2621 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2622 gen_op_load_fpr_QT1(QFPREG(rs2
));
2623 gen_clear_float_exceptions();
2624 gen_helper_fqtos(cpu_tmp32
);
2625 gen_helper_check_ieee_exceptions();
2626 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2628 case 0xc8: /* fitod */
2629 gen_helper_fitod(cpu_fpr
[rs2
]);
2630 gen_op_store_DT0_fpr(DFPREG(rd
));
2632 case 0xc9: /* fstod */
2633 gen_helper_fstod(cpu_fpr
[rs2
]);
2634 gen_op_store_DT0_fpr(DFPREG(rd
));
2636 case 0xcb: /* fqtod */
2637 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2638 gen_op_load_fpr_QT1(QFPREG(rs2
));
2639 gen_clear_float_exceptions();
2641 gen_helper_check_ieee_exceptions();
2642 gen_op_store_DT0_fpr(DFPREG(rd
));
2644 case 0xcc: /* fitoq */
2645 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2646 gen_helper_fitoq(cpu_fpr
[rs2
]);
2647 gen_op_store_QT0_fpr(QFPREG(rd
));
2649 case 0xcd: /* fstoq */
2650 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2651 gen_helper_fstoq(cpu_fpr
[rs2
]);
2652 gen_op_store_QT0_fpr(QFPREG(rd
));
2654 case 0xce: /* fdtoq */
2655 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2656 gen_op_load_fpr_DT1(DFPREG(rs2
));
2658 gen_op_store_QT0_fpr(QFPREG(rd
));
2660 case 0xd1: /* fstoi */
2661 gen_clear_float_exceptions();
2662 gen_helper_fstoi(cpu_tmp32
, cpu_fpr
[rs2
]);
2663 gen_helper_check_ieee_exceptions();
2664 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2666 case 0xd2: /* fdtoi */
2667 gen_op_load_fpr_DT1(DFPREG(rs2
));
2668 gen_clear_float_exceptions();
2669 gen_helper_fdtoi(cpu_tmp32
);
2670 gen_helper_check_ieee_exceptions();
2671 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2673 case 0xd3: /* fqtoi */
2674 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2675 gen_op_load_fpr_QT1(QFPREG(rs2
));
2676 gen_clear_float_exceptions();
2677 gen_helper_fqtoi(cpu_tmp32
);
2678 gen_helper_check_ieee_exceptions();
2679 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2681 #ifdef TARGET_SPARC64
2682 case 0x2: /* V9 fmovd */
2683 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)],
2684 cpu_fpr
[DFPREG(rs2
)]);
2685 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
2686 cpu_fpr
[DFPREG(rs2
) + 1]);
2688 case 0x3: /* V9 fmovq */
2689 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2690 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)],
2691 cpu_fpr
[QFPREG(rs2
)]);
2692 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1],
2693 cpu_fpr
[QFPREG(rs2
) + 1]);
2694 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2],
2695 cpu_fpr
[QFPREG(rs2
) + 2]);
2696 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3],
2697 cpu_fpr
[QFPREG(rs2
) + 3]);
2699 case 0x6: /* V9 fnegd */
2700 gen_op_load_fpr_DT1(DFPREG(rs2
));
2702 gen_op_store_DT0_fpr(DFPREG(rd
));
2704 case 0x7: /* V9 fnegq */
2705 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2706 gen_op_load_fpr_QT1(QFPREG(rs2
));
2708 gen_op_store_QT0_fpr(QFPREG(rd
));
2710 case 0xa: /* V9 fabsd */
2711 gen_op_load_fpr_DT1(DFPREG(rs2
));
2713 gen_op_store_DT0_fpr(DFPREG(rd
));
2715 case 0xb: /* V9 fabsq */
2716 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2717 gen_op_load_fpr_QT1(QFPREG(rs2
));
2719 gen_op_store_QT0_fpr(QFPREG(rd
));
2721 case 0x81: /* V9 fstox */
2722 gen_clear_float_exceptions();
2723 gen_helper_fstox(cpu_fpr
[rs2
]);
2724 gen_helper_check_ieee_exceptions();
2725 gen_op_store_DT0_fpr(DFPREG(rd
));
2727 case 0x82: /* V9 fdtox */
2728 gen_op_load_fpr_DT1(DFPREG(rs2
));
2729 gen_clear_float_exceptions();
2731 gen_helper_check_ieee_exceptions();
2732 gen_op_store_DT0_fpr(DFPREG(rd
));
2734 case 0x83: /* V9 fqtox */
2735 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2736 gen_op_load_fpr_QT1(QFPREG(rs2
));
2737 gen_clear_float_exceptions();
2739 gen_helper_check_ieee_exceptions();
2740 gen_op_store_DT0_fpr(DFPREG(rd
));
2742 case 0x84: /* V9 fxtos */
2743 gen_op_load_fpr_DT1(DFPREG(rs2
));
2744 gen_clear_float_exceptions();
2745 gen_helper_fxtos(cpu_tmp32
);
2746 gen_helper_check_ieee_exceptions();
2747 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_tmp32
);
2749 case 0x88: /* V9 fxtod */
2750 gen_op_load_fpr_DT1(DFPREG(rs2
));
2751 gen_clear_float_exceptions();
2753 gen_helper_check_ieee_exceptions();
2754 gen_op_store_DT0_fpr(DFPREG(rd
));
2756 case 0x8c: /* V9 fxtoq */
2757 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2758 gen_op_load_fpr_DT1(DFPREG(rs2
));
2759 gen_clear_float_exceptions();
2761 gen_helper_check_ieee_exceptions();
2762 gen_op_store_QT0_fpr(QFPREG(rd
));
2768 } else if (xop
== 0x35) { /* FPU Operations */
2769 #ifdef TARGET_SPARC64
2772 if (gen_trap_ifnofpu(dc
, cpu_cond
))
2774 gen_op_clear_ieee_excp_and_FTT();
2775 rs1
= GET_FIELD(insn
, 13, 17);
2776 rs2
= GET_FIELD(insn
, 27, 31);
2777 xop
= GET_FIELD(insn
, 18, 26);
2778 #ifdef TARGET_SPARC64
2779 if ((xop
& 0x11f) == 0x005) { // V9 fmovsr
2782 l1
= gen_new_label();
2783 cond
= GET_FIELD_SP(insn
, 14, 17);
2784 cpu_src1
= get_src1(insn
, cpu_src1
);
2785 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2787 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
2790 } else if ((xop
& 0x11f) == 0x006) { // V9 fmovdr
2793 l1
= gen_new_label();
2794 cond
= GET_FIELD_SP(insn
, 14, 17);
2795 cpu_src1
= get_src1(insn
, cpu_src1
);
2796 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2798 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
2799 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_fpr
[DFPREG(rs2
) + 1]);
2802 } else if ((xop
& 0x11f) == 0x007) { // V9 fmovqr
2805 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2806 l1
= gen_new_label();
2807 cond
= GET_FIELD_SP(insn
, 14, 17);
2808 cpu_src1
= get_src1(insn
, cpu_src1
);
2809 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
], cpu_src1
,
2811 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
)], cpu_fpr
[QFPREG(rs2
)]);
2812 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 1], cpu_fpr
[QFPREG(rs2
) + 1]);
2813 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 2], cpu_fpr
[QFPREG(rs2
) + 2]);
2814 tcg_gen_mov_i32(cpu_fpr
[QFPREG(rd
) + 3], cpu_fpr
[QFPREG(rs2
) + 3]);
2820 #ifdef TARGET_SPARC64
2821 #define FMOVSCC(fcc) \
2826 l1 = gen_new_label(); \
2827 r_cond = tcg_temp_new(); \
2828 cond = GET_FIELD_SP(insn, 14, 17); \
2829 gen_fcond(r_cond, fcc, cond); \
2830 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2832 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2833 gen_set_label(l1); \
2834 tcg_temp_free(r_cond); \
2836 #define FMOVDCC(fcc) \
2841 l1 = gen_new_label(); \
2842 r_cond = tcg_temp_new(); \
2843 cond = GET_FIELD_SP(insn, 14, 17); \
2844 gen_fcond(r_cond, fcc, cond); \
2845 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2847 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2848 cpu_fpr[DFPREG(rs2)]); \
2849 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2850 cpu_fpr[DFPREG(rs2) + 1]); \
2851 gen_set_label(l1); \
2852 tcg_temp_free(r_cond); \
2854 #define FMOVQCC(fcc) \
2859 l1 = gen_new_label(); \
2860 r_cond = tcg_temp_new(); \
2861 cond = GET_FIELD_SP(insn, 14, 17); \
2862 gen_fcond(r_cond, fcc, cond); \
2863 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2865 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2866 cpu_fpr[QFPREG(rs2)]); \
2867 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2868 cpu_fpr[QFPREG(rs2) + 1]); \
2869 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2870 cpu_fpr[QFPREG(rs2) + 2]); \
2871 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2872 cpu_fpr[QFPREG(rs2) + 3]); \
2873 gen_set_label(l1); \
2874 tcg_temp_free(r_cond); \
2876 case 0x001: /* V9 fmovscc %fcc0 */
2879 case 0x002: /* V9 fmovdcc %fcc0 */
2882 case 0x003: /* V9 fmovqcc %fcc0 */
2883 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2886 case 0x041: /* V9 fmovscc %fcc1 */
2889 case 0x042: /* V9 fmovdcc %fcc1 */
2892 case 0x043: /* V9 fmovqcc %fcc1 */
2893 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2896 case 0x081: /* V9 fmovscc %fcc2 */
2899 case 0x082: /* V9 fmovdcc %fcc2 */
2902 case 0x083: /* V9 fmovqcc %fcc2 */
2903 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2906 case 0x0c1: /* V9 fmovscc %fcc3 */
2909 case 0x0c2: /* V9 fmovdcc %fcc3 */
2912 case 0x0c3: /* V9 fmovqcc %fcc3 */
2913 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2919 #define FMOVSCC(icc) \
2924 l1 = gen_new_label(); \
2925 r_cond = tcg_temp_new(); \
2926 cond = GET_FIELD_SP(insn, 14, 17); \
2927 gen_cond(r_cond, icc, cond); \
2928 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2930 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2931 gen_set_label(l1); \
2932 tcg_temp_free(r_cond); \
2934 #define FMOVDCC(icc) \
2939 l1 = gen_new_label(); \
2940 r_cond = tcg_temp_new(); \
2941 cond = GET_FIELD_SP(insn, 14, 17); \
2942 gen_cond(r_cond, icc, cond); \
2943 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2945 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2946 cpu_fpr[DFPREG(rs2)]); \
2947 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2948 cpu_fpr[DFPREG(rs2) + 1]); \
2949 gen_set_label(l1); \
2950 tcg_temp_free(r_cond); \
2952 #define FMOVQCC(icc) \
2957 l1 = gen_new_label(); \
2958 r_cond = tcg_temp_new(); \
2959 cond = GET_FIELD_SP(insn, 14, 17); \
2960 gen_cond(r_cond, icc, cond); \
2961 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2963 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2964 cpu_fpr[QFPREG(rs2)]); \
2965 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2966 cpu_fpr[QFPREG(rs2) + 1]); \
2967 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2968 cpu_fpr[QFPREG(rs2) + 2]); \
2969 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2970 cpu_fpr[QFPREG(rs2) + 3]); \
2971 gen_set_label(l1); \
2972 tcg_temp_free(r_cond); \
2975 case 0x101: /* V9 fmovscc %icc */
2978 case 0x102: /* V9 fmovdcc %icc */
2980 case 0x103: /* V9 fmovqcc %icc */
2981 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2984 case 0x181: /* V9 fmovscc %xcc */
2987 case 0x182: /* V9 fmovdcc %xcc */
2990 case 0x183: /* V9 fmovqcc %xcc */
2991 CHECK_FPU_FEATURE(dc
, FLOAT128
);
2998 case 0x51: /* fcmps, V9 %fcc */
2999 gen_op_fcmps(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3001 case 0x52: /* fcmpd, V9 %fcc */
3002 gen_op_load_fpr_DT0(DFPREG(rs1
));
3003 gen_op_load_fpr_DT1(DFPREG(rs2
));
3004 gen_op_fcmpd(rd
& 3);
3006 case 0x53: /* fcmpq, V9 %fcc */
3007 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3008 gen_op_load_fpr_QT0(QFPREG(rs1
));
3009 gen_op_load_fpr_QT1(QFPREG(rs2
));
3010 gen_op_fcmpq(rd
& 3);
3012 case 0x55: /* fcmpes, V9 %fcc */
3013 gen_op_fcmpes(rd
& 3, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
3015 case 0x56: /* fcmped, V9 %fcc */
3016 gen_op_load_fpr_DT0(DFPREG(rs1
));
3017 gen_op_load_fpr_DT1(DFPREG(rs2
));
3018 gen_op_fcmped(rd
& 3);
3020 case 0x57: /* fcmpeq, V9 %fcc */
3021 CHECK_FPU_FEATURE(dc
, FLOAT128
);
3022 gen_op_load_fpr_QT0(QFPREG(rs1
));
3023 gen_op_load_fpr_QT1(QFPREG(rs2
));
3024 gen_op_fcmpeq(rd
& 3);
3029 } else if (xop
== 0x2) {
3032 rs1
= GET_FIELD(insn
, 13, 17);
3034 // or %g0, x, y -> mov T0, x; mov y, T0
3035 if (IS_IMM
) { /* immediate */
3038 simm
= GET_FIELDs(insn
, 19, 31);
3039 r_const
= tcg_const_tl(simm
);
3040 gen_movl_TN_reg(rd
, r_const
);
3041 tcg_temp_free(r_const
);
3042 } else { /* register */
3043 rs2
= GET_FIELD(insn
, 27, 31);
3044 gen_movl_reg_TN(rs2
, cpu_dst
);
3045 gen_movl_TN_reg(rd
, cpu_dst
);
3048 cpu_src1
= get_src1(insn
, cpu_src1
);
3049 if (IS_IMM
) { /* immediate */
3050 simm
= GET_FIELDs(insn
, 19, 31);
3051 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
3052 gen_movl_TN_reg(rd
, cpu_dst
);
3053 } else { /* register */
3054 // or x, %g0, y -> mov T1, x; mov y, T1
3055 rs2
= GET_FIELD(insn
, 27, 31);
3057 gen_movl_reg_TN(rs2
, cpu_src2
);
3058 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3059 gen_movl_TN_reg(rd
, cpu_dst
);
3061 gen_movl_TN_reg(rd
, cpu_src1
);
3064 #ifdef TARGET_SPARC64
3065 } else if (xop
== 0x25) { /* sll, V9 sllx */
3066 cpu_src1
= get_src1(insn
, cpu_src1
);
3067 if (IS_IMM
) { /* immediate */
3068 simm
= GET_FIELDs(insn
, 20, 31);
3069 if (insn
& (1 << 12)) {
3070 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3072 tcg_gen_shli_i64(cpu_dst
, cpu_src1
, simm
& 0x1f);
3074 } else { /* register */
3075 rs2
= GET_FIELD(insn
, 27, 31);
3076 gen_movl_reg_TN(rs2
, cpu_src2
);
3077 if (insn
& (1 << 12)) {
3078 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3080 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3082 tcg_gen_shl_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3084 gen_movl_TN_reg(rd
, cpu_dst
);
3085 } else if (xop
== 0x26) { /* srl, V9 srlx */
3086 cpu_src1
= get_src1(insn
, cpu_src1
);
3087 if (IS_IMM
) { /* immediate */
3088 simm
= GET_FIELDs(insn
, 20, 31);
3089 if (insn
& (1 << 12)) {
3090 tcg_gen_shri_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3092 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3093 tcg_gen_shri_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
3095 } else { /* register */
3096 rs2
= GET_FIELD(insn
, 27, 31);
3097 gen_movl_reg_TN(rs2
, cpu_src2
);
3098 if (insn
& (1 << 12)) {
3099 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3100 tcg_gen_shr_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3102 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3103 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3104 tcg_gen_shr_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3107 gen_movl_TN_reg(rd
, cpu_dst
);
3108 } else if (xop
== 0x27) { /* sra, V9 srax */
3109 cpu_src1
= get_src1(insn
, cpu_src1
);
3110 if (IS_IMM
) { /* immediate */
3111 simm
= GET_FIELDs(insn
, 20, 31);
3112 if (insn
& (1 << 12)) {
3113 tcg_gen_sari_i64(cpu_dst
, cpu_src1
, simm
& 0x3f);
3115 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3116 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3117 tcg_gen_sari_i64(cpu_dst
, cpu_dst
, simm
& 0x1f);
3119 } else { /* register */
3120 rs2
= GET_FIELD(insn
, 27, 31);
3121 gen_movl_reg_TN(rs2
, cpu_src2
);
3122 if (insn
& (1 << 12)) {
3123 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x3f);
3124 tcg_gen_sar_i64(cpu_dst
, cpu_src1
, cpu_tmp0
);
3126 tcg_gen_andi_i64(cpu_tmp0
, cpu_src2
, 0x1f);
3127 tcg_gen_andi_i64(cpu_dst
, cpu_src1
, 0xffffffffULL
);
3128 tcg_gen_ext32s_i64(cpu_dst
, cpu_dst
);
3129 tcg_gen_sar_i64(cpu_dst
, cpu_dst
, cpu_tmp0
);
3132 gen_movl_TN_reg(rd
, cpu_dst
);
3134 } else if (xop
< 0x36) {
3136 cpu_src1
= get_src1(insn
, cpu_src1
);
3137 cpu_src2
= get_src2(insn
, cpu_src2
);
3138 switch (xop
& ~0x10) {
3141 simm
= GET_FIELDs(insn
, 19, 31);
3143 gen_op_addi_cc(cpu_dst
, cpu_src1
, simm
);
3145 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
3149 gen_op_add_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3151 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3157 simm
= GET_FIELDs(insn
, 19, 31);
3158 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, simm
);
3160 tcg_gen_and_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3163 gen_op_logic_cc(cpu_dst
);
3168 simm
= GET_FIELDs(insn
, 19, 31);
3169 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, simm
);
3171 tcg_gen_or_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3174 gen_op_logic_cc(cpu_dst
);
3178 simm
= GET_FIELDs(insn
, 19, 31);
3179 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, simm
);
3181 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3184 gen_op_logic_cc(cpu_dst
);
3188 simm
= GET_FIELDs(insn
, 19, 31);
3190 gen_op_subi_cc(cpu_dst
, cpu_src1
, simm
);
3192 tcg_gen_subi_tl(cpu_dst
, cpu_src1
, simm
);
3196 gen_op_sub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3198 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3202 case 0x5: /* andn */
3204 simm
= GET_FIELDs(insn
, 19, 31);
3205 tcg_gen_andi_tl(cpu_dst
, cpu_src1
, ~simm
);
3207 tcg_gen_andc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3210 gen_op_logic_cc(cpu_dst
);
3214 simm
= GET_FIELDs(insn
, 19, 31);
3215 tcg_gen_ori_tl(cpu_dst
, cpu_src1
, ~simm
);
3217 tcg_gen_orc_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3220 gen_op_logic_cc(cpu_dst
);
3222 case 0x7: /* xorn */
3224 simm
= GET_FIELDs(insn
, 19, 31);
3225 tcg_gen_xori_tl(cpu_dst
, cpu_src1
, ~simm
);
3227 tcg_gen_not_tl(cpu_tmp0
, cpu_src2
);
3228 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3231 gen_op_logic_cc(cpu_dst
);
3233 case 0x8: /* addx, V9 addc */
3235 simm
= GET_FIELDs(insn
, 19, 31);
3237 gen_op_addxi_cc(cpu_dst
, cpu_src1
, simm
);
3239 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3240 tcg_gen_addi_tl(cpu_tmp0
, cpu_tmp0
, simm
);
3241 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3245 gen_op_addx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3247 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3248 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3249 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3253 #ifdef TARGET_SPARC64
3254 case 0x9: /* V9 mulx */
3256 simm
= GET_FIELDs(insn
, 19, 31);
3257 tcg_gen_muli_i64(cpu_dst
, cpu_src1
, simm
);
3259 tcg_gen_mul_i64(cpu_dst
, cpu_src1
, cpu_src2
);
3263 case 0xa: /* umul */
3264 CHECK_IU_FEATURE(dc
, MUL
);
3265 gen_op_umul(cpu_dst
, cpu_src1
, cpu_src2
);
3267 gen_op_logic_cc(cpu_dst
);
3269 case 0xb: /* smul */
3270 CHECK_IU_FEATURE(dc
, MUL
);
3271 gen_op_smul(cpu_dst
, cpu_src1
, cpu_src2
);
3273 gen_op_logic_cc(cpu_dst
);
3275 case 0xc: /* subx, V9 subc */
3277 simm
= GET_FIELDs(insn
, 19, 31);
3279 gen_op_subxi_cc(cpu_dst
, cpu_src1
, simm
);
3281 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3282 tcg_gen_addi_tl(cpu_tmp0
, cpu_tmp0
, simm
);
3283 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3287 gen_op_subx_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3289 gen_mov_reg_C(cpu_tmp0
, cpu_psr
);
3290 tcg_gen_add_tl(cpu_tmp0
, cpu_src2
, cpu_tmp0
);
3291 tcg_gen_sub_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3295 #ifdef TARGET_SPARC64
3296 case 0xd: /* V9 udivx */
3297 tcg_gen_mov_tl(cpu_cc_src
, cpu_src1
);
3298 tcg_gen_mov_tl(cpu_cc_src2
, cpu_src2
);
3299 gen_trap_ifdivzero_tl(cpu_cc_src2
);
3300 tcg_gen_divu_i64(cpu_dst
, cpu_cc_src
, cpu_cc_src2
);
3303 case 0xe: /* udiv */
3304 CHECK_IU_FEATURE(dc
, DIV
);
3305 gen_helper_udiv(cpu_dst
, cpu_src1
, cpu_src2
);
3307 gen_op_div_cc(cpu_dst
);
3309 case 0xf: /* sdiv */
3310 CHECK_IU_FEATURE(dc
, DIV
);
3311 gen_helper_sdiv(cpu_dst
, cpu_src1
, cpu_src2
);
3313 gen_op_div_cc(cpu_dst
);
3318 gen_movl_TN_reg(rd
, cpu_dst
);
3320 cpu_src1
= get_src1(insn
, cpu_src1
);
3321 cpu_src2
= get_src2(insn
, cpu_src2
);
3323 case 0x20: /* taddcc */
3324 gen_op_tadd_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3325 gen_movl_TN_reg(rd
, cpu_dst
);
3327 case 0x21: /* tsubcc */
3328 gen_op_tsub_cc(cpu_dst
, cpu_src1
, cpu_src2
);
3329 gen_movl_TN_reg(rd
, cpu_dst
);
3331 case 0x22: /* taddcctv */
3332 save_state(dc
, cpu_cond
);
3333 gen_op_tadd_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3334 gen_movl_TN_reg(rd
, cpu_dst
);
3336 case 0x23: /* tsubcctv */
3337 save_state(dc
, cpu_cond
);
3338 gen_op_tsub_ccTV(cpu_dst
, cpu_src1
, cpu_src2
);
3339 gen_movl_TN_reg(rd
, cpu_dst
);
3341 case 0x24: /* mulscc */
3342 gen_op_mulscc(cpu_dst
, cpu_src1
, cpu_src2
);
3343 gen_movl_TN_reg(rd
, cpu_dst
);
3345 #ifndef TARGET_SPARC64
3346 case 0x25: /* sll */
3347 if (IS_IMM
) { /* immediate */
3348 simm
= GET_FIELDs(insn
, 20, 31);
3349 tcg_gen_shli_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3350 } else { /* register */
3351 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3352 tcg_gen_shl_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3354 gen_movl_TN_reg(rd
, cpu_dst
);
3356 case 0x26: /* srl */
3357 if (IS_IMM
) { /* immediate */
3358 simm
= GET_FIELDs(insn
, 20, 31);
3359 tcg_gen_shri_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3360 } else { /* register */
3361 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3362 tcg_gen_shr_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3364 gen_movl_TN_reg(rd
, cpu_dst
);
3366 case 0x27: /* sra */
3367 if (IS_IMM
) { /* immediate */
3368 simm
= GET_FIELDs(insn
, 20, 31);
3369 tcg_gen_sari_tl(cpu_dst
, cpu_src1
, simm
& 0x1f);
3370 } else { /* register */
3371 tcg_gen_andi_tl(cpu_tmp0
, cpu_src2
, 0x1f);
3372 tcg_gen_sar_tl(cpu_dst
, cpu_src1
, cpu_tmp0
);
3374 gen_movl_TN_reg(rd
, cpu_dst
);
3381 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3382 tcg_gen_andi_tl(cpu_y
, cpu_tmp0
, 0xffffffff);
3384 #ifndef TARGET_SPARC64
3385 case 0x01 ... 0x0f: /* undefined in the
3389 case 0x10 ... 0x1f: /* implementation-dependent
3395 case 0x2: /* V9 wrccr */
3396 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3397 gen_helper_wrccr(cpu_dst
);
3399 case 0x3: /* V9 wrasi */
3400 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3401 tcg_gen_trunc_tl_i32(cpu_asi
, cpu_dst
);
3403 case 0x6: /* V9 wrfprs */
3404 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3405 tcg_gen_trunc_tl_i32(cpu_fprs
, cpu_dst
);
3406 save_state(dc
, cpu_cond
);
3411 case 0xf: /* V9 sir, nop if user */
3412 #if !defined(CONFIG_USER_ONLY)
3417 case 0x13: /* Graphics Status */
3418 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3420 tcg_gen_xor_tl(cpu_gsr
, cpu_src1
, cpu_src2
);
3422 case 0x14: /* Softint set */
3423 if (!supervisor(dc
))
3425 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3426 gen_helper_set_softint(cpu_tmp64
);
3428 case 0x15: /* Softint clear */
3429 if (!supervisor(dc
))
3431 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3432 gen_helper_clear_softint(cpu_tmp64
);
3434 case 0x16: /* Softint write */
3435 if (!supervisor(dc
))
3437 tcg_gen_xor_tl(cpu_tmp64
, cpu_src1
, cpu_src2
);
3438 gen_helper_write_softint(cpu_tmp64
);
3440 case 0x17: /* Tick compare */
3441 #if !defined(CONFIG_USER_ONLY)
3442 if (!supervisor(dc
))
3448 tcg_gen_xor_tl(cpu_tick_cmpr
, cpu_src1
,
3450 r_tickptr
= tcg_temp_new_ptr();
3451 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3452 offsetof(CPUState
, tick
));
3453 gen_helper_tick_set_limit(r_tickptr
,
3455 tcg_temp_free_ptr(r_tickptr
);
3458 case 0x18: /* System tick */
3459 #if !defined(CONFIG_USER_ONLY)
3460 if (!supervisor(dc
))
3466 tcg_gen_xor_tl(cpu_dst
, cpu_src1
,
3468 r_tickptr
= tcg_temp_new_ptr();
3469 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3470 offsetof(CPUState
, stick
));
3471 gen_helper_tick_set_count(r_tickptr
,
3473 tcg_temp_free_ptr(r_tickptr
);
3476 case 0x19: /* System tick compare */
3477 #if !defined(CONFIG_USER_ONLY)
3478 if (!supervisor(dc
))
3484 tcg_gen_xor_tl(cpu_stick_cmpr
, cpu_src1
,
3486 r_tickptr
= tcg_temp_new_ptr();
3487 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3488 offsetof(CPUState
, stick
));
3489 gen_helper_tick_set_limit(r_tickptr
,
3491 tcg_temp_free_ptr(r_tickptr
);
3495 case 0x10: /* Performance Control */
3496 case 0x11: /* Performance Instrumentation
3498 case 0x12: /* Dispatch Control */
3505 #if !defined(CONFIG_USER_ONLY)
3506 case 0x31: /* wrpsr, V9 saved, restored */
3508 if (!supervisor(dc
))
3510 #ifdef TARGET_SPARC64
3516 gen_helper_restored();
3518 case 2: /* UA2005 allclean */
3519 case 3: /* UA2005 otherw */
3520 case 4: /* UA2005 normalw */
3521 case 5: /* UA2005 invalw */
3527 tcg_gen_xor_tl(cpu_dst
, cpu_src1
, cpu_src2
);
3528 gen_helper_wrpsr(cpu_dst
);
3529 save_state(dc
, cpu_cond
);
3536 case 0x32: /* wrwim, V9 wrpr */
3538 if (!supervisor(dc
))
3540 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3541 #ifdef TARGET_SPARC64
3547 r_tsptr
= tcg_temp_new_ptr();
3548 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3549 offsetof(CPUState
, tsptr
));
3550 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3551 offsetof(trap_state
, tpc
));
3552 tcg_temp_free_ptr(r_tsptr
);
3559 r_tsptr
= tcg_temp_new_ptr();
3560 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3561 offsetof(CPUState
, tsptr
));
3562 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3563 offsetof(trap_state
, tnpc
));
3564 tcg_temp_free_ptr(r_tsptr
);
3571 r_tsptr
= tcg_temp_new_ptr();
3572 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3573 offsetof(CPUState
, tsptr
));
3574 tcg_gen_st_tl(cpu_tmp0
, r_tsptr
,
3575 offsetof(trap_state
,
3577 tcg_temp_free_ptr(r_tsptr
);
3584 r_tsptr
= tcg_temp_new_ptr();
3585 tcg_gen_ld_ptr(r_tsptr
, cpu_env
,
3586 offsetof(CPUState
, tsptr
));
3587 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3588 tcg_gen_st_i32(cpu_tmp32
, r_tsptr
,
3589 offsetof(trap_state
, tt
));
3590 tcg_temp_free_ptr(r_tsptr
);
3597 r_tickptr
= tcg_temp_new_ptr();
3598 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3599 offsetof(CPUState
, tick
));
3600 gen_helper_tick_set_count(r_tickptr
,
3602 tcg_temp_free_ptr(r_tickptr
);
3606 tcg_gen_mov_tl(cpu_tbr
, cpu_tmp0
);
3609 save_state(dc
, cpu_cond
);
3610 gen_helper_wrpstate(cpu_tmp0
);
3616 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3617 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3618 offsetof(CPUSPARCState
, tl
));
3621 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3622 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3623 offsetof(CPUSPARCState
,
3627 gen_helper_wrcwp(cpu_tmp0
);
3630 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3631 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3632 offsetof(CPUSPARCState
,
3635 case 11: // canrestore
3636 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3637 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3638 offsetof(CPUSPARCState
,
3641 case 12: // cleanwin
3642 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3643 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3644 offsetof(CPUSPARCState
,
3647 case 13: // otherwin
3648 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3649 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3650 offsetof(CPUSPARCState
,
3654 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3655 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3656 offsetof(CPUSPARCState
,
3659 case 16: // UA2005 gl
3660 CHECK_IU_FEATURE(dc
, GL
);
3661 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3662 tcg_gen_st_i32(cpu_tmp32
, cpu_env
,
3663 offsetof(CPUSPARCState
, gl
));
3665 case 26: // UA2005 strand status
3666 CHECK_IU_FEATURE(dc
, HYPV
);
3667 if (!hypervisor(dc
))
3669 tcg_gen_mov_tl(cpu_ssr
, cpu_tmp0
);
3675 tcg_gen_trunc_tl_i32(cpu_tmp32
, cpu_tmp0
);
3676 if (dc
->def
->nwindows
!= 32)
3677 tcg_gen_andi_tl(cpu_tmp32
, cpu_tmp32
,
3678 (1 << dc
->def
->nwindows
) - 1);
3679 tcg_gen_mov_i32(cpu_wim
, cpu_tmp32
);
3683 case 0x33: /* wrtbr, UA2005 wrhpr */
3685 #ifndef TARGET_SPARC64
3686 if (!supervisor(dc
))
3688 tcg_gen_xor_tl(cpu_tbr
, cpu_src1
, cpu_src2
);
3690 CHECK_IU_FEATURE(dc
, HYPV
);
3691 if (!hypervisor(dc
))
3693 tcg_gen_xor_tl(cpu_tmp0
, cpu_src1
, cpu_src2
);
3696 // XXX gen_op_wrhpstate();
3697 save_state(dc
, cpu_cond
);
3703 // XXX gen_op_wrhtstate();
3706 tcg_gen_mov_tl(cpu_hintp
, cpu_tmp0
);
3709 tcg_gen_mov_tl(cpu_htba
, cpu_tmp0
);
3711 case 31: // hstick_cmpr
3715 tcg_gen_mov_tl(cpu_hstick_cmpr
, cpu_tmp0
);
3716 r_tickptr
= tcg_temp_new_ptr();
3717 tcg_gen_ld_ptr(r_tickptr
, cpu_env
,
3718 offsetof(CPUState
, hstick
));
3719 gen_helper_tick_set_limit(r_tickptr
,
3721 tcg_temp_free_ptr(r_tickptr
);
3724 case 6: // hver readonly
3732 #ifdef TARGET_SPARC64
3733 case 0x2c: /* V9 movcc */
3735 int cc
= GET_FIELD_SP(insn
, 11, 12);
3736 int cond
= GET_FIELD_SP(insn
, 14, 17);
3740 r_cond
= tcg_temp_new();
3741 if (insn
& (1 << 18)) {
3743 gen_cond(r_cond
, 0, cond
);
3745 gen_cond(r_cond
, 1, cond
);
3749 gen_fcond(r_cond
, cc
, cond
);
3752 l1
= gen_new_label();
3754 tcg_gen_brcondi_tl(TCG_COND_EQ
, r_cond
, 0, l1
);
3755 if (IS_IMM
) { /* immediate */
3758 simm
= GET_FIELD_SPs(insn
, 0, 10);
3759 r_const
= tcg_const_tl(simm
);
3760 gen_movl_TN_reg(rd
, r_const
);
3761 tcg_temp_free(r_const
);
3763 rs2
= GET_FIELD_SP(insn
, 0, 4);
3764 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3765 gen_movl_TN_reg(rd
, cpu_tmp0
);
3768 tcg_temp_free(r_cond
);
3771 case 0x2d: /* V9 sdivx */
3772 gen_op_sdivx(cpu_dst
, cpu_src1
, cpu_src2
);
3773 gen_movl_TN_reg(rd
, cpu_dst
);
3775 case 0x2e: /* V9 popc */
3777 cpu_src2
= get_src2(insn
, cpu_src2
);
3778 gen_helper_popc(cpu_dst
, cpu_src2
);
3779 gen_movl_TN_reg(rd
, cpu_dst
);
3781 case 0x2f: /* V9 movr */
3783 int cond
= GET_FIELD_SP(insn
, 10, 12);
3786 cpu_src1
= get_src1(insn
, cpu_src1
);
3788 l1
= gen_new_label();
3790 tcg_gen_brcondi_tl(gen_tcg_cond_reg
[cond
],
3792 if (IS_IMM
) { /* immediate */
3795 simm
= GET_FIELD_SPs(insn
, 0, 9);
3796 r_const
= tcg_const_tl(simm
);
3797 gen_movl_TN_reg(rd
, r_const
);
3798 tcg_temp_free(r_const
);
3800 rs2
= GET_FIELD_SP(insn
, 0, 4);
3801 gen_movl_reg_TN(rs2
, cpu_tmp0
);
3802 gen_movl_TN_reg(rd
, cpu_tmp0
);
3812 } else if (xop
== 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3813 #ifdef TARGET_SPARC64
3814 int opf
= GET_FIELD_SP(insn
, 5, 13);
3815 rs1
= GET_FIELD(insn
, 13, 17);
3816 rs2
= GET_FIELD(insn
, 27, 31);
3817 if (gen_trap_ifnofpu(dc
, cpu_cond
))
3821 case 0x000: /* VIS I edge8cc */
3822 case 0x001: /* VIS II edge8n */
3823 case 0x002: /* VIS I edge8lcc */
3824 case 0x003: /* VIS II edge8ln */
3825 case 0x004: /* VIS I edge16cc */
3826 case 0x005: /* VIS II edge16n */
3827 case 0x006: /* VIS I edge16lcc */
3828 case 0x007: /* VIS II edge16ln */
3829 case 0x008: /* VIS I edge32cc */
3830 case 0x009: /* VIS II edge32n */
3831 case 0x00a: /* VIS I edge32lcc */
3832 case 0x00b: /* VIS II edge32ln */
3835 case 0x010: /* VIS I array8 */
3836 CHECK_FPU_FEATURE(dc
, VIS1
);
3837 cpu_src1
= get_src1(insn
, cpu_src1
);
3838 gen_movl_reg_TN(rs2
, cpu_src2
);
3839 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3840 gen_movl_TN_reg(rd
, cpu_dst
);
3842 case 0x012: /* VIS I array16 */
3843 CHECK_FPU_FEATURE(dc
, VIS1
);
3844 cpu_src1
= get_src1(insn
, cpu_src1
);
3845 gen_movl_reg_TN(rs2
, cpu_src2
);
3846 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3847 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 1);
3848 gen_movl_TN_reg(rd
, cpu_dst
);
3850 case 0x014: /* VIS I array32 */
3851 CHECK_FPU_FEATURE(dc
, VIS1
);
3852 cpu_src1
= get_src1(insn
, cpu_src1
);
3853 gen_movl_reg_TN(rs2
, cpu_src2
);
3854 gen_helper_array8(cpu_dst
, cpu_src1
, cpu_src2
);
3855 tcg_gen_shli_i64(cpu_dst
, cpu_dst
, 2);
3856 gen_movl_TN_reg(rd
, cpu_dst
);
3858 case 0x018: /* VIS I alignaddr */
3859 CHECK_FPU_FEATURE(dc
, VIS1
);
3860 cpu_src1
= get_src1(insn
, cpu_src1
);
3861 gen_movl_reg_TN(rs2
, cpu_src2
);
3862 gen_helper_alignaddr(cpu_dst
, cpu_src1
, cpu_src2
);
3863 gen_movl_TN_reg(rd
, cpu_dst
);
3865 case 0x019: /* VIS II bmask */
3866 case 0x01a: /* VIS I alignaddrl */
3869 case 0x020: /* VIS I fcmple16 */
3870 CHECK_FPU_FEATURE(dc
, VIS1
);
3871 gen_op_load_fpr_DT0(DFPREG(rs1
));
3872 gen_op_load_fpr_DT1(DFPREG(rs2
));
3873 gen_helper_fcmple16();
3874 gen_op_store_DT0_fpr(DFPREG(rd
));
3876 case 0x022: /* VIS I fcmpne16 */
3877 CHECK_FPU_FEATURE(dc
, VIS1
);
3878 gen_op_load_fpr_DT0(DFPREG(rs1
));
3879 gen_op_load_fpr_DT1(DFPREG(rs2
));
3880 gen_helper_fcmpne16();
3881 gen_op_store_DT0_fpr(DFPREG(rd
));
3883 case 0x024: /* VIS I fcmple32 */
3884 CHECK_FPU_FEATURE(dc
, VIS1
);
3885 gen_op_load_fpr_DT0(DFPREG(rs1
));
3886 gen_op_load_fpr_DT1(DFPREG(rs2
));
3887 gen_helper_fcmple32();
3888 gen_op_store_DT0_fpr(DFPREG(rd
));
3890 case 0x026: /* VIS I fcmpne32 */
3891 CHECK_FPU_FEATURE(dc
, VIS1
);
3892 gen_op_load_fpr_DT0(DFPREG(rs1
));
3893 gen_op_load_fpr_DT1(DFPREG(rs2
));
3894 gen_helper_fcmpne32();
3895 gen_op_store_DT0_fpr(DFPREG(rd
));
3897 case 0x028: /* VIS I fcmpgt16 */
3898 CHECK_FPU_FEATURE(dc
, VIS1
);
3899 gen_op_load_fpr_DT0(DFPREG(rs1
));
3900 gen_op_load_fpr_DT1(DFPREG(rs2
));
3901 gen_helper_fcmpgt16();
3902 gen_op_store_DT0_fpr(DFPREG(rd
));
3904 case 0x02a: /* VIS I fcmpeq16 */
3905 CHECK_FPU_FEATURE(dc
, VIS1
);
3906 gen_op_load_fpr_DT0(DFPREG(rs1
));
3907 gen_op_load_fpr_DT1(DFPREG(rs2
));
3908 gen_helper_fcmpeq16();
3909 gen_op_store_DT0_fpr(DFPREG(rd
));
3911 case 0x02c: /* VIS I fcmpgt32 */
3912 CHECK_FPU_FEATURE(dc
, VIS1
);
3913 gen_op_load_fpr_DT0(DFPREG(rs1
));
3914 gen_op_load_fpr_DT1(DFPREG(rs2
));
3915 gen_helper_fcmpgt32();
3916 gen_op_store_DT0_fpr(DFPREG(rd
));
3918 case 0x02e: /* VIS I fcmpeq32 */
3919 CHECK_FPU_FEATURE(dc
, VIS1
);
3920 gen_op_load_fpr_DT0(DFPREG(rs1
));
3921 gen_op_load_fpr_DT1(DFPREG(rs2
));
3922 gen_helper_fcmpeq32();
3923 gen_op_store_DT0_fpr(DFPREG(rd
));
3925 case 0x031: /* VIS I fmul8x16 */
3926 CHECK_FPU_FEATURE(dc
, VIS1
);
3927 gen_op_load_fpr_DT0(DFPREG(rs1
));
3928 gen_op_load_fpr_DT1(DFPREG(rs2
));
3929 gen_helper_fmul8x16();
3930 gen_op_store_DT0_fpr(DFPREG(rd
));
3932 case 0x033: /* VIS I fmul8x16au */
3933 CHECK_FPU_FEATURE(dc
, VIS1
);
3934 gen_op_load_fpr_DT0(DFPREG(rs1
));
3935 gen_op_load_fpr_DT1(DFPREG(rs2
));
3936 gen_helper_fmul8x16au();
3937 gen_op_store_DT0_fpr(DFPREG(rd
));
3939 case 0x035: /* VIS I fmul8x16al */
3940 CHECK_FPU_FEATURE(dc
, VIS1
);
3941 gen_op_load_fpr_DT0(DFPREG(rs1
));
3942 gen_op_load_fpr_DT1(DFPREG(rs2
));
3943 gen_helper_fmul8x16al();
3944 gen_op_store_DT0_fpr(DFPREG(rd
));
3946 case 0x036: /* VIS I fmul8sux16 */
3947 CHECK_FPU_FEATURE(dc
, VIS1
);
3948 gen_op_load_fpr_DT0(DFPREG(rs1
));
3949 gen_op_load_fpr_DT1(DFPREG(rs2
));
3950 gen_helper_fmul8sux16();
3951 gen_op_store_DT0_fpr(DFPREG(rd
));
3953 case 0x037: /* VIS I fmul8ulx16 */
3954 CHECK_FPU_FEATURE(dc
, VIS1
);
3955 gen_op_load_fpr_DT0(DFPREG(rs1
));
3956 gen_op_load_fpr_DT1(DFPREG(rs2
));
3957 gen_helper_fmul8ulx16();
3958 gen_op_store_DT0_fpr(DFPREG(rd
));
3960 case 0x038: /* VIS I fmuld8sux16 */
3961 CHECK_FPU_FEATURE(dc
, VIS1
);
3962 gen_op_load_fpr_DT0(DFPREG(rs1
));
3963 gen_op_load_fpr_DT1(DFPREG(rs2
));
3964 gen_helper_fmuld8sux16();
3965 gen_op_store_DT0_fpr(DFPREG(rd
));
3967 case 0x039: /* VIS I fmuld8ulx16 */
3968 CHECK_FPU_FEATURE(dc
, VIS1
);
3969 gen_op_load_fpr_DT0(DFPREG(rs1
));
3970 gen_op_load_fpr_DT1(DFPREG(rs2
));
3971 gen_helper_fmuld8ulx16();
3972 gen_op_store_DT0_fpr(DFPREG(rd
));
3974 case 0x03a: /* VIS I fpack32 */
3975 case 0x03b: /* VIS I fpack16 */
3976 case 0x03d: /* VIS I fpackfix */
3977 case 0x03e: /* VIS I pdist */
3980 case 0x048: /* VIS I faligndata */
3981 CHECK_FPU_FEATURE(dc
, VIS1
);
3982 gen_op_load_fpr_DT0(DFPREG(rs1
));
3983 gen_op_load_fpr_DT1(DFPREG(rs2
));
3984 gen_helper_faligndata();
3985 gen_op_store_DT0_fpr(DFPREG(rd
));
3987 case 0x04b: /* VIS I fpmerge */
3988 CHECK_FPU_FEATURE(dc
, VIS1
);
3989 gen_op_load_fpr_DT0(DFPREG(rs1
));
3990 gen_op_load_fpr_DT1(DFPREG(rs2
));
3991 gen_helper_fpmerge();
3992 gen_op_store_DT0_fpr(DFPREG(rd
));
3994 case 0x04c: /* VIS II bshuffle */
3997 case 0x04d: /* VIS I fexpand */
3998 CHECK_FPU_FEATURE(dc
, VIS1
);
3999 gen_op_load_fpr_DT0(DFPREG(rs1
));
4000 gen_op_load_fpr_DT1(DFPREG(rs2
));
4001 gen_helper_fexpand();
4002 gen_op_store_DT0_fpr(DFPREG(rd
));
4004 case 0x050: /* VIS I fpadd16 */
4005 CHECK_FPU_FEATURE(dc
, VIS1
);
4006 gen_op_load_fpr_DT0(DFPREG(rs1
));
4007 gen_op_load_fpr_DT1(DFPREG(rs2
));
4008 gen_helper_fpadd16();
4009 gen_op_store_DT0_fpr(DFPREG(rd
));
4011 case 0x051: /* VIS I fpadd16s */
4012 CHECK_FPU_FEATURE(dc
, VIS1
);
4013 gen_helper_fpadd16s(cpu_fpr
[rd
],
4014 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4016 case 0x052: /* VIS I fpadd32 */
4017 CHECK_FPU_FEATURE(dc
, VIS1
);
4018 gen_op_load_fpr_DT0(DFPREG(rs1
));
4019 gen_op_load_fpr_DT1(DFPREG(rs2
));
4020 gen_helper_fpadd32();
4021 gen_op_store_DT0_fpr(DFPREG(rd
));
4023 case 0x053: /* VIS I fpadd32s */
4024 CHECK_FPU_FEATURE(dc
, VIS1
);
4025 gen_helper_fpadd32s(cpu_fpr
[rd
],
4026 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4028 case 0x054: /* VIS I fpsub16 */
4029 CHECK_FPU_FEATURE(dc
, VIS1
);
4030 gen_op_load_fpr_DT0(DFPREG(rs1
));
4031 gen_op_load_fpr_DT1(DFPREG(rs2
));
4032 gen_helper_fpsub16();
4033 gen_op_store_DT0_fpr(DFPREG(rd
));
4035 case 0x055: /* VIS I fpsub16s */
4036 CHECK_FPU_FEATURE(dc
, VIS1
);
4037 gen_helper_fpsub16s(cpu_fpr
[rd
],
4038 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4040 case 0x056: /* VIS I fpsub32 */
4041 CHECK_FPU_FEATURE(dc
, VIS1
);
4042 gen_op_load_fpr_DT0(DFPREG(rs1
));
4043 gen_op_load_fpr_DT1(DFPREG(rs2
));
4044 gen_helper_fpsub32();
4045 gen_op_store_DT0_fpr(DFPREG(rd
));
4047 case 0x057: /* VIS I fpsub32s */
4048 CHECK_FPU_FEATURE(dc
, VIS1
);
4049 gen_helper_fpsub32s(cpu_fpr
[rd
],
4050 cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4052 case 0x060: /* VIS I fzero */
4053 CHECK_FPU_FEATURE(dc
, VIS1
);
4054 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], 0);
4055 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], 0);
4057 case 0x061: /* VIS I fzeros */
4058 CHECK_FPU_FEATURE(dc
, VIS1
);
4059 tcg_gen_movi_i32(cpu_fpr
[rd
], 0);
4061 case 0x062: /* VIS I fnor */
4062 CHECK_FPU_FEATURE(dc
, VIS1
);
4063 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
4064 cpu_fpr
[DFPREG(rs2
)]);
4065 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
4066 cpu_fpr
[DFPREG(rs2
) + 1]);
4068 case 0x063: /* VIS I fnors */
4069 CHECK_FPU_FEATURE(dc
, VIS1
);
4070 tcg_gen_nor_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4072 case 0x064: /* VIS I fandnot2 */
4073 CHECK_FPU_FEATURE(dc
, VIS1
);
4074 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4075 cpu_fpr
[DFPREG(rs2
)]);
4076 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4077 cpu_fpr
[DFPREG(rs1
) + 1],
4078 cpu_fpr
[DFPREG(rs2
) + 1]);
4080 case 0x065: /* VIS I fandnot2s */
4081 CHECK_FPU_FEATURE(dc
, VIS1
);
4082 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4084 case 0x066: /* VIS I fnot2 */
4085 CHECK_FPU_FEATURE(dc
, VIS1
);
4086 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)]);
4087 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
4088 cpu_fpr
[DFPREG(rs2
) + 1]);
4090 case 0x067: /* VIS I fnot2s */
4091 CHECK_FPU_FEATURE(dc
, VIS1
);
4092 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4094 case 0x068: /* VIS I fandnot1 */
4095 CHECK_FPU_FEATURE(dc
, VIS1
);
4096 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4097 cpu_fpr
[DFPREG(rs1
)]);
4098 tcg_gen_andc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4099 cpu_fpr
[DFPREG(rs2
) + 1],
4100 cpu_fpr
[DFPREG(rs1
) + 1]);
4102 case 0x069: /* VIS I fandnot1s */
4103 CHECK_FPU_FEATURE(dc
, VIS1
);
4104 tcg_gen_andc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4106 case 0x06a: /* VIS I fnot1 */
4107 CHECK_FPU_FEATURE(dc
, VIS1
);
4108 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4109 tcg_gen_not_i32(cpu_fpr
[DFPREG(rd
) + 1],
4110 cpu_fpr
[DFPREG(rs1
) + 1]);
4112 case 0x06b: /* VIS I fnot1s */
4113 CHECK_FPU_FEATURE(dc
, VIS1
);
4114 tcg_gen_not_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4116 case 0x06c: /* VIS I fxor */
4117 CHECK_FPU_FEATURE(dc
, VIS1
);
4118 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4119 cpu_fpr
[DFPREG(rs2
)]);
4120 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1],
4121 cpu_fpr
[DFPREG(rs1
) + 1],
4122 cpu_fpr
[DFPREG(rs2
) + 1]);
4124 case 0x06d: /* VIS I fxors */
4125 CHECK_FPU_FEATURE(dc
, VIS1
);
4126 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4128 case 0x06e: /* VIS I fnand */
4129 CHECK_FPU_FEATURE(dc
, VIS1
);
4130 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
)],
4131 cpu_fpr
[DFPREG(rs2
)]);
4132 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs1
) + 1],
4133 cpu_fpr
[DFPREG(rs2
) + 1]);
4135 case 0x06f: /* VIS I fnands */
4136 CHECK_FPU_FEATURE(dc
, VIS1
);
4137 tcg_gen_nand_i32(cpu_tmp32
, cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4139 case 0x070: /* VIS I fand */
4140 CHECK_FPU_FEATURE(dc
, VIS1
);
4141 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4142 cpu_fpr
[DFPREG(rs2
)]);
4143 tcg_gen_and_i32(cpu_fpr
[DFPREG(rd
) + 1],
4144 cpu_fpr
[DFPREG(rs1
) + 1],
4145 cpu_fpr
[DFPREG(rs2
) + 1]);
4147 case 0x071: /* VIS I fands */
4148 CHECK_FPU_FEATURE(dc
, VIS1
);
4149 tcg_gen_and_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4151 case 0x072: /* VIS I fxnor */
4152 CHECK_FPU_FEATURE(dc
, VIS1
);
4153 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
)], -1);
4154 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
)], cpu_tmp32
,
4155 cpu_fpr
[DFPREG(rs1
)]);
4156 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[DFPREG(rs2
) + 1], -1);
4157 tcg_gen_xor_i32(cpu_fpr
[DFPREG(rd
) + 1], cpu_tmp32
,
4158 cpu_fpr
[DFPREG(rs1
) + 1]);
4160 case 0x073: /* VIS I fxnors */
4161 CHECK_FPU_FEATURE(dc
, VIS1
);
4162 tcg_gen_xori_i32(cpu_tmp32
, cpu_fpr
[rs2
], -1);
4163 tcg_gen_xor_i32(cpu_fpr
[rd
], cpu_tmp32
, cpu_fpr
[rs1
]);
4165 case 0x074: /* VIS I fsrc1 */
4166 CHECK_FPU_FEATURE(dc
, VIS1
);
4167 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)]);
4168 tcg_gen_mov_i32(cpu_fpr
[DFPREG(rd
) + 1],
4169 cpu_fpr
[DFPREG(rs1
) + 1]);
4171 case 0x075: /* VIS I fsrc1s */
4172 CHECK_FPU_FEATURE(dc
, VIS1
);
4173 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
]);
4175 case 0x076: /* VIS I fornot2 */
4176 CHECK_FPU_FEATURE(dc
, VIS1
);
4177 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4178 cpu_fpr
[DFPREG(rs2
)]);
4179 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4180 cpu_fpr
[DFPREG(rs1
) + 1],
4181 cpu_fpr
[DFPREG(rs2
) + 1]);
4183 case 0x077: /* VIS I fornot2s */
4184 CHECK_FPU_FEATURE(dc
, VIS1
);
4185 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4187 case 0x078: /* VIS I fsrc2 */
4188 CHECK_FPU_FEATURE(dc
, VIS1
);
4189 gen_op_load_fpr_DT0(DFPREG(rs2
));
4190 gen_op_store_DT0_fpr(DFPREG(rd
));
4192 case 0x079: /* VIS I fsrc2s */
4193 CHECK_FPU_FEATURE(dc
, VIS1
);
4194 tcg_gen_mov_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
]);
4196 case 0x07a: /* VIS I fornot1 */
4197 CHECK_FPU_FEATURE(dc
, VIS1
);
4198 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs2
)],
4199 cpu_fpr
[DFPREG(rs1
)]);
4200 tcg_gen_orc_i32(cpu_fpr
[DFPREG(rd
) + 1],
4201 cpu_fpr
[DFPREG(rs2
) + 1],
4202 cpu_fpr
[DFPREG(rs1
) + 1]);
4204 case 0x07b: /* VIS I fornot1s */
4205 CHECK_FPU_FEATURE(dc
, VIS1
);
4206 tcg_gen_orc_i32(cpu_fpr
[rd
], cpu_fpr
[rs2
], cpu_fpr
[rs1
]);
4208 case 0x07c: /* VIS I for */
4209 CHECK_FPU_FEATURE(dc
, VIS1
);
4210 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
)], cpu_fpr
[DFPREG(rs1
)],
4211 cpu_fpr
[DFPREG(rs2
)]);
4212 tcg_gen_or_i32(cpu_fpr
[DFPREG(rd
) + 1],
4213 cpu_fpr
[DFPREG(rs1
) + 1],
4214 cpu_fpr
[DFPREG(rs2
) + 1]);
4216 case 0x07d: /* VIS I fors */
4217 CHECK_FPU_FEATURE(dc
, VIS1
);
4218 tcg_gen_or_i32(cpu_fpr
[rd
], cpu_fpr
[rs1
], cpu_fpr
[rs2
]);
4220 case 0x07e: /* VIS I fone */
4221 CHECK_FPU_FEATURE(dc
, VIS1
);
4222 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
)], -1);
4223 tcg_gen_movi_i32(cpu_fpr
[DFPREG(rd
) + 1], -1);
4225 case 0x07f: /* VIS I fones */
4226 CHECK_FPU_FEATURE(dc
, VIS1
);
4227 tcg_gen_movi_i32(cpu_fpr
[rd
], -1);
4229 case 0x080: /* VIS I shutdown */
4230 case 0x081: /* VIS II siam */
4239 } else if (xop
== 0x37) { /* V8 CPop2, V9 impdep2 */
4240 #ifdef TARGET_SPARC64
4245 #ifdef TARGET_SPARC64
4246 } else if (xop
== 0x39) { /* V9 return */
4249 save_state(dc
, cpu_cond
);
4250 cpu_src1
= get_src1(insn
, cpu_src1
);
4251 if (IS_IMM
) { /* immediate */
4252 simm
= GET_FIELDs(insn
, 19, 31);
4253 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4254 } else { /* register */
4255 rs2
= GET_FIELD(insn
, 27, 31);
4257 gen_movl_reg_TN(rs2
, cpu_src2
);
4258 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4260 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4262 gen_helper_restore();
4263 gen_mov_pc_npc(dc
, cpu_cond
);
4264 r_const
= tcg_const_i32(3);
4265 gen_helper_check_align(cpu_dst
, r_const
);
4266 tcg_temp_free_i32(r_const
);
4267 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4268 dc
->npc
= DYNAMIC_PC
;
4272 cpu_src1
= get_src1(insn
, cpu_src1
);
4273 if (IS_IMM
) { /* immediate */
4274 simm
= GET_FIELDs(insn
, 19, 31);
4275 tcg_gen_addi_tl(cpu_dst
, cpu_src1
, simm
);
4276 } else { /* register */
4277 rs2
= GET_FIELD(insn
, 27, 31);
4279 gen_movl_reg_TN(rs2
, cpu_src2
);
4280 tcg_gen_add_tl(cpu_dst
, cpu_src1
, cpu_src2
);
4282 tcg_gen_mov_tl(cpu_dst
, cpu_src1
);
4285 case 0x38: /* jmpl */
4290 r_pc
= tcg_const_tl(dc
->pc
);
4291 gen_movl_TN_reg(rd
, r_pc
);
4292 tcg_temp_free(r_pc
);
4293 gen_mov_pc_npc(dc
, cpu_cond
);
4294 r_const
= tcg_const_i32(3);
4295 gen_helper_check_align(cpu_dst
, r_const
);
4296 tcg_temp_free_i32(r_const
);
4297 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4298 dc
->npc
= DYNAMIC_PC
;
4301 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4302 case 0x39: /* rett, V9 return */
4306 if (!supervisor(dc
))
4308 gen_mov_pc_npc(dc
, cpu_cond
);
4309 r_const
= tcg_const_i32(3);
4310 gen_helper_check_align(cpu_dst
, r_const
);
4311 tcg_temp_free_i32(r_const
);
4312 tcg_gen_mov_tl(cpu_npc
, cpu_dst
);
4313 dc
->npc
= DYNAMIC_PC
;
4318 case 0x3b: /* flush */
4319 if (!((dc
)->def
->features
& CPU_FEATURE_FLUSH
))
4321 gen_helper_flush(cpu_dst
);
4323 case 0x3c: /* save */
4324 save_state(dc
, cpu_cond
);
4326 gen_movl_TN_reg(rd
, cpu_dst
);
4328 case 0x3d: /* restore */
4329 save_state(dc
, cpu_cond
);
4330 gen_helper_restore();
4331 gen_movl_TN_reg(rd
, cpu_dst
);
4333 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4334 case 0x3e: /* V9 done/retry */
4338 if (!supervisor(dc
))
4340 dc
->npc
= DYNAMIC_PC
;
4341 dc
->pc
= DYNAMIC_PC
;
4345 if (!supervisor(dc
))
4347 dc
->npc
= DYNAMIC_PC
;
4348 dc
->pc
= DYNAMIC_PC
;
4364 case 3: /* load/store instructions */
4366 unsigned int xop
= GET_FIELD(insn
, 7, 12);
4368 cpu_src1
= get_src1(insn
, cpu_src1
);
4369 if (xop
== 0x3c || xop
== 0x3e) { // V9 casa/casxa
4370 rs2
= GET_FIELD(insn
, 27, 31);
4371 gen_movl_reg_TN(rs2
, cpu_src2
);
4372 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4373 } else if (IS_IMM
) { /* immediate */
4374 simm
= GET_FIELDs(insn
, 19, 31);
4375 tcg_gen_addi_tl(cpu_addr
, cpu_src1
, simm
);
4376 } else { /* register */
4377 rs2
= GET_FIELD(insn
, 27, 31);
4379 gen_movl_reg_TN(rs2
, cpu_src2
);
4380 tcg_gen_add_tl(cpu_addr
, cpu_src1
, cpu_src2
);
4382 tcg_gen_mov_tl(cpu_addr
, cpu_src1
);
4384 if (xop
< 4 || (xop
> 7 && xop
< 0x14 && xop
!= 0x0e) ||
4385 (xop
> 0x17 && xop
<= 0x1d ) ||
4386 (xop
> 0x2c && xop
<= 0x33) || xop
== 0x1f || xop
== 0x3d) {
4388 case 0x0: /* ld, V9 lduw, load unsigned word */
4389 gen_address_mask(dc
, cpu_addr
);
4390 tcg_gen_qemu_ld32u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4392 case 0x1: /* ldub, load unsigned byte */
4393 gen_address_mask(dc
, cpu_addr
);
4394 tcg_gen_qemu_ld8u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4396 case 0x2: /* lduh, load unsigned halfword */
4397 gen_address_mask(dc
, cpu_addr
);
4398 tcg_gen_qemu_ld16u(cpu_val
, cpu_addr
, dc
->mem_idx
);
4400 case 0x3: /* ldd, load double word */
4406 save_state(dc
, cpu_cond
);
4407 r_const
= tcg_const_i32(7);
4408 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4409 tcg_temp_free_i32(r_const
);
4410 gen_address_mask(dc
, cpu_addr
);
4411 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4412 tcg_gen_trunc_i64_tl(cpu_tmp0
, cpu_tmp64
);
4413 tcg_gen_andi_tl(cpu_tmp0
, cpu_tmp0
, 0xffffffffULL
);
4414 gen_movl_TN_reg(rd
+ 1, cpu_tmp0
);
4415 tcg_gen_shri_i64(cpu_tmp64
, cpu_tmp64
, 32);
4416 tcg_gen_trunc_i64_tl(cpu_val
, cpu_tmp64
);
4417 tcg_gen_andi_tl(cpu_val
, cpu_val
, 0xffffffffULL
);
4420 case 0x9: /* ldsb, load signed byte */
4421 gen_address_mask(dc
, cpu_addr
);
4422 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4424 case 0xa: /* ldsh, load signed halfword */
4425 gen_address_mask(dc
, cpu_addr
);
4426 tcg_gen_qemu_ld16s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4428 case 0xd: /* ldstub -- XXX: should be atomically */
4432 gen_address_mask(dc
, cpu_addr
);
4433 tcg_gen_qemu_ld8s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4434 r_const
= tcg_const_tl(0xff);
4435 tcg_gen_qemu_st8(r_const
, cpu_addr
, dc
->mem_idx
);
4436 tcg_temp_free(r_const
);
4439 case 0x0f: /* swap, swap register with memory. Also
4441 CHECK_IU_FEATURE(dc
, SWAP
);
4442 gen_movl_reg_TN(rd
, cpu_val
);
4443 gen_address_mask(dc
, cpu_addr
);
4444 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4445 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4446 tcg_gen_mov_tl(cpu_val
, cpu_tmp0
);
4448 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4449 case 0x10: /* lda, V9 lduwa, load word alternate */
4450 #ifndef TARGET_SPARC64
4453 if (!supervisor(dc
))
4456 save_state(dc
, cpu_cond
);
4457 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 0);
4459 case 0x11: /* lduba, load unsigned byte alternate */
4460 #ifndef TARGET_SPARC64
4463 if (!supervisor(dc
))
4466 save_state(dc
, cpu_cond
);
4467 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 0);
4469 case 0x12: /* lduha, load unsigned halfword alternate */
4470 #ifndef TARGET_SPARC64
4473 if (!supervisor(dc
))
4476 save_state(dc
, cpu_cond
);
4477 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 0);
4479 case 0x13: /* ldda, load double word alternate */
4480 #ifndef TARGET_SPARC64
4483 if (!supervisor(dc
))
4488 save_state(dc
, cpu_cond
);
4489 gen_ldda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4491 case 0x19: /* ldsba, load signed byte alternate */
4492 #ifndef TARGET_SPARC64
4495 if (!supervisor(dc
))
4498 save_state(dc
, cpu_cond
);
4499 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 1, 1);
4501 case 0x1a: /* ldsha, load signed halfword alternate */
4502 #ifndef TARGET_SPARC64
4505 if (!supervisor(dc
))
4508 save_state(dc
, cpu_cond
);
4509 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 2, 1);
4511 case 0x1d: /* ldstuba -- XXX: should be atomically */
4512 #ifndef TARGET_SPARC64
4515 if (!supervisor(dc
))
4518 save_state(dc
, cpu_cond
);
4519 gen_ldstub_asi(cpu_val
, cpu_addr
, insn
);
4521 case 0x1f: /* swapa, swap reg with alt. memory. Also
4523 CHECK_IU_FEATURE(dc
, SWAP
);
4524 #ifndef TARGET_SPARC64
4527 if (!supervisor(dc
))
4530 save_state(dc
, cpu_cond
);
4531 gen_movl_reg_TN(rd
, cpu_val
);
4532 gen_swap_asi(cpu_val
, cpu_addr
, insn
);
4535 #ifndef TARGET_SPARC64
4536 case 0x30: /* ldc */
4537 case 0x31: /* ldcsr */
4538 case 0x33: /* lddc */
4542 #ifdef TARGET_SPARC64
4543 case 0x08: /* V9 ldsw */
4544 gen_address_mask(dc
, cpu_addr
);
4545 tcg_gen_qemu_ld32s(cpu_val
, cpu_addr
, dc
->mem_idx
);
4547 case 0x0b: /* V9 ldx */
4548 gen_address_mask(dc
, cpu_addr
);
4549 tcg_gen_qemu_ld64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4551 case 0x18: /* V9 ldswa */
4552 save_state(dc
, cpu_cond
);
4553 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 4, 1);
4555 case 0x1b: /* V9 ldxa */
4556 save_state(dc
, cpu_cond
);
4557 gen_ld_asi(cpu_val
, cpu_addr
, insn
, 8, 0);
4559 case 0x2d: /* V9 prefetch, no effect */
4561 case 0x30: /* V9 ldfa */
4562 save_state(dc
, cpu_cond
);
4563 gen_ldf_asi(cpu_addr
, insn
, 4, rd
);
4565 case 0x33: /* V9 lddfa */
4566 save_state(dc
, cpu_cond
);
4567 gen_ldf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4569 case 0x3d: /* V9 prefetcha, no effect */
4571 case 0x32: /* V9 ldqfa */
4572 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4573 save_state(dc
, cpu_cond
);
4574 gen_ldf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4580 gen_movl_TN_reg(rd
, cpu_val
);
4581 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4584 } else if (xop
>= 0x20 && xop
< 0x24) {
4585 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4587 save_state(dc
, cpu_cond
);
4589 case 0x20: /* ldf, load fpreg */
4590 gen_address_mask(dc
, cpu_addr
);
4591 tcg_gen_qemu_ld32u(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4592 tcg_gen_trunc_tl_i32(cpu_fpr
[rd
], cpu_tmp0
);
4594 case 0x21: /* ldfsr, V9 ldxfsr */
4595 #ifdef TARGET_SPARC64
4596 gen_address_mask(dc
, cpu_addr
);
4598 tcg_gen_qemu_ld64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4599 gen_helper_ldxfsr(cpu_tmp64
);
4603 tcg_gen_qemu_ld32u(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4604 gen_helper_ldfsr(cpu_tmp32
);
4608 case 0x22: /* ldqf, load quad fpreg */
4612 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4613 r_const
= tcg_const_i32(dc
->mem_idx
);
4614 gen_helper_ldqf(cpu_addr
, r_const
);
4615 tcg_temp_free_i32(r_const
);
4616 gen_op_store_QT0_fpr(QFPREG(rd
));
4619 case 0x23: /* lddf, load double fpreg */
4623 r_const
= tcg_const_i32(dc
->mem_idx
);
4624 gen_helper_lddf(cpu_addr
, r_const
);
4625 tcg_temp_free_i32(r_const
);
4626 gen_op_store_DT0_fpr(DFPREG(rd
));
4632 } else if (xop
< 8 || (xop
>= 0x14 && xop
< 0x18) || \
4633 xop
== 0xe || xop
== 0x1e) {
4634 gen_movl_reg_TN(rd
, cpu_val
);
4636 case 0x4: /* st, store word */
4637 gen_address_mask(dc
, cpu_addr
);
4638 tcg_gen_qemu_st32(cpu_val
, cpu_addr
, dc
->mem_idx
);
4640 case 0x5: /* stb, store byte */
4641 gen_address_mask(dc
, cpu_addr
);
4642 tcg_gen_qemu_st8(cpu_val
, cpu_addr
, dc
->mem_idx
);
4644 case 0x6: /* sth, store halfword */
4645 gen_address_mask(dc
, cpu_addr
);
4646 tcg_gen_qemu_st16(cpu_val
, cpu_addr
, dc
->mem_idx
);
4648 case 0x7: /* std, store double word */
4654 save_state(dc
, cpu_cond
);
4655 gen_address_mask(dc
, cpu_addr
);
4656 r_const
= tcg_const_i32(7);
4657 gen_helper_check_align(cpu_addr
, r_const
); // XXX remove
4658 tcg_temp_free_i32(r_const
);
4659 gen_movl_reg_TN(rd
+ 1, cpu_tmp0
);
4660 tcg_gen_concat_tl_i64(cpu_tmp64
, cpu_tmp0
, cpu_val
);
4661 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4664 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4665 case 0x14: /* sta, V9 stwa, store word alternate */
4666 #ifndef TARGET_SPARC64
4669 if (!supervisor(dc
))
4672 save_state(dc
, cpu_cond
);
4673 gen_st_asi(cpu_val
, cpu_addr
, insn
, 4);
4675 case 0x15: /* stba, store byte alternate */
4676 #ifndef TARGET_SPARC64
4679 if (!supervisor(dc
))
4682 save_state(dc
, cpu_cond
);
4683 gen_st_asi(cpu_val
, cpu_addr
, insn
, 1);
4685 case 0x16: /* stha, store halfword alternate */
4686 #ifndef TARGET_SPARC64
4689 if (!supervisor(dc
))
4692 save_state(dc
, cpu_cond
);
4693 gen_st_asi(cpu_val
, cpu_addr
, insn
, 2);
4695 case 0x17: /* stda, store double word alternate */
4696 #ifndef TARGET_SPARC64
4699 if (!supervisor(dc
))
4705 save_state(dc
, cpu_cond
);
4706 gen_stda_asi(cpu_val
, cpu_addr
, insn
, rd
);
4710 #ifdef TARGET_SPARC64
4711 case 0x0e: /* V9 stx */
4712 gen_address_mask(dc
, cpu_addr
);
4713 tcg_gen_qemu_st64(cpu_val
, cpu_addr
, dc
->mem_idx
);
4715 case 0x1e: /* V9 stxa */
4716 save_state(dc
, cpu_cond
);
4717 gen_st_asi(cpu_val
, cpu_addr
, insn
, 8);
4723 } else if (xop
> 0x23 && xop
< 0x28) {
4724 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4726 save_state(dc
, cpu_cond
);
4728 case 0x24: /* stf, store fpreg */
4729 gen_address_mask(dc
, cpu_addr
);
4730 tcg_gen_ext_i32_tl(cpu_tmp0
, cpu_fpr
[rd
]);
4731 tcg_gen_qemu_st32(cpu_tmp0
, cpu_addr
, dc
->mem_idx
);
4733 case 0x25: /* stfsr, V9 stxfsr */
4734 #ifdef TARGET_SPARC64
4735 gen_address_mask(dc
, cpu_addr
);
4736 tcg_gen_ld_i64(cpu_tmp64
, cpu_env
, offsetof(CPUState
, fsr
));
4738 tcg_gen_qemu_st64(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4740 tcg_gen_qemu_st32(cpu_tmp64
, cpu_addr
, dc
->mem_idx
);
4742 tcg_gen_ld_i32(cpu_tmp32
, cpu_env
, offsetof(CPUState
, fsr
));
4743 tcg_gen_qemu_st32(cpu_tmp32
, cpu_addr
, dc
->mem_idx
);
4747 #ifdef TARGET_SPARC64
4748 /* V9 stqf, store quad fpreg */
4752 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4753 gen_op_load_fpr_QT0(QFPREG(rd
));
4754 r_const
= tcg_const_i32(dc
->mem_idx
);
4755 gen_helper_stqf(cpu_addr
, r_const
);
4756 tcg_temp_free_i32(r_const
);
4759 #else /* !TARGET_SPARC64 */
4760 /* stdfq, store floating point queue */
4761 #if defined(CONFIG_USER_ONLY)
4764 if (!supervisor(dc
))
4766 if (gen_trap_ifnofpu(dc
, cpu_cond
))
4771 case 0x27: /* stdf, store double fpreg */
4775 gen_op_load_fpr_DT0(DFPREG(rd
));
4776 r_const
= tcg_const_i32(dc
->mem_idx
);
4777 gen_helper_stdf(cpu_addr
, r_const
);
4778 tcg_temp_free_i32(r_const
);
4784 } else if (xop
> 0x33 && xop
< 0x3f) {
4785 save_state(dc
, cpu_cond
);
4787 #ifdef TARGET_SPARC64
4788 case 0x34: /* V9 stfa */
4789 gen_stf_asi(cpu_addr
, insn
, 4, rd
);
4791 case 0x36: /* V9 stqfa */
4795 CHECK_FPU_FEATURE(dc
, FLOAT128
);
4796 r_const
= tcg_const_i32(7);
4797 gen_helper_check_align(cpu_addr
, r_const
);
4798 tcg_temp_free_i32(r_const
);
4799 gen_op_load_fpr_QT0(QFPREG(rd
));
4800 gen_stf_asi(cpu_addr
, insn
, 16, QFPREG(rd
));
4803 case 0x37: /* V9 stdfa */
4804 gen_op_load_fpr_DT0(DFPREG(rd
));
4805 gen_stf_asi(cpu_addr
, insn
, 8, DFPREG(rd
));
4807 case 0x3c: /* V9 casa */
4808 gen_cas_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4809 gen_movl_TN_reg(rd
, cpu_val
);
4811 case 0x3e: /* V9 casxa */
4812 gen_casx_asi(cpu_val
, cpu_addr
, cpu_src2
, insn
, rd
);
4813 gen_movl_TN_reg(rd
, cpu_val
);
4816 case 0x34: /* stc */
4817 case 0x35: /* stcsr */
4818 case 0x36: /* stdcq */
4819 case 0x37: /* stdc */
4831 /* default case for non jump instructions */
4832 if (dc
->npc
== DYNAMIC_PC
) {
4833 dc
->pc
= DYNAMIC_PC
;
4835 } else if (dc
->npc
== JUMP_PC
) {
4836 /* we can do a static jump */
4837 gen_branch2(dc
, dc
->jump_pc
[0], dc
->jump_pc
[1], cpu_cond
);
4841 dc
->npc
= dc
->npc
+ 4;
4849 save_state(dc
, cpu_cond
);
4850 r_const
= tcg_const_i32(TT_ILL_INSN
);
4851 gen_helper_raise_exception(r_const
);
4852 tcg_temp_free_i32(r_const
);
4860 save_state(dc
, cpu_cond
);
4861 r_const
= tcg_const_i32(TT_UNIMP_FLUSH
);
4862 gen_helper_raise_exception(r_const
);
4863 tcg_temp_free_i32(r_const
);
4867 #if !defined(CONFIG_USER_ONLY)
4872 save_state(dc
, cpu_cond
);
4873 r_const
= tcg_const_i32(TT_PRIV_INSN
);
4874 gen_helper_raise_exception(r_const
);
4875 tcg_temp_free_i32(r_const
);
4881 save_state(dc
, cpu_cond
);
4882 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP
);
4885 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4887 save_state(dc
, cpu_cond
);
4888 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR
);
4892 #ifndef TARGET_SPARC64
4897 save_state(dc
, cpu_cond
);
4898 r_const
= tcg_const_i32(TT_NCP_INSN
);
4899 gen_helper_raise_exception(r_const
);
4900 tcg_temp_free(r_const
);
4907 static inline void gen_intermediate_code_internal(TranslationBlock
* tb
,
4908 int spc
, CPUSPARCState
*env
)
4910 target_ulong pc_start
, last_pc
;
4911 uint16_t *gen_opc_end
;
4912 DisasContext dc1
, *dc
= &dc1
;
4918 memset(dc
, 0, sizeof(DisasContext
));
4923 dc
->npc
= (target_ulong
) tb
->cs_base
;
4924 dc
->mem_idx
= cpu_mmu_index(env
);
4926 if ((dc
->def
->features
& CPU_FEATURE_FLOAT
))
4927 dc
->fpu_enabled
= cpu_fpu_enabled(env
);
4929 dc
->fpu_enabled
= 0;
4930 #ifdef TARGET_SPARC64
4931 dc
->address_mask_32bit
= env
->pstate
& PS_AM
;
4933 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
4935 cpu_tmp0
= tcg_temp_new();
4936 cpu_tmp32
= tcg_temp_new_i32();
4937 cpu_tmp64
= tcg_temp_new_i64();
4939 cpu_dst
= tcg_temp_local_new();
4942 cpu_val
= tcg_temp_local_new();
4943 cpu_addr
= tcg_temp_local_new();
4946 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
4948 max_insns
= CF_COUNT_MASK
;
4951 if (unlikely(!TAILQ_EMPTY(&env
->breakpoints
))) {
4952 TAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
4953 if (bp
->pc
== dc
->pc
) {
4954 if (dc
->pc
!= pc_start
)
4955 save_state(dc
, cpu_cond
);
4964 qemu_log("Search PC...\n");
4965 j
= gen_opc_ptr
- gen_opc_buf
;
4969 gen_opc_instr_start
[lj
++] = 0;
4970 gen_opc_pc
[lj
] = dc
->pc
;
4971 gen_opc_npc
[lj
] = dc
->npc
;
4972 gen_opc_instr_start
[lj
] = 1;
4973 gen_opc_icount
[lj
] = num_insns
;
4976 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
4979 disas_sparc_insn(dc
);
4984 /* if the next PC is different, we abort now */
4985 if (dc
->pc
!= (last_pc
+ 4))
4987 /* if we reach a page boundary, we stop generation so that the
4988 PC of a TT_TFAULT exception is always in the right page */
4989 if ((dc
->pc
& (TARGET_PAGE_SIZE
- 1)) == 0)
4991 /* if single step mode, we generate only one instruction and
4992 generate an exception */
4993 if (env
->singlestep_enabled
|| singlestep
) {
4994 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
4998 } while ((gen_opc_ptr
< gen_opc_end
) &&
4999 (dc
->pc
- pc_start
) < (TARGET_PAGE_SIZE
- 32) &&
5000 num_insns
< max_insns
);
5003 tcg_temp_free(cpu_addr
);
5004 tcg_temp_free(cpu_val
);
5005 tcg_temp_free(cpu_dst
);
5006 tcg_temp_free_i64(cpu_tmp64
);
5007 tcg_temp_free_i32(cpu_tmp32
);
5008 tcg_temp_free(cpu_tmp0
);
5009 if (tb
->cflags
& CF_LAST_IO
)
5012 if (dc
->pc
!= DYNAMIC_PC
&&
5013 (dc
->npc
!= DYNAMIC_PC
&& dc
->npc
!= JUMP_PC
)) {
5014 /* static PC and NPC: we can use direct chaining */
5015 gen_goto_tb(dc
, 0, dc
->pc
, dc
->npc
);
5017 if (dc
->pc
!= DYNAMIC_PC
)
5018 tcg_gen_movi_tl(cpu_pc
, dc
->pc
);
5019 save_npc(dc
, cpu_cond
);
5023 gen_icount_end(tb
, num_insns
);
5024 *gen_opc_ptr
= INDEX_op_end
;
5026 j
= gen_opc_ptr
- gen_opc_buf
;
5029 gen_opc_instr_start
[lj
++] = 0;
5033 gen_opc_jump_pc
[0] = dc
->jump_pc
[0];
5034 gen_opc_jump_pc
[1] = dc
->jump_pc
[1];
5036 tb
->size
= last_pc
+ 4 - pc_start
;
5037 tb
->icount
= num_insns
;
5040 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
5041 qemu_log("--------------\n");
5042 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
5043 log_target_disas(pc_start
, last_pc
+ 4 - pc_start
, 0);
5049 void gen_intermediate_code(CPUSPARCState
* env
, TranslationBlock
* tb
)
5051 gen_intermediate_code_internal(tb
, 0, env
);
5054 void gen_intermediate_code_pc(CPUSPARCState
* env
, TranslationBlock
* tb
)
5056 gen_intermediate_code_internal(tb
, 1, env
);
5059 void gen_intermediate_code_init(CPUSPARCState
*env
)
5063 static const char * const gregnames
[8] = {
5064 NULL
, // g0 not used
5073 static const char * const fregnames
[64] = {
5074 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5075 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5076 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5077 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5078 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5079 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5080 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5081 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5084 /* init various static tables */
5088 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
5089 cpu_regwptr
= tcg_global_mem_new_ptr(TCG_AREG0
,
5090 offsetof(CPUState
, regwptr
),
5092 #ifdef TARGET_SPARC64
5093 cpu_xcc
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, xcc
),
5095 cpu_asi
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, asi
),
5097 cpu_fprs
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, fprs
),
5099 cpu_gsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, gsr
),
5101 cpu_tick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5102 offsetof(CPUState
, tick_cmpr
),
5104 cpu_stick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5105 offsetof(CPUState
, stick_cmpr
),
5107 cpu_hstick_cmpr
= tcg_global_mem_new(TCG_AREG0
,
5108 offsetof(CPUState
, hstick_cmpr
),
5110 cpu_hintp
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hintp
),
5112 cpu_htba
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, htba
),
5114 cpu_hver
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, hver
),
5116 cpu_ssr
= tcg_global_mem_new(TCG_AREG0
,
5117 offsetof(CPUState
, ssr
), "ssr");
5118 cpu_ver
= tcg_global_mem_new(TCG_AREG0
,
5119 offsetof(CPUState
, version
), "ver");
5120 cpu_softint
= tcg_global_mem_new_i32(TCG_AREG0
,
5121 offsetof(CPUState
, softint
),
5124 cpu_wim
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, wim
),
5127 cpu_cond
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cond
),
5129 cpu_cc_src
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_src
),
5131 cpu_cc_src2
= tcg_global_mem_new(TCG_AREG0
,
5132 offsetof(CPUState
, cc_src2
),
5134 cpu_cc_dst
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, cc_dst
),
5136 cpu_psr
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUState
, psr
),
5138 cpu_fsr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, fsr
),
5140 cpu_pc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, pc
),
5142 cpu_npc
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, npc
),
5144 cpu_y
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, y
), "y");
5145 #ifndef CONFIG_USER_ONLY
5146 cpu_tbr
= tcg_global_mem_new(TCG_AREG0
, offsetof(CPUState
, tbr
),
5149 for (i
= 1; i
< 8; i
++)
5150 cpu_gregs
[i
] = tcg_global_mem_new(TCG_AREG0
,
5151 offsetof(CPUState
, gregs
[i
]),
5153 for (i
= 0; i
< TARGET_FPREGS
; i
++)
5154 cpu_fpr
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
5155 offsetof(CPUState
, fpr
[i
]),
5158 /* register helpers */
5160 #define GEN_HELPER 2
5165 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
5166 unsigned long searched_pc
, int pc_pos
, void *puc
)
5169 env
->pc
= gen_opc_pc
[pc_pos
];
5170 npc
= gen_opc_npc
[pc_pos
];
5172 /* dynamic NPC: already stored */
5173 } else if (npc
== 2) {
5174 target_ulong t2
= (target_ulong
)(unsigned long)puc
;
5175 /* jump PC: use T2 and the jump targets of the translation */
5177 env
->npc
= gen_opc_jump_pc
[0];
5179 env
->npc
= gen_opc_jump_pc
[1];