/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */

/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"

typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];
    int n_t32;
} DisasContext;

// This macro uses non-native bit order: FROM/TO count from the MSB
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the bit order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
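
/* Example: for a conditional branch insn, GET_FIELD(insn, 3, 6) selects
   architectural bits 28..25 (FROM/TO counted from the MSB), i.e. the 4-bit
   condition field; GET_FIELD_SP(insn, 25, 28) picks the same bits using
   the manuals' LSB-based numbering. */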

#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f

static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}

#define IS_IMM (insn & (1<<13))

static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}

/* floating point registers moves */
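/* The 32 single-precision registers are packed pairwise into the 64-bit
   elements of cpu_fpr[]: the even-numbered register of each pair occupies
   the upper 32 bits and the odd-numbered one the lower 32 bits, which is
   what the even/odd tests below rely on. */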
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        TCGv_i32 ret = tcg_temp_local_new_i32();
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        dc->t32[dc->n_t32++] = ret;
        assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));

        return ret;
    }
#endif
}

static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}

static TCGv_i32 gen_dest_fpr_F(void)
{
    return cpu_tmp32;
}

static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}

static TCGv_i64 gen_dest_fpr_D(void)
{
    return cpu_tmp64;
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

#ifdef TARGET_SPARC64
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif

/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif

static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}

static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

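/* Direct TB chaining via tcg_gen_goto_tb is only valid when both pc and
   npc stay on the same guest page as the current TB and we are not
   single-stepping; otherwise we exit to the main loop with the new pc/npc
   written back to the CPU state. */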
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}

// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

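/* ADDX computes dst = src1 + src2 + C, where C is the icc carry left by
   the last flag-setting instruction.  dc->cc_op records how those flags
   were produced, so in the common cases the carry can be recomputed
   inline instead of calling the generic helper. */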
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}

static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2,
                                  DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the sub that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

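/* MULScc is the SPARC V8 multiply step: the low bit of %y decides whether
   src2 takes part in this step's add, %y then shifts right taking in the
   low bit of src1, and src1 shifts right with (N ^ V) entering at bit 31
   before the flag-setting addition. */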
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

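/* Full 32x32 -> 64 multiply used by UMUL/SMUL: the high 32 bits of the
   product are written to %y, while dst receives the product truncated to
   the target word size (the whole 64-bit result on sparc64, the low 32
   bits on sparc32). */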
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2,
                                   int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}

#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;
    TCGv r_temp1, r_temp2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp1 = tcg_temp_local_new();
    r_temp2 = tcg_temp_local_new();
    tcg_gen_mov_tl(r_temp1, src1);
    tcg_gen_mov_tl(r_temp2, src2);
    gen_trap_ifdivzero_tl(r_temp2);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
    gen_set_label(l2);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#endif

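/* The gen_op_eval_* helpers below compute each of the 16 Bicc conditions
   as a 0/1 value in dst from the PSR image in src; the one-line comment
   above each helper gives the boolean formula in terms of the N, Z, V
   and C flags. */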
// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

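/* While dc->npc == JUMP_PC, the outcome of a conditional branch is still
   pending in cpu_cond; gen_generic_branch resolves it, setting cpu_npc to
   jump_pc[0] (branch taken) or jump_pc[1] (not taken). */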
static inline void gen_generic_branch(DisasContext *dc)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, dc->jump_pc[0]);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, dc->jump_pc[1]);
    gen_set_label(l2);
}

/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
    save_npc(dc);
}

static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}

static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}

static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}

#ifdef TARGET_SPARC64
// Inverted logic: the table holds the negation of each architectural
// condition, because gen_cond_reg presets r_dst to 0 and branches over
// the "set to 1" when the inverted test succeeds.
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif

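/* SPARC branches are delayed: the instruction at npc (the delay slot)
   normally executes before control transfers.  With the annul bit set,
   the delay slot executes only for a taken conditional branch; "ba,a"
   and "bn,a" annul it unconditionally, hence the special cond 0x0/0x8
   cases below. */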
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}

static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}

#ifdef TARGET_SPARC64
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, cpu_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}

static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_env);
        break;
    }
}

static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
        break;
    }
}

static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_env);
        break;
    }
}

#else

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif

static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
}

static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}

static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

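/* Glue for the FP operations below.  F/D/Q in a helper's name give the
   operand widths (single, double, quad); quad values are staged through
   the qt0/qt1 slots in CPUSPARCState rather than TCG temporaries.  The
   gen_ne_* variants appear to cover ops (e.g. VIS) that cannot raise
   IEEE exceptions. */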
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F();

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
#endif

static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D();

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
#endif

static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

#ifdef TARGET_SPARC64
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
#endif

static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env, src);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F();

    gen(dst, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}

static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D();

    gen(dst, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}

static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}

/* asi moves */
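/* An ASI (address space identifier) qualifies every alternate-space
   access.  In the immediate form (i bit set) the ASI comes from the %asi
   register at run time; otherwise it is the 8-bit field in the
   instruction, GET_FIELD(insn, 19, 26). */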
#ifdef TARGET_SPARC64
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, cpu_env, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}

#elif !defined(CONFIG_USER_ONLY)

static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;
    TCGv_i64 r_val;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    r_val = tcg_temp_new_i64();
    tcg_gen_extu_tl_i64(r_val, dst);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i64(r_val);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
2154
2155 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2156 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2157 {
2158 TCGv_i64 r_val;
2159 TCGv_i32 r_asi, r_size;
2160
2161 gen_ld_asi(dst, addr, insn, 1, 0);
2162
2163 r_val = tcg_const_i64(0xffULL);
2164 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2165 r_size = tcg_const_i32(1);
2166 gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2167 tcg_temp_free_i32(r_size);
2168 tcg_temp_free_i32(r_asi);
2169 tcg_temp_free_i64(r_val);
2170 }
2171 #endif
2172
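/* Fetch rs1/rs2 for an arithmetic op.  %g0 reads as constant zero,
   %g1-%g7 live in the cpu_gregs globals, and windowed registers are
   loaded from the current window via cpu_regwptr.  The returned TCGv
   may alias a global rather than the scratch passed in as "def". */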
2173 static inline TCGv get_src1(unsigned int insn, TCGv def)
2174 {
2175 TCGv r_rs1 = def;
2176 unsigned int rs1;
2177
2178 rs1 = GET_FIELD(insn, 13, 17);
2179 if (rs1 == 0) {
2180 tcg_gen_movi_tl(def, 0);
2181 } else if (rs1 < 8) {
2182 r_rs1 = cpu_gregs[rs1];
2183 } else {
2184 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
2185 }
2186 return r_rs1;
2187 }
2188
2189 static inline TCGv get_src2(unsigned int insn, TCGv def)
2190 {
2191 TCGv r_rs2 = def;
2192
2193 if (IS_IMM) { /* immediate */
2194 target_long simm = GET_FIELDs(insn, 19, 31);
2195 tcg_gen_movi_tl(def, simm);
2196 } else { /* register */
2197 unsigned int rs2 = GET_FIELD(insn, 27, 31);
2198 if (rs2 == 0) {
2199 tcg_gen_movi_tl(def, 0);
2200 } else if (rs2 < 8) {
2201 r_rs2 = cpu_gregs[rs2];
2202 } else {
2203 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
2204 }
2205 }
2206 return r_rs2;
2207 }
2208
2209 #ifdef TARGET_SPARC64
2210 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2211 {
2212 TCGv_i32 r_tl = tcg_temp_new_i32();
2213
2214 /* load env->tl into r_tl */
2215 tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2216
2217 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2218 tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2219
2220 /* calculate offset to current trap state from env->ts, reuse r_tl */
2221 tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2222 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2223
2224 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2225 {
2226 TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2227 tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2228 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2229 tcg_temp_free_ptr(r_tl_tmp);
2230 }
2231
2232 tcg_temp_free_i32(r_tl);
2233 }
2234
2235 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2236 int width, bool cc, bool left)
2237 {
2238 TCGv lo1, lo2, t1, t2;
2239 uint64_t amask, tabl, tabr;
2240 int shift, imask, omask;
2241
2242 if (cc) {
2243 tcg_gen_mov_tl(cpu_cc_src, s1);
2244 tcg_gen_mov_tl(cpu_cc_src2, s2);
2245 tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2246 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2247 dc->cc_op = CC_OP_SUB;
2248 }
2249
2250 /* Theory of operation: there are two tables, left and right (not to
2251 be confused with the left and right versions of the opcode). These
2252 are indexed by the low 3 bits of the inputs. To make things "easy",
2253 these tables are loaded into two constants, TABL and TABR below.
2254 The operation index = (input & imask) << shift calculates the index
2255 into the constant, while val = (table >> index) & omask calculates
2256 the value we're looking for. */
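/* Worked example (width 8, left table): for (s1 & 7) == 5 the index
   is 5 << 3 = 40, and (0x80c0e0f0f8fcfeffULL >> 40) & 0xff == 0xe0,
   i.e. the edge-mask byte the table stores for offset 5. */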
2257 switch (width) {
2258 case 8:
2259 imask = 0x7;
2260 shift = 3;
2261 omask = 0xff;
2262 if (left) {
2263 tabl = 0x80c0e0f0f8fcfeffULL;
2264 tabr = 0xff7f3f1f0f070301ULL;
2265 } else {
2266 tabl = 0x0103070f1f3f7fffULL;
2267 tabr = 0xfffefcf8f0e0c080ULL;
2268 }
2269 break;
2270 case 16:
2271 imask = 0x6;
2272 shift = 1;
2273 omask = 0xf;
2274 if (left) {
2275 tabl = 0x8cef;
2276 tabr = 0xf731;
2277 } else {
2278 tabl = 0x137f;
2279 tabr = 0xfec8;
2280 }
2281 break;
2282 case 32:
2283 imask = 0x4;
2284 shift = 0;
2285 omask = 0x3;
2286 if (left) {
2287 tabl = (2 << 2) | 3;
2288 tabr = (3 << 2) | 1;
2289 } else {
2290 tabl = (1 << 2) | 3;
2291 tabr = (3 << 2) | 2;
2292 }
2293 break;
2294 default:
2295 abort();
2296 }
2297
2298 lo1 = tcg_temp_new();
2299 lo2 = tcg_temp_new();
2300 tcg_gen_andi_tl(lo1, s1, imask);
2301 tcg_gen_andi_tl(lo2, s2, imask);
2302 tcg_gen_shli_tl(lo1, lo1, shift);
2303 tcg_gen_shli_tl(lo2, lo2, shift);
2304
2305 t1 = tcg_const_tl(tabl);
2306 t2 = tcg_const_tl(tabr);
2307 tcg_gen_shr_tl(lo1, t1, lo1);
2308 tcg_gen_shr_tl(lo2, t2, lo2);
2309 tcg_gen_andi_tl(dst, lo1, omask);
2310 tcg_gen_andi_tl(lo2, lo2, omask);
2311
2312 amask = -8;
2313 if (AM_CHECK(dc)) {
2314 amask &= 0xffffffffULL;
2315 }
2316 tcg_gen_andi_tl(s1, s1, amask);
2317 tcg_gen_andi_tl(s2, s2, amask);
2318
2319 /* We want to compute
2320 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2321 We've already done dst = lo1, so this reduces to
2322 dst &= (s1 == s2 ? -1 : lo2)
2323 Which we perform by
2324 lo2 |= -(s1 == s2)
2325 dst &= lo2
2326 */
2327 tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2328 tcg_gen_neg_tl(t1, t1);
2329 tcg_gen_or_tl(lo2, lo2, t1);
2330 tcg_gen_and_tl(dst, dst, lo2);
2331
2332 tcg_temp_free(lo1);
2333 tcg_temp_free(lo2);
2334 tcg_temp_free(t1);
2335 tcg_temp_free(t2);
2336 }
2337
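/* alignaddr: dst gets (s1 + s2) rounded down to an 8-byte boundary,
   while GSR.align receives the low 3 bits of the sum (negated first
   for the alignaddrl variant, i.e. left != 0). */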
2338 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2339 {
2340 TCGv tmp = tcg_temp_new();
2341
2342 tcg_gen_add_tl(tmp, s1, s2);
2343 tcg_gen_andi_tl(dst, tmp, -8);
2344 if (left) {
2345 tcg_gen_neg_tl(tmp, tmp);
2346 }
2347 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2348
2349 tcg_temp_free(tmp);
2350 }
2351
2352 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2353 {
2354 TCGv t1, t2, shift;
2355
2356 t1 = tcg_temp_new();
2357 t2 = tcg_temp_new();
2358 shift = tcg_temp_new();
2359
2360 tcg_gen_andi_tl(shift, gsr, 7);
2361 tcg_gen_shli_tl(shift, shift, 3);
2362 tcg_gen_shl_tl(t1, s1, shift);
2363
2364 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2365 shift of (up to 63) followed by a constant shift of 1. */
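/* E.g. GSR.align == 0 makes shift == 0, so t2 = (s2 >> 63) >> 1 == 0
   and dst is exactly s1; align == 1 yields (s1 << 8) | (s2 >> 56). */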
2366 tcg_gen_xori_tl(shift, shift, 63);
2367 tcg_gen_shr_tl(t2, s2, shift);
2368 tcg_gen_shri_tl(t2, t2, 1);
2369
2370 tcg_gen_or_tl(dst, t1, t2);
2371
2372 tcg_temp_free(t1);
2373 tcg_temp_free(t2);
2374 tcg_temp_free(shift);
2375 }
2376 #endif
2377
2378 #define CHECK_IU_FEATURE(dc, FEATURE) \
2379 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2380 goto illegal_insn;
2381 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2382 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2383 goto nfpu_insn;
2384
2385 /* before an instruction, dc->pc must be static */
2386 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2387 {
2388 unsigned int opc, rs1, rs2, rd;
2389 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2390 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2391 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2392 target_long simm;
2393
2394 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2395 tcg_gen_debug_insn_start(dc->pc);
2396 }
2397
2398 opc = GET_FIELD(insn, 0, 1);
2399
2400 rd = GET_FIELD(insn, 2, 6);
2401
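/* Bits 31:30 of the instruction select the major format: 0 is
   branches/sethi, 1 is call, 2 is arithmetic/FPU/special, and 3
   (handled further below) is loads and stores. */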
2402 cpu_tmp1 = cpu_src1 = tcg_temp_new();
2403 cpu_tmp2 = cpu_src2 = tcg_temp_new();
2404
2405 switch (opc) {
2406 case 0: /* branches/sethi */
2407 {
2408 unsigned int xop = GET_FIELD(insn, 7, 9);
2409 int32_t target;
2410 switch (xop) {
2411 #ifdef TARGET_SPARC64
2412 case 0x1: /* V9 BPcc */
2413 {
2414 int cc;
2415
2416 target = GET_FIELD_SP(insn, 0, 18);
2417 target = sign_extend(target, 19);
2418 target <<= 2;
2419 cc = GET_FIELD_SP(insn, 20, 21);
2420 if (cc == 0)
2421 do_branch(dc, target, insn, 0);
2422 else if (cc == 2)
2423 do_branch(dc, target, insn, 1);
2424 else
2425 goto illegal_insn;
2426 goto jmp_insn;
2427 }
2428 case 0x3: /* V9 BPr */
2429 {
2430 target = GET_FIELD_SP(insn, 0, 13) |
2431 (GET_FIELD_SP(insn, 20, 21) << 14);
2432 target = sign_extend(target, 16);
2433 target <<= 2;
2434 cpu_src1 = get_src1(insn, cpu_src1);
2435 do_branch_reg(dc, target, insn, cpu_src1);
2436 goto jmp_insn;
2437 }
2438 case 0x5: /* V9 FBPcc */
2439 {
2440 int cc = GET_FIELD_SP(insn, 20, 21);
2441 if (gen_trap_ifnofpu(dc)) {
2442 goto jmp_insn;
2443 }
2444 target = GET_FIELD_SP(insn, 0, 18);
2445 target = sign_extend(target, 19);
2446 target <<= 2;
2447 do_fbranch(dc, target, insn, cc);
2448 goto jmp_insn;
2449 }
2450 #else
2451 case 0x7: /* CBN+x */
2452 {
2453 goto ncp_insn;
2454 }
2455 #endif
2456 case 0x2: /* BN+x */
2457 {
2458 target = GET_FIELD(insn, 10, 31);
2459 target = sign_extend(target, 22);
2460 target <<= 2;
2461 do_branch(dc, target, insn, 0);
2462 goto jmp_insn;
2463 }
2464 case 0x6: /* FBN+x */
2465 {
2466 if (gen_trap_ifnofpu(dc)) {
2467 goto jmp_insn;
2468 }
2469 target = GET_FIELD(insn, 10, 31);
2470 target = sign_extend(target, 22);
2471 target <<= 2;
2472 do_fbranch(dc, target, insn, 0);
2473 goto jmp_insn;
2474 }
2475 case 0x4: /* SETHI */
2476 if (rd) { /* rd == 0 is a nop */
2477 uint32_t value = GET_FIELD(insn, 10, 31);
2478 TCGv r_const;
2479
2480 r_const = tcg_const_tl(value << 10);
2481 gen_movl_TN_reg(rd, r_const);
2482 tcg_temp_free(r_const);
2483 }
2484 break;
2485 case 0x0: /* UNIMPL */
2486 default:
2487 goto illegal_insn;
2488 }
2489 break;
2490 }
2491 break;
2492 case 1: /*CALL*/
2493 {
2494 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2495 TCGv r_const;
2496
2497 r_const = tcg_const_tl(dc->pc);
2498 gen_movl_TN_reg(15, r_const);
2499 tcg_temp_free(r_const);
2500 target += dc->pc;
2501 gen_mov_pc_npc(dc);
2502 #ifdef TARGET_SPARC64
2503 if (unlikely(AM_CHECK(dc))) {
2504 target &= 0xffffffffULL;
2505 }
2506 #endif
2507 dc->npc = target;
2508 }
2509 goto jmp_insn;
2510 case 2: /* FPU & Logical Operations */
2511 {
2512 unsigned int xop = GET_FIELD(insn, 7, 12);
2513 if (xop == 0x3a) { /* generate trap */
2514 int cond;
2515
2516 cpu_src1 = get_src1(insn, cpu_src1);
2517 if (IS_IMM) {
2518 rs2 = GET_FIELD(insn, 25, 31);
2519 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2520 } else {
2521 rs2 = GET_FIELD(insn, 27, 31);
2522 if (rs2 != 0) {
2523 gen_movl_reg_TN(rs2, cpu_src2);
2524 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2525 } else
2526 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2527 }
2528
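/* The trap number is rs1 plus rs2 (or the immediate), truncated by
   the trap mask and biased by TT_TRAP to form the exception index. */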
2529 cond = GET_FIELD(insn, 3, 6);
2530 if (cond == 0x8) { /* Trap Always */
2531 save_state(dc);
2532 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2533 supervisor(dc))
2534 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2535 else
2536 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2537 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2538 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2539 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2540
2541 } else if (cond != 0) {
2542 TCGv r_cond = tcg_temp_new();
2543 int l1;
2544 #ifdef TARGET_SPARC64
2545 /* V9 icc/xcc */
2546 int cc = GET_FIELD_SP(insn, 11, 12);
2547
2548 save_state(dc);
2549 if (cc == 0)
2550 gen_cond(r_cond, 0, cond, dc);
2551 else if (cc == 2)
2552 gen_cond(r_cond, 1, cond, dc);
2553 else
2554 goto illegal_insn;
2555 #else
2556 save_state(dc);
2557 gen_cond(r_cond, 0, cond, dc);
2558 #endif
2559 l1 = gen_new_label();
2560 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2561
2562 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2563 supervisor(dc))
2564 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2565 else
2566 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2567 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2568 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2569 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2570
2571 gen_set_label(l1);
2572 tcg_temp_free(r_cond);
2573 }
2574 gen_op_next_insn();
2575 tcg_gen_exit_tb(0);
2576 dc->is_br = 1;
2577 goto jmp_insn;
2578 } else if (xop == 0x28) {
2579 rs1 = GET_FIELD(insn, 13, 17);
2580 switch(rs1) {
2581 case 0: /* rdy */
2582 #ifndef TARGET_SPARC64
2583 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2584 manual, rdy on the microSPARC
2585 II */
2586 case 0x0f: /* stbar in the SPARCv8 manual,
2587 rdy on the microSPARC II */
2588 case 0x10 ... 0x1f: /* implementation-dependent in the
2589 SPARCv8 manual, rdy on the
2590 microSPARC II */
2591 /* Read Asr17 */
2592 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2593 TCGv r_const;
2594
2595 /* Read Asr17 for a Leon3 monoprocessor */
2596 r_const = tcg_const_tl((1 << 8)
2597 | (dc->def->nwindows - 1));
2598 gen_movl_TN_reg(rd, r_const);
2599 tcg_temp_free(r_const);
2600 break;
2601 }
2602 #endif
2603 gen_movl_TN_reg(rd, cpu_y);
2604 break;
2605 #ifdef TARGET_SPARC64
2606 case 0x2: /* V9 rdccr */
2607 gen_helper_compute_psr(cpu_env);
2608 gen_helper_rdccr(cpu_dst, cpu_env);
2609 gen_movl_TN_reg(rd, cpu_dst);
2610 break;
2611 case 0x3: /* V9 rdasi */
2612 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2613 gen_movl_TN_reg(rd, cpu_dst);
2614 break;
2615 case 0x4: /* V9 rdtick */
2616 {
2617 TCGv_ptr r_tickptr;
2618
2619 r_tickptr = tcg_temp_new_ptr();
2620 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2621 offsetof(CPUSPARCState, tick));
2622 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2623 tcg_temp_free_ptr(r_tickptr);
2624 gen_movl_TN_reg(rd, cpu_dst);
2625 }
2626 break;
2627 case 0x5: /* V9 rdpc */
2628 {
2629 TCGv r_const;
2630
2631 if (unlikely(AM_CHECK(dc))) {
2632 r_const = tcg_const_tl(dc->pc & 0xffffffffULL);
2633 } else {
2634 r_const = tcg_const_tl(dc->pc);
2635 }
2636 gen_movl_TN_reg(rd, r_const);
2637 tcg_temp_free(r_const);
2638 }
2639 break;
2640 case 0x6: /* V9 rdfprs */
2641 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2642 gen_movl_TN_reg(rd, cpu_dst);
2643 break;
2644 case 0xf: /* V9 membar */
2645 break; /* no effect */
2646 case 0x13: /* Graphics Status */
2647 if (gen_trap_ifnofpu(dc)) {
2648 goto jmp_insn;
2649 }
2650 gen_movl_TN_reg(rd, cpu_gsr);
2651 break;
2652 case 0x16: /* Softint */
2653 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2654 gen_movl_TN_reg(rd, cpu_dst);
2655 break;
2656 case 0x17: /* Tick compare */
2657 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2658 break;
2659 case 0x18: /* System tick */
2660 {
2661 TCGv_ptr r_tickptr;
2662
2663 r_tickptr = tcg_temp_new_ptr();
2664 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2665 offsetof(CPUSPARCState, stick));
2666 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2667 tcg_temp_free_ptr(r_tickptr);
2668 gen_movl_TN_reg(rd, cpu_dst);
2669 }
2670 break;
2671 case 0x19: /* System tick compare */
2672 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2673 break;
2674 case 0x10: /* Performance Control */
2675 case 0x11: /* Performance Instrumentation Counter */
2676 case 0x12: /* Dispatch Control */
2677 case 0x14: /* Softint set, WO */
2678 case 0x15: /* Softint clear, WO */
2679 #endif
2680 default:
2681 goto illegal_insn;
2682 }
2683 #if !defined(CONFIG_USER_ONLY)
2684 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2685 #ifndef TARGET_SPARC64
2686 if (!supervisor(dc))
2687 goto priv_insn;
2688 gen_helper_compute_psr(cpu_env);
2689 dc->cc_op = CC_OP_FLAGS;
2690 gen_helper_rdpsr(cpu_dst, cpu_env);
2691 #else
2692 CHECK_IU_FEATURE(dc, HYPV);
2693 if (!hypervisor(dc))
2694 goto priv_insn;
2695 rs1 = GET_FIELD(insn, 13, 17);
2696 switch (rs1) {
2697 case 0: // hpstate
2698 // gen_op_rdhpstate();
2699 break;
2700 case 1: // htstate
2701 // gen_op_rdhtstate();
2702 break;
2703 case 3: // hintp
2704 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2705 break;
2706 case 5: // htba
2707 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2708 break;
2709 case 6: // hver
2710 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2711 break;
2712 case 31: // hstick_cmpr
2713 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2714 break;
2715 default:
2716 goto illegal_insn;
2717 }
2718 #endif
2719 gen_movl_TN_reg(rd, cpu_dst);
2720 break;
2721 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2722 if (!supervisor(dc))
2723 goto priv_insn;
2724 #ifdef TARGET_SPARC64
2725 rs1 = GET_FIELD(insn, 13, 17);
2726 switch (rs1) {
2727 case 0: // tpc
2728 {
2729 TCGv_ptr r_tsptr;
2730
2731 r_tsptr = tcg_temp_new_ptr();
2732 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2733 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2734 offsetof(trap_state, tpc));
2735 tcg_temp_free_ptr(r_tsptr);
2736 }
2737 break;
2738 case 1: // tnpc
2739 {
2740 TCGv_ptr r_tsptr;
2741
2742 r_tsptr = tcg_temp_new_ptr();
2743 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2744 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2745 offsetof(trap_state, tnpc));
2746 tcg_temp_free_ptr(r_tsptr);
2747 }
2748 break;
2749 case 2: // tstate
2750 {
2751 TCGv_ptr r_tsptr;
2752
2753 r_tsptr = tcg_temp_new_ptr();
2754 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2755 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2756 offsetof(trap_state, tstate));
2757 tcg_temp_free_ptr(r_tsptr);
2758 }
2759 break;
2760 case 3: // tt
2761 {
2762 TCGv_ptr r_tsptr;
2763
2764 r_tsptr = tcg_temp_new_ptr();
2765 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2766 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2767 offsetof(trap_state, tt));
2768 tcg_temp_free_ptr(r_tsptr);
2769 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2770 }
2771 break;
2772 case 4: // tick
2773 {
2774 TCGv_ptr r_tickptr;
2775
2776 r_tickptr = tcg_temp_new_ptr();
2777 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2778 offsetof(CPUSPARCState, tick));
2779 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2780 gen_movl_TN_reg(rd, cpu_tmp0);
2781 tcg_temp_free_ptr(r_tickptr);
2782 }
2783 break;
2784 case 5: // tba
2785 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2786 break;
2787 case 6: // pstate
2788 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2789 offsetof(CPUSPARCState, pstate));
2790 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2791 break;
2792 case 7: // tl
2793 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2794 offsetof(CPUSPARCState, tl));
2795 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2796 break;
2797 case 8: // pil
2798 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2799 offsetof(CPUSPARCState, psrpil));
2800 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2801 break;
2802 case 9: // cwp
2803 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2804 break;
2805 case 10: // cansave
2806 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2807 offsetof(CPUSPARCState, cansave));
2808 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2809 break;
2810 case 11: // canrestore
2811 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2812 offsetof(CPUSPARCState, canrestore));
2813 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2814 break;
2815 case 12: // cleanwin
2816 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2817 offsetof(CPUSPARCState, cleanwin));
2818 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2819 break;
2820 case 13: // otherwin
2821 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2822 offsetof(CPUSPARCState, otherwin));
2823 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2824 break;
2825 case 14: // wstate
2826 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2827 offsetof(CPUSPARCState, wstate));
2828 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2829 break;
2830 case 16: // UA2005 gl
2831 CHECK_IU_FEATURE(dc, GL);
2832 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2833 offsetof(CPUSPARCState, gl));
2834 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2835 break;
2836 case 26: // UA2005 strand status
2837 CHECK_IU_FEATURE(dc, HYPV);
2838 if (!hypervisor(dc))
2839 goto priv_insn;
2840 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2841 break;
2842 case 31: // ver
2843 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2844 break;
2845 case 15: // fq
2846 default:
2847 goto illegal_insn;
2848 }
2849 #else
2850 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2851 #endif
2852 gen_movl_TN_reg(rd, cpu_tmp0);
2853 break;
2854 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2855 #ifdef TARGET_SPARC64
2856 save_state(dc);
2857 gen_helper_flushw(cpu_env);
2858 #else
2859 if (!supervisor(dc))
2860 goto priv_insn;
2861 gen_movl_TN_reg(rd, cpu_tbr);
2862 #endif
2863 break;
2864 #endif
2865 } else if (xop == 0x34) { /* FPU Operations */
2866 if (gen_trap_ifnofpu(dc)) {
2867 goto jmp_insn;
2868 }
2869 gen_op_clear_ieee_excp_and_FTT();
2870 rs1 = GET_FIELD(insn, 13, 17);
2871 rs2 = GET_FIELD(insn, 27, 31);
2872 xop = GET_FIELD(insn, 18, 26);
2873 save_state(dc);
2874 switch (xop) {
2875 case 0x1: /* fmovs */
2876 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2877 gen_store_fpr_F(dc, rd, cpu_src1_32);
2878 break;
2879 case 0x5: /* fnegs */
2880 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2881 break;
2882 case 0x9: /* fabss */
2883 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2884 break;
2885 case 0x29: /* fsqrts */
2886 CHECK_FPU_FEATURE(dc, FSQRT);
2887 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2888 break;
2889 case 0x2a: /* fsqrtd */
2890 CHECK_FPU_FEATURE(dc, FSQRT);
2891 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2892 break;
2893 case 0x2b: /* fsqrtq */
2894 CHECK_FPU_FEATURE(dc, FLOAT128);
2895 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2896 break;
2897 case 0x41: /* fadds */
2898 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2899 break;
2900 case 0x42: /* faddd */
2901 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2902 break;
2903 case 0x43: /* faddq */
2904 CHECK_FPU_FEATURE(dc, FLOAT128);
2905 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2906 break;
2907 case 0x45: /* fsubs */
2908 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2909 break;
2910 case 0x46: /* fsubd */
2911 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2912 break;
2913 case 0x47: /* fsubq */
2914 CHECK_FPU_FEATURE(dc, FLOAT128);
2915 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2916 break;
2917 case 0x49: /* fmuls */
2918 CHECK_FPU_FEATURE(dc, FMUL);
2919 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2920 break;
2921 case 0x4a: /* fmuld */
2922 CHECK_FPU_FEATURE(dc, FMUL);
2923 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2924 break;
2925 case 0x4b: /* fmulq */
2926 CHECK_FPU_FEATURE(dc, FLOAT128);
2927 CHECK_FPU_FEATURE(dc, FMUL);
2928 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2929 break;
2930 case 0x4d: /* fdivs */
2931 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2932 break;
2933 case 0x4e: /* fdivd */
2934 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2935 break;
2936 case 0x4f: /* fdivq */
2937 CHECK_FPU_FEATURE(dc, FLOAT128);
2938 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2939 break;
2940 case 0x69: /* fsmuld */
2941 CHECK_FPU_FEATURE(dc, FSMULD);
2942 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2943 break;
2944 case 0x6e: /* fdmulq */
2945 CHECK_FPU_FEATURE(dc, FLOAT128);
2946 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2947 break;
2948 case 0xc4: /* fitos */
2949 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2950 break;
2951 case 0xc6: /* fdtos */
2952 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2953 break;
2954 case 0xc7: /* fqtos */
2955 CHECK_FPU_FEATURE(dc, FLOAT128);
2956 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2957 break;
2958 case 0xc8: /* fitod */
2959 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2960 break;
2961 case 0xc9: /* fstod */
2962 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2963 break;
2964 case 0xcb: /* fqtod */
2965 CHECK_FPU_FEATURE(dc, FLOAT128);
2966 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2967 break;
2968 case 0xcc: /* fitoq */
2969 CHECK_FPU_FEATURE(dc, FLOAT128);
2970 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2971 break;
2972 case 0xcd: /* fstoq */
2973 CHECK_FPU_FEATURE(dc, FLOAT128);
2974 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2975 break;
2976 case 0xce: /* fdtoq */
2977 CHECK_FPU_FEATURE(dc, FLOAT128);
2978 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2979 break;
2980 case 0xd1: /* fstoi */
2981 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2982 break;
2983 case 0xd2: /* fdtoi */
2984 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2985 break;
2986 case 0xd3: /* fqtoi */
2987 CHECK_FPU_FEATURE(dc, FLOAT128);
2988 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2989 break;
2990 #ifdef TARGET_SPARC64
2991 case 0x2: /* V9 fmovd */
2992 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2993 gen_store_fpr_D(dc, rd, cpu_src1_64);
2994 break;
2995 case 0x3: /* V9 fmovq */
2996 CHECK_FPU_FEATURE(dc, FLOAT128);
2997 gen_move_Q(rd, rs2);
2998 break;
2999 case 0x6: /* V9 fnegd */
3000 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3001 break;
3002 case 0x7: /* V9 fnegq */
3003 CHECK_FPU_FEATURE(dc, FLOAT128);
3004 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3005 break;
3006 case 0xa: /* V9 fabsd */
3007 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3008 break;
3009 case 0xb: /* V9 fabsq */
3010 CHECK_FPU_FEATURE(dc, FLOAT128);
3011 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3012 break;
3013 case 0x81: /* V9 fstox */
3014 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3015 break;
3016 case 0x82: /* V9 fdtox */
3017 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3018 break;
3019 case 0x83: /* V9 fqtox */
3020 CHECK_FPU_FEATURE(dc, FLOAT128);
3021 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3022 break;
3023 case 0x84: /* V9 fxtos */
3024 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3025 break;
3026 case 0x88: /* V9 fxtod */
3027 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3028 break;
3029 case 0x8c: /* V9 fxtoq */
3030 CHECK_FPU_FEATURE(dc, FLOAT128);
3031 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3032 break;
3033 #endif
3034 default:
3035 goto illegal_insn;
3036 }
3037 } else if (xop == 0x35) { /* FPU conditional moves & compares */
3038 #ifdef TARGET_SPARC64
3039 int cond;
3040 #endif
3041 if (gen_trap_ifnofpu(dc)) {
3042 goto jmp_insn;
3043 }
3044 gen_op_clear_ieee_excp_and_FTT();
3045 rs1 = GET_FIELD(insn, 13, 17);
3046 rs2 = GET_FIELD(insn, 27, 31);
3047 xop = GET_FIELD(insn, 18, 26);
3048 save_state(dc);
3049 #ifdef TARGET_SPARC64
3050 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
3051 int l1;
3052
3053 l1 = gen_new_label();
3054 cond = GET_FIELD_SP(insn, 14, 17);
3055 cpu_src1 = get_src1(insn, cpu_src1);
3056 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3057 0, l1);
3058 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3059 gen_store_fpr_F(dc, rd, cpu_src1_32);
3060 gen_set_label(l1);
3061 break;
3062 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3063 int l1;
3064
3065 l1 = gen_new_label();
3066 cond = GET_FIELD_SP(insn, 14, 17);
3067 cpu_src1 = get_src1(insn, cpu_src1);
3068 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3069 0, l1);
3070 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3071 gen_store_fpr_D(dc, rd, cpu_src1_64);
3072 gen_set_label(l1);
3073 break;
3074 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3075 int l1;
3076
3077 CHECK_FPU_FEATURE(dc, FLOAT128);
3078 l1 = gen_new_label();
3079 cond = GET_FIELD_SP(insn, 14, 17);
3080 cpu_src1 = get_src1(insn, cpu_src1);
3081 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3082 0, l1);
3083 gen_move_Q(rd, rs2);
3084 gen_set_label(l1);
3085 break;
3086 }
3087 #endif
3088 switch (xop) {
3089 #ifdef TARGET_SPARC64
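/* The FMOV*CC macros implement conditional FP moves by branching
   around the copy: the condition is materialized in r_cond and the
   store is skipped when it evaluates to zero. */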
3090 #define FMOVSCC(fcc) \
3091 { \
3092 TCGv r_cond; \
3093 int l1; \
3094 \
3095 l1 = gen_new_label(); \
3096 r_cond = tcg_temp_new(); \
3097 cond = GET_FIELD_SP(insn, 14, 17); \
3098 gen_fcond(r_cond, fcc, cond); \
3099 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3100 0, l1); \
3101 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3102 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3103 gen_set_label(l1); \
3104 tcg_temp_free(r_cond); \
3105 }
3106 #define FMOVDCC(fcc) \
3107 { \
3108 TCGv r_cond; \
3109 int l1; \
3110 \
3111 l1 = gen_new_label(); \
3112 r_cond = tcg_temp_new(); \
3113 cond = GET_FIELD_SP(insn, 14, 17); \
3114 gen_fcond(r_cond, fcc, cond); \
3115 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3116 0, l1); \
3117 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3118 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3119 gen_set_label(l1); \
3120 tcg_temp_free(r_cond); \
3121 }
3122 #define FMOVQCC(fcc) \
3123 { \
3124 TCGv r_cond; \
3125 int l1; \
3126 \
3127 l1 = gen_new_label(); \
3128 r_cond = tcg_temp_new(); \
3129 cond = GET_FIELD_SP(insn, 14, 17); \
3130 gen_fcond(r_cond, fcc, cond); \
3131 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3132 0, l1); \
3133 gen_move_Q(rd, rs2); \
3134 gen_set_label(l1); \
3135 tcg_temp_free(r_cond); \
3136 }
3137 case 0x001: /* V9 fmovscc %fcc0 */
3138 FMOVSCC(0);
3139 break;
3140 case 0x002: /* V9 fmovdcc %fcc0 */
3141 FMOVDCC(0);
3142 break;
3143 case 0x003: /* V9 fmovqcc %fcc0 */
3144 CHECK_FPU_FEATURE(dc, FLOAT128);
3145 FMOVQCC(0);
3146 break;
3147 case 0x041: /* V9 fmovscc %fcc1 */
3148 FMOVSCC(1);
3149 break;
3150 case 0x042: /* V9 fmovdcc %fcc1 */
3151 FMOVDCC(1);
3152 break;
3153 case 0x043: /* V9 fmovqcc %fcc1 */
3154 CHECK_FPU_FEATURE(dc, FLOAT128);
3155 FMOVQCC(1);
3156 break;
3157 case 0x081: /* V9 fmovscc %fcc2 */
3158 FMOVSCC(2);
3159 break;
3160 case 0x082: /* V9 fmovdcc %fcc2 */
3161 FMOVDCC(2);
3162 break;
3163 case 0x083: /* V9 fmovqcc %fcc2 */
3164 CHECK_FPU_FEATURE(dc, FLOAT128);
3165 FMOVQCC(2);
3166 break;
3167 case 0x0c1: /* V9 fmovscc %fcc3 */
3168 FMOVSCC(3);
3169 break;
3170 case 0x0c2: /* V9 fmovdcc %fcc3 */
3171 FMOVDCC(3);
3172 break;
3173 case 0x0c3: /* V9 fmovqcc %fcc3 */
3174 CHECK_FPU_FEATURE(dc, FLOAT128);
3175 FMOVQCC(3);
3176 break;
3177 #undef FMOVSCC
3178 #undef FMOVDCC
3179 #undef FMOVQCC
3180 #define FMOVSCC(icc) \
3181 { \
3182 TCGv r_cond; \
3183 int l1; \
3184 \
3185 l1 = gen_new_label(); \
3186 r_cond = tcg_temp_new(); \
3187 cond = GET_FIELD_SP(insn, 14, 17); \
3188 gen_cond(r_cond, icc, cond, dc); \
3189 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3190 0, l1); \
3191 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3192 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3193 gen_set_label(l1); \
3194 tcg_temp_free(r_cond); \
3195 }
3196 #define FMOVDCC(icc) \
3197 { \
3198 TCGv r_cond; \
3199 int l1; \
3200 \
3201 l1 = gen_new_label(); \
3202 r_cond = tcg_temp_new(); \
3203 cond = GET_FIELD_SP(insn, 14, 17); \
3204 gen_cond(r_cond, icc, cond, dc); \
3205 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3206 0, l1); \
3207 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3208 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3209 gen_update_fprs_dirty(DFPREG(rd)); \
3210 gen_set_label(l1); \
3211 tcg_temp_free(r_cond); \
3212 }
3213 #define FMOVQCC(icc) \
3214 { \
3215 TCGv r_cond; \
3216 int l1; \
3217 \
3218 l1 = gen_new_label(); \
3219 r_cond = tcg_temp_new(); \
3220 cond = GET_FIELD_SP(insn, 14, 17); \
3221 gen_cond(r_cond, icc, cond, dc); \
3222 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3223 0, l1); \
3224 gen_move_Q(rd, rs2); \
3225 gen_set_label(l1); \
3226 tcg_temp_free(r_cond); \
3227 }
3228
3229 case 0x101: /* V9 fmovscc %icc */
3230 FMOVSCC(0);
3231 break;
3232 case 0x102: /* V9 fmovdcc %icc */
3233 FMOVDCC(0);
3234 break;
3235 case 0x103: /* V9 fmovqcc %icc */
3236 CHECK_FPU_FEATURE(dc, FLOAT128);
3237 FMOVQCC(0);
3238 break;
3239 case 0x181: /* V9 fmovscc %xcc */
3240 FMOVSCC(1);
3241 break;
3242 case 0x182: /* V9 fmovdcc %xcc */
3243 FMOVDCC(1);
3244 break;
3245 case 0x183: /* V9 fmovqcc %xcc */
3246 CHECK_FPU_FEATURE(dc, FLOAT128);
3247 FMOVQCC(1);
3248 break;
3249 #undef FMOVSCC
3250 #undef FMOVDCC
3251 #undef FMOVQCC
3252 #endif
3253 case 0x51: /* fcmps, V9 %fcc */
3254 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3255 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3256 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3257 break;
3258 case 0x52: /* fcmpd, V9 %fcc */
3259 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3260 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3261 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3262 break;
3263 case 0x53: /* fcmpq, V9 %fcc */
3264 CHECK_FPU_FEATURE(dc, FLOAT128);
3265 gen_op_load_fpr_QT0(QFPREG(rs1));
3266 gen_op_load_fpr_QT1(QFPREG(rs2));
3267 gen_op_fcmpq(rd & 3);
3268 break;
3269 case 0x55: /* fcmpes, V9 %fcc */
3270 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3271 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3272 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3273 break;
3274 case 0x56: /* fcmped, V9 %fcc */
3275 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3276 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3277 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3278 break;
3279 case 0x57: /* fcmpeq, V9 %fcc */
3280 CHECK_FPU_FEATURE(dc, FLOAT128);
3281 gen_op_load_fpr_QT0(QFPREG(rs1));
3282 gen_op_load_fpr_QT1(QFPREG(rs2));
3283 gen_op_fcmpeq(rd & 3);
3284 break;
3285 default:
3286 goto illegal_insn;
3287 }
3288 } else if (xop == 0x2) {
3289 // clr/mov shortcut
3290
3291 rs1 = GET_FIELD(insn, 13, 17);
3292 if (rs1 == 0) {
3293 /* or %g0, x, rd reduces to a plain move into rd */
3294 if (IS_IMM) { /* immediate */
3295 TCGv r_const;
3296
3297 simm = GET_FIELDs(insn, 19, 31);
3298 r_const = tcg_const_tl(simm);
3299 gen_movl_TN_reg(rd, r_const);
3300 tcg_temp_free(r_const);
3301 } else { /* register */
3302 rs2 = GET_FIELD(insn, 27, 31);
3303 gen_movl_reg_TN(rs2, cpu_dst);
3304 gen_movl_TN_reg(rd, cpu_dst);
3305 }
3306 } else {
3307 cpu_src1 = get_src1(insn, cpu_src1);
3308 if (IS_IMM) { /* immediate */
3309 simm = GET_FIELDs(insn, 19, 31);
3310 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3311 gen_movl_TN_reg(rd, cpu_dst);
3312 } else { /* register */
3313 /* or x, %g0, rd reduces to a plain move into rd */
3314 rs2 = GET_FIELD(insn, 27, 31);
3315 if (rs2 != 0) {
3316 gen_movl_reg_TN(rs2, cpu_src2);
3317 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3318 gen_movl_TN_reg(rd, cpu_dst);
3319 } else
3320 gen_movl_TN_reg(rd, cpu_src1);
3321 }
3322 }
3323 #ifdef TARGET_SPARC64
3324 } else if (xop == 0x25) { /* sll, V9 sllx */
3325 cpu_src1 = get_src1(insn, cpu_src1);
3326 if (IS_IMM) { /* immediate */
3327 simm = GET_FIELDs(insn, 20, 31);
3328 if (insn & (1 << 12)) {
3329 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3330 } else {
3331 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3332 }
3333 } else { /* register */
3334 rs2 = GET_FIELD(insn, 27, 31);
3335 gen_movl_reg_TN(rs2, cpu_src2);
3336 if (insn & (1 << 12)) {
3337 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3338 } else {
3339 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3340 }
3341 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3342 }
3343 gen_movl_TN_reg(rd, cpu_dst);
3344 } else if (xop == 0x26) { /* srl, V9 srlx */
3345 cpu_src1 = get_src1(insn, cpu_src1);
3346 if (IS_IMM) { /* immediate */
3347 simm = GET_FIELDs(insn, 20, 31);
3348 if (insn & (1 << 12)) {
3349 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3350 } else {
3351 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3352 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3353 }
3354 } else { /* register */
3355 rs2 = GET_FIELD(insn, 27, 31);
3356 gen_movl_reg_TN(rs2, cpu_src2);
3357 if (insn & (1 << 12)) {
3358 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3359 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3360 } else {
3361 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3362 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3363 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3364 }
3365 }
3366 gen_movl_TN_reg(rd, cpu_dst);
3367 } else if (xop == 0x27) { /* sra, V9 srax */
3368 cpu_src1 = get_src1(insn, cpu_src1);
3369 if (IS_IMM) { /* immediate */
3370 simm = GET_FIELDs(insn, 20, 31);
3371 if (insn & (1 << 12)) {
3372 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3373 } else {
3374 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3375 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3376 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3377 }
3378 } else { /* register */
3379 rs2 = GET_FIELD(insn, 27, 31);
3380 gen_movl_reg_TN(rs2, cpu_src2);
3381 if (insn & (1 << 12)) {
3382 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3383 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3384 } else {
3385 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3386 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3387 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3388 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3389 }
3390 }
3391 gen_movl_TN_reg(rd, cpu_dst);
3392 #endif
3393 } else if (xop < 0x36) {
3394 if (xop < 0x20) {
3395 cpu_src1 = get_src1(insn, cpu_src1);
3396 cpu_src2 = get_src2(insn, cpu_src2);
3397 switch (xop & ~0x10) {
3398 case 0x0: /* add */
3399 if (IS_IMM) {
3400 simm = GET_FIELDs(insn, 19, 31);
3401 if (xop & 0x10) {
3402 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3403 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3404 dc->cc_op = CC_OP_ADD;
3405 } else {
3406 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3407 }
3408 } else {
3409 if (xop & 0x10) {
3410 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3411 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3412 dc->cc_op = CC_OP_ADD;
3413 } else {
3414 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3415 }
3416 }
3417 break;
3418 case 0x1: /* and */
3419 if (IS_IMM) {
3420 simm = GET_FIELDs(insn, 19, 31);
3421 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3422 } else {
3423 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3424 }
3425 if (xop & 0x10) {
3426 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3427 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3428 dc->cc_op = CC_OP_LOGIC;
3429 }
3430 break;
3431 case 0x2: /* or */
3432 if (IS_IMM) {
3433 simm = GET_FIELDs(insn, 19, 31);
3434 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3435 } else {
3436 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3437 }
3438 if (xop & 0x10) {
3439 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3440 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3441 dc->cc_op = CC_OP_LOGIC;
3442 }
3443 break;
3444 case 0x3: /* xor */
3445 if (IS_IMM) {
3446 simm = GET_FIELDs(insn, 19, 31);
3447 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3448 } else {
3449 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3450 }
3451 if (xop & 0x10) {
3452 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3453 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3454 dc->cc_op = CC_OP_LOGIC;
3455 }
3456 break;
3457 case 0x4: /* sub */
3458 if (IS_IMM) {
3459 simm = GET_FIELDs(insn, 19, 31);
3460 if (xop & 0x10) {
3461 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3462 } else {
3463 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3464 }
3465 } else {
3466 if (xop & 0x10) {
3467 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3468 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3469 dc->cc_op = CC_OP_SUB;
3470 } else {
3471 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3472 }
3473 }
3474 break;
3475 case 0x5: /* andn */
3476 if (IS_IMM) {
3477 simm = GET_FIELDs(insn, 19, 31);
3478 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3479 } else {
3480 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3481 }
3482 if (xop & 0x10) {
3483 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3484 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3485 dc->cc_op = CC_OP_LOGIC;
3486 }
3487 break;
3488 case 0x6: /* orn */
3489 if (IS_IMM) {
3490 simm = GET_FIELDs(insn, 19, 31);
3491 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3492 } else {
3493 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3494 }
3495 if (xop & 0x10) {
3496 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3497 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3498 dc->cc_op = CC_OP_LOGIC;
3499 }
3500 break;
3501 case 0x7: /* xorn */
3502 if (IS_IMM) {
3503 simm = GET_FIELDs(insn, 19, 31);
3504 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3505 } else {
3506 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3507 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3508 }
3509 if (xop & 0x10) {
3510 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3511 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3512 dc->cc_op = CC_OP_LOGIC;
3513 }
3514 break;
3515 case 0x8: /* addx, V9 addc */
3516 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3517 (xop & 0x10));
3518 break;
3519 #ifdef TARGET_SPARC64
3520 case 0x9: /* V9 mulx */
3521 if (IS_IMM) {
3522 simm = GET_FIELDs(insn, 19, 31);
3523 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3524 } else {
3525 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3526 }
3527 break;
3528 #endif
3529 case 0xa: /* umul */
3530 CHECK_IU_FEATURE(dc, MUL);
3531 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3532 if (xop & 0x10) {
3533 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3534 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3535 dc->cc_op = CC_OP_LOGIC;
3536 }
3537 break;
3538 case 0xb: /* smul */
3539 CHECK_IU_FEATURE(dc, MUL);
3540 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3541 if (xop & 0x10) {
3542 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3543 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3544 dc->cc_op = CC_OP_LOGIC;
3545 }
3546 break;
3547 case 0xc: /* subx, V9 subc */
3548 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3549 (xop & 0x10));
3550 break;
3551 #ifdef TARGET_SPARC64
3552 case 0xd: /* V9 udivx */
3553 {
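/* Local temps here: gen_trap_ifdivzero_tl emits a branch, and
   ordinary TCG temps are not guaranteed to survive across it. */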
3554 TCGv r_temp1, r_temp2;
3555 r_temp1 = tcg_temp_local_new();
3556 r_temp2 = tcg_temp_local_new();
3557 tcg_gen_mov_tl(r_temp1, cpu_src1);
3558 tcg_gen_mov_tl(r_temp2, cpu_src2);
3559 gen_trap_ifdivzero_tl(r_temp2);
3560 tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
3561 tcg_temp_free(r_temp1);
3562 tcg_temp_free(r_temp2);
3563 }
3564 break;
3565 #endif
3566 case 0xe: /* udiv */
3567 CHECK_IU_FEATURE(dc, DIV);
3568 if (xop & 0x10) {
3569 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3570 cpu_src2);
3571 dc->cc_op = CC_OP_DIV;
3572 } else {
3573 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3574 cpu_src2);
3575 }
3576 break;
3577 case 0xf: /* sdiv */
3578 CHECK_IU_FEATURE(dc, DIV);
3579 if (xop & 0x10) {
3580 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3581 cpu_src2);
3582 dc->cc_op = CC_OP_DIV;
3583 } else {
3584 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3585 cpu_src2);
3586 }
3587 break;
3588 default:
3589 goto illegal_insn;
3590 }
3591 gen_movl_TN_reg(rd, cpu_dst);
3592 } else {
3593 cpu_src1 = get_src1(insn, cpu_src1);
3594 cpu_src2 = get_src2(insn, cpu_src2);
3595 switch (xop) {
3596 case 0x20: /* taddcc */
3597 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3598 gen_movl_TN_reg(rd, cpu_dst);
3599 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3600 dc->cc_op = CC_OP_TADD;
3601 break;
3602 case 0x21: /* tsubcc */
3603 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3604 gen_movl_TN_reg(rd, cpu_dst);
3605 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3606 dc->cc_op = CC_OP_TSUB;
3607 break;
3608 case 0x22: /* taddcctv */
3609 save_state(dc);
3610 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3611 gen_movl_TN_reg(rd, cpu_dst);
3612 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3613 dc->cc_op = CC_OP_TADDTV;
3614 break;
3615 case 0x23: /* tsubcctv */
3616 save_state(dc);
3617 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3618 gen_movl_TN_reg(rd, cpu_dst);
3619 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3620 dc->cc_op = CC_OP_TSUBTV;
3621 break;
3622 case 0x24: /* mulscc */
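/* mulscc shifts in N xor V and consumes %y, so the lazy condition
   codes must be computed up front. */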
3623 gen_helper_compute_psr(cpu_env);
3624 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3625 gen_movl_TN_reg(rd, cpu_dst);
3626 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3627 dc->cc_op = CC_OP_ADD;
3628 break;
3629 #ifndef TARGET_SPARC64
3630 case 0x25: /* sll */
3631 if (IS_IMM) { /* immediate */
3632 simm = GET_FIELDs(insn, 20, 31);
3633 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3634 } else { /* register */
3635 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3636 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3637 }
3638 gen_movl_TN_reg(rd, cpu_dst);
3639 break;
3640 case 0x26: /* srl */
3641 if (IS_IMM) { /* immediate */
3642 simm = GET_FIELDs(insn, 20, 31);
3643 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3644 } else { /* register */
3645 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3646 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3647 }
3648 gen_movl_TN_reg(rd, cpu_dst);
3649 break;
3650 case 0x27: /* sra */
3651 if (IS_IMM) { /* immediate */
3652 simm = GET_FIELDs(insn, 20, 31);
3653 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3654 } else { /* register */
3655 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3656 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3657 }
3658 gen_movl_TN_reg(rd, cpu_dst);
3659 break;
3660 #endif
3661 case 0x30:
3662 {
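/* wr %rs1, %rs2, %asr writes rs1 XOR rs2 (per the SPARC definition
   of WRASR), which is why every case below begins with an xor. */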
3663 switch(rd) {
3664 case 0: /* wry */
3665 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3666 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3667 break;
3668 #ifndef TARGET_SPARC64
3669 case 0x01 ... 0x0f: /* undefined in the
3670 SPARCv8 manual, nop
3671 on the microSPARC
3672 II */
3673 case 0x10 ... 0x1f: /* implementation-dependent
3674 in the SPARCv8
3675 manual, nop on the
3676 microSPARC II */
3677 break;
3678 #else
3679 case 0x2: /* V9 wrccr */
3680 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3681 gen_helper_wrccr(cpu_env, cpu_dst);
3682 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3683 dc->cc_op = CC_OP_FLAGS;
3684 break;
3685 case 0x3: /* V9 wrasi */
3686 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3687 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3688 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3689 break;
3690 case 0x6: /* V9 wrfprs */
3691 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3692 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3693 save_state(dc);
3694 gen_op_next_insn();
3695 tcg_gen_exit_tb(0);
3696 dc->is_br = 1;
3697 break;
3698 case 0xf: /* V9 sir, nop if user */
3699 #if !defined(CONFIG_USER_ONLY)
3700 if (supervisor(dc)) {
3701 ; // XXX
3702 }
3703 #endif
3704 break;
3705 case 0x13: /* Graphics Status */
3706 if (gen_trap_ifnofpu(dc)) {
3707 goto jmp_insn;
3708 }
3709 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3710 break;
3711 case 0x14: /* Softint set */
3712 if (!supervisor(dc))
3713 goto illegal_insn;
3714 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3715 gen_helper_set_softint(cpu_env, cpu_tmp64);
3716 break;
3717 case 0x15: /* Softint clear */
3718 if (!supervisor(dc))
3719 goto illegal_insn;
3720 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3721 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3722 break;
3723 case 0x16: /* Softint write */
3724 if (!supervisor(dc))
3725 goto illegal_insn;
3726 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3727 gen_helper_write_softint(cpu_env, cpu_tmp64);
3728 break;
3729 case 0x17: /* Tick compare */
3730 #if !defined(CONFIG_USER_ONLY)
3731 if (!supervisor(dc))
3732 goto illegal_insn;
3733 #endif
3734 {
3735 TCGv_ptr r_tickptr;
3736
3737 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3738 cpu_src2);
3739 r_tickptr = tcg_temp_new_ptr();
3740 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3741 offsetof(CPUSPARCState, tick));
3742 gen_helper_tick_set_limit(r_tickptr,
3743 cpu_tick_cmpr);
3744 tcg_temp_free_ptr(r_tickptr);
3745 }
3746 break;
3747 case 0x18: /* System tick */
3748 #if !defined(CONFIG_USER_ONLY)
3749 if (!supervisor(dc))
3750 goto illegal_insn;
3751 #endif
3752 {
3753 TCGv_ptr r_tickptr;
3754
3755 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3756 cpu_src2);
3757 r_tickptr = tcg_temp_new_ptr();
3758 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3759 offsetof(CPUSPARCState, stick));
3760 gen_helper_tick_set_count(r_tickptr,
3761 cpu_dst);
3762 tcg_temp_free_ptr(r_tickptr);
3763 }
3764 break;
3765 case 0x19: /* System tick compare */
3766 #if !defined(CONFIG_USER_ONLY)
3767 if (!supervisor(dc))
3768 goto illegal_insn;
3769 #endif
3770 {
3771 TCGv_ptr r_tickptr;
3772
3773 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3774 cpu_src2);
3775 r_tickptr = tcg_temp_new_ptr();
3776 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3777 offsetof(CPUSPARCState, stick));
3778 gen_helper_tick_set_limit(r_tickptr,
3779 cpu_stick_cmpr);
3780 tcg_temp_free_ptr(r_tickptr);
3781 }
3782 break;
3783
3784 case 0x10: /* Performance Control */
3785 case 0x11: /* Performance Instrumentation
3786 Counter */
3787 case 0x12: /* Dispatch Control */
3788 #endif
3789 default:
3790 goto illegal_insn;
3791 }
3792 }
3793 break;
3794 #if !defined(CONFIG_USER_ONLY)
3795 case 0x31: /* wrpsr, V9 saved, restored */
3796 {
3797 if (!supervisor(dc))
3798 goto priv_insn;
3799 #ifdef TARGET_SPARC64
3800 switch (rd) {
3801 case 0:
3802 gen_helper_saved(cpu_env);
3803 break;
3804 case 1:
3805 gen_helper_restored(cpu_env);
3806 break;
3807 case 2: /* UA2005 allclean */
3808 case 3: /* UA2005 otherw */
3809 case 4: /* UA2005 normalw */
3810 case 5: /* UA2005 invalw */
3811 // XXX
3812 default:
3813 goto illegal_insn;
3814 }
3815 #else
3816 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3817 gen_helper_wrpsr(cpu_env, cpu_dst);
3818 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3819 dc->cc_op = CC_OP_FLAGS;
3820 save_state(dc);
3821 gen_op_next_insn();
3822 tcg_gen_exit_tb(0);
3823 dc->is_br = 1;
3824 #endif
3825 }
3826 break;
3827 case 0x32: /* wrwim, V9 wrpr */
3828 {
3829 if (!supervisor(dc))
3830 goto priv_insn;
3831 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3832 #ifdef TARGET_SPARC64
3833 switch (rd) {
3834 case 0: // tpc
3835 {
3836 TCGv_ptr r_tsptr;
3837
3838 r_tsptr = tcg_temp_new_ptr();
3839 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3840 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3841 offsetof(trap_state, tpc));
3842 tcg_temp_free_ptr(r_tsptr);
3843 }
3844 break;
3845 case 1: // tnpc
3846 {
3847 TCGv_ptr r_tsptr;
3848
3849 r_tsptr = tcg_temp_new_ptr();
3850 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3851 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3852 offsetof(trap_state, tnpc));
3853 tcg_temp_free_ptr(r_tsptr);
3854 }
3855 break;
3856 case 2: // tstate
3857 {
3858 TCGv_ptr r_tsptr;
3859
3860 r_tsptr = tcg_temp_new_ptr();
3861 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3862 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3863 offsetof(trap_state,
3864 tstate));
3865 tcg_temp_free_ptr(r_tsptr);
3866 }
3867 break;
3868 case 3: // tt
3869 {
3870 TCGv_ptr r_tsptr;
3871
3872 r_tsptr = tcg_temp_new_ptr();
3873 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3874 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3875 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3876 offsetof(trap_state, tt));
3877 tcg_temp_free_ptr(r_tsptr);
3878 }
3879 break;
3880 case 4: // tick
3881 {
3882 TCGv_ptr r_tickptr;
3883
3884 r_tickptr = tcg_temp_new_ptr();
3885 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3886 offsetof(CPUSPARCState, tick));
3887 gen_helper_tick_set_count(r_tickptr,
3888 cpu_tmp0);
3889 tcg_temp_free_ptr(r_tickptr);
3890 }
3891 break;
3892 case 5: // tba
3893 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3894 break;
3895 case 6: // pstate
3896 {
3897 TCGv r_tmp = tcg_temp_local_new();
3898
3899 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3900 save_state(dc);
3901 gen_helper_wrpstate(cpu_env, r_tmp);
3902 tcg_temp_free(r_tmp);
3903 dc->npc = DYNAMIC_PC;
3904 }
3905 break;
3906 case 7: // tl
3907 {
3908 TCGv r_tmp = tcg_temp_local_new();
3909
3910 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3911 save_state(dc);
3912 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3913 tcg_temp_free(r_tmp);
3914 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3915 offsetof(CPUSPARCState, tl));
3916 dc->npc = DYNAMIC_PC;
3917 }
3918 break;
3919 case 8: // pil
3920 gen_helper_wrpil(cpu_env, cpu_tmp0);
3921 break;
3922 case 9: // cwp
3923 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3924 break;
3925 case 10: // cansave
3926 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3927 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3928 offsetof(CPUSPARCState,
3929 cansave));
3930 break;
3931 case 11: // canrestore
3932 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3933 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3934 offsetof(CPUSPARCState,
3935 canrestore));
3936 break;
3937 case 12: // cleanwin
3938 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3939 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3940 offsetof(CPUSPARCState,
3941 cleanwin));
3942 break;
3943 case 13: // otherwin
3944 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3945 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3946 offsetof(CPUSPARCState,
3947 otherwin));
3948 break;
3949 case 14: // wstate
3950 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3951 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3952 offsetof(CPUSPARCState,
3953 wstate));
3954 break;
3955 case 16: // UA2005 gl
3956 CHECK_IU_FEATURE(dc, GL);
3957 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3958 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3959 offsetof(CPUSPARCState, gl));
3960 break;
3961 case 26: // UA2005 strand status
3962 CHECK_IU_FEATURE(dc, HYPV);
3963 if (!hypervisor(dc))
3964 goto priv_insn;
3965 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3966 break;
3967 default:
3968 goto illegal_insn;
3969 }
3970 #else
3971 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3972 if (dc->def->nwindows != 32)
3973 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3974 (1 << dc->def->nwindows) - 1);
3975 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3976 #endif
3977 }
3978 break;
3979 case 0x33: /* wrtbr, UA2005 wrhpr */
3980 {
3981 #ifndef TARGET_SPARC64
3982 if (!supervisor(dc))
3983 goto priv_insn;
3984 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3985 #else
3986 CHECK_IU_FEATURE(dc, HYPV);
3987 if (!hypervisor(dc))
3988 goto priv_insn;
3989 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3990 switch (rd) {
3991 case 0: // hpstate
3992 // XXX gen_op_wrhpstate();
3993 save_state(dc);
3994 gen_op_next_insn();
3995 tcg_gen_exit_tb(0);
3996 dc->is_br = 1;
3997 break;
3998 case 1: // htstate
3999 // XXX gen_op_wrhtstate();
4000 break;
4001 case 3: // hintp
4002 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4003 break;
4004 case 5: // htba
4005 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4006 break;
4007 case 31: // hstick_cmpr
4008 {
4009 TCGv_ptr r_tickptr;
4010
4011 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4012 r_tickptr = tcg_temp_new_ptr();
4013 tcg_gen_ld_ptr(r_tickptr, cpu_env,
4014 offsetof(CPUSPARCState, hstick));
4015 gen_helper_tick_set_limit(r_tickptr,
4016 cpu_hstick_cmpr);
4017 tcg_temp_free_ptr(r_tickptr);
4018 }
4019 break;
4020 case 6: // hver readonly
4021 default:
4022 goto illegal_insn;
4023 }
4024 #endif
4025 }
4026 break;
4027 #endif
4028 #ifdef TARGET_SPARC64
4029 case 0x2c: /* V9 movcc */
4030 {
4031 int cc = GET_FIELD_SP(insn, 11, 12);
4032 int cond = GET_FIELD_SP(insn, 14, 17);
4033 TCGv r_cond;
4034 int l1;
4035
4036 r_cond = tcg_temp_new();
4037 if (insn & (1 << 18)) {
4038 if (cc == 0)
4039 gen_cond(r_cond, 0, cond, dc);
4040 else if (cc == 2)
4041 gen_cond(r_cond, 1, cond, dc);
4042 else
4043 goto illegal_insn;
4044 } else {
4045 gen_fcond(r_cond, cc, cond);
4046 }
4047
4048 l1 = gen_new_label();
4049
4050 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
4051 if (IS_IMM) { /* immediate */
4052 TCGv r_const;
4053
4054 simm = GET_FIELD_SPs(insn, 0, 10);
4055 r_const = tcg_const_tl(simm);
4056 gen_movl_TN_reg(rd, r_const);
4057 tcg_temp_free(r_const);
4058 } else {
4059 rs2 = GET_FIELD_SP(insn, 0, 4);
4060 gen_movl_reg_TN(rs2, cpu_tmp0);
4061 gen_movl_TN_reg(rd, cpu_tmp0);
4062 }
4063 gen_set_label(l1);
4064 tcg_temp_free(r_cond);
4065 break;
4066 }
4067 case 0x2d: /* V9 sdivx */
4068 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
4069 gen_movl_TN_reg(rd, cpu_dst);
4070 break;
4071 case 0x2e: /* V9 popc */
4072 {
4073 cpu_src2 = get_src2(insn, cpu_src2);
4074 gen_helper_popc(cpu_dst, cpu_src2);
4075 gen_movl_TN_reg(rd, cpu_dst);
4076 }
break;
4077 case 0x2f: /* V9 movr */
4078 {
4079 int cond = GET_FIELD_SP(insn, 10, 12);
4080 int l1;
4081
4082 cpu_src1 = get_src1(insn, cpu_src1);
4083
4084 l1 = gen_new_label();
4085
4086 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
4087 cpu_src1, 0, l1);
4088 if (IS_IMM) { /* immediate */
4089 TCGv r_const;
4090
4091 simm = GET_FIELD_SPs(insn, 0, 9);
4092 r_const = tcg_const_tl(simm);
4093 gen_movl_TN_reg(rd, r_const);
4094 tcg_temp_free(r_const);
4095 } else {
4096 rs2 = GET_FIELD_SP(insn, 0, 4);
4097 gen_movl_reg_TN(rs2, cpu_tmp0);
4098 gen_movl_TN_reg(rd, cpu_tmp0);
4099 }
4100 gen_set_label(l1);
4101 break;
4102 }
4103 #endif
4104 default:
4105 goto illegal_insn;
4106 }
4107 }
4108 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4109 #ifdef TARGET_SPARC64
4110 int opf = GET_FIELD_SP(insn, 5, 13);
4111 rs1 = GET_FIELD(insn, 13, 17);
4112 rs2 = GET_FIELD(insn, 27, 31);
4113 if (gen_trap_ifnofpu(dc)) {
4114 goto jmp_insn;
4115 }
4116
4117 switch (opf) {
4118 case 0x000: /* VIS I edge8cc */
4119 CHECK_FPU_FEATURE(dc, VIS1);
4120 gen_movl_reg_TN(rs1, cpu_src1);
4121 gen_movl_reg_TN(rs2, cpu_src2);
4122 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4123 gen_movl_TN_reg(rd, cpu_dst);
4124 break;
4125 case 0x001: /* VIS II edge8n */
4126 CHECK_FPU_FEATURE(dc, VIS2);
4127 gen_movl_reg_TN(rs1, cpu_src1);
4128 gen_movl_reg_TN(rs2, cpu_src2);
4129 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4130 gen_movl_TN_reg(rd, cpu_dst);
4131 break;
4132 case 0x002: /* VIS I edge8lcc */
4133 CHECK_FPU_FEATURE(dc, VIS1);
4134 gen_movl_reg_TN(rs1, cpu_src1);
4135 gen_movl_reg_TN(rs2, cpu_src2);
4136 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4137 gen_movl_TN_reg(rd, cpu_dst);
4138 break;
4139 case 0x003: /* VIS II edge8ln */
4140 CHECK_FPU_FEATURE(dc, VIS2);
4141 gen_movl_reg_TN(rs1, cpu_src1);
4142 gen_movl_reg_TN(rs2, cpu_src2);
4143 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4144 gen_movl_TN_reg(rd, cpu_dst);
4145 break;
4146 case 0x004: /* VIS I edge16cc */
4147 CHECK_FPU_FEATURE(dc, VIS1);
4148 gen_movl_reg_TN(rs1, cpu_src1);
4149 gen_movl_reg_TN(rs2, cpu_src2);
4150 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4151 gen_movl_TN_reg(rd, cpu_dst);
4152 break;
4153 case 0x005: /* VIS II edge16n */
4154 CHECK_FPU_FEATURE(dc, VIS2);
4155 gen_movl_reg_TN(rs1, cpu_src1);
4156 gen_movl_reg_TN(rs2, cpu_src2);
4157 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4158 gen_movl_TN_reg(rd, cpu_dst);
4159 break;
4160 case 0x006: /* VIS I edge16lcc */
4161 CHECK_FPU_FEATURE(dc, VIS1);
4162 gen_movl_reg_TN(rs1, cpu_src1);
4163 gen_movl_reg_TN(rs2, cpu_src2);
4164 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4165 gen_movl_TN_reg(rd, cpu_dst);
4166 break;
4167 case 0x007: /* VIS II edge16ln */
4168 CHECK_FPU_FEATURE(dc, VIS2);
4169 gen_movl_reg_TN(rs1, cpu_src1);
4170 gen_movl_reg_TN(rs2, cpu_src2);
4171 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4172 gen_movl_TN_reg(rd, cpu_dst);
4173 break;
4174 case 0x008: /* VIS I edge32cc */
4175 CHECK_FPU_FEATURE(dc, VIS1);
4176 gen_movl_reg_TN(rs1, cpu_src1);
4177 gen_movl_reg_TN(rs2, cpu_src2);
4178 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4179 gen_movl_TN_reg(rd, cpu_dst);
4180 break;
4181 case 0x009: /* VIS II edge32n */
4182 CHECK_FPU_FEATURE(dc, VIS2);
4183 gen_movl_reg_TN(rs1, cpu_src1);
4184 gen_movl_reg_TN(rs2, cpu_src2);
4185 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4186 gen_movl_TN_reg(rd, cpu_dst);
4187 break;
4188 case 0x00a: /* VIS I edge32lcc */
4189 CHECK_FPU_FEATURE(dc, VIS1);
4190 gen_movl_reg_TN(rs1, cpu_src1);
4191 gen_movl_reg_TN(rs2, cpu_src2);
4192 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4193 gen_movl_TN_reg(rd, cpu_dst);
4194 break;
4195 case 0x00b: /* VIS II edge32ln */
4196 CHECK_FPU_FEATURE(dc, VIS2);
4197 gen_movl_reg_TN(rs1, cpu_src1);
4198 gen_movl_reg_TN(rs2, cpu_src2);
4199 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4200 gen_movl_TN_reg(rd, cpu_dst);
4201 break;
4202 case 0x010: /* VIS I array8 */
4203 CHECK_FPU_FEATURE(dc, VIS1);
4204 cpu_src1 = get_src1(insn, cpu_src1);
4205 gen_movl_reg_TN(rs2, cpu_src2);
4206 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4207 gen_movl_TN_reg(rd, cpu_dst);
4208 break;
4209 case 0x012: /* VIS I array16 */
4210 CHECK_FPU_FEATURE(dc, VIS1);
4211 cpu_src1 = get_src1(insn, cpu_src1);
4212 gen_movl_reg_TN(rs2, cpu_src2);
4213 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4214 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4215 gen_movl_TN_reg(rd, cpu_dst);
4216 break;
4217 case 0x014: /* VIS I array32 */
4218 CHECK_FPU_FEATURE(dc, VIS1);
4219 cpu_src1 = get_src1(insn, cpu_src1);
4220 gen_movl_reg_TN(rs2, cpu_src2);
4221 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4222 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4223 gen_movl_TN_reg(rd, cpu_dst);
4224 break;
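/*
 * The array8/16/32 instructions convert a packed (x, y, z) coordinate
 * in rs1 into a byte offset within a blocked (tiled) array, intended
 * to keep 3D texture walks cache-friendly.  Only one helper is needed:
 * array16 and array32 reuse the array8 computation and scale the
 * resulting offset by the element size (shift left by 1 or 2).
 */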
4225 case 0x018: /* VIS I alignaddr */
4226 CHECK_FPU_FEATURE(dc, VIS1);
4227 cpu_src1 = get_src1(insn, cpu_src1);
4228 gen_movl_reg_TN(rs2, cpu_src2);
4229 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4230 gen_movl_TN_reg(rd, cpu_dst);
4231 break;
4232 case 0x01a: /* VIS I alignaddrl */
4233 CHECK_FPU_FEATURE(dc, VIS1);
4234 cpu_src1 = get_src1(insn, cpu_src1);
4235 gen_movl_reg_TN(rs2, cpu_src2);
4236 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4237 gen_movl_TN_reg(rd, cpu_dst);
4238 break;
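/*
 * alignaddr computes rd = (rs1 + rs2) & ~7 and records the low three
 * bits of the sum in the GSR alignment field, which a later faligndata
 * uses to extract the unaligned 8-byte datum.  alignaddrl is the
 * little-endian form; per the VIS manuals it stores the two's
 * complement of those bits instead.
 */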
4239 case 0x019: /* VIS II bmask */
4240 CHECK_FPU_FEATURE(dc, VIS2);
4241 cpu_src1 = get_src1(insn, cpu_src1);
4242 gen_movl_reg_TN(rs2, cpu_src2);
4243 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4244 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4245 gen_movl_TN_reg(rd, cpu_dst);
4246 break;
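/*
 * bmask stores rs1 + rs2 in rd and also deposits the low 32 bits of
 * the sum into GSR bits 63:32 (the mask field), where a subsequent
 * bshuffle picks up its byte-permutation pattern.
 */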
4247 case 0x020: /* VIS I fcmple16 */
4248 CHECK_FPU_FEATURE(dc, VIS1);
4249 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4250 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4251 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4252 gen_movl_TN_reg(rd, cpu_dst);
4253 break;
4254 case 0x022: /* VIS I fcmpne16 */
4255 CHECK_FPU_FEATURE(dc, VIS1);
4256 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4257 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4258 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4259 gen_movl_TN_reg(rd, cpu_dst);
4260 break;
4261 case 0x024: /* VIS I fcmple32 */
4262 CHECK_FPU_FEATURE(dc, VIS1);
4263 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4264 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4265 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4266 gen_movl_TN_reg(rd, cpu_dst);
4267 break;
4268 case 0x026: /* VIS I fcmpne32 */
4269 CHECK_FPU_FEATURE(dc, VIS1);
4270 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4271 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4272 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4273 gen_movl_TN_reg(rd, cpu_dst);
4274 break;
4275 case 0x028: /* VIS I fcmpgt16 */
4276 CHECK_FPU_FEATURE(dc, VIS1);
4277 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4278 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4279 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4280 gen_movl_TN_reg(rd, cpu_dst);
4281 break;
4282 case 0x02a: /* VIS I fcmpeq16 */
4283 CHECK_FPU_FEATURE(dc, VIS1);
4284 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4285 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4286 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4287 gen_movl_TN_reg(rd, cpu_dst);
4288 break;
4289 case 0x02c: /* VIS I fcmpgt32 */
4290 CHECK_FPU_FEATURE(dc, VIS1);
4291 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4292 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4293 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4294 gen_movl_TN_reg(rd, cpu_dst);
4295 break;
4296 case 0x02e: /* VIS I fcmpeq32 */
4297 CHECK_FPU_FEATURE(dc, VIS1);
4298 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4299 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4300 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4301 gen_movl_TN_reg(rd, cpu_dst);
4302 break;
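/*
 * The fcmp{le,ne,gt,eq}{16,32} instructions compare corresponding
 * 16- or 32-bit lanes of two double-precision registers and return a
 * per-lane bit mask (four bits for 16-bit lanes, two for 32-bit
 * lanes) in the integer destination register.
 */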
4303 case 0x031: /* VIS I fmul8x16 */
4304 CHECK_FPU_FEATURE(dc, VIS1);
4305 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4306 break;
4307 case 0x033: /* VIS I fmul8x16au */
4308 CHECK_FPU_FEATURE(dc, VIS1);
4309 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4310 break;
4311 case 0x035: /* VIS I fmul8x16al */
4312 CHECK_FPU_FEATURE(dc, VIS1);
4313 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4314 break;
4315 case 0x036: /* VIS I fmul8sux16 */
4316 CHECK_FPU_FEATURE(dc, VIS1);
4317 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4318 break;
4319 case 0x037: /* VIS I fmul8ulx16 */
4320 CHECK_FPU_FEATURE(dc, VIS1);
4321 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4322 break;
4323 case 0x038: /* VIS I fmuld8sux16 */
4324 CHECK_FPU_FEATURE(dc, VIS1);
4325 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4326 break;
4327 case 0x039: /* VIS I fmuld8ulx16 */
4328 CHECK_FPU_FEATURE(dc, VIS1);
4329 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4330 break;
4331 case 0x03a: /* VIS I fpack32 */
4332 CHECK_FPU_FEATURE(dc, VIS1);
4333 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4334 break;
4335 case 0x03b: /* VIS I fpack16 */
4336 CHECK_FPU_FEATURE(dc, VIS1);
4337 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4338 cpu_dst_32 = gen_dest_fpr_F();
4339 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4340 gen_store_fpr_F(dc, rd, cpu_dst_32);
4341 break;
4342 case 0x03d: /* VIS I fpackfix */
4343 CHECK_FPU_FEATURE(dc, VIS1);
4344 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4345 cpu_dst_32 = gen_dest_fpr_F();
4346 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4347 gen_store_fpr_F(dc, rd, cpu_dst_32);
4348 break;
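/*
 * fpack16/fpack32/fpackfix narrow fixed-point lanes: each input lane
 * is shifted by the GSR scale factor and clipped into the smaller
 * destination format.  fpack16 and fpackfix read only rs2 plus the
 * GSR, while fpack32 also merges its result into bytes shifted over
 * from rs1, which is why it goes through the GSR-aware two-source
 * helper.
 */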
4349 case 0x03e: /* VIS I pdist */
4350 CHECK_FPU_FEATURE(dc, VIS1);
4351 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4352 break;
4353 case 0x048: /* VIS I faligndata */
4354 CHECK_FPU_FEATURE(dc, VIS1);
4355 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4356 break;
4357 case 0x04b: /* VIS I fpmerge */
4358 CHECK_FPU_FEATURE(dc, VIS1);
4359 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4360 break;
4361 case 0x04c: /* VIS II bshuffle */
4362 CHECK_FPU_FEATURE(dc, VIS2);
4363 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4364 break;
4365 case 0x04d: /* VIS I fexpand */
4366 CHECK_FPU_FEATURE(dc, VIS1);
4367 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4368 break;
4369 case 0x050: /* VIS I fpadd16 */
4370 CHECK_FPU_FEATURE(dc, VIS1);
4371 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4372 break;
4373 case 0x051: /* VIS I fpadd16s */
4374 CHECK_FPU_FEATURE(dc, VIS1);
4375 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4376 break;
4377 case 0x052: /* VIS I fpadd32 */
4378 CHECK_FPU_FEATURE(dc, VIS1);
4379 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4380 break;
4381 case 0x053: /* VIS I fpadd32s */
4382 CHECK_FPU_FEATURE(dc, VIS1);
4383 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4384 break;
4385 case 0x054: /* VIS I fpsub16 */
4386 CHECK_FPU_FEATURE(dc, VIS1);
4387 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4388 break;
4389 case 0x055: /* VIS I fpsub16s */
4390 CHECK_FPU_FEATURE(dc, VIS1);
4391 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4392 break;
4393 case 0x056: /* VIS I fpsub32 */
4394 CHECK_FPU_FEATURE(dc, VIS1);
4395 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4396 break;
4397 case 0x057: /* VIS I fpsub32s */
4398 CHECK_FPU_FEATURE(dc, VIS1);
4399 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4400 break;
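/*
 * The partitioned adds and subtracts operate lane-wise, so the 64-bit
 * forms go through helpers.  fpadd32s/fpsub32s have a single 32-bit
 * lane and therefore reduce to ordinary 32-bit add/sub, which is why
 * they map straight onto tcg_gen_add_i32/tcg_gen_sub_i32, while the
 * 16-bit single forms still need helpers for their two lanes.
 */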
4401 case 0x060: /* VIS I fzero */
4402 CHECK_FPU_FEATURE(dc, VIS1);
4403 cpu_dst_64 = gen_dest_fpr_D();
4404 tcg_gen_movi_i64(cpu_dst_64, 0);
4405 gen_store_fpr_D(dc, rd, cpu_dst_64);
4406 break;
4407 case 0x061: /* VIS I fzeros */
4408 CHECK_FPU_FEATURE(dc, VIS1);
4409 cpu_dst_32 = gen_dest_fpr_F();
4410 tcg_gen_movi_i32(cpu_dst_32, 0);
4411 gen_store_fpr_F(dc, rd, cpu_dst_32);
4412 break;
4413 case 0x062: /* VIS I fnor */
4414 CHECK_FPU_FEATURE(dc, VIS1);
4415 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4416 break;
4417 case 0x063: /* VIS I fnors */
4418 CHECK_FPU_FEATURE(dc, VIS1);
4419 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4420 break;
4421 case 0x064: /* VIS I fandnot2 */
4422 CHECK_FPU_FEATURE(dc, VIS1);
4423 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4424 break;
4425 case 0x065: /* VIS I fandnot2s */
4426 CHECK_FPU_FEATURE(dc, VIS1);
4427 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4428 break;
4429 case 0x066: /* VIS I fnot2 */
4430 CHECK_FPU_FEATURE(dc, VIS1);
4431 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4432 break;
4433 case 0x067: /* VIS I fnot2s */
4434 CHECK_FPU_FEATURE(dc, VIS1);
4435 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4436 break;
4437 case 0x068: /* VIS I fandnot1 */
4438 CHECK_FPU_FEATURE(dc, VIS1);
4439 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4440 break;
4441 case 0x069: /* VIS I fandnot1s */
4442 CHECK_FPU_FEATURE(dc, VIS1);
4443 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4444 break;
4445 case 0x06a: /* VIS I fnot1 */
4446 CHECK_FPU_FEATURE(dc, VIS1);
4447 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4448 break;
4449 case 0x06b: /* VIS I fnot1s */
4450 CHECK_FPU_FEATURE(dc, VIS1);
4451 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4452 break;
4453 case 0x06c: /* VIS I fxor */
4454 CHECK_FPU_FEATURE(dc, VIS1);
4455 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4456 break;
4457 case 0x06d: /* VIS I fxors */
4458 CHECK_FPU_FEATURE(dc, VIS1);
4459 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4460 break;
4461 case 0x06e: /* VIS I fnand */
4462 CHECK_FPU_FEATURE(dc, VIS1);
4463 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4464 break;
4465 case 0x06f: /* VIS I fnands */
4466 CHECK_FPU_FEATURE(dc, VIS1);
4467 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4468 break;
4469 case 0x070: /* VIS I fand */
4470 CHECK_FPU_FEATURE(dc, VIS1);
4471 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4472 break;
4473 case 0x071: /* VIS I fands */
4474 CHECK_FPU_FEATURE(dc, VIS1);
4475 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4476 break;
4477 case 0x072: /* VIS I fxnor */
4478 CHECK_FPU_FEATURE(dc, VIS1);
4479 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4480 break;
4481 case 0x073: /* VIS I fxnors */
4482 CHECK_FPU_FEATURE(dc, VIS1);
4483 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4484 break;
4485 case 0x074: /* VIS I fsrc1 */
4486 CHECK_FPU_FEATURE(dc, VIS1);
4487 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4488 gen_store_fpr_D(dc, rd, cpu_src1_64);
4489 break;
4490 case 0x075: /* VIS I fsrc1s */
4491 CHECK_FPU_FEATURE(dc, VIS1);
4492 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4493 gen_store_fpr_F(dc, rd, cpu_src1_32);
4494 break;
4495 case 0x076: /* VIS I fornot2 */
4496 CHECK_FPU_FEATURE(dc, VIS1);
4497 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4498 break;
4499 case 0x077: /* VIS I fornot2s */
4500 CHECK_FPU_FEATURE(dc, VIS1);
4501 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4502 break;
4503 case 0x078: /* VIS I fsrc2 */
4504 CHECK_FPU_FEATURE(dc, VIS1);
4505 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4506 gen_store_fpr_D(dc, rd, cpu_src1_64);
4507 break;
4508 case 0x079: /* VIS I fsrc2s */
4509 CHECK_FPU_FEATURE(dc, VIS1);
4510 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4511 gen_store_fpr_F(dc, rd, cpu_src1_32);
4512 break;
4513 case 0x07a: /* VIS I fornot1 */
4514 CHECK_FPU_FEATURE(dc, VIS1);
4515 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4516 break;
4517 case 0x07b: /* VIS I fornot1s */
4518 CHECK_FPU_FEATURE(dc, VIS1);
4519 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4520 break;
4521 case 0x07c: /* VIS I for */
4522 CHECK_FPU_FEATURE(dc, VIS1);
4523 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4524 break;
4525 case 0x07d: /* VIS I fors */
4526 CHECK_FPU_FEATURE(dc, VIS1);
4527 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4528 break;
4529 case 0x07e: /* VIS I fone */
4530 CHECK_FPU_FEATURE(dc, VIS1);
4531 cpu_dst_64 = gen_dest_fpr_D();
4532 tcg_gen_movi_i64(cpu_dst_64, -1);
4533 gen_store_fpr_D(dc, rd, cpu_dst_64);
4534 break;
4535 case 0x07f: /* VIS I fones */
4536 CHECK_FPU_FEATURE(dc, VIS1);
4537 cpu_dst_32 = gen_dest_fpr_F();
4538 tcg_gen_movi_i32(cpu_dst_32, -1);
4539 gen_store_fpr_F(dc, rd, cpu_dst_32);
4540 break;
4541 case 0x080: /* VIS I shutdown */
4542 case 0x081: /* VIS II siam */
4543 // XXX: not implemented
4544 goto illegal_insn;
4545 default:
4546 goto illegal_insn;
4547 }
4548 #else
4549 goto ncp_insn;
4550 #endif
4551 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4552 #ifdef TARGET_SPARC64
4553 goto illegal_insn;
4554 #else
4555 goto ncp_insn;
4556 #endif
4557 #ifdef TARGET_SPARC64
4558 } else if (xop == 0x39) { /* V9 return */
4559 TCGv_i32 r_const;
4560
4561 save_state(dc);
4562 cpu_src1 = get_src1(insn, cpu_src1);
4563 if (IS_IMM) { /* immediate */
4564 simm = GET_FIELDs(insn, 19, 31);
4565 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4566 } else { /* register */
4567 rs2 = GET_FIELD(insn, 27, 31);
4568 if (rs2) {
4569 gen_movl_reg_TN(rs2, cpu_src2);
4570 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4571 } else
4572 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4573 }
4574 gen_helper_restore(cpu_env);
4575 gen_mov_pc_npc(dc);
4576 r_const = tcg_const_i32(3);
4577 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4578 tcg_temp_free_i32(r_const);
4579 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4580 dc->npc = DYNAMIC_PC;
4581 goto jmp_insn;
4582 #endif
4583 } else {
4584 cpu_src1 = get_src1(insn, cpu_src1);
4585 if (IS_IMM) { /* immediate */
4586 simm = GET_FIELDs(insn, 19, 31);
4587 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4588 } else { /* register */
4589 rs2 = GET_FIELD(insn, 27, 31);
4590 if (rs2) {
4591 gen_movl_reg_TN(rs2, cpu_src2);
4592 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4593 } else
4594 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4595 }
4596 switch (xop) {
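/*
 * jmpl (case 0x38 below) writes its own address (dc->pc) to rd,
 * checks that the computed target is word aligned (low two bits
 * clear, hence the mask value 3), and installs the target as the new
 * npc.  The delay-slot instruction still runs first: gen_mov_pc_npc()
 * promotes the old npc to pc, and the new npc is only known at run
 * time (DYNAMIC_PC).
 */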
4597 case 0x38: /* jmpl */
4598 {
4599 TCGv r_pc;
4600 TCGv_i32 r_const;
4601
4602 r_pc = tcg_const_tl(dc->pc);
4603 gen_movl_TN_reg(rd, r_pc);
4604 tcg_temp_free(r_pc);
4605 gen_mov_pc_npc(dc);
4606 r_const = tcg_const_i32(3);
4607 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4608 tcg_temp_free_i32(r_const);
4609 gen_address_mask(dc, cpu_dst);
4610 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4611 dc->npc = DYNAMIC_PC;
4612 }
4613 goto jmp_insn;
4614 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4615 case 0x39: /* rett, V9 return */
4616 {
4617 TCGv_i32 r_const;
4618
4619 if (!supervisor(dc))
4620 goto priv_insn;
4621 gen_mov_pc_npc(dc);
4622 r_const = tcg_const_i32(3);
4623 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4624 tcg_temp_free_i32(r_const);
4625 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4626 dc->npc = DYNAMIC_PC;
4627 gen_helper_rett(cpu_env);
4628 }
4629 goto jmp_insn;
4630 #endif
4631 case 0x3b: /* flush */
4632 if (!(dc->def->features & CPU_FEATURE_FLUSH))
4633 goto unimp_flush;
4634 /* nop */
4635 break;
4636 case 0x3c: /* save */
4637 save_state(dc);
4638 gen_helper_save(cpu_env);
4639 gen_movl_TN_reg(rd, cpu_dst);
4640 break;
4641 case 0x3d: /* restore */
4642 save_state(dc);
4643 gen_helper_restore(cpu_env);
4644 gen_movl_TN_reg(rd, cpu_dst);
4645 break;
4646 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4647 case 0x3e: /* V9 done/retry */
4648 {
4649 switch (rd) {
4650 case 0:
4651 if (!supervisor(dc))
4652 goto priv_insn;
4653 dc->npc = DYNAMIC_PC;
4654 dc->pc = DYNAMIC_PC;
4655 gen_helper_done(cpu_env);
4656 goto jmp_insn;
4657 case 1:
4658 if (!supervisor(dc))
4659 goto priv_insn;
4660 dc->npc = DYNAMIC_PC;
4661 dc->pc = DYNAMIC_PC;
4662 gen_helper_retry(cpu_env);
4663 goto jmp_insn;
4664 default:
4665 goto illegal_insn;
4666 }
4667 }
4668 break;
4669 #endif
4670 default:
4671 goto illegal_insn;
4672 }
4673 }
4674 break;
4675 }
4676 break;
4677 case 3: /* load/store instructions */
4678 {
4679 unsigned int xop = GET_FIELD(insn, 7, 12);
4680
4681 /* flush pending conditional evaluations before exposing
4682 cpu state */
4683 if (dc->cc_op != CC_OP_FLAGS) {
4684 dc->cc_op = CC_OP_FLAGS;
4685 gen_helper_compute_psr(cpu_env);
4686 }
4687 cpu_src1 = get_src1(insn, cpu_src1);
4688 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4689 rs2 = GET_FIELD(insn, 27, 31);
4690 gen_movl_reg_TN(rs2, cpu_src2);
4691 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4692 } else if (IS_IMM) { /* immediate */
4693 simm = GET_FIELDs(insn, 19, 31);
4694 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4695 } else { /* register */
4696 rs2 = GET_FIELD(insn, 27, 31);
4697 if (rs2 != 0) {
4698 gen_movl_reg_TN(rs2, cpu_src2);
4699 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4700 } else
4701 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4702 }
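/*
 * Effective address selection: casa/casxa take the address from rs1
 * alone (rs2 is the comparison operand, loaded above), immediate
 * forms add the sign-extended simm13, and register forms add rs2,
 * with rs2 == %g0 reduced to a plain move since %g0 always reads as
 * zero.
 */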
4703 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4704 (xop > 0x17 && xop <= 0x1d ) ||
4705 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4706 switch (xop) {
4707 case 0x0: /* ld, V9 lduw, load unsigned word */
4708 gen_address_mask(dc, cpu_addr);
4709 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4710 break;
4711 case 0x1: /* ldub, load unsigned byte */
4712 gen_address_mask(dc, cpu_addr);
4713 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4714 break;
4715 case 0x2: /* lduh, load unsigned halfword */
4716 gen_address_mask(dc, cpu_addr);
4717 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4718 break;
4719 case 0x3: /* ldd, load double word */
4720 if (rd & 1)
4721 goto illegal_insn;
4722 else {
4723 TCGv_i32 r_const;
4724
4725 save_state(dc);
4726 r_const = tcg_const_i32(7);
4727 /* XXX remove alignment check */
4728 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4729 tcg_temp_free_i32(r_const);
4730 gen_address_mask(dc, cpu_addr);
4731 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4732 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4733 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4734 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4735 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4736 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4737 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4738 }
4739 break;
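/*
 * ldd loads 64 bits into an even/odd register pair; an odd rd is
 * illegal.  The word from the lower (big-endian: more significant)
 * address goes to rd and the other word to rd + 1, hence the split of
 * cpu_tmp64 above.
 */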
4740 case 0x9: /* ldsb, load signed byte */
4741 gen_address_mask(dc, cpu_addr);
4742 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4743 break;
4744 case 0xa: /* ldsh, load signed halfword */
4745 gen_address_mask(dc, cpu_addr);
4746 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4747 break;
4748 case 0xd: /* ldstub -- XXX: should be atomic */
4749 {
4750 TCGv r_const;
4751
4752 gen_address_mask(dc, cpu_addr);
4753 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4754 r_const = tcg_const_tl(0xff);
4755 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4756 tcg_temp_free(r_const);
4757 }
4758 break;
4759 case 0x0f: /* swap, swap register with memory.
4760 XXX: should be atomic, like ldstub */
4761 CHECK_IU_FEATURE(dc, SWAP);
4762 gen_movl_reg_TN(rd, cpu_val);
4763 gen_address_mask(dc, cpu_addr);
4764 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4765 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4766 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4767 break;
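/*
 * The "alternate space" forms below take an explicit ASI.  On 32-bit
 * SPARC they are privileged and have no immediate form, hence the
 * IS_IMM and supervisor() checks; on SPARC64 an immediate form is
 * legal and uses the current %asi register instead.
 */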
4768 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4769 case 0x10: /* lda, V9 lduwa, load word alternate */
4770 #ifndef TARGET_SPARC64
4771 if (IS_IMM)
4772 goto illegal_insn;
4773 if (!supervisor(dc))
4774 goto priv_insn;
4775 #endif
4776 save_state(dc);
4777 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4778 break;
4779 case 0x11: /* lduba, load unsigned byte alternate */
4780 #ifndef TARGET_SPARC64
4781 if (IS_IMM)
4782 goto illegal_insn;
4783 if (!supervisor(dc))
4784 goto priv_insn;
4785 #endif
4786 save_state(dc);
4787 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4788 break;
4789 case 0x12: /* lduha, load unsigned halfword alternate */
4790 #ifndef TARGET_SPARC64
4791 if (IS_IMM)
4792 goto illegal_insn;
4793 if (!supervisor(dc))
4794 goto priv_insn;
4795 #endif
4796 save_state(dc);
4797 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4798 break;
4799 case 0x13: /* ldda, load double word alternate */
4800 #ifndef TARGET_SPARC64
4801 if (IS_IMM)
4802 goto illegal_insn;
4803 if (!supervisor(dc))
4804 goto priv_insn;
4805 #endif
4806 if (rd & 1)
4807 goto illegal_insn;
4808 save_state(dc);
4809 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4810 goto skip_move;
4811 case 0x19: /* ldsba, load signed byte alternate */
4812 #ifndef TARGET_SPARC64
4813 if (IS_IMM)
4814 goto illegal_insn;
4815 if (!supervisor(dc))
4816 goto priv_insn;
4817 #endif
4818 save_state(dc);
4819 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4820 break;
4821 case 0x1a: /* ldsha, load signed halfword alternate */
4822 #ifndef TARGET_SPARC64
4823 if (IS_IMM)
4824 goto illegal_insn;
4825 if (!supervisor(dc))
4826 goto priv_insn;
4827 #endif
4828 save_state(dc);
4829 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4830 break;
4831 case 0x1d: /* ldstuba -- XXX: should be atomic */
4832 #ifndef TARGET_SPARC64
4833 if (IS_IMM)
4834 goto illegal_insn;
4835 if (!supervisor(dc))
4836 goto priv_insn;
4837 #endif
4838 save_state(dc);
4839 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4840 break;
4841 case 0x1f: /* swapa, swap reg with alt. memory.
4842 XXX: should be atomic */
4843 CHECK_IU_FEATURE(dc, SWAP);
4844 #ifndef TARGET_SPARC64
4845 if (IS_IMM)
4846 goto illegal_insn;
4847 if (!supervisor(dc))
4848 goto priv_insn;
4849 #endif
4850 save_state(dc);
4851 gen_movl_reg_TN(rd, cpu_val);
4852 gen_swap_asi(cpu_val, cpu_addr, insn);
4853 break;
4854
4855 #ifndef TARGET_SPARC64
4856 case 0x30: /* ldc */
4857 case 0x31: /* ldcsr */
4858 case 0x33: /* lddc */
4859 goto ncp_insn;
4860 #endif
4861 #endif
4862 #ifdef TARGET_SPARC64
4863 case 0x08: /* V9 ldsw */
4864 gen_address_mask(dc, cpu_addr);
4865 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4866 break;
4867 case 0x0b: /* V9 ldx */
4868 gen_address_mask(dc, cpu_addr);
4869 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4870 break;
4871 case 0x18: /* V9 ldswa */
4872 save_state(dc);
4873 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4874 break;
4875 case 0x1b: /* V9 ldxa */
4876 save_state(dc);
4877 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4878 break;
4879 case 0x2d: /* V9 prefetch, no effect */
4880 goto skip_move;
4881 case 0x30: /* V9 ldfa */
4882 if (gen_trap_ifnofpu(dc)) {
4883 goto jmp_insn;
4884 }
4885 save_state(dc);
4886 gen_ldf_asi(cpu_addr, insn, 4, rd);
4887 gen_update_fprs_dirty(rd);
4888 goto skip_move;
4889 case 0x33: /* V9 lddfa */
4890 if (gen_trap_ifnofpu(dc)) {
4891 goto jmp_insn;
4892 }
4893 save_state(dc);
4894 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4895 gen_update_fprs_dirty(DFPREG(rd));
4896 goto skip_move;
4897 case 0x3d: /* V9 prefetcha, no effect */
4898 goto skip_move;
4899 case 0x32: /* V9 ldqfa */
4900 CHECK_FPU_FEATURE(dc, FLOAT128);
4901 if (gen_trap_ifnofpu(dc)) {
4902 goto jmp_insn;
4903 }
4904 save_state(dc);
4905 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4906 gen_update_fprs_dirty(QFPREG(rd));
4907 goto skip_move;
4908 #endif
4909 default:
4910 goto illegal_insn;
4911 }
4912 gen_movl_TN_reg(rd, cpu_val);
4913 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4914 skip_move: ;
4915 #endif
4916 } else if (xop >= 0x20 && xop < 0x24) {
4917 if (gen_trap_ifnofpu(dc)) {
4918 goto jmp_insn;
4919 }
4920 save_state(dc);
4921 switch (xop) {
4922 case 0x20: /* ldf, load fpreg */
4923 gen_address_mask(dc, cpu_addr);
4924 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4925 cpu_dst_32 = gen_dest_fpr_F();
4926 tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4927 gen_store_fpr_F(dc, rd, cpu_dst_32);
4928 break;
4929 case 0x21: /* ldfsr, V9 ldxfsr */
4930 #ifdef TARGET_SPARC64
4931 gen_address_mask(dc, cpu_addr);
4932 if (rd == 1) {
4933 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4934 gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4935 } else {
4936 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4937 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4938 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4939 }
4940 #else
4941 {
4942 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4943 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4944 }
4945 #endif
4946 break;
4947 case 0x22: /* ldqf, load quad fpreg */
4948 {
4949 TCGv_i32 r_const;
4950
4951 CHECK_FPU_FEATURE(dc, FLOAT128);
4952 r_const = tcg_const_i32(dc->mem_idx);
4953 gen_address_mask(dc, cpu_addr);
4954 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4955 tcg_temp_free_i32(r_const);
4956 gen_op_store_QT0_fpr(QFPREG(rd));
4957 gen_update_fprs_dirty(QFPREG(rd));
4958 }
4959 break;
4960 case 0x23: /* lddf, load double fpreg */
4961 gen_address_mask(dc, cpu_addr);
4962 cpu_dst_64 = gen_dest_fpr_D();
4963 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4964 gen_store_fpr_D(dc, rd, cpu_dst_64);
4965 break;
4966 default:
4967 goto illegal_insn;
4968 }
4969 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4970 xop == 0xe || xop == 0x1e) {
4971 gen_movl_reg_TN(rd, cpu_val);
4972 switch (xop) {
4973 case 0x4: /* st, store word */
4974 gen_address_mask(dc, cpu_addr);
4975 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4976 break;
4977 case 0x5: /* stb, store byte */
4978 gen_address_mask(dc, cpu_addr);
4979 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4980 break;
4981 case 0x6: /* sth, store halfword */
4982 gen_address_mask(dc, cpu_addr);
4983 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4984 break;
4985 case 0x7: /* std, store double word */
4986 if (rd & 1)
4987 goto illegal_insn;
4988 else {
4989 TCGv_i32 r_const;
4990
4991 save_state(dc);
4992 gen_address_mask(dc, cpu_addr);
4993 r_const = tcg_const_i32(7);
4994 /* XXX remove alignment check */
4995 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4996 tcg_temp_free_i32(r_const);
4997 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4998 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4999 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
5000 }
5001 break;
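/*
 * std mirrors ldd: rd must be even, and the pair is stored with rd at
 * the lower (more significant) address.  tcg_gen_concat_tl_i64 builds
 * the 64-bit value from rd + 1 (low half) and rd (high half).
 */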
5002 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5003 case 0x14: /* sta, V9 stwa, store word alternate */
5004 #ifndef TARGET_SPARC64
5005 if (IS_IMM)
5006 goto illegal_insn;
5007 if (!supervisor(dc))
5008 goto priv_insn;
5009 #endif
5010 save_state(dc);
5011 gen_st_asi(cpu_val, cpu_addr, insn, 4);
5012 dc->npc = DYNAMIC_PC;
5013 break;
5014 case 0x15: /* stba, store byte alternate */
5015 #ifndef TARGET_SPARC64
5016 if (IS_IMM)
5017 goto illegal_insn;
5018 if (!supervisor(dc))
5019 goto priv_insn;
5020 #endif
5021 save_state(dc);
5022 gen_st_asi(cpu_val, cpu_addr, insn, 1);
5023 dc->npc = DYNAMIC_PC;
5024 break;
5025 case 0x16: /* stha, store halfword alternate */
5026 #ifndef TARGET_SPARC64
5027 if (IS_IMM)
5028 goto illegal_insn;
5029 if (!supervisor(dc))
5030 goto priv_insn;
5031 #endif
5032 save_state(dc);
5033 gen_st_asi(cpu_val, cpu_addr, insn, 2);
5034 dc->npc = DYNAMIC_PC;
5035 break;
5036 case 0x17: /* stda, store double word alternate */
5037 #ifndef TARGET_SPARC64
5038 if (IS_IMM)
5039 goto illegal_insn;
5040 if (!supervisor(dc))
5041 goto priv_insn;
5042 #endif
5043 if (rd & 1)
5044 goto illegal_insn;
5045 else {
5046 save_state(dc);
5047 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
5048 }
5049 break;
5050 #endif
5051 #ifdef TARGET_SPARC64
5052 case 0x0e: /* V9 stx */
5053 gen_address_mask(dc, cpu_addr);
5054 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5055 break;
5056 case 0x1e: /* V9 stxa */
5057 save_state(dc);
5058 gen_st_asi(cpu_val, cpu_addr, insn, 8);
5059 dc->npc = DYNAMIC_PC;
5060 break;
5061 #endif
5062 default:
5063 goto illegal_insn;
5064 }
5065 } else if (xop > 0x23 && xop < 0x28) {
5066 if (gen_trap_ifnofpu(dc)) {
5067 goto jmp_insn;
5068 }
5069 save_state(dc);
5070 switch (xop) {
5071 case 0x24: /* stf, store fpreg */
5072 gen_address_mask(dc, cpu_addr);
5073 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5074 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
5075 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
5076 break;
5077 case 0x25: /* stfsr, V9 stxfsr */
5078 #ifdef TARGET_SPARC64
5079 gen_address_mask(dc, cpu_addr);
5080 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
5081 if (rd == 1)
5082 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
5083 else
5084 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
5085 #else
5086 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
5087 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
5088 #endif
5089 break;
5090 case 0x26:
5091 #ifdef TARGET_SPARC64
5092 /* V9 stqf, store quad fpreg */
5093 {
5094 TCGv_i32 r_const;
5095
5096 CHECK_FPU_FEATURE(dc, FLOAT128);
5097 gen_op_load_fpr_QT0(QFPREG(rd));
5098 r_const = tcg_const_i32(dc->mem_idx);
5099 gen_address_mask(dc, cpu_addr);
5100 gen_helper_stqf(cpu_env, cpu_addr, r_const);
5101 tcg_temp_free_i32(r_const);
5102 }
5103 break;
5104 #else /* !TARGET_SPARC64 */
5105 /* stdfq, store floating point queue */
5106 #if defined(CONFIG_USER_ONLY)
5107 goto illegal_insn;
5108 #else
5109 if (!supervisor(dc))
5110 goto priv_insn;
5111 if (gen_trap_ifnofpu(dc)) {
5112 goto jmp_insn;
5113 }
5114 goto nfq_insn;
5115 #endif
5116 #endif
5117 case 0x27: /* stdf, store double fpreg */
5118 gen_address_mask(dc, cpu_addr);
5119 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5120 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5121 break;
5122 default:
5123 goto illegal_insn;
5124 }
5125 } else if (xop > 0x33 && xop < 0x3f) {
5126 save_state(dc);
5127 switch (xop) {
5128 #ifdef TARGET_SPARC64
5129 case 0x34: /* V9 stfa */
5130 if (gen_trap_ifnofpu(dc)) {
5131 goto jmp_insn;
5132 }
5133 gen_stf_asi(cpu_addr, insn, 4, rd);
5134 break;
5135 case 0x36: /* V9 stqfa */
5136 {
5137 TCGv_i32 r_const;
5138
5139 CHECK_FPU_FEATURE(dc, FLOAT128);
5140 if (gen_trap_ifnofpu(dc)) {
5141 goto jmp_insn;
5142 }
5143 r_const = tcg_const_i32(7);
5144 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5145 tcg_temp_free_i32(r_const);
5146 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5147 }
5148 break;
5149 case 0x37: /* V9 stdfa */
5150 if (gen_trap_ifnofpu(dc)) {
5151 goto jmp_insn;
5152 }
5153 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5154 break;
5155 case 0x3c: /* V9 casa */
5156 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5157 gen_movl_TN_reg(rd, cpu_val);
5158 break;
5159 case 0x3e: /* V9 casxa */
5160 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5161 gen_movl_TN_reg(rd, cpu_val);
5162 break;
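/*
 * casa/casxa implement compare-and-swap: the datum at [rs1] is
 * compared with rs2 and, if equal, replaced by rd; rd always receives
 * the old memory value, which is why it is written back
 * unconditionally.
 */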
5163 #else
5164 case 0x34: /* stc */
5165 case 0x35: /* stcsr */
5166 case 0x36: /* stdcq */
5167 case 0x37: /* stdc */
5168 goto ncp_insn;
5169 #endif
5170 default:
5171 goto illegal_insn;
5172 }
5173 } else
5174 goto illegal_insn;
5175 }
5176 break;
5177 }
5178 /* default case for non-jump instructions */
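/*
 * Three cases: npc == DYNAMIC_PC means the next pc is only known at
 * run time, so emit the generic advance; npc == JUMP_PC means npc is
 * a run-time choice between the two targets of a preceding
 * conditional branch, which can be resolved now as a static two-way
 * jump; otherwise both values are known and translation simply steps
 * forward by one instruction.
 */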
5179 if (dc->npc == DYNAMIC_PC) {
5180 dc->pc = DYNAMIC_PC;
5181 gen_op_next_insn();
5182 } else if (dc->npc == JUMP_PC) {
5183 /* we can do a static jump */
5184 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5185 dc->is_br = 1;
5186 } else {
5187 dc->pc = dc->npc;
5188 dc->npc = dc->npc + 4;
5189 }
5190 jmp_insn:
5191 goto egress;
5192 illegal_insn:
5193 {
5194 TCGv_i32 r_const;
5195
5196 save_state(dc);
5197 r_const = tcg_const_i32(TT_ILL_INSN);
5198 gen_helper_raise_exception(cpu_env, r_const);
5199 tcg_temp_free_i32(r_const);
5200 dc->is_br = 1;
5201 }
5202 goto egress;
5203 unimp_flush:
5204 {
5205 TCGv_i32 r_const;
5206
5207 save_state(dc);
5208 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5209 gen_helper_raise_exception(cpu_env, r_const);
5210 tcg_temp_free_i32(r_const);
5211 dc->is_br = 1;
5212 }
5213 goto egress;
5214 #if !defined(CONFIG_USER_ONLY)
5215 priv_insn:
5216 {
5217 TCGv_i32 r_const;
5218
5219 save_state(dc);
5220 r_const = tcg_const_i32(TT_PRIV_INSN);
5221 gen_helper_raise_exception(cpu_env, r_const);
5222 tcg_temp_free_i32(r_const);
5223 dc->is_br = 1;
5224 }
5225 goto egress;
5226 #endif
5227 nfpu_insn:
5228 save_state(dc);
5229 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5230 dc->is_br = 1;
5231 goto egress;
5232 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5233 nfq_insn:
5234 save_state(dc);
5235 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5236 dc->is_br = 1;
5237 goto egress;
5238 #endif
5239 #ifndef TARGET_SPARC64
5240 ncp_insn:
5241 {
5242 TCGv_i32 r_const;
5243
5244 save_state(dc);
5245 r_const = tcg_const_i32(TT_NCP_INSN);
5246 gen_helper_raise_exception(cpu_env, r_const);
5247 tcg_temp_free_i32(r_const);
5248 dc->is_br = 1;
5249 }
5250 goto egress;
5251 #endif
5252 egress:
5253 tcg_temp_free(cpu_tmp1);
5254 tcg_temp_free(cpu_tmp2);
5255 if (dc->n_t32 != 0) {
5256 int i;
5257 for (i = dc->n_t32 - 1; i >= 0; --i) {
5258 tcg_temp_free_i32(dc->t32[i]);
5259 }
5260 dc->n_t32 = 0;
5261 }
5262 }
5263
5264 static inline void gen_intermediate_code_internal(TranslationBlock *tb,
5265 int spc, CPUSPARCState *env)
5266 {
5267 target_ulong pc_start, last_pc;
5268 uint16_t *gen_opc_end;
5269 DisasContext dc1, *dc = &dc1;
5270 CPUBreakpoint *bp;
5271 int j, lj = -1;
5272 int num_insns;
5273 int max_insns;
5274 unsigned int insn;
5275
5276 memset(dc, 0, sizeof(DisasContext));
5277 dc->tb = tb;
5278 pc_start = tb->pc;
5279 dc->pc = pc_start;
5280 last_pc = dc->pc;
5281 dc->npc = (target_ulong) tb->cs_base;
5282 dc->cc_op = CC_OP_DYNAMIC;
5283 dc->mem_idx = cpu_mmu_index(env);
5284 dc->def = env->def;
5285 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5286 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5287 dc->singlestep = (env->singlestep_enabled || singlestep);
5288 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5289
5290 cpu_tmp0 = tcg_temp_new();
5291 cpu_tmp32 = tcg_temp_new_i32();
5292 cpu_tmp64 = tcg_temp_new_i64();
5293
5294 cpu_dst = tcg_temp_local_new();
5295
5296 // loads and stores
5297 cpu_val = tcg_temp_local_new();
5298 cpu_addr = tcg_temp_local_new();
5299
5300 num_insns = 0;
5301 max_insns = tb->cflags & CF_COUNT_MASK;
5302 if (max_insns == 0)
5303 max_insns = CF_COUNT_MASK;
5304 gen_icount_start();
5305 do {
5306 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5307 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5308 if (bp->pc == dc->pc) {
5309 if (dc->pc != pc_start)
5310 save_state(dc);
5311 gen_helper_debug(cpu_env);
5312 tcg_gen_exit_tb(0);
5313 dc->is_br = 1;
5314 goto exit_gen_loop;
5315 }
5316 }
5317 }
5318 if (spc) {
5319 qemu_log("Search PC...\n");
5320 j = gen_opc_ptr - gen_opc_buf;
5321 if (lj < j) {
5322 lj++;
5323 while (lj < j)
5324 gen_opc_instr_start[lj++] = 0;
5325 gen_opc_pc[lj] = dc->pc;
5326 gen_opc_npc[lj] = dc->npc;
5327 gen_opc_instr_start[lj] = 1;
5328 gen_opc_icount[lj] = num_insns;
5329 }
5330 }
5331 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5332 gen_io_start();
5333 last_pc = dc->pc;
5334 insn = cpu_ldl_code(env, dc->pc);
5335 disas_sparc_insn(dc, insn);
5336 num_insns++;
5337
5338 if (dc->is_br)
5339 break;
5340 /* if the next PC is different, we abort now */
5341 if (dc->pc != (last_pc + 4))
5342 break;
5343 /* if we reach a page boundary, we stop generation so that the
5344 PC of a TT_TFAULT exception is always in the right page */
5345 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5346 break;
5347 /* if single step mode, we generate only one instruction and
5348 generate an exception */
5349 if (dc->singlestep) {
5350 break;
5351 }
5352 } while ((gen_opc_ptr < gen_opc_end) &&
5353 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5354 num_insns < max_insns);
5355
5356 exit_gen_loop:
5357 tcg_temp_free(cpu_addr);
5358 tcg_temp_free(cpu_val);
5359 tcg_temp_free(cpu_dst);
5360 tcg_temp_free_i64(cpu_tmp64);
5361 tcg_temp_free_i32(cpu_tmp32);
5362 tcg_temp_free(cpu_tmp0);
5363
5364 if (tb->cflags & CF_LAST_IO)
5365 gen_io_end();
5366 if (!dc->is_br) {
5367 if (dc->pc != DYNAMIC_PC &&
5368 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5369 /* static PC and NPC: we can use direct chaining */
5370 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5371 } else {
5372 if (dc->pc != DYNAMIC_PC)
5373 tcg_gen_movi_tl(cpu_pc, dc->pc);
5374 save_npc(dc);
5375 tcg_gen_exit_tb(0);
5376 }
5377 }
5378 gen_icount_end(tb, num_insns);
5379 *gen_opc_ptr = INDEX_op_end;
5380 if (spc) {
5381 j = gen_opc_ptr - gen_opc_buf;
5382 lj++;
5383 while (lj <= j)
5384 gen_opc_instr_start[lj++] = 0;
5385 #if 0
5386 log_page_dump();
5387 #endif
5388 gen_opc_jump_pc[0] = dc->jump_pc[0];
5389 gen_opc_jump_pc[1] = dc->jump_pc[1];
5390 } else {
5391 tb->size = last_pc + 4 - pc_start;
5392 tb->icount = num_insns;
5393 }
5394 #ifdef DEBUG_DISAS
5395 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5396 qemu_log("--------------\n");
5397 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5398 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5399 qemu_log("\n");
5400 }
5401 #endif
5402 }
5403
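/*
 * Two entry points share the translator core: gen_intermediate_code()
 * performs a normal translation, while gen_intermediate_code_pc()
 * re-runs it with spc set so that per-op pc/npc/icount records are
 * produced for restore_state_to_opc() after a fault.
 */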
5404 void gen_intermediate_code(CPUSPARCState *env, TranslationBlock *tb)
5405 {
5406 gen_intermediate_code_internal(tb, 0, env);
5407 }
5408
5409 void gen_intermediate_code_pc(CPUSPARCState *env, TranslationBlock *tb)
5410 {
5411 gen_intermediate_code_internal(tb, 1, env);
5412 }
5413
5414 void gen_intermediate_code_init(CPUSPARCState *env)
5415 {
5416 unsigned int i;
5417 static int inited;
5418 static const char * const gregnames[8] = {
5419 NULL, // g0 not used
5420 "g1",
5421 "g2",
5422 "g3",
5423 "g4",
5424 "g5",
5425 "g6",
5426 "g7",
5427 };
5428 static const char * const fregnames[32] = {
5429 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5430 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5431 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5432 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5433 };
5434
5435 /* init various static tables */
5436 if (!inited) {
5437 inited = 1;
5438
5439 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5440 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5441 offsetof(CPUSPARCState, regwptr),
5442 "regwptr");
5443 #ifdef TARGET_SPARC64
5444 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5445 "xcc");
5446 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5447 "asi");
5448 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5449 "fprs");
5450 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5451 "gsr");
5452 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5453 offsetof(CPUSPARCState, tick_cmpr),
5454 "tick_cmpr");
5455 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5456 offsetof(CPUSPARCState, stick_cmpr),
5457 "stick_cmpr");
5458 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5459 offsetof(CPUSPARCState, hstick_cmpr),
5460 "hstick_cmpr");
5461 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5462 "hintp");
5463 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5464 "htba");
5465 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5466 "hver");
5467 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5468 offsetof(CPUSPARCState, ssr), "ssr");
5469 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5470 offsetof(CPUSPARCState, version), "ver");
5471 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5472 offsetof(CPUSPARCState, softint),
5473 "softint");
5474 #else
5475 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5476 "wim");
5477 #endif
5478 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5479 "cond");
5480 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5481 "cc_src");
5482 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5483 offsetof(CPUSPARCState, cc_src2),
5484 "cc_src2");
5485 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5486 "cc_dst");
5487 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5488 "cc_op");
5489 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5490 "psr");
5491 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5492 "fsr");
5493 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5494 "pc");
5495 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5496 "npc");
5497 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5498 #ifndef CONFIG_USER_ONLY
5499 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5500 "tbr");
5501 #endif
5502 for (i = 1; i < 8; i++) {
5503 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5504 offsetof(CPUSPARCState, gregs[i]),
5505 gregnames[i]);
5506 }
5507 for (i = 0; i < TARGET_DPREGS; i++) {
5508 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5509 offsetof(CPUSPARCState, fpr[i]),
5510 fregnames[i]);
5511 }
5512
5513 /* register helpers */
5514
5515 #define GEN_HELPER 2
5516 #include "helper.h"
5517 }
5518 }
5519
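/*
 * Called after an exception interrupts a TB: pc is recovered from the
 * per-op record, and npc is either already live in the CPU state
 * (DYNAMIC_PC), chosen between the recorded jump targets according to
 * the run-time condition (JUMP_PC), or taken verbatim.
 */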
5520 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5521 {
5522 target_ulong npc;
5523 env->pc = gen_opc_pc[pc_pos];
5524 npc = gen_opc_npc[pc_pos];
5525 if (npc == DYNAMIC_PC) {
5526 /* dynamic NPC: already stored */
5527 } else if (npc == JUMP_PC) {
5528 /* jump PC: use 'cond' and the jump targets of the translation */
5529 if (env->cond) {
5530 env->npc = gen_opc_jump_pc[0];
5531 } else {
5532 env->npc = gen_opc_jump_pc[1];
5533 }
5534 } else {
5535 env->npc = npc;
5536 }
5537
5538 /* flush pending conditional evaluations before exposing cpu state */
5539 if (CC_OP != CC_OP_FLAGS) {
5540 helper_compute_psr(env);
5541 }
5542 }