/*
 * target-sparc/translate.c (QEMU) — recovered from a git blame web view
 * ("target-sparc: Use gen_load_gpr in get_src[12]"); the web-page header
 * lines have been replaced by this note.
 */
1/*
2 SPARC translation
3
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
6
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
11
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
19 */
20
21#include <stdarg.h>
22#include <stdlib.h>
23#include <stdio.h>
24#include <string.h>
25#include <inttypes.h>
26
27#include "cpu.h"
28#include "disas.h"
29#include "helper.h"
30#include "tcg-op.h"
31
32#define GEN_HELPER 1
33#include "helper.h"
34
35#define DEBUG_DISAS
36
37#define DYNAMIC_PC 1 /* dynamic pc value */
38#define JUMP_PC 2 /* dynamic pc value which takes only two values
39 according to jump_pc[T2] */
40
41/* global register indexes */
42static TCGv_ptr cpu_env, cpu_regwptr;
43static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44static TCGv_i32 cpu_cc_op;
45static TCGv_i32 cpu_psr;
46static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47static TCGv cpu_y;
48#ifndef CONFIG_USER_ONLY
49static TCGv cpu_tbr;
50#endif
51static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52#ifdef TARGET_SPARC64
53static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54static TCGv cpu_gsr;
55static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57static TCGv_i32 cpu_softint;
58#else
59static TCGv cpu_wim;
60#endif
61/* local register indexes (only used inside old micro ops) */
62static TCGv cpu_tmp0;
63static TCGv_i32 cpu_tmp32;
64static TCGv_i64 cpu_tmp64;
65/* Floating point registers */
66static TCGv_i64 cpu_fpr[TARGET_DPREGS];
67
68static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69static target_ulong gen_opc_jump_pc[2];
70
71#include "gen-icount.h"
72
73typedef struct DisasContext {
74 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
75 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
77 int is_br;
78 int mem_idx;
79 int fpu_enabled;
80 int address_mask_32bit;
81 int singlestep;
82 uint32_t cc_op; /* current CC operation */
83 struct TranslationBlock *tb;
84 sparc_def_t *def;
85 TCGv_i32 t32[3];
86 TCGv ttl[5];
87 int n_t32;
88 int n_ttl;
89} DisasContext;
90
/* A runtime comparison: "c1 <cond> c2".  When is_bool is set, c1 holds an
   already-computed 0/1 value and the comparison is c1 != 0 (c2 is the
   constant zero).  g1/g2 mark c1/c2 as TCG globals that must NOT be freed
   by free_compare().  */
typedef struct {
    TCGCond cond;
    bool is_bool;
    bool g1, g2;
    TCGv c1, c2;
} DisasCompare;
97
98// This function uses non-native bit order
99#define GET_FIELD(X, FROM, TO) \
100 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
101
102// This function uses the order in the manuals, i.e. bit 0 is 2^0
103#define GET_FIELD_SP(X, FROM, TO) \
104 GET_FIELD(X, 31 - (TO), 31 - (FROM))
105
106#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
107#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
108
109#ifdef TARGET_SPARC64
110#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
111#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
112#else
113#define DFPREG(r) (r & 0x1e)
114#define QFPREG(r) (r & 0x1c)
115#endif
116
117#define UA2005_HTRAP_MASK 0xff
118#define V8_TRAP_MASK 0x7f
119
/*
 * Sign-extend the low LEN bits of X into a full signed 32-bit value,
 * e.g. sign_extend(0x80, 8) == -128.
 *
 * The left shift is performed on an unsigned value: left-shifting a
 * negative signed int is undefined behavior in C.  The final arithmetic
 * right shift of a negative value is implementation-defined but behaves
 * as an arithmetic shift on all compilers QEMU supports.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((unsigned int)x << len) >> len;
}
125
126#define IS_IMM (insn & (1<<13))
127
/* Mark the half of the FP register file containing register RD as dirty
   in FPRS: bit 0 for the lower 32 singles, bit 1 for the upper bank.
   No-op on non-sparc64 targets.  */
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    int dirty_bit = (rd < 32 ? 1 : 2);
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, dirty_bit);
#endif
}
134
135/* floating point registers moves */
/* Return an i32 holding single-precision FP register SRC.  Singles are
   packed two-per-entry in the 64-bit cpu_fpr[] array: the odd register
   of a pair lives in the low 32 bits, the even one in the high 32.  */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    /* On a 32-bit host an i64 temp is a register pair; alias the half
       directly instead of copying.  */
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        /* Low half: reinterpret the i64 temp as an i32 in place.  */
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        /* High half: shift it down into a fresh i32 temp.  The temp is
           recorded in dc->t32[] so it can be freed at end of insn.  */
        TCGv_i32 ret = tcg_temp_new_i32();
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        dc->t32[dc->n_t32++] = ret;
        assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));

        return ret;
    }
#endif
}
162
/* Store i32 V into single-precision FP register DST (the matching half
   of cpu_fpr[dst / 2]) and mark the FPRS dirty bits.  */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    /* Deposit V into the proper 32-bit half, preserving the other half:
       odd registers occupy bits 0..31, even registers bits 32..63.  */
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}
178
/* Return a scratch i32 suitable as a single-precision result
   destination (the shared per-insn temporary).  */
static TCGv_i32 gen_dest_fpr_F(void)
{
    return cpu_tmp32;
}
183
184static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
185{
186 src = DFPREG(src);
187 return cpu_fpr[src / 2];
188}
189
190static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
191{
192 dst = DFPREG(dst);
193 tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
194 gen_update_fprs_dirty(dst);
195}
196
/* Return a scratch i64 suitable as a double-precision result
   destination (the shared per-insn temporary).  */
static TCGv_i64 gen_dest_fpr_D(void)
{
    return cpu_tmp64;
}
201
/* Copy quad-precision FP register pair SRC into the env->qt0 staging
   area consumed by quad helper functions.  */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Same as above, for the second helper operand slot env->qt1.  */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Copy the env->qt0 staging area back into quad FP register pair DST.
   Note: callers are responsible for gen_update_fprs_dirty().  */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
225
#ifdef TARGET_SPARC64
/* Move quad-precision FP register RS to RD (two 64-bit halves) and mark
   FPRS dirty.  */
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif
237
238/* moves */
239#ifdef CONFIG_USER_ONLY
240#define supervisor(dc) 0
241#ifdef TARGET_SPARC64
242#define hypervisor(dc) 0
243#endif
244#else
245#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
246#ifdef TARGET_SPARC64
247#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
248#else
249#endif
250#endif
251
252#ifdef TARGET_SPARC64
253#ifndef TARGET_ABI32
254#define AM_CHECK(dc) ((dc)->address_mask_32bit)
255#else
256#define AM_CHECK(dc) (1)
257#endif
258#endif
259
260static inline void gen_address_mask(DisasContext *dc, TCGv addr)
261{
262#ifdef TARGET_SPARC64
263 if (AM_CHECK(dc))
264 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
265#endif
266}
267
268static inline TCGv get_temp_tl(DisasContext *dc)
269{
270 TCGv t;
271 assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
272 dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
273 return t;
274}
275
276static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
277{
278 if (reg == 0 || reg >= 8) {
279 TCGv t = get_temp_tl(dc);
280 if (reg == 0) {
281 tcg_gen_movi_tl(t, 0);
282 } else {
283 tcg_gen_ld_tl(t, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
284 }
285 return t;
286 } else {
287 return cpu_gregs[reg];
288 }
289}
290
291static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
292{
293 if (reg > 0) {
294 if (reg < 8) {
295 tcg_gen_mov_tl(cpu_gregs[reg], v);
296 } else {
297 tcg_gen_st_tl(v, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
298 }
299 }
300}
301
302static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
303{
304 if (reg == 0 || reg >= 8) {
305 return get_temp_tl(dc);
306 } else {
307 return cpu_gregs[reg];
308 }
309}
310
311static inline void gen_movl_reg_TN(int reg, TCGv tn)
312{
313 if (reg == 0)
314 tcg_gen_movi_tl(tn, 0);
315 else if (reg < 8)
316 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
317 else {
318 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
319 }
320}
321
322static inline void gen_movl_TN_reg(int reg, TCGv tn)
323{
324 if (reg == 0)
325 return;
326 else if (reg < 8)
327 tcg_gen_mov_tl(cpu_gregs[reg], tn);
328 else {
329 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
330 }
331}
332
/* Emit a jump to (PC, NPC).  When both targets lie on the same guest
   page as this TB and we are not single-stepping, chain directly to the
   next TB (slot TB_NUM); otherwise exit to the main loop so the target
   can be looked up (and breakpoints/interrupts honored).  */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
354
// XXX suboptimal
/* The four helpers below extract one PSR condition-code bit from the
   32-bit flags value SRC into REG as 0 or 1.  */

/* N (negative) */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Z (zero) */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* V (overflow) */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* C (carry) */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
383
/* dst = src1 + immediate src2, latching both operands and the result in
   cpu_cc_src/cpu_cc_src2/cpu_cc_dst for later lazy flag evaluation.  */
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* Register-register form of the above.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
399
/* Recover the 32-bit carry from the latched state of a previous add:
   carry = (uint32_t)cc_dst < (uint32_t)cc_src.  Returns a fresh i32
   temporary holding 0 or 1; caller frees it.  */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src) */
#if TARGET_LONG_BITS == 64
    /* Truncate the 64-bit latched values to their low 32 bits first.  */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
425
/* Recover the 32-bit borrow from the latched state of a previous
   subtract: carry = (uint32_t)cc_src < (uint32_t)cc_src2.  Returns a
   fresh i32 temporary holding 0 or 1; caller frees it.  */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2) */
#if TARGET_LONG_BITS == 64
    /* Truncate the 64-bit latched operands to their low 32 bits.  */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
451
/* Generate dst = src1 + src2 + C (ADDX/ADDC), obtaining the carry bit
   as cheaply as the current lazy cc state permits: a known-zero carry
   degenerates to a plain add, a carry from a prior add/sub is recovered
   from the latched operands, and anything else calls the flags helper.
   If UPDATE_CC, latch operands/result and switch to CC_OP_ADDX.  */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
527
/* dst = src1 - immediate src2, latching operands/result for lazy flags.
   A zero immediate makes the result equal src1, so the cheaper
   CC_OP_LOGIC evaluation can be used instead of CC_OP_SUB.  */
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
543
/* dst = src1 - src2, latching operands and result in the cc_* globals
   for later lazy flag evaluation (caller sets cc_op).  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
551
/* Generate dst = src1 - src2 - C (SUBX/SUBC); mirror image of
   gen_op_addx_int.  The carry (borrow) is recovered from the lazy cc
   state where possible, otherwise computed by helper.  If UPDATE_CC,
   latch operands/result and switch to CC_OP_SUBX.  */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
627
/* MULScc: one step of the V8 iterative multiply.  Conditionally adds
   src2 (gated by %y bit 0), shifts %y right injecting src1's low bit,
   and shifts src1 right injecting N^V; operands/result are latched in
   the cc_* globals.  The retained micro-op comments describe each step.  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero;

    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
671
/* 32x32 -> 64-bit multiply: the high 32 bits of the product go to %y,
   the product (truncated to target width) to DST.  SIGN_EXT selects
   signed (SMUL) versus unsigned (UMUL) widening of the operands.
   Uses cpu_tmp0 as scratch for the high half.  */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* %y <- high 32 bits of the product.  */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}
708
/* UMUL: unsigned 32x32 multiply (wrapper over gen_op_multiply).  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32 multiply (wrapper over gen_op_multiply).  */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
720
/* Integer condition-code evaluators: each computes its branch condition
   as 0/1 in DST from the PSR-format flags value SRC.  The short comment
   above each function gives the condition in terms of N/Z/V/C.  Most
   use cpu_tmp0 as scratch.  */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
839
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered

  The evaluators below compute each FBcc condition as 0/1 in DST from
  the FSR value SRC; FCC_OFFSET selects which of the four fcc fields is
  examined.  The comment above each gives the condition in terms of the
  fcc encoding and the FCC0/FCC1 bits.  cpu_tmp0 is used as scratch.
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
989
/* Two-way conditional exit: if R_COND is nonzero continue at PC1,
   otherwise at PC2 (each with its sequential npc), using TB-chaining
   slots 0 and 1.  */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
1004
/* Annulled conditional branch: if R_COND is nonzero, execute the delay
   slot at PC2 with the branch target PC1 as npc; if zero, annul the
   delay slot and continue at PC2 + 4.  */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
1019
/* Resolve a condition-dependent npc at runtime:
   cpu_npc = cpu_cond ? jump_pc[0] : jump_pc[1].  */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
1032
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        /* Materialize the pending two-way npc into cpu_npc.  */
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
1042
/* Ensure cpu_npc holds the correct next-PC: resolve a pending JUMP_PC,
   or store a known-static npc; a DYNAMIC_PC npc is already in place.  */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1052
/* Materialize the lazily-computed condition codes into the PSR via
   helper, switching the lazy state to CC_OP_FLAGS.  */
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}
1060
/* Flush the translator's pc/npc into the CPU state (e.g. before an
   operation that may raise an exception).  */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
1066
/* Advance pc to npc, resolving a pending JUMP_PC or dynamic npc into
   cpu_pc where necessary; a static npc is tracked purely at translate
   time.  */
static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
1080
/* Emit the sequential advance: pc = npc; npc += 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1086
/* Release the temporaries held by CMP; operands flagged as globals
   (g1/g2) are not freed.  */
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
1096
/* Fill in CMP so that evaluating "c1 cmp->cond c2" yields branch
   condition COND (4-bit icc/xcc condition field; XCC selects the 64-bit
   flags on sparc64).  Exploits the lazy cc state: after a logic op or a
   subtract, most conditions reduce to a direct comparison of the
   latched operands; otherwise the flags are materialized and a 0/1
   boolean is computed.  */
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    /* Condition -> TCG comparison of (cc_src, cc_src2) after SUBcc;
       -1 entries need the real flags.  */
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    /* Condition -> TCG comparison of (cc_dst, 0) after a logic op,
       where C and V are known zero.  */
    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        cmp->is_bool = false;
        cmp->g2 = false;
        cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            /* icc view: compare only the low 32 bits, sign-extended.  */
            cmp->g1 = false;
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->g1 = true;
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            /* neg/pos depend only on the sign of the result.  */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7:  /* overflow */
        case 15: /* !overflow */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        /* Flags not in an exploitable form: materialize them.  */
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
1270
/* Build a DisasCompare for floating-point condition COND on fcc[CC].
   The result is always reduced to a 0/1 boolean in a fresh temporary
   (c1 != 0), so both c1 and c2 are locals that free_compare releases. */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result. */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    /* Bit offset of the selected fcc field, relative to fcc0 in the FSR
       (fcc1..fcc3 live at bits 32/34/36; fcc0 at bit 10 — hence the
       "- 10" adjustments).  Out-of-range cc falls back to fcc0. */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* Evaluate the FP condition into r_dst. */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1350
1351static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1352 DisasContext *dc)
1353{
1354 DisasCompare cmp;
1355 gen_compare(&cmp, cc, cond, dc);
1356
1357 /* The interface is to return a boolean in r_dst. */
1358 if (cmp.is_bool) {
1359 tcg_gen_mov_tl(r_dst, cmp.c1);
1360 } else {
1361 tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1362 }
1363
1364 free_compare(&cmp);
1365}
1366
1367static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1368{
1369 DisasCompare cmp;
1370 gen_fcompare(&cmp, cc, cond);
1371
1372 /* The interface is to return a boolean in r_dst. */
1373 if (cmp.is_bool) {
1374 tcg_gen_mov_tl(r_dst, cmp.c1);
1375 } else {
1376 tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1377 }
1378
1379 free_compare(&cmp);
1380}
1381
1382#ifdef TARGET_SPARC64
/* Register-condition table for BPr-style insns, stored INVERTED:
   each entry is the TCG condition for the complement of the rcond
   encoding; gen_compare_reg() applies tcg_invert_cond() to undo this.
   Encodings 0 and 4 are unused (-1). */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1394
/* Build a DisasCompare testing R_SRC against zero for register
   condition COND (see gen_tcg_cond_reg for the inverted table). */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;      /* c1 is the caller's value; do not free */
    cmp->g2 = false;     /* c2 is a constant we own; freed by free_compare */
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}
1404
1405static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1406{
1407 DisasCompare cmp;
1408 gen_compare_reg(&cmp, cond, r_src);
1409
1410 /* The interface is to return a boolean in r_dst. */
1411 tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1412
1413 free_compare(&cmp);
1414}
1415#endif
1416
/* Translate an integer conditional branch (Bicc/BPcc).  OFFSET is the
   sign-extended, already-shifted displacement; CC selects icc (0) or
   xcc (1); the condition and annul bit come from INSN. */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With 32-bit address masking in effect, targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken; the annul bit skips the delay slot */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: the delay slot is not executed */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* truly conditional: evaluate the condition into cpu_cond now */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            /* record both possible next pcs; resolved after the delay slot */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
1465
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).
   Identical structure to do_branch(), but the condition comes from
   fcc[CC] via gen_fcond(). */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With 32-bit address masking in effect, targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken; the annul bit skips the delay slot */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: the delay slot is not executed */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* truly conditional: evaluate the FP condition into cpu_cond */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_cond);
            dc->is_br = 1;
        } else {
            /* record both possible next pcs; resolved after the delay slot */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
1514
1515#ifdef TARGET_SPARC64
/* Translate a V9 branch-on-register-condition (BPr).  The condition
   tests R_REG against zero, so there is no constant-taken/untaken
   shortcut as in do_branch(). */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* 32-bit address masking applies to the branch target. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, cpu_cond);
        dc->is_br = 1;
    } else {
        /* record both possible next pcs; resolved after the delay slot */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1542
1543static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1544{
1545 switch (fccno) {
1546 case 0:
1547 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1548 break;
1549 case 1:
1550 gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1551 break;
1552 case 2:
1553 gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1554 break;
1555 case 3:
1556 gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1557 break;
1558 }
1559}
1560
1561static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1562{
1563 switch (fccno) {
1564 case 0:
1565 gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1566 break;
1567 case 1:
1568 gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1569 break;
1570 case 2:
1571 gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1572 break;
1573 case 3:
1574 gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1575 break;
1576 }
1577}
1578
1579static inline void gen_op_fcmpq(int fccno)
1580{
1581 switch (fccno) {
1582 case 0:
1583 gen_helper_fcmpq(cpu_env);
1584 break;
1585 case 1:
1586 gen_helper_fcmpq_fcc1(cpu_env);
1587 break;
1588 case 2:
1589 gen_helper_fcmpq_fcc2(cpu_env);
1590 break;
1591 case 3:
1592 gen_helper_fcmpq_fcc3(cpu_env);
1593 break;
1594 }
1595}
1596
1597static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1598{
1599 switch (fccno) {
1600 case 0:
1601 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1602 break;
1603 case 1:
1604 gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1605 break;
1606 case 2:
1607 gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1608 break;
1609 case 3:
1610 gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1611 break;
1612 }
1613}
1614
1615static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1616{
1617 switch (fccno) {
1618 case 0:
1619 gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1620 break;
1621 case 1:
1622 gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1623 break;
1624 case 2:
1625 gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1626 break;
1627 case 3:
1628 gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1629 break;
1630 }
1631}
1632
1633static inline void gen_op_fcmpeq(int fccno)
1634{
1635 switch (fccno) {
1636 case 0:
1637 gen_helper_fcmpeq(cpu_env);
1638 break;
1639 case 1:
1640 gen_helper_fcmpeq_fcc1(cpu_env);
1641 break;
1642 case 2:
1643 gen_helper_fcmpeq_fcc2(cpu_env);
1644 break;
1645 case 3:
1646 gen_helper_fcmpeq_fcc3(cpu_env);
1647 break;
1648 }
1649}
1650
1651#else
1652
/* Pre-V9 targets have a single fcc field, so FCCNO is ignored by all
   of the comparison wrappers below. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
1682#endif
1683
1684static inline void gen_op_fpexception_im(int fsr_flags)
1685{
1686 TCGv_i32 r_const;
1687
1688 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1689 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1690 r_const = tcg_const_i32(TT_FP_EXCP);
1691 gen_helper_raise_exception(cpu_env, r_const);
1692 tcg_temp_free_i32(r_const);
1693}
1694
/* If the FPU is disabled, raise TT_NFPU_INSN and end the TB.
   Returns 1 when the trap was emitted (caller must stop translating
   this insn), 0 otherwise.  The check compiles away for user-only
   builds. */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        save_state(dc);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
1711
/* Clear the current IEEE exception bits and the FTT field of FSR. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1716
1717static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1718 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1719{
1720 TCGv_i32 dst, src;
1721
1722 src = gen_load_fpr_F(dc, rs);
1723 dst = gen_dest_fpr_F();
1724
1725 gen(dst, cpu_env, src);
1726
1727 gen_store_fpr_F(dc, rd, dst);
1728}
1729
1730static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1731 void (*gen)(TCGv_i32, TCGv_i32))
1732{
1733 TCGv_i32 dst, src;
1734
1735 src = gen_load_fpr_F(dc, rs);
1736 dst = gen_dest_fpr_F();
1737
1738 gen(dst, src);
1739
1740 gen_store_fpr_F(dc, rd, dst);
1741}
1742
1743static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1744 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1745{
1746 TCGv_i32 dst, src1, src2;
1747
1748 src1 = gen_load_fpr_F(dc, rs1);
1749 src2 = gen_load_fpr_F(dc, rs2);
1750 dst = gen_dest_fpr_F();
1751
1752 gen(dst, cpu_env, src1, src2);
1753
1754 gen_store_fpr_F(dc, rd, dst);
1755}
1756
1757#ifdef TARGET_SPARC64
1758static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1759 void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1760{
1761 TCGv_i32 dst, src1, src2;
1762
1763 src1 = gen_load_fpr_F(dc, rs1);
1764 src2 = gen_load_fpr_F(dc, rs2);
1765 dst = gen_dest_fpr_F();
1766
1767 gen(dst, src1, src2);
1768
1769 gen_store_fpr_F(dc, rd, dst);
1770}
1771#endif
1772
1773static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1774 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1775{
1776 TCGv_i64 dst, src;
1777
1778 src = gen_load_fpr_D(dc, rs);
1779 dst = gen_dest_fpr_D();
1780
1781 gen(dst, cpu_env, src);
1782
1783 gen_store_fpr_D(dc, rd, dst);
1784}
1785
1786#ifdef TARGET_SPARC64
1787static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1788 void (*gen)(TCGv_i64, TCGv_i64))
1789{
1790 TCGv_i64 dst, src;
1791
1792 src = gen_load_fpr_D(dc, rs);
1793 dst = gen_dest_fpr_D();
1794
1795 gen(dst, src);
1796
1797 gen_store_fpr_D(dc, rd, dst);
1798}
1799#endif
1800
1801static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1802 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1803{
1804 TCGv_i64 dst, src1, src2;
1805
1806 src1 = gen_load_fpr_D(dc, rs1);
1807 src2 = gen_load_fpr_D(dc, rs2);
1808 dst = gen_dest_fpr_D();
1809
1810 gen(dst, cpu_env, src1, src2);
1811
1812 gen_store_fpr_D(dc, rd, dst);
1813}
1814
1815#ifdef TARGET_SPARC64
1816static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1817 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1818{
1819 TCGv_i64 dst, src1, src2;
1820
1821 src1 = gen_load_fpr_D(dc, rs1);
1822 src2 = gen_load_fpr_D(dc, rs2);
1823 dst = gen_dest_fpr_D();
1824
1825 gen(dst, src1, src2);
1826
1827 gen_store_fpr_D(dc, rd, dst);
1828}
1829
1830static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1831 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1832{
1833 TCGv_i64 dst, src1, src2;
1834
1835 src1 = gen_load_fpr_D(dc, rs1);
1836 src2 = gen_load_fpr_D(dc, rs2);
1837 dst = gen_dest_fpr_D();
1838
1839 gen(dst, cpu_gsr, src1, src2);
1840
1841 gen_store_fpr_D(dc, rd, dst);
1842}
1843
1844static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1845 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1846{
1847 TCGv_i64 dst, src0, src1, src2;
1848
1849 src1 = gen_load_fpr_D(dc, rs1);
1850 src2 = gen_load_fpr_D(dc, rs2);
1851 src0 = gen_load_fpr_D(dc, rd);
1852 dst = gen_dest_fpr_D();
1853
1854 gen(dst, src0, src1, src2);
1855
1856 gen_store_fpr_D(dc, rd, dst);
1857}
1858#endif
1859
/* Quad-precision unary op; the operand is passed to the helper through
   the QT1 staging slot and the result comes back in QT0. */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1870
1871#ifdef TARGET_SPARC64
/* Quad-precision unary op, V9-only variant; currently has the same
   body as gen_fop_QQ (operand in QT1, result in QT0). */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1882#endif
1883
/* Quad-precision binary op; sources staged in QT0/QT1, result in QT0. */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(QFPREG(rd));
}
1895
1896static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1897 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1898{
1899 TCGv_i64 dst;
1900 TCGv_i32 src1, src2;
1901
1902 src1 = gen_load_fpr_F(dc, rs1);
1903 src2 = gen_load_fpr_F(dc, rs2);
1904 dst = gen_dest_fpr_D();
1905
1906 gen(dst, cpu_env, src1, src2);
1907
1908 gen_store_fpr_D(dc, rd, dst);
1909}
1910
1911static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1912 void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1913{
1914 TCGv_i64 src1, src2;
1915
1916 src1 = gen_load_fpr_D(dc, rs1);
1917 src2 = gen_load_fpr_D(dc, rs2);
1918
1919 gen(cpu_env, src1, src2);
1920
1921 gen_op_store_QT0_fpr(QFPREG(rd));
1922 gen_update_fprs_dirty(QFPREG(rd));
1923}
1924
1925#ifdef TARGET_SPARC64
1926static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1927 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1928{
1929 TCGv_i64 dst;
1930 TCGv_i32 src;
1931
1932 src = gen_load_fpr_F(dc, rs);
1933 dst = gen_dest_fpr_D();
1934
1935 gen(dst, cpu_env, src);
1936
1937 gen_store_fpr_D(dc, rd, dst);
1938}
1939#endif
1940
1941static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1942 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1943{
1944 TCGv_i64 dst;
1945 TCGv_i32 src;
1946
1947 src = gen_load_fpr_F(dc, rs);
1948 dst = gen_dest_fpr_D();
1949
1950 gen(dst, cpu_env, src);
1951
1952 gen_store_fpr_D(dc, rd, dst);
1953}
1954
1955static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1956 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1957{
1958 TCGv_i32 dst;
1959 TCGv_i64 src;
1960
1961 src = gen_load_fpr_D(dc, rs);
1962 dst = gen_dest_fpr_F();
1963
1964 gen(dst, cpu_env, src);
1965
1966 gen_store_fpr_F(dc, rd, dst);
1967}
1968
1969static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1970 void (*gen)(TCGv_i32, TCGv_ptr))
1971{
1972 TCGv_i32 dst;
1973
1974 gen_op_load_fpr_QT1(QFPREG(rs));
1975 dst = gen_dest_fpr_F();
1976
1977 gen(dst, cpu_env);
1978
1979 gen_store_fpr_F(dc, rd, dst);
1980}
1981
1982static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1983 void (*gen)(TCGv_i64, TCGv_ptr))
1984{
1985 TCGv_i64 dst;
1986
1987 gen_op_load_fpr_QT1(QFPREG(rs));
1988 dst = gen_dest_fpr_D();
1989
1990 gen(dst, cpu_env);
1991
1992 gen_store_fpr_D(dc, rd, dst);
1993}
1994
1995static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1996 void (*gen)(TCGv_ptr, TCGv_i32))
1997{
1998 TCGv_i32 src;
1999
2000 src = gen_load_fpr_F(dc, rs);
2001
2002 gen(cpu_env, src);
2003
2004 gen_op_store_QT0_fpr(QFPREG(rd));
2005 gen_update_fprs_dirty(QFPREG(rd));
2006}
2007
2008static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
2009 void (*gen)(TCGv_ptr, TCGv_i64))
2010{
2011 TCGv_i64 src;
2012
2013 src = gen_load_fpr_D(dc, rs);
2014
2015 gen(cpu_env, src);
2016
2017 gen_op_store_QT0_fpr(QFPREG(rd));
2018 gen_update_fprs_dirty(QFPREG(rd));
2019}
2020
2021/* asi moves */
2022#ifdef TARGET_SPARC64
2023static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
2024{
2025 int asi;
2026 TCGv_i32 r_asi;
2027
2028 if (IS_IMM) {
2029 r_asi = tcg_temp_new_i32();
2030 tcg_gen_mov_i32(r_asi, cpu_asi);
2031 } else {
2032 asi = GET_FIELD(insn, 19, 26);
2033 r_asi = tcg_const_i32(asi);
2034 }
2035 return r_asi;
2036}
2037
/* Load SIZE bytes from ADDR in the insn's address space into DST,
   sign-extending when SIGN is set; all the work is in the helper. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
2051
/* Store SIZE bytes of SRC to ADDR in the insn's address space. */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
2062
/* FP load with alternate space; the helper writes FP register RD
   directly (SIZE selects single/double/quad handling in the helper). */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
2075
/* FP store with alternate space; the helper reads FP register RD
   directly. */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
2088
/* swap: load the old 32-bit value from ADDR, store DST there, then
   return the old value in DST. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(cpu_env, addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
2103
/* ldda: 64-bit load into the even/odd register pair at RD; the helper
   performs the register writeback itself. */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
2114
/* stda: store the register pair as one 64-bit value; HI is r[rd]
   (high word), r[rd + 1] supplies the low word. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
2127
/* casa: 32-bit compare-and-swap via helper; r[rd] is the compare
   value, VAL2 the swap value, old memory contents returned in DST. */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
2141
/* casxa: 64-bit compare-and-swap via helper; the compare value r[rd]
   is staged in cpu_tmp64. */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, cpu_env, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
2152
2153#elif !defined(CONFIG_USER_ONLY)
2154
2155static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2156 int sign)
2157{
2158 TCGv_i32 r_asi, r_size, r_sign;
2159
2160 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2161 r_size = tcg_const_i32(size);
2162 r_sign = tcg_const_i32(sign);
2163 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2164 tcg_temp_free(r_sign);
2165 tcg_temp_free(r_size);
2166 tcg_temp_free(r_asi);
2167 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2168}
2169
2170static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2171{
2172 TCGv_i32 r_asi, r_size;
2173
2174 tcg_gen_extu_tl_i64(cpu_tmp64, src);
2175 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2176 r_size = tcg_const_i32(size);
2177 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2178 tcg_temp_free(r_size);
2179 tcg_temp_free(r_asi);
2180}
2181
2182static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2183{
2184 TCGv_i32 r_asi, r_size, r_sign;
2185 TCGv_i64 r_val;
2186
2187 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2188 r_size = tcg_const_i32(4);
2189 r_sign = tcg_const_i32(0);
2190 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2191 tcg_temp_free(r_sign);
2192 r_val = tcg_temp_new_i64();
2193 tcg_gen_extu_tl_i64(r_val, dst);
2194 gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2195 tcg_temp_free_i64(r_val);
2196 tcg_temp_free(r_size);
2197 tcg_temp_free(r_asi);
2198 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2199}
2200
2201static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2202{
2203 TCGv_i32 r_asi, r_size, r_sign;
2204
2205 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2206 r_size = tcg_const_i32(8);
2207 r_sign = tcg_const_i32(0);
2208 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2209 tcg_temp_free(r_sign);
2210 tcg_temp_free(r_size);
2211 tcg_temp_free(r_asi);
2212 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
2213 gen_movl_TN_reg(rd + 1, cpu_tmp0);
2214 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
2215 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
2216 gen_movl_TN_reg(rd, hi);
2217}
2218
2219static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2220{
2221 TCGv_i32 r_asi, r_size;
2222
2223 gen_movl_reg_TN(rd + 1, cpu_tmp0);
2224 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2225 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2226 r_size = tcg_const_i32(8);
2227 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2228 tcg_temp_free(r_size);
2229 tcg_temp_free(r_asi);
2230}
2231#endif
2232
2233#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* ldstub: load one byte from ADDR into DST, then store 0xff back. */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    /* NOTE(review): the store-back extracts the ASI field from the insn
       directly, while the load above goes through gen_ld_asi; on
       TARGET_SPARC64 the immediate (%asi register) form would use a
       different ASI for load vs store -- confirm this is intended. */
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
2249#endif
2250
2251static TCGv get_src1(DisasContext *dc, unsigned int insn)
2252{
2253 unsigned int rs1 = GET_FIELD(insn, 13, 17);
2254 return gen_load_gpr(dc, rs1);
2255}
2256
2257static TCGv get_src2(DisasContext *dc, unsigned int insn)
2258{
2259 if (IS_IMM) { /* immediate */
2260 target_long simm = GET_FIELDs(insn, 19, 31);
2261 TCGv t = get_temp_tl(dc);
2262 tcg_gen_movi_tl(t, simm);
2263 return t;
2264 } else { /* register */
2265 unsigned int rs2 = GET_FIELD(insn, 27, 31);
2266 return gen_load_gpr(dc, rs2);
2267 }
2268}
2269
2270#ifdef TARGET_SPARC64
/* Conditional move of a single-precision register: f[rd] = cmp ? f[rs] : f[rd]. */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later. */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        tcg_gen_trunc_i64_i32(c32, cmp->c1);
    } else {
        /* materialize the full comparison first, then truncate */
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_trunc_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F();
    zero = tcg_const_i32(0);

    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}
2299
/* Conditional move of a double-precision register: d[rd] = cmp ? d[rs] : d[rd]. */
static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i64 dst = gen_dest_fpr_D();
    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
                        gen_load_fpr_D(dc, rs),
                        gen_load_fpr_D(dc, rd));
    gen_store_fpr_D(dc, rd, dst);
}
2308
/* Conditional move of a quad register, done as two 64-bit movconds on
   the underlying fpr halves. */
static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    gen_update_fprs_dirty(qd);
}
2321
/* Compute a pointer to the current trap state, env->ts[env->tl & MAXTL_MASK],
   into R_TSPTR.  NOTE: the second parameter shadows the file-scope
   cpu_env global. */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
2346
/* Emit code for the VIS edge instructions: DST receives the edge mask
   computed from addresses S1 and S2 for element WIDTH (8/16/32 bits);
   the CC forms additionally set the condition codes as a SUBcc. */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    /* NOTE(review): s1 and s2 are overwritten in place below; if the
       caller passes TCGvs obtained from gen_load_gpr this would clobber
       the guest registers -- confirm callers copy to temps first. */
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
2449
2450static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2451{
2452 TCGv tmp = tcg_temp_new();
2453
2454 tcg_gen_add_tl(tmp, s1, s2);
2455 tcg_gen_andi_tl(dst, tmp, -8);
2456 if (left) {
2457 tcg_gen_neg_tl(tmp, tmp);
2458 }
2459 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2460
2461 tcg_temp_free(tmp);
2462}
2463
/* faligndata: DST = the 8 bytes starting GSR.align bytes into the
   16-byte value S1:S2 -- i.e. (s1 << (align*8)) | (s2 >> (64 - align*8)). */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = GSR.align * 8 (bit count) */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1. */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);

    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(shift);
}
2488#endif
2489
/* Guard macros for optional CPU features.  They may only be used inside
   disas_sparc_insn, as they bail out via goto to its illegal_insn /
   nfpu_insn exception labels when the current CPU model lacks FEATURE.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
2496
2497/* before an instruction, dc->pc must be static */
2498static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2499{
2500 unsigned int opc, rs1, rs2, rd;
2501 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2502 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2503 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2504 target_long simm;
2505
2506 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2507 tcg_gen_debug_insn_start(dc->pc);
2508 }
2509
2510 opc = GET_FIELD(insn, 0, 1);
2511
2512 rd = GET_FIELD(insn, 2, 6);
2513
2514 cpu_tmp1 = cpu_src1 = tcg_temp_new();
2515 cpu_tmp2 = cpu_src2 = tcg_temp_new();
2516
2517 switch (opc) {
2518 case 0: /* branches/sethi */
2519 {
2520 unsigned int xop = GET_FIELD(insn, 7, 9);
2521 int32_t target;
2522 switch (xop) {
2523#ifdef TARGET_SPARC64
2524 case 0x1: /* V9 BPcc */
2525 {
2526 int cc;
2527
2528 target = GET_FIELD_SP(insn, 0, 18);
2529 target = sign_extend(target, 19);
2530 target <<= 2;
2531 cc = GET_FIELD_SP(insn, 20, 21);
2532 if (cc == 0)
2533 do_branch(dc, target, insn, 0);
2534 else if (cc == 2)
2535 do_branch(dc, target, insn, 1);
2536 else
2537 goto illegal_insn;
2538 goto jmp_insn;
2539 }
2540 case 0x3: /* V9 BPr */
2541 {
2542 target = GET_FIELD_SP(insn, 0, 13) |
2543 (GET_FIELD_SP(insn, 20, 21) << 14);
2544 target = sign_extend(target, 16);
2545 target <<= 2;
2546 cpu_src1 = get_src1(dc, insn);
2547 do_branch_reg(dc, target, insn, cpu_src1);
2548 goto jmp_insn;
2549 }
2550 case 0x5: /* V9 FBPcc */
2551 {
2552 int cc = GET_FIELD_SP(insn, 20, 21);
2553 if (gen_trap_ifnofpu(dc)) {
2554 goto jmp_insn;
2555 }
2556 target = GET_FIELD_SP(insn, 0, 18);
2557 target = sign_extend(target, 19);
2558 target <<= 2;
2559 do_fbranch(dc, target, insn, cc);
2560 goto jmp_insn;
2561 }
2562#else
2563 case 0x7: /* CBN+x */
2564 {
2565 goto ncp_insn;
2566 }
2567#endif
2568 case 0x2: /* BN+x */
2569 {
2570 target = GET_FIELD(insn, 10, 31);
2571 target = sign_extend(target, 22);
2572 target <<= 2;
2573 do_branch(dc, target, insn, 0);
2574 goto jmp_insn;
2575 }
2576 case 0x6: /* FBN+x */
2577 {
2578 if (gen_trap_ifnofpu(dc)) {
2579 goto jmp_insn;
2580 }
2581 target = GET_FIELD(insn, 10, 31);
2582 target = sign_extend(target, 22);
2583 target <<= 2;
2584 do_fbranch(dc, target, insn, 0);
2585 goto jmp_insn;
2586 }
2587 case 0x4: /* SETHI */
2588 /* Special-case %g0 because that's the canonical nop. */
2589 if (rd) {
2590 uint32_t value = GET_FIELD(insn, 10, 31);
2591 TCGv t = gen_dest_gpr(dc, rd);
2592 tcg_gen_movi_tl(t, value << 10);
2593 gen_store_gpr(dc, rd, t);
2594 }
2595 break;
2596 case 0x0: /* UNIMPL */
2597 default:
2598 goto illegal_insn;
2599 }
2600 break;
2601 }
2602 break;
2603 case 1: /*CALL*/
2604 {
2605 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2606 TCGv o7 = gen_dest_gpr(dc, 15);
2607
2608 tcg_gen_movi_tl(o7, dc->pc);
2609 gen_store_gpr(dc, 15, o7);
2610 target += dc->pc;
2611 gen_mov_pc_npc(dc);
2612#ifdef TARGET_SPARC64
2613 if (unlikely(AM_CHECK(dc))) {
2614 target &= 0xffffffffULL;
2615 }
2616#endif
2617 dc->npc = target;
2618 }
2619 goto jmp_insn;
2620 case 2: /* FPU & Logical Operations */
2621 {
2622 unsigned int xop = GET_FIELD(insn, 7, 12);
2623 if (xop == 0x3a) { /* generate trap */
2624 int cond = GET_FIELD(insn, 3, 6);
2625 TCGv_i32 trap;
2626 int l1 = -1, mask;
2627
2628 if (cond == 0) {
2629 /* Trap never. */
2630 break;
2631 }
2632
2633 save_state(dc);
2634
2635 if (cond != 8) {
2636 /* Conditional trap. */
2637 DisasCompare cmp;
2638#ifdef TARGET_SPARC64
2639 /* V9 icc/xcc */
2640 int cc = GET_FIELD_SP(insn, 11, 12);
2641 if (cc == 0) {
2642 gen_compare(&cmp, 0, cond, dc);
2643 } else if (cc == 2) {
2644 gen_compare(&cmp, 1, cond, dc);
2645 } else {
2646 goto illegal_insn;
2647 }
2648#else
2649 gen_compare(&cmp, 0, cond, dc);
2650#endif
2651 l1 = gen_new_label();
2652 tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
2653 cmp.c1, cmp.c2, l1);
2654 free_compare(&cmp);
2655 }
2656
2657 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
2658 ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
2659
2660 /* Don't use the normal temporaries, as they may well have
2661 gone out of scope with the branch above. While we're
2662 doing that we might as well pre-truncate to 32-bit. */
2663 trap = tcg_temp_new_i32();
2664
2665 rs1 = GET_FIELD_SP(insn, 14, 18);
2666 if (IS_IMM) {
2667 rs2 = GET_FIELD_SP(insn, 0, 6);
2668 if (rs1 == 0) {
2669 tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
2670 /* Signal that the trap value is fully constant. */
2671 mask = 0;
2672 } else {
2673 TCGv t1 = gen_load_gpr(dc, rs1);
2674 tcg_gen_trunc_tl_i32(trap, t1);
2675 tcg_gen_addi_i32(trap, trap, rs2);
2676 }
2677 } else {
2678 TCGv t1, t2;
2679 rs2 = GET_FIELD_SP(insn, 0, 4);
2680 t1 = gen_load_gpr(dc, rs1);
2681 t2 = gen_load_gpr(dc, rs2);
2682 tcg_gen_add_tl(t1, t1, t2);
2683 tcg_gen_trunc_tl_i32(trap, t1);
2684 }
2685 if (mask != 0) {
2686 tcg_gen_andi_i32(trap, trap, mask);
2687 tcg_gen_addi_i32(trap, trap, TT_TRAP);
2688 }
2689
2690 gen_helper_raise_exception(cpu_env, trap);
2691 tcg_temp_free_i32(trap);
2692
2693 if (cond == 8) {
2694 /* An unconditional trap ends the TB. */
2695 dc->is_br = 1;
2696 goto jmp_insn;
2697 } else {
2698 /* A conditional trap falls through to the next insn. */
2699 gen_set_label(l1);
2700 break;
2701 }
2702 } else if (xop == 0x28) {
2703 rs1 = GET_FIELD(insn, 13, 17);
2704 switch(rs1) {
2705 case 0: /* rdy */
2706#ifndef TARGET_SPARC64
2707 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2708 manual, rdy on the microSPARC
2709 II */
2710 case 0x0f: /* stbar in the SPARCv8 manual,
2711 rdy on the microSPARC II */
2712 case 0x10 ... 0x1f: /* implementation-dependent in the
2713 SPARCv8 manual, rdy on the
2714 microSPARC II */
2715 /* Read Asr17 */
2716 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2717 TCGv t = gen_dest_gpr(dc, rd);
2718 /* Read Asr17 for a Leon3 monoprocessor */
2719 tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
2720 gen_store_gpr(dc, rd, t);
2721 break;
2722 }
2723#endif
2724 gen_store_gpr(dc, rd, cpu_y);
2725 break;
2726#ifdef TARGET_SPARC64
2727 case 0x2: /* V9 rdccr */
2728 update_psr(dc);
2729 gen_helper_rdccr(cpu_dst, cpu_env);
2730 gen_store_gpr(dc, rd, cpu_dst);
2731 break;
2732 case 0x3: /* V9 rdasi */
2733 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2734 gen_store_gpr(dc, rd, cpu_dst);
2735 break;
2736 case 0x4: /* V9 rdtick */
2737 {
2738 TCGv_ptr r_tickptr;
2739
2740 r_tickptr = tcg_temp_new_ptr();
2741 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2742 offsetof(CPUSPARCState, tick));
2743 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2744 tcg_temp_free_ptr(r_tickptr);
2745 gen_store_gpr(dc, rd, cpu_dst);
2746 }
2747 break;
2748 case 0x5: /* V9 rdpc */
2749 {
2750 TCGv t = gen_dest_gpr(dc, rd);
2751 if (unlikely(AM_CHECK(dc))) {
2752 tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
2753 } else {
2754 tcg_gen_movi_tl(t, dc->pc);
2755 }
2756 gen_store_gpr(dc, rd, t);
2757 }
2758 break;
2759 case 0x6: /* V9 rdfprs */
2760 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2761 gen_store_gpr(dc, rd, cpu_dst);
2762 break;
2763 case 0xf: /* V9 membar */
2764 break; /* no effect */
2765 case 0x13: /* Graphics Status */
2766 if (gen_trap_ifnofpu(dc)) {
2767 goto jmp_insn;
2768 }
2769 gen_store_gpr(dc, rd, cpu_gsr);
2770 break;
2771 case 0x16: /* Softint */
2772 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2773 gen_store_gpr(dc, rd, cpu_dst);
2774 break;
2775 case 0x17: /* Tick compare */
2776 gen_store_gpr(dc, rd, cpu_tick_cmpr);
2777 break;
2778 case 0x18: /* System tick */
2779 {
2780 TCGv_ptr r_tickptr;
2781
2782 r_tickptr = tcg_temp_new_ptr();
2783 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2784 offsetof(CPUSPARCState, stick));
2785 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2786 tcg_temp_free_ptr(r_tickptr);
2787 gen_store_gpr(dc, rd, cpu_dst);
2788 }
2789 break;
2790 case 0x19: /* System tick compare */
2791 gen_store_gpr(dc, rd, cpu_stick_cmpr);
2792 break;
2793 case 0x10: /* Performance Control */
2794 case 0x11: /* Performance Instrumentation Counter */
2795 case 0x12: /* Dispatch Control */
2796 case 0x14: /* Softint set, WO */
2797 case 0x15: /* Softint clear, WO */
2798#endif
2799 default:
2800 goto illegal_insn;
2801 }
2802#if !defined(CONFIG_USER_ONLY)
2803 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2804#ifndef TARGET_SPARC64
2805 if (!supervisor(dc)) {
2806 goto priv_insn;
2807 }
2808 update_psr(dc);
2809 gen_helper_rdpsr(cpu_dst, cpu_env);
2810#else
2811 CHECK_IU_FEATURE(dc, HYPV);
2812 if (!hypervisor(dc))
2813 goto priv_insn;
2814 rs1 = GET_FIELD(insn, 13, 17);
2815 switch (rs1) {
2816 case 0: // hpstate
2817 // gen_op_rdhpstate();
2818 break;
2819 case 1: // htstate
2820 // gen_op_rdhtstate();
2821 break;
2822 case 3: // hintp
2823 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2824 break;
2825 case 5: // htba
2826 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2827 break;
2828 case 6: // hver
2829 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2830 break;
2831 case 31: // hstick_cmpr
2832 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2833 break;
2834 default:
2835 goto illegal_insn;
2836 }
2837#endif
2838 gen_store_gpr(dc, rd, cpu_dst);
2839 break;
2840 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2841 if (!supervisor(dc))
2842 goto priv_insn;
2843#ifdef TARGET_SPARC64
2844 rs1 = GET_FIELD(insn, 13, 17);
2845 switch (rs1) {
2846 case 0: // tpc
2847 {
2848 TCGv_ptr r_tsptr;
2849
2850 r_tsptr = tcg_temp_new_ptr();
2851 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2852 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2853 offsetof(trap_state, tpc));
2854 tcg_temp_free_ptr(r_tsptr);
2855 }
2856 break;
2857 case 1: // tnpc
2858 {
2859 TCGv_ptr r_tsptr;
2860
2861 r_tsptr = tcg_temp_new_ptr();
2862 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2863 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2864 offsetof(trap_state, tnpc));
2865 tcg_temp_free_ptr(r_tsptr);
2866 }
2867 break;
2868 case 2: // tstate
2869 {
2870 TCGv_ptr r_tsptr;
2871
2872 r_tsptr = tcg_temp_new_ptr();
2873 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2874 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2875 offsetof(trap_state, tstate));
2876 tcg_temp_free_ptr(r_tsptr);
2877 }
2878 break;
2879 case 3: // tt
2880 {
2881 TCGv_ptr r_tsptr;
2882
2883 r_tsptr = tcg_temp_new_ptr();
2884 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2885 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2886 offsetof(trap_state, tt));
2887 tcg_temp_free_ptr(r_tsptr);
2888 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2889 }
2890 break;
2891 case 4: // tick
2892 {
2893 TCGv_ptr r_tickptr;
2894
2895 r_tickptr = tcg_temp_new_ptr();
2896 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2897 offsetof(CPUSPARCState, tick));
2898 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2899 tcg_temp_free_ptr(r_tickptr);
2900 }
2901 break;
2902 case 5: // tba
2903 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2904 break;
2905 case 6: // pstate
2906 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2907 offsetof(CPUSPARCState, pstate));
2908 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2909 break;
2910 case 7: // tl
2911 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2912 offsetof(CPUSPARCState, tl));
2913 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2914 break;
2915 case 8: // pil
2916 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2917 offsetof(CPUSPARCState, psrpil));
2918 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2919 break;
2920 case 9: // cwp
2921 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2922 break;
2923 case 10: // cansave
2924 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2925 offsetof(CPUSPARCState, cansave));
2926 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2927 break;
2928 case 11: // canrestore
2929 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2930 offsetof(CPUSPARCState, canrestore));
2931 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2932 break;
2933 case 12: // cleanwin
2934 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2935 offsetof(CPUSPARCState, cleanwin));
2936 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2937 break;
2938 case 13: // otherwin
2939 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2940 offsetof(CPUSPARCState, otherwin));
2941 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2942 break;
2943 case 14: // wstate
2944 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2945 offsetof(CPUSPARCState, wstate));
2946 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2947 break;
2948 case 16: // UA2005 gl
2949 CHECK_IU_FEATURE(dc, GL);
2950 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2951 offsetof(CPUSPARCState, gl));
2952 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2953 break;
2954 case 26: // UA2005 strand status
2955 CHECK_IU_FEATURE(dc, HYPV);
2956 if (!hypervisor(dc))
2957 goto priv_insn;
2958 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2959 break;
2960 case 31: // ver
2961 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2962 break;
2963 case 15: // fq
2964 default:
2965 goto illegal_insn;
2966 }
2967#else
2968 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2969#endif
2970 gen_store_gpr(dc, rd, cpu_tmp0);
2971 break;
2972 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2973#ifdef TARGET_SPARC64
2974 save_state(dc);
2975 gen_helper_flushw(cpu_env);
2976#else
2977 if (!supervisor(dc))
2978 goto priv_insn;
2979 gen_store_gpr(dc, rd, cpu_tbr);
2980#endif
2981 break;
2982#endif
2983 } else if (xop == 0x34) { /* FPU Operations */
2984 if (gen_trap_ifnofpu(dc)) {
2985 goto jmp_insn;
2986 }
2987 gen_op_clear_ieee_excp_and_FTT();
2988 rs1 = GET_FIELD(insn, 13, 17);
2989 rs2 = GET_FIELD(insn, 27, 31);
2990 xop = GET_FIELD(insn, 18, 26);
2991 save_state(dc);
2992 switch (xop) {
2993 case 0x1: /* fmovs */
2994 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2995 gen_store_fpr_F(dc, rd, cpu_src1_32);
2996 break;
2997 case 0x5: /* fnegs */
2998 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2999 break;
3000 case 0x9: /* fabss */
3001 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3002 break;
3003 case 0x29: /* fsqrts */
3004 CHECK_FPU_FEATURE(dc, FSQRT);
3005 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3006 break;
3007 case 0x2a: /* fsqrtd */
3008 CHECK_FPU_FEATURE(dc, FSQRT);
3009 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3010 break;
3011 case 0x2b: /* fsqrtq */
3012 CHECK_FPU_FEATURE(dc, FLOAT128);
3013 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3014 break;
3015 case 0x41: /* fadds */
3016 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3017 break;
3018 case 0x42: /* faddd */
3019 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3020 break;
3021 case 0x43: /* faddq */
3022 CHECK_FPU_FEATURE(dc, FLOAT128);
3023 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3024 break;
3025 case 0x45: /* fsubs */
3026 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3027 break;
3028 case 0x46: /* fsubd */
3029 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3030 break;
3031 case 0x47: /* fsubq */
3032 CHECK_FPU_FEATURE(dc, FLOAT128);
3033 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3034 break;
3035 case 0x49: /* fmuls */
3036 CHECK_FPU_FEATURE(dc, FMUL);
3037 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3038 break;
3039 case 0x4a: /* fmuld */
3040 CHECK_FPU_FEATURE(dc, FMUL);
3041 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3042 break;
3043 case 0x4b: /* fmulq */
3044 CHECK_FPU_FEATURE(dc, FLOAT128);
3045 CHECK_FPU_FEATURE(dc, FMUL);
3046 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3047 break;
3048 case 0x4d: /* fdivs */
3049 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3050 break;
3051 case 0x4e: /* fdivd */
3052 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3053 break;
3054 case 0x4f: /* fdivq */
3055 CHECK_FPU_FEATURE(dc, FLOAT128);
3056 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3057 break;
3058 case 0x69: /* fsmuld */
3059 CHECK_FPU_FEATURE(dc, FSMULD);
3060 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3061 break;
3062 case 0x6e: /* fdmulq */
3063 CHECK_FPU_FEATURE(dc, FLOAT128);
3064 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3065 break;
3066 case 0xc4: /* fitos */
3067 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3068 break;
3069 case 0xc6: /* fdtos */
3070 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3071 break;
3072 case 0xc7: /* fqtos */
3073 CHECK_FPU_FEATURE(dc, FLOAT128);
3074 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3075 break;
3076 case 0xc8: /* fitod */
3077 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3078 break;
3079 case 0xc9: /* fstod */
3080 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3081 break;
3082 case 0xcb: /* fqtod */
3083 CHECK_FPU_FEATURE(dc, FLOAT128);
3084 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3085 break;
3086 case 0xcc: /* fitoq */
3087 CHECK_FPU_FEATURE(dc, FLOAT128);
3088 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3089 break;
3090 case 0xcd: /* fstoq */
3091 CHECK_FPU_FEATURE(dc, FLOAT128);
3092 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3093 break;
3094 case 0xce: /* fdtoq */
3095 CHECK_FPU_FEATURE(dc, FLOAT128);
3096 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3097 break;
3098 case 0xd1: /* fstoi */
3099 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3100 break;
3101 case 0xd2: /* fdtoi */
3102 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3103 break;
3104 case 0xd3: /* fqtoi */
3105 CHECK_FPU_FEATURE(dc, FLOAT128);
3106 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3107 break;
3108#ifdef TARGET_SPARC64
3109 case 0x2: /* V9 fmovd */
3110 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3111 gen_store_fpr_D(dc, rd, cpu_src1_64);
3112 break;
3113 case 0x3: /* V9 fmovq */
3114 CHECK_FPU_FEATURE(dc, FLOAT128);
3115 gen_move_Q(rd, rs2);
3116 break;
3117 case 0x6: /* V9 fnegd */
3118 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3119 break;
3120 case 0x7: /* V9 fnegq */
3121 CHECK_FPU_FEATURE(dc, FLOAT128);
3122 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3123 break;
3124 case 0xa: /* V9 fabsd */
3125 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3126 break;
3127 case 0xb: /* V9 fabsq */
3128 CHECK_FPU_FEATURE(dc, FLOAT128);
3129 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3130 break;
3131 case 0x81: /* V9 fstox */
3132 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3133 break;
3134 case 0x82: /* V9 fdtox */
3135 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3136 break;
3137 case 0x83: /* V9 fqtox */
3138 CHECK_FPU_FEATURE(dc, FLOAT128);
3139 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3140 break;
3141 case 0x84: /* V9 fxtos */
3142 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3143 break;
3144 case 0x88: /* V9 fxtod */
3145 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3146 break;
3147 case 0x8c: /* V9 fxtoq */
3148 CHECK_FPU_FEATURE(dc, FLOAT128);
3149 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3150 break;
3151#endif
3152 default:
3153 goto illegal_insn;
3154 }
3155 } else if (xop == 0x35) { /* FPU Operations */
3156#ifdef TARGET_SPARC64
3157 int cond;
3158#endif
3159 if (gen_trap_ifnofpu(dc)) {
3160 goto jmp_insn;
3161 }
3162 gen_op_clear_ieee_excp_and_FTT();
3163 rs1 = GET_FIELD(insn, 13, 17);
3164 rs2 = GET_FIELD(insn, 27, 31);
3165 xop = GET_FIELD(insn, 18, 26);
3166 save_state(dc);
3167
3168#ifdef TARGET_SPARC64
3169#define FMOVR(sz) \
3170 do { \
3171 DisasCompare cmp; \
3172 cond = GET_FIELD_SP(insn, 14, 17); \
3173 cpu_src1 = get_src1(dc, insn); \
3174 gen_compare_reg(&cmp, cond, cpu_src1); \
3175 gen_fmov##sz(dc, &cmp, rd, rs2); \
3176 free_compare(&cmp); \
3177 } while (0)
3178
3179 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3180 FMOVR(s);
3181 break;
3182 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3183 FMOVR(d);
3184 break;
3185 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3186 CHECK_FPU_FEATURE(dc, FLOAT128);
3187 FMOVR(q);
3188 break;
3189 }
3190#undef FMOVR
3191#endif
3192 switch (xop) {
3193#ifdef TARGET_SPARC64
3194#define FMOVCC(fcc, sz) \
3195 do { \
3196 DisasCompare cmp; \
3197 cond = GET_FIELD_SP(insn, 14, 17); \
3198 gen_fcompare(&cmp, fcc, cond); \
3199 gen_fmov##sz(dc, &cmp, rd, rs2); \
3200 free_compare(&cmp); \
3201 } while (0)
3202
3203 case 0x001: /* V9 fmovscc %fcc0 */
3204 FMOVCC(0, s);
3205 break;
3206 case 0x002: /* V9 fmovdcc %fcc0 */
3207 FMOVCC(0, d);
3208 break;
3209 case 0x003: /* V9 fmovqcc %fcc0 */
3210 CHECK_FPU_FEATURE(dc, FLOAT128);
3211 FMOVCC(0, q);
3212 break;
3213 case 0x041: /* V9 fmovscc %fcc1 */
3214 FMOVCC(1, s);
3215 break;
3216 case 0x042: /* V9 fmovdcc %fcc1 */
3217 FMOVCC(1, d);
3218 break;
3219 case 0x043: /* V9 fmovqcc %fcc1 */
3220 CHECK_FPU_FEATURE(dc, FLOAT128);
3221 FMOVCC(1, q);
3222 break;
3223 case 0x081: /* V9 fmovscc %fcc2 */
3224 FMOVCC(2, s);
3225 break;
3226 case 0x082: /* V9 fmovdcc %fcc2 */
3227 FMOVCC(2, d);
3228 break;
3229 case 0x083: /* V9 fmovqcc %fcc2 */
3230 CHECK_FPU_FEATURE(dc, FLOAT128);
3231 FMOVCC(2, q);
3232 break;
3233 case 0x0c1: /* V9 fmovscc %fcc3 */
3234 FMOVCC(3, s);
3235 break;
3236 case 0x0c2: /* V9 fmovdcc %fcc3 */
3237 FMOVCC(3, d);
3238 break;
3239 case 0x0c3: /* V9 fmovqcc %fcc3 */
3240 CHECK_FPU_FEATURE(dc, FLOAT128);
3241 FMOVCC(3, q);
3242 break;
3243#undef FMOVCC
3244#define FMOVCC(xcc, sz) \
3245 do { \
3246 DisasCompare cmp; \
3247 cond = GET_FIELD_SP(insn, 14, 17); \
3248 gen_compare(&cmp, xcc, cond, dc); \
3249 gen_fmov##sz(dc, &cmp, rd, rs2); \
3250 free_compare(&cmp); \
3251 } while (0)
3252
3253 case 0x101: /* V9 fmovscc %icc */
3254 FMOVCC(0, s);
3255 break;
3256 case 0x102: /* V9 fmovdcc %icc */
3257 FMOVCC(0, d);
3258 break;
3259 case 0x103: /* V9 fmovqcc %icc */
3260 CHECK_FPU_FEATURE(dc, FLOAT128);
3261 FMOVCC(0, q);
3262 break;
3263 case 0x181: /* V9 fmovscc %xcc */
3264 FMOVCC(1, s);
3265 break;
3266 case 0x182: /* V9 fmovdcc %xcc */
3267 FMOVCC(1, d);
3268 break;
3269 case 0x183: /* V9 fmovqcc %xcc */
3270 CHECK_FPU_FEATURE(dc, FLOAT128);
3271 FMOVCC(1, q);
3272 break;
3273#undef FMOVCC
3274#endif
3275 case 0x51: /* fcmps, V9 %fcc */
3276 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3277 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3278 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3279 break;
3280 case 0x52: /* fcmpd, V9 %fcc */
3281 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3282 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3283 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3284 break;
3285 case 0x53: /* fcmpq, V9 %fcc */
3286 CHECK_FPU_FEATURE(dc, FLOAT128);
3287 gen_op_load_fpr_QT0(QFPREG(rs1));
3288 gen_op_load_fpr_QT1(QFPREG(rs2));
3289 gen_op_fcmpq(rd & 3);
3290 break;
3291 case 0x55: /* fcmpes, V9 %fcc */
3292 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3293 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3294 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3295 break;
3296 case 0x56: /* fcmped, V9 %fcc */
3297 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3298 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3299 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3300 break;
3301 case 0x57: /* fcmpeq, V9 %fcc */
3302 CHECK_FPU_FEATURE(dc, FLOAT128);
3303 gen_op_load_fpr_QT0(QFPREG(rs1));
3304 gen_op_load_fpr_QT1(QFPREG(rs2));
3305 gen_op_fcmpeq(rd & 3);
3306 break;
3307 default:
3308 goto illegal_insn;
3309 }
3310 } else if (xop == 0x2) {
3311 TCGv dst = gen_dest_gpr(dc, rd);
3312 rs1 = GET_FIELD(insn, 13, 17);
3313 if (rs1 == 0) {
3314 /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3315 if (IS_IMM) { /* immediate */
3316 simm = GET_FIELDs(insn, 19, 31);
3317 tcg_gen_movi_tl(dst, simm);
3318 gen_store_gpr(dc, rd, dst);
3319 } else { /* register */
3320 rs2 = GET_FIELD(insn, 27, 31);
3321 if (rs2 == 0) {
3322 tcg_gen_movi_tl(dst, 0);
3323 gen_store_gpr(dc, rd, dst);
3324 } else {
3325 cpu_src2 = gen_load_gpr(dc, rs2);
3326 gen_store_gpr(dc, rd, cpu_src2);
3327 }
3328 }
3329 } else {
3330 cpu_src1 = get_src1(dc, insn);
3331 if (IS_IMM) { /* immediate */
3332 simm = GET_FIELDs(insn, 19, 31);
3333 tcg_gen_ori_tl(dst, cpu_src1, simm);
3334 gen_store_gpr(dc, rd, dst);
3335 } else { /* register */
3336 rs2 = GET_FIELD(insn, 27, 31);
3337 if (rs2 == 0) {
3338 /* mov shortcut: or x, %g0, y -> mov x, y */
3339 gen_store_gpr(dc, rd, cpu_src1);
3340 } else {
3341 cpu_src2 = gen_load_gpr(dc, rs2);
3342 tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3343 gen_store_gpr(dc, rd, dst);
3344 }
3345 }
3346 }
3347#ifdef TARGET_SPARC64
3348 } else if (xop == 0x25) { /* sll, V9 sllx */
3349 cpu_src1 = get_src1(dc, insn);
3350 if (IS_IMM) { /* immediate */
3351 simm = GET_FIELDs(insn, 20, 31);
3352 if (insn & (1 << 12)) {
3353 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3354 } else {
3355 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3356 }
3357 } else { /* register */
3358 rs2 = GET_FIELD(insn, 27, 31);
3359 cpu_src2 = gen_load_gpr(dc, rs2);
3360 if (insn & (1 << 12)) {
3361 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3362 } else {
3363 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3364 }
3365 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3366 }
3367 gen_store_gpr(dc, rd, cpu_dst);
3368 } else if (xop == 0x26) { /* srl, V9 srlx */
3369 cpu_src1 = get_src1(dc, insn);
3370 if (IS_IMM) { /* immediate */
3371 simm = GET_FIELDs(insn, 20, 31);
3372 if (insn & (1 << 12)) {
3373 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3374 } else {
3375 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3376 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3377 }
3378 } else { /* register */
3379 rs2 = GET_FIELD(insn, 27, 31);
3380 cpu_src2 = gen_load_gpr(dc, rs2);
3381 if (insn & (1 << 12)) {
3382 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3383 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3384 } else {
3385 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3386 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3387 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3388 }
3389 }
3390 gen_store_gpr(dc, rd, cpu_dst);
3391 } else if (xop == 0x27) { /* sra, V9 srax */
3392 cpu_src1 = get_src1(dc, insn);
3393 if (IS_IMM) { /* immediate */
3394 simm = GET_FIELDs(insn, 20, 31);
3395 if (insn & (1 << 12)) {
3396 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3397 } else {
3398 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3399 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3400 }
3401 } else { /* register */
3402 rs2 = GET_FIELD(insn, 27, 31);
3403 cpu_src2 = gen_load_gpr(dc, rs2);
3404 if (insn & (1 << 12)) {
3405 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3406 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3407 } else {
3408 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3409 tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3410 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3411 }
3412 }
3413 gen_store_gpr(dc, rd, cpu_dst);
3414#endif
3415 } else if (xop < 0x36) {
3416 if (xop < 0x20) {
3417 cpu_src1 = get_src1(dc, insn);
3418 cpu_src2 = get_src2(dc, insn);
3419 switch (xop & ~0x10) {
3420 case 0x0: /* add */
3421 if (xop & 0x10) {
3422 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3423 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3424 dc->cc_op = CC_OP_ADD;
3425 } else {
3426 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3427 }
3428 break;
3429 case 0x1: /* and */
3430 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3431 if (xop & 0x10) {
3432 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3433 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3434 dc->cc_op = CC_OP_LOGIC;
3435 }
3436 break;
3437 case 0x2: /* or */
3438 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3439 if (xop & 0x10) {
3440 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3441 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3442 dc->cc_op = CC_OP_LOGIC;
3443 }
3444 break;
3445 case 0x3: /* xor */
3446 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3447 if (xop & 0x10) {
3448 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3449 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3450 dc->cc_op = CC_OP_LOGIC;
3451 }
3452 break;
3453 case 0x4: /* sub */
3454 if (xop & 0x10) {
3455 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3456 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3457 dc->cc_op = CC_OP_SUB;
3458 } else {
3459 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3460 }
3461 break;
3462 case 0x5: /* andn */
3463 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3464 if (xop & 0x10) {
3465 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3466 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3467 dc->cc_op = CC_OP_LOGIC;
3468 }
3469 break;
3470 case 0x6: /* orn */
3471 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3472 if (xop & 0x10) {
3473 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3474 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3475 dc->cc_op = CC_OP_LOGIC;
3476 }
3477 break;
3478 case 0x7: /* xorn */
3479 tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3480 if (xop & 0x10) {
3481 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3482 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3483 dc->cc_op = CC_OP_LOGIC;
3484 }
3485 break;
3486 case 0x8: /* addx, V9 addc */
3487 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3488 (xop & 0x10));
3489 break;
3490#ifdef TARGET_SPARC64
3491 case 0x9: /* V9 mulx */
3492 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3493 break;
3494#endif
3495 case 0xa: /* umul */
3496 CHECK_IU_FEATURE(dc, MUL);
3497 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3498 if (xop & 0x10) {
3499 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3500 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3501 dc->cc_op = CC_OP_LOGIC;
3502 }
3503 break;
3504 case 0xb: /* smul */
3505 CHECK_IU_FEATURE(dc, MUL);
3506 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3507 if (xop & 0x10) {
3508 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3509 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3510 dc->cc_op = CC_OP_LOGIC;
3511 }
3512 break;
3513 case 0xc: /* subx, V9 subc */
3514 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3515 (xop & 0x10));
3516 break;
3517#ifdef TARGET_SPARC64
3518 case 0xd: /* V9 udivx */
3519 gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3520 break;
3521#endif
3522 case 0xe: /* udiv */
3523 CHECK_IU_FEATURE(dc, DIV);
3524 if (xop & 0x10) {
3525 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3526 cpu_src2);
3527 dc->cc_op = CC_OP_DIV;
3528 } else {
3529 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3530 cpu_src2);
3531 }
3532 break;
3533 case 0xf: /* sdiv */
3534 CHECK_IU_FEATURE(dc, DIV);
3535 if (xop & 0x10) {
3536 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3537 cpu_src2);
3538 dc->cc_op = CC_OP_DIV;
3539 } else {
3540 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3541 cpu_src2);
3542 }
3543 break;
3544 default:
3545 goto illegal_insn;
3546 }
3547 gen_store_gpr(dc, rd, cpu_dst);
3548 } else {
3549 cpu_src1 = get_src1(dc, insn);
3550 cpu_src2 = get_src2(dc, insn);
3551 switch (xop) {
3552 case 0x20: /* taddcc */
3553 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3554 gen_store_gpr(dc, rd, cpu_dst);
3555 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3556 dc->cc_op = CC_OP_TADD;
3557 break;
3558 case 0x21: /* tsubcc */
3559 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3560 gen_store_gpr(dc, rd, cpu_dst);
3561 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3562 dc->cc_op = CC_OP_TSUB;
3563 break;
3564 case 0x22: /* taddcctv */
3565 gen_helper_taddcctv(cpu_dst, cpu_env,
3566 cpu_src1, cpu_src2);
3567 gen_store_gpr(dc, rd, cpu_dst);
3568 dc->cc_op = CC_OP_TADDTV;
3569 break;
3570 case 0x23: /* tsubcctv */
3571 gen_helper_tsubcctv(cpu_dst, cpu_env,
3572 cpu_src1, cpu_src2);
3573 gen_store_gpr(dc, rd, cpu_dst);
3574 dc->cc_op = CC_OP_TSUBTV;
3575 break;
3576 case 0x24: /* mulscc */
3577 update_psr(dc);
3578 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3579 gen_store_gpr(dc, rd, cpu_dst);
3580 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3581 dc->cc_op = CC_OP_ADD;
3582 break;
3583#ifndef TARGET_SPARC64
3584 case 0x25: /* sll */
3585 if (IS_IMM) { /* immediate */
3586 simm = GET_FIELDs(insn, 20, 31);
3587 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3588 } else { /* register */
3589 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3590 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3591 }
3592 gen_store_gpr(dc, rd, cpu_dst);
3593 break;
3594 case 0x26: /* srl */
3595 if (IS_IMM) { /* immediate */
3596 simm = GET_FIELDs(insn, 20, 31);
3597 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3598 } else { /* register */
3599 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3600 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3601 }
3602 gen_store_gpr(dc, rd, cpu_dst);
3603 break;
3604 case 0x27: /* sra */
3605 if (IS_IMM) { /* immediate */
3606 simm = GET_FIELDs(insn, 20, 31);
3607 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3608 } else { /* register */
3609 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3610 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3611 }
3612 gen_store_gpr(dc, rd, cpu_dst);
3613 break;
3614#endif
3615 case 0x30:
3616 {
3617 switch(rd) {
3618 case 0: /* wry */
3619 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3620 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3621 break;
3622#ifndef TARGET_SPARC64
3623 case 0x01 ... 0x0f: /* undefined in the
3624 SPARCv8 manual, nop
3625 on the microSPARC
3626 II */
3627 case 0x10 ... 0x1f: /* implementation-dependent
3628 in the SPARCv8
3629 manual, nop on the
3630 microSPARC II */
3631 break;
3632#else
3633 case 0x2: /* V9 wrccr */
3634 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3635 gen_helper_wrccr(cpu_env, cpu_dst);
3636 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3637 dc->cc_op = CC_OP_FLAGS;
3638 break;
3639 case 0x3: /* V9 wrasi */
3640 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3641 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3642 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3643 break;
3644 case 0x6: /* V9 wrfprs */
3645 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3646 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3647 save_state(dc);
3648 gen_op_next_insn();
3649 tcg_gen_exit_tb(0);
3650 dc->is_br = 1;
3651 break;
3652 case 0xf: /* V9 sir, nop if user */
3653#if !defined(CONFIG_USER_ONLY)
3654 if (supervisor(dc)) {
3655 ; // XXX
3656 }
3657#endif
3658 break;
3659 case 0x13: /* Graphics Status */
3660 if (gen_trap_ifnofpu(dc)) {
3661 goto jmp_insn;
3662 }
3663 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3664 break;
3665 case 0x14: /* Softint set */
3666 if (!supervisor(dc))
3667 goto illegal_insn;
3668 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3669 gen_helper_set_softint(cpu_env, cpu_tmp64);
3670 break;
3671 case 0x15: /* Softint clear */
3672 if (!supervisor(dc))
3673 goto illegal_insn;
3674 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3675 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3676 break;
3677 case 0x16: /* Softint write */
3678 if (!supervisor(dc))
3679 goto illegal_insn;
3680 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3681 gen_helper_write_softint(cpu_env, cpu_tmp64);
3682 break;
3683 case 0x17: /* Tick compare */
3684#if !defined(CONFIG_USER_ONLY)
3685 if (!supervisor(dc))
3686 goto illegal_insn;
3687#endif
3688 {
3689 TCGv_ptr r_tickptr;
3690
3691 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3692 cpu_src2);
3693 r_tickptr = tcg_temp_new_ptr();
3694 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3695 offsetof(CPUSPARCState, tick));
3696 gen_helper_tick_set_limit(r_tickptr,
3697 cpu_tick_cmpr);
3698 tcg_temp_free_ptr(r_tickptr);
3699 }
3700 break;
3701 case 0x18: /* System tick */
3702#if !defined(CONFIG_USER_ONLY)
3703 if (!supervisor(dc))
3704 goto illegal_insn;
3705#endif
3706 {
3707 TCGv_ptr r_tickptr;
3708
3709 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3710 cpu_src2);
3711 r_tickptr = tcg_temp_new_ptr();
3712 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3713 offsetof(CPUSPARCState, stick));
3714 gen_helper_tick_set_count(r_tickptr,
3715 cpu_dst);
3716 tcg_temp_free_ptr(r_tickptr);
3717 }
3718 break;
3719 case 0x19: /* System tick compare */
3720#if !defined(CONFIG_USER_ONLY)
3721 if (!supervisor(dc))
3722 goto illegal_insn;
3723#endif
3724 {
3725 TCGv_ptr r_tickptr;
3726
3727 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3728 cpu_src2);
3729 r_tickptr = tcg_temp_new_ptr();
3730 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3731 offsetof(CPUSPARCState, stick));
3732 gen_helper_tick_set_limit(r_tickptr,
3733 cpu_stick_cmpr);
3734 tcg_temp_free_ptr(r_tickptr);
3735 }
3736 break;
3737
3738 case 0x10: /* Performance Control */
3739 case 0x11: /* Performance Instrumentation
3740 Counter */
3741 case 0x12: /* Dispatch Control */
3742#endif
3743 default:
3744 goto illegal_insn;
3745 }
3746 }
3747 break;
3748#if !defined(CONFIG_USER_ONLY)
3749 case 0x31: /* wrpsr, V9 saved, restored */
3750 {
3751 if (!supervisor(dc))
3752 goto priv_insn;
3753#ifdef TARGET_SPARC64
3754 switch (rd) {
3755 case 0:
3756 gen_helper_saved(cpu_env);
3757 break;
3758 case 1:
3759 gen_helper_restored(cpu_env);
3760 break;
3761 case 2: /* UA2005 allclean */
3762 case 3: /* UA2005 otherw */
3763 case 4: /* UA2005 normalw */
3764 case 5: /* UA2005 invalw */
3765 // XXX
3766 default:
3767 goto illegal_insn;
3768 }
3769#else
3770 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3771 gen_helper_wrpsr(cpu_env, cpu_dst);
3772 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3773 dc->cc_op = CC_OP_FLAGS;
3774 save_state(dc);
3775 gen_op_next_insn();
3776 tcg_gen_exit_tb(0);
3777 dc->is_br = 1;
3778#endif
3779 }
3780 break;
3781 case 0x32: /* wrwim, V9 wrpr */
3782 {
3783 if (!supervisor(dc))
3784 goto priv_insn;
3785 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3786#ifdef TARGET_SPARC64
3787 switch (rd) {
3788 case 0: // tpc
3789 {
3790 TCGv_ptr r_tsptr;
3791
3792 r_tsptr = tcg_temp_new_ptr();
3793 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3794 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3795 offsetof(trap_state, tpc));
3796 tcg_temp_free_ptr(r_tsptr);
3797 }
3798 break;
3799 case 1: // tnpc
3800 {
3801 TCGv_ptr r_tsptr;
3802
3803 r_tsptr = tcg_temp_new_ptr();
3804 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3805 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3806 offsetof(trap_state, tnpc));
3807 tcg_temp_free_ptr(r_tsptr);
3808 }
3809 break;
3810 case 2: // tstate
3811 {
3812 TCGv_ptr r_tsptr;
3813
3814 r_tsptr = tcg_temp_new_ptr();
3815 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3816 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3817 offsetof(trap_state,
3818 tstate));
3819 tcg_temp_free_ptr(r_tsptr);
3820 }
3821 break;
3822 case 3: // tt
3823 {
3824 TCGv_ptr r_tsptr;
3825
3826 r_tsptr = tcg_temp_new_ptr();
3827 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3828 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3829 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3830 offsetof(trap_state, tt));
3831 tcg_temp_free_ptr(r_tsptr);
3832 }
3833 break;
3834 case 4: // tick
3835 {
3836 TCGv_ptr r_tickptr;
3837
3838 r_tickptr = tcg_temp_new_ptr();
3839 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3840 offsetof(CPUSPARCState, tick));
3841 gen_helper_tick_set_count(r_tickptr,
3842 cpu_tmp0);
3843 tcg_temp_free_ptr(r_tickptr);
3844 }
3845 break;
3846 case 5: // tba
3847 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3848 break;
3849 case 6: // pstate
3850 save_state(dc);
3851 gen_helper_wrpstate(cpu_env, cpu_tmp0);
3852 dc->npc = DYNAMIC_PC;
3853 break;
3854 case 7: // tl
3855 save_state(dc);
3856 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3857 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3858 offsetof(CPUSPARCState, tl));
3859 dc->npc = DYNAMIC_PC;
3860 break;
3861 case 8: // pil
3862 gen_helper_wrpil(cpu_env, cpu_tmp0);
3863 break;
3864 case 9: // cwp
3865 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3866 break;
3867 case 10: // cansave
3868 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3869 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3870 offsetof(CPUSPARCState,
3871 cansave));
3872 break;
3873 case 11: // canrestore
3874 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3875 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3876 offsetof(CPUSPARCState,
3877 canrestore));
3878 break;
3879 case 12: // cleanwin
3880 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3881 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3882 offsetof(CPUSPARCState,
3883 cleanwin));
3884 break;
3885 case 13: // otherwin
3886 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3887 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3888 offsetof(CPUSPARCState,
3889 otherwin));
3890 break;
3891 case 14: // wstate
3892 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3893 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3894 offsetof(CPUSPARCState,
3895 wstate));
3896 break;
3897 case 16: // UA2005 gl
3898 CHECK_IU_FEATURE(dc, GL);
3899 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3900 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3901 offsetof(CPUSPARCState, gl));
3902 break;
3903 case 26: // UA2005 strand status
3904 CHECK_IU_FEATURE(dc, HYPV);
3905 if (!hypervisor(dc))
3906 goto priv_insn;
3907 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3908 break;
3909 default:
3910 goto illegal_insn;
3911 }
3912#else
3913 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3914 if (dc->def->nwindows != 32)
3915 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3916 (1 << dc->def->nwindows) - 1);
3917 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3918#endif
3919 }
3920 break;
3921 case 0x33: /* wrtbr, UA2005 wrhpr */
3922 {
3923#ifndef TARGET_SPARC64
3924 if (!supervisor(dc))
3925 goto priv_insn;
3926 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3927#else
3928 CHECK_IU_FEATURE(dc, HYPV);
3929 if (!hypervisor(dc))
3930 goto priv_insn;
3931 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3932 switch (rd) {
3933 case 0: // hpstate
3934 // XXX gen_op_wrhpstate();
3935 save_state(dc);
3936 gen_op_next_insn();
3937 tcg_gen_exit_tb(0);
3938 dc->is_br = 1;
3939 break;
3940 case 1: // htstate
3941 // XXX gen_op_wrhtstate();
3942 break;
3943 case 3: // hintp
3944 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3945 break;
3946 case 5: // htba
3947 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3948 break;
3949 case 31: // hstick_cmpr
3950 {
3951 TCGv_ptr r_tickptr;
3952
3953 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3954 r_tickptr = tcg_temp_new_ptr();
3955 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3956 offsetof(CPUSPARCState, hstick));
3957 gen_helper_tick_set_limit(r_tickptr,
3958 cpu_hstick_cmpr);
3959 tcg_temp_free_ptr(r_tickptr);
3960 }
3961 break;
3962 case 6: // hver readonly
3963 default:
3964 goto illegal_insn;
3965 }
3966#endif
3967 }
3968 break;
3969#endif
3970#ifdef TARGET_SPARC64
3971 case 0x2c: /* V9 movcc */
3972 {
3973 int cc = GET_FIELD_SP(insn, 11, 12);
3974 int cond = GET_FIELD_SP(insn, 14, 17);
3975 DisasCompare cmp;
3976 TCGv dst;
3977
3978 if (insn & (1 << 18)) {
3979 if (cc == 0) {
3980 gen_compare(&cmp, 0, cond, dc);
3981 } else if (cc == 2) {
3982 gen_compare(&cmp, 1, cond, dc);
3983 } else {
3984 goto illegal_insn;
3985 }
3986 } else {
3987 gen_fcompare(&cmp, cc, cond);
3988 }
3989
3990 /* The get_src2 above loaded the normal 13-bit
3991 immediate field, not the 11-bit field we have
3992 in movcc. But it did handle the reg case. */
3993 if (IS_IMM) {
3994 simm = GET_FIELD_SPs(insn, 0, 10);
3995 tcg_gen_movi_tl(cpu_src2, simm);
3996 }
3997
3998 dst = gen_load_gpr(dc, rd);
3999 tcg_gen_movcond_tl(cmp.cond, dst,
4000 cmp.c1, cmp.c2,
4001 cpu_src2, dst);
4002 free_compare(&cmp);
4003 gen_store_gpr(dc, rd, dst);
4004 break;
4005 }
4006 case 0x2d: /* V9 sdivx */
4007 gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4008 gen_store_gpr(dc, rd, cpu_dst);
4009 break;
4010 case 0x2e: /* V9 popc */
4011 gen_helper_popc(cpu_dst, cpu_src2);
4012 gen_store_gpr(dc, rd, cpu_dst);
4013 break;
4014 case 0x2f: /* V9 movr */
4015 {
4016 int cond = GET_FIELD_SP(insn, 10, 12);
4017 DisasCompare cmp;
4018 TCGv dst;
4019
4020 gen_compare_reg(&cmp, cond, cpu_src1);
4021
4022 /* The get_src2 above loaded the normal 13-bit
4023 immediate field, not the 10-bit field we have
4024 in movr. But it did handle the reg case. */
4025 if (IS_IMM) {
4026 simm = GET_FIELD_SPs(insn, 0, 9);
4027 tcg_gen_movi_tl(cpu_src2, simm);
4028 }
4029
4030 dst = gen_load_gpr(dc, rd);
4031 tcg_gen_movcond_tl(cmp.cond, dst,
4032 cmp.c1, cmp.c2,
4033 cpu_src2, dst);
4034 free_compare(&cmp);
4035 gen_store_gpr(dc, rd, dst);
4036 break;
4037 }
4038#endif
4039 default:
4040 goto illegal_insn;
4041 }
4042 }
4043 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4044#ifdef TARGET_SPARC64
4045 int opf = GET_FIELD_SP(insn, 5, 13);
4046 rs1 = GET_FIELD(insn, 13, 17);
4047 rs2 = GET_FIELD(insn, 27, 31);
4048 if (gen_trap_ifnofpu(dc)) {
4049 goto jmp_insn;
4050 }
4051
4052 switch (opf) {
4053 case 0x000: /* VIS I edge8cc */
4054 CHECK_FPU_FEATURE(dc, VIS1);
4055 cpu_src1 = gen_load_gpr(dc, rs1);
4056 cpu_src2 = gen_load_gpr(dc, rs2);
4057 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4058 gen_store_gpr(dc, rd, cpu_dst);
4059 break;
4060 case 0x001: /* VIS II edge8n */
4061 CHECK_FPU_FEATURE(dc, VIS2);
4062 cpu_src1 = gen_load_gpr(dc, rs1);
4063 cpu_src2 = gen_load_gpr(dc, rs2);
4064 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4065 gen_store_gpr(dc, rd, cpu_dst);
4066 break;
4067 case 0x002: /* VIS I edge8lcc */
4068 CHECK_FPU_FEATURE(dc, VIS1);
4069 cpu_src1 = gen_load_gpr(dc, rs1);
4070 cpu_src2 = gen_load_gpr(dc, rs2);
4071 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4072 gen_store_gpr(dc, rd, cpu_dst);
4073 break;
4074 case 0x003: /* VIS II edge8ln */
4075 CHECK_FPU_FEATURE(dc, VIS2);
4076 cpu_src1 = gen_load_gpr(dc, rs1);
4077 cpu_src2 = gen_load_gpr(dc, rs2);
4078 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4079 gen_store_gpr(dc, rd, cpu_dst);
4080 break;
4081 case 0x004: /* VIS I edge16cc */
4082 CHECK_FPU_FEATURE(dc, VIS1);
4083 cpu_src1 = gen_load_gpr(dc, rs1);
4084 cpu_src2 = gen_load_gpr(dc, rs2);
4085 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4086 gen_store_gpr(dc, rd, cpu_dst);
4087 break;
4088 case 0x005: /* VIS II edge16n */
4089 CHECK_FPU_FEATURE(dc, VIS2);
4090 cpu_src1 = gen_load_gpr(dc, rs1);
4091 cpu_src2 = gen_load_gpr(dc, rs2);
4092 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4093 gen_store_gpr(dc, rd, cpu_dst);
4094 break;
4095 case 0x006: /* VIS I edge16lcc */
4096 CHECK_FPU_FEATURE(dc, VIS1);
4097 cpu_src1 = gen_load_gpr(dc, rs1);
4098 cpu_src2 = gen_load_gpr(dc, rs2);
4099 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4100 gen_store_gpr(dc, rd, cpu_dst);
4101 break;
4102 case 0x007: /* VIS II edge16ln */
4103 CHECK_FPU_FEATURE(dc, VIS2);
4104 cpu_src1 = gen_load_gpr(dc, rs1);
4105 cpu_src2 = gen_load_gpr(dc, rs2);
4106 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4107 gen_store_gpr(dc, rd, cpu_dst);
4108 break;
4109 case 0x008: /* VIS I edge32cc */
4110 CHECK_FPU_FEATURE(dc, VIS1);
4111 cpu_src1 = gen_load_gpr(dc, rs1);
4112 cpu_src2 = gen_load_gpr(dc, rs2);
4113 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4114 gen_store_gpr(dc, rd, cpu_dst);
4115 break;
4116 case 0x009: /* VIS II edge32n */
4117 CHECK_FPU_FEATURE(dc, VIS2);
4118 cpu_src1 = gen_load_gpr(dc, rs1);
4119 cpu_src2 = gen_load_gpr(dc, rs2);
4120 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4121 gen_store_gpr(dc, rd, cpu_dst);
4122 break;
4123 case 0x00a: /* VIS I edge32lcc */
4124 CHECK_FPU_FEATURE(dc, VIS1);
4125 cpu_src1 = gen_load_gpr(dc, rs1);
4126 cpu_src2 = gen_load_gpr(dc, rs2);
4127 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4128 gen_store_gpr(dc, rd, cpu_dst);
4129 break;
4130 case 0x00b: /* VIS II edge32ln */
4131 CHECK_FPU_FEATURE(dc, VIS2);
4132 cpu_src1 = gen_load_gpr(dc, rs1);
4133 cpu_src2 = gen_load_gpr(dc, rs2);
4134 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4135 gen_store_gpr(dc, rd, cpu_dst);
4136 break;
4137 case 0x010: /* VIS I array8 */
4138 CHECK_FPU_FEATURE(dc, VIS1);
4139 cpu_src1 = gen_load_gpr(dc, rs1);
4140 cpu_src2 = gen_load_gpr(dc, rs2);
4141 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4142 gen_store_gpr(dc, rd, cpu_dst);
4143 break;
4144 case 0x012: /* VIS I array16 */
4145 CHECK_FPU_FEATURE(dc, VIS1);
4146 cpu_src1 = gen_load_gpr(dc, rs1);
4147 cpu_src2 = gen_load_gpr(dc, rs2);
4148 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4149 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4150 gen_store_gpr(dc, rd, cpu_dst);
4151 break;
4152 case 0x014: /* VIS I array32 */
4153 CHECK_FPU_FEATURE(dc, VIS1);
4154 cpu_src1 = gen_load_gpr(dc, rs1);
4155 cpu_src2 = gen_load_gpr(dc, rs2);
4156 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4157 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4158 gen_store_gpr(dc, rd, cpu_dst);
4159 break;
4160 case 0x018: /* VIS I alignaddr */
4161 CHECK_FPU_FEATURE(dc, VIS1);
4162 cpu_src1 = gen_load_gpr(dc, rs1);
4163 cpu_src2 = gen_load_gpr(dc, rs2);
4164 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4165 gen_store_gpr(dc, rd, cpu_dst);
4166 break;
4167 case 0x01a: /* VIS I alignaddrl */
4168 CHECK_FPU_FEATURE(dc, VIS1);
4169 cpu_src1 = gen_load_gpr(dc, rs1);
4170 cpu_src2 = gen_load_gpr(dc, rs2);
4171 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4172 gen_store_gpr(dc, rd, cpu_dst);
4173 break;
4174 case 0x019: /* VIS II bmask */
4175 CHECK_FPU_FEATURE(dc, VIS2);
4176 cpu_src1 = gen_load_gpr(dc, rs1);
4177 cpu_src2 = gen_load_gpr(dc, rs2);
4178 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4179 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4180 gen_store_gpr(dc, rd, cpu_dst);
4181 break;
4182 case 0x020: /* VIS I fcmple16 */
4183 CHECK_FPU_FEATURE(dc, VIS1);
4184 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4185 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4186 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4187 gen_store_gpr(dc, rd, cpu_dst);
4188 break;
4189 case 0x022: /* VIS I fcmpne16 */
4190 CHECK_FPU_FEATURE(dc, VIS1);
4191 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4192 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4193 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4194 gen_store_gpr(dc, rd, cpu_dst);
4195 break;
4196 case 0x024: /* VIS I fcmple32 */
4197 CHECK_FPU_FEATURE(dc, VIS1);
4198 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4199 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4200 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4201 gen_store_gpr(dc, rd, cpu_dst);
4202 break;
4203 case 0x026: /* VIS I fcmpne32 */
4204 CHECK_FPU_FEATURE(dc, VIS1);
4205 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4206 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4207 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4208 gen_store_gpr(dc, rd, cpu_dst);
4209 break;
4210 case 0x028: /* VIS I fcmpgt16 */
4211 CHECK_FPU_FEATURE(dc, VIS1);
4212 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4213 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4214 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4215 gen_store_gpr(dc, rd, cpu_dst);
4216 break;
4217 case 0x02a: /* VIS I fcmpeq16 */
4218 CHECK_FPU_FEATURE(dc, VIS1);
4219 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4220 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4221 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4222 gen_store_gpr(dc, rd, cpu_dst);
4223 break;
4224 case 0x02c: /* VIS I fcmpgt32 */
4225 CHECK_FPU_FEATURE(dc, VIS1);
4226 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4227 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4228 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4229 gen_store_gpr(dc, rd, cpu_dst);
4230 break;
4231 case 0x02e: /* VIS I fcmpeq32 */
4232 CHECK_FPU_FEATURE(dc, VIS1);
4233 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4234 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4235 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4236 gen_store_gpr(dc, rd, cpu_dst);
4237 break;
4238 case 0x031: /* VIS I fmul8x16 */
4239 CHECK_FPU_FEATURE(dc, VIS1);
4240 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4241 break;
4242 case 0x033: /* VIS I fmul8x16au */
4243 CHECK_FPU_FEATURE(dc, VIS1);
4244 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4245 break;
4246 case 0x035: /* VIS I fmul8x16al */
4247 CHECK_FPU_FEATURE(dc, VIS1);
4248 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4249 break;
4250 case 0x036: /* VIS I fmul8sux16 */
4251 CHECK_FPU_FEATURE(dc, VIS1);
4252 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4253 break;
4254 case 0x037: /* VIS I fmul8ulx16 */
4255 CHECK_FPU_FEATURE(dc, VIS1);
4256 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4257 break;
4258 case 0x038: /* VIS I fmuld8sux16 */
4259 CHECK_FPU_FEATURE(dc, VIS1);
4260 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4261 break;
4262 case 0x039: /* VIS I fmuld8ulx16 */
4263 CHECK_FPU_FEATURE(dc, VIS1);
4264 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4265 break;
4266 case 0x03a: /* VIS I fpack32 */
4267 CHECK_FPU_FEATURE(dc, VIS1);
4268 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4269 break;
4270 case 0x03b: /* VIS I fpack16 */
4271 CHECK_FPU_FEATURE(dc, VIS1);
4272 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4273 cpu_dst_32 = gen_dest_fpr_F();
4274 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4275 gen_store_fpr_F(dc, rd, cpu_dst_32);
4276 break;
4277 case 0x03d: /* VIS I fpackfix */
4278 CHECK_FPU_FEATURE(dc, VIS1);
4279 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4280 cpu_dst_32 = gen_dest_fpr_F();
4281 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4282 gen_store_fpr_F(dc, rd, cpu_dst_32);
4283 break;
4284 case 0x03e: /* VIS I pdist */
4285 CHECK_FPU_FEATURE(dc, VIS1);
4286 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4287 break;
4288 case 0x048: /* VIS I faligndata */
4289 CHECK_FPU_FEATURE(dc, VIS1);
4290 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4291 break;
4292 case 0x04b: /* VIS I fpmerge */
4293 CHECK_FPU_FEATURE(dc, VIS1);
4294 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4295 break;
4296 case 0x04c: /* VIS II bshuffle */
4297 CHECK_FPU_FEATURE(dc, VIS2);
4298 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4299 break;
4300 case 0x04d: /* VIS I fexpand */
4301 CHECK_FPU_FEATURE(dc, VIS1);
4302 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4303 break;
4304 case 0x050: /* VIS I fpadd16 */
4305 CHECK_FPU_FEATURE(dc, VIS1);
4306 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4307 break;
4308 case 0x051: /* VIS I fpadd16s */
4309 CHECK_FPU_FEATURE(dc, VIS1);
4310 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4311 break;
4312 case 0x052: /* VIS I fpadd32 */
4313 CHECK_FPU_FEATURE(dc, VIS1);
4314 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4315 break;
4316 case 0x053: /* VIS I fpadd32s */
4317 CHECK_FPU_FEATURE(dc, VIS1);
4318 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4319 break;
4320 case 0x054: /* VIS I fpsub16 */
4321 CHECK_FPU_FEATURE(dc, VIS1);
4322 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4323 break;
4324 case 0x055: /* VIS I fpsub16s */
4325 CHECK_FPU_FEATURE(dc, VIS1);
4326 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4327 break;
4328 case 0x056: /* VIS I fpsub32 */
4329 CHECK_FPU_FEATURE(dc, VIS1);
4330 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4331 break;
4332 case 0x057: /* VIS I fpsub32s */
4333 CHECK_FPU_FEATURE(dc, VIS1);
4334 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4335 break;
4336 case 0x060: /* VIS I fzero */
4337 CHECK_FPU_FEATURE(dc, VIS1);
4338 cpu_dst_64 = gen_dest_fpr_D();
4339 tcg_gen_movi_i64(cpu_dst_64, 0);
4340 gen_store_fpr_D(dc, rd, cpu_dst_64);
4341 break;
4342 case 0x061: /* VIS I fzeros */
4343 CHECK_FPU_FEATURE(dc, VIS1);
4344 cpu_dst_32 = gen_dest_fpr_F();
4345 tcg_gen_movi_i32(cpu_dst_32, 0);
4346 gen_store_fpr_F(dc, rd, cpu_dst_32);
4347 break;
4348 case 0x062: /* VIS I fnor */
4349 CHECK_FPU_FEATURE(dc, VIS1);
4350 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4351 break;
4352 case 0x063: /* VIS I fnors */
4353 CHECK_FPU_FEATURE(dc, VIS1);
4354 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4355 break;
4356 case 0x064: /* VIS I fandnot2 */
4357 CHECK_FPU_FEATURE(dc, VIS1);
4358 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4359 break;
4360 case 0x065: /* VIS I fandnot2s */
4361 CHECK_FPU_FEATURE(dc, VIS1);
4362 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4363 break;
4364 case 0x066: /* VIS I fnot2 */
4365 CHECK_FPU_FEATURE(dc, VIS1);
4366 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4367 break;
4368 case 0x067: /* VIS I fnot2s */
4369 CHECK_FPU_FEATURE(dc, VIS1);
4370 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4371 break;
4372 case 0x068: /* VIS I fandnot1 */
4373 CHECK_FPU_FEATURE(dc, VIS1);
4374 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4375 break;
4376 case 0x069: /* VIS I fandnot1s */
4377 CHECK_FPU_FEATURE(dc, VIS1);
4378 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4379 break;
4380 case 0x06a: /* VIS I fnot1 */
4381 CHECK_FPU_FEATURE(dc, VIS1);
4382 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4383 break;
4384 case 0x06b: /* VIS I fnot1s */
4385 CHECK_FPU_FEATURE(dc, VIS1);
4386 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4387 break;
4388 case 0x06c: /* VIS I fxor */
4389 CHECK_FPU_FEATURE(dc, VIS1);
4390 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4391 break;
4392 case 0x06d: /* VIS I fxors */
4393 CHECK_FPU_FEATURE(dc, VIS1);
4394 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4395 break;
4396 case 0x06e: /* VIS I fnand */
4397 CHECK_FPU_FEATURE(dc, VIS1);
4398 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4399 break;
4400 case 0x06f: /* VIS I fnands */
4401 CHECK_FPU_FEATURE(dc, VIS1);
4402 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4403 break;
4404 case 0x070: /* VIS I fand */
4405 CHECK_FPU_FEATURE(dc, VIS1);
4406 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4407 break;
4408 case 0x071: /* VIS I fands */
4409 CHECK_FPU_FEATURE(dc, VIS1);
4410 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4411 break;
4412 case 0x072: /* VIS I fxnor */
4413 CHECK_FPU_FEATURE(dc, VIS1);
4414 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4415 break;
4416 case 0x073: /* VIS I fxnors */
4417 CHECK_FPU_FEATURE(dc, VIS1);
4418 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4419 break;
4420 case 0x074: /* VIS I fsrc1 */
4421 CHECK_FPU_FEATURE(dc, VIS1);
4422 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4423 gen_store_fpr_D(dc, rd, cpu_src1_64);
4424 break;
4425 case 0x075: /* VIS I fsrc1s */
4426 CHECK_FPU_FEATURE(dc, VIS1);
4427 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4428 gen_store_fpr_F(dc, rd, cpu_src1_32);
4429 break;
4430 case 0x076: /* VIS I fornot2 */
4431 CHECK_FPU_FEATURE(dc, VIS1);
4432 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4433 break;
4434 case 0x077: /* VIS I fornot2s */
4435 CHECK_FPU_FEATURE(dc, VIS1);
4436 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4437 break;
4438 case 0x078: /* VIS I fsrc2 */
4439 CHECK_FPU_FEATURE(dc, VIS1);
4440 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4441 gen_store_fpr_D(dc, rd, cpu_src1_64);
4442 break;
4443 case 0x079: /* VIS I fsrc2s */
4444 CHECK_FPU_FEATURE(dc, VIS1);
4445 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4446 gen_store_fpr_F(dc, rd, cpu_src1_32);
4447 break;
4448 case 0x07a: /* VIS I fornot1 */
4449 CHECK_FPU_FEATURE(dc, VIS1);
4450 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4451 break;
4452 case 0x07b: /* VIS I fornot1s */
4453 CHECK_FPU_FEATURE(dc, VIS1);
4454 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4455 break;
4456 case 0x07c: /* VIS I for */
4457 CHECK_FPU_FEATURE(dc, VIS1);
4458 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4459 break;
4460 case 0x07d: /* VIS I fors */
4461 CHECK_FPU_FEATURE(dc, VIS1);
4462 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4463 break;
4464 case 0x07e: /* VIS I fone */
4465 CHECK_FPU_FEATURE(dc, VIS1);
4466 cpu_dst_64 = gen_dest_fpr_D();
4467 tcg_gen_movi_i64(cpu_dst_64, -1);
4468 gen_store_fpr_D(dc, rd, cpu_dst_64);
4469 break;
4470 case 0x07f: /* VIS I fones */
4471 CHECK_FPU_FEATURE(dc, VIS1);
4472 cpu_dst_32 = gen_dest_fpr_F();
4473 tcg_gen_movi_i32(cpu_dst_32, -1);
4474 gen_store_fpr_F(dc, rd, cpu_dst_32);
4475 break;
4476 case 0x080: /* VIS I shutdown */
4477 case 0x081: /* VIS II siam */
4478 // XXX
4479 goto illegal_insn;
4480 default:
4481 goto illegal_insn;
4482 }
4483#else
4484 goto ncp_insn;
4485#endif
4486 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4487#ifdef TARGET_SPARC64
4488 goto illegal_insn;
4489#else
4490 goto ncp_insn;
4491#endif
4492#ifdef TARGET_SPARC64
4493 } else if (xop == 0x39) { /* V9 return */
4494 TCGv_i32 r_const;
4495
4496 save_state(dc);
4497 cpu_src1 = get_src1(dc, insn);
4498 if (IS_IMM) { /* immediate */
4499 simm = GET_FIELDs(insn, 19, 31);
4500 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4501 } else { /* register */
4502 rs2 = GET_FIELD(insn, 27, 31);
4503 if (rs2) {
4504 cpu_src2 = gen_load_gpr(dc, rs2);
4505 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4506 } else {
4507 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4508 }
4509 }
4510 gen_helper_restore(cpu_env);
4511 gen_mov_pc_npc(dc);
4512 r_const = tcg_const_i32(3);
4513 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4514 tcg_temp_free_i32(r_const);
4515 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4516 dc->npc = DYNAMIC_PC;
4517 goto jmp_insn;
4518#endif
4519 } else {
4520 cpu_src1 = get_src1(dc, insn);
4521 if (IS_IMM) { /* immediate */
4522 simm = GET_FIELDs(insn, 19, 31);
4523 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4524 } else { /* register */
4525 rs2 = GET_FIELD(insn, 27, 31);
4526 if (rs2) {
4527 cpu_src2 = gen_load_gpr(dc, rs2);
4528 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4529 } else {
4530 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4531 }
4532 }
4533 switch (xop) {
4534 case 0x38: /* jmpl */
4535 {
4536 TCGv t;
4537 TCGv_i32 r_const;
4538
4539 t = gen_dest_gpr(dc, rd);
4540 tcg_gen_movi_tl(t, dc->pc);
4541 gen_store_gpr(dc, rd, t);
4542 gen_mov_pc_npc(dc);
4543 r_const = tcg_const_i32(3);
4544 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4545 tcg_temp_free_i32(r_const);
4546 gen_address_mask(dc, cpu_dst);
4547 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4548 dc->npc = DYNAMIC_PC;
4549 }
4550 goto jmp_insn;
4551#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4552 case 0x39: /* rett, V9 return */
4553 {
4554 TCGv_i32 r_const;
4555
4556 if (!supervisor(dc))
4557 goto priv_insn;
4558 gen_mov_pc_npc(dc);
4559 r_const = tcg_const_i32(3);
4560 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4561 tcg_temp_free_i32(r_const);
4562 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4563 dc->npc = DYNAMIC_PC;
4564 gen_helper_rett(cpu_env);
4565 }
4566 goto jmp_insn;
4567#endif
4568 case 0x3b: /* flush */
4569 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4570 goto unimp_flush;
4571 /* nop */
4572 break;
4573 case 0x3c: /* save */
4574 save_state(dc);
4575 gen_helper_save(cpu_env);
4576 gen_store_gpr(dc, rd, cpu_dst);
4577 break;
4578 case 0x3d: /* restore */
4579 save_state(dc);
4580 gen_helper_restore(cpu_env);
4581 gen_store_gpr(dc, rd, cpu_dst);
4582 break;
4583#if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4584 case 0x3e: /* V9 done/retry */
4585 {
4586 switch (rd) {
4587 case 0:
4588 if (!supervisor(dc))
4589 goto priv_insn;
4590 dc->npc = DYNAMIC_PC;
4591 dc->pc = DYNAMIC_PC;
4592 gen_helper_done(cpu_env);
4593 goto jmp_insn;
4594 case 1:
4595 if (!supervisor(dc))
4596 goto priv_insn;
4597 dc->npc = DYNAMIC_PC;
4598 dc->pc = DYNAMIC_PC;
4599 gen_helper_retry(cpu_env);
4600 goto jmp_insn;
4601 default:
4602 goto illegal_insn;
4603 }
4604 }
4605 break;
4606#endif
4607 default:
4608 goto illegal_insn;
4609 }
4610 }
4611 break;
4612 }
4613 break;
4614 case 3: /* load/store instructions */
4615 {
4616 unsigned int xop = GET_FIELD(insn, 7, 12);
4617
4618 cpu_src1 = get_src1(dc, insn);
4619 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4620 rs2 = GET_FIELD(insn, 27, 31);
4621 cpu_src2 = gen_load_gpr(dc, rs2);
4622 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4623 } else if (IS_IMM) { /* immediate */
4624 simm = GET_FIELDs(insn, 19, 31);
4625 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4626 } else { /* register */
4627 rs2 = GET_FIELD(insn, 27, 31);
4628 if (rs2 != 0) {
4629 cpu_src2 = gen_load_gpr(dc, rs2);
4630 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4631 } else {
4632 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4633 }
4634 }
4635 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4636 (xop > 0x17 && xop <= 0x1d ) ||
4637 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4638 switch (xop) {
4639 case 0x0: /* ld, V9 lduw, load unsigned word */
4640 gen_address_mask(dc, cpu_addr);
4641 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4642 break;
4643 case 0x1: /* ldub, load unsigned byte */
4644 gen_address_mask(dc, cpu_addr);
4645 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4646 break;
4647 case 0x2: /* lduh, load unsigned halfword */
4648 gen_address_mask(dc, cpu_addr);
4649 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4650 break;
4651 case 0x3: /* ldd, load double word */
4652 if (rd & 1)
4653 goto illegal_insn;
4654 else {
4655 TCGv_i32 r_const;
4656
4657 save_state(dc);
4658 r_const = tcg_const_i32(7);
4659 /* XXX remove alignment check */
4660 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4661 tcg_temp_free_i32(r_const);
4662 gen_address_mask(dc, cpu_addr);
4663 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4664 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4665 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4666 gen_store_gpr(dc, rd + 1, cpu_tmp0);
4667 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4668 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4669 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4670 }
4671 break;
4672 case 0x9: /* ldsb, load signed byte */
4673 gen_address_mask(dc, cpu_addr);
4674 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4675 break;
4676 case 0xa: /* ldsh, load signed halfword */
4677 gen_address_mask(dc, cpu_addr);
4678 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4679 break;
4680 case 0xd: /* ldstub -- XXX: should be atomically */
4681 {
4682 TCGv r_const;
4683
4684 gen_address_mask(dc, cpu_addr);
4685 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4686 r_const = tcg_const_tl(0xff);
4687 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4688 tcg_temp_free(r_const);
4689 }
4690 break;
4691 case 0x0f: /* swap, swap register with memory. Also
4692 atomically */
4693 CHECK_IU_FEATURE(dc, SWAP);
4694 gen_movl_reg_TN(rd, cpu_val);
4695 gen_address_mask(dc, cpu_addr);
4696 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4697 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4698 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4699 break;
4700#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4701 case 0x10: /* lda, V9 lduwa, load word alternate */
4702#ifndef TARGET_SPARC64
4703 if (IS_IMM)
4704 goto illegal_insn;
4705 if (!supervisor(dc))
4706 goto priv_insn;
4707#endif
4708 save_state(dc);
4709 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4710 break;
4711 case 0x11: /* lduba, load unsigned byte alternate */
4712#ifndef TARGET_SPARC64
4713 if (IS_IMM)
4714 goto illegal_insn;
4715 if (!supervisor(dc))
4716 goto priv_insn;
4717#endif
4718 save_state(dc);
4719 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4720 break;
4721 case 0x12: /* lduha, load unsigned halfword alternate */
4722#ifndef TARGET_SPARC64
4723 if (IS_IMM)
4724 goto illegal_insn;
4725 if (!supervisor(dc))
4726 goto priv_insn;
4727#endif
4728 save_state(dc);
4729 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4730 break;
4731 case 0x13: /* ldda, load double word alternate */
4732#ifndef TARGET_SPARC64
4733 if (IS_IMM)
4734 goto illegal_insn;
4735 if (!supervisor(dc))
4736 goto priv_insn;
4737#endif
4738 if (rd & 1)
4739 goto illegal_insn;
4740 save_state(dc);
4741 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4742 goto skip_move;
4743 case 0x19: /* ldsba, load signed byte alternate */
4744#ifndef TARGET_SPARC64
4745 if (IS_IMM)
4746 goto illegal_insn;
4747 if (!supervisor(dc))
4748 goto priv_insn;
4749#endif
4750 save_state(dc);
4751 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4752 break;
4753 case 0x1a: /* ldsha, load signed halfword alternate */
4754#ifndef TARGET_SPARC64
4755 if (IS_IMM)
4756 goto illegal_insn;
4757 if (!supervisor(dc))
4758 goto priv_insn;
4759#endif
4760 save_state(dc);
4761 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4762 break;
4763 case 0x1d: /* ldstuba -- XXX: should be atomically */
4764#ifndef TARGET_SPARC64
4765 if (IS_IMM)
4766 goto illegal_insn;
4767 if (!supervisor(dc))
4768 goto priv_insn;
4769#endif
4770 save_state(dc);
4771 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4772 break;
4773 case 0x1f: /* swapa, swap reg with alt. memory. Also
4774 atomically */
4775 CHECK_IU_FEATURE(dc, SWAP);
4776#ifndef TARGET_SPARC64
4777 if (IS_IMM)
4778 goto illegal_insn;
4779 if (!supervisor(dc))
4780 goto priv_insn;
4781#endif
4782 save_state(dc);
4783 gen_movl_reg_TN(rd, cpu_val);
4784 gen_swap_asi(cpu_val, cpu_addr, insn);
4785 break;
4786
4787#ifndef TARGET_SPARC64
4788 case 0x30: /* ldc */
4789 case 0x31: /* ldcsr */
4790 case 0x33: /* lddc */
4791 goto ncp_insn;
4792#endif
4793#endif
4794#ifdef TARGET_SPARC64
4795 case 0x08: /* V9 ldsw */
4796 gen_address_mask(dc, cpu_addr);
4797 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4798 break;
4799 case 0x0b: /* V9 ldx */
4800 gen_address_mask(dc, cpu_addr);
4801 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4802 break;
4803 case 0x18: /* V9 ldswa */
4804 save_state(dc);
4805 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4806 break;
4807 case 0x1b: /* V9 ldxa */
4808 save_state(dc);
4809 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4810 break;
4811 case 0x2d: /* V9 prefetch, no effect */
4812 goto skip_move;
4813 case 0x30: /* V9 ldfa */
4814 if (gen_trap_ifnofpu(dc)) {
4815 goto jmp_insn;
4816 }
4817 save_state(dc);
4818 gen_ldf_asi(cpu_addr, insn, 4, rd);
4819 gen_update_fprs_dirty(rd);
4820 goto skip_move;
4821 case 0x33: /* V9 lddfa */
4822 if (gen_trap_ifnofpu(dc)) {
4823 goto jmp_insn;
4824 }
4825 save_state(dc);
4826 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4827 gen_update_fprs_dirty(DFPREG(rd));
4828 goto skip_move;
4829 case 0x3d: /* V9 prefetcha, no effect */
4830 goto skip_move;
4831 case 0x32: /* V9 ldqfa */
4832 CHECK_FPU_FEATURE(dc, FLOAT128);
4833 if (gen_trap_ifnofpu(dc)) {
4834 goto jmp_insn;
4835 }
4836 save_state(dc);
4837 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4838 gen_update_fprs_dirty(QFPREG(rd));
4839 goto skip_move;
4840#endif
4841 default:
4842 goto illegal_insn;
4843 }
4844 gen_store_gpr(dc, rd, cpu_val);
4845#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4846 skip_move: ;
4847#endif
4848 } else if (xop >= 0x20 && xop < 0x24) {
4849 if (gen_trap_ifnofpu(dc)) {
4850 goto jmp_insn;
4851 }
4852 save_state(dc);
4853 switch (xop) {
4854 case 0x20: /* ldf, load fpreg */
4855 gen_address_mask(dc, cpu_addr);
4856 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4857 cpu_dst_32 = gen_dest_fpr_F();
4858 tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4859 gen_store_fpr_F(dc, rd, cpu_dst_32);
4860 break;
4861 case 0x21: /* ldfsr, V9 ldxfsr */
4862#ifdef TARGET_SPARC64
4863 gen_address_mask(dc, cpu_addr);
4864 if (rd == 1) {
4865 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4866 gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4867 } else {
4868 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4869 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4870 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4871 }
4872#else
4873 {
4874 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4875 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4876 }
4877#endif
4878 break;
4879 case 0x22: /* ldqf, load quad fpreg */
4880 {
4881 TCGv_i32 r_const;
4882
4883 CHECK_FPU_FEATURE(dc, FLOAT128);
4884 r_const = tcg_const_i32(dc->mem_idx);
4885 gen_address_mask(dc, cpu_addr);
4886 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4887 tcg_temp_free_i32(r_const);
4888 gen_op_store_QT0_fpr(QFPREG(rd));
4889 gen_update_fprs_dirty(QFPREG(rd));
4890 }
4891 break;
4892 case 0x23: /* lddf, load double fpreg */
4893 gen_address_mask(dc, cpu_addr);
4894 cpu_dst_64 = gen_dest_fpr_D();
4895 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4896 gen_store_fpr_D(dc, rd, cpu_dst_64);
4897 break;
4898 default:
4899 goto illegal_insn;
4900 }
4901 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4902 xop == 0xe || xop == 0x1e) {
4903 gen_movl_reg_TN(rd, cpu_val);
4904 switch (xop) {
4905 case 0x4: /* st, store word */
4906 gen_address_mask(dc, cpu_addr);
4907 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4908 break;
4909 case 0x5: /* stb, store byte */
4910 gen_address_mask(dc, cpu_addr);
4911 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4912 break;
4913 case 0x6: /* sth, store halfword */
4914 gen_address_mask(dc, cpu_addr);
4915 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4916 break;
4917 case 0x7: /* std, store double word */
4918 if (rd & 1)
4919 goto illegal_insn;
4920 else {
4921 TCGv_i32 r_const;
4922
4923 save_state(dc);
4924 gen_address_mask(dc, cpu_addr);
4925 r_const = tcg_const_i32(7);
4926 /* XXX remove alignment check */
4927 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4928 tcg_temp_free_i32(r_const);
4929 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4930 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4931 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4932 }
4933 break;
4934#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4935 case 0x14: /* sta, V9 stwa, store word alternate */
4936#ifndef TARGET_SPARC64
4937 if (IS_IMM)
4938 goto illegal_insn;
4939 if (!supervisor(dc))
4940 goto priv_insn;
4941#endif
4942 save_state(dc);
4943 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4944 dc->npc = DYNAMIC_PC;
4945 break;
4946 case 0x15: /* stba, store byte alternate */
4947#ifndef TARGET_SPARC64
4948 if (IS_IMM)
4949 goto illegal_insn;
4950 if (!supervisor(dc))
4951 goto priv_insn;
4952#endif
4953 save_state(dc);
4954 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4955 dc->npc = DYNAMIC_PC;
4956 break;
4957 case 0x16: /* stha, store halfword alternate */
4958#ifndef TARGET_SPARC64
4959 if (IS_IMM)
4960 goto illegal_insn;
4961 if (!supervisor(dc))
4962 goto priv_insn;
4963#endif
4964 save_state(dc);
4965 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4966 dc->npc = DYNAMIC_PC;
4967 break;
4968 case 0x17: /* stda, store double word alternate */
4969#ifndef TARGET_SPARC64
4970 if (IS_IMM)
4971 goto illegal_insn;
4972 if (!supervisor(dc))
4973 goto priv_insn;
4974#endif
4975 if (rd & 1)
4976 goto illegal_insn;
4977 else {
4978 save_state(dc);
4979 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4980 }
4981 break;
4982#endif
4983#ifdef TARGET_SPARC64
4984 case 0x0e: /* V9 stx */
4985 gen_address_mask(dc, cpu_addr);
4986 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4987 break;
4988 case 0x1e: /* V9 stxa */
4989 save_state(dc);
4990 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4991 dc->npc = DYNAMIC_PC;
4992 break;
4993#endif
4994 default:
4995 goto illegal_insn;
4996 }
4997 } else if (xop > 0x23 && xop < 0x28) {
4998 if (gen_trap_ifnofpu(dc)) {
4999 goto jmp_insn;
5000 }
5001 save_state(dc);
5002 switch (xop) {
5003 case 0x24: /* stf, store fpreg */
5004 gen_address_mask(dc, cpu_addr);
5005 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5006 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
5007 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
5008 break;
5009 case 0x25: /* stfsr, V9 stxfsr */
5010#ifdef TARGET_SPARC64
5011 gen_address_mask(dc, cpu_addr);
5012 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
5013 if (rd == 1)
5014 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
5015 else
5016 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
5017#else
5018 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
5019 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
5020#endif
5021 break;
5022 case 0x26:
5023#ifdef TARGET_SPARC64
5024 /* V9 stqf, store quad fpreg */
5025 {
5026 TCGv_i32 r_const;
5027
5028 CHECK_FPU_FEATURE(dc, FLOAT128);
5029 gen_op_load_fpr_QT0(QFPREG(rd));
5030 r_const = tcg_const_i32(dc->mem_idx);
5031 gen_address_mask(dc, cpu_addr);
5032 gen_helper_stqf(cpu_env, cpu_addr, r_const);
5033 tcg_temp_free_i32(r_const);
5034 }
5035 break;
5036#else /* !TARGET_SPARC64 */
5037 /* stdfq, store floating point queue */
5038#if defined(CONFIG_USER_ONLY)
5039 goto illegal_insn;
5040#else
5041 if (!supervisor(dc))
5042 goto priv_insn;
5043 if (gen_trap_ifnofpu(dc)) {
5044 goto jmp_insn;
5045 }
5046 goto nfq_insn;
5047#endif
5048#endif
5049 case 0x27: /* stdf, store double fpreg */
5050 gen_address_mask(dc, cpu_addr);
5051 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5052 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5053 break;
5054 default:
5055 goto illegal_insn;
5056 }
5057 } else if (xop > 0x33 && xop < 0x3f) {
5058 save_state(dc);
5059 switch (xop) {
5060#ifdef TARGET_SPARC64
5061 case 0x34: /* V9 stfa */
5062 if (gen_trap_ifnofpu(dc)) {
5063 goto jmp_insn;
5064 }
5065 gen_stf_asi(cpu_addr, insn, 4, rd);
5066 break;
5067 case 0x36: /* V9 stqfa */
5068 {
5069 TCGv_i32 r_const;
5070
5071 CHECK_FPU_FEATURE(dc, FLOAT128);
5072 if (gen_trap_ifnofpu(dc)) {
5073 goto jmp_insn;
5074 }
5075 r_const = tcg_const_i32(7);
5076 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5077 tcg_temp_free_i32(r_const);
5078 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5079 }
5080 break;
5081 case 0x37: /* V9 stdfa */
5082 if (gen_trap_ifnofpu(dc)) {
5083 goto jmp_insn;
5084 }
5085 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5086 break;
5087 case 0x3c: /* V9 casa */
5088 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5089 gen_store_gpr(dc, rd, cpu_val);
5090 break;
5091 case 0x3e: /* V9 casxa */
5092 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5093 gen_store_gpr(dc, rd, cpu_val);
5094 break;
5095#else
5096 case 0x34: /* stc */
5097 case 0x35: /* stcsr */
5098 case 0x36: /* stdcq */
5099 case 0x37: /* stdc */
5100 goto ncp_insn;
5101#endif
5102 default:
5103 goto illegal_insn;
5104 }
5105 } else
5106 goto illegal_insn;
5107 }
5108 break;
5109 }
5110 /* default case for non jump instructions */
5111 if (dc->npc == DYNAMIC_PC) {
5112 dc->pc = DYNAMIC_PC;
5113 gen_op_next_insn();
5114 } else if (dc->npc == JUMP_PC) {
5115 /* we can do a static jump */
5116 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5117 dc->is_br = 1;
5118 } else {
5119 dc->pc = dc->npc;
5120 dc->npc = dc->npc + 4;
5121 }
5122 jmp_insn:
5123 goto egress;
5124 illegal_insn:
5125 {
5126 TCGv_i32 r_const;
5127
5128 save_state(dc);
5129 r_const = tcg_const_i32(TT_ILL_INSN);
5130 gen_helper_raise_exception(cpu_env, r_const);
5131 tcg_temp_free_i32(r_const);
5132 dc->is_br = 1;
5133 }
5134 goto egress;
5135 unimp_flush:
5136 {
5137 TCGv_i32 r_const;
5138
5139 save_state(dc);
5140 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5141 gen_helper_raise_exception(cpu_env, r_const);
5142 tcg_temp_free_i32(r_const);
5143 dc->is_br = 1;
5144 }
5145 goto egress;
5146#if !defined(CONFIG_USER_ONLY)
5147 priv_insn:
5148 {
5149 TCGv_i32 r_const;
5150
5151 save_state(dc);
5152 r_const = tcg_const_i32(TT_PRIV_INSN);
5153 gen_helper_raise_exception(cpu_env, r_const);
5154 tcg_temp_free_i32(r_const);
5155 dc->is_br = 1;
5156 }
5157 goto egress;
5158#endif
5159 nfpu_insn:
5160 save_state(dc);
5161 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5162 dc->is_br = 1;
5163 goto egress;
5164#if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5165 nfq_insn:
5166 save_state(dc);
5167 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5168 dc->is_br = 1;
5169 goto egress;
5170#endif
5171#ifndef TARGET_SPARC64
5172 ncp_insn:
5173 {
5174 TCGv r_const;
5175
5176 save_state(dc);
5177 r_const = tcg_const_i32(TT_NCP_INSN);
5178 gen_helper_raise_exception(cpu_env, r_const);
5179 tcg_temp_free(r_const);
5180 dc->is_br = 1;
5181 }
5182 goto egress;
5183#endif
5184 egress:
5185 tcg_temp_free(cpu_tmp1);
5186 tcg_temp_free(cpu_tmp2);
5187 if (dc->n_t32 != 0) {
5188 int i;
5189 for (i = dc->n_t32 - 1; i >= 0; --i) {
5190 tcg_temp_free_i32(dc->t32[i]);
5191 }
5192 dc->n_t32 = 0;
5193 }
5194 if (dc->n_ttl != 0) {
5195 int i;
5196 for (i = dc->n_ttl - 1; i >= 0; --i) {
5197 tcg_temp_free(dc->ttl[i]);
5198 }
5199 dc->n_ttl = 0;
5200 }
5201}
5202
/* Translate a block of SPARC guest code into a TCG opcode stream.
 *
 * tb:  the TranslationBlock being filled in; tb->pc is the start address
 *      and tb->cs_base carries the npc of the first instruction (used for
 *      delay-slot handling).
 * spc: non-zero when invoked from gen_intermediate_code_pc ("search PC"
 *      mode); per-instruction pc/npc/icount metadata is then recorded in
 *      the gen_opc_* side arrays so restore_state_to_opc() can later map
 *      an opcode index back to guest state.
 * env: CPU state, consulted for the MMU index, breakpoints and code fetch.
 */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;          /* lj: last gen_opc_* slot written (spc mode) */
    int num_insns;
    int max_insns;
    unsigned int insn;

    /* Set up the disassembly context for this block.  */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;        /* condition codes state unknown yet */
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* If a breakpoint is set on the current PC, emit a debug trap and
           terminate the block instead of translating further.  */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc);
                    gen_helper_debug(cpu_env);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            /* Record the guest pc/npc for this opcode index so the state
               can be reconstructed from a host PC later.  */
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        insn = cpu_ldl_code(env, dc->pc);

        /* Per-instruction scratch temporaries used by disas_sparc_insn;
           allocated fresh and freed after each instruction.  */
        cpu_tmp0 = tcg_temp_new();
        cpu_tmp32 = tcg_temp_new_i32();
        cpu_tmp64 = tcg_temp_new_i64();
        cpu_dst = tcg_temp_new();
        cpu_val = tcg_temp_new();
        cpu_addr = tcg_temp_new();

        disas_sparc_insn(dc, insn);
        num_insns++;

        tcg_temp_free(cpu_addr);
        tcg_temp_free(cpu_val);
        tcg_temp_free(cpu_dst);
        tcg_temp_free_i64(cpu_tmp64);
        tcg_temp_free_i32(cpu_tmp32);
        tcg_temp_free(cpu_tmp0);

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            /* pc or npc only known at run time: store what we can and
               return to the main loop.  */
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Zero-fill the metadata slots past the last real instruction.  */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
5342
5343void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5344{
5345 gen_intermediate_code_internal(tb, 0, env);
5346}
5347
5348void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5349{
5350 gen_intermediate_code_internal(tb, 1, env);
5351}
5352
/* One-time registration of the TCG global variables that mirror the SPARC
 * CPU state in CPUSPARCState.  Guarded by 'inited' so the globals are
 * created only once per process, however many CPUs are initialized.
 */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;   /* non-zero once the globals have been registered */
    /* Debug names for the global registers; index 0 is unused because %g0
       is hardwired to zero and has no backing TCG global.  */
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    /* Debug names for cpu_fpr[]; each element is a 64-bit global named
       after the even half of the single-precision register pair it holds. */
    static const char * const fregnames[32] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUSPARCState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        /* SPARC64-only architectural state.  */
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUSPARCState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUSPARCState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUSPARCState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUSPARCState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUSPARCState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUSPARCState, softint),
                                             "softint");
#else
        /* SPARC32 only: window invalid mask.  */
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
                                     "wim");
#endif
        /* State common to both sparc32 and sparc64.  */
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUSPARCState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
                                     "tbr");
#endif
        /* Global registers %g1..%g7; %g0 is always zero and not backed.  */
        for (i = 1; i < 8; i++) {
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUSPARCState, gregs[i]),
                                              gregnames[i]);
        }
        /* Floating-point registers, held as 64-bit pairs.  */
        for (i = 0; i < TARGET_DPREGS; i++) {
            cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                                offsetof(CPUSPARCState, fpr[i]),
                                                fregnames[i]);
        }

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
5458
5459void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5460{
5461 target_ulong npc;
5462 env->pc = gen_opc_pc[pc_pos];
5463 npc = gen_opc_npc[pc_pos];
5464 if (npc == 1) {
5465 /* dynamic NPC: already stored */
5466 } else if (npc == 2) {
5467 /* jump PC: use 'cond' and the jump targets of the translation */
5468 if (env->cond) {
5469 env->npc = gen_opc_jump_pc[0];
5470 } else {
5471 env->npc = gen_opc_jump_pc[1];
5472 }
5473 } else {
5474 env->npc = npc;
5475 }
5476}