/*
   SPARC translation

   Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
   Copyright (C) 2003-2005 Fabrice Bellard

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2 of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <inttypes.h>

#include "cpu.h"
#include "disas.h"
#include "helper.h"
#include "tcg-op.h"

#define GEN_HELPER 1
#include "helper.h"

#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */

/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];

static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"

typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];
    int n_t32;
} DisasContext;

// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
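
/*
 * Worked example of the two numbering schemes: with the big-endian
 * numbering used by GET_FIELD (bit 0 is the MSB), GET_FIELD(insn, 3, 6)
 * expands to (insn >> 25) & 0xf and extracts architectural bits 28..25,
 * the cond field of a Bicc instruction.  GET_FIELD_SP(insn, 25, 27)
 * converts to the same form and yields bits 27..25.
 */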

#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
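
/*
 * Example of the SPARC64 encoding: double- and quad-precision register
 * numbers keep bit 5 of the register number in bit 0 of the 5-bit rd
 * field, so %f32 is encoded as rd = 1 and DFPREG(1) == 32, while
 * DFPREG(3) == 34 selects %f34.
 */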

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f

static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (x << len) >> len;
}
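
/*
 * Example: a 13-bit immediate field holding 0x1fff sign-extends to -1;
 * sign_extend(0x1fff, 13) shifts the value left by 19 bits and then
 * arithmetically back right by 19, replicating the sign bit.
 */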

#define IS_IMM (insn & (1 << 13))

static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
#endif
}
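
/*
 * Bit 0 of FPRS is DL (lower half of the register file dirty) and bit 1
 * is DU (upper half dirty), so writes to registers below %f32 set DL
 * and writes to %f32 and above set DU.
 */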

/* floating point registers moves */
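/*
 * Layout note: the single-precision registers are packed two per
 * TCGv_i64 element, with cpu_fpr[i] holding %f(2i) in its high 32 bits
 * and %f(2i + 1) in its low 32 bits.  The load/store helpers below hide
 * this packing from the rest of the translator.
 */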
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        TCGv_i32 ret = tcg_temp_local_new_i32();
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_trunc_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        dc->t32[dc->n_t32++] = ret;
        assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));

        return ret;
    }
#endif
}

static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dst);
}

static TCGv_i32 gen_dest_fpr_F(void)
{
    return cpu_tmp32;
}

static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dst);
}

static TCGv_i64 gen_dest_fpr_D(void)
{
    return cpu_tmp64;
}

static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src / 2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src / 2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst / 2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

#ifdef TARGET_SPARC64
static void gen_move_Q(unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(rd);
}
#endif

/* moves */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif

static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
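
/*
 * Example: with PSTATE.AM set (or always for the 32-bit ABI), a 64-bit
 * effective address such as 0x123456789 is masked down to 0x23456789
 * before the access is generated.
 */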

static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
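
/*
 * Register file note: %g0 reads as zero and ignores writes, %g1..%g7
 * live in the TCG globals cpu_gregs[], and the windowed %o, %l and %i
 * registers (numbers 8..31) are reached indirectly through cpu_regwptr,
 * which points into the current register window.
 */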

static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
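
/*
 * Chaining note: tcg_gen_exit_tb((tcg_target_long)tb + tb_num) returns
 * the TB pointer tagged with the jump slot number, which lets the
 * execution loop patch the preceding goto_tb so later runs flow
 * directly into the next translation block; tcg_gen_exit_tb(0) always
 * returns to the loop for a fresh TB lookup.
 */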

// XXX suboptimal
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
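
/*
 * Carry recovery used above and in gen_sub32_carry32 below: for a
 * 32-bit add dst = src1 + src2 computed modulo 2^32, the carry out is
 * set exactly when the result wrapped, i.e. when dst < src1 unsigned,
 * which is what the TCG_COND_LTU setcond tests.
 */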

static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}

static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}

static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}

static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2,
                                  DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the subtraction that generated the carry in the
               first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}

static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2,
                                   int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}
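
/*
 * V8 semantics implemented by gen_op_multiply and its two wrappers
 * below: UMUL/SMUL form a full 32 x 32 -> 64-bit product, writing the
 * high 32 bits to %y and the low 32 bits to the destination register.
 */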

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}

#ifdef TARGET_SPARC64
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(cpu_env, r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

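/*
 * Besides the divide-by-zero check above, SDIVX must special-case
 * INT64_MIN / -1: the true quotient 2^63 is unrepresentable in 64-bit
 * two's complement, so the code below pins the result to INT64_MIN
 * and avoids performing that division in host arithmetic, which would
 * trap on many hosts.
 */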
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;
    TCGv r_temp1, r_temp2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp1 = tcg_temp_local_new();
    r_temp2 = tcg_temp_local_new();
    tcg_gen_mov_tl(r_temp1, src1);
    tcg_gen_mov_tl(r_temp2, src2);
    gen_trap_ifdivzero_tl(r_temp2);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
    gen_set_label(l2);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#endif

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

/*
   FPSR bit field FCC1 | FCC0:
    0 =
    1 <
    2 >
    3 unordered
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
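
/*
 * The eval helpers below decode the two-bit fcc value extracted by the
 * pair above; for instance "branch on less" (fbl) tests fcc == 1,
 * which is FCC0 & !FCC1, while "unordered or less" (fbul) only needs
 * FCC0 since it accepts fcc == 1 or 3.
 */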

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}

/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
    save_npc(dc, cond);
}

static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}

static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}

static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}

#ifdef TARGET_SPARC64
// Inverted logic
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif

static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
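
/*
 * Delay-slot note for do_branch/do_fbranch: the instruction at npc
 * executes before a taken branch reaches its target.  With the annul
 * bit set, a conditional branch skips the slot only when it is not
 * taken (gen_branch_a falls through to npc + 4), an always-taken
 * "ba,a" goes straight to the target, and an annulled "bn,a" skips the
 * slot outright.
 */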
1389
1390 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1391 TCGv r_cond)
1392 {
1393 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1394 target_ulong target = dc->pc + offset;
1395
1396 #ifdef TARGET_SPARC64
1397 if (unlikely(AM_CHECK(dc))) {
1398 target &= 0xffffffffULL;
1399 }
1400 #endif
1401 if (cond == 0x0) {
1402 /* unconditional not taken */
1403 if (a) {
1404 dc->pc = dc->npc + 4;
1405 dc->npc = dc->pc + 4;
1406 } else {
1407 dc->pc = dc->npc;
1408 dc->npc = dc->pc + 4;
1409 }
1410 } else if (cond == 0x8) {
1411 /* unconditional taken */
1412 if (a) {
1413 dc->pc = target;
1414 dc->npc = dc->pc + 4;
1415 } else {
1416 dc->pc = dc->npc;
1417 dc->npc = target;
1418 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1419 }
1420 } else {
1421 flush_cond(dc, r_cond);
1422 gen_fcond(r_cond, cc, cond);
1423 if (a) {
1424 gen_branch_a(dc, target, dc->npc, r_cond);
1425 dc->is_br = 1;
1426 } else {
1427 dc->pc = dc->npc;
1428 dc->jump_pc[0] = target;
1429 if (unlikely(dc->npc == DYNAMIC_PC)) {
1430 dc->jump_pc[1] = DYNAMIC_PC;
1431 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1432 } else {
1433 dc->jump_pc[1] = dc->npc + 4;
1434 dc->npc = JUMP_PC;
1435 }
1436 }
1437 }
1438 }
1439
1440 #ifdef TARGET_SPARC64
1441 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1442 TCGv r_cond, TCGv r_reg)
1443 {
1444 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1445 target_ulong target = dc->pc + offset;
1446
1447 if (unlikely(AM_CHECK(dc))) {
1448 target &= 0xffffffffULL;
1449 }
1450 flush_cond(dc, r_cond);
1451 gen_cond_reg(r_cond, cond, r_reg);
1452 if (a) {
1453 gen_branch_a(dc, target, dc->npc, r_cond);
1454 dc->is_br = 1;
1455 } else {
1456 dc->pc = dc->npc;
1457 dc->jump_pc[0] = target;
1458 if (unlikely(dc->npc == DYNAMIC_PC)) {
1459 dc->jump_pc[1] = DYNAMIC_PC;
1460 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1461 } else {
1462 dc->jump_pc[1] = dc->npc + 4;
1463 dc->npc = JUMP_PC;
1464 }
1465 }
1466 }
1467
1468 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1469 {
1470 switch (fccno) {
1471 case 0:
1472 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1473 break;
1474 case 1:
1475 gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1476 break;
1477 case 2:
1478 gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1479 break;
1480 case 3:
1481 gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1482 break;
1483 }
1484 }
1485
1486 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1487 {
1488 switch (fccno) {
1489 case 0:
1490 gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1491 break;
1492 case 1:
1493 gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1494 break;
1495 case 2:
1496 gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1497 break;
1498 case 3:
1499 gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1500 break;
1501 }
1502 }
1503
1504 static inline void gen_op_fcmpq(int fccno)
1505 {
1506 switch (fccno) {
1507 case 0:
1508 gen_helper_fcmpq(cpu_env);
1509 break;
1510 case 1:
1511 gen_helper_fcmpq_fcc1(cpu_env);
1512 break;
1513 case 2:
1514 gen_helper_fcmpq_fcc2(cpu_env);
1515 break;
1516 case 3:
1517 gen_helper_fcmpq_fcc3(cpu_env);
1518 break;
1519 }
1520 }
1521
1522 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1523 {
1524 switch (fccno) {
1525 case 0:
1526 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1527 break;
1528 case 1:
1529 gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1530 break;
1531 case 2:
1532 gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1533 break;
1534 case 3:
1535 gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1536 break;
1537 }
1538 }
1539
1540 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1541 {
1542 switch (fccno) {
1543 case 0:
1544 gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1545 break;
1546 case 1:
1547 gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1548 break;
1549 case 2:
1550 gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1551 break;
1552 case 3:
1553 gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1554 break;
1555 }
1556 }
1557
1558 static inline void gen_op_fcmpeq(int fccno)
1559 {
1560 switch (fccno) {
1561 case 0:
1562 gen_helper_fcmpeq(cpu_env);
1563 break;
1564 case 1:
1565 gen_helper_fcmpeq_fcc1(cpu_env);
1566 break;
1567 case 2:
1568 gen_helper_fcmpeq_fcc2(cpu_env);
1569 break;
1570 case 3:
1571 gen_helper_fcmpeq_fcc3(cpu_env);
1572 break;
1573 }
1574 }
1575
1576 #else
1577
1578 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1579 {
1580 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1581 }
1582
1583 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1584 {
1585 gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1586 }
1587
1588 static inline void gen_op_fcmpq(int fccno)
1589 {
1590 gen_helper_fcmpq(cpu_env);
1591 }
1592
1593 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1594 {
1595 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1596 }
1597
1598 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1599 {
1600 gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1601 }
1602
1603 static inline void gen_op_fcmpeq(int fccno)
1604 {
1605 gen_helper_fcmpeq(cpu_env);
1606 }
1607 #endif
1608
1609 static inline void gen_op_fpexception_im(int fsr_flags)
1610 {
1611 TCGv_i32 r_const;
1612
1613 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1614 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1615 r_const = tcg_const_i32(TT_FP_EXCP);
1616 gen_helper_raise_exception(cpu_env, r_const);
1617 tcg_temp_free_i32(r_const);
1618 }
1619
1620 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1621 {
1622 #if !defined(CONFIG_USER_ONLY)
1623 if (!dc->fpu_enabled) {
1624 TCGv_i32 r_const;
1625
1626 save_state(dc, r_cond);
1627 r_const = tcg_const_i32(TT_NFPU_INSN);
1628 gen_helper_raise_exception(cpu_env, r_const);
1629 tcg_temp_free_i32(r_const);
1630 dc->is_br = 1;
1631 return 1;
1632 }
1633 #endif
1634 return 0;
1635 }
1636
1637 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1638 {
1639 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1640 }
1641
1642 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1643 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1644 {
1645 TCGv_i32 dst, src;
1646
1647 src = gen_load_fpr_F(dc, rs);
1648 dst = gen_dest_fpr_F();
1649
1650 gen(dst, cpu_env, src);
1651
1652 gen_store_fpr_F(dc, rd, dst);
1653 }
1654
1655 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1656 void (*gen)(TCGv_i32, TCGv_i32))
1657 {
1658 TCGv_i32 dst, src;
1659
1660 src = gen_load_fpr_F(dc, rs);
1661 dst = gen_dest_fpr_F();
1662
1663 gen(dst, src);
1664
1665 gen_store_fpr_F(dc, rd, dst);
1666 }
1667
1668 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1669 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1670 {
1671 TCGv_i32 dst, src1, src2;
1672
1673 src1 = gen_load_fpr_F(dc, rs1);
1674 src2 = gen_load_fpr_F(dc, rs2);
1675 dst = gen_dest_fpr_F();
1676
1677 gen(dst, cpu_env, src1, src2);
1678
1679 gen_store_fpr_F(dc, rd, dst);
1680 }
1681
1682 #ifdef TARGET_SPARC64
1683 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1684 void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1685 {
1686 TCGv_i32 dst, src1, src2;
1687
1688 src1 = gen_load_fpr_F(dc, rs1);
1689 src2 = gen_load_fpr_F(dc, rs2);
1690 dst = gen_dest_fpr_F();
1691
1692 gen(dst, src1, src2);
1693
1694 gen_store_fpr_F(dc, rd, dst);
1695 }
1696 #endif
1697
1698 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1699 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1700 {
1701 TCGv_i64 dst, src;
1702
1703 src = gen_load_fpr_D(dc, rs);
1704 dst = gen_dest_fpr_D();
1705
1706 gen(dst, cpu_env, src);
1707
1708 gen_store_fpr_D(dc, rd, dst);
1709 }
1710
1711 #ifdef TARGET_SPARC64
1712 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1713 void (*gen)(TCGv_i64, TCGv_i64))
1714 {
1715 TCGv_i64 dst, src;
1716
1717 src = gen_load_fpr_D(dc, rs);
1718 dst = gen_dest_fpr_D();
1719
1720 gen(dst, src);
1721
1722 gen_store_fpr_D(dc, rd, dst);
1723 }
1724 #endif
1725
1726 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1727 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1728 {
1729 TCGv_i64 dst, src1, src2;
1730
1731 src1 = gen_load_fpr_D(dc, rs1);
1732 src2 = gen_load_fpr_D(dc, rs2);
1733 dst = gen_dest_fpr_D();
1734
1735 gen(dst, cpu_env, src1, src2);
1736
1737 gen_store_fpr_D(dc, rd, dst);
1738 }
1739
1740 #ifdef TARGET_SPARC64
1741 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1742 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1743 {
1744 TCGv_i64 dst, src1, src2;
1745
1746 src1 = gen_load_fpr_D(dc, rs1);
1747 src2 = gen_load_fpr_D(dc, rs2);
1748 dst = gen_dest_fpr_D();
1749
1750 gen(dst, src1, src2);
1751
1752 gen_store_fpr_D(dc, rd, dst);
1753 }
1754
1755 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1756 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1757 {
1758 TCGv_i64 dst, src1, src2;
1759
1760 src1 = gen_load_fpr_D(dc, rs1);
1761 src2 = gen_load_fpr_D(dc, rs2);
1762 dst = gen_dest_fpr_D();
1763
1764 gen(dst, cpu_gsr, src1, src2);
1765
1766 gen_store_fpr_D(dc, rd, dst);
1767 }
1768
1769 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1770 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1771 {
1772 TCGv_i64 dst, src0, src1, src2;
1773
1774 src1 = gen_load_fpr_D(dc, rs1);
1775 src2 = gen_load_fpr_D(dc, rs2);
1776 src0 = gen_load_fpr_D(dc, rd);
1777 dst = gen_dest_fpr_D();
1778
1779 gen(dst, src0, src1, src2);
1780
1781 gen_store_fpr_D(dc, rd, dst);
1782 }
1783 #endif
1784
1785 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1786 void (*gen)(TCGv_ptr))
1787 {
1788 gen_op_load_fpr_QT1(QFPREG(rs));
1789
1790 gen(cpu_env);
1791
1792 gen_op_store_QT0_fpr(QFPREG(rd));
1793 gen_update_fprs_dirty(QFPREG(rd));
1794 }
1795
1796 #ifdef TARGET_SPARC64
1797 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1798 void (*gen)(TCGv_ptr))
1799 {
1800 gen_op_load_fpr_QT1(QFPREG(rs));
1801
1802 gen(cpu_env);
1803
1804 gen_op_store_QT0_fpr(QFPREG(rd));
1805 gen_update_fprs_dirty(QFPREG(rd));
1806 }
1807 #endif
1808
1809 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1810 void (*gen)(TCGv_ptr))
1811 {
1812 gen_op_load_fpr_QT0(QFPREG(rs1));
1813 gen_op_load_fpr_QT1(QFPREG(rs2));
1814
1815 gen(cpu_env);
1816
1817 gen_op_store_QT0_fpr(QFPREG(rd));
1818 gen_update_fprs_dirty(QFPREG(rd));
1819 }
1820
1821 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1822 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1823 {
1824 TCGv_i64 dst;
1825 TCGv_i32 src1, src2;
1826
1827 src1 = gen_load_fpr_F(dc, rs1);
1828 src2 = gen_load_fpr_F(dc, rs2);
1829 dst = gen_dest_fpr_D();
1830
1831 gen(dst, cpu_env, src1, src2);
1832
1833 gen_store_fpr_D(dc, rd, dst);
1834 }
1835
1836 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1837 void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1838 {
1839 TCGv_i64 src1, src2;
1840
1841 src1 = gen_load_fpr_D(dc, rs1);
1842 src2 = gen_load_fpr_D(dc, rs2);
1843
1844 gen(cpu_env, src1, src2);
1845
1846 gen_op_store_QT0_fpr(QFPREG(rd));
1847 gen_update_fprs_dirty(QFPREG(rd));
1848 }
1849
1850 #ifdef TARGET_SPARC64
1851 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1852 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1853 {
1854 TCGv_i64 dst;
1855 TCGv_i32 src;
1856
1857 src = gen_load_fpr_F(dc, rs);
1858 dst = gen_dest_fpr_D();
1859
1860 gen(dst, cpu_env, src);
1861
1862 gen_store_fpr_D(dc, rd, dst);
1863 }
1864 #endif
1865
1866 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1867 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1868 {
1869 TCGv_i64 dst;
1870 TCGv_i32 src;
1871
1872 src = gen_load_fpr_F(dc, rs);
1873 dst = gen_dest_fpr_D();
1874
1875 gen(dst, cpu_env, src);
1876
1877 gen_store_fpr_D(dc, rd, dst);
1878 }
1879
1880 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1881 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1882 {
1883 TCGv_i32 dst;
1884 TCGv_i64 src;
1885
1886 src = gen_load_fpr_D(dc, rs);
1887 dst = gen_dest_fpr_F();
1888
1889 gen(dst, cpu_env, src);
1890
1891 gen_store_fpr_F(dc, rd, dst);
1892 }
1893
1894 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1895 void (*gen)(TCGv_i32, TCGv_ptr))
1896 {
1897 TCGv_i32 dst;
1898
1899 gen_op_load_fpr_QT1(QFPREG(rs));
1900 dst = gen_dest_fpr_F();
1901
1902 gen(dst, cpu_env);
1903
1904 gen_store_fpr_F(dc, rd, dst);
1905 }
1906
1907 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1908 void (*gen)(TCGv_i64, TCGv_ptr))
1909 {
1910 TCGv_i64 dst;
1911
1912 gen_op_load_fpr_QT1(QFPREG(rs));
1913 dst = gen_dest_fpr_D();
1914
1915 gen(dst, cpu_env);
1916
1917 gen_store_fpr_D(dc, rd, dst);
1918 }
1919
1920 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1921 void (*gen)(TCGv_ptr, TCGv_i32))
1922 {
1923 TCGv_i32 src;
1924
1925 src = gen_load_fpr_F(dc, rs);
1926
1927 gen(cpu_env, src);
1928
1929 gen_op_store_QT0_fpr(QFPREG(rd));
1930 gen_update_fprs_dirty(QFPREG(rd));
1931 }
1932
1933 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1934 void (*gen)(TCGv_ptr, TCGv_i64))
1935 {
1936 TCGv_i64 src;
1937
1938 src = gen_load_fpr_D(dc, rs);
1939
1940 gen(cpu_env, src);
1941
1942 gen_op_store_QT0_fpr(QFPREG(rd));
1943 gen_update_fprs_dirty(QFPREG(rd));
1944 }
1945
1946 /* asi moves */
1947 #ifdef TARGET_SPARC64
1948 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1949 {
1950 int asi;
1951 TCGv_i32 r_asi;
1952
1953 if (IS_IMM) {
1954 r_asi = tcg_temp_new_i32();
1955 tcg_gen_mov_i32(r_asi, cpu_asi);
1956 } else {
1957 asi = GET_FIELD(insn, 19, 26);
1958 r_asi = tcg_const_i32(asi);
1959 }
1960 return r_asi;
1961 }
1962
1963 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1964 int sign)
1965 {
1966 TCGv_i32 r_asi, r_size, r_sign;
1967
1968 r_asi = gen_get_asi(insn, addr);
1969 r_size = tcg_const_i32(size);
1970 r_sign = tcg_const_i32(sign);
1971 gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
1972 tcg_temp_free_i32(r_sign);
1973 tcg_temp_free_i32(r_size);
1974 tcg_temp_free_i32(r_asi);
1975 }
1976
1977 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1978 {
1979 TCGv_i32 r_asi, r_size;
1980
1981 r_asi = gen_get_asi(insn, addr);
1982 r_size = tcg_const_i32(size);
1983 gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
1984 tcg_temp_free_i32(r_size);
1985 tcg_temp_free_i32(r_asi);
1986 }
1987
1988 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1989 {
1990 TCGv_i32 r_asi, r_size, r_rd;
1991
1992 r_asi = gen_get_asi(insn, addr);
1993 r_size = tcg_const_i32(size);
1994 r_rd = tcg_const_i32(rd);
1995 gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
1996 tcg_temp_free_i32(r_rd);
1997 tcg_temp_free_i32(r_size);
1998 tcg_temp_free_i32(r_asi);
1999 }
2000
2001 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
2002 {
2003 TCGv_i32 r_asi, r_size, r_rd;
2004
2005 r_asi = gen_get_asi(insn, addr);
2006 r_size = tcg_const_i32(size);
2007 r_rd = tcg_const_i32(rd);
2008 gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2009 tcg_temp_free_i32(r_rd);
2010 tcg_temp_free_i32(r_size);
2011 tcg_temp_free_i32(r_asi);
2012 }
2013
2014 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2015 {
2016 TCGv_i32 r_asi, r_size, r_sign;
2017
2018 r_asi = gen_get_asi(insn, addr);
2019 r_size = tcg_const_i32(4);
2020 r_sign = tcg_const_i32(0);
2021 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2022 tcg_temp_free_i32(r_sign);
2023 gen_helper_st_asi(cpu_env, addr, dst, r_asi, r_size);
2024 tcg_temp_free_i32(r_size);
2025 tcg_temp_free_i32(r_asi);
2026 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2027 }
2028
2029 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2030 {
2031 TCGv_i32 r_asi, r_rd;
2032
2033 r_asi = gen_get_asi(insn, addr);
2034 r_rd = tcg_const_i32(rd);
2035 gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
2036 tcg_temp_free_i32(r_rd);
2037 tcg_temp_free_i32(r_asi);
2038 }
2039
2040 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2041 {
2042 TCGv_i32 r_asi, r_size;
2043
2044 gen_movl_reg_TN(rd + 1, cpu_tmp0);
2045 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2046 r_asi = gen_get_asi(insn, addr);
2047 r_size = tcg_const_i32(8);
2048 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2049 tcg_temp_free_i32(r_size);
2050 tcg_temp_free_i32(r_asi);
2051 }
2052
2053 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
2054 int rd)
2055 {
2056 TCGv r_val1;
2057 TCGv_i32 r_asi;
2058
2059 r_val1 = tcg_temp_new();
2060 gen_movl_reg_TN(rd, r_val1);
2061 r_asi = gen_get_asi(insn, addr);
2062 gen_helper_cas_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
2063 tcg_temp_free_i32(r_asi);
2064 tcg_temp_free(r_val1);
2065 }
2066
2067 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
2068 int rd)
2069 {
2070 TCGv_i32 r_asi;
2071
2072 gen_movl_reg_TN(rd, cpu_tmp64);
2073 r_asi = gen_get_asi(insn, addr);
2074 gen_helper_casx_asi(dst, cpu_env, addr, cpu_tmp64, val2, r_asi);
2075 tcg_temp_free_i32(r_asi);
2076 }
2077
2078 #elif !defined(CONFIG_USER_ONLY)
2079
2080 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2081 int sign)
2082 {
2083 TCGv_i32 r_asi, r_size, r_sign;
2084
2085 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2086 r_size = tcg_const_i32(size);
2087 r_sign = tcg_const_i32(sign);
2088 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2089 tcg_temp_free(r_sign);
2090 tcg_temp_free(r_size);
2091 tcg_temp_free(r_asi);
2092 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2093 }
2094
2095 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2096 {
2097 TCGv_i32 r_asi, r_size;
2098
2099 tcg_gen_extu_tl_i64(cpu_tmp64, src);
2100 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2101 r_size = tcg_const_i32(size);
2102 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2103 tcg_temp_free(r_size);
2104 tcg_temp_free(r_asi);
2105 }
2106
2107 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2108 {
2109 TCGv_i32 r_asi, r_size, r_sign;
2110 TCGv_i64 r_val;
2111
2112 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2113 r_size = tcg_const_i32(4);
2114 r_sign = tcg_const_i32(0);
2115 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2116 tcg_temp_free(r_sign);
2117 r_val = tcg_temp_new_i64();
2118 tcg_gen_extu_tl_i64(r_val, dst);
2119 gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2120 tcg_temp_free_i64(r_val);
2121 tcg_temp_free(r_size);
2122 tcg_temp_free(r_asi);
2123 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2124 }
2125
2126 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2127 {
2128 TCGv_i32 r_asi, r_size, r_sign;
2129
2130 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2131 r_size = tcg_const_i32(8);
2132 r_sign = tcg_const_i32(0);
2133 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2134 tcg_temp_free(r_sign);
2135 tcg_temp_free(r_size);
2136 tcg_temp_free(r_asi);
2137 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
2138 gen_movl_TN_reg(rd + 1, cpu_tmp0);
2139 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
2140 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
2141 gen_movl_TN_reg(rd, hi);
2142 }
2143
2144 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2145 {
2146 TCGv_i32 r_asi, r_size;
2147
2148 gen_movl_reg_TN(rd + 1, cpu_tmp0);
2149 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2150 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2151 r_size = tcg_const_i32(8);
2152 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2153 tcg_temp_free(r_size);
2154 tcg_temp_free(r_asi);
2155 }
2156 #endif
2157
2158 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2159 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2160 {
2161 TCGv_i64 r_val;
2162 TCGv_i32 r_asi, r_size;
2163
2164 gen_ld_asi(dst, addr, insn, 1, 0);
2165
2166 r_val = tcg_const_i64(0xffULL);
2167 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2168 r_size = tcg_const_i32(1);
2169 gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2170 tcg_temp_free_i32(r_size);
2171 tcg_temp_free_i32(r_asi);
2172 tcg_temp_free_i64(r_val);
2173 }
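/* Architectural semantics as a C sketch (assuming a byte pointer p to
   the addressed location; not how the emulation actually runs):
       uint8_t old = *p;
       *p = 0xff;
   On hardware LDSTUB is atomic; here it is emulated as a separate ASI
   load followed by an ASI store of 0xff. */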
2174 #endif
2175
2176 static inline TCGv get_src1(unsigned int insn, TCGv def)
2177 {
2178 TCGv r_rs1 = def;
2179 unsigned int rs1;
2180
2181 rs1 = GET_FIELD(insn, 13, 17);
2182 if (rs1 == 0) {
2183 tcg_gen_movi_tl(def, 0);
2184 } else if (rs1 < 8) {
2185 r_rs1 = cpu_gregs[rs1];
2186 } else {
2187 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
2188 }
2189 return r_rs1;
2190 }
2191
2192 static inline TCGv get_src2(unsigned int insn, TCGv def)
2193 {
2194 TCGv r_rs2 = def;
2195
2196 if (IS_IMM) { /* immediate */
2197 target_long simm = GET_FIELDs(insn, 19, 31);
2198 tcg_gen_movi_tl(def, simm);
2199 } else { /* register */
2200 unsigned int rs2 = GET_FIELD(insn, 27, 31);
2201 if (rs2 == 0) {
2202 tcg_gen_movi_tl(def, 0);
2203 } else if (rs2 < 8) {
2204 r_rs2 = cpu_gregs[rs2];
2205 } else {
2206 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
2207 }
2208 }
2209 return r_rs2;
2210 }
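/* Note on the register file layout assumed by get_src1/get_src2 above:
   reading %g0 (reg 0) always yields zero, %g1..%g7 live in the
   cpu_gregs TCG globals, and the windowed registers 8..31 (%o/%l/%i)
   are loaded indirectly through cpu_regwptr, which points at the
   current window. */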
2211
2212 #ifdef TARGET_SPARC64
2213 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2214 {
2215 TCGv_i32 r_tl = tcg_temp_new_i32();
2216
2217 /* load env->tl into r_tl */
2218 tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2219
2220 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2221 tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2222
2223 /* calculate offset to current trap state from env->ts, reuse r_tl */
2224 tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2225 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2226
2227 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2228 {
2229 TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2230 tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2231 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2232 tcg_temp_free_ptr(r_tl_tmp);
2233 }
2234
2235 tcg_temp_free_i32(r_tl);
2236 }
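/* Equivalent C for the address computed above (illustrative):
       trap_state *tsptr = &env->ts[env->tl & MAXTL_MASK];
   i.e. offsetof(CPUSPARCState, ts) plus (tl & MAXTL_MASK) scaled by
   sizeof(trap_state). */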
2237
2238 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2239 int width, bool cc, bool left)
2240 {
2241 TCGv lo1, lo2, t1, t2;
2242 uint64_t amask, tabl, tabr;
2243 int shift, imask, omask;
2244
2245 if (cc) {
2246 tcg_gen_mov_tl(cpu_cc_src, s1);
2247 tcg_gen_mov_tl(cpu_cc_src2, s2);
2248 tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2249 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2250 dc->cc_op = CC_OP_SUB;
2251 }
2252
2253 /* Theory of operation: there are two tables, left and right (not to
2254 be confused with the left and right versions of the opcode). These
2255 are indexed by the low 3 bits of the inputs. To make things "easy",
2256 these tables are loaded into two constants, TABL and TABR below.
2257 The operation index = (input & imask) << shift calculates the index
2258 into the constant, while val = (table >> index) & omask calculates
2259 the value we're looking for. */
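/* Worked example (illustrative): width == 8, left == 0 (EDGE8) with
   s1 & 7 == 2 gives index = 2 << 3 = 16, and (TABL >> 16) & 0xff = 0x3f,
   the left-edge byte mask for a big-endian offset of 2 (bytes 2..7
   valid, with bit 7 of the mask naming byte 0). */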
2260 switch (width) {
2261 case 8:
2262 imask = 0x7;
2263 shift = 3;
2264 omask = 0xff;
2265 if (left) {
2266 tabl = 0x80c0e0f0f8fcfeffULL;
2267 tabr = 0xff7f3f1f0f070301ULL;
2268 } else {
2269 tabl = 0x0103070f1f3f7fffULL;
2270 tabr = 0xfffefcf8f0e0c080ULL;
2271 }
2272 break;
2273 case 16:
2274 imask = 0x6;
2275 shift = 1;
2276 omask = 0xf;
2277 if (left) {
2278 tabl = 0x8cef;
2279 tabr = 0xf731;
2280 } else {
2281 tabl = 0x137f;
2282 tabr = 0xfec8;
2283 }
2284 break;
2285 case 32:
2286 imask = 0x4;
2287 shift = 0;
2288 omask = 0x3;
2289 if (left) {
2290 tabl = (2 << 2) | 3;
2291 tabr = (3 << 2) | 1;
2292 } else {
2293 tabl = (1 << 2) | 3;
2294 tabr = (3 << 2) | 2;
2295 }
2296 break;
2297 default:
2298 abort();
2299 }
2300
2301 lo1 = tcg_temp_new();
2302 lo2 = tcg_temp_new();
2303 tcg_gen_andi_tl(lo1, s1, imask);
2304 tcg_gen_andi_tl(lo2, s2, imask);
2305 tcg_gen_shli_tl(lo1, lo1, shift);
2306 tcg_gen_shli_tl(lo2, lo2, shift);
2307
2308 t1 = tcg_const_tl(tabl);
2309 t2 = tcg_const_tl(tabr);
2310 tcg_gen_shr_tl(lo1, t1, lo1);
2311 tcg_gen_shr_tl(lo2, t2, lo2);
2312 tcg_gen_andi_tl(dst, lo1, omask);
2313 tcg_gen_andi_tl(lo2, lo2, omask);
2314
2315 amask = -8;
2316 if (AM_CHECK(dc)) {
2317 amask &= 0xffffffffULL;
2318 }
2319 tcg_gen_andi_tl(s1, s1, amask);
2320 tcg_gen_andi_tl(s2, s2, amask);
2321
2322 /* We want to compute
2323 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2324 We've already done dst = lo1, so this reduces to
2325 dst &= (s1 == s2 ? -1 : lo2)
2326 Which we perform by
2327 lo2 |= -(s1 == s2)
2328 dst &= lo2
2329 */
2330 tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2331 tcg_gen_neg_tl(t1, t1);
2332 tcg_gen_or_tl(lo2, lo2, t1);
2333 tcg_gen_and_tl(dst, dst, lo2);
2334
2335 tcg_temp_free(lo1);
2336 tcg_temp_free(lo2);
2337 tcg_temp_free(t1);
2338 tcg_temp_free(t2);
2339 }
2340
2341 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2342 {
2343 TCGv tmp = tcg_temp_new();
2344
2345 tcg_gen_add_tl(tmp, s1, s2);
2346 tcg_gen_andi_tl(dst, tmp, -8);
2347 if (left) {
2348 tcg_gen_neg_tl(tmp, tmp);
2349 }
2350 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2351
2352 tcg_temp_free(tmp);
2353 }
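/* Example (illustrative): if s1 + s2 == 0x1003, then dst = 0x1000 and
   GSR.align = 3 for alignaddr, or (-0x1003) & 7 = 5 for alignaddrl
   (left == 1 negates the sum before the deposit into GSR bits 2..0). */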
2354
2355 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2356 {
2357 TCGv t1, t2, shift;
2358
2359 t1 = tcg_temp_new();
2360 t2 = tcg_temp_new();
2361 shift = tcg_temp_new();
2362
2363 tcg_gen_andi_tl(shift, gsr, 7);
2364 tcg_gen_shli_tl(shift, shift, 3);
2365 tcg_gen_shl_tl(t1, s1, shift);
2366
2367 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2368 shift of (up to 63) followed by a constant shift of 1. */
2369 tcg_gen_xori_tl(shift, shift, 63);
2370 tcg_gen_shr_tl(t2, s2, shift);
2371 tcg_gen_shri_tl(t2, t2, 1);
2372
2373 tcg_gen_or_tl(dst, t1, t2);
2374
2375 tcg_temp_free(t1);
2376 tcg_temp_free(t2);
2377 tcg_temp_free(shift);
2378 }
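/* Example (illustrative): with GSR.align == 3 the code above computes
   dst = (s1 << 24) | (s2 >> 40). For GSR.align == 0 it yields dst = s1,
   because the xor with 63 plus the extra constant shift turns the
   would-be shift of 64 into (s2 >> 63) >> 1 == 0. */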
2379 #endif
2380
2381 #define CHECK_IU_FEATURE(dc, FEATURE) \
2382 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2383 goto illegal_insn;
2384 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2385 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2386 goto nfpu_insn;
2387
2388 /* before an instruction, dc->pc must be static */
2389 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2390 {
2391 unsigned int opc, rs1, rs2, rd;
2392 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2393 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2394 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2395 target_long simm;
2396
2397 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
2398 tcg_gen_debug_insn_start(dc->pc);
2399 }
2400
2401 opc = GET_FIELD(insn, 0, 1);
2402
2403 rd = GET_FIELD(insn, 2, 6);
2404
2405 cpu_tmp1 = cpu_src1 = tcg_temp_new();
2406 cpu_tmp2 = cpu_src2 = tcg_temp_new();
2407
2408 switch (opc) {
2409 case 0: /* branches/sethi */
2410 {
2411 unsigned int xop = GET_FIELD(insn, 7, 9);
2412 int32_t target;
2413 switch (xop) {
2414 #ifdef TARGET_SPARC64
2415 case 0x1: /* V9 BPcc */
2416 {
2417 int cc;
2418
2419 target = GET_FIELD_SP(insn, 0, 18);
2420 target = sign_extend(target, 19);
2421 target <<= 2;
2422 cc = GET_FIELD_SP(insn, 20, 21);
2423 if (cc == 0)
2424 do_branch(dc, target, insn, 0, cpu_cond);
2425 else if (cc == 2)
2426 do_branch(dc, target, insn, 1, cpu_cond);
2427 else
2428 goto illegal_insn;
2429 goto jmp_insn;
2430 }
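/* BPcc above: target = PC + 4 * sign_extend(disp19); the cc field
   selects %icc (0) or %xcc (2), with the odd encodings reserved and
   treated as illegal. The same displacement pattern recurs in the
   branch cases below. */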
2431 case 0x3: /* V9 BPr */
2432 {
2433 target = GET_FIELD_SP(insn, 0, 13) |
2434 (GET_FIELD_SP(insn, 20, 21) << 14);
2435 target = sign_extend(target, 16);
2436 target <<= 2;
2437 cpu_src1 = get_src1(insn, cpu_src1);
2438 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2439 goto jmp_insn;
2440 }
2441 case 0x5: /* V9 FBPcc */
2442 {
2443 int cc = GET_FIELD_SP(insn, 20, 21);
2444 if (gen_trap_ifnofpu(dc, cpu_cond))
2445 goto jmp_insn;
2446 target = GET_FIELD_SP(insn, 0, 18);
2447 target = sign_extend(target, 19);
2448 target <<= 2;
2449 do_fbranch(dc, target, insn, cc, cpu_cond);
2450 goto jmp_insn;
2451 }
2452 #else
2453 case 0x7: /* CBN+x */
2454 {
2455 goto ncp_insn;
2456 }
2457 #endif
2458 case 0x2: /* BN+x */
2459 {
2460 target = GET_FIELD(insn, 10, 31);
2461 target = sign_extend(target, 22);
2462 target <<= 2;
2463 do_branch(dc, target, insn, 0, cpu_cond);
2464 goto jmp_insn;
2465 }
2466 case 0x6: /* FBN+x */
2467 {
2468 if (gen_trap_ifnofpu(dc, cpu_cond))
2469 goto jmp_insn;
2470 target = GET_FIELD(insn, 10, 31);
2471 target = sign_extend(target, 22);
2472 target <<= 2;
2473 do_fbranch(dc, target, insn, 0, cpu_cond);
2474 goto jmp_insn;
2475 }
2476 case 0x4: /* SETHI */
2477 if (rd) { // SETHI into %g0 is a nop
2478 uint32_t value = GET_FIELD(insn, 10, 31);
2479 TCGv r_const;
2480
2481 r_const = tcg_const_tl(value << 10);
2482 gen_movl_TN_reg(rd, r_const);
2483 tcg_temp_free(r_const);
2484 }
2485 break;
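/* Example (illustrative): sethi %hi(0xdeadbeef), %o1 writes
   0xdeadbeef & ~0x3ff = 0xdeadbc00 to %o1, i.e. imm22 << 10. */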
2486 case 0x0: /* UNIMPL */
2487 default:
2488 goto illegal_insn;
2489 }
2490 break;
2491 }
2492 break;
2493 case 1: /*CALL*/
2494 {
2495 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2496 TCGv r_const;
2497
2498 r_const = tcg_const_tl(dc->pc);
2499 gen_movl_TN_reg(15, r_const);
2500 tcg_temp_free(r_const);
2501 target += dc->pc;
2502 gen_mov_pc_npc(dc, cpu_cond);
2503 #ifdef TARGET_SPARC64
2504 if (unlikely(AM_CHECK(dc))) {
2505 target &= 0xffffffffULL;
2506 }
2507 #endif
2508 dc->npc = target;
2509 }
2510 goto jmp_insn;
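/* CALL above: %o7 (reg 15) receives the address of the CALL itself and
   the delayed target is PC + 4 * sign_extend(disp30), truncated to
   32 bits when the V9 address mask (PSTATE.AM) is in effect. */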
2511 case 2: /* FPU & Logical Operations */
2512 {
2513 unsigned int xop = GET_FIELD(insn, 7, 12);
2514 if (xop == 0x3a) { /* generate trap */
2515 int cond;
2516
2517 cpu_src1 = get_src1(insn, cpu_src1);
2518 if (IS_IMM) {
2519 rs2 = GET_FIELD(insn, 25, 31);
2520 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2521 } else {
2522 rs2 = GET_FIELD(insn, 27, 31);
2523 if (rs2 != 0) {
2524 gen_movl_reg_TN(rs2, cpu_src2);
2525 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2526 } else
2527 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2528 }
2529
2530 cond = GET_FIELD(insn, 3, 6);
2531 if (cond == 0x8) { /* Trap Always */
2532 save_state(dc, cpu_cond);
2533 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2534 supervisor(dc))
2535 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2536 else
2537 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2538 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2539 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2540 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2541
2542 } else if (cond != 0) {
2543 TCGv r_cond = tcg_temp_new();
2544 int l1;
2545 #ifdef TARGET_SPARC64
2546 /* V9 icc/xcc */
2547 int cc = GET_FIELD_SP(insn, 11, 12);
2548
2549 save_state(dc, cpu_cond);
2550 if (cc == 0)
2551 gen_cond(r_cond, 0, cond, dc);
2552 else if (cc == 2)
2553 gen_cond(r_cond, 1, cond, dc);
2554 else
2555 goto illegal_insn;
2556 #else
2557 save_state(dc, cpu_cond);
2558 gen_cond(r_cond, 0, cond, dc);
2559 #endif
2560 l1 = gen_new_label();
2561 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2562
2563 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2564 supervisor(dc))
2565 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2566 else
2567 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2568 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2569 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2570 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2571
2572 gen_set_label(l1);
2573 tcg_temp_free(r_cond);
2574 }
2575 gen_op_next_insn();
2576 tcg_gen_exit_tb(0);
2577 dc->is_br = 1;
2578 goto jmp_insn;
2579 } else if (xop == 0x28) {
2580 rs1 = GET_FIELD(insn, 13, 17);
2581 switch(rs1) {
2582 case 0: /* rdy */
2583 #ifndef TARGET_SPARC64
2584 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2585 manual, rdy on the microSPARC
2586 II */
2587 case 0x0f: /* stbar in the SPARCv8 manual,
2588 rdy on the microSPARC II */
2589 case 0x10 ... 0x1f: /* implementation-dependent in the
2590 SPARCv8 manual, rdy on the
2591 microSPARC II */
2592 /* Read Asr17 */
2593 if (rs1 == 0x11 && (dc->def->features & CPU_FEATURE_ASR17)) {
2594 TCGv r_const;
2595
2596 /* Read Asr17 for a Leon3 monoprocessor */
2597 r_const = tcg_const_tl((1 << 8)
2598 | (dc->def->nwindows - 1));
2599 gen_movl_TN_reg(rd, r_const);
2600 tcg_temp_free(r_const);
2601 break;
2602 }
2603 #endif
2604 gen_movl_TN_reg(rd, cpu_y);
2605 break;
2606 #ifdef TARGET_SPARC64
2607 case 0x2: /* V9 rdccr */
2608 gen_helper_compute_psr(cpu_env);
2609 gen_helper_rdccr(cpu_dst, cpu_env);
2610 gen_movl_TN_reg(rd, cpu_dst);
2611 break;
2612 case 0x3: /* V9 rdasi */
2613 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2614 gen_movl_TN_reg(rd, cpu_dst);
2615 break;
2616 case 0x4: /* V9 rdtick */
2617 {
2618 TCGv_ptr r_tickptr;
2619
2620 r_tickptr = tcg_temp_new_ptr();
2621 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2622 offsetof(CPUSPARCState, tick));
2623 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2624 tcg_temp_free_ptr(r_tickptr);
2625 gen_movl_TN_reg(rd, cpu_dst);
2626 }
2627 break;
2628 case 0x5: /* V9 rdpc */
2629 {
2630 TCGv r_const;
2631
2632 if (unlikely(AM_CHECK(dc))) {
2633 r_const = tcg_const_tl(dc->pc & 0xffffffffULL);
2634 } else {
2635 r_const = tcg_const_tl(dc->pc);
2636 }
2637 gen_movl_TN_reg(rd, r_const);
2638 tcg_temp_free(r_const);
2639 }
2640 break;
2641 case 0x6: /* V9 rdfprs */
2642 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2643 gen_movl_TN_reg(rd, cpu_dst);
2644 break;
2645 case 0xf: /* V9 membar */
2646 break; /* no effect */
2647 case 0x13: /* Graphics Status */
2648 if (gen_trap_ifnofpu(dc, cpu_cond))
2649 goto jmp_insn;
2650 gen_movl_TN_reg(rd, cpu_gsr);
2651 break;
2652 case 0x16: /* Softint */
2653 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2654 gen_movl_TN_reg(rd, cpu_dst);
2655 break;
2656 case 0x17: /* Tick compare */
2657 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2658 break;
2659 case 0x18: /* System tick */
2660 {
2661 TCGv_ptr r_tickptr;
2662
2663 r_tickptr = tcg_temp_new_ptr();
2664 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2665 offsetof(CPUSPARCState, stick));
2666 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2667 tcg_temp_free_ptr(r_tickptr);
2668 gen_movl_TN_reg(rd, cpu_dst);
2669 }
2670 break;
2671 case 0x19: /* System tick compare */
2672 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2673 break;
2674 case 0x10: /* Performance Control */
2675 case 0x11: /* Performance Instrumentation Counter */
2676 case 0x12: /* Dispatch Control */
2677 case 0x14: /* Softint set, WO */
2678 case 0x15: /* Softint clear, WO */
2679 #endif
2680 default:
2681 goto illegal_insn;
2682 }
2683 #if !defined(CONFIG_USER_ONLY)
2684 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2685 #ifndef TARGET_SPARC64
2686 if (!supervisor(dc))
2687 goto priv_insn;
2688 gen_helper_compute_psr(cpu_env);
2689 dc->cc_op = CC_OP_FLAGS;
2690 gen_helper_rdpsr(cpu_dst, cpu_env);
2691 #else
2692 CHECK_IU_FEATURE(dc, HYPV);
2693 if (!hypervisor(dc))
2694 goto priv_insn;
2695 rs1 = GET_FIELD(insn, 13, 17);
2696 switch (rs1) {
2697 case 0: // hpstate
2698 // gen_op_rdhpstate();
2699 break;
2700 case 1: // htstate
2701 // gen_op_rdhtstate();
2702 break;
2703 case 3: // hintp
2704 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2705 break;
2706 case 5: // htba
2707 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2708 break;
2709 case 6: // hver
2710 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2711 break;
2712 case 31: // hstick_cmpr
2713 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2714 break;
2715 default:
2716 goto illegal_insn;
2717 }
2718 #endif
2719 gen_movl_TN_reg(rd, cpu_dst);
2720 break;
2721 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2722 if (!supervisor(dc))
2723 goto priv_insn;
2724 #ifdef TARGET_SPARC64
2725 rs1 = GET_FIELD(insn, 13, 17);
2726 switch (rs1) {
2727 case 0: // tpc
2728 {
2729 TCGv_ptr r_tsptr;
2730
2731 r_tsptr = tcg_temp_new_ptr();
2732 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2733 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2734 offsetof(trap_state, tpc));
2735 tcg_temp_free_ptr(r_tsptr);
2736 }
2737 break;
2738 case 1: // tnpc
2739 {
2740 TCGv_ptr r_tsptr;
2741
2742 r_tsptr = tcg_temp_new_ptr();
2743 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2744 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2745 offsetof(trap_state, tnpc));
2746 tcg_temp_free_ptr(r_tsptr);
2747 }
2748 break;
2749 case 2: // tstate
2750 {
2751 TCGv_ptr r_tsptr;
2752
2753 r_tsptr = tcg_temp_new_ptr();
2754 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2755 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2756 offsetof(trap_state, tstate));
2757 tcg_temp_free_ptr(r_tsptr);
2758 }
2759 break;
2760 case 3: // tt
2761 {
2762 TCGv_ptr r_tsptr;
2763
2764 r_tsptr = tcg_temp_new_ptr();
2765 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2766 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2767 offsetof(trap_state, tt));
2768 tcg_temp_free_ptr(r_tsptr);
2769 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2770 }
2771 break;
2772 case 4: // tick
2773 {
2774 TCGv_ptr r_tickptr;
2775
2776 r_tickptr = tcg_temp_new_ptr();
2777 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2778 offsetof(CPUSPARCState, tick));
2779 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2780 gen_movl_TN_reg(rd, cpu_tmp0);
2781 tcg_temp_free_ptr(r_tickptr);
2782 }
2783 break;
2784 case 5: // tba
2785 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2786 break;
2787 case 6: // pstate
2788 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2789 offsetof(CPUSPARCState, pstate));
2790 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2791 break;
2792 case 7: // tl
2793 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2794 offsetof(CPUSPARCState, tl));
2795 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2796 break;
2797 case 8: // pil
2798 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2799 offsetof(CPUSPARCState, psrpil));
2800 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2801 break;
2802 case 9: // cwp
2803 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2804 break;
2805 case 10: // cansave
2806 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2807 offsetof(CPUSPARCState, cansave));
2808 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2809 break;
2810 case 11: // canrestore
2811 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2812 offsetof(CPUSPARCState, canrestore));
2813 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2814 break;
2815 case 12: // cleanwin
2816 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2817 offsetof(CPUSPARCState, cleanwin));
2818 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2819 break;
2820 case 13: // otherwin
2821 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2822 offsetof(CPUSPARCState, otherwin));
2823 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2824 break;
2825 case 14: // wstate
2826 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2827 offsetof(CPUSPARCState, wstate));
2828 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2829 break;
2830 case 16: // UA2005 gl
2831 CHECK_IU_FEATURE(dc, GL);
2832 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2833 offsetof(CPUSPARCState, gl));
2834 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2835 break;
2836 case 26: // UA2005 strand status
2837 CHECK_IU_FEATURE(dc, HYPV);
2838 if (!hypervisor(dc))
2839 goto priv_insn;
2840 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2841 break;
2842 case 31: // ver
2843 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2844 break;
2845 case 15: // fq
2846 default:
2847 goto illegal_insn;
2848 }
2849 #else
2850 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2851 #endif
2852 gen_movl_TN_reg(rd, cpu_tmp0);
2853 break;
2854 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2855 #ifdef TARGET_SPARC64
2856 save_state(dc, cpu_cond);
2857 gen_helper_flushw(cpu_env);
2858 #else
2859 if (!supervisor(dc))
2860 goto priv_insn;
2861 gen_movl_TN_reg(rd, cpu_tbr);
2862 #endif
2863 break;
2864 #endif
2865 } else if (xop == 0x34) { /* FPop1: FPU operations */
2866 if (gen_trap_ifnofpu(dc, cpu_cond))
2867 goto jmp_insn;
2868 gen_op_clear_ieee_excp_and_FTT();
2869 rs1 = GET_FIELD(insn, 13, 17);
2870 rs2 = GET_FIELD(insn, 27, 31);
2871 xop = GET_FIELD(insn, 18, 26);
2872 save_state(dc, cpu_cond);
2873 switch (xop) {
2874 case 0x1: /* fmovs */
2875 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2876 gen_store_fpr_F(dc, rd, cpu_src1_32);
2877 break;
2878 case 0x5: /* fnegs */
2879 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2880 break;
2881 case 0x9: /* fabss */
2882 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2883 break;
2884 case 0x29: /* fsqrts */
2885 CHECK_FPU_FEATURE(dc, FSQRT);
2886 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2887 break;
2888 case 0x2a: /* fsqrtd */
2889 CHECK_FPU_FEATURE(dc, FSQRT);
2890 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2891 break;
2892 case 0x2b: /* fsqrtq */
2893 CHECK_FPU_FEATURE(dc, FLOAT128);
2894 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2895 break;
2896 case 0x41: /* fadds */
2897 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2898 break;
2899 case 0x42: /* faddd */
2900 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2901 break;
2902 case 0x43: /* faddq */
2903 CHECK_FPU_FEATURE(dc, FLOAT128);
2904 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2905 break;
2906 case 0x45: /* fsubs */
2907 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2908 break;
2909 case 0x46: /* fsubd */
2910 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2911 break;
2912 case 0x47: /* fsubq */
2913 CHECK_FPU_FEATURE(dc, FLOAT128);
2914 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2915 break;
2916 case 0x49: /* fmuls */
2917 CHECK_FPU_FEATURE(dc, FMUL);
2918 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2919 break;
2920 case 0x4a: /* fmuld */
2921 CHECK_FPU_FEATURE(dc, FMUL);
2922 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2923 break;
2924 case 0x4b: /* fmulq */
2925 CHECK_FPU_FEATURE(dc, FLOAT128);
2926 CHECK_FPU_FEATURE(dc, FMUL);
2927 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2928 break;
2929 case 0x4d: /* fdivs */
2930 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2931 break;
2932 case 0x4e: /* fdivd */
2933 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2934 break;
2935 case 0x4f: /* fdivq */
2936 CHECK_FPU_FEATURE(dc, FLOAT128);
2937 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2938 break;
2939 case 0x69: /* fsmuld */
2940 CHECK_FPU_FEATURE(dc, FSMULD);
2941 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2942 break;
2943 case 0x6e: /* fdmulq */
2944 CHECK_FPU_FEATURE(dc, FLOAT128);
2945 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2946 break;
2947 case 0xc4: /* fitos */
2948 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2949 break;
2950 case 0xc6: /* fdtos */
2951 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2952 break;
2953 case 0xc7: /* fqtos */
2954 CHECK_FPU_FEATURE(dc, FLOAT128);
2955 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2956 break;
2957 case 0xc8: /* fitod */
2958 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2959 break;
2960 case 0xc9: /* fstod */
2961 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2962 break;
2963 case 0xcb: /* fqtod */
2964 CHECK_FPU_FEATURE(dc, FLOAT128);
2965 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2966 break;
2967 case 0xcc: /* fitoq */
2968 CHECK_FPU_FEATURE(dc, FLOAT128);
2969 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2970 break;
2971 case 0xcd: /* fstoq */
2972 CHECK_FPU_FEATURE(dc, FLOAT128);
2973 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2974 break;
2975 case 0xce: /* fdtoq */
2976 CHECK_FPU_FEATURE(dc, FLOAT128);
2977 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2978 break;
2979 case 0xd1: /* fstoi */
2980 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2981 break;
2982 case 0xd2: /* fdtoi */
2983 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2984 break;
2985 case 0xd3: /* fqtoi */
2986 CHECK_FPU_FEATURE(dc, FLOAT128);
2987 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2988 break;
2989 #ifdef TARGET_SPARC64
2990 case 0x2: /* V9 fmovd */
2991 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2992 gen_store_fpr_D(dc, rd, cpu_src1_64);
2993 break;
2994 case 0x3: /* V9 fmovq */
2995 CHECK_FPU_FEATURE(dc, FLOAT128);
2996 gen_move_Q(rd, rs2);
2997 break;
2998 case 0x6: /* V9 fnegd */
2999 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3000 break;
3001 case 0x7: /* V9 fnegq */
3002 CHECK_FPU_FEATURE(dc, FLOAT128);
3003 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3004 break;
3005 case 0xa: /* V9 fabsd */
3006 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3007 break;
3008 case 0xb: /* V9 fabsq */
3009 CHECK_FPU_FEATURE(dc, FLOAT128);
3010 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3011 break;
3012 case 0x81: /* V9 fstox */
3013 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3014 break;
3015 case 0x82: /* V9 fdtox */
3016 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3017 break;
3018 case 0x83: /* V9 fqtox */
3019 CHECK_FPU_FEATURE(dc, FLOAT128);
3020 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3021 break;
3022 case 0x84: /* V9 fxtos */
3023 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3024 break;
3025 case 0x88: /* V9 fxtod */
3026 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3027 break;
3028 case 0x8c: /* V9 fxtoq */
3029 CHECK_FPU_FEATURE(dc, FLOAT128);
3030 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3031 break;
3032 #endif
3033 default:
3034 goto illegal_insn;
3035 }
3036 } else if (xop == 0x35) { /* FPop2: FP conditional moves & compares */
3037 #ifdef TARGET_SPARC64
3038 int cond;
3039 #endif
3040 if (gen_trap_ifnofpu(dc, cpu_cond))
3041 goto jmp_insn;
3042 gen_op_clear_ieee_excp_and_FTT();
3043 rs1 = GET_FIELD(insn, 13, 17);
3044 rs2 = GET_FIELD(insn, 27, 31);
3045 xop = GET_FIELD(insn, 18, 26);
3046 save_state(dc, cpu_cond);
3047 #ifdef TARGET_SPARC64
3048 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
3049 int l1;
3050
3051 l1 = gen_new_label();
3052 cond = GET_FIELD_SP(insn, 14, 17);
3053 cpu_src1 = get_src1(insn, cpu_src1);
3054 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3055 0, l1);
3056 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3057 gen_store_fpr_F(dc, rd, cpu_src1_32);
3058 gen_set_label(l1);
3059 break;
3060 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3061 int l1;
3062
3063 l1 = gen_new_label();
3064 cond = GET_FIELD_SP(insn, 14, 17);
3065 cpu_src1 = get_src1(insn, cpu_src1);
3066 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3067 0, l1);
3068 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3069 gen_store_fpr_D(dc, rd, cpu_src1_64);
3070 gen_set_label(l1);
3071 break;
3072 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3073 int l1;
3074
3075 CHECK_FPU_FEATURE(dc, FLOAT128);
3076 l1 = gen_new_label();
3077 cond = GET_FIELD_SP(insn, 14, 17);
3078 cpu_src1 = get_src1(insn, cpu_src1);
3079 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3080 0, l1);
3081 gen_move_Q(rd, rs2);
3082 gen_set_label(l1);
3083 break;
3084 }
3085 #endif
3086 switch (xop) {
3087 #ifdef TARGET_SPARC64
3088 #define FMOVSCC(fcc) \
3089 { \
3090 TCGv r_cond; \
3091 int l1; \
3092 \
3093 l1 = gen_new_label(); \
3094 r_cond = tcg_temp_new(); \
3095 cond = GET_FIELD_SP(insn, 14, 17); \
3096 gen_fcond(r_cond, fcc, cond); \
3097 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3098 0, l1); \
3099 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3100 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3101 gen_set_label(l1); \
3102 tcg_temp_free(r_cond); \
3103 }
3104 #define FMOVDCC(fcc) \
3105 { \
3106 TCGv r_cond; \
3107 int l1; \
3108 \
3109 l1 = gen_new_label(); \
3110 r_cond = tcg_temp_new(); \
3111 cond = GET_FIELD_SP(insn, 14, 17); \
3112 gen_fcond(r_cond, fcc, cond); \
3113 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3114 0, l1); \
3115 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3116 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3117 gen_set_label(l1); \
3118 tcg_temp_free(r_cond); \
3119 }
3120 #define FMOVQCC(fcc) \
3121 { \
3122 TCGv r_cond; \
3123 int l1; \
3124 \
3125 l1 = gen_new_label(); \
3126 r_cond = tcg_temp_new(); \
3127 cond = GET_FIELD_SP(insn, 14, 17); \
3128 gen_fcond(r_cond, fcc, cond); \
3129 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3130 0, l1); \
3131 gen_move_Q(rd, rs2); \
3132 gen_set_label(l1); \
3133 tcg_temp_free(r_cond); \
3134 }
3135 case 0x001: /* V9 fmovscc %fcc0 */
3136 FMOVSCC(0);
3137 break;
3138 case 0x002: /* V9 fmovdcc %fcc0 */
3139 FMOVDCC(0);
3140 break;
3141 case 0x003: /* V9 fmovqcc %fcc0 */
3142 CHECK_FPU_FEATURE(dc, FLOAT128);
3143 FMOVQCC(0);
3144 break;
3145 case 0x041: /* V9 fmovscc %fcc1 */
3146 FMOVSCC(1);
3147 break;
3148 case 0x042: /* V9 fmovdcc %fcc1 */
3149 FMOVDCC(1);
3150 break;
3151 case 0x043: /* V9 fmovqcc %fcc1 */
3152 CHECK_FPU_FEATURE(dc, FLOAT128);
3153 FMOVQCC(1);
3154 break;
3155 case 0x081: /* V9 fmovscc %fcc2 */
3156 FMOVSCC(2);
3157 break;
3158 case 0x082: /* V9 fmovdcc %fcc2 */
3159 FMOVDCC(2);
3160 break;
3161 case 0x083: /* V9 fmovqcc %fcc2 */
3162 CHECK_FPU_FEATURE(dc, FLOAT128);
3163 FMOVQCC(2);
3164 break;
3165 case 0x0c1: /* V9 fmovscc %fcc3 */
3166 FMOVSCC(3);
3167 break;
3168 case 0x0c2: /* V9 fmovdcc %fcc3 */
3169 FMOVDCC(3);
3170 break;
3171 case 0x0c3: /* V9 fmovqcc %fcc3 */
3172 CHECK_FPU_FEATURE(dc, FLOAT128);
3173 FMOVQCC(3);
3174 break;
3175 #undef FMOVSCC
3176 #undef FMOVDCC
3177 #undef FMOVQCC
3178 #define FMOVSCC(icc) \
3179 { \
3180 TCGv r_cond; \
3181 int l1; \
3182 \
3183 l1 = gen_new_label(); \
3184 r_cond = tcg_temp_new(); \
3185 cond = GET_FIELD_SP(insn, 14, 17); \
3186 gen_cond(r_cond, icc, cond, dc); \
3187 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3188 0, l1); \
3189 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3190 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3191 gen_set_label(l1); \
3192 tcg_temp_free(r_cond); \
3193 }
3194 #define FMOVDCC(icc) \
3195 { \
3196 TCGv r_cond; \
3197 int l1; \
3198 \
3199 l1 = gen_new_label(); \
3200 r_cond = tcg_temp_new(); \
3201 cond = GET_FIELD_SP(insn, 14, 17); \
3202 gen_cond(r_cond, icc, cond, dc); \
3203 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3204 0, l1); \
3205 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3206 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3207 gen_update_fprs_dirty(DFPREG(rd)); \
3208 gen_set_label(l1); \
3209 tcg_temp_free(r_cond); \
3210 }
3211 #define FMOVQCC(icc) \
3212 { \
3213 TCGv r_cond; \
3214 int l1; \
3215 \
3216 l1 = gen_new_label(); \
3217 r_cond = tcg_temp_new(); \
3218 cond = GET_FIELD_SP(insn, 14, 17); \
3219 gen_cond(r_cond, icc, cond, dc); \
3220 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3221 0, l1); \
3222 gen_move_Q(rd, rs2); \
3223 gen_set_label(l1); \
3224 tcg_temp_free(r_cond); \
3225 }
3226
3227 case 0x101: /* V9 fmovscc %icc */
3228 FMOVSCC(0);
3229 break;
3230 case 0x102: /* V9 fmovdcc %icc */
3231 FMOVDCC(0);
3232 break;
3233 case 0x103: /* V9 fmovqcc %icc */
3234 CHECK_FPU_FEATURE(dc, FLOAT128);
3235 FMOVQCC(0);
3236 break;
3237 case 0x181: /* V9 fmovscc %xcc */
3238 FMOVSCC(1);
3239 break;
3240 case 0x182: /* V9 fmovdcc %xcc */
3241 FMOVDCC(1);
3242 break;
3243 case 0x183: /* V9 fmovqcc %xcc */
3244 CHECK_FPU_FEATURE(dc, FLOAT128);
3245 FMOVQCC(1);
3246 break;
3247 #undef FMOVSCC
3248 #undef FMOVDCC
3249 #undef FMOVQCC
3250 #endif
3251 case 0x51: /* fcmps, V9 %fcc */
3252 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3253 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3254 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3255 break;
3256 case 0x52: /* fcmpd, V9 %fcc */
3257 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3258 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3259 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3260 break;
3261 case 0x53: /* fcmpq, V9 %fcc */
3262 CHECK_FPU_FEATURE(dc, FLOAT128);
3263 gen_op_load_fpr_QT0(QFPREG(rs1));
3264 gen_op_load_fpr_QT1(QFPREG(rs2));
3265 gen_op_fcmpq(rd & 3);
3266 break;
3267 case 0x55: /* fcmpes, V9 %fcc */
3268 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3269 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3270 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3271 break;
3272 case 0x56: /* fcmped, V9 %fcc */
3273 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3274 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3275 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3276 break;
3277 case 0x57: /* fcmpeq, V9 %fcc */
3278 CHECK_FPU_FEATURE(dc, FLOAT128);
3279 gen_op_load_fpr_QT0(QFPREG(rs1));
3280 gen_op_load_fpr_QT1(QFPREG(rs2));
3281 gen_op_fcmpeq(rd & 3);
3282 break;
3283 default:
3284 goto illegal_insn;
3285 }
3286 } else if (xop == 0x2) {
3287 // clr/mov shortcut
3288
3289 rs1 = GET_FIELD(insn, 13, 17);
3290 if (rs1 == 0) {
3291 // or %g0, x, rd is a plain move of x into rd
3292 if (IS_IMM) { /* immediate */
3293 TCGv r_const;
3294
3295 simm = GET_FIELDs(insn, 19, 31);
3296 r_const = tcg_const_tl(simm);
3297 gen_movl_TN_reg(rd, r_const);
3298 tcg_temp_free(r_const);
3299 } else { /* register */
3300 rs2 = GET_FIELD(insn, 27, 31);
3301 gen_movl_reg_TN(rs2, cpu_dst);
3302 gen_movl_TN_reg(rd, cpu_dst);
3303 }
3304 } else {
3305 cpu_src1 = get_src1(insn, cpu_src1);
3306 if (IS_IMM) { /* immediate */
3307 simm = GET_FIELDs(insn, 19, 31);
3308 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3309 gen_movl_TN_reg(rd, cpu_dst);
3310 } else { /* register */
3311 // or x, %g0, rd is likewise a plain move of x into rd
3312 rs2 = GET_FIELD(insn, 27, 31);
3313 if (rs2 != 0) {
3314 gen_movl_reg_TN(rs2, cpu_src2);
3315 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3316 gen_movl_TN_reg(rd, cpu_dst);
3317 } else
3318 gen_movl_TN_reg(rd, cpu_src1);
3319 }
3320 }
3321 #ifdef TARGET_SPARC64
3322 } else if (xop == 0x25) { /* sll, V9 sllx */
3323 cpu_src1 = get_src1(insn, cpu_src1);
3324 if (IS_IMM) { /* immediate */
3325 simm = GET_FIELDs(insn, 20, 31);
3326 if (insn & (1 << 12)) {
3327 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3328 } else {
3329 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3330 }
3331 } else { /* register */
3332 rs2 = GET_FIELD(insn, 27, 31);
3333 gen_movl_reg_TN(rs2, cpu_src2);
3334 if (insn & (1 << 12)) {
3335 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3336 } else {
3337 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3338 }
3339 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3340 }
3341 gen_movl_TN_reg(rd, cpu_dst);
3342 } else if (xop == 0x26) { /* srl, V9 srlx */
3343 cpu_src1 = get_src1(insn, cpu_src1);
3344 if (IS_IMM) { /* immediate */
3345 simm = GET_FIELDs(insn, 20, 31);
3346 if (insn & (1 << 12)) {
3347 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3348 } else {
3349 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3350 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3351 }
3352 } else { /* register */
3353 rs2 = GET_FIELD(insn, 27, 31);
3354 gen_movl_reg_TN(rs2, cpu_src2);
3355 if (insn & (1 << 12)) {
3356 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3357 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3358 } else {
3359 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3360 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3361 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3362 }
3363 }
3364 gen_movl_TN_reg(rd, cpu_dst);
3365 } else if (xop == 0x27) { /* sra, V9 srax */
3366 cpu_src1 = get_src1(insn, cpu_src1);
3367 if (IS_IMM) { /* immediate */
3368 simm = GET_FIELDs(insn, 20, 31);
3369 if (insn & (1 << 12)) {
3370 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3371 } else {
3372 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3373 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3374 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3375 }
3376 } else { /* register */
3377 rs2 = GET_FIELD(insn, 27, 31);
3378 gen_movl_reg_TN(rs2, cpu_src2);
3379 if (insn & (1 << 12)) {
3380 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3381 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3382 } else {
3383 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3384 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3385 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3386 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3387 }
3388 }
3389 gen_movl_TN_reg(rd, cpu_dst);
3390 #endif
3391 } else if (xop < 0x36) {
3392 if (xop < 0x20) {
3393 cpu_src1 = get_src1(insn, cpu_src1);
3394 cpu_src2 = get_src2(insn, cpu_src2);
3395 switch (xop & ~0x10) {
3396 case 0x0: /* add */
3397 if (IS_IMM) {
3398 simm = GET_FIELDs(insn, 19, 31);
3399 if (xop & 0x10) {
3400 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3401 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3402 dc->cc_op = CC_OP_ADD;
3403 } else {
3404 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3405 }
3406 } else {
3407 if (xop & 0x10) {
3408 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3409 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3410 dc->cc_op = CC_OP_ADD;
3411 } else {
3412 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3413 }
3414 }
3415 break;
3416 case 0x1: /* and */
3417 if (IS_IMM) {
3418 simm = GET_FIELDs(insn, 19, 31);
3419 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3420 } else {
3421 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3422 }
3423 if (xop & 0x10) {
3424 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3425 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3426 dc->cc_op = CC_OP_LOGIC;
3427 }
3428 break;
3429 case 0x2: /* or */
3430 if (IS_IMM) {
3431 simm = GET_FIELDs(insn, 19, 31);
3432 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3433 } else {
3434 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3435 }
3436 if (xop & 0x10) {
3437 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3438 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3439 dc->cc_op = CC_OP_LOGIC;
3440 }
3441 break;
3442 case 0x3: /* xor */
3443 if (IS_IMM) {
3444 simm = GET_FIELDs(insn, 19, 31);
3445 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3446 } else {
3447 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3448 }
3449 if (xop & 0x10) {
3450 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3451 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3452 dc->cc_op = CC_OP_LOGIC;
3453 }
3454 break;
3455 case 0x4: /* sub */
3456 if (IS_IMM) {
3457 simm = GET_FIELDs(insn, 19, 31);
3458 if (xop & 0x10) {
3459 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3460 } else {
3461 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3462 }
3463 } else {
3464 if (xop & 0x10) {
3465 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3466 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3467 dc->cc_op = CC_OP_SUB;
3468 } else {
3469 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3470 }
3471 }
3472 break;
3473 case 0x5: /* andn */
3474 if (IS_IMM) {
3475 simm = GET_FIELDs(insn, 19, 31);
3476 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3477 } else {
3478 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3479 }
3480 if (xop & 0x10) {
3481 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3482 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3483 dc->cc_op = CC_OP_LOGIC;
3484 }
3485 break;
3486 case 0x6: /* orn */
3487 if (IS_IMM) {
3488 simm = GET_FIELDs(insn, 19, 31);
3489 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3490 } else {
3491 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3492 }
3493 if (xop & 0x10) {
3494 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3495 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3496 dc->cc_op = CC_OP_LOGIC;
3497 }
3498 break;
3499 case 0x7: /* xorn */
3500 if (IS_IMM) {
3501 simm = GET_FIELDs(insn, 19, 31);
3502 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3503 } else {
3504 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3505 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3506 }
3507 if (xop & 0x10) {
3508 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3509 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3510 dc->cc_op = CC_OP_LOGIC;
3511 }
3512 break;
3513 case 0x8: /* addx, V9 addc */
3514 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3515 (xop & 0x10));
3516 break;
3517 #ifdef TARGET_SPARC64
3518 case 0x9: /* V9 mulx */
3519 if (IS_IMM) {
3520 simm = GET_FIELDs(insn, 19, 31);
3521 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3522 } else {
3523 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3524 }
3525 break;
3526 #endif
3527 case 0xa: /* umul */
3528 CHECK_IU_FEATURE(dc, MUL);
3529 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3530 if (xop & 0x10) {
3531 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3532 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3533 dc->cc_op = CC_OP_LOGIC;
3534 }
3535 break;
3536 case 0xb: /* smul */
3537 CHECK_IU_FEATURE(dc, MUL);
3538 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3539 if (xop & 0x10) {
3540 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3541 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3542 dc->cc_op = CC_OP_LOGIC;
3543 }
3544 break;
3545 case 0xc: /* subx, V9 subc */
3546 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3547 (xop & 0x10));
3548 break;
3549 #ifdef TARGET_SPARC64
3550 case 0xd: /* V9 udivx */
3551 {
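/* Local (branch-surviving) temps are needed here: with the TCG of
   this era, plain temps do not keep their values across the branch
   that gen_trap_ifdivzero emits, so the operands are copied into
   tcg_temp_local_new() temps first. */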
3552 TCGv r_temp1, r_temp2;
3553 r_temp1 = tcg_temp_local_new();
3554 r_temp2 = tcg_temp_local_new();
3555 tcg_gen_mov_tl(r_temp1, cpu_src1);
3556 tcg_gen_mov_tl(r_temp2, cpu_src2);
3557 gen_trap_ifdivzero_tl(r_temp2);
3558 tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
3559 tcg_temp_free(r_temp1);
3560 tcg_temp_free(r_temp2);
3561 }
3562 break;
3563 #endif
3564 case 0xe: /* udiv */
3565 CHECK_IU_FEATURE(dc, DIV);
3566 if (xop & 0x10) {
3567 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3568 cpu_src2);
3569 dc->cc_op = CC_OP_DIV;
3570 } else {
3571 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3572 cpu_src2);
3573 }
3574 break;
3575 case 0xf: /* sdiv */
3576 CHECK_IU_FEATURE(dc, DIV);
3577 if (xop & 0x10) {
3578 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3579 cpu_src2);
3580 dc->cc_op = CC_OP_DIV;
3581 } else {
3582 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3583 cpu_src2);
3584 }
3585 break;
3586 default:
3587 goto illegal_insn;
3588 }
3589 gen_movl_TN_reg(rd, cpu_dst);
3590 } else {
3591 cpu_src1 = get_src1(insn, cpu_src1);
3592 cpu_src2 = get_src2(insn, cpu_src2);
3593 switch (xop) {
3594 case 0x20: /* taddcc */
3595 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3596 gen_movl_TN_reg(rd, cpu_dst);
3597 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3598 dc->cc_op = CC_OP_TADD;
3599 break;
3600 case 0x21: /* tsubcc */
3601 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3602 gen_movl_TN_reg(rd, cpu_dst);
3603 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3604 dc->cc_op = CC_OP_TSUB;
3605 break;
3606 case 0x22: /* taddcctv */
3607 save_state(dc, cpu_cond);
3608 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3609 gen_movl_TN_reg(rd, cpu_dst);
3610 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3611 dc->cc_op = CC_OP_TADDTV;
3612 break;
3613 case 0x23: /* tsubcctv */
3614 save_state(dc, cpu_cond);
3615 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3616 gen_movl_TN_reg(rd, cpu_dst);
3617 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3618 dc->cc_op = CC_OP_TSUBTV;
3619 break;
3620 case 0x24: /* mulscc */
3621 gen_helper_compute_psr(cpu_env);
3622 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3623 gen_movl_TN_reg(rd, cpu_dst);
3624 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3625 dc->cc_op = CC_OP_ADD;
3626 break;
3627 #ifndef TARGET_SPARC64
3628 case 0x25: /* sll */
3629 if (IS_IMM) { /* immediate */
3630 simm = GET_FIELDs(insn, 20, 31);
3631 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3632 } else { /* register */
3633 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3634 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3635 }
3636 gen_movl_TN_reg(rd, cpu_dst);
3637 break;
3638 case 0x26: /* srl */
3639 if (IS_IMM) { /* immediate */
3640 simm = GET_FIELDs(insn, 20, 31);
3641 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3642 } else { /* register */
3643 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3644 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3645 }
3646 gen_movl_TN_reg(rd, cpu_dst);
3647 break;
3648 case 0x27: /* sra */
3649 if (IS_IMM) { /* immediate */
3650 simm = GET_FIELDs(insn, 20, 31);
3651 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3652 } else { /* register */
3653 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3654 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3655 }
3656 gen_movl_TN_reg(rd, cpu_dst);
3657 break;
3658 #endif
3659 case 0x30: /* wry, V9 wrasr */
3660 {
3661 switch(rd) {
3662 case 0: /* wry */
3663 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3664 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3665 break;
3666 #ifndef TARGET_SPARC64
3667 case 0x01 ... 0x0f: /* undefined in the
3668 SPARCv8 manual, nop
3669 on the microSPARC
3670 II */
3671 case 0x10 ... 0x1f: /* implementation-dependent
3672 in the SPARCv8
3673 manual, nop on the
3674 microSPARC II */
3675 break;
3676 #else
3677 case 0x2: /* V9 wrccr */
3678 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3679 gen_helper_wrccr(cpu_env, cpu_dst);
3680 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3681 dc->cc_op = CC_OP_FLAGS;
3682 break;
3683 case 0x3: /* V9 wrasi */
3684 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3685 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3686 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3687 break;
3688 case 0x6: /* V9 wrfprs */
3689 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3690 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3691 save_state(dc, cpu_cond);
3692 gen_op_next_insn();
3693 tcg_gen_exit_tb(0);
3694 dc->is_br = 1;
3695 break;
3696 case 0xf: /* V9 sir, nop if user */
3697 #if !defined(CONFIG_USER_ONLY)
3698 if (supervisor(dc)) {
3699 ; // XXX
3700 }
3701 #endif
3702 break;
3703 case 0x13: /* Graphics Status */
3704 if (gen_trap_ifnofpu(dc, cpu_cond))
3705 goto jmp_insn;
3706 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3707 break;
3708 case 0x14: /* Softint set */
3709 if (!supervisor(dc))
3710 goto illegal_insn;
3711 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3712 gen_helper_set_softint(cpu_env, cpu_tmp64);
3713 break;
3714 case 0x15: /* Softint clear */
3715 if (!supervisor(dc))
3716 goto illegal_insn;
3717 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3718 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3719 break;
3720 case 0x16: /* Softint write */
3721 if (!supervisor(dc))
3722 goto illegal_insn;
3723 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3724 gen_helper_write_softint(cpu_env, cpu_tmp64);
3725 break;
3726 case 0x17: /* Tick compare */
3727 #if !defined(CONFIG_USER_ONLY)
3728 if (!supervisor(dc))
3729 goto illegal_insn;
3730 #endif
3731 {
3732 TCGv_ptr r_tickptr;
3733
3734 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3735 cpu_src2);
3736 r_tickptr = tcg_temp_new_ptr();
3737 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3738 offsetof(CPUSPARCState, tick));
3739 gen_helper_tick_set_limit(r_tickptr,
3740 cpu_tick_cmpr);
3741 tcg_temp_free_ptr(r_tickptr);
3742 }
3743 break;
3744 case 0x18: /* System tick */
3745 #if !defined(CONFIG_USER_ONLY)
3746 if (!supervisor(dc))
3747 goto illegal_insn;
3748 #endif
3749 {
3750 TCGv_ptr r_tickptr;
3751
3752 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3753 cpu_src2);
3754 r_tickptr = tcg_temp_new_ptr();
3755 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3756 offsetof(CPUSPARCState, stick));
3757 gen_helper_tick_set_count(r_tickptr,
3758 cpu_dst);
3759 tcg_temp_free_ptr(r_tickptr);
3760 }
3761 break;
3762 case 0x19: /* System tick compare */
3763 #if !defined(CONFIG_USER_ONLY)
3764 if (!supervisor(dc))
3765 goto illegal_insn;
3766 #endif
3767 {
3768 TCGv_ptr r_tickptr;
3769
3770 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3771 cpu_src2);
3772 r_tickptr = tcg_temp_new_ptr();
3773 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3774 offsetof(CPUSPARCState, stick));
3775 gen_helper_tick_set_limit(r_tickptr,
3776 cpu_stick_cmpr);
3777 tcg_temp_free_ptr(r_tickptr);
3778 }
3779 break;
3780
3781 case 0x10: /* Performance Control */
3782 case 0x11: /* Performance Instrumentation
3783 Counter */
3784 case 0x12: /* Dispatch Control */
3785 #endif
3786 default:
3787 goto illegal_insn;
3788 }
3789 }
3790 break;
3791 #if !defined(CONFIG_USER_ONLY)
3792 case 0x31: /* wrpsr, V9 saved, restored */
3793 {
3794 if (!supervisor(dc))
3795 goto priv_insn;
3796 #ifdef TARGET_SPARC64
3797 switch (rd) {
3798 case 0:
3799 gen_helper_saved(cpu_env);
3800 break;
3801 case 1:
3802 gen_helper_restored(cpu_env);
3803 break;
3804 case 2: /* UA2005 allclean */
3805 case 3: /* UA2005 otherw */
3806 case 4: /* UA2005 normalw */
3807 case 5: /* UA2005 invalw */
3808 // XXX
3809 default:
3810 goto illegal_insn;
3811 }
3812 #else
3813 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3814 gen_helper_wrpsr(cpu_env, cpu_dst);
3815 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3816 dc->cc_op = CC_OP_FLAGS;
3817 save_state(dc, cpu_cond);
3818 gen_op_next_insn();
3819 tcg_gen_exit_tb(0);
3820 dc->is_br = 1;
3821 #endif
3822 }
3823 break;
3824 case 0x32: /* wrwim, V9 wrpr */
3825 {
3826 if (!supervisor(dc))
3827 goto priv_insn;
3828 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3829 #ifdef TARGET_SPARC64
3830 switch (rd) {
3831 case 0: // tpc
3832 {
3833 TCGv_ptr r_tsptr;
3834
3835 r_tsptr = tcg_temp_new_ptr();
3836 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3837 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3838 offsetof(trap_state, tpc));
3839 tcg_temp_free_ptr(r_tsptr);
3840 }
3841 break;
3842 case 1: // tnpc
3843 {
3844 TCGv_ptr r_tsptr;
3845
3846 r_tsptr = tcg_temp_new_ptr();
3847 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3848 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3849 offsetof(trap_state, tnpc));
3850 tcg_temp_free_ptr(r_tsptr);
3851 }
3852 break;
3853 case 2: // tstate
3854 {
3855 TCGv_ptr r_tsptr;
3856
3857 r_tsptr = tcg_temp_new_ptr();
3858 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3859 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3860 offsetof(trap_state,
3861 tstate));
3862 tcg_temp_free_ptr(r_tsptr);
3863 }
3864 break;
3865 case 3: // tt
3866 {
3867 TCGv_ptr r_tsptr;
3868
3869 r_tsptr = tcg_temp_new_ptr();
3870 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3871 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3872 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3873 offsetof(trap_state, tt));
3874 tcg_temp_free_ptr(r_tsptr);
3875 }
3876 break;
3877 case 4: // tick
3878 {
3879 TCGv_ptr r_tickptr;
3880
3881 r_tickptr = tcg_temp_new_ptr();
3882 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3883 offsetof(CPUSPARCState, tick));
3884 gen_helper_tick_set_count(r_tickptr,
3885 cpu_tmp0);
3886 tcg_temp_free_ptr(r_tickptr);
3887 }
3888 break;
3889 case 5: // tba
3890 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3891 break;
3892 case 6: // pstate
3893 {
3894 TCGv r_tmp = tcg_temp_local_new();
3895
3896 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3897 save_state(dc, cpu_cond);
3898 gen_helper_wrpstate(cpu_env, r_tmp);
3899 tcg_temp_free(r_tmp);
3900 dc->npc = DYNAMIC_PC;
3901 }
3902 break;
3903 case 7: // tl
3904 {
3905 TCGv r_tmp = tcg_temp_local_new();
3906
3907 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3908 save_state(dc, cpu_cond);
3909 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3910 tcg_temp_free(r_tmp);
3911 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3912 offsetof(CPUSPARCState, tl));
3913 dc->npc = DYNAMIC_PC;
3914 }
3915 break;
3916 case 8: // pil
3917 gen_helper_wrpil(cpu_env, cpu_tmp0);
3918 break;
3919 case 9: // cwp
3920 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3921 break;
3922 case 10: // cansave
3923 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3924 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3925 offsetof(CPUSPARCState,
3926 cansave));
3927 break;
3928 case 11: // canrestore
3929 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3930 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3931 offsetof(CPUSPARCState,
3932 canrestore));
3933 break;
3934 case 12: // cleanwin
3935 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3936 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3937 offsetof(CPUSPARCState,
3938 cleanwin));
3939 break;
3940 case 13: // otherwin
3941 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3942 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3943 offsetof(CPUSPARCState,
3944 otherwin));
3945 break;
3946 case 14: // wstate
3947 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3948 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3949 offsetof(CPUSPARCState,
3950 wstate));
3951 break;
3952 case 16: // UA2005 gl
3953 CHECK_IU_FEATURE(dc, GL);
3954 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3955 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3956 offsetof(CPUSPARCState, gl));
3957 break;
3958 case 26: // UA2005 strand status
3959 CHECK_IU_FEATURE(dc, HYPV);
3960 if (!hypervisor(dc))
3961 goto priv_insn;
3962 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3963 break;
3964 default:
3965 goto illegal_insn;
3966 }
3967 #else
3968 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3969 if (dc->def->nwindows != 32)
3970 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3971 (1 << dc->def->nwindows) - 1);
3972 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3973 #endif
3974 }
3975 break;
3976 case 0x33: /* wrtbr, UA2005 wrhpr */
3977 {
3978 #ifndef TARGET_SPARC64
3979 if (!supervisor(dc))
3980 goto priv_insn;
3981 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3982 #else
3983 CHECK_IU_FEATURE(dc, HYPV);
3984 if (!hypervisor(dc))
3985 goto priv_insn;
3986 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3987 switch (rd) {
3988 case 0: // hpstate
3989 // XXX gen_op_wrhpstate();
3990 save_state(dc, cpu_cond);
3991 gen_op_next_insn();
3992 tcg_gen_exit_tb(0);
3993 dc->is_br = 1;
3994 break;
3995 case 1: // htstate
3996 // XXX gen_op_wrhtstate();
3997 break;
3998 case 3: // hintp
3999 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4000 break;
4001 case 5: // htba
4002 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4003 break;
4004 case 31: // hstick_cmpr
4005 {
4006 TCGv_ptr r_tickptr;
4007
4008 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4009 r_tickptr = tcg_temp_new_ptr();
4010 tcg_gen_ld_ptr(r_tickptr, cpu_env,
4011 offsetof(CPUSPARCState, hstick));
4012 gen_helper_tick_set_limit(r_tickptr,
4013 cpu_hstick_cmpr);
4014 tcg_temp_free_ptr(r_tickptr);
4015 }
4016 break;
4017 case 6: // hver readonly
4018 default:
4019 goto illegal_insn;
4020 }
4021 #endif
4022 }
4023 break;
4024 #endif
4025 #ifdef TARGET_SPARC64
4026 case 0x2c: /* V9 movcc */
4027 {
4028 int cc = GET_FIELD_SP(insn, 11, 12);
4029 int cond = GET_FIELD_SP(insn, 14, 17);
4030 TCGv r_cond;
4031 int l1;
4032
4033 r_cond = tcg_temp_new();
4034 if (insn & (1 << 18)) {
4035 if (cc == 0)
4036 gen_cond(r_cond, 0, cond, dc);
4037 else if (cc == 2)
4038 gen_cond(r_cond, 1, cond, dc);
4039 else
4040 goto illegal_insn;
4041 } else {
4042 gen_fcond(r_cond, cc, cond);
4043 }
4044
4045 l1 = gen_new_label();
4046
4047 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
4048 if (IS_IMM) { /* immediate */
4049 TCGv r_const;
4050
4051 simm = GET_FIELD_SPs(insn, 0, 10);
4052 r_const = tcg_const_tl(simm);
4053 gen_movl_TN_reg(rd, r_const);
4054 tcg_temp_free(r_const);
4055 } else {
4056 rs2 = GET_FIELD_SP(insn, 0, 4);
4057 gen_movl_reg_TN(rs2, cpu_tmp0);
4058 gen_movl_TN_reg(rd, cpu_tmp0);
4059 }
4060 gen_set_label(l1);
4061 tcg_temp_free(r_cond);
4062 break;
4063 }
4064 case 0x2d: /* V9 sdivx */
4065 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
4066 gen_movl_TN_reg(rd, cpu_dst);
4067 break;
4068 case 0x2e: /* V9 popc */
4069 {
4070 cpu_src2 = get_src2(insn, cpu_src2);
4071 gen_helper_popc(cpu_dst, cpu_src2);
4072 gen_movl_TN_reg(rd, cpu_dst);
4073 }
break; /* popc must not fall through into movr */
4074 case 0x2f: /* V9 movr */
4075 {
4076 int cond = GET_FIELD_SP(insn, 10, 12);
4077 int l1;
4078
4079 cpu_src1 = get_src1(insn, cpu_src1);
4080
4081 l1 = gen_new_label();
4082
4083 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
4084 cpu_src1, 0, l1);
4085 if (IS_IMM) { /* immediate */
4086 TCGv r_const;
4087
4088 simm = GET_FIELD_SPs(insn, 0, 9);
4089 r_const = tcg_const_tl(simm);
4090 gen_movl_TN_reg(rd, r_const);
4091 tcg_temp_free(r_const);
4092 } else {
4093 rs2 = GET_FIELD_SP(insn, 0, 4);
4094 gen_movl_reg_TN(rs2, cpu_tmp0);
4095 gen_movl_TN_reg(rd, cpu_tmp0);
4096 }
4097 gen_set_label(l1);
4098 break;
4099 }
4100 #endif
4101 default:
4102 goto illegal_insn;
4103 }
4104 }
4105 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4106 #ifdef TARGET_SPARC64
4107 int opf = GET_FIELD_SP(insn, 5, 13);
4108 rs1 = GET_FIELD(insn, 13, 17);
4109 rs2 = GET_FIELD(insn, 27, 31);
4110 if (gen_trap_ifnofpu(dc, cpu_cond))
4111 goto jmp_insn;
4112
4113 switch (opf) {
4114 case 0x000: /* VIS I edge8cc */
4115 CHECK_FPU_FEATURE(dc, VIS1);
4116 gen_movl_reg_TN(rs1, cpu_src1);
4117 gen_movl_reg_TN(rs2, cpu_src2);
4118 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4119 gen_movl_TN_reg(rd, cpu_dst);
4120 break;
4121 case 0x001: /* VIS II edge8n */
4122 CHECK_FPU_FEATURE(dc, VIS2);
4123 gen_movl_reg_TN(rs1, cpu_src1);
4124 gen_movl_reg_TN(rs2, cpu_src2);
4125 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4126 gen_movl_TN_reg(rd, cpu_dst);
4127 break;
4128 case 0x002: /* VIS I edge8lcc */
4129 CHECK_FPU_FEATURE(dc, VIS1);
4130 gen_movl_reg_TN(rs1, cpu_src1);
4131 gen_movl_reg_TN(rs2, cpu_src2);
4132 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4133 gen_movl_TN_reg(rd, cpu_dst);
4134 break;
4135 case 0x003: /* VIS II edge8ln */
4136 CHECK_FPU_FEATURE(dc, VIS2);
4137 gen_movl_reg_TN(rs1, cpu_src1);
4138 gen_movl_reg_TN(rs2, cpu_src2);
4139 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4140 gen_movl_TN_reg(rd, cpu_dst);
4141 break;
4142 case 0x004: /* VIS I edge16cc */
4143 CHECK_FPU_FEATURE(dc, VIS1);
4144 gen_movl_reg_TN(rs1, cpu_src1);
4145 gen_movl_reg_TN(rs2, cpu_src2);
4146 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4147 gen_movl_TN_reg(rd, cpu_dst);
4148 break;
4149 case 0x005: /* VIS II edge16n */
4150 CHECK_FPU_FEATURE(dc, VIS2);
4151 gen_movl_reg_TN(rs1, cpu_src1);
4152 gen_movl_reg_TN(rs2, cpu_src2);
4153 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4154 gen_movl_TN_reg(rd, cpu_dst);
4155 break;
4156 case 0x006: /* VIS I edge16lcc */
4157 CHECK_FPU_FEATURE(dc, VIS1);
4158 gen_movl_reg_TN(rs1, cpu_src1);
4159 gen_movl_reg_TN(rs2, cpu_src2);
4160 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4161 gen_movl_TN_reg(rd, cpu_dst);
4162 break;
4163 case 0x007: /* VIS II edge16ln */
4164 CHECK_FPU_FEATURE(dc, VIS2);
4165 gen_movl_reg_TN(rs1, cpu_src1);
4166 gen_movl_reg_TN(rs2, cpu_src2);
4167 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4168 gen_movl_TN_reg(rd, cpu_dst);
4169 break;
4170 case 0x008: /* VIS I edge32cc */
4171 CHECK_FPU_FEATURE(dc, VIS1);
4172 gen_movl_reg_TN(rs1, cpu_src1);
4173 gen_movl_reg_TN(rs2, cpu_src2);
4174 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4175 gen_movl_TN_reg(rd, cpu_dst);
4176 break;
4177 case 0x009: /* VIS II edge32n */
4178 CHECK_FPU_FEATURE(dc, VIS2);
4179 gen_movl_reg_TN(rs1, cpu_src1);
4180 gen_movl_reg_TN(rs2, cpu_src2);
4181 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4182 gen_movl_TN_reg(rd, cpu_dst);
4183 break;
4184 case 0x00a: /* VIS I edge32lcc */
4185 CHECK_FPU_FEATURE(dc, VIS1);
4186 gen_movl_reg_TN(rs1, cpu_src1);
4187 gen_movl_reg_TN(rs2, cpu_src2);
4188 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4189 gen_movl_TN_reg(rd, cpu_dst);
4190 break;
4191 case 0x00b: /* VIS II edge32ln */
4192 CHECK_FPU_FEATURE(dc, VIS2);
4193 gen_movl_reg_TN(rs1, cpu_src1);
4194 gen_movl_reg_TN(rs2, cpu_src2);
4195 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4196 gen_movl_TN_reg(rd, cpu_dst);
4197 break;
4198 case 0x010: /* VIS I array8 */
4199 CHECK_FPU_FEATURE(dc, VIS1);
4200 cpu_src1 = get_src1(insn, cpu_src1);
4201 gen_movl_reg_TN(rs2, cpu_src2);
4202 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4203 gen_movl_TN_reg(rd, cpu_dst);
4204 break;
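/* array16 and array32 below reuse the array8 helper: the blocked
   address computation is identical except that the result is scaled
   by the element size (<< 1 and << 2 respectively). */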
4205 case 0x012: /* VIS I array16 */
4206 CHECK_FPU_FEATURE(dc, VIS1);
4207 cpu_src1 = get_src1(insn, cpu_src1);
4208 gen_movl_reg_TN(rs2, cpu_src2);
4209 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4210 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4211 gen_movl_TN_reg(rd, cpu_dst);
4212 break;
4213 case 0x014: /* VIS I array32 */
4214 CHECK_FPU_FEATURE(dc, VIS1);
4215 cpu_src1 = get_src1(insn, cpu_src1);
4216 gen_movl_reg_TN(rs2, cpu_src2);
4217 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4218 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4219 gen_movl_TN_reg(rd, cpu_dst);
4220 break;
4221 case 0x018: /* VIS I alignaddr */
4222 CHECK_FPU_FEATURE(dc, VIS1);
4223 cpu_src1 = get_src1(insn, cpu_src1);
4224 gen_movl_reg_TN(rs2, cpu_src2);
4225 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4226 gen_movl_TN_reg(rd, cpu_dst);
4227 break;
4228 case 0x01a: /* VIS I alignaddrl */
4229 CHECK_FPU_FEATURE(dc, VIS1);
4230 cpu_src1 = get_src1(insn, cpu_src1);
4231 gen_movl_reg_TN(rs2, cpu_src2);
4232 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4233 gen_movl_TN_reg(rd, cpu_dst);
4234 break;
4235 case 0x019: /* VIS II bmask */
4236 CHECK_FPU_FEATURE(dc, VIS2);
4237 cpu_src1 = get_src1(insn, cpu_src1);
4238 cpu_src2 = get_src2(insn, cpu_src2);
4239 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4240 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4241 gen_movl_TN_reg(rd, cpu_dst);
4242 break;
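/* bmask: the deposit above latches the low 32 bits of the sum into
   GSR bits 63:32, where bshuffle (0x04c below) takes its permutation
   mask; the sum itself is also returned in rd. */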
4243 case 0x020: /* VIS I fcmple16 */
4244 CHECK_FPU_FEATURE(dc, VIS1);
4245 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4246 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4247 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4248 gen_movl_TN_reg(rd, cpu_dst);
4249 break;
4250 case 0x022: /* VIS I fcmpne16 */
4251 CHECK_FPU_FEATURE(dc, VIS1);
4252 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4253 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4254 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4255 gen_movl_TN_reg(rd, cpu_dst);
4256 break;
4257 case 0x024: /* VIS I fcmple32 */
4258 CHECK_FPU_FEATURE(dc, VIS1);
4259 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4260 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4261 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4262 gen_movl_TN_reg(rd, cpu_dst);
4263 break;
4264 case 0x026: /* VIS I fcmpne32 */
4265 CHECK_FPU_FEATURE(dc, VIS1);
4266 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4267 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4268 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4269 gen_movl_TN_reg(rd, cpu_dst);
4270 break;
4271 case 0x028: /* VIS I fcmpgt16 */
4272 CHECK_FPU_FEATURE(dc, VIS1);
4273 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4274 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4275 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4276 gen_movl_TN_reg(rd, cpu_dst);
4277 break;
4278 case 0x02a: /* VIS I fcmpeq16 */
4279 CHECK_FPU_FEATURE(dc, VIS1);
4280 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4281 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4282 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4283 gen_movl_TN_reg(rd, cpu_dst);
4284 break;
4285 case 0x02c: /* VIS I fcmpgt32 */
4286 CHECK_FPU_FEATURE(dc, VIS1);
4287 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4288 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4289 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4290 gen_movl_TN_reg(rd, cpu_dst);
4291 break;
4292 case 0x02e: /* VIS I fcmpeq32 */
4293 CHECK_FPU_FEATURE(dc, VIS1);
4294 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4295 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4296 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4297 gen_movl_TN_reg(rd, cpu_dst);
4298 break;
4299 case 0x031: /* VIS I fmul8x16 */
4300 CHECK_FPU_FEATURE(dc, VIS1);
4301 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4302 break;
4303 case 0x033: /* VIS I fmul8x16au */
4304 CHECK_FPU_FEATURE(dc, VIS1);
4305 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4306 break;
4307 case 0x035: /* VIS I fmul8x16al */
4308 CHECK_FPU_FEATURE(dc, VIS1);
4309 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4310 break;
4311 case 0x036: /* VIS I fmul8sux16 */
4312 CHECK_FPU_FEATURE(dc, VIS1);
4313 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4314 break;
4315 case 0x037: /* VIS I fmul8ulx16 */
4316 CHECK_FPU_FEATURE(dc, VIS1);
4317 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4318 break;
4319 case 0x038: /* VIS I fmuld8sux16 */
4320 CHECK_FPU_FEATURE(dc, VIS1);
4321 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4322 break;
4323 case 0x039: /* VIS I fmuld8ulx16 */
4324 CHECK_FPU_FEATURE(dc, VIS1);
4325 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4326 break;
4327 case 0x03a: /* VIS I fpack32 */
4328 CHECK_FPU_FEATURE(dc, VIS1);
4329 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4330 break;
4331 case 0x03b: /* VIS I fpack16 */
4332 CHECK_FPU_FEATURE(dc, VIS1);
4333 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4334 cpu_dst_32 = gen_dest_fpr_F();
4335 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4336 gen_store_fpr_F(dc, rd, cpu_dst_32);
4337 break;
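/* fpack16 and fpackfix narrow with saturation; the scale factor
   comes from GSR, which is why cpu_gsr is passed to the helpers
   explicitly rather than being read through cpu_env. */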
4338 case 0x03d: /* VIS I fpackfix */
4339 CHECK_FPU_FEATURE(dc, VIS1);
4340 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4341 cpu_dst_32 = gen_dest_fpr_F();
4342 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4343 gen_store_fpr_F(dc, rd, cpu_dst_32);
4344 break;
4345 case 0x03e: /* VIS I pdist */
4346 CHECK_FPU_FEATURE(dc, VIS1);
4347 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4348 break;
4349 case 0x048: /* VIS I faligndata */
4350 CHECK_FPU_FEATURE(dc, VIS1);
4351 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4352 break;
4353 case 0x04b: /* VIS I fpmerge */
4354 CHECK_FPU_FEATURE(dc, VIS1);
4355 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4356 break;
4357 case 0x04c: /* VIS II bshuffle */
4358 CHECK_FPU_FEATURE(dc, VIS2);
4359 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4360 break;
4361 case 0x04d: /* VIS I fexpand */
4362 CHECK_FPU_FEATURE(dc, VIS1);
4363 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4364 break;
4365 case 0x050: /* VIS I fpadd16 */
4366 CHECK_FPU_FEATURE(dc, VIS1);
4367 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4368 break;
4369 case 0x051: /* VIS I fpadd16s */
4370 CHECK_FPU_FEATURE(dc, VIS1);
4371 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4372 break;
4373 case 0x052: /* VIS I fpadd32 */
4374 CHECK_FPU_FEATURE(dc, VIS1);
4375 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4376 break;
4377 case 0x053: /* VIS I fpadd32s */
4378 CHECK_FPU_FEATURE(dc, VIS1);
4379 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4380 break;
4381 case 0x054: /* VIS I fpsub16 */
4382 CHECK_FPU_FEATURE(dc, VIS1);
4383 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4384 break;
4385 case 0x055: /* VIS I fpsub16s */
4386 CHECK_FPU_FEATURE(dc, VIS1);
4387 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4388 break;
4389 case 0x056: /* VIS I fpsub32 */
4390 CHECK_FPU_FEATURE(dc, VIS1);
4391 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4392 break;
4393 case 0x057: /* VIS I fpsub32s */
4394 CHECK_FPU_FEATURE(dc, VIS1);
4395 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4396 break;
4397 case 0x060: /* VIS I fzero */
4398 CHECK_FPU_FEATURE(dc, VIS1);
4399 cpu_dst_64 = gen_dest_fpr_D();
4400 tcg_gen_movi_i64(cpu_dst_64, 0);
4401 gen_store_fpr_D(dc, rd, cpu_dst_64);
4402 break;
4403 case 0x061: /* VIS I fzeros */
4404 CHECK_FPU_FEATURE(dc, VIS1);
4405 cpu_dst_32 = gen_dest_fpr_F();
4406 tcg_gen_movi_i32(cpu_dst_32, 0);
4407 gen_store_fpr_F(dc, rd, cpu_dst_32);
4408 break;
4409 case 0x062: /* VIS I fnor */
4410 CHECK_FPU_FEATURE(dc, VIS1);
4411 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4412 break;
4413 case 0x063: /* VIS I fnors */
4414 CHECK_FPU_FEATURE(dc, VIS1);
4415 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4416 break;
4417 case 0x064: /* VIS I fandnot2 */
4418 CHECK_FPU_FEATURE(dc, VIS1);
4419 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4420 break;
4421 case 0x065: /* VIS I fandnot2s */
4422 CHECK_FPU_FEATURE(dc, VIS1);
4423 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4424 break;
4425 case 0x066: /* VIS I fnot2 */
4426 CHECK_FPU_FEATURE(dc, VIS1);
4427 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4428 break;
4429 case 0x067: /* VIS I fnot2s */
4430 CHECK_FPU_FEATURE(dc, VIS1);
4431 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4432 break;
4433 case 0x068: /* VIS I fandnot1 */
4434 CHECK_FPU_FEATURE(dc, VIS1);
4435 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4436 break;
4437 case 0x069: /* VIS I fandnot1s */
4438 CHECK_FPU_FEATURE(dc, VIS1);
4439 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4440 break;
4441 case 0x06a: /* VIS I fnot1 */
4442 CHECK_FPU_FEATURE(dc, VIS1);
4443 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4444 break;
4445 case 0x06b: /* VIS I fnot1s */
4446 CHECK_FPU_FEATURE(dc, VIS1);
4447 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4448 break;
4449 case 0x06c: /* VIS I fxor */
4450 CHECK_FPU_FEATURE(dc, VIS1);
4451 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4452 break;
4453 case 0x06d: /* VIS I fxors */
4454 CHECK_FPU_FEATURE(dc, VIS1);
4455 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4456 break;
4457 case 0x06e: /* VIS I fnand */
4458 CHECK_FPU_FEATURE(dc, VIS1);
4459 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4460 break;
4461 case 0x06f: /* VIS I fnands */
4462 CHECK_FPU_FEATURE(dc, VIS1);
4463 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4464 break;
4465 case 0x070: /* VIS I fand */
4466 CHECK_FPU_FEATURE(dc, VIS1);
4467 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4468 break;
4469 case 0x071: /* VIS I fands */
4470 CHECK_FPU_FEATURE(dc, VIS1);
4471 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4472 break;
4473 case 0x072: /* VIS I fxnor */
4474 CHECK_FPU_FEATURE(dc, VIS1);
4475 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4476 break;
4477 case 0x073: /* VIS I fxnors */
4478 CHECK_FPU_FEATURE(dc, VIS1);
4479 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4480 break;
4481 case 0x074: /* VIS I fsrc1 */
4482 CHECK_FPU_FEATURE(dc, VIS1);
4483 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4484 gen_store_fpr_D(dc, rd, cpu_src1_64);
4485 break;
4486 case 0x075: /* VIS I fsrc1s */
4487 CHECK_FPU_FEATURE(dc, VIS1);
4488 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4489 gen_store_fpr_F(dc, rd, cpu_src1_32);
4490 break;
4491 case 0x076: /* VIS I fornot2 */
4492 CHECK_FPU_FEATURE(dc, VIS1);
4493 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4494 break;
4495 case 0x077: /* VIS I fornot2s */
4496 CHECK_FPU_FEATURE(dc, VIS1);
4497 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4498 break;
4499 case 0x078: /* VIS I fsrc2 */
4500 CHECK_FPU_FEATURE(dc, VIS1);
4501 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4502 gen_store_fpr_D(dc, rd, cpu_src1_64);
4503 break;
4504 case 0x079: /* VIS I fsrc2s */
4505 CHECK_FPU_FEATURE(dc, VIS1);
4506 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4507 gen_store_fpr_F(dc, rd, cpu_src1_32);
4508 break;
4509 case 0x07a: /* VIS I fornot1 */
4510 CHECK_FPU_FEATURE(dc, VIS1);
4511 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4512 break;
4513 case 0x07b: /* VIS I fornot1s */
4514 CHECK_FPU_FEATURE(dc, VIS1);
4515 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4516 break;
4517 case 0x07c: /* VIS I for */
4518 CHECK_FPU_FEATURE(dc, VIS1);
4519 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4520 break;
4521 case 0x07d: /* VIS I fors */
4522 CHECK_FPU_FEATURE(dc, VIS1);
4523 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4524 break;
4525 case 0x07e: /* VIS I fone */
4526 CHECK_FPU_FEATURE(dc, VIS1);
4527 cpu_dst_64 = gen_dest_fpr_D();
4528 tcg_gen_movi_i64(cpu_dst_64, -1);
4529 gen_store_fpr_D(dc, rd, cpu_dst_64);
4530 break;
4531 case 0x07f: /* VIS I fones */
4532 CHECK_FPU_FEATURE(dc, VIS1);
4533 cpu_dst_32 = gen_dest_fpr_F();
4534 tcg_gen_movi_i32(cpu_dst_32, -1);
4535 gen_store_fpr_F(dc, rd, cpu_dst_32);
4536 break;
4537 case 0x080: /* VIS I shutdown */
4538 case 0x081: /* VIS II siam */
4539 // XXX
4540 goto illegal_insn;
4541 default:
4542 goto illegal_insn;
4543 }
4544 #else
4545 goto ncp_insn;
4546 #endif
4547 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4548 #ifdef TARGET_SPARC64
4549 goto illegal_insn;
4550 #else
4551 goto ncp_insn;
4552 #endif
4553 #ifdef TARGET_SPARC64
4554 } else if (xop == 0x39) { /* V9 return */
4555 TCGv_i32 r_const;
4556
4557 save_state(dc, cpu_cond);
4558 cpu_src1 = get_src1(insn, cpu_src1);
4559 if (IS_IMM) { /* immediate */
4560 simm = GET_FIELDs(insn, 19, 31);
4561 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4562 } else { /* register */
4563 rs2 = GET_FIELD(insn, 27, 31);
4564 if (rs2) {
4565 gen_movl_reg_TN(rs2, cpu_src2);
4566 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4567 } else
4568 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4569 }
4570 gen_helper_restore(cpu_env);
4571 gen_mov_pc_npc(dc, cpu_cond);
4572 r_const = tcg_const_i32(3);
4573 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4574 tcg_temp_free_i32(r_const);
4575 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4576 dc->npc = DYNAMIC_PC;
4577 goto jmp_insn;
4578 #endif
4579 } else {
4580 cpu_src1 = get_src1(insn, cpu_src1);
4581 if (IS_IMM) { /* immediate */
4582 simm = GET_FIELDs(insn, 19, 31);
4583 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4584 } else { /* register */
4585 rs2 = GET_FIELD(insn, 27, 31);
4586 if (rs2) {
4587 gen_movl_reg_TN(rs2, cpu_src2);
4588 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4589 } else
4590 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4591 }
4592 switch (xop) {
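/* For every xop handled below, the add result (src1 + simm or
   src1 + rs2) is already sitting in cpu_dst, computed above. */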
4593 case 0x38: /* jmpl */
4594 {
4595 TCGv r_pc;
4596 TCGv_i32 r_const;
4597
4598 r_pc = tcg_const_tl(dc->pc);
4599 gen_movl_TN_reg(rd, r_pc);
4600 tcg_temp_free(r_pc);
4601 gen_mov_pc_npc(dc, cpu_cond);
4602 r_const = tcg_const_i32(3);
4603 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4604 tcg_temp_free_i32(r_const);
4605 gen_address_mask(dc, cpu_dst);
4606 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4607 dc->npc = DYNAMIC_PC;
4608 }
4609 goto jmp_insn;
4610 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4611 case 0x39: /* rett, V9 return */
4612 {
4613 TCGv_i32 r_const;
4614
4615 if (!supervisor(dc))
4616 goto priv_insn;
4617 gen_mov_pc_npc(dc, cpu_cond);
4618 r_const = tcg_const_i32(3);
4619 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4620 tcg_temp_free_i32(r_const);
4621 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4622 dc->npc = DYNAMIC_PC;
4623 gen_helper_rett(cpu_env);
4624 }
4625 goto jmp_insn;
4626 #endif
4627 case 0x3b: /* flush */
4628 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4629 goto unimp_flush;
4630 /* nop */
4631 break;
4632 case 0x3c: /* save */
4633 save_state(dc, cpu_cond);
4634 gen_helper_save(cpu_env);
4635 gen_movl_TN_reg(rd, cpu_dst);
4636 break;
4637 case 0x3d: /* restore */
4638 save_state(dc, cpu_cond);
4639 gen_helper_restore(cpu_env);
4640 gen_movl_TN_reg(rd, cpu_dst);
4641 break;
4642 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4643 case 0x3e: /* V9 done/retry */
4644 {
4645 switch (rd) {
4646 case 0:
4647 if (!supervisor(dc))
4648 goto priv_insn;
4649 dc->npc = DYNAMIC_PC;
4650 dc->pc = DYNAMIC_PC;
4651 gen_helper_done(cpu_env);
4652 goto jmp_insn;
4653 case 1:
4654 if (!supervisor(dc))
4655 goto priv_insn;
4656 dc->npc = DYNAMIC_PC;
4657 dc->pc = DYNAMIC_PC;
4658 gen_helper_retry(cpu_env);
4659 goto jmp_insn;
4660 default:
4661 goto illegal_insn;
4662 }
4663 }
4664 break;
4665 #endif
4666 default:
4667 goto illegal_insn;
4668 }
4669 }
4670 break;
4671 }
4672 break;
4673 case 3: /* load/store instructions */
4674 {
4675 unsigned int xop = GET_FIELD(insn, 7, 12);
4676
4677 /* flush pending conditional evaluations before exposing
4678 cpu state */
4679 if (dc->cc_op != CC_OP_FLAGS) {
4680 dc->cc_op = CC_OP_FLAGS;
4681 gen_helper_compute_psr(cpu_env);
4682 }
4683 cpu_src1 = get_src1(insn, cpu_src1);
4684 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4685 rs2 = GET_FIELD(insn, 27, 31);
4686 gen_movl_reg_TN(rs2, cpu_src2);
4687 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4688 } else if (IS_IMM) { /* immediate */
4689 simm = GET_FIELDs(insn, 19, 31);
4690 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4691 } else { /* register */
4692 rs2 = GET_FIELD(insn, 27, 31);
4693 if (rs2 != 0) {
4694 gen_movl_reg_TN(rs2, cpu_src2);
4695 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4696 } else
4697 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4698 }
4699 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4700 (xop > 0x17 && xop <= 0x1d ) ||
4701 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4702 switch (xop) {
4703 case 0x0: /* ld, V9 lduw, load unsigned word */
4704 gen_address_mask(dc, cpu_addr);
4705 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4706 break;
4707 case 0x1: /* ldub, load unsigned byte */
4708 gen_address_mask(dc, cpu_addr);
4709 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4710 break;
4711 case 0x2: /* lduh, load unsigned halfword */
4712 gen_address_mask(dc, cpu_addr);
4713 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4714 break;
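/* ldd: one aligned 64-bit load split across an even/odd register
   pair -- the high word goes to rd (via the common writeback after
   this switch) and the low word to rd + 1. */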
4715 case 0x3: /* ldd, load double word */
4716 if (rd & 1)
4717 goto illegal_insn;
4718 else {
4719 TCGv_i32 r_const;
4720
4721 save_state(dc, cpu_cond);
4722 r_const = tcg_const_i32(7);
4723 /* XXX remove alignment check */
4724 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4725 tcg_temp_free_i32(r_const);
4726 gen_address_mask(dc, cpu_addr);
4727 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4728 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4729 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4730 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4731 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4732 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4733 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4734 }
4735 break;
4736 case 0x9: /* ldsb, load signed byte */
4737 gen_address_mask(dc, cpu_addr);
4738 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4739 break;
4740 case 0xa: /* ldsh, load signed halfword */
4741 gen_address_mask(dc, cpu_addr);
4742 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4743 break;
4744 case 0xd: /* ldstub -- XXX: should be atomic */
4745 {
4746 TCGv r_const;
4747
4748 gen_address_mask(dc, cpu_addr);
4749 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx); /* ldstub zero-extends */
4750 r_const = tcg_const_tl(0xff);
4751 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4752 tcg_temp_free(r_const);
4753 }
4754 break;
4755 case 0x0f: /* swap, swap register with memory. XXX: should
4756 also be atomic */
4757 CHECK_IU_FEATURE(dc, SWAP);
4758 gen_movl_reg_TN(rd, cpu_val);
4759 gen_address_mask(dc, cpu_addr);
4760 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4761 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4762 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4763 break;
4764 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4765 case 0x10: /* lda, V9 lduwa, load word alternate */
4766 #ifndef TARGET_SPARC64
4767 if (IS_IMM)
4768 goto illegal_insn;
4769 if (!supervisor(dc))
4770 goto priv_insn;
4771 #endif
4772 save_state(dc, cpu_cond);
4773 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4774 break;
4775 case 0x11: /* lduba, load unsigned byte alternate */
4776 #ifndef TARGET_SPARC64
4777 if (IS_IMM)
4778 goto illegal_insn;
4779 if (!supervisor(dc))
4780 goto priv_insn;
4781 #endif
4782 save_state(dc, cpu_cond);
4783 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4784 break;
4785 case 0x12: /* lduha, load unsigned halfword alternate */
4786 #ifndef TARGET_SPARC64
4787 if (IS_IMM)
4788 goto illegal_insn;
4789 if (!supervisor(dc))
4790 goto priv_insn;
4791 #endif
4792 save_state(dc, cpu_cond);
4793 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4794 break;
4795 case 0x13: /* ldda, load double word alternate */
4796 #ifndef TARGET_SPARC64
4797 if (IS_IMM)
4798 goto illegal_insn;
4799 if (!supervisor(dc))
4800 goto priv_insn;
4801 #endif
4802 if (rd & 1)
4803 goto illegal_insn;
4804 save_state(dc, cpu_cond);
4805 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4806 goto skip_move;
4807 case 0x19: /* ldsba, load signed byte alternate */
4808 #ifndef TARGET_SPARC64
4809 if (IS_IMM)
4810 goto illegal_insn;
4811 if (!supervisor(dc))
4812 goto priv_insn;
4813 #endif
4814 save_state(dc, cpu_cond);
4815 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4816 break;
4817 case 0x1a: /* ldsha, load signed halfword alternate */
4818 #ifndef TARGET_SPARC64
4819 if (IS_IMM)
4820 goto illegal_insn;
4821 if (!supervisor(dc))
4822 goto priv_insn;
4823 #endif
4824 save_state(dc, cpu_cond);
4825 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4826 break;
4827 case 0x1d: /* ldstuba -- XXX: should be atomic */
4828 #ifndef TARGET_SPARC64
4829 if (IS_IMM)
4830 goto illegal_insn;
4831 if (!supervisor(dc))
4832 goto priv_insn;
4833 #endif
4834 save_state(dc, cpu_cond);
4835 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4836 break;
4837 case 0x1f: /* swapa, swap reg with alt. memory. XXX: should
4838 also be atomic */
4839 CHECK_IU_FEATURE(dc, SWAP);
4840 #ifndef TARGET_SPARC64
4841 if (IS_IMM)
4842 goto illegal_insn;
4843 if (!supervisor(dc))
4844 goto priv_insn;
4845 #endif
4846 save_state(dc, cpu_cond);
4847 gen_movl_reg_TN(rd, cpu_val);
4848 gen_swap_asi(cpu_val, cpu_addr, insn);
4849 break;
4850
4851 #ifndef TARGET_SPARC64
4852 case 0x30: /* ldc */
4853 case 0x31: /* ldcsr */
4854 case 0x33: /* lddc */
4855 goto ncp_insn;
4856 #endif
4857 #endif
4858 #ifdef TARGET_SPARC64
4859 case 0x08: /* V9 ldsw */
4860 gen_address_mask(dc, cpu_addr);
4861 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4862 break;
4863 case 0x0b: /* V9 ldx */
4864 gen_address_mask(dc, cpu_addr);
4865 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4866 break;
4867 case 0x18: /* V9 ldswa */
4868 save_state(dc, cpu_cond);
4869 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4870 break;
4871 case 0x1b: /* V9 ldxa */
4872 save_state(dc, cpu_cond);
4873 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4874 break;
4875 case 0x2d: /* V9 prefetch, no effect */
4876 goto skip_move;
4877 case 0x30: /* V9 ldfa */
4878 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4879 goto jmp_insn;
4880 }
4881 save_state(dc, cpu_cond);
4882 gen_ldf_asi(cpu_addr, insn, 4, rd);
4883 gen_update_fprs_dirty(rd);
4884 goto skip_move;
4885 case 0x33: /* V9 lddfa */
4886 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4887 goto jmp_insn;
4888 }
4889 save_state(dc, cpu_cond);
4890 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4891 gen_update_fprs_dirty(DFPREG(rd));
4892 goto skip_move;
4893 case 0x3d: /* V9 prefetcha, no effect */
4894 goto skip_move;
4895 case 0x32: /* V9 ldqfa */
4896 CHECK_FPU_FEATURE(dc, FLOAT128);
4897 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4898 goto jmp_insn;
4899 }
4900 save_state(dc, cpu_cond);
4901 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4902 gen_update_fprs_dirty(QFPREG(rd));
4903 goto skip_move;
4904 #endif
4905 default:
4906 goto illegal_insn;
4907 }
4908 gen_movl_TN_reg(rd, cpu_val);
4909 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4910 skip_move: ;
4911 #endif
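/* Loads that store their result through the ASI helpers (ldda,
   ldfa, lddfa, ldqfa) or produce no result at all (prefetch) jump
   to skip_move above to bypass the common rd writeback. */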
4912 } else if (xop >= 0x20 && xop < 0x24) {
4913 if (gen_trap_ifnofpu(dc, cpu_cond))
4914 goto jmp_insn;
4915 save_state(dc, cpu_cond);
4916 switch (xop) {
4917 case 0x20: /* ldf, load fpreg */
4918 gen_address_mask(dc, cpu_addr);
4919 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4920 cpu_dst_32 = gen_dest_fpr_F();
4921 tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4922 gen_store_fpr_F(dc, rd, cpu_dst_32);
4923 break;
4924 case 0x21: /* ldfsr, V9 ldxfsr */
4925 #ifdef TARGET_SPARC64
4926 gen_address_mask(dc, cpu_addr);
4927 if (rd == 1) {
4928 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4929 gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4930 } else {
4931 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4932 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4933 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4934 }
4935 #else
4936 {
4937 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4938 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4939 }
4940 #endif
4941 break;
4942 case 0x22: /* ldqf, load quad fpreg */
4943 {
4944 TCGv_i32 r_const;
4945
4946 CHECK_FPU_FEATURE(dc, FLOAT128);
4947 r_const = tcg_const_i32(dc->mem_idx);
4948 gen_address_mask(dc, cpu_addr);
4949 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4950 tcg_temp_free_i32(r_const);
4951 gen_op_store_QT0_fpr(QFPREG(rd));
4952 gen_update_fprs_dirty(QFPREG(rd));
4953 }
4954 break;
4955 case 0x23: /* lddf, load double fpreg */
4956 gen_address_mask(dc, cpu_addr);
4957 cpu_dst_64 = gen_dest_fpr_D();
4958 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4959 gen_store_fpr_D(dc, rd, cpu_dst_64);
4960 break;
4961 default:
4962 goto illegal_insn;
4963 }
4964 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4965 xop == 0xe || xop == 0x1e) {
4966 gen_movl_reg_TN(rd, cpu_val);
4967 switch (xop) {
4968 case 0x4: /* st, store word */
4969 gen_address_mask(dc, cpu_addr);
4970 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4971 break;
4972 case 0x5: /* stb, store byte */
4973 gen_address_mask(dc, cpu_addr);
4974 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4975 break;
4976 case 0x6: /* sth, store halfword */
4977 gen_address_mask(dc, cpu_addr);
4978 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4979 break;
4980 case 0x7: /* std, store double word */
4981 if (rd & 1)
4982 goto illegal_insn;
4983 else {
4984 TCGv_i32 r_const;
4985
4986 save_state(dc, cpu_cond);
4987 gen_address_mask(dc, cpu_addr);
4988 r_const = tcg_const_i32(7);
4989 /* XXX remove alignment check */
4990 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4991 tcg_temp_free_i32(r_const);
4992 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4993 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4994 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4995 }
4996 break;
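/* std mirrors ldd: tcg_gen_concat_tl_i64 takes (low, high), so
   reg[rd + 1] supplies the low word and reg[rd] the high word of
   the 64-bit store. */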
4997 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4998 case 0x14: /* sta, V9 stwa, store word alternate */
4999 #ifndef TARGET_SPARC64
5000 if (IS_IMM)
5001 goto illegal_insn;
5002 if (!supervisor(dc))
5003 goto priv_insn;
5004 #endif
5005 save_state(dc, cpu_cond);
5006 gen_st_asi(cpu_val, cpu_addr, insn, 4);
5007 dc->npc = DYNAMIC_PC;
5008 break;
5009 case 0x15: /* stba, store byte alternate */
5010 #ifndef TARGET_SPARC64
5011 if (IS_IMM)
5012 goto illegal_insn;
5013 if (!supervisor(dc))
5014 goto priv_insn;
5015 #endif
5016 save_state(dc, cpu_cond);
5017 gen_st_asi(cpu_val, cpu_addr, insn, 1);
5018 dc->npc = DYNAMIC_PC;
5019 break;
5020 case 0x16: /* stha, store halfword alternate */
5021 #ifndef TARGET_SPARC64
5022 if (IS_IMM)
5023 goto illegal_insn;
5024 if (!supervisor(dc))
5025 goto priv_insn;
5026 #endif
5027 save_state(dc, cpu_cond);
5028 gen_st_asi(cpu_val, cpu_addr, insn, 2);
5029 dc->npc = DYNAMIC_PC;
5030 break;
5031 case 0x17: /* stda, store double word alternate */
5032 #ifndef TARGET_SPARC64
5033 if (IS_IMM)
5034 goto illegal_insn;
5035 if (!supervisor(dc))
5036 goto priv_insn;
5037 #endif
5038 if (rd & 1)
5039 goto illegal_insn;
5040 else {
5041 save_state(dc, cpu_cond);
5042 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
5043 }
5044 break;
5045 #endif
5046 #ifdef TARGET_SPARC64
5047 case 0x0e: /* V9 stx */
5048 gen_address_mask(dc, cpu_addr);
5049 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5050 break;
5051 case 0x1e: /* V9 stxa */
5052 save_state(dc, cpu_cond);
5053 gen_st_asi(cpu_val, cpu_addr, insn, 8);
5054 dc->npc = DYNAMIC_PC;
5055 break;
5056 #endif
5057 default:
5058 goto illegal_insn;
5059 }
5060 } else if (xop > 0x23 && xop < 0x28) {
5061 if (gen_trap_ifnofpu(dc, cpu_cond))
5062 goto jmp_insn;
5063 save_state(dc, cpu_cond);
5064 switch (xop) {
5065 case 0x24: /* stf, store fpreg */
5066 gen_address_mask(dc, cpu_addr);
5067 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5068 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
5069 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
5070 break;
5071 case 0x25: /* stfsr, V9 stxfsr */
5072 #ifdef TARGET_SPARC64
5073 gen_address_mask(dc, cpu_addr);
5074 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
5075 if (rd == 1)
5076 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
5077 else
5078 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
5079 #else
5080 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
5081 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
5082 #endif
5083 break;
5084 case 0x26:
5085 #ifdef TARGET_SPARC64
5086 /* V9 stqf, store quad fpreg */
5087 {
5088 TCGv_i32 r_const;
5089
5090 CHECK_FPU_FEATURE(dc, FLOAT128);
5091 gen_op_load_fpr_QT0(QFPREG(rd));
5092 r_const = tcg_const_i32(dc->mem_idx);
5093 gen_address_mask(dc, cpu_addr);
5094 gen_helper_stqf(cpu_env, cpu_addr, r_const);
5095 tcg_temp_free_i32(r_const);
5096 }
5097 break;
5098 #else /* !TARGET_SPARC64 */
5099 /* stdfq, store floating point queue */
5100 #if defined(CONFIG_USER_ONLY)
5101 goto illegal_insn;
5102 #else
5103 if (!supervisor(dc))
5104 goto priv_insn;
5105 if (gen_trap_ifnofpu(dc, cpu_cond))
5106 goto jmp_insn;
5107 goto nfq_insn;
5108 #endif
5109 #endif
5110 case 0x27: /* stdf, store double fpreg */
5111 gen_address_mask(dc, cpu_addr);
5112 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5113 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5114 break;
5115 default:
5116 goto illegal_insn;
5117 }
5118 } else if (xop > 0x33 && xop < 0x3f) {
5119 save_state(dc, cpu_cond);
5120 switch (xop) {
5121 #ifdef TARGET_SPARC64
5122 case 0x34: /* V9 stfa */
5123 if (gen_trap_ifnofpu(dc, cpu_cond)) {
5124 goto jmp_insn;
5125 }
5126 gen_stf_asi(cpu_addr, insn, 4, rd);
5127 break;
5128 case 0x36: /* V9 stqfa */
5129 {
5130 TCGv_i32 r_const;
5131
5132 CHECK_FPU_FEATURE(dc, FLOAT128);
5133 if (gen_trap_ifnofpu(dc, cpu_cond)) {
5134 goto jmp_insn;
5135 }
5136 r_const = tcg_const_i32(7);
5137 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5138 tcg_temp_free_i32(r_const);
5139 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5140 }
5141 break;
5142 case 0x37: /* V9 stdfa */
5143 if (gen_trap_ifnofpu(dc, cpu_cond)) {
5144 goto jmp_insn;
5145 }
5146 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5147 break;
5148 case 0x3c: /* V9 casa */
5149 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5150 gen_movl_TN_reg(rd, cpu_val);
5151 break;
5152 case 0x3e: /* V9 casxa */
5153 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5154 gen_movl_TN_reg(rd, cpu_val);
5155 break;
5156 #else
5157 case 0x34: /* stc */
5158 case 0x35: /* stcsr */
5159 case 0x36: /* stdcq */
5160 case 0x37: /* stdc */
5161 goto ncp_insn;
5162 #endif
5163 default:
5164 goto illegal_insn;
5165 }
5166 } else
5167 goto illegal_insn;
5168 }
5169 break;
5170 }
5171 /* default case for non jump instructions */
5172 if (dc->npc == DYNAMIC_PC) {
5173 dc->pc = DYNAMIC_PC;
5174 gen_op_next_insn();
5175 } else if (dc->npc == JUMP_PC) {
5176 /* we can do a static jump */
5177 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5178 dc->is_br = 1;
5179 } else {
5180 dc->pc = dc->npc;
5181 dc->npc = dc->npc + 4;
5182 }
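/* Note: the three arms above correspond to a run-time npc (emit code
   to advance it), a conditional resolved at translation time
   (JUMP_PC: end the TB with a two-way branch), and the fully static
   case where pc and npc simply advance by 4. */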
5183 jmp_insn:
5184 goto egress;
5185 illegal_insn:
5186 {
5187 TCGv_i32 r_const;
5188
5189 save_state(dc, cpu_cond);
5190 r_const = tcg_const_i32(TT_ILL_INSN);
5191 gen_helper_raise_exception(cpu_env, r_const);
5192 tcg_temp_free_i32(r_const);
5193 dc->is_br = 1;
5194 }
5195 goto egress;
5196 unimp_flush:
5197 {
5198 TCGv_i32 r_const;
5199
5200 save_state(dc, cpu_cond);
5201 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5202 gen_helper_raise_exception(cpu_env, r_const);
5203 tcg_temp_free_i32(r_const);
5204 dc->is_br = 1;
5205 }
5206 goto egress;
5207 #if !defined(CONFIG_USER_ONLY)
5208 priv_insn:
5209 {
5210 TCGv_i32 r_const;
5211
5212 save_state(dc, cpu_cond);
5213 r_const = tcg_const_i32(TT_PRIV_INSN);
5214 gen_helper_raise_exception(cpu_env, r_const);
5215 tcg_temp_free_i32(r_const);
5216 dc->is_br = 1;
5217 }
5218 goto egress;
5219 #endif
5220 nfpu_insn:
5221 save_state(dc, cpu_cond);
5222 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5223 dc->is_br = 1;
5224 goto egress;
5225 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5226 nfq_insn:
5227 save_state(dc, cpu_cond);
5228 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5229 dc->is_br = 1;
5230 goto egress;
5231 #endif
5232 #ifndef TARGET_SPARC64
5233 ncp_insn:
5234 {
5235 TCGv_i32 r_const;
5236
5237 save_state(dc, cpu_cond);
5238 r_const = tcg_const_i32(TT_NCP_INSN);
5239 gen_helper_raise_exception(cpu_env, r_const);
5240 tcg_temp_free_i32(r_const);
5241 dc->is_br = 1;
5242 }
5243 goto egress;
5244 #endif
5245 egress:
5248 if (dc->n_t32 != 0) {
5249 int i;
5250 for (i = dc->n_t32 - 1; i >= 0; --i) {
5251 tcg_temp_free_i32(dc->t32[i]);
5252 }
5253 dc->n_t32 = 0;
5254 }
5255 }
5256
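/* Translate a block of code starting at tb->pc.  Translation stops at
   the first branch (dc->is_br), at a page boundary (so the pc of a
   TT_TFAULT always stays in the faulting page), in single-step mode,
   or when the op buffer or the max_insns budget runs out.  With spc
   set, per-insn pc/npc values are also recorded so that
   restore_state_to_opc() can rebuild the CPU state. */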
5257 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
5258 int spc, CPUSPARCState *env)
5259 {
5260 target_ulong pc_start, last_pc;
5261 uint16_t *gen_opc_end;
5262 DisasContext dc1, *dc = &dc1;
5263 CPUBreakpoint *bp;
5264 int j, lj = -1;
5265 int num_insns;
5266 int max_insns;
5267 unsigned int insn;
5268
5269 memset(dc, 0, sizeof(DisasContext));
5270 dc->tb = tb;
5271 pc_start = tb->pc;
5272 dc->pc = pc_start;
5273 last_pc = dc->pc;
5274 dc->npc = (target_ulong) tb->cs_base;
5275 dc->cc_op = CC_OP_DYNAMIC;
5276 dc->mem_idx = cpu_mmu_index(env);
5277 dc->def = env->def;
5278 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5279 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5280 dc->singlestep = (env->singlestep_enabled || singlestep);
5281 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5282
5283 cpu_tmp0 = tcg_temp_new();
5284 cpu_tmp32 = tcg_temp_new_i32();
5285 cpu_tmp64 = tcg_temp_new_i64();
5286
5287 cpu_dst = tcg_temp_local_new();
5288
5289 // loads and stores
5290 cpu_val = tcg_temp_local_new();
5291 cpu_addr = tcg_temp_local_new();
5292
5293 num_insns = 0;
5294 max_insns = tb->cflags & CF_COUNT_MASK;
5295 if (max_insns == 0)
5296 max_insns = CF_COUNT_MASK;
5297 gen_icount_start();
5298 do {
5299 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5300 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5301 if (bp->pc == dc->pc) {
5302 if (dc->pc != pc_start)
5303 save_state(dc, cpu_cond);
5304 gen_helper_debug(cpu_env);
5305 tcg_gen_exit_tb(0);
5306 dc->is_br = 1;
5307 goto exit_gen_loop;
5308 }
5309 }
5310 }
5311 if (spc) {
5312 qemu_log("Search PC...\n");
5313 j = gen_opc_ptr - gen_opc_buf;
5314 if (lj < j) {
5315 lj++;
5316 while (lj < j)
5317 gen_opc_instr_start[lj++] = 0;
5318 gen_opc_pc[lj] = dc->pc;
5319 gen_opc_npc[lj] = dc->npc;
5320 gen_opc_instr_start[lj] = 1;
5321 gen_opc_icount[lj] = num_insns;
5322 }
5323 }
5324 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5325 gen_io_start();
5326 last_pc = dc->pc;
5327 insn = cpu_ldl_code(env, dc->pc);
5328 disas_sparc_insn(dc, insn);
5329 num_insns++;
5330
5331 if (dc->is_br)
5332 break;
5333 /* if the next PC is different, we abort now */
5334 if (dc->pc != (last_pc + 4))
5335 break;
5336 /* if we reach a page boundary, we stop generation so that the
5337 PC of a TT_TFAULT exception is always in the right page */
5338 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5339 break;
5340 /* in single-step mode, we translate only one instruction
5341 and then stop the TB */
5342 if (dc->singlestep) {
5343 break;
5344 }
5345 } while ((gen_opc_ptr < gen_opc_end) &&
5346 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5347 num_insns < max_insns);
5348
5349 exit_gen_loop:
5350 tcg_temp_free(cpu_addr);
5351 tcg_temp_free(cpu_val);
5352 tcg_temp_free(cpu_dst);
5353 tcg_temp_free_i64(cpu_tmp64);
5354 tcg_temp_free_i32(cpu_tmp32);
5355 tcg_temp_free(cpu_tmp0);
5356
5357 if (tb->cflags & CF_LAST_IO)
5358 gen_io_end();
5359 if (!dc->is_br) {
5360 if (dc->pc != DYNAMIC_PC &&
5361 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5362 /* static PC and NPC: we can use direct chaining */
5363 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5364 } else {
5365 if (dc->pc != DYNAMIC_PC)
5366 tcg_gen_movi_tl(cpu_pc, dc->pc);
5367 save_npc(dc, cpu_cond);
5368 tcg_gen_exit_tb(0);
5369 }
5370 }
5371 gen_icount_end(tb, num_insns);
5372 *gen_opc_ptr = INDEX_op_end;
5373 if (spc) {
5374 j = gen_opc_ptr - gen_opc_buf;
5375 lj++;
5376 while (lj <= j)
5377 gen_opc_instr_start[lj++] = 0;
5378 #if 0
5379 log_page_dump();
5380 #endif
5381 gen_opc_jump_pc[0] = dc->jump_pc[0];
5382 gen_opc_jump_pc[1] = dc->jump_pc[1];
5383 } else {
5384 tb->size = last_pc + 4 - pc_start;
5385 tb->icount = num_insns;
5386 }
5387 #ifdef DEBUG_DISAS
5388 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5389 qemu_log("--------------\n");
5390 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5391 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5392 qemu_log("\n");
5393 }
5394 #endif
5395 }
5396
5397 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5398 {
5399 gen_intermediate_code_internal(tb, 0, env);
5400 }
5401
5402 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5403 {
5404 gen_intermediate_code_internal(tb, 1, env);
5405 }
5406
5407 void gen_intermediate_code_init(CPUSPARCState *env)
5408 {
5409 unsigned int i;
5410 static int inited;
5411 static const char * const gregnames[8] = {
5412 NULL, // g0 not used
5413 "g1",
5414 "g2",
5415 "g3",
5416 "g4",
5417 "g5",
5418 "g6",
5419 "g7",
5420 };
5421 static const char * const fregnames[32] = {
5422 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5423 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5424 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5425 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5426 };
5427
5428 /* init various static tables */
5429 if (!inited) {
5430 inited = 1;
5431
5432 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5433 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5434 offsetof(CPUSPARCState, regwptr),
5435 "regwptr");
5436 #ifdef TARGET_SPARC64
5437 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5438 "xcc");
5439 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5440 "asi");
5441 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5442 "fprs");
5443 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5444 "gsr");
5445 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5446 offsetof(CPUSPARCState, tick_cmpr),
5447 "tick_cmpr");
5448 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5449 offsetof(CPUSPARCState, stick_cmpr),
5450 "stick_cmpr");
5451 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5452 offsetof(CPUSPARCState, hstick_cmpr),
5453 "hstick_cmpr");
5454 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5455 "hintp");
5456 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5457 "htba");
5458 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5459 "hver");
5460 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5461 offsetof(CPUSPARCState, ssr), "ssr");
5462 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5463 offsetof(CPUSPARCState, version), "ver");
5464 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5465 offsetof(CPUSPARCState, softint),
5466 "softint");
5467 #else
5468 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5469 "wim");
5470 #endif
5471 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5472 "cond");
5473 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5474 "cc_src");
5475 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5476 offsetof(CPUSPARCState, cc_src2),
5477 "cc_src2");
5478 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5479 "cc_dst");
5480 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5481 "cc_op");
5482 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5483 "psr");
5484 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5485 "fsr");
5486 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5487 "pc");
5488 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5489 "npc");
5490 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5491 #ifndef CONFIG_USER_ONLY
5492 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5493 "tbr");
5494 #endif
5495 for (i = 1; i < 8; i++) {
5496 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5497 offsetof(CPUSPARCState, gregs[i]),
5498 gregnames[i]);
5499 }
5500 for (i = 0; i < TARGET_DPREGS; i++) {
5501 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5502 offsetof(CPUSPARCState, fpr[i]),
5503 fregnames[i]);
5504 }
5505
5506 /* register helpers */
5507
5508 #define GEN_HELPER 2
5509 #include "helper.h"
5510 }
5511 }
5512
5513 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5514 {
5515 target_ulong npc;
5516 env->pc = gen_opc_pc[pc_pos];
5517 npc = gen_opc_npc[pc_pos];
5518 if (npc == DYNAMIC_PC) {
5519 /* dynamic NPC: already stored */
5520 } else if (npc == JUMP_PC) {
5521 /* jump PC: use 'cond' and the jump targets of the translation */
5522 if (env->cond) {
5523 env->npc = gen_opc_jump_pc[0];
5524 } else {
5525 env->npc = gen_opc_jump_pc[1];
5526 }
5527 } else {
5528 env->npc = npc;
5529 }
5530
5531 /* flush pending conditional evaluations before exposing cpu state */
5532 if (CC_OP != CC_OP_FLAGS) {
5533 helper_compute_psr(env);
5534 }
5535 }