]> git.proxmox.com Git - mirror_qemu.git/blob - target-sparc/translate.c
Merge remote-tracking branch 'origin/master' into staging
[mirror_qemu.git] / target-sparc / translate.c
1 /*
2 SPARC translation
3
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
6
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
11
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
19 */
20
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
26
27 #include "cpu.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31
32 #define GEN_HELPER 1
33 #include "helper.h"
34
35 #define DEBUG_DISAS
36
37 #define DYNAMIC_PC 1 /* dynamic pc value */
38 #define JUMP_PC 2 /* dynamic pc value which takes only two values
39 according to jump_pc[T2] */
40
41 /* global register indexes */
42 static TCGv_ptr cpu_env, cpu_regwptr;
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47 static TCGv cpu_y;
48 #ifndef CONFIG_USER_ONLY
49 static TCGv cpu_tbr;
50 #endif
51 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52 #ifdef TARGET_SPARC64
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54 static TCGv cpu_gsr;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
58 #else
59 static TCGv cpu_wim;
60 #endif
61 /* local register indexes (only used inside old micro ops) */
62 static TCGv cpu_tmp0;
63 static TCGv_i32 cpu_tmp32;
64 static TCGv_i64 cpu_tmp64;
65 /* Floating point registers */
66 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
67
68 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69 static target_ulong gen_opc_jump_pc[2];
70
71 #include "gen-icount.h"
72
73 typedef struct DisasContext {
74 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
75 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
77 int is_br;
78 int mem_idx;
79 int fpu_enabled;
80 int address_mask_32bit;
81 int singlestep;
82 uint32_t cc_op; /* current CC operation */
83 struct TranslationBlock *tb;
84 sparc_def_t *def;
85 TCGv_i32 t32[3];
86 int n_t32;
87 } DisasContext;
88
89 // This function uses non-native bit order
90 #define GET_FIELD(X, FROM, TO) \
91 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
92
93 // This function uses the order in the manuals, i.e. bit 0 is 2^0
94 #define GET_FIELD_SP(X, FROM, TO) \
95 GET_FIELD(X, 31 - (TO), 31 - (FROM))
96
97 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
98 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
99
100 #ifdef TARGET_SPARC64
101 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
102 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
103 #else
104 #define DFPREG(r) (r & 0x1e)
105 #define QFPREG(r) (r & 0x1c)
106 #endif
107
108 #define UA2005_HTRAP_MASK 0xff
109 #define V8_TRAP_MASK 0x7f
110
111 static int sign_extend(int x, int len)
112 {
113 len = 32 - len;
114 return (x << len) >> len;
115 }
116
117 #define IS_IMM (insn & (1<<13))
118
119 static inline void gen_update_fprs_dirty(int rd)
120 {
121 #if defined(TARGET_SPARC64)
122 tcg_gen_ori_i32(cpu_fprs, cpu_fprs, (rd < 32) ? 1 : 2);
123 #endif
124 }
125
126 /* floating point registers moves */
127 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
128 {
129 #if TCG_TARGET_REG_BITS == 32
130 if (src & 1) {
131 return TCGV_LOW(cpu_fpr[src / 2]);
132 } else {
133 return TCGV_HIGH(cpu_fpr[src / 2]);
134 }
135 #else
136 if (src & 1) {
137 return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
138 } else {
139 TCGv_i32 ret = tcg_temp_local_new_i32();
140 TCGv_i64 t = tcg_temp_new_i64();
141
142 tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
143 tcg_gen_trunc_i64_i32(ret, t);
144 tcg_temp_free_i64(t);
145
146 dc->t32[dc->n_t32++] = ret;
147 assert(dc->n_t32 <= ARRAY_SIZE(dc->t32));
148
149 return ret;
150 }
151 #endif
152 }
153
154 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
155 {
156 #if TCG_TARGET_REG_BITS == 32
157 if (dst & 1) {
158 tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
159 } else {
160 tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
161 }
162 #else
163 TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
164 tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
165 (dst & 1 ? 0 : 32), 32);
166 #endif
167 gen_update_fprs_dirty(dst);
168 }
169
170 static TCGv_i32 gen_dest_fpr_F(void)
171 {
172 return cpu_tmp32;
173 }
174
175 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
176 {
177 src = DFPREG(src);
178 return cpu_fpr[src / 2];
179 }
180
181 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
182 {
183 dst = DFPREG(dst);
184 tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
185 gen_update_fprs_dirty(dst);
186 }
187
188 static TCGv_i64 gen_dest_fpr_D(void)
189 {
190 return cpu_tmp64;
191 }
192
193 static void gen_op_load_fpr_QT0(unsigned int src)
194 {
195 tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
196 offsetof(CPU_QuadU, ll.upper));
197 tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
198 offsetof(CPU_QuadU, ll.lower));
199 }
200
201 static void gen_op_load_fpr_QT1(unsigned int src)
202 {
203 tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
204 offsetof(CPU_QuadU, ll.upper));
205 tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
206 offsetof(CPU_QuadU, ll.lower));
207 }
208
209 static void gen_op_store_QT0_fpr(unsigned int dst)
210 {
211 tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
212 offsetof(CPU_QuadU, ll.upper));
213 tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
214 offsetof(CPU_QuadU, ll.lower));
215 }
216
217 #ifdef TARGET_SPARC64
218 static void gen_move_Q(unsigned int rd, unsigned int rs)
219 {
220 rd = QFPREG(rd);
221 rs = QFPREG(rs);
222
223 tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
224 tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
225 gen_update_fprs_dirty(rd);
226 }
227 #endif
228
229 /* moves */
230 #ifdef CONFIG_USER_ONLY
231 #define supervisor(dc) 0
232 #ifdef TARGET_SPARC64
233 #define hypervisor(dc) 0
234 #endif
235 #else
236 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
237 #ifdef TARGET_SPARC64
238 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
239 #else
240 #endif
241 #endif
242
243 #ifdef TARGET_SPARC64
244 #ifndef TARGET_ABI32
245 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
246 #else
247 #define AM_CHECK(dc) (1)
248 #endif
249 #endif
250
251 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
252 {
253 #ifdef TARGET_SPARC64
254 if (AM_CHECK(dc))
255 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
256 #endif
257 }
258
259 static inline void gen_movl_reg_TN(int reg, TCGv tn)
260 {
261 if (reg == 0)
262 tcg_gen_movi_tl(tn, 0);
263 else if (reg < 8)
264 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
265 else {
266 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
267 }
268 }
269
270 static inline void gen_movl_TN_reg(int reg, TCGv tn)
271 {
272 if (reg == 0)
273 return;
274 else if (reg < 8)
275 tcg_gen_mov_tl(cpu_gregs[reg], tn);
276 else {
277 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
278 }
279 }
280
281 static inline void gen_goto_tb(DisasContext *s, int tb_num,
282 target_ulong pc, target_ulong npc)
283 {
284 TranslationBlock *tb;
285
286 tb = s->tb;
287 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
288 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
289 !s->singlestep) {
290 /* jump to same page: we can use a direct jump */
291 tcg_gen_goto_tb(tb_num);
292 tcg_gen_movi_tl(cpu_pc, pc);
293 tcg_gen_movi_tl(cpu_npc, npc);
294 tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
295 } else {
296 /* jump to another page: currently not optimized */
297 tcg_gen_movi_tl(cpu_pc, pc);
298 tcg_gen_movi_tl(cpu_npc, npc);
299 tcg_gen_exit_tb(0);
300 }
301 }
302
303 // XXX suboptimal
304 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
305 {
306 tcg_gen_extu_i32_tl(reg, src);
307 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
308 tcg_gen_andi_tl(reg, reg, 0x1);
309 }
310
311 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
312 {
313 tcg_gen_extu_i32_tl(reg, src);
314 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
315 tcg_gen_andi_tl(reg, reg, 0x1);
316 }
317
318 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
319 {
320 tcg_gen_extu_i32_tl(reg, src);
321 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
322 tcg_gen_andi_tl(reg, reg, 0x1);
323 }
324
325 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
326 {
327 tcg_gen_extu_i32_tl(reg, src);
328 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
329 tcg_gen_andi_tl(reg, reg, 0x1);
330 }
331
332 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
333 {
334 TCGv r_temp;
335 TCGv_i32 r_const;
336 int l1;
337
338 l1 = gen_new_label();
339
340 r_temp = tcg_temp_new();
341 tcg_gen_xor_tl(r_temp, src1, src2);
342 tcg_gen_not_tl(r_temp, r_temp);
343 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
344 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
345 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
346 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
347 r_const = tcg_const_i32(TT_TOVF);
348 gen_helper_raise_exception(cpu_env, r_const);
349 tcg_temp_free_i32(r_const);
350 gen_set_label(l1);
351 tcg_temp_free(r_temp);
352 }
353
354 static inline void gen_tag_tv(TCGv src1, TCGv src2)
355 {
356 int l1;
357 TCGv_i32 r_const;
358
359 l1 = gen_new_label();
360 tcg_gen_or_tl(cpu_tmp0, src1, src2);
361 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
362 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
363 r_const = tcg_const_i32(TT_TOVF);
364 gen_helper_raise_exception(cpu_env, r_const);
365 tcg_temp_free_i32(r_const);
366 gen_set_label(l1);
367 }
368
369 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
370 {
371 tcg_gen_mov_tl(cpu_cc_src, src1);
372 tcg_gen_movi_tl(cpu_cc_src2, src2);
373 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
374 tcg_gen_mov_tl(dst, cpu_cc_dst);
375 }
376
377 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
378 {
379 tcg_gen_mov_tl(cpu_cc_src, src1);
380 tcg_gen_mov_tl(cpu_cc_src2, src2);
381 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
382 tcg_gen_mov_tl(dst, cpu_cc_dst);
383 }
384
385 static TCGv_i32 gen_add32_carry32(void)
386 {
387 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
388
389 /* Carry is computed from a previous add: (dst < src) */
390 #if TARGET_LONG_BITS == 64
391 cc_src1_32 = tcg_temp_new_i32();
392 cc_src2_32 = tcg_temp_new_i32();
393 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
394 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
395 #else
396 cc_src1_32 = cpu_cc_dst;
397 cc_src2_32 = cpu_cc_src;
398 #endif
399
400 carry_32 = tcg_temp_new_i32();
401 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
402
403 #if TARGET_LONG_BITS == 64
404 tcg_temp_free_i32(cc_src1_32);
405 tcg_temp_free_i32(cc_src2_32);
406 #endif
407
408 return carry_32;
409 }
410
411 static TCGv_i32 gen_sub32_carry32(void)
412 {
413 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
414
415 /* Carry is computed from a previous borrow: (src1 < src2) */
416 #if TARGET_LONG_BITS == 64
417 cc_src1_32 = tcg_temp_new_i32();
418 cc_src2_32 = tcg_temp_new_i32();
419 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
420 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
421 #else
422 cc_src1_32 = cpu_cc_src;
423 cc_src2_32 = cpu_cc_src2;
424 #endif
425
426 carry_32 = tcg_temp_new_i32();
427 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
428
429 #if TARGET_LONG_BITS == 64
430 tcg_temp_free_i32(cc_src1_32);
431 tcg_temp_free_i32(cc_src2_32);
432 #endif
433
434 return carry_32;
435 }
436
437 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
438 TCGv src2, int update_cc)
439 {
440 TCGv_i32 carry_32;
441 TCGv carry;
442
443 switch (dc->cc_op) {
444 case CC_OP_DIV:
445 case CC_OP_LOGIC:
446 /* Carry is known to be zero. Fall back to plain ADD. */
447 if (update_cc) {
448 gen_op_add_cc(dst, src1, src2);
449 } else {
450 tcg_gen_add_tl(dst, src1, src2);
451 }
452 return;
453
454 case CC_OP_ADD:
455 case CC_OP_TADD:
456 case CC_OP_TADDTV:
457 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
458 {
459 /* For 32-bit hosts, we can re-use the host's hardware carry
460 generation by using an ADD2 opcode. We discard the low
461 part of the output. Ideally we'd combine this operation
462 with the add that generated the carry in the first place. */
463 TCGv dst_low = tcg_temp_new();
464 tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
465 cpu_cc_src, src1, cpu_cc_src2, src2);
466 tcg_temp_free(dst_low);
467 goto add_done;
468 }
469 #endif
470 carry_32 = gen_add32_carry32();
471 break;
472
473 case CC_OP_SUB:
474 case CC_OP_TSUB:
475 case CC_OP_TSUBTV:
476 carry_32 = gen_sub32_carry32();
477 break;
478
479 default:
480 /* We need external help to produce the carry. */
481 carry_32 = tcg_temp_new_i32();
482 gen_helper_compute_C_icc(carry_32, cpu_env);
483 break;
484 }
485
486 #if TARGET_LONG_BITS == 64
487 carry = tcg_temp_new();
488 tcg_gen_extu_i32_i64(carry, carry_32);
489 #else
490 carry = carry_32;
491 #endif
492
493 tcg_gen_add_tl(dst, src1, src2);
494 tcg_gen_add_tl(dst, dst, carry);
495
496 tcg_temp_free_i32(carry_32);
497 #if TARGET_LONG_BITS == 64
498 tcg_temp_free(carry);
499 #endif
500
501 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
502 add_done:
503 #endif
504 if (update_cc) {
505 tcg_gen_mov_tl(cpu_cc_src, src1);
506 tcg_gen_mov_tl(cpu_cc_src2, src2);
507 tcg_gen_mov_tl(cpu_cc_dst, dst);
508 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
509 dc->cc_op = CC_OP_ADDX;
510 }
511 }
512
513 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
514 {
515 tcg_gen_mov_tl(cpu_cc_src, src1);
516 tcg_gen_mov_tl(cpu_cc_src2, src2);
517 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
518 tcg_gen_mov_tl(dst, cpu_cc_dst);
519 }
520
521 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
522 {
523 tcg_gen_mov_tl(cpu_cc_src, src1);
524 tcg_gen_mov_tl(cpu_cc_src2, src2);
525 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
526 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
527 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
528 tcg_gen_mov_tl(dst, cpu_cc_dst);
529 }
530
531 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
532 {
533 TCGv r_temp;
534 TCGv_i32 r_const;
535 int l1;
536
537 l1 = gen_new_label();
538
539 r_temp = tcg_temp_new();
540 tcg_gen_xor_tl(r_temp, src1, src2);
541 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
542 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
543 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
544 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
545 r_const = tcg_const_i32(TT_TOVF);
546 gen_helper_raise_exception(cpu_env, r_const);
547 tcg_temp_free_i32(r_const);
548 gen_set_label(l1);
549 tcg_temp_free(r_temp);
550 }
551
552 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
553 {
554 tcg_gen_mov_tl(cpu_cc_src, src1);
555 tcg_gen_movi_tl(cpu_cc_src2, src2);
556 if (src2 == 0) {
557 tcg_gen_mov_tl(cpu_cc_dst, src1);
558 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
559 dc->cc_op = CC_OP_LOGIC;
560 } else {
561 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
562 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
563 dc->cc_op = CC_OP_SUB;
564 }
565 tcg_gen_mov_tl(dst, cpu_cc_dst);
566 }
567
568 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
569 {
570 tcg_gen_mov_tl(cpu_cc_src, src1);
571 tcg_gen_mov_tl(cpu_cc_src2, src2);
572 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
573 tcg_gen_mov_tl(dst, cpu_cc_dst);
574 }
575
576 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
577 TCGv src2, int update_cc)
578 {
579 TCGv_i32 carry_32;
580 TCGv carry;
581
582 switch (dc->cc_op) {
583 case CC_OP_DIV:
584 case CC_OP_LOGIC:
585 /* Carry is known to be zero. Fall back to plain SUB. */
586 if (update_cc) {
587 gen_op_sub_cc(dst, src1, src2);
588 } else {
589 tcg_gen_sub_tl(dst, src1, src2);
590 }
591 return;
592
593 case CC_OP_ADD:
594 case CC_OP_TADD:
595 case CC_OP_TADDTV:
596 carry_32 = gen_add32_carry32();
597 break;
598
599 case CC_OP_SUB:
600 case CC_OP_TSUB:
601 case CC_OP_TSUBTV:
602 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
603 {
604 /* For 32-bit hosts, we can re-use the host's hardware carry
605 generation by using a SUB2 opcode. We discard the low
606 part of the output. Ideally we'd combine this operation
607 with the add that generated the carry in the first place. */
608 TCGv dst_low = tcg_temp_new();
609 tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
610 cpu_cc_src, src1, cpu_cc_src2, src2);
611 tcg_temp_free(dst_low);
612 goto sub_done;
613 }
614 #endif
615 carry_32 = gen_sub32_carry32();
616 break;
617
618 default:
619 /* We need external help to produce the carry. */
620 carry_32 = tcg_temp_new_i32();
621 gen_helper_compute_C_icc(carry_32, cpu_env);
622 break;
623 }
624
625 #if TARGET_LONG_BITS == 64
626 carry = tcg_temp_new();
627 tcg_gen_extu_i32_i64(carry, carry_32);
628 #else
629 carry = carry_32;
630 #endif
631
632 tcg_gen_sub_tl(dst, src1, src2);
633 tcg_gen_sub_tl(dst, dst, carry);
634
635 tcg_temp_free_i32(carry_32);
636 #if TARGET_LONG_BITS == 64
637 tcg_temp_free(carry);
638 #endif
639
640 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
641 sub_done:
642 #endif
643 if (update_cc) {
644 tcg_gen_mov_tl(cpu_cc_src, src1);
645 tcg_gen_mov_tl(cpu_cc_src2, src2);
646 tcg_gen_mov_tl(cpu_cc_dst, dst);
647 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
648 dc->cc_op = CC_OP_SUBX;
649 }
650 }
651
652 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
653 {
654 tcg_gen_mov_tl(cpu_cc_src, src1);
655 tcg_gen_mov_tl(cpu_cc_src2, src2);
656 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
657 tcg_gen_mov_tl(dst, cpu_cc_dst);
658 }
659
660 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
661 {
662 tcg_gen_mov_tl(cpu_cc_src, src1);
663 tcg_gen_mov_tl(cpu_cc_src2, src2);
664 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
665 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
666 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
667 tcg_gen_mov_tl(dst, cpu_cc_dst);
668 }
669
670 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
671 {
672 TCGv r_temp;
673 int l1;
674
675 l1 = gen_new_label();
676 r_temp = tcg_temp_new();
677
678 /* old op:
679 if (!(env->y & 1))
680 T1 = 0;
681 */
682 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
683 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
684 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
685 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
686 tcg_gen_movi_tl(cpu_cc_src2, 0);
687 gen_set_label(l1);
688
689 // b2 = T0 & 1;
690 // env->y = (b2 << 31) | (env->y >> 1);
691 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
692 tcg_gen_shli_tl(r_temp, r_temp, 31);
693 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
694 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
695 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
696 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
697
698 // b1 = N ^ V;
699 gen_mov_reg_N(cpu_tmp0, cpu_psr);
700 gen_mov_reg_V(r_temp, cpu_psr);
701 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
702 tcg_temp_free(r_temp);
703
704 // T0 = (b1 << 31) | (T0 >> 1);
705 // src1 = T0;
706 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
707 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
708 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
709
710 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
711
712 tcg_gen_mov_tl(dst, cpu_cc_dst);
713 }
714
715 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
716 {
717 TCGv_i32 r_src1, r_src2;
718 TCGv_i64 r_temp, r_temp2;
719
720 r_src1 = tcg_temp_new_i32();
721 r_src2 = tcg_temp_new_i32();
722
723 tcg_gen_trunc_tl_i32(r_src1, src1);
724 tcg_gen_trunc_tl_i32(r_src2, src2);
725
726 r_temp = tcg_temp_new_i64();
727 r_temp2 = tcg_temp_new_i64();
728
729 if (sign_ext) {
730 tcg_gen_ext_i32_i64(r_temp, r_src2);
731 tcg_gen_ext_i32_i64(r_temp2, r_src1);
732 } else {
733 tcg_gen_extu_i32_i64(r_temp, r_src2);
734 tcg_gen_extu_i32_i64(r_temp2, r_src1);
735 }
736
737 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
738
739 tcg_gen_shri_i64(r_temp, r_temp2, 32);
740 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
741 tcg_temp_free_i64(r_temp);
742 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
743
744 tcg_gen_trunc_i64_tl(dst, r_temp2);
745
746 tcg_temp_free_i64(r_temp2);
747
748 tcg_temp_free_i32(r_src1);
749 tcg_temp_free_i32(r_src2);
750 }
751
752 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
753 {
754 /* zero-extend truncated operands before multiplication */
755 gen_op_multiply(dst, src1, src2, 0);
756 }
757
758 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
759 {
760 /* sign-extend truncated operands before multiplication */
761 gen_op_multiply(dst, src1, src2, 1);
762 }
763
764 #ifdef TARGET_SPARC64
765 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
766 {
767 TCGv_i32 r_const;
768 int l1;
769
770 l1 = gen_new_label();
771 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
772 r_const = tcg_const_i32(TT_DIV_ZERO);
773 gen_helper_raise_exception(cpu_env, r_const);
774 tcg_temp_free_i32(r_const);
775 gen_set_label(l1);
776 }
777
778 static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
779 {
780 int l1, l2;
781 TCGv r_temp1, r_temp2;
782
783 l1 = gen_new_label();
784 l2 = gen_new_label();
785 r_temp1 = tcg_temp_local_new();
786 r_temp2 = tcg_temp_local_new();
787 tcg_gen_mov_tl(r_temp1, src1);
788 tcg_gen_mov_tl(r_temp2, src2);
789 gen_trap_ifdivzero_tl(r_temp2);
790 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
791 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
792 tcg_gen_movi_i64(dst, INT64_MIN);
793 tcg_gen_br(l2);
794 gen_set_label(l1);
795 tcg_gen_div_i64(dst, r_temp1, r_temp2);
796 gen_set_label(l2);
797 tcg_temp_free(r_temp1);
798 tcg_temp_free(r_temp2);
799 }
800 #endif
801
802 // 1
803 static inline void gen_op_eval_ba(TCGv dst)
804 {
805 tcg_gen_movi_tl(dst, 1);
806 }
807
808 // Z
809 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
810 {
811 gen_mov_reg_Z(dst, src);
812 }
813
814 // Z | (N ^ V)
815 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
816 {
817 gen_mov_reg_N(cpu_tmp0, src);
818 gen_mov_reg_V(dst, src);
819 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
820 gen_mov_reg_Z(cpu_tmp0, src);
821 tcg_gen_or_tl(dst, dst, cpu_tmp0);
822 }
823
824 // N ^ V
825 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
826 {
827 gen_mov_reg_V(cpu_tmp0, src);
828 gen_mov_reg_N(dst, src);
829 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
830 }
831
832 // C | Z
833 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
834 {
835 gen_mov_reg_Z(cpu_tmp0, src);
836 gen_mov_reg_C(dst, src);
837 tcg_gen_or_tl(dst, dst, cpu_tmp0);
838 }
839
840 // C
841 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
842 {
843 gen_mov_reg_C(dst, src);
844 }
845
846 // V
847 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
848 {
849 gen_mov_reg_V(dst, src);
850 }
851
852 // 0
853 static inline void gen_op_eval_bn(TCGv dst)
854 {
855 tcg_gen_movi_tl(dst, 0);
856 }
857
858 // N
859 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
860 {
861 gen_mov_reg_N(dst, src);
862 }
863
864 // !Z
865 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
866 {
867 gen_mov_reg_Z(dst, src);
868 tcg_gen_xori_tl(dst, dst, 0x1);
869 }
870
871 // !(Z | (N ^ V))
872 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
873 {
874 gen_mov_reg_N(cpu_tmp0, src);
875 gen_mov_reg_V(dst, src);
876 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
877 gen_mov_reg_Z(cpu_tmp0, src);
878 tcg_gen_or_tl(dst, dst, cpu_tmp0);
879 tcg_gen_xori_tl(dst, dst, 0x1);
880 }
881
882 // !(N ^ V)
883 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
884 {
885 gen_mov_reg_V(cpu_tmp0, src);
886 gen_mov_reg_N(dst, src);
887 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
888 tcg_gen_xori_tl(dst, dst, 0x1);
889 }
890
891 // !(C | Z)
892 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
893 {
894 gen_mov_reg_Z(cpu_tmp0, src);
895 gen_mov_reg_C(dst, src);
896 tcg_gen_or_tl(dst, dst, cpu_tmp0);
897 tcg_gen_xori_tl(dst, dst, 0x1);
898 }
899
900 // !C
901 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
902 {
903 gen_mov_reg_C(dst, src);
904 tcg_gen_xori_tl(dst, dst, 0x1);
905 }
906
907 // !N
908 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
909 {
910 gen_mov_reg_N(dst, src);
911 tcg_gen_xori_tl(dst, dst, 0x1);
912 }
913
914 // !V
915 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
916 {
917 gen_mov_reg_V(dst, src);
918 tcg_gen_xori_tl(dst, dst, 0x1);
919 }
920
921 /*
922 FPSR bit field FCC1 | FCC0:
923 0 =
924 1 <
925 2 >
926 3 unordered
927 */
928 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
929 unsigned int fcc_offset)
930 {
931 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
932 tcg_gen_andi_tl(reg, reg, 0x1);
933 }
934
935 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
936 unsigned int fcc_offset)
937 {
938 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
939 tcg_gen_andi_tl(reg, reg, 0x1);
940 }
941
942 // !0: FCC0 | FCC1
943 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
944 unsigned int fcc_offset)
945 {
946 gen_mov_reg_FCC0(dst, src, fcc_offset);
947 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
948 tcg_gen_or_tl(dst, dst, cpu_tmp0);
949 }
950
951 // 1 or 2: FCC0 ^ FCC1
952 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
953 unsigned int fcc_offset)
954 {
955 gen_mov_reg_FCC0(dst, src, fcc_offset);
956 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
957 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
958 }
959
960 // 1 or 3: FCC0
961 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
962 unsigned int fcc_offset)
963 {
964 gen_mov_reg_FCC0(dst, src, fcc_offset);
965 }
966
967 // 1: FCC0 & !FCC1
968 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
969 unsigned int fcc_offset)
970 {
971 gen_mov_reg_FCC0(dst, src, fcc_offset);
972 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
973 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
974 tcg_gen_and_tl(dst, dst, cpu_tmp0);
975 }
976
977 // 2 or 3: FCC1
978 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
979 unsigned int fcc_offset)
980 {
981 gen_mov_reg_FCC1(dst, src, fcc_offset);
982 }
983
984 // 2: !FCC0 & FCC1
985 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
986 unsigned int fcc_offset)
987 {
988 gen_mov_reg_FCC0(dst, src, fcc_offset);
989 tcg_gen_xori_tl(dst, dst, 0x1);
990 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
991 tcg_gen_and_tl(dst, dst, cpu_tmp0);
992 }
993
994 // 3: FCC0 & FCC1
995 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
996 unsigned int fcc_offset)
997 {
998 gen_mov_reg_FCC0(dst, src, fcc_offset);
999 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1000 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1001 }
1002
1003 // 0: !(FCC0 | FCC1)
1004 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1005 unsigned int fcc_offset)
1006 {
1007 gen_mov_reg_FCC0(dst, src, fcc_offset);
1008 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1009 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1010 tcg_gen_xori_tl(dst, dst, 0x1);
1011 }
1012
1013 // 0 or 3: !(FCC0 ^ FCC1)
1014 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1015 unsigned int fcc_offset)
1016 {
1017 gen_mov_reg_FCC0(dst, src, fcc_offset);
1018 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1019 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1020 tcg_gen_xori_tl(dst, dst, 0x1);
1021 }
1022
1023 // 0 or 2: !FCC0
1024 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1025 unsigned int fcc_offset)
1026 {
1027 gen_mov_reg_FCC0(dst, src, fcc_offset);
1028 tcg_gen_xori_tl(dst, dst, 0x1);
1029 }
1030
1031 // !1: !(FCC0 & !FCC1)
1032 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
1033 unsigned int fcc_offset)
1034 {
1035 gen_mov_reg_FCC0(dst, src, fcc_offset);
1036 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1037 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1038 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1039 tcg_gen_xori_tl(dst, dst, 0x1);
1040 }
1041
1042 // 0 or 1: !FCC1
1043 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1044 unsigned int fcc_offset)
1045 {
1046 gen_mov_reg_FCC1(dst, src, fcc_offset);
1047 tcg_gen_xori_tl(dst, dst, 0x1);
1048 }
1049
1050 // !2: !(!FCC0 & FCC1)
1051 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1052 unsigned int fcc_offset)
1053 {
1054 gen_mov_reg_FCC0(dst, src, fcc_offset);
1055 tcg_gen_xori_tl(dst, dst, 0x1);
1056 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1057 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1058 tcg_gen_xori_tl(dst, dst, 0x1);
1059 }
1060
1061 // !3: !(FCC0 & FCC1)
1062 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1063 unsigned int fcc_offset)
1064 {
1065 gen_mov_reg_FCC0(dst, src, fcc_offset);
1066 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1067 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1068 tcg_gen_xori_tl(dst, dst, 0x1);
1069 }
1070
1071 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1072 target_ulong pc2, TCGv r_cond)
1073 {
1074 int l1;
1075
1076 l1 = gen_new_label();
1077
1078 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1079
1080 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1081
1082 gen_set_label(l1);
1083 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1084 }
1085
1086 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1087 target_ulong pc2, TCGv r_cond)
1088 {
1089 int l1;
1090
1091 l1 = gen_new_label();
1092
1093 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1094
1095 gen_goto_tb(dc, 0, pc2, pc1);
1096
1097 gen_set_label(l1);
1098 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1099 }
1100
1101 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1102 TCGv r_cond)
1103 {
1104 int l1, l2;
1105
1106 l1 = gen_new_label();
1107 l2 = gen_new_label();
1108
1109 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1110
1111 tcg_gen_movi_tl(cpu_npc, npc1);
1112 tcg_gen_br(l2);
1113
1114 gen_set_label(l1);
1115 tcg_gen_movi_tl(cpu_npc, npc2);
1116 gen_set_label(l2);
1117 }
1118
1119 /* call this function before using the condition register as it may
1120 have been set for a jump */
1121 static inline void flush_cond(DisasContext *dc, TCGv cond)
1122 {
1123 if (dc->npc == JUMP_PC) {
1124 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1125 dc->npc = DYNAMIC_PC;
1126 }
1127 }
1128
1129 static inline void save_npc(DisasContext *dc, TCGv cond)
1130 {
1131 if (dc->npc == JUMP_PC) {
1132 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1133 dc->npc = DYNAMIC_PC;
1134 } else if (dc->npc != DYNAMIC_PC) {
1135 tcg_gen_movi_tl(cpu_npc, dc->npc);
1136 }
1137 }
1138
1139 static inline void save_state(DisasContext *dc, TCGv cond)
1140 {
1141 tcg_gen_movi_tl(cpu_pc, dc->pc);
1142 /* flush pending conditional evaluations before exposing cpu state */
1143 if (dc->cc_op != CC_OP_FLAGS) {
1144 dc->cc_op = CC_OP_FLAGS;
1145 gen_helper_compute_psr(cpu_env);
1146 }
1147 save_npc(dc, cond);
1148 }
1149
1150 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1151 {
1152 if (dc->npc == JUMP_PC) {
1153 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1154 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1155 dc->pc = DYNAMIC_PC;
1156 } else if (dc->npc == DYNAMIC_PC) {
1157 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1158 dc->pc = DYNAMIC_PC;
1159 } else {
1160 dc->pc = dc->npc;
1161 }
1162 }
1163
1164 static inline void gen_op_next_insn(void)
1165 {
1166 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1167 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1168 }
1169
1170 static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1171 DisasContext *dc)
1172 {
1173 TCGv_i32 r_src;
1174
1175 #ifdef TARGET_SPARC64
1176 if (cc)
1177 r_src = cpu_xcc;
1178 else
1179 r_src = cpu_psr;
1180 #else
1181 r_src = cpu_psr;
1182 #endif
1183 switch (dc->cc_op) {
1184 case CC_OP_FLAGS:
1185 break;
1186 default:
1187 gen_helper_compute_psr(cpu_env);
1188 dc->cc_op = CC_OP_FLAGS;
1189 break;
1190 }
1191 switch (cond) {
1192 case 0x0:
1193 gen_op_eval_bn(r_dst);
1194 break;
1195 case 0x1:
1196 gen_op_eval_be(r_dst, r_src);
1197 break;
1198 case 0x2:
1199 gen_op_eval_ble(r_dst, r_src);
1200 break;
1201 case 0x3:
1202 gen_op_eval_bl(r_dst, r_src);
1203 break;
1204 case 0x4:
1205 gen_op_eval_bleu(r_dst, r_src);
1206 break;
1207 case 0x5:
1208 gen_op_eval_bcs(r_dst, r_src);
1209 break;
1210 case 0x6:
1211 gen_op_eval_bneg(r_dst, r_src);
1212 break;
1213 case 0x7:
1214 gen_op_eval_bvs(r_dst, r_src);
1215 break;
1216 case 0x8:
1217 gen_op_eval_ba(r_dst);
1218 break;
1219 case 0x9:
1220 gen_op_eval_bne(r_dst, r_src);
1221 break;
1222 case 0xa:
1223 gen_op_eval_bg(r_dst, r_src);
1224 break;
1225 case 0xb:
1226 gen_op_eval_bge(r_dst, r_src);
1227 break;
1228 case 0xc:
1229 gen_op_eval_bgu(r_dst, r_src);
1230 break;
1231 case 0xd:
1232 gen_op_eval_bcc(r_dst, r_src);
1233 break;
1234 case 0xe:
1235 gen_op_eval_bpos(r_dst, r_src);
1236 break;
1237 case 0xf:
1238 gen_op_eval_bvc(r_dst, r_src);
1239 break;
1240 }
1241 }
1242
1243 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1244 {
1245 unsigned int offset;
1246
1247 switch (cc) {
1248 default:
1249 case 0x0:
1250 offset = 0;
1251 break;
1252 case 0x1:
1253 offset = 32 - 10;
1254 break;
1255 case 0x2:
1256 offset = 34 - 10;
1257 break;
1258 case 0x3:
1259 offset = 36 - 10;
1260 break;
1261 }
1262
1263 switch (cond) {
1264 case 0x0:
1265 gen_op_eval_bn(r_dst);
1266 break;
1267 case 0x1:
1268 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1269 break;
1270 case 0x2:
1271 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1272 break;
1273 case 0x3:
1274 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1275 break;
1276 case 0x4:
1277 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1278 break;
1279 case 0x5:
1280 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1281 break;
1282 case 0x6:
1283 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1284 break;
1285 case 0x7:
1286 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1287 break;
1288 case 0x8:
1289 gen_op_eval_ba(r_dst);
1290 break;
1291 case 0x9:
1292 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1293 break;
1294 case 0xa:
1295 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1296 break;
1297 case 0xb:
1298 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1299 break;
1300 case 0xc:
1301 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1302 break;
1303 case 0xd:
1304 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1305 break;
1306 case 0xe:
1307 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1308 break;
1309 case 0xf:
1310 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1311 break;
1312 }
1313 }
1314
1315 #ifdef TARGET_SPARC64
1316 // Inverted logic
1317 static const int gen_tcg_cond_reg[8] = {
1318 -1,
1319 TCG_COND_NE,
1320 TCG_COND_GT,
1321 TCG_COND_GE,
1322 -1,
1323 TCG_COND_EQ,
1324 TCG_COND_LE,
1325 TCG_COND_LT,
1326 };
1327
1328 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1329 {
1330 int l1;
1331
1332 l1 = gen_new_label();
1333 tcg_gen_movi_tl(r_dst, 0);
1334 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1335 tcg_gen_movi_tl(r_dst, 1);
1336 gen_set_label(l1);
1337 }
1338 #endif
1339
1340 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1341 TCGv r_cond)
1342 {
1343 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1344 target_ulong target = dc->pc + offset;
1345
1346 #ifdef TARGET_SPARC64
1347 if (unlikely(AM_CHECK(dc))) {
1348 target &= 0xffffffffULL;
1349 }
1350 #endif
1351 if (cond == 0x0) {
1352 /* unconditional not taken */
1353 if (a) {
1354 dc->pc = dc->npc + 4;
1355 dc->npc = dc->pc + 4;
1356 } else {
1357 dc->pc = dc->npc;
1358 dc->npc = dc->pc + 4;
1359 }
1360 } else if (cond == 0x8) {
1361 /* unconditional taken */
1362 if (a) {
1363 dc->pc = target;
1364 dc->npc = dc->pc + 4;
1365 } else {
1366 dc->pc = dc->npc;
1367 dc->npc = target;
1368 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1369 }
1370 } else {
1371 flush_cond(dc, r_cond);
1372 gen_cond(r_cond, cc, cond, dc);
1373 if (a) {
1374 gen_branch_a(dc, target, dc->npc, r_cond);
1375 dc->is_br = 1;
1376 } else {
1377 dc->pc = dc->npc;
1378 dc->jump_pc[0] = target;
1379 if (unlikely(dc->npc == DYNAMIC_PC)) {
1380 dc->jump_pc[1] = DYNAMIC_PC;
1381 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1382 } else {
1383 dc->jump_pc[1] = dc->npc + 4;
1384 dc->npc = JUMP_PC;
1385 }
1386 }
1387 }
1388 }
1389
1390 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
1391 TCGv r_cond)
1392 {
1393 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1394 target_ulong target = dc->pc + offset;
1395
1396 #ifdef TARGET_SPARC64
1397 if (unlikely(AM_CHECK(dc))) {
1398 target &= 0xffffffffULL;
1399 }
1400 #endif
1401 if (cond == 0x0) {
1402 /* unconditional not taken */
1403 if (a) {
1404 dc->pc = dc->npc + 4;
1405 dc->npc = dc->pc + 4;
1406 } else {
1407 dc->pc = dc->npc;
1408 dc->npc = dc->pc + 4;
1409 }
1410 } else if (cond == 0x8) {
1411 /* unconditional taken */
1412 if (a) {
1413 dc->pc = target;
1414 dc->npc = dc->pc + 4;
1415 } else {
1416 dc->pc = dc->npc;
1417 dc->npc = target;
1418 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1419 }
1420 } else {
1421 flush_cond(dc, r_cond);
1422 gen_fcond(r_cond, cc, cond);
1423 if (a) {
1424 gen_branch_a(dc, target, dc->npc, r_cond);
1425 dc->is_br = 1;
1426 } else {
1427 dc->pc = dc->npc;
1428 dc->jump_pc[0] = target;
1429 if (unlikely(dc->npc == DYNAMIC_PC)) {
1430 dc->jump_pc[1] = DYNAMIC_PC;
1431 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1432 } else {
1433 dc->jump_pc[1] = dc->npc + 4;
1434 dc->npc = JUMP_PC;
1435 }
1436 }
1437 }
1438 }
1439
1440 #ifdef TARGET_SPARC64
1441 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1442 TCGv r_cond, TCGv r_reg)
1443 {
1444 unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1445 target_ulong target = dc->pc + offset;
1446
1447 if (unlikely(AM_CHECK(dc))) {
1448 target &= 0xffffffffULL;
1449 }
1450 flush_cond(dc, r_cond);
1451 gen_cond_reg(r_cond, cond, r_reg);
1452 if (a) {
1453 gen_branch_a(dc, target, dc->npc, r_cond);
1454 dc->is_br = 1;
1455 } else {
1456 dc->pc = dc->npc;
1457 dc->jump_pc[0] = target;
1458 if (unlikely(dc->npc == DYNAMIC_PC)) {
1459 dc->jump_pc[1] = DYNAMIC_PC;
1460 tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
1461 } else {
1462 dc->jump_pc[1] = dc->npc + 4;
1463 dc->npc = JUMP_PC;
1464 }
1465 }
1466 }
1467
1468 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1469 {
1470 switch (fccno) {
1471 case 0:
1472 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1473 break;
1474 case 1:
1475 gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1476 break;
1477 case 2:
1478 gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1479 break;
1480 case 3:
1481 gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1482 break;
1483 }
1484 }
1485
1486 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1487 {
1488 switch (fccno) {
1489 case 0:
1490 gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1491 break;
1492 case 1:
1493 gen_helper_fcmpd_fcc1(cpu_env, r_rs1, r_rs2);
1494 break;
1495 case 2:
1496 gen_helper_fcmpd_fcc2(cpu_env, r_rs1, r_rs2);
1497 break;
1498 case 3:
1499 gen_helper_fcmpd_fcc3(cpu_env, r_rs1, r_rs2);
1500 break;
1501 }
1502 }
1503
1504 static inline void gen_op_fcmpq(int fccno)
1505 {
1506 switch (fccno) {
1507 case 0:
1508 gen_helper_fcmpq(cpu_env);
1509 break;
1510 case 1:
1511 gen_helper_fcmpq_fcc1(cpu_env);
1512 break;
1513 case 2:
1514 gen_helper_fcmpq_fcc2(cpu_env);
1515 break;
1516 case 3:
1517 gen_helper_fcmpq_fcc3(cpu_env);
1518 break;
1519 }
1520 }
1521
1522 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1523 {
1524 switch (fccno) {
1525 case 0:
1526 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1527 break;
1528 case 1:
1529 gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1530 break;
1531 case 2:
1532 gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1533 break;
1534 case 3:
1535 gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1536 break;
1537 }
1538 }
1539
1540 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1541 {
1542 switch (fccno) {
1543 case 0:
1544 gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1545 break;
1546 case 1:
1547 gen_helper_fcmped_fcc1(cpu_env, r_rs1, r_rs2);
1548 break;
1549 case 2:
1550 gen_helper_fcmped_fcc2(cpu_env, r_rs1, r_rs2);
1551 break;
1552 case 3:
1553 gen_helper_fcmped_fcc3(cpu_env, r_rs1, r_rs2);
1554 break;
1555 }
1556 }
1557
1558 static inline void gen_op_fcmpeq(int fccno)
1559 {
1560 switch (fccno) {
1561 case 0:
1562 gen_helper_fcmpeq(cpu_env);
1563 break;
1564 case 1:
1565 gen_helper_fcmpeq_fcc1(cpu_env);
1566 break;
1567 case 2:
1568 gen_helper_fcmpeq_fcc2(cpu_env);
1569 break;
1570 case 3:
1571 gen_helper_fcmpeq_fcc3(cpu_env);
1572 break;
1573 }
1574 }
1575
1576 #else
1577
1578 static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
1579 {
1580 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1581 }
1582
1583 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1584 {
1585 gen_helper_fcmpd(cpu_env, r_rs1, r_rs2);
1586 }
1587
1588 static inline void gen_op_fcmpq(int fccno)
1589 {
1590 gen_helper_fcmpq(cpu_env);
1591 }
1592
1593 static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
1594 {
1595 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1596 }
1597
1598 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1599 {
1600 gen_helper_fcmped(cpu_env, r_rs1, r_rs2);
1601 }
1602
1603 static inline void gen_op_fcmpeq(int fccno)
1604 {
1605 gen_helper_fcmpeq(cpu_env);
1606 }
1607 #endif
1608
1609 static inline void gen_op_fpexception_im(int fsr_flags)
1610 {
1611 TCGv_i32 r_const;
1612
1613 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1614 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1615 r_const = tcg_const_i32(TT_FP_EXCP);
1616 gen_helper_raise_exception(cpu_env, r_const);
1617 tcg_temp_free_i32(r_const);
1618 }
1619
1620 static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
1621 {
1622 #if !defined(CONFIG_USER_ONLY)
1623 if (!dc->fpu_enabled) {
1624 TCGv_i32 r_const;
1625
1626 save_state(dc, r_cond);
1627 r_const = tcg_const_i32(TT_NFPU_INSN);
1628 gen_helper_raise_exception(cpu_env, r_const);
1629 tcg_temp_free_i32(r_const);
1630 dc->is_br = 1;
1631 return 1;
1632 }
1633 #endif
1634 return 0;
1635 }
1636
1637 static inline void gen_op_clear_ieee_excp_and_FTT(void)
1638 {
1639 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
1640 }
1641
1642 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1643 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1644 {
1645 TCGv_i32 dst, src;
1646
1647 src = gen_load_fpr_F(dc, rs);
1648 dst = gen_dest_fpr_F();
1649
1650 gen(dst, cpu_env, src);
1651
1652 gen_store_fpr_F(dc, rd, dst);
1653 }
1654
1655 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1656 void (*gen)(TCGv_i32, TCGv_i32))
1657 {
1658 TCGv_i32 dst, src;
1659
1660 src = gen_load_fpr_F(dc, rs);
1661 dst = gen_dest_fpr_F();
1662
1663 gen(dst, src);
1664
1665 gen_store_fpr_F(dc, rd, dst);
1666 }
1667
1668 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1669 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1670 {
1671 TCGv_i32 dst, src1, src2;
1672
1673 src1 = gen_load_fpr_F(dc, rs1);
1674 src2 = gen_load_fpr_F(dc, rs2);
1675 dst = gen_dest_fpr_F();
1676
1677 gen(dst, cpu_env, src1, src2);
1678
1679 gen_store_fpr_F(dc, rd, dst);
1680 }
1681
1682 #ifdef TARGET_SPARC64
1683 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1684 void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1685 {
1686 TCGv_i32 dst, src1, src2;
1687
1688 src1 = gen_load_fpr_F(dc, rs1);
1689 src2 = gen_load_fpr_F(dc, rs2);
1690 dst = gen_dest_fpr_F();
1691
1692 gen(dst, src1, src2);
1693
1694 gen_store_fpr_F(dc, rd, dst);
1695 }
1696 #endif
1697
1698 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1699 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1700 {
1701 TCGv_i64 dst, src;
1702
1703 src = gen_load_fpr_D(dc, rs);
1704 dst = gen_dest_fpr_D();
1705
1706 gen(dst, cpu_env, src);
1707
1708 gen_store_fpr_D(dc, rd, dst);
1709 }
1710
1711 #ifdef TARGET_SPARC64
1712 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1713 void (*gen)(TCGv_i64, TCGv_i64))
1714 {
1715 TCGv_i64 dst, src;
1716
1717 src = gen_load_fpr_D(dc, rs);
1718 dst = gen_dest_fpr_D();
1719
1720 gen(dst, src);
1721
1722 gen_store_fpr_D(dc, rd, dst);
1723 }
1724 #endif
1725
1726 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1727 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1728 {
1729 TCGv_i64 dst, src1, src2;
1730
1731 src1 = gen_load_fpr_D(dc, rs1);
1732 src2 = gen_load_fpr_D(dc, rs2);
1733 dst = gen_dest_fpr_D();
1734
1735 gen(dst, cpu_env, src1, src2);
1736
1737 gen_store_fpr_D(dc, rd, dst);
1738 }
1739
1740 #ifdef TARGET_SPARC64
1741 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1742 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1743 {
1744 TCGv_i64 dst, src1, src2;
1745
1746 src1 = gen_load_fpr_D(dc, rs1);
1747 src2 = gen_load_fpr_D(dc, rs2);
1748 dst = gen_dest_fpr_D();
1749
1750 gen(dst, src1, src2);
1751
1752 gen_store_fpr_D(dc, rd, dst);
1753 }
1754
1755 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1756 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1757 {
1758 TCGv_i64 dst, src1, src2;
1759
1760 src1 = gen_load_fpr_D(dc, rs1);
1761 src2 = gen_load_fpr_D(dc, rs2);
1762 dst = gen_dest_fpr_D();
1763
1764 gen(dst, cpu_gsr, src1, src2);
1765
1766 gen_store_fpr_D(dc, rd, dst);
1767 }
1768
1769 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1770 void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1771 {
1772 TCGv_i64 dst, src0, src1, src2;
1773
1774 src1 = gen_load_fpr_D(dc, rs1);
1775 src2 = gen_load_fpr_D(dc, rs2);
1776 src0 = gen_load_fpr_D(dc, rd);
1777 dst = gen_dest_fpr_D();
1778
1779 gen(dst, src0, src1, src2);
1780
1781 gen_store_fpr_D(dc, rd, dst);
1782 }
1783 #endif
1784
1785 static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
1786 void (*gen)(TCGv_ptr))
1787 {
1788 gen_op_load_fpr_QT1(QFPREG(rs));
1789
1790 gen(cpu_env);
1791
1792 gen_op_store_QT0_fpr(QFPREG(rd));
1793 gen_update_fprs_dirty(QFPREG(rd));
1794 }
1795
1796 #ifdef TARGET_SPARC64
1797 static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
1798 void (*gen)(TCGv_ptr))
1799 {
1800 gen_op_load_fpr_QT1(QFPREG(rs));
1801
1802 gen(cpu_env);
1803
1804 gen_op_store_QT0_fpr(QFPREG(rd));
1805 gen_update_fprs_dirty(QFPREG(rd));
1806 }
1807 #endif
1808
1809 static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
1810 void (*gen)(TCGv_ptr))
1811 {
1812 gen_op_load_fpr_QT0(QFPREG(rs1));
1813 gen_op_load_fpr_QT1(QFPREG(rs2));
1814
1815 gen(cpu_env);
1816
1817 gen_op_store_QT0_fpr(QFPREG(rd));
1818 gen_update_fprs_dirty(QFPREG(rd));
1819 }
1820
1821 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1822 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1823 {
1824 TCGv_i64 dst;
1825 TCGv_i32 src1, src2;
1826
1827 src1 = gen_load_fpr_F(dc, rs1);
1828 src2 = gen_load_fpr_F(dc, rs2);
1829 dst = gen_dest_fpr_D();
1830
1831 gen(dst, cpu_env, src1, src2);
1832
1833 gen_store_fpr_D(dc, rd, dst);
1834 }
1835
1836 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1837 void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1838 {
1839 TCGv_i64 src1, src2;
1840
1841 src1 = gen_load_fpr_D(dc, rs1);
1842 src2 = gen_load_fpr_D(dc, rs2);
1843
1844 gen(cpu_env, src1, src2);
1845
1846 gen_op_store_QT0_fpr(QFPREG(rd));
1847 gen_update_fprs_dirty(QFPREG(rd));
1848 }
1849
1850 #ifdef TARGET_SPARC64
1851 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1852 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1853 {
1854 TCGv_i64 dst;
1855 TCGv_i32 src;
1856
1857 src = gen_load_fpr_F(dc, rs);
1858 dst = gen_dest_fpr_D();
1859
1860 gen(dst, cpu_env, src);
1861
1862 gen_store_fpr_D(dc, rd, dst);
1863 }
1864 #endif
1865
1866 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1867 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1868 {
1869 TCGv_i64 dst;
1870 TCGv_i32 src;
1871
1872 src = gen_load_fpr_F(dc, rs);
1873 dst = gen_dest_fpr_D();
1874
1875 gen(dst, cpu_env, src);
1876
1877 gen_store_fpr_D(dc, rd, dst);
1878 }
1879
1880 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1881 void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1882 {
1883 TCGv_i32 dst;
1884 TCGv_i64 src;
1885
1886 src = gen_load_fpr_D(dc, rs);
1887 dst = gen_dest_fpr_F();
1888
1889 gen(dst, cpu_env, src);
1890
1891 gen_store_fpr_F(dc, rd, dst);
1892 }
1893
1894 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1895 void (*gen)(TCGv_i32, TCGv_ptr))
1896 {
1897 TCGv_i32 dst;
1898
1899 gen_op_load_fpr_QT1(QFPREG(rs));
1900 dst = gen_dest_fpr_F();
1901
1902 gen(dst, cpu_env);
1903
1904 gen_store_fpr_F(dc, rd, dst);
1905 }
1906
1907 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1908 void (*gen)(TCGv_i64, TCGv_ptr))
1909 {
1910 TCGv_i64 dst;
1911
1912 gen_op_load_fpr_QT1(QFPREG(rs));
1913 dst = gen_dest_fpr_D();
1914
1915 gen(dst, cpu_env);
1916
1917 gen_store_fpr_D(dc, rd, dst);
1918 }
1919
1920 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1921 void (*gen)(TCGv_ptr, TCGv_i32))
1922 {
1923 TCGv_i32 src;
1924
1925 src = gen_load_fpr_F(dc, rs);
1926
1927 gen(cpu_env, src);
1928
1929 gen_op_store_QT0_fpr(QFPREG(rd));
1930 gen_update_fprs_dirty(QFPREG(rd));
1931 }
1932
1933 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1934 void (*gen)(TCGv_ptr, TCGv_i64))
1935 {
1936 TCGv_i64 src;
1937
1938 src = gen_load_fpr_D(dc, rs);
1939
1940 gen(cpu_env, src);
1941
1942 gen_op_store_QT0_fpr(QFPREG(rd));
1943 gen_update_fprs_dirty(QFPREG(rd));
1944 }
1945
1946 /* asi moves */
1947 #ifdef TARGET_SPARC64
1948 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1949 {
1950 int asi;
1951 TCGv_i32 r_asi;
1952
1953 if (IS_IMM) {
1954 r_asi = tcg_temp_new_i32();
1955 tcg_gen_mov_i32(r_asi, cpu_asi);
1956 } else {
1957 asi = GET_FIELD(insn, 19, 26);
1958 r_asi = tcg_const_i32(asi);
1959 }
1960 return r_asi;
1961 }
1962
1963 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1964 int sign)
1965 {
1966 TCGv_i32 r_asi, r_size, r_sign;
1967
1968 r_asi = gen_get_asi(insn, addr);
1969 r_size = tcg_const_i32(size);
1970 r_sign = tcg_const_i32(sign);
1971 gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_size, r_sign);
1972 tcg_temp_free_i32(r_sign);
1973 tcg_temp_free_i32(r_size);
1974 tcg_temp_free_i32(r_asi);
1975 }
1976
1977 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1978 {
1979 TCGv_i32 r_asi, r_size;
1980
1981 r_asi = gen_get_asi(insn, addr);
1982 r_size = tcg_const_i32(size);
1983 gen_helper_st_asi(cpu_env, addr, src, r_asi, r_size);
1984 tcg_temp_free_i32(r_size);
1985 tcg_temp_free_i32(r_asi);
1986 }
1987
1988 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1989 {
1990 TCGv_i32 r_asi, r_size, r_rd;
1991
1992 r_asi = gen_get_asi(insn, addr);
1993 r_size = tcg_const_i32(size);
1994 r_rd = tcg_const_i32(rd);
1995 gen_helper_ldf_asi(cpu_env, addr, r_asi, r_size, r_rd);
1996 tcg_temp_free_i32(r_rd);
1997 tcg_temp_free_i32(r_size);
1998 tcg_temp_free_i32(r_asi);
1999 }
2000
2001 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
2002 {
2003 TCGv_i32 r_asi, r_size, r_rd;
2004
2005 r_asi = gen_get_asi(insn, addr);
2006 r_size = tcg_const_i32(size);
2007 r_rd = tcg_const_i32(rd);
2008 gen_helper_stf_asi(cpu_env, addr, r_asi, r_size, r_rd);
2009 tcg_temp_free_i32(r_rd);
2010 tcg_temp_free_i32(r_size);
2011 tcg_temp_free_i32(r_asi);
2012 }
2013
2014 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2015 {
2016 TCGv_i32 r_asi, r_size, r_sign;
2017
2018 r_asi = gen_get_asi(insn, addr);
2019 r_size = tcg_const_i32(4);
2020 r_sign = tcg_const_i32(0);
2021 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2022 tcg_temp_free_i32(r_sign);
2023 gen_helper_st_asi(cpu_env, addr, dst, r_asi, r_size);
2024 tcg_temp_free_i32(r_size);
2025 tcg_temp_free_i32(r_asi);
2026 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2027 }
2028
2029 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2030 {
2031 TCGv_i32 r_asi, r_rd;
2032
2033 r_asi = gen_get_asi(insn, addr);
2034 r_rd = tcg_const_i32(rd);
2035 gen_helper_ldda_asi(cpu_env, addr, r_asi, r_rd);
2036 tcg_temp_free_i32(r_rd);
2037 tcg_temp_free_i32(r_asi);
2038 }
2039
2040 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2041 {
2042 TCGv_i32 r_asi, r_size;
2043
2044 gen_movl_reg_TN(rd + 1, cpu_tmp0);
2045 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2046 r_asi = gen_get_asi(insn, addr);
2047 r_size = tcg_const_i32(8);
2048 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2049 tcg_temp_free_i32(r_size);
2050 tcg_temp_free_i32(r_asi);
2051 }
2052
2053 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
2054 int rd)
2055 {
2056 TCGv r_val1;
2057 TCGv_i32 r_asi;
2058
2059 r_val1 = tcg_temp_new();
2060 gen_movl_reg_TN(rd, r_val1);
2061 r_asi = gen_get_asi(insn, addr);
2062 gen_helper_cas_asi(dst, cpu_env, addr, r_val1, val2, r_asi);
2063 tcg_temp_free_i32(r_asi);
2064 tcg_temp_free(r_val1);
2065 }
2066
2067 static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
2068 int rd)
2069 {
2070 TCGv_i32 r_asi;
2071
2072 gen_movl_reg_TN(rd, cpu_tmp64);
2073 r_asi = gen_get_asi(insn, addr);
2074 gen_helper_casx_asi(dst, cpu_env, addr, cpu_tmp64, val2, r_asi);
2075 tcg_temp_free_i32(r_asi);
2076 }
2077
2078 #elif !defined(CONFIG_USER_ONLY)
2079
2080 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
2081 int sign)
2082 {
2083 TCGv_i32 r_asi, r_size, r_sign;
2084
2085 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2086 r_size = tcg_const_i32(size);
2087 r_sign = tcg_const_i32(sign);
2088 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2089 tcg_temp_free(r_sign);
2090 tcg_temp_free(r_size);
2091 tcg_temp_free(r_asi);
2092 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2093 }
2094
2095 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
2096 {
2097 TCGv_i32 r_asi, r_size;
2098
2099 tcg_gen_extu_tl_i64(cpu_tmp64, src);
2100 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2101 r_size = tcg_const_i32(size);
2102 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2103 tcg_temp_free(r_size);
2104 tcg_temp_free(r_asi);
2105 }
2106
2107 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
2108 {
2109 TCGv_i32 r_asi, r_size, r_sign;
2110 TCGv_i64 r_val;
2111
2112 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2113 r_size = tcg_const_i32(4);
2114 r_sign = tcg_const_i32(0);
2115 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2116 tcg_temp_free(r_sign);
2117 r_val = tcg_temp_new_i64();
2118 tcg_gen_extu_tl_i64(r_val, dst);
2119 gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2120 tcg_temp_free_i64(r_val);
2121 tcg_temp_free(r_size);
2122 tcg_temp_free(r_asi);
2123 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
2124 }
2125
2126 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
2127 {
2128 TCGv_i32 r_asi, r_size, r_sign;
2129
2130 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2131 r_size = tcg_const_i32(8);
2132 r_sign = tcg_const_i32(0);
2133 gen_helper_ld_asi(cpu_tmp64, cpu_env, addr, r_asi, r_size, r_sign);
2134 tcg_temp_free(r_sign);
2135 tcg_temp_free(r_size);
2136 tcg_temp_free(r_asi);
2137 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
2138 gen_movl_TN_reg(rd + 1, cpu_tmp0);
2139 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
2140 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
2141 gen_movl_TN_reg(rd, hi);
2142 }
2143
2144 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
2145 {
2146 TCGv_i32 r_asi, r_size;
2147
2148 gen_movl_reg_TN(rd + 1, cpu_tmp0);
2149 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
2150 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2151 r_size = tcg_const_i32(8);
2152 gen_helper_st_asi(cpu_env, addr, cpu_tmp64, r_asi, r_size);
2153 tcg_temp_free(r_size);
2154 tcg_temp_free(r_asi);
2155 }
2156 #endif
2157
2158 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
2159 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
2160 {
2161 TCGv_i64 r_val;
2162 TCGv_i32 r_asi, r_size;
2163
2164 gen_ld_asi(dst, addr, insn, 1, 0);
2165
2166 r_val = tcg_const_i64(0xffULL);
2167 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
2168 r_size = tcg_const_i32(1);
2169 gen_helper_st_asi(cpu_env, addr, r_val, r_asi, r_size);
2170 tcg_temp_free_i32(r_size);
2171 tcg_temp_free_i32(r_asi);
2172 tcg_temp_free_i64(r_val);
2173 }
2174 #endif
2175
2176 static inline TCGv get_src1(unsigned int insn, TCGv def)
2177 {
2178 TCGv r_rs1 = def;
2179 unsigned int rs1;
2180
2181 rs1 = GET_FIELD(insn, 13, 17);
2182 if (rs1 == 0) {
2183 tcg_gen_movi_tl(def, 0);
2184 } else if (rs1 < 8) {
2185 r_rs1 = cpu_gregs[rs1];
2186 } else {
2187 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
2188 }
2189 return r_rs1;
2190 }
2191
2192 static inline TCGv get_src2(unsigned int insn, TCGv def)
2193 {
2194 TCGv r_rs2 = def;
2195
2196 if (IS_IMM) { /* immediate */
2197 target_long simm = GET_FIELDs(insn, 19, 31);
2198 tcg_gen_movi_tl(def, simm);
2199 } else { /* register */
2200 unsigned int rs2 = GET_FIELD(insn, 27, 31);
2201 if (rs2 == 0) {
2202 tcg_gen_movi_tl(def, 0);
2203 } else if (rs2 < 8) {
2204 r_rs2 = cpu_gregs[rs2];
2205 } else {
2206 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
2207 }
2208 }
2209 return r_rs2;
2210 }
2211
2212 #ifdef TARGET_SPARC64
2213 static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
2214 {
2215 TCGv_i32 r_tl = tcg_temp_new_i32();
2216
2217 /* load env->tl into r_tl */
2218 tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));
2219
2220 /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2221 tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2222
2223 /* calculate offset to current trap state from env->ts, reuse r_tl */
2224 tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2225 tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));
2226
2227 /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2228 {
2229 TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2230 tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2231 tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2232 tcg_temp_free_ptr(r_tl_tmp);
2233 }
2234
2235 tcg_temp_free_i32(r_tl);
2236 }
2237
2238 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2239 int width, bool cc, bool left)
2240 {
2241 TCGv lo1, lo2, t1, t2;
2242 uint64_t amask, tabl, tabr;
2243 int shift, imask, omask;
2244
2245 if (cc) {
2246 tcg_gen_mov_tl(cpu_cc_src, s1);
2247 tcg_gen_mov_tl(cpu_cc_src2, s2);
2248 tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2249 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2250 dc->cc_op = CC_OP_SUB;
2251 }
2252
2253 /* Theory of operation: there are two tables, left and right (not to
2254 be confused with the left and right versions of the opcode). These
2255 are indexed by the low 3 bits of the inputs. To make things "easy",
2256 these tables are loaded into two constants, TABL and TABR below.
2257 The operation index = (input & imask) << shift calculates the index
2258 into the constant, while val = (table >> index) & omask calculates
2259 the value we're looking for. */
2260 switch (width) {
2261 case 8:
2262 imask = 0x7;
2263 shift = 3;
2264 omask = 0xff;
2265 if (left) {
2266 tabl = 0x80c0e0f0f8fcfeffULL;
2267 tabr = 0xff7f3f1f0f070301ULL;
2268 } else {
2269 tabl = 0x0103070f1f3f7fffULL;
2270 tabr = 0xfffefcf8f0e0c080ULL;
2271 }
2272 break;
2273 case 16:
2274 imask = 0x6;
2275 shift = 1;
2276 omask = 0xf;
2277 if (left) {
2278 tabl = 0x8cef;
2279 tabr = 0xf731;
2280 } else {
2281 tabl = 0x137f;
2282 tabr = 0xfec8;
2283 }
2284 break;
2285 case 32:
2286 imask = 0x4;
2287 shift = 0;
2288 omask = 0x3;
2289 if (left) {
2290 tabl = (2 << 2) | 3;
2291 tabr = (3 << 2) | 1;
2292 } else {
2293 tabl = (1 << 2) | 3;
2294 tabr = (3 << 2) | 2;
2295 }
2296 break;
2297 default:
2298 abort();
2299 }
2300
2301 lo1 = tcg_temp_new();
2302 lo2 = tcg_temp_new();
2303 tcg_gen_andi_tl(lo1, s1, imask);
2304 tcg_gen_andi_tl(lo2, s2, imask);
2305 tcg_gen_shli_tl(lo1, lo1, shift);
2306 tcg_gen_shli_tl(lo2, lo2, shift);
2307
2308 t1 = tcg_const_tl(tabl);
2309 t2 = tcg_const_tl(tabr);
2310 tcg_gen_shr_tl(lo1, t1, lo1);
2311 tcg_gen_shr_tl(lo2, t2, lo2);
2312 tcg_gen_andi_tl(dst, lo1, omask);
2313 tcg_gen_andi_tl(lo2, lo2, omask);
2314
2315 amask = -8;
2316 if (AM_CHECK(dc)) {
2317 amask &= 0xffffffffULL;
2318 }
2319 tcg_gen_andi_tl(s1, s1, amask);
2320 tcg_gen_andi_tl(s2, s2, amask);
2321
2322 /* We want to compute
2323 dst = (s1 == s2 ? lo1 : lo1 & lo2).
2324 We've already done dst = lo1, so this reduces to
2325 dst &= (s1 == s2 ? -1 : lo2)
2326 Which we perform by
2327 lo2 |= -(s1 == s2)
2328 dst &= lo2
2329 */
2330 tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
2331 tcg_gen_neg_tl(t1, t1);
2332 tcg_gen_or_tl(lo2, lo2, t1);
2333 tcg_gen_and_tl(dst, dst, lo2);
2334
2335 tcg_temp_free(lo1);
2336 tcg_temp_free(lo2);
2337 tcg_temp_free(t1);
2338 tcg_temp_free(t2);
2339 }
2340
2341 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2342 {
2343 TCGv tmp = tcg_temp_new();
2344
2345 tcg_gen_add_tl(tmp, s1, s2);
2346 tcg_gen_andi_tl(dst, tmp, -8);
2347 if (left) {
2348 tcg_gen_neg_tl(tmp, tmp);
2349 }
2350 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2351
2352 tcg_temp_free(tmp);
2353 }
2354
2355 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2356 {
2357 TCGv t1, t2, shift;
2358
2359 t1 = tcg_temp_new();
2360 t2 = tcg_temp_new();
2361 shift = tcg_temp_new();
2362
2363 tcg_gen_andi_tl(shift, gsr, 7);
2364 tcg_gen_shli_tl(shift, shift, 3);
2365 tcg_gen_shl_tl(t1, s1, shift);
2366
2367 /* A shift of 64 does not produce 0 in TCG. Divide this into a
2368 shift of (up to 63) followed by a constant shift of 1. */
2369 tcg_gen_xori_tl(shift, shift, 63);
2370 tcg_gen_shr_tl(t2, s2, shift);
2371 tcg_gen_shri_tl(t2, t2, 1);
2372
2373 tcg_gen_or_tl(dst, t1, t2);
2374
2375 tcg_temp_free(t1);
2376 tcg_temp_free(t2);
2377 tcg_temp_free(shift);
2378 }
2379 #endif
2380
2381 #define CHECK_IU_FEATURE(dc, FEATURE) \
2382 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2383 goto illegal_insn;
2384 #define CHECK_FPU_FEATURE(dc, FEATURE) \
2385 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
2386 goto nfpu_insn;
2387
2388 /* before an instruction, dc->pc must be static */
2389 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2390 {
2391 unsigned int opc, rs1, rs2, rd;
2392 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
2393 TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2394 TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2395 target_long simm;
2396
2397 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
2398 tcg_gen_debug_insn_start(dc->pc);
2399
2400 opc = GET_FIELD(insn, 0, 1);
2401
2402 rd = GET_FIELD(insn, 2, 6);
2403
2404 cpu_tmp1 = cpu_src1 = tcg_temp_new();
2405 cpu_tmp2 = cpu_src2 = tcg_temp_new();
2406
2407 switch (opc) {
2408 case 0: /* branches/sethi */
2409 {
2410 unsigned int xop = GET_FIELD(insn, 7, 9);
2411 int32_t target;
2412 switch (xop) {
2413 #ifdef TARGET_SPARC64
2414 case 0x1: /* V9 BPcc */
2415 {
2416 int cc;
2417
2418 target = GET_FIELD_SP(insn, 0, 18);
2419 target = sign_extend(target, 19);
2420 target <<= 2;
2421 cc = GET_FIELD_SP(insn, 20, 21);
2422 if (cc == 0)
2423 do_branch(dc, target, insn, 0, cpu_cond);
2424 else if (cc == 2)
2425 do_branch(dc, target, insn, 1, cpu_cond);
2426 else
2427 goto illegal_insn;
2428 goto jmp_insn;
2429 }
2430 case 0x3: /* V9 BPr */
2431 {
2432 target = GET_FIELD_SP(insn, 0, 13) |
2433 (GET_FIELD_SP(insn, 20, 21) << 14);
2434 target = sign_extend(target, 16);
2435 target <<= 2;
2436 cpu_src1 = get_src1(insn, cpu_src1);
2437 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2438 goto jmp_insn;
2439 }
2440 case 0x5: /* V9 FBPcc */
2441 {
2442 int cc = GET_FIELD_SP(insn, 20, 21);
2443 if (gen_trap_ifnofpu(dc, cpu_cond))
2444 goto jmp_insn;
2445 target = GET_FIELD_SP(insn, 0, 18);
2446 target = sign_extend(target, 19);
2447 target <<= 2;
2448 do_fbranch(dc, target, insn, cc, cpu_cond);
2449 goto jmp_insn;
2450 }
2451 #else
2452 case 0x7: /* CBN+x */
2453 {
2454 goto ncp_insn;
2455 }
2456 #endif
2457 case 0x2: /* BN+x */
2458 {
2459 target = GET_FIELD(insn, 10, 31);
2460 target = sign_extend(target, 22);
2461 target <<= 2;
2462 do_branch(dc, target, insn, 0, cpu_cond);
2463 goto jmp_insn;
2464 }
2465 case 0x6: /* FBN+x */
2466 {
2467 if (gen_trap_ifnofpu(dc, cpu_cond))
2468 goto jmp_insn;
2469 target = GET_FIELD(insn, 10, 31);
2470 target = sign_extend(target, 22);
2471 target <<= 2;
2472 do_fbranch(dc, target, insn, 0, cpu_cond);
2473 goto jmp_insn;
2474 }
2475 case 0x4: /* SETHI */
2476 if (rd) { // nop
2477 uint32_t value = GET_FIELD(insn, 10, 31);
2478 TCGv r_const;
2479
2480 r_const = tcg_const_tl(value << 10);
2481 gen_movl_TN_reg(rd, r_const);
2482 tcg_temp_free(r_const);
2483 }
2484 break;
2485 case 0x0: /* UNIMPL */
2486 default:
2487 goto illegal_insn;
2488 }
2489 break;
2490 }
2491 break;
2492 case 1: /*CALL*/
2493 {
2494 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2495 TCGv r_const;
2496
2497 r_const = tcg_const_tl(dc->pc);
2498 gen_movl_TN_reg(15, r_const);
2499 tcg_temp_free(r_const);
2500 target += dc->pc;
2501 gen_mov_pc_npc(dc, cpu_cond);
2502 #ifdef TARGET_SPARC64
2503 if (unlikely(AM_CHECK(dc))) {
2504 target &= 0xffffffffULL;
2505 }
2506 #endif
2507 dc->npc = target;
2508 }
2509 goto jmp_insn;
2510 case 2: /* FPU & Logical Operations */
2511 {
2512 unsigned int xop = GET_FIELD(insn, 7, 12);
2513 if (xop == 0x3a) { /* generate trap */
2514 int cond;
2515
2516 cpu_src1 = get_src1(insn, cpu_src1);
2517 if (IS_IMM) {
2518 rs2 = GET_FIELD(insn, 25, 31);
2519 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2520 } else {
2521 rs2 = GET_FIELD(insn, 27, 31);
2522 if (rs2 != 0) {
2523 gen_movl_reg_TN(rs2, cpu_src2);
2524 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2525 } else
2526 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2527 }
2528
2529 cond = GET_FIELD(insn, 3, 6);
2530 if (cond == 0x8) { /* Trap Always */
2531 save_state(dc, cpu_cond);
2532 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2533 supervisor(dc))
2534 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2535 else
2536 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2537 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2538 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2539 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2540
2541 } else if (cond != 0) {
2542 TCGv r_cond = tcg_temp_new();
2543 int l1;
2544 #ifdef TARGET_SPARC64
2545 /* V9 icc/xcc */
2546 int cc = GET_FIELD_SP(insn, 11, 12);
2547
2548 save_state(dc, cpu_cond);
2549 if (cc == 0)
2550 gen_cond(r_cond, 0, cond, dc);
2551 else if (cc == 2)
2552 gen_cond(r_cond, 1, cond, dc);
2553 else
2554 goto illegal_insn;
2555 #else
2556 save_state(dc, cpu_cond);
2557 gen_cond(r_cond, 0, cond, dc);
2558 #endif
2559 l1 = gen_new_label();
2560 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2561
2562 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2563 supervisor(dc))
2564 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2565 else
2566 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2567 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2568 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2569 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2570
2571 gen_set_label(l1);
2572 tcg_temp_free(r_cond);
2573 }
2574 gen_op_next_insn();
2575 tcg_gen_exit_tb(0);
2576 dc->is_br = 1;
2577 goto jmp_insn;
2578 } else if (xop == 0x28) {
2579 rs1 = GET_FIELD(insn, 13, 17);
2580 switch(rs1) {
2581 case 0: /* rdy */
2582 #ifndef TARGET_SPARC64
2583 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2584 manual, rdy on the microSPARC
2585 II */
2586 case 0x0f: /* stbar in the SPARCv8 manual,
2587 rdy on the microSPARC II */
2588 case 0x10 ... 0x1f: /* implementation-dependent in the
2589 SPARCv8 manual, rdy on the
2590 microSPARC II */
2591 /* Read Asr17 */
2592 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2593 TCGv r_const;
2594
2595 /* Read Asr17 for a Leon3 monoprocessor */
2596 r_const = tcg_const_tl((1 << 8)
2597 | (dc->def->nwindows - 1));
2598 gen_movl_TN_reg(rd, r_const);
2599 tcg_temp_free(r_const);
2600 break;
2601 }
2602 #endif
2603 gen_movl_TN_reg(rd, cpu_y);
2604 break;
2605 #ifdef TARGET_SPARC64
2606 case 0x2: /* V9 rdccr */
2607 gen_helper_compute_psr(cpu_env);
2608 gen_helper_rdccr(cpu_dst, cpu_env);
2609 gen_movl_TN_reg(rd, cpu_dst);
2610 break;
2611 case 0x3: /* V9 rdasi */
2612 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2613 gen_movl_TN_reg(rd, cpu_dst);
2614 break;
2615 case 0x4: /* V9 rdtick */
2616 {
2617 TCGv_ptr r_tickptr;
2618
2619 r_tickptr = tcg_temp_new_ptr();
2620 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2621 offsetof(CPUSPARCState, tick));
2622 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2623 tcg_temp_free_ptr(r_tickptr);
2624 gen_movl_TN_reg(rd, cpu_dst);
2625 }
2626 break;
2627 case 0x5: /* V9 rdpc */
2628 {
2629 TCGv r_const;
2630
2631 if (unlikely(AM_CHECK(dc))) {
2632 r_const = tcg_const_tl(dc->pc & 0xffffffffULL);
2633 } else {
2634 r_const = tcg_const_tl(dc->pc);
2635 }
2636 gen_movl_TN_reg(rd, r_const);
2637 tcg_temp_free(r_const);
2638 }
2639 break;
2640 case 0x6: /* V9 rdfprs */
2641 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2642 gen_movl_TN_reg(rd, cpu_dst);
2643 break;
2644 case 0xf: /* V9 membar */
2645 break; /* no effect */
2646 case 0x13: /* Graphics Status */
2647 if (gen_trap_ifnofpu(dc, cpu_cond))
2648 goto jmp_insn;
2649 gen_movl_TN_reg(rd, cpu_gsr);
2650 break;
2651 case 0x16: /* Softint */
2652 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2653 gen_movl_TN_reg(rd, cpu_dst);
2654 break;
2655 case 0x17: /* Tick compare */
2656 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2657 break;
2658 case 0x18: /* System tick */
2659 {
2660 TCGv_ptr r_tickptr;
2661
2662 r_tickptr = tcg_temp_new_ptr();
2663 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2664 offsetof(CPUSPARCState, stick));
2665 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2666 tcg_temp_free_ptr(r_tickptr);
2667 gen_movl_TN_reg(rd, cpu_dst);
2668 }
2669 break;
2670 case 0x19: /* System tick compare */
2671 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2672 break;
2673 case 0x10: /* Performance Control */
2674 case 0x11: /* Performance Instrumentation Counter */
2675 case 0x12: /* Dispatch Control */
2676 case 0x14: /* Softint set, WO */
2677 case 0x15: /* Softint clear, WO */
2678 #endif
2679 default:
2680 goto illegal_insn;
2681 }
2682 #if !defined(CONFIG_USER_ONLY)
2683 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2684 #ifndef TARGET_SPARC64
2685 if (!supervisor(dc))
2686 goto priv_insn;
2687 gen_helper_compute_psr(cpu_env);
2688 dc->cc_op = CC_OP_FLAGS;
2689 gen_helper_rdpsr(cpu_dst, cpu_env);
2690 #else
2691 CHECK_IU_FEATURE(dc, HYPV);
2692 if (!hypervisor(dc))
2693 goto priv_insn;
2694 rs1 = GET_FIELD(insn, 13, 17);
2695 switch (rs1) {
2696 case 0: // hpstate
2697 // gen_op_rdhpstate();
2698 break;
2699 case 1: // htstate
2700 // gen_op_rdhtstate();
2701 break;
2702 case 3: // hintp
2703 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2704 break;
2705 case 5: // htba
2706 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2707 break;
2708 case 6: // hver
2709 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2710 break;
2711 case 31: // hstick_cmpr
2712 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2713 break;
2714 default:
2715 goto illegal_insn;
2716 }
2717 #endif
2718 gen_movl_TN_reg(rd, cpu_dst);
2719 break;
2720 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2721 if (!supervisor(dc))
2722 goto priv_insn;
2723 #ifdef TARGET_SPARC64
2724 rs1 = GET_FIELD(insn, 13, 17);
2725 switch (rs1) {
2726 case 0: // tpc
2727 {
2728 TCGv_ptr r_tsptr;
2729
2730 r_tsptr = tcg_temp_new_ptr();
2731 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2732 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2733 offsetof(trap_state, tpc));
2734 tcg_temp_free_ptr(r_tsptr);
2735 }
2736 break;
2737 case 1: // tnpc
2738 {
2739 TCGv_ptr r_tsptr;
2740
2741 r_tsptr = tcg_temp_new_ptr();
2742 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2743 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2744 offsetof(trap_state, tnpc));
2745 tcg_temp_free_ptr(r_tsptr);
2746 }
2747 break;
2748 case 2: // tstate
2749 {
2750 TCGv_ptr r_tsptr;
2751
2752 r_tsptr = tcg_temp_new_ptr();
2753 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2754 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2755 offsetof(trap_state, tstate));
2756 tcg_temp_free_ptr(r_tsptr);
2757 }
2758 break;
2759 case 3: // tt
2760 {
2761 TCGv_ptr r_tsptr;
2762
2763 r_tsptr = tcg_temp_new_ptr();
2764 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2765 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2766 offsetof(trap_state, tt));
2767 tcg_temp_free_ptr(r_tsptr);
2768 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2769 }
2770 break;
2771 case 4: // tick
2772 {
2773 TCGv_ptr r_tickptr;
2774
2775 r_tickptr = tcg_temp_new_ptr();
2776 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2777 offsetof(CPUSPARCState, tick));
2778 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2779 gen_movl_TN_reg(rd, cpu_tmp0);
2780 tcg_temp_free_ptr(r_tickptr);
2781 }
2782 break;
2783 case 5: // tba
2784 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2785 break;
2786 case 6: // pstate
2787 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2788 offsetof(CPUSPARCState, pstate));
2789 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2790 break;
2791 case 7: // tl
2792 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2793 offsetof(CPUSPARCState, tl));
2794 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2795 break;
2796 case 8: // pil
2797 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2798 offsetof(CPUSPARCState, psrpil));
2799 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2800 break;
2801 case 9: // cwp
2802 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2803 break;
2804 case 10: // cansave
2805 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2806 offsetof(CPUSPARCState, cansave));
2807 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2808 break;
2809 case 11: // canrestore
2810 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2811 offsetof(CPUSPARCState, canrestore));
2812 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2813 break;
2814 case 12: // cleanwin
2815 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2816 offsetof(CPUSPARCState, cleanwin));
2817 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2818 break;
2819 case 13: // otherwin
2820 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2821 offsetof(CPUSPARCState, otherwin));
2822 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2823 break;
2824 case 14: // wstate
2825 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2826 offsetof(CPUSPARCState, wstate));
2827 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2828 break;
2829 case 16: // UA2005 gl
2830 CHECK_IU_FEATURE(dc, GL);
2831 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2832 offsetof(CPUSPARCState, gl));
2833 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2834 break;
2835 case 26: // UA2005 strand status
2836 CHECK_IU_FEATURE(dc, HYPV);
2837 if (!hypervisor(dc))
2838 goto priv_insn;
2839 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2840 break;
2841 case 31: // ver
2842 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2843 break;
2844 case 15: // fq
2845 default:
2846 goto illegal_insn;
2847 }
2848 #else
2849 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2850 #endif
2851 gen_movl_TN_reg(rd, cpu_tmp0);
2852 break;
2853 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2854 #ifdef TARGET_SPARC64
2855 save_state(dc, cpu_cond);
2856 gen_helper_flushw(cpu_env);
2857 #else
2858 if (!supervisor(dc))
2859 goto priv_insn;
2860 gen_movl_TN_reg(rd, cpu_tbr);
2861 #endif
2862 break;
2863 #endif
2864 } else if (xop == 0x34) { /* FPU Operations */
2865 if (gen_trap_ifnofpu(dc, cpu_cond))
2866 goto jmp_insn;
2867 gen_op_clear_ieee_excp_and_FTT();
2868 rs1 = GET_FIELD(insn, 13, 17);
2869 rs2 = GET_FIELD(insn, 27, 31);
2870 xop = GET_FIELD(insn, 18, 26);
2871 save_state(dc, cpu_cond);
2872 switch (xop) {
2873 case 0x1: /* fmovs */
2874 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
2875 gen_store_fpr_F(dc, rd, cpu_src1_32);
2876 break;
2877 case 0x5: /* fnegs */
2878 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
2879 break;
2880 case 0x9: /* fabss */
2881 gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
2882 break;
2883 case 0x29: /* fsqrts */
2884 CHECK_FPU_FEATURE(dc, FSQRT);
2885 gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
2886 break;
2887 case 0x2a: /* fsqrtd */
2888 CHECK_FPU_FEATURE(dc, FSQRT);
2889 gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
2890 break;
2891 case 0x2b: /* fsqrtq */
2892 CHECK_FPU_FEATURE(dc, FLOAT128);
2893 gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
2894 break;
2895 case 0x41: /* fadds */
2896 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
2897 break;
2898 case 0x42: /* faddd */
2899 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
2900 break;
2901 case 0x43: /* faddq */
2902 CHECK_FPU_FEATURE(dc, FLOAT128);
2903 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
2904 break;
2905 case 0x45: /* fsubs */
2906 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
2907 break;
2908 case 0x46: /* fsubd */
2909 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
2910 break;
2911 case 0x47: /* fsubq */
2912 CHECK_FPU_FEATURE(dc, FLOAT128);
2913 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
2914 break;
2915 case 0x49: /* fmuls */
2916 CHECK_FPU_FEATURE(dc, FMUL);
2917 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
2918 break;
2919 case 0x4a: /* fmuld */
2920 CHECK_FPU_FEATURE(dc, FMUL);
2921 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
2922 break;
2923 case 0x4b: /* fmulq */
2924 CHECK_FPU_FEATURE(dc, FLOAT128);
2925 CHECK_FPU_FEATURE(dc, FMUL);
2926 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
2927 break;
2928 case 0x4d: /* fdivs */
2929 gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
2930 break;
2931 case 0x4e: /* fdivd */
2932 gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
2933 break;
2934 case 0x4f: /* fdivq */
2935 CHECK_FPU_FEATURE(dc, FLOAT128);
2936 gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
2937 break;
2938 case 0x69: /* fsmuld */
2939 CHECK_FPU_FEATURE(dc, FSMULD);
2940 gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
2941 break;
2942 case 0x6e: /* fdmulq */
2943 CHECK_FPU_FEATURE(dc, FLOAT128);
2944 gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
2945 break;
2946 case 0xc4: /* fitos */
2947 gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
2948 break;
2949 case 0xc6: /* fdtos */
2950 gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
2951 break;
2952 case 0xc7: /* fqtos */
2953 CHECK_FPU_FEATURE(dc, FLOAT128);
2954 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
2955 break;
2956 case 0xc8: /* fitod */
2957 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
2958 break;
2959 case 0xc9: /* fstod */
2960 gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
2961 break;
2962 case 0xcb: /* fqtod */
2963 CHECK_FPU_FEATURE(dc, FLOAT128);
2964 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
2965 break;
2966 case 0xcc: /* fitoq */
2967 CHECK_FPU_FEATURE(dc, FLOAT128);
2968 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
2969 break;
2970 case 0xcd: /* fstoq */
2971 CHECK_FPU_FEATURE(dc, FLOAT128);
2972 gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
2973 break;
2974 case 0xce: /* fdtoq */
2975 CHECK_FPU_FEATURE(dc, FLOAT128);
2976 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
2977 break;
2978 case 0xd1: /* fstoi */
2979 gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
2980 break;
2981 case 0xd2: /* fdtoi */
2982 gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
2983 break;
2984 case 0xd3: /* fqtoi */
2985 CHECK_FPU_FEATURE(dc, FLOAT128);
2986 gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
2987 break;
2988 #ifdef TARGET_SPARC64
2989 case 0x2: /* V9 fmovd */
2990 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
2991 gen_store_fpr_D(dc, rd, cpu_src1_64);
2992 break;
2993 case 0x3: /* V9 fmovq */
2994 CHECK_FPU_FEATURE(dc, FLOAT128);
2995 gen_move_Q(rd, rs2);
2996 break;
2997 case 0x6: /* V9 fnegd */
2998 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
2999 break;
3000 case 0x7: /* V9 fnegq */
3001 CHECK_FPU_FEATURE(dc, FLOAT128);
3002 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3003 break;
3004 case 0xa: /* V9 fabsd */
3005 gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3006 break;
3007 case 0xb: /* V9 fabsq */
3008 CHECK_FPU_FEATURE(dc, FLOAT128);
3009 gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3010 break;
3011 case 0x81: /* V9 fstox */
3012 gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3013 break;
3014 case 0x82: /* V9 fdtox */
3015 gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3016 break;
3017 case 0x83: /* V9 fqtox */
3018 CHECK_FPU_FEATURE(dc, FLOAT128);
3019 gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3020 break;
3021 case 0x84: /* V9 fxtos */
3022 gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3023 break;
3024 case 0x88: /* V9 fxtod */
3025 gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3026 break;
3027 case 0x8c: /* V9 fxtoq */
3028 CHECK_FPU_FEATURE(dc, FLOAT128);
3029 gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3030 break;
3031 #endif
3032 default:
3033 goto illegal_insn;
3034 }
3035 } else if (xop == 0x35) { /* FPU Operations */
3036 #ifdef TARGET_SPARC64
3037 int cond;
3038 #endif
3039 if (gen_trap_ifnofpu(dc, cpu_cond))
3040 goto jmp_insn;
3041 gen_op_clear_ieee_excp_and_FTT();
3042 rs1 = GET_FIELD(insn, 13, 17);
3043 rs2 = GET_FIELD(insn, 27, 31);
3044 xop = GET_FIELD(insn, 18, 26);
3045 save_state(dc, cpu_cond);
3046 #ifdef TARGET_SPARC64
3047 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
3048 int l1;
3049
3050 l1 = gen_new_label();
3051 cond = GET_FIELD_SP(insn, 14, 17);
3052 cpu_src1 = get_src1(insn, cpu_src1);
3053 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3054 0, l1);
3055 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3056 gen_store_fpr_F(dc, rd, cpu_src1_32);
3057 gen_set_label(l1);
3058 break;
3059 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3060 int l1;
3061
3062 l1 = gen_new_label();
3063 cond = GET_FIELD_SP(insn, 14, 17);
3064 cpu_src1 = get_src1(insn, cpu_src1);
3065 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3066 0, l1);
3067 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3068 gen_store_fpr_D(dc, rd, cpu_src1_64);
3069 gen_set_label(l1);
3070 break;
3071 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3072 int l1;
3073
3074 CHECK_FPU_FEATURE(dc, FLOAT128);
3075 l1 = gen_new_label();
3076 cond = GET_FIELD_SP(insn, 14, 17);
3077 cpu_src1 = get_src1(insn, cpu_src1);
3078 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
3079 0, l1);
3080 gen_move_Q(rd, rs2);
3081 gen_set_label(l1);
3082 break;
3083 }
3084 #endif
3085 switch (xop) {
3086 #ifdef TARGET_SPARC64
3087 #define FMOVSCC(fcc) \
3088 { \
3089 TCGv r_cond; \
3090 int l1; \
3091 \
3092 l1 = gen_new_label(); \
3093 r_cond = tcg_temp_new(); \
3094 cond = GET_FIELD_SP(insn, 14, 17); \
3095 gen_fcond(r_cond, fcc, cond); \
3096 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3097 0, l1); \
3098 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3099 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3100 gen_set_label(l1); \
3101 tcg_temp_free(r_cond); \
3102 }
3103 #define FMOVDCC(fcc) \
3104 { \
3105 TCGv r_cond; \
3106 int l1; \
3107 \
3108 l1 = gen_new_label(); \
3109 r_cond = tcg_temp_new(); \
3110 cond = GET_FIELD_SP(insn, 14, 17); \
3111 gen_fcond(r_cond, fcc, cond); \
3112 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3113 0, l1); \
3114 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3115 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3116 gen_set_label(l1); \
3117 tcg_temp_free(r_cond); \
3118 }
3119 #define FMOVQCC(fcc) \
3120 { \
3121 TCGv r_cond; \
3122 int l1; \
3123 \
3124 l1 = gen_new_label(); \
3125 r_cond = tcg_temp_new(); \
3126 cond = GET_FIELD_SP(insn, 14, 17); \
3127 gen_fcond(r_cond, fcc, cond); \
3128 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3129 0, l1); \
3130 gen_move_Q(rd, rs2); \
3131 gen_set_label(l1); \
3132 tcg_temp_free(r_cond); \
3133 }
3134 case 0x001: /* V9 fmovscc %fcc0 */
3135 FMOVSCC(0);
3136 break;
3137 case 0x002: /* V9 fmovdcc %fcc0 */
3138 FMOVDCC(0);
3139 break;
3140 case 0x003: /* V9 fmovqcc %fcc0 */
3141 CHECK_FPU_FEATURE(dc, FLOAT128);
3142 FMOVQCC(0);
3143 break;
3144 case 0x041: /* V9 fmovscc %fcc1 */
3145 FMOVSCC(1);
3146 break;
3147 case 0x042: /* V9 fmovdcc %fcc1 */
3148 FMOVDCC(1);
3149 break;
3150 case 0x043: /* V9 fmovqcc %fcc1 */
3151 CHECK_FPU_FEATURE(dc, FLOAT128);
3152 FMOVQCC(1);
3153 break;
3154 case 0x081: /* V9 fmovscc %fcc2 */
3155 FMOVSCC(2);
3156 break;
3157 case 0x082: /* V9 fmovdcc %fcc2 */
3158 FMOVDCC(2);
3159 break;
3160 case 0x083: /* V9 fmovqcc %fcc2 */
3161 CHECK_FPU_FEATURE(dc, FLOAT128);
3162 FMOVQCC(2);
3163 break;
3164 case 0x0c1: /* V9 fmovscc %fcc3 */
3165 FMOVSCC(3);
3166 break;
3167 case 0x0c2: /* V9 fmovdcc %fcc3 */
3168 FMOVDCC(3);
3169 break;
3170 case 0x0c3: /* V9 fmovqcc %fcc3 */
3171 CHECK_FPU_FEATURE(dc, FLOAT128);
3172 FMOVQCC(3);
3173 break;
3174 #undef FMOVSCC
3175 #undef FMOVDCC
3176 #undef FMOVQCC
3177 #define FMOVSCC(icc) \
3178 { \
3179 TCGv r_cond; \
3180 int l1; \
3181 \
3182 l1 = gen_new_label(); \
3183 r_cond = tcg_temp_new(); \
3184 cond = GET_FIELD_SP(insn, 14, 17); \
3185 gen_cond(r_cond, icc, cond, dc); \
3186 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3187 0, l1); \
3188 cpu_src1_32 = gen_load_fpr_F(dc, rs2); \
3189 gen_store_fpr_F(dc, rd, cpu_src1_32); \
3190 gen_set_label(l1); \
3191 tcg_temp_free(r_cond); \
3192 }
3193 #define FMOVDCC(icc) \
3194 { \
3195 TCGv r_cond; \
3196 int l1; \
3197 \
3198 l1 = gen_new_label(); \
3199 r_cond = tcg_temp_new(); \
3200 cond = GET_FIELD_SP(insn, 14, 17); \
3201 gen_cond(r_cond, icc, cond, dc); \
3202 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3203 0, l1); \
3204 cpu_src1_64 = gen_load_fpr_D(dc, rs2); \
3205 gen_store_fpr_D(dc, rd, cpu_src1_64); \
3206 gen_update_fprs_dirty(DFPREG(rd)); \
3207 gen_set_label(l1); \
3208 tcg_temp_free(r_cond); \
3209 }
3210 #define FMOVQCC(icc) \
3211 { \
3212 TCGv r_cond; \
3213 int l1; \
3214 \
3215 l1 = gen_new_label(); \
3216 r_cond = tcg_temp_new(); \
3217 cond = GET_FIELD_SP(insn, 14, 17); \
3218 gen_cond(r_cond, icc, cond, dc); \
3219 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
3220 0, l1); \
3221 gen_move_Q(rd, rs2); \
3222 gen_set_label(l1); \
3223 tcg_temp_free(r_cond); \
3224 }
3225
3226 case 0x101: /* V9 fmovscc %icc */
3227 FMOVSCC(0);
3228 break;
3229 case 0x102: /* V9 fmovdcc %icc */
3230 FMOVDCC(0);
3231 break;
3232 case 0x103: /* V9 fmovqcc %icc */
3233 CHECK_FPU_FEATURE(dc, FLOAT128);
3234 FMOVQCC(0);
3235 break;
3236 case 0x181: /* V9 fmovscc %xcc */
3237 FMOVSCC(1);
3238 break;
3239 case 0x182: /* V9 fmovdcc %xcc */
3240 FMOVDCC(1);
3241 break;
3242 case 0x183: /* V9 fmovqcc %xcc */
3243 CHECK_FPU_FEATURE(dc, FLOAT128);
3244 FMOVQCC(1);
3245 break;
3246 #undef FMOVSCC
3247 #undef FMOVDCC
3248 #undef FMOVQCC
3249 #endif
3250 case 0x51: /* fcmps, V9 %fcc */
3251 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3252 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3253 gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3254 break;
3255 case 0x52: /* fcmpd, V9 %fcc */
3256 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3257 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3258 gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3259 break;
3260 case 0x53: /* fcmpq, V9 %fcc */
3261 CHECK_FPU_FEATURE(dc, FLOAT128);
3262 gen_op_load_fpr_QT0(QFPREG(rs1));
3263 gen_op_load_fpr_QT1(QFPREG(rs2));
3264 gen_op_fcmpq(rd & 3);
3265 break;
3266 case 0x55: /* fcmpes, V9 %fcc */
3267 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3268 cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3269 gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3270 break;
3271 case 0x56: /* fcmped, V9 %fcc */
3272 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3273 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3274 gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3275 break;
3276 case 0x57: /* fcmpeq, V9 %fcc */
3277 CHECK_FPU_FEATURE(dc, FLOAT128);
3278 gen_op_load_fpr_QT0(QFPREG(rs1));
3279 gen_op_load_fpr_QT1(QFPREG(rs2));
3280 gen_op_fcmpeq(rd & 3);
3281 break;
3282 default:
3283 goto illegal_insn;
3284 }
3285 } else if (xop == 0x2) {
3286 // clr/mov shortcut
3287
3288 rs1 = GET_FIELD(insn, 13, 17);
3289 if (rs1 == 0) {
3290 // or %g0, x, y -> mov T0, x; mov y, T0
3291 if (IS_IMM) { /* immediate */
3292 TCGv r_const;
3293
3294 simm = GET_FIELDs(insn, 19, 31);
3295 r_const = tcg_const_tl(simm);
3296 gen_movl_TN_reg(rd, r_const);
3297 tcg_temp_free(r_const);
3298 } else { /* register */
3299 rs2 = GET_FIELD(insn, 27, 31);
3300 gen_movl_reg_TN(rs2, cpu_dst);
3301 gen_movl_TN_reg(rd, cpu_dst);
3302 }
3303 } else {
3304 cpu_src1 = get_src1(insn, cpu_src1);
3305 if (IS_IMM) { /* immediate */
3306 simm = GET_FIELDs(insn, 19, 31);
3307 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3308 gen_movl_TN_reg(rd, cpu_dst);
3309 } else { /* register */
3310 // or x, %g0, y -> mov T1, x; mov y, T1
3311 rs2 = GET_FIELD(insn, 27, 31);
3312 if (rs2 != 0) {
3313 gen_movl_reg_TN(rs2, cpu_src2);
3314 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3315 gen_movl_TN_reg(rd, cpu_dst);
3316 } else
3317 gen_movl_TN_reg(rd, cpu_src1);
3318 }
3319 }
3320 #ifdef TARGET_SPARC64
3321 } else if (xop == 0x25) { /* sll, V9 sllx */
3322 cpu_src1 = get_src1(insn, cpu_src1);
3323 if (IS_IMM) { /* immediate */
3324 simm = GET_FIELDs(insn, 20, 31);
3325 if (insn & (1 << 12)) {
3326 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3327 } else {
3328 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3329 }
3330 } else { /* register */
3331 rs2 = GET_FIELD(insn, 27, 31);
3332 gen_movl_reg_TN(rs2, cpu_src2);
3333 if (insn & (1 << 12)) {
3334 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3335 } else {
3336 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3337 }
3338 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3339 }
3340 gen_movl_TN_reg(rd, cpu_dst);
3341 } else if (xop == 0x26) { /* srl, V9 srlx */
3342 cpu_src1 = get_src1(insn, cpu_src1);
3343 if (IS_IMM) { /* immediate */
3344 simm = GET_FIELDs(insn, 20, 31);
3345 if (insn & (1 << 12)) {
3346 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3347 } else {
3348 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3349 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3350 }
3351 } else { /* register */
3352 rs2 = GET_FIELD(insn, 27, 31);
3353 gen_movl_reg_TN(rs2, cpu_src2);
3354 if (insn & (1 << 12)) {
3355 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3356 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3357 } else {
3358 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3359 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3360 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3361 }
3362 }
3363 gen_movl_TN_reg(rd, cpu_dst);
3364 } else if (xop == 0x27) { /* sra, V9 srax */
3365 cpu_src1 = get_src1(insn, cpu_src1);
3366 if (IS_IMM) { /* immediate */
3367 simm = GET_FIELDs(insn, 20, 31);
3368 if (insn & (1 << 12)) {
3369 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3370 } else {
3371 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3372 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3373 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3374 }
3375 } else { /* register */
3376 rs2 = GET_FIELD(insn, 27, 31);
3377 gen_movl_reg_TN(rs2, cpu_src2);
3378 if (insn & (1 << 12)) {
3379 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3380 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3381 } else {
3382 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3383 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3384 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3385 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3386 }
3387 }
3388 gen_movl_TN_reg(rd, cpu_dst);
3389 #endif
3390 } else if (xop < 0x36) {
3391 if (xop < 0x20) {
3392 cpu_src1 = get_src1(insn, cpu_src1);
3393 cpu_src2 = get_src2(insn, cpu_src2);
3394 switch (xop & ~0x10) {
3395 case 0x0: /* add */
3396 if (IS_IMM) {
3397 simm = GET_FIELDs(insn, 19, 31);
3398 if (xop & 0x10) {
3399 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3400 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3401 dc->cc_op = CC_OP_ADD;
3402 } else {
3403 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3404 }
3405 } else {
3406 if (xop & 0x10) {
3407 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3408 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3409 dc->cc_op = CC_OP_ADD;
3410 } else {
3411 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3412 }
3413 }
3414 break;
3415 case 0x1: /* and */
3416 if (IS_IMM) {
3417 simm = GET_FIELDs(insn, 19, 31);
3418 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3419 } else {
3420 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3421 }
3422 if (xop & 0x10) {
3423 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3424 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3425 dc->cc_op = CC_OP_LOGIC;
3426 }
3427 break;
3428 case 0x2: /* or */
3429 if (IS_IMM) {
3430 simm = GET_FIELDs(insn, 19, 31);
3431 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3432 } else {
3433 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3434 }
3435 if (xop & 0x10) {
3436 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3437 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3438 dc->cc_op = CC_OP_LOGIC;
3439 }
3440 break;
3441 case 0x3: /* xor */
3442 if (IS_IMM) {
3443 simm = GET_FIELDs(insn, 19, 31);
3444 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3445 } else {
3446 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3447 }
3448 if (xop & 0x10) {
3449 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3450 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3451 dc->cc_op = CC_OP_LOGIC;
3452 }
3453 break;
3454 case 0x4: /* sub */
3455 if (IS_IMM) {
3456 simm = GET_FIELDs(insn, 19, 31);
3457 if (xop & 0x10) {
3458 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3459 } else {
3460 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3461 }
3462 } else {
3463 if (xop & 0x10) {
3464 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3465 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3466 dc->cc_op = CC_OP_SUB;
3467 } else {
3468 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3469 }
3470 }
3471 break;
3472 case 0x5: /* andn */
3473 if (IS_IMM) {
3474 simm = GET_FIELDs(insn, 19, 31);
3475 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3476 } else {
3477 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3478 }
3479 if (xop & 0x10) {
3480 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3481 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3482 dc->cc_op = CC_OP_LOGIC;
3483 }
3484 break;
3485 case 0x6: /* orn */
3486 if (IS_IMM) {
3487 simm = GET_FIELDs(insn, 19, 31);
3488 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3489 } else {
3490 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3491 }
3492 if (xop & 0x10) {
3493 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3494 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3495 dc->cc_op = CC_OP_LOGIC;
3496 }
3497 break;
3498 case 0x7: /* xorn */
3499 if (IS_IMM) {
3500 simm = GET_FIELDs(insn, 19, 31);
3501 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3502 } else {
3503 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3504 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3505 }
3506 if (xop & 0x10) {
3507 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3508 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3509 dc->cc_op = CC_OP_LOGIC;
3510 }
3511 break;
3512 case 0x8: /* addx, V9 addc */
3513 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3514 (xop & 0x10));
3515 break;
3516 #ifdef TARGET_SPARC64
3517 case 0x9: /* V9 mulx */
3518 if (IS_IMM) {
3519 simm = GET_FIELDs(insn, 19, 31);
3520 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3521 } else {
3522 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3523 }
3524 break;
3525 #endif
3526 case 0xa: /* umul */
3527 CHECK_IU_FEATURE(dc, MUL);
3528 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3529 if (xop & 0x10) {
3530 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3531 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3532 dc->cc_op = CC_OP_LOGIC;
3533 }
3534 break;
3535 case 0xb: /* smul */
3536 CHECK_IU_FEATURE(dc, MUL);
3537 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3538 if (xop & 0x10) {
3539 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3540 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3541 dc->cc_op = CC_OP_LOGIC;
3542 }
3543 break;
3544 case 0xc: /* subx, V9 subc */
3545 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3546 (xop & 0x10));
3547 break;
3548 #ifdef TARGET_SPARC64
3549 case 0xd: /* V9 udivx */
3550 {
3551 TCGv r_temp1, r_temp2;
3552 r_temp1 = tcg_temp_local_new();
3553 r_temp2 = tcg_temp_local_new();
3554 tcg_gen_mov_tl(r_temp1, cpu_src1);
3555 tcg_gen_mov_tl(r_temp2, cpu_src2);
3556 gen_trap_ifdivzero_tl(r_temp2);
3557 tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
3558 tcg_temp_free(r_temp1);
3559 tcg_temp_free(r_temp2);
3560 }
3561 break;
3562 #endif
3563 case 0xe: /* udiv */
3564 CHECK_IU_FEATURE(dc, DIV);
3565 if (xop & 0x10) {
3566 gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
3567 cpu_src2);
3568 dc->cc_op = CC_OP_DIV;
3569 } else {
3570 gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
3571 cpu_src2);
3572 }
3573 break;
3574 case 0xf: /* sdiv */
3575 CHECK_IU_FEATURE(dc, DIV);
3576 if (xop & 0x10) {
3577 gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
3578 cpu_src2);
3579 dc->cc_op = CC_OP_DIV;
3580 } else {
3581 gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
3582 cpu_src2);
3583 }
3584 break;
3585 default:
3586 goto illegal_insn;
3587 }
3588 gen_movl_TN_reg(rd, cpu_dst);
3589 } else {
3590 cpu_src1 = get_src1(insn, cpu_src1);
3591 cpu_src2 = get_src2(insn, cpu_src2);
3592 switch (xop) {
3593 case 0x20: /* taddcc */
3594 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3595 gen_movl_TN_reg(rd, cpu_dst);
3596 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3597 dc->cc_op = CC_OP_TADD;
3598 break;
3599 case 0x21: /* tsubcc */
3600 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3601 gen_movl_TN_reg(rd, cpu_dst);
3602 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3603 dc->cc_op = CC_OP_TSUB;
3604 break;
3605 case 0x22: /* taddcctv */
3606 save_state(dc, cpu_cond);
3607 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3608 gen_movl_TN_reg(rd, cpu_dst);
3609 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3610 dc->cc_op = CC_OP_TADDTV;
3611 break;
3612 case 0x23: /* tsubcctv */
3613 save_state(dc, cpu_cond);
3614 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3615 gen_movl_TN_reg(rd, cpu_dst);
3616 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3617 dc->cc_op = CC_OP_TSUBTV;
3618 break;
3619 case 0x24: /* mulscc */
3620 gen_helper_compute_psr(cpu_env);
3621 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3622 gen_movl_TN_reg(rd, cpu_dst);
3623 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3624 dc->cc_op = CC_OP_ADD;
3625 break;
3626 #ifndef TARGET_SPARC64
3627 case 0x25: /* sll */
3628 if (IS_IMM) { /* immediate */
3629 simm = GET_FIELDs(insn, 20, 31);
3630 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3631 } else { /* register */
3632 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3633 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3634 }
3635 gen_movl_TN_reg(rd, cpu_dst);
3636 break;
3637 case 0x26: /* srl */
3638 if (IS_IMM) { /* immediate */
3639 simm = GET_FIELDs(insn, 20, 31);
3640 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3641 } else { /* register */
3642 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3643 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3644 }
3645 gen_movl_TN_reg(rd, cpu_dst);
3646 break;
3647 case 0x27: /* sra */
3648 if (IS_IMM) { /* immediate */
3649 simm = GET_FIELDs(insn, 20, 31);
3650 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3651 } else { /* register */
3652 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3653 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3654 }
3655 gen_movl_TN_reg(rd, cpu_dst);
3656 break;
3657 #endif
3658 case 0x30:
3659 {
3660 switch(rd) {
3661 case 0: /* wry */
3662 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3663 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3664 break;
3665 #ifndef TARGET_SPARC64
3666 case 0x01 ... 0x0f: /* undefined in the
3667 SPARCv8 manual, nop
3668 on the microSPARC
3669 II */
3670 case 0x10 ... 0x1f: /* implementation-dependent
3671 in the SPARCv8
3672 manual, nop on the
3673 microSPARC II */
3674 break;
3675 #else
3676 case 0x2: /* V9 wrccr */
3677 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3678 gen_helper_wrccr(cpu_env, cpu_dst);
3679 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3680 dc->cc_op = CC_OP_FLAGS;
3681 break;
3682 case 0x3: /* V9 wrasi */
3683 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3684 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3685 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3686 break;
3687 case 0x6: /* V9 wrfprs */
3688 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3689 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3690 save_state(dc, cpu_cond);
3691 gen_op_next_insn();
3692 tcg_gen_exit_tb(0);
3693 dc->is_br = 1;
3694 break;
3695 case 0xf: /* V9 sir, nop if user */
3696 #if !defined(CONFIG_USER_ONLY)
3697 if (supervisor(dc)) {
3698 ; // XXX
3699 }
3700 #endif
3701 break;
3702 case 0x13: /* Graphics Status */
3703 if (gen_trap_ifnofpu(dc, cpu_cond))
3704 goto jmp_insn;
3705 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3706 break;
3707 case 0x14: /* Softint set */
3708 if (!supervisor(dc))
3709 goto illegal_insn;
3710 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3711 gen_helper_set_softint(cpu_env, cpu_tmp64);
3712 break;
3713 case 0x15: /* Softint clear */
3714 if (!supervisor(dc))
3715 goto illegal_insn;
3716 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3717 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3718 break;
3719 case 0x16: /* Softint write */
3720 if (!supervisor(dc))
3721 goto illegal_insn;
3722 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3723 gen_helper_write_softint(cpu_env, cpu_tmp64);
3724 break;
3725 case 0x17: /* Tick compare */
3726 #if !defined(CONFIG_USER_ONLY)
3727 if (!supervisor(dc))
3728 goto illegal_insn;
3729 #endif
3730 {
3731 TCGv_ptr r_tickptr;
3732
3733 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3734 cpu_src2);
3735 r_tickptr = tcg_temp_new_ptr();
3736 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3737 offsetof(CPUSPARCState, tick));
3738 gen_helper_tick_set_limit(r_tickptr,
3739 cpu_tick_cmpr);
3740 tcg_temp_free_ptr(r_tickptr);
3741 }
3742 break;
3743 case 0x18: /* System tick */
3744 #if !defined(CONFIG_USER_ONLY)
3745 if (!supervisor(dc))
3746 goto illegal_insn;
3747 #endif
3748 {
3749 TCGv_ptr r_tickptr;
3750
3751 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3752 cpu_src2);
3753 r_tickptr = tcg_temp_new_ptr();
3754 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3755 offsetof(CPUSPARCState, stick));
3756 gen_helper_tick_set_count(r_tickptr,
3757 cpu_dst);
3758 tcg_temp_free_ptr(r_tickptr);
3759 }
3760 break;
3761 case 0x19: /* System tick compare */
3762 #if !defined(CONFIG_USER_ONLY)
3763 if (!supervisor(dc))
3764 goto illegal_insn;
3765 #endif
3766 {
3767 TCGv_ptr r_tickptr;
3768
3769 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3770 cpu_src2);
3771 r_tickptr = tcg_temp_new_ptr();
3772 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3773 offsetof(CPUSPARCState, stick));
3774 gen_helper_tick_set_limit(r_tickptr,
3775 cpu_stick_cmpr);
3776 tcg_temp_free_ptr(r_tickptr);
3777 }
3778 break;
3779
3780 case 0x10: /* Performance Control */
3781 case 0x11: /* Performance Instrumentation
3782 Counter */
3783 case 0x12: /* Dispatch Control */
3784 #endif
3785 default:
3786 goto illegal_insn;
3787 }
3788 }
3789 break;
3790 #if !defined(CONFIG_USER_ONLY)
3791 case 0x31: /* wrpsr, V9 saved, restored */
3792 {
3793 if (!supervisor(dc))
3794 goto priv_insn;
3795 #ifdef TARGET_SPARC64
3796 switch (rd) {
3797 case 0:
3798 gen_helper_saved(cpu_env);
3799 break;
3800 case 1:
3801 gen_helper_restored(cpu_env);
3802 break;
3803 case 2: /* UA2005 allclean */
3804 case 3: /* UA2005 otherw */
3805 case 4: /* UA2005 normalw */
3806 case 5: /* UA2005 invalw */
3807 // XXX
3808 default:
3809 goto illegal_insn;
3810 }
3811 #else
3812 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3813 gen_helper_wrpsr(cpu_env, cpu_dst);
3814 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3815 dc->cc_op = CC_OP_FLAGS;
3816 save_state(dc, cpu_cond);
3817 gen_op_next_insn();
3818 tcg_gen_exit_tb(0);
3819 dc->is_br = 1;
3820 #endif
3821 }
3822 break;
3823 case 0x32: /* wrwim, V9 wrpr */
3824 {
3825 if (!supervisor(dc))
3826 goto priv_insn;
3827 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3828 #ifdef TARGET_SPARC64
3829 switch (rd) {
3830 case 0: // tpc
3831 {
3832 TCGv_ptr r_tsptr;
3833
3834 r_tsptr = tcg_temp_new_ptr();
3835 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3836 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3837 offsetof(trap_state, tpc));
3838 tcg_temp_free_ptr(r_tsptr);
3839 }
3840 break;
3841 case 1: // tnpc
3842 {
3843 TCGv_ptr r_tsptr;
3844
3845 r_tsptr = tcg_temp_new_ptr();
3846 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3847 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3848 offsetof(trap_state, tnpc));
3849 tcg_temp_free_ptr(r_tsptr);
3850 }
3851 break;
3852 case 2: // tstate
3853 {
3854 TCGv_ptr r_tsptr;
3855
3856 r_tsptr = tcg_temp_new_ptr();
3857 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3858 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3859 offsetof(trap_state,
3860 tstate));
3861 tcg_temp_free_ptr(r_tsptr);
3862 }
3863 break;
3864 case 3: // tt
3865 {
3866 TCGv_ptr r_tsptr;
3867
3868 r_tsptr = tcg_temp_new_ptr();
3869 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3870 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3871 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3872 offsetof(trap_state, tt));
3873 tcg_temp_free_ptr(r_tsptr);
3874 }
3875 break;
3876 case 4: // tick
3877 {
3878 TCGv_ptr r_tickptr;
3879
3880 r_tickptr = tcg_temp_new_ptr();
3881 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3882 offsetof(CPUSPARCState, tick));
3883 gen_helper_tick_set_count(r_tickptr,
3884 cpu_tmp0);
3885 tcg_temp_free_ptr(r_tickptr);
3886 }
3887 break;
3888 case 5: // tba
3889 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3890 break;
3891 case 6: // pstate
3892 {
3893 TCGv r_tmp = tcg_temp_local_new();
3894
3895 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3896 save_state(dc, cpu_cond);
3897 gen_helper_wrpstate(cpu_env, r_tmp);
3898 tcg_temp_free(r_tmp);
3899 dc->npc = DYNAMIC_PC;
3900 }
3901 break;
3902 case 7: // tl
3903 {
3904 TCGv r_tmp = tcg_temp_local_new();
3905
3906 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3907 save_state(dc, cpu_cond);
3908 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3909 tcg_temp_free(r_tmp);
3910 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3911 offsetof(CPUSPARCState, tl));
3912 dc->npc = DYNAMIC_PC;
3913 }
3914 break;
3915 case 8: // pil
3916 gen_helper_wrpil(cpu_env, cpu_tmp0);
3917 break;
3918 case 9: // cwp
3919 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3920 break;
3921 case 10: // cansave
3922 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3923 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3924 offsetof(CPUSPARCState,
3925 cansave));
3926 break;
3927 case 11: // canrestore
3928 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3929 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3930 offsetof(CPUSPARCState,
3931 canrestore));
3932 break;
3933 case 12: // cleanwin
3934 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3935 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3936 offsetof(CPUSPARCState,
3937 cleanwin));
3938 break;
3939 case 13: // otherwin
3940 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3941 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3942 offsetof(CPUSPARCState,
3943 otherwin));
3944 break;
3945 case 14: // wstate
3946 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3947 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3948 offsetof(CPUSPARCState,
3949 wstate));
3950 break;
3951 case 16: // UA2005 gl
3952 CHECK_IU_FEATURE(dc, GL);
3953 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3954 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3955 offsetof(CPUSPARCState, gl));
3956 break;
3957 case 26: // UA2005 strand status
3958 CHECK_IU_FEATURE(dc, HYPV);
3959 if (!hypervisor(dc))
3960 goto priv_insn;
3961 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3962 break;
3963 default:
3964 goto illegal_insn;
3965 }
3966 #else
3967 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3968 if (dc->def->nwindows != 32)
3969 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3970 (1 << dc->def->nwindows) - 1);
3971 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3972 #endif
3973 }
3974 break;
3975 case 0x33: /* wrtbr, UA2005 wrhpr */
3976 {
3977 #ifndef TARGET_SPARC64
3978 if (!supervisor(dc))
3979 goto priv_insn;
3980 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3981 #else
3982 CHECK_IU_FEATURE(dc, HYPV);
3983 if (!hypervisor(dc))
3984 goto priv_insn;
3985 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3986 switch (rd) {
3987 case 0: // hpstate
3988 // XXX gen_op_wrhpstate();
3989 save_state(dc, cpu_cond);
3990 gen_op_next_insn();
3991 tcg_gen_exit_tb(0);
3992 dc->is_br = 1;
3993 break;
3994 case 1: // htstate
3995 // XXX gen_op_wrhtstate();
3996 break;
3997 case 3: // hintp
3998 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3999 break;
4000 case 5: // htba
4001 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4002 break;
4003 case 31: // hstick_cmpr
4004 {
4005 TCGv_ptr r_tickptr;
4006
4007 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4008 r_tickptr = tcg_temp_new_ptr();
4009 tcg_gen_ld_ptr(r_tickptr, cpu_env,
4010 offsetof(CPUSPARCState, hstick));
4011 gen_helper_tick_set_limit(r_tickptr,
4012 cpu_hstick_cmpr);
4013 tcg_temp_free_ptr(r_tickptr);
4014 }
4015 break;
4016 case 6: // hver readonly
4017 default:
4018 goto illegal_insn;
4019 }
4020 #endif
4021 }
4022 break;
4023 #endif
4024 #ifdef TARGET_SPARC64
4025 case 0x2c: /* V9 movcc */
4026 {
4027 int cc = GET_FIELD_SP(insn, 11, 12);
4028 int cond = GET_FIELD_SP(insn, 14, 17);
4029 TCGv r_cond;
4030 int l1;
4031
4032 r_cond = tcg_temp_new();
4033 if (insn & (1 << 18)) {
4034 if (cc == 0)
4035 gen_cond(r_cond, 0, cond, dc);
4036 else if (cc == 2)
4037 gen_cond(r_cond, 1, cond, dc);
4038 else
4039 goto illegal_insn;
4040 } else {
4041 gen_fcond(r_cond, cc, cond);
4042 }
4043
4044 l1 = gen_new_label();
4045
4046 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
4047 if (IS_IMM) { /* immediate */
4048 TCGv r_const;
4049
4050 simm = GET_FIELD_SPs(insn, 0, 10);
4051 r_const = tcg_const_tl(simm);
4052 gen_movl_TN_reg(rd, r_const);
4053 tcg_temp_free(r_const);
4054 } else {
4055 rs2 = GET_FIELD_SP(insn, 0, 4);
4056 gen_movl_reg_TN(rs2, cpu_tmp0);
4057 gen_movl_TN_reg(rd, cpu_tmp0);
4058 }
4059 gen_set_label(l1);
4060 tcg_temp_free(r_cond);
4061 break;
4062 }
4063 case 0x2d: /* V9 sdivx */
4064 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
4065 gen_movl_TN_reg(rd, cpu_dst);
4066 break;
4067 case 0x2e: /* V9 popc */
4068 {
4069 cpu_src2 = get_src2(insn, cpu_src2);
4070 gen_helper_popc(cpu_dst, cpu_src2);
4071 gen_movl_TN_reg(rd, cpu_dst);
4072 }
4073 case 0x2f: /* V9 movr */
4074 {
4075 int cond = GET_FIELD_SP(insn, 10, 12);
4076 int l1;
4077
4078 cpu_src1 = get_src1(insn, cpu_src1);
4079
4080 l1 = gen_new_label();
4081
4082 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
4083 cpu_src1, 0, l1);
4084 if (IS_IMM) { /* immediate */
4085 TCGv r_const;
4086
4087 simm = GET_FIELD_SPs(insn, 0, 9);
4088 r_const = tcg_const_tl(simm);
4089 gen_movl_TN_reg(rd, r_const);
4090 tcg_temp_free(r_const);
4091 } else {
4092 rs2 = GET_FIELD_SP(insn, 0, 4);
4093 gen_movl_reg_TN(rs2, cpu_tmp0);
4094 gen_movl_TN_reg(rd, cpu_tmp0);
4095 }
4096 gen_set_label(l1);
4097 break;
4098 }
4099 #endif
4100 default:
4101 goto illegal_insn;
4102 }
4103 }
4104 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4105 #ifdef TARGET_SPARC64
4106 int opf = GET_FIELD_SP(insn, 5, 13);
4107 rs1 = GET_FIELD(insn, 13, 17);
4108 rs2 = GET_FIELD(insn, 27, 31);
4109 if (gen_trap_ifnofpu(dc, cpu_cond))
4110 goto jmp_insn;
4111
4112 switch (opf) {
4113 case 0x000: /* VIS I edge8cc */
4114 CHECK_FPU_FEATURE(dc, VIS1);
4115 gen_movl_reg_TN(rs1, cpu_src1);
4116 gen_movl_reg_TN(rs2, cpu_src2);
4117 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4118 gen_movl_TN_reg(rd, cpu_dst);
4119 break;
4120 case 0x001: /* VIS II edge8n */
4121 CHECK_FPU_FEATURE(dc, VIS2);
4122 gen_movl_reg_TN(rs1, cpu_src1);
4123 gen_movl_reg_TN(rs2, cpu_src2);
4124 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4125 gen_movl_TN_reg(rd, cpu_dst);
4126 break;
4127 case 0x002: /* VIS I edge8lcc */
4128 CHECK_FPU_FEATURE(dc, VIS1);
4129 gen_movl_reg_TN(rs1, cpu_src1);
4130 gen_movl_reg_TN(rs2, cpu_src2);
4131 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4132 gen_movl_TN_reg(rd, cpu_dst);
4133 break;
4134 case 0x003: /* VIS II edge8ln */
4135 CHECK_FPU_FEATURE(dc, VIS2);
4136 gen_movl_reg_TN(rs1, cpu_src1);
4137 gen_movl_reg_TN(rs2, cpu_src2);
4138 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4139 gen_movl_TN_reg(rd, cpu_dst);
4140 break;
4141 case 0x004: /* VIS I edge16cc */
4142 CHECK_FPU_FEATURE(dc, VIS1);
4143 gen_movl_reg_TN(rs1, cpu_src1);
4144 gen_movl_reg_TN(rs2, cpu_src2);
4145 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4146 gen_movl_TN_reg(rd, cpu_dst);
4147 break;
4148 case 0x005: /* VIS II edge16n */
4149 CHECK_FPU_FEATURE(dc, VIS2);
4150 gen_movl_reg_TN(rs1, cpu_src1);
4151 gen_movl_reg_TN(rs2, cpu_src2);
4152 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4153 gen_movl_TN_reg(rd, cpu_dst);
4154 break;
4155 case 0x006: /* VIS I edge16lcc */
4156 CHECK_FPU_FEATURE(dc, VIS1);
4157 gen_movl_reg_TN(rs1, cpu_src1);
4158 gen_movl_reg_TN(rs2, cpu_src2);
4159 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4160 gen_movl_TN_reg(rd, cpu_dst);
4161 break;
4162 case 0x007: /* VIS II edge16ln */
4163 CHECK_FPU_FEATURE(dc, VIS2);
4164 gen_movl_reg_TN(rs1, cpu_src1);
4165 gen_movl_reg_TN(rs2, cpu_src2);
4166 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4167 gen_movl_TN_reg(rd, cpu_dst);
4168 break;
4169 case 0x008: /* VIS I edge32cc */
4170 CHECK_FPU_FEATURE(dc, VIS1);
4171 gen_movl_reg_TN(rs1, cpu_src1);
4172 gen_movl_reg_TN(rs2, cpu_src2);
4173 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4174 gen_movl_TN_reg(rd, cpu_dst);
4175 break;
4176 case 0x009: /* VIS II edge32n */
4177 CHECK_FPU_FEATURE(dc, VIS2);
4178 gen_movl_reg_TN(rs1, cpu_src1);
4179 gen_movl_reg_TN(rs2, cpu_src2);
4180 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4181 gen_movl_TN_reg(rd, cpu_dst);
4182 break;
4183 case 0x00a: /* VIS I edge32lcc */
4184 CHECK_FPU_FEATURE(dc, VIS1);
4185 gen_movl_reg_TN(rs1, cpu_src1);
4186 gen_movl_reg_TN(rs2, cpu_src2);
4187 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4188 gen_movl_TN_reg(rd, cpu_dst);
4189 break;
4190 case 0x00b: /* VIS II edge32ln */
4191 CHECK_FPU_FEATURE(dc, VIS2);
4192 gen_movl_reg_TN(rs1, cpu_src1);
4193 gen_movl_reg_TN(rs2, cpu_src2);
4194 gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4195 gen_movl_TN_reg(rd, cpu_dst);
4196 break;
4197 case 0x010: /* VIS I array8 */
4198 CHECK_FPU_FEATURE(dc, VIS1);
4199 cpu_src1 = get_src1(insn, cpu_src1);
4200 gen_movl_reg_TN(rs2, cpu_src2);
4201 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4202 gen_movl_TN_reg(rd, cpu_dst);
4203 break;
4204 case 0x012: /* VIS I array16 */
4205 CHECK_FPU_FEATURE(dc, VIS1);
4206 cpu_src1 = get_src1(insn, cpu_src1);
4207 gen_movl_reg_TN(rs2, cpu_src2);
4208 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4209 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4210 gen_movl_TN_reg(rd, cpu_dst);
4211 break;
4212 case 0x014: /* VIS I array32 */
4213 CHECK_FPU_FEATURE(dc, VIS1);
4214 cpu_src1 = get_src1(insn, cpu_src1);
4215 gen_movl_reg_TN(rs2, cpu_src2);
4216 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4217 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4218 gen_movl_TN_reg(rd, cpu_dst);
4219 break;
4220 case 0x018: /* VIS I alignaddr */
4221 CHECK_FPU_FEATURE(dc, VIS1);
4222 cpu_src1 = get_src1(insn, cpu_src1);
4223 gen_movl_reg_TN(rs2, cpu_src2);
4224 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4225 gen_movl_TN_reg(rd, cpu_dst);
4226 break;
4227 case 0x01a: /* VIS I alignaddrl */
4228 CHECK_FPU_FEATURE(dc, VIS1);
4229 cpu_src1 = get_src1(insn, cpu_src1);
4230 gen_movl_reg_TN(rs2, cpu_src2);
4231 gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4232 gen_movl_TN_reg(rd, cpu_dst);
4233 break;
4234 case 0x019: /* VIS II bmask */
4235 CHECK_FPU_FEATURE(dc, VIS2);
4236 cpu_src1 = get_src1(insn, cpu_src1);
4237 cpu_src2 = get_src1(insn, cpu_src2);
4238 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4239 tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4240 gen_movl_TN_reg(rd, cpu_dst);
4241 break;
4242 case 0x020: /* VIS I fcmple16 */
4243 CHECK_FPU_FEATURE(dc, VIS1);
4244 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4245 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4246 gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4247 gen_movl_TN_reg(rd, cpu_dst);
4248 break;
4249 case 0x022: /* VIS I fcmpne16 */
4250 CHECK_FPU_FEATURE(dc, VIS1);
4251 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4252 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4253 gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4254 gen_movl_TN_reg(rd, cpu_dst);
4255 break;
4256 case 0x024: /* VIS I fcmple32 */
4257 CHECK_FPU_FEATURE(dc, VIS1);
4258 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4259 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4260 gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4261 gen_movl_TN_reg(rd, cpu_dst);
4262 break;
4263 case 0x026: /* VIS I fcmpne32 */
4264 CHECK_FPU_FEATURE(dc, VIS1);
4265 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4266 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4267 gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4268 gen_movl_TN_reg(rd, cpu_dst);
4269 break;
4270 case 0x028: /* VIS I fcmpgt16 */
4271 CHECK_FPU_FEATURE(dc, VIS1);
4272 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4273 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4274 gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4275 gen_movl_TN_reg(rd, cpu_dst);
4276 break;
4277 case 0x02a: /* VIS I fcmpeq16 */
4278 CHECK_FPU_FEATURE(dc, VIS1);
4279 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4280 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4281 gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4282 gen_movl_TN_reg(rd, cpu_dst);
4283 break;
4284 case 0x02c: /* VIS I fcmpgt32 */
4285 CHECK_FPU_FEATURE(dc, VIS1);
4286 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4287 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4288 gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4289 gen_movl_TN_reg(rd, cpu_dst);
4290 break;
4291 case 0x02e: /* VIS I fcmpeq32 */
4292 CHECK_FPU_FEATURE(dc, VIS1);
4293 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4294 cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4295 gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4296 gen_movl_TN_reg(rd, cpu_dst);
4297 break;
4298 case 0x031: /* VIS I fmul8x16 */
4299 CHECK_FPU_FEATURE(dc, VIS1);
4300 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4301 break;
4302 case 0x033: /* VIS I fmul8x16au */
4303 CHECK_FPU_FEATURE(dc, VIS1);
4304 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4305 break;
4306 case 0x035: /* VIS I fmul8x16al */
4307 CHECK_FPU_FEATURE(dc, VIS1);
4308 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4309 break;
4310 case 0x036: /* VIS I fmul8sux16 */
4311 CHECK_FPU_FEATURE(dc, VIS1);
4312 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4313 break;
4314 case 0x037: /* VIS I fmul8ulx16 */
4315 CHECK_FPU_FEATURE(dc, VIS1);
4316 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4317 break;
4318 case 0x038: /* VIS I fmuld8sux16 */
4319 CHECK_FPU_FEATURE(dc, VIS1);
4320 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4321 break;
4322 case 0x039: /* VIS I fmuld8ulx16 */
4323 CHECK_FPU_FEATURE(dc, VIS1);
4324 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4325 break;
4326 case 0x03a: /* VIS I fpack32 */
4327 CHECK_FPU_FEATURE(dc, VIS1);
4328 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4329 break;
4330 case 0x03b: /* VIS I fpack16 */
4331 CHECK_FPU_FEATURE(dc, VIS1);
4332 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4333 cpu_dst_32 = gen_dest_fpr_F();
4334 gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4335 gen_store_fpr_F(dc, rd, cpu_dst_32);
4336 break;
4337 case 0x03d: /* VIS I fpackfix */
4338 CHECK_FPU_FEATURE(dc, VIS1);
4339 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4340 cpu_dst_32 = gen_dest_fpr_F();
4341 gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4342 gen_store_fpr_F(dc, rd, cpu_dst_32);
4343 break;
4344 case 0x03e: /* VIS I pdist */
4345 CHECK_FPU_FEATURE(dc, VIS1);
4346 gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4347 break;
4348 case 0x048: /* VIS I faligndata */
4349 CHECK_FPU_FEATURE(dc, VIS1);
4350 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4351 break;
4352 case 0x04b: /* VIS I fpmerge */
4353 CHECK_FPU_FEATURE(dc, VIS1);
4354 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4355 break;
4356 case 0x04c: /* VIS II bshuffle */
4357 CHECK_FPU_FEATURE(dc, VIS2);
4358 gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4359 break;
4360 case 0x04d: /* VIS I fexpand */
4361 CHECK_FPU_FEATURE(dc, VIS1);
4362 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4363 break;
4364 case 0x050: /* VIS I fpadd16 */
4365 CHECK_FPU_FEATURE(dc, VIS1);
4366 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4367 break;
4368 case 0x051: /* VIS I fpadd16s */
4369 CHECK_FPU_FEATURE(dc, VIS1);
4370 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4371 break;
4372 case 0x052: /* VIS I fpadd32 */
4373 CHECK_FPU_FEATURE(dc, VIS1);
4374 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4375 break;
4376 case 0x053: /* VIS I fpadd32s */
4377 CHECK_FPU_FEATURE(dc, VIS1);
4378 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4379 break;
4380 case 0x054: /* VIS I fpsub16 */
4381 CHECK_FPU_FEATURE(dc, VIS1);
4382 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4383 break;
4384 case 0x055: /* VIS I fpsub16s */
4385 CHECK_FPU_FEATURE(dc, VIS1);
4386 gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4387 break;
4388 case 0x056: /* VIS I fpsub32 */
4389 CHECK_FPU_FEATURE(dc, VIS1);
4390 gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4391 break;
4392 case 0x057: /* VIS I fpsub32s */
4393 CHECK_FPU_FEATURE(dc, VIS1);
4394 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4395 break;
4396 case 0x060: /* VIS I fzero */
4397 CHECK_FPU_FEATURE(dc, VIS1);
4398 cpu_dst_64 = gen_dest_fpr_D();
4399 tcg_gen_movi_i64(cpu_dst_64, 0);
4400 gen_store_fpr_D(dc, rd, cpu_dst_64);
4401 break;
4402 case 0x061: /* VIS I fzeros */
4403 CHECK_FPU_FEATURE(dc, VIS1);
4404 cpu_dst_32 = gen_dest_fpr_F();
4405 tcg_gen_movi_i32(cpu_dst_32, 0);
4406 gen_store_fpr_F(dc, rd, cpu_dst_32);
4407 break;
4408 case 0x062: /* VIS I fnor */
4409 CHECK_FPU_FEATURE(dc, VIS1);
4410 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4411 break;
4412 case 0x063: /* VIS I fnors */
4413 CHECK_FPU_FEATURE(dc, VIS1);
4414 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4415 break;
4416 case 0x064: /* VIS I fandnot2 */
4417 CHECK_FPU_FEATURE(dc, VIS1);
4418 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4419 break;
4420 case 0x065: /* VIS I fandnot2s */
4421 CHECK_FPU_FEATURE(dc, VIS1);
4422 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4423 break;
4424 case 0x066: /* VIS I fnot2 */
4425 CHECK_FPU_FEATURE(dc, VIS1);
4426 gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4427 break;
4428 case 0x067: /* VIS I fnot2s */
4429 CHECK_FPU_FEATURE(dc, VIS1);
4430 gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4431 break;
4432 case 0x068: /* VIS I fandnot1 */
4433 CHECK_FPU_FEATURE(dc, VIS1);
4434 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4435 break;
4436 case 0x069: /* VIS I fandnot1s */
4437 CHECK_FPU_FEATURE(dc, VIS1);
4438 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4439 break;
4440 case 0x06a: /* VIS I fnot1 */
4441 CHECK_FPU_FEATURE(dc, VIS1);
4442 gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4443 break;
4444 case 0x06b: /* VIS I fnot1s */
4445 CHECK_FPU_FEATURE(dc, VIS1);
4446 gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4447 break;
4448 case 0x06c: /* VIS I fxor */
4449 CHECK_FPU_FEATURE(dc, VIS1);
4450 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4451 break;
4452 case 0x06d: /* VIS I fxors */
4453 CHECK_FPU_FEATURE(dc, VIS1);
4454 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4455 break;
4456 case 0x06e: /* VIS I fnand */
4457 CHECK_FPU_FEATURE(dc, VIS1);
4458 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4459 break;
4460 case 0x06f: /* VIS I fnands */
4461 CHECK_FPU_FEATURE(dc, VIS1);
4462 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4463 break;
4464 case 0x070: /* VIS I fand */
4465 CHECK_FPU_FEATURE(dc, VIS1);
4466 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4467 break;
4468 case 0x071: /* VIS I fands */
4469 CHECK_FPU_FEATURE(dc, VIS1);
4470 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4471 break;
4472 case 0x072: /* VIS I fxnor */
4473 CHECK_FPU_FEATURE(dc, VIS1);
4474 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4475 break;
4476 case 0x073: /* VIS I fxnors */
4477 CHECK_FPU_FEATURE(dc, VIS1);
4478 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4479 break;
4480 case 0x074: /* VIS I fsrc1 */
4481 CHECK_FPU_FEATURE(dc, VIS1);
4482 cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4483 gen_store_fpr_D(dc, rd, cpu_src1_64);
4484 break;
4485 case 0x075: /* VIS I fsrc1s */
4486 CHECK_FPU_FEATURE(dc, VIS1);
4487 cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4488 gen_store_fpr_F(dc, rd, cpu_src1_32);
4489 break;
4490 case 0x076: /* VIS I fornot2 */
4491 CHECK_FPU_FEATURE(dc, VIS1);
4492 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4493 break;
4494 case 0x077: /* VIS I fornot2s */
4495 CHECK_FPU_FEATURE(dc, VIS1);
4496 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4497 break;
4498 case 0x078: /* VIS I fsrc2 */
4499 CHECK_FPU_FEATURE(dc, VIS1);
4500 cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4501 gen_store_fpr_D(dc, rd, cpu_src1_64);
4502 break;
4503 case 0x079: /* VIS I fsrc2s */
4504 CHECK_FPU_FEATURE(dc, VIS1);
4505 cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4506 gen_store_fpr_F(dc, rd, cpu_src1_32);
4507 break;
4508 case 0x07a: /* VIS I fornot1 */
4509 CHECK_FPU_FEATURE(dc, VIS1);
4510 gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4511 break;
4512 case 0x07b: /* VIS I fornot1s */
4513 CHECK_FPU_FEATURE(dc, VIS1);
4514 gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4515 break;
4516 case 0x07c: /* VIS I for */
4517 CHECK_FPU_FEATURE(dc, VIS1);
4518 gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4519 break;
4520 case 0x07d: /* VIS I fors */
4521 CHECK_FPU_FEATURE(dc, VIS1);
4522 gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4523 break;
4524 case 0x07e: /* VIS I fone */
4525 CHECK_FPU_FEATURE(dc, VIS1);
4526 cpu_dst_64 = gen_dest_fpr_D();
4527 tcg_gen_movi_i64(cpu_dst_64, -1);
4528 gen_store_fpr_D(dc, rd, cpu_dst_64);
4529 break;
4530 case 0x07f: /* VIS I fones */
4531 CHECK_FPU_FEATURE(dc, VIS1);
4532 cpu_dst_32 = gen_dest_fpr_F();
4533 tcg_gen_movi_i32(cpu_dst_32, -1);
4534 gen_store_fpr_F(dc, rd, cpu_dst_32);
4535 break;
4536 case 0x080: /* VIS I shutdown */
4537 case 0x081: /* VIS II siam */
4538 // XXX
4539 goto illegal_insn;
4540 default:
4541 goto illegal_insn;
4542 }
4543 #else
4544 goto ncp_insn;
4545 #endif
4546 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4547 #ifdef TARGET_SPARC64
4548 goto illegal_insn;
4549 #else
4550 goto ncp_insn;
4551 #endif
4552 #ifdef TARGET_SPARC64
4553 } else if (xop == 0x39) { /* V9 return */
4554 TCGv_i32 r_const;
4555
4556 save_state(dc, cpu_cond);
4557 cpu_src1 = get_src1(insn, cpu_src1);
4558 if (IS_IMM) { /* immediate */
4559 simm = GET_FIELDs(insn, 19, 31);
4560 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4561 } else { /* register */
4562 rs2 = GET_FIELD(insn, 27, 31);
4563 if (rs2) {
4564 gen_movl_reg_TN(rs2, cpu_src2);
4565 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4566 } else
4567 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4568 }
4569 gen_helper_restore(cpu_env);
4570 gen_mov_pc_npc(dc, cpu_cond);
4571 r_const = tcg_const_i32(3);
4572 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4573 tcg_temp_free_i32(r_const);
4574 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4575 dc->npc = DYNAMIC_PC;
4576 goto jmp_insn;
4577 #endif
4578 } else {
4579 cpu_src1 = get_src1(insn, cpu_src1);
4580 if (IS_IMM) { /* immediate */
4581 simm = GET_FIELDs(insn, 19, 31);
4582 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4583 } else { /* register */
4584 rs2 = GET_FIELD(insn, 27, 31);
4585 if (rs2) {
4586 gen_movl_reg_TN(rs2, cpu_src2);
4587 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4588 } else
4589 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4590 }
4591 switch (xop) {
4592 case 0x38: /* jmpl */
4593 {
4594 TCGv r_pc;
4595 TCGv_i32 r_const;
4596
4597 r_pc = tcg_const_tl(dc->pc);
4598 gen_movl_TN_reg(rd, r_pc);
4599 tcg_temp_free(r_pc);
4600 gen_mov_pc_npc(dc, cpu_cond);
4601 r_const = tcg_const_i32(3);
4602 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4603 tcg_temp_free_i32(r_const);
4604 gen_address_mask(dc, cpu_dst);
4605 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4606 dc->npc = DYNAMIC_PC;
4607 }
4608 goto jmp_insn;
4609 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4610 case 0x39: /* rett, V9 return */
4611 {
4612 TCGv_i32 r_const;
4613
4614 if (!supervisor(dc))
4615 goto priv_insn;
4616 gen_mov_pc_npc(dc, cpu_cond);
4617 r_const = tcg_const_i32(3);
4618 gen_helper_check_align(cpu_env, cpu_dst, r_const);
4619 tcg_temp_free_i32(r_const);
4620 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4621 dc->npc = DYNAMIC_PC;
4622 gen_helper_rett(cpu_env);
4623 }
4624 goto jmp_insn;
4625 #endif
4626 case 0x3b: /* flush */
4627 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4628 goto unimp_flush;
4629 /* nop */
4630 break;
4631 case 0x3c: /* save */
4632 save_state(dc, cpu_cond);
4633 gen_helper_save(cpu_env);
4634 gen_movl_TN_reg(rd, cpu_dst);
4635 break;
4636 case 0x3d: /* restore */
4637 save_state(dc, cpu_cond);
4638 gen_helper_restore(cpu_env);
4639 gen_movl_TN_reg(rd, cpu_dst);
4640 break;
4641 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4642 case 0x3e: /* V9 done/retry */
4643 {
4644 switch (rd) {
4645 case 0:
4646 if (!supervisor(dc))
4647 goto priv_insn;
4648 dc->npc = DYNAMIC_PC;
4649 dc->pc = DYNAMIC_PC;
4650 gen_helper_done(cpu_env);
4651 goto jmp_insn;
4652 case 1:
4653 if (!supervisor(dc))
4654 goto priv_insn;
4655 dc->npc = DYNAMIC_PC;
4656 dc->pc = DYNAMIC_PC;
4657 gen_helper_retry(cpu_env);
4658 goto jmp_insn;
4659 default:
4660 goto illegal_insn;
4661 }
4662 }
4663 break;
4664 #endif
4665 default:
4666 goto illegal_insn;
4667 }
4668 }
4669 break;
4670 }
4671 break;
4672 case 3: /* load/store instructions */
4673 {
4674 unsigned int xop = GET_FIELD(insn, 7, 12);
4675
4676 /* flush pending conditional evaluations before exposing
4677 cpu state */
4678 if (dc->cc_op != CC_OP_FLAGS) {
4679 dc->cc_op = CC_OP_FLAGS;
4680 gen_helper_compute_psr(cpu_env);
4681 }
4682 cpu_src1 = get_src1(insn, cpu_src1);
4683 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4684 rs2 = GET_FIELD(insn, 27, 31);
4685 gen_movl_reg_TN(rs2, cpu_src2);
4686 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4687 } else if (IS_IMM) { /* immediate */
4688 simm = GET_FIELDs(insn, 19, 31);
4689 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4690 } else { /* register */
4691 rs2 = GET_FIELD(insn, 27, 31);
4692 if (rs2 != 0) {
4693 gen_movl_reg_TN(rs2, cpu_src2);
4694 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4695 } else
4696 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4697 }
4698 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4699 (xop > 0x17 && xop <= 0x1d ) ||
4700 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4701 switch (xop) {
4702 case 0x0: /* ld, V9 lduw, load unsigned word */
4703 gen_address_mask(dc, cpu_addr);
4704 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4705 break;
4706 case 0x1: /* ldub, load unsigned byte */
4707 gen_address_mask(dc, cpu_addr);
4708 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4709 break;
4710 case 0x2: /* lduh, load unsigned halfword */
4711 gen_address_mask(dc, cpu_addr);
4712 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4713 break;
4714 case 0x3: /* ldd, load double word */
4715 if (rd & 1)
4716 goto illegal_insn;
4717 else {
4718 TCGv_i32 r_const;
4719
4720 save_state(dc, cpu_cond);
4721 r_const = tcg_const_i32(7);
4722 /* XXX remove alignment check */
4723 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4724 tcg_temp_free_i32(r_const);
4725 gen_address_mask(dc, cpu_addr);
4726 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4727 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4728 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4729 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4730 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4731 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4732 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4733 }
4734 break;
4735 case 0x9: /* ldsb, load signed byte */
4736 gen_address_mask(dc, cpu_addr);
4737 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4738 break;
4739 case 0xa: /* ldsh, load signed halfword */
4740 gen_address_mask(dc, cpu_addr);
4741 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4742 break;
4743 case 0xd: /* ldstub -- XXX: should be atomically */
4744 {
4745 TCGv r_const;
4746
4747 gen_address_mask(dc, cpu_addr);
4748 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4749 r_const = tcg_const_tl(0xff);
4750 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4751 tcg_temp_free(r_const);
4752 }
4753 break;
4754 case 0x0f: /* swap, swap register with memory. Also
4755 atomically */
4756 CHECK_IU_FEATURE(dc, SWAP);
4757 gen_movl_reg_TN(rd, cpu_val);
4758 gen_address_mask(dc, cpu_addr);
4759 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4760 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4761 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4762 break;
4763 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4764 case 0x10: /* lda, V9 lduwa, load word alternate */
4765 #ifndef TARGET_SPARC64
4766 if (IS_IMM)
4767 goto illegal_insn;
4768 if (!supervisor(dc))
4769 goto priv_insn;
4770 #endif
4771 save_state(dc, cpu_cond);
4772 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4773 break;
4774 case 0x11: /* lduba, load unsigned byte alternate */
4775 #ifndef TARGET_SPARC64
4776 if (IS_IMM)
4777 goto illegal_insn;
4778 if (!supervisor(dc))
4779 goto priv_insn;
4780 #endif
4781 save_state(dc, cpu_cond);
4782 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4783 break;
4784 case 0x12: /* lduha, load unsigned halfword alternate */
4785 #ifndef TARGET_SPARC64
4786 if (IS_IMM)
4787 goto illegal_insn;
4788 if (!supervisor(dc))
4789 goto priv_insn;
4790 #endif
4791 save_state(dc, cpu_cond);
4792 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4793 break;
4794 case 0x13: /* ldda, load double word alternate */
4795 #ifndef TARGET_SPARC64
4796 if (IS_IMM)
4797 goto illegal_insn;
4798 if (!supervisor(dc))
4799 goto priv_insn;
4800 #endif
4801 if (rd & 1)
4802 goto illegal_insn;
4803 save_state(dc, cpu_cond);
4804 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4805 goto skip_move;
4806 case 0x19: /* ldsba, load signed byte alternate */
4807 #ifndef TARGET_SPARC64
4808 if (IS_IMM)
4809 goto illegal_insn;
4810 if (!supervisor(dc))
4811 goto priv_insn;
4812 #endif
4813 save_state(dc, cpu_cond);
4814 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4815 break;
4816 case 0x1a: /* ldsha, load signed halfword alternate */
4817 #ifndef TARGET_SPARC64
4818 if (IS_IMM)
4819 goto illegal_insn;
4820 if (!supervisor(dc))
4821 goto priv_insn;
4822 #endif
4823 save_state(dc, cpu_cond);
4824 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4825 break;
4826 case 0x1d: /* ldstuba -- XXX: should be atomically */
4827 #ifndef TARGET_SPARC64
4828 if (IS_IMM)
4829 goto illegal_insn;
4830 if (!supervisor(dc))
4831 goto priv_insn;
4832 #endif
4833 save_state(dc, cpu_cond);
4834 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4835 break;
4836 case 0x1f: /* swapa, swap reg with alt. memory. Also
4837 atomically */
4838 CHECK_IU_FEATURE(dc, SWAP);
4839 #ifndef TARGET_SPARC64
4840 if (IS_IMM)
4841 goto illegal_insn;
4842 if (!supervisor(dc))
4843 goto priv_insn;
4844 #endif
4845 save_state(dc, cpu_cond);
4846 gen_movl_reg_TN(rd, cpu_val);
4847 gen_swap_asi(cpu_val, cpu_addr, insn);
4848 break;
4849
4850 #ifndef TARGET_SPARC64
4851 case 0x30: /* ldc */
4852 case 0x31: /* ldcsr */
4853 case 0x33: /* lddc */
4854 goto ncp_insn;
4855 #endif
4856 #endif
4857 #ifdef TARGET_SPARC64
4858 case 0x08: /* V9 ldsw */
4859 gen_address_mask(dc, cpu_addr);
4860 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4861 break;
4862 case 0x0b: /* V9 ldx */
4863 gen_address_mask(dc, cpu_addr);
4864 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4865 break;
4866 case 0x18: /* V9 ldswa */
4867 save_state(dc, cpu_cond);
4868 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4869 break;
4870 case 0x1b: /* V9 ldxa */
4871 save_state(dc, cpu_cond);
4872 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4873 break;
4874 case 0x2d: /* V9 prefetch, no effect */
4875 goto skip_move;
4876 case 0x30: /* V9 ldfa */
4877 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4878 goto jmp_insn;
4879 }
4880 save_state(dc, cpu_cond);
4881 gen_ldf_asi(cpu_addr, insn, 4, rd);
4882 gen_update_fprs_dirty(rd);
4883 goto skip_move;
4884 case 0x33: /* V9 lddfa */
4885 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4886 goto jmp_insn;
4887 }
4888 save_state(dc, cpu_cond);
4889 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4890 gen_update_fprs_dirty(DFPREG(rd));
4891 goto skip_move;
4892 case 0x3d: /* V9 prefetcha, no effect */
4893 goto skip_move;
4894 case 0x32: /* V9 ldqfa */
4895 CHECK_FPU_FEATURE(dc, FLOAT128);
4896 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4897 goto jmp_insn;
4898 }
4899 save_state(dc, cpu_cond);
4900 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4901 gen_update_fprs_dirty(QFPREG(rd));
4902 goto skip_move;
4903 #endif
4904 default:
4905 goto illegal_insn;
4906 }
4907 gen_movl_TN_reg(rd, cpu_val);
4908 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4909 skip_move: ;
4910 #endif
4911 } else if (xop >= 0x20 && xop < 0x24) {
4912 if (gen_trap_ifnofpu(dc, cpu_cond))
4913 goto jmp_insn;
4914 save_state(dc, cpu_cond);
4915 switch (xop) {
4916 case 0x20: /* ldf, load fpreg */
4917 gen_address_mask(dc, cpu_addr);
4918 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4919 cpu_dst_32 = gen_dest_fpr_F();
4920 tcg_gen_trunc_tl_i32(cpu_dst_32, cpu_tmp0);
4921 gen_store_fpr_F(dc, rd, cpu_dst_32);
4922 break;
4923 case 0x21: /* ldfsr, V9 ldxfsr */
4924 #ifdef TARGET_SPARC64
4925 gen_address_mask(dc, cpu_addr);
4926 if (rd == 1) {
4927 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4928 gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4929 } else {
4930 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4931 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4932 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4933 }
4934 #else
4935 {
4936 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4937 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4938 }
4939 #endif
4940 break;
4941 case 0x22: /* ldqf, load quad fpreg */
4942 {
4943 TCGv_i32 r_const;
4944
4945 CHECK_FPU_FEATURE(dc, FLOAT128);
4946 r_const = tcg_const_i32(dc->mem_idx);
4947 gen_address_mask(dc, cpu_addr);
4948 gen_helper_ldqf(cpu_env, cpu_addr, r_const);
4949 tcg_temp_free_i32(r_const);
4950 gen_op_store_QT0_fpr(QFPREG(rd));
4951 gen_update_fprs_dirty(QFPREG(rd));
4952 }
4953 break;
4954 case 0x23: /* lddf, load double fpreg */
4955 gen_address_mask(dc, cpu_addr);
4956 cpu_dst_64 = gen_dest_fpr_D();
4957 tcg_gen_qemu_ld64(cpu_dst_64, cpu_addr, dc->mem_idx);
4958 gen_store_fpr_D(dc, rd, cpu_dst_64);
4959 break;
4960 default:
4961 goto illegal_insn;
4962 }
4963 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4964 xop == 0xe || xop == 0x1e) {
4965 gen_movl_reg_TN(rd, cpu_val);
4966 switch (xop) {
4967 case 0x4: /* st, store word */
4968 gen_address_mask(dc, cpu_addr);
4969 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4970 break;
4971 case 0x5: /* stb, store byte */
4972 gen_address_mask(dc, cpu_addr);
4973 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4974 break;
4975 case 0x6: /* sth, store halfword */
4976 gen_address_mask(dc, cpu_addr);
4977 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4978 break;
4979 case 0x7: /* std, store double word */
4980 if (rd & 1)
4981 goto illegal_insn;
4982 else {
4983 TCGv_i32 r_const;
4984
4985 save_state(dc, cpu_cond);
4986 gen_address_mask(dc, cpu_addr);
4987 r_const = tcg_const_i32(7);
4988 /* XXX remove alignment check */
4989 gen_helper_check_align(cpu_env, cpu_addr, r_const);
4990 tcg_temp_free_i32(r_const);
4991 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4992 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4993 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4994 }
4995 break;
4996 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4997 case 0x14: /* sta, V9 stwa, store word alternate */
4998 #ifndef TARGET_SPARC64
4999 if (IS_IMM)
5000 goto illegal_insn;
5001 if (!supervisor(dc))
5002 goto priv_insn;
5003 #endif
5004 save_state(dc, cpu_cond);
5005 gen_st_asi(cpu_val, cpu_addr, insn, 4);
5006 dc->npc = DYNAMIC_PC;
5007 break;
5008 case 0x15: /* stba, store byte alternate */
5009 #ifndef TARGET_SPARC64
5010 if (IS_IMM)
5011 goto illegal_insn;
5012 if (!supervisor(dc))
5013 goto priv_insn;
5014 #endif
5015 save_state(dc, cpu_cond);
5016 gen_st_asi(cpu_val, cpu_addr, insn, 1);
5017 dc->npc = DYNAMIC_PC;
5018 break;
5019 case 0x16: /* stha, store halfword alternate */
5020 #ifndef TARGET_SPARC64
5021 if (IS_IMM)
5022 goto illegal_insn;
5023 if (!supervisor(dc))
5024 goto priv_insn;
5025 #endif
5026 save_state(dc, cpu_cond);
5027 gen_st_asi(cpu_val, cpu_addr, insn, 2);
5028 dc->npc = DYNAMIC_PC;
5029 break;
5030 case 0x17: /* stda, store double word alternate */
5031 #ifndef TARGET_SPARC64
5032 if (IS_IMM)
5033 goto illegal_insn;
5034 if (!supervisor(dc))
5035 goto priv_insn;
5036 #endif
5037 if (rd & 1)
5038 goto illegal_insn;
5039 else {
5040 save_state(dc, cpu_cond);
5041 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
5042 }
5043 break;
5044 #endif
5045 #ifdef TARGET_SPARC64
5046 case 0x0e: /* V9 stx */
5047 gen_address_mask(dc, cpu_addr);
5048 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5049 break;
5050 case 0x1e: /* V9 stxa */
5051 save_state(dc, cpu_cond);
5052 gen_st_asi(cpu_val, cpu_addr, insn, 8);
5053 dc->npc = DYNAMIC_PC;
5054 break;
5055 #endif
5056 default:
5057 goto illegal_insn;
5058 }
5059 } else if (xop > 0x23 && xop < 0x28) {
5060 if (gen_trap_ifnofpu(dc, cpu_cond))
5061 goto jmp_insn;
5062 save_state(dc, cpu_cond);
5063 switch (xop) {
5064 case 0x24: /* stf, store fpreg */
5065 gen_address_mask(dc, cpu_addr);
5066 cpu_src1_32 = gen_load_fpr_F(dc, rd);
5067 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_src1_32);
5068 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
5069 break;
5070 case 0x25: /* stfsr, V9 stxfsr */
5071 #ifdef TARGET_SPARC64
5072 gen_address_mask(dc, cpu_addr);
5073 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUSPARCState, fsr));
5074 if (rd == 1)
5075 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
5076 else
5077 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
5078 #else
5079 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fsr));
5080 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
5081 #endif
5082 break;
5083 case 0x26:
5084 #ifdef TARGET_SPARC64
5085 /* V9 stqf, store quad fpreg */
5086 {
5087 TCGv_i32 r_const;
5088
5089 CHECK_FPU_FEATURE(dc, FLOAT128);
5090 gen_op_load_fpr_QT0(QFPREG(rd));
5091 r_const = tcg_const_i32(dc->mem_idx);
5092 gen_address_mask(dc, cpu_addr);
5093 gen_helper_stqf(cpu_env, cpu_addr, r_const);
5094 tcg_temp_free_i32(r_const);
5095 }
5096 break;
5097 #else /* !TARGET_SPARC64 */
5098 /* stdfq, store floating point queue */
5099 #if defined(CONFIG_USER_ONLY)
5100 goto illegal_insn;
5101 #else
5102 if (!supervisor(dc))
5103 goto priv_insn;
5104 if (gen_trap_ifnofpu(dc, cpu_cond))
5105 goto jmp_insn;
5106 goto nfq_insn;
5107 #endif
5108 #endif
5109 case 0x27: /* stdf, store double fpreg */
5110 gen_address_mask(dc, cpu_addr);
5111 cpu_src1_64 = gen_load_fpr_D(dc, rd);
5112 tcg_gen_qemu_st64(cpu_src1_64, cpu_addr, dc->mem_idx);
5113 break;
5114 default:
5115 goto illegal_insn;
5116 }
5117 } else if (xop > 0x33 && xop < 0x3f) {
5118 save_state(dc, cpu_cond);
5119 switch (xop) {
5120 #ifdef TARGET_SPARC64
5121 case 0x34: /* V9 stfa */
5122 if (gen_trap_ifnofpu(dc, cpu_cond)) {
5123 goto jmp_insn;
5124 }
5125 gen_stf_asi(cpu_addr, insn, 4, rd);
5126 break;
5127 case 0x36: /* V9 stqfa */
5128 {
5129 TCGv_i32 r_const;
5130
5131 CHECK_FPU_FEATURE(dc, FLOAT128);
5132 if (gen_trap_ifnofpu(dc, cpu_cond)) {
5133 goto jmp_insn;
5134 }
5135 r_const = tcg_const_i32(7);
5136 gen_helper_check_align(cpu_env, cpu_addr, r_const);
5137 tcg_temp_free_i32(r_const);
5138 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
5139 }
5140 break;
5141 case 0x37: /* V9 stdfa */
5142 if (gen_trap_ifnofpu(dc, cpu_cond)) {
5143 goto jmp_insn;
5144 }
5145 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
5146 break;
5147 case 0x3c: /* V9 casa */
5148 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5149 gen_movl_TN_reg(rd, cpu_val);
5150 break;
5151 case 0x3e: /* V9 casxa */
5152 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
5153 gen_movl_TN_reg(rd, cpu_val);
5154 break;
5155 #else
5156 case 0x34: /* stc */
5157 case 0x35: /* stcsr */
5158 case 0x36: /* stdcq */
5159 case 0x37: /* stdc */
5160 goto ncp_insn;
5161 #endif
5162 default:
5163 goto illegal_insn;
5164 }
5165 } else
5166 goto illegal_insn;
5167 }
5168 break;
5169 }
5170 /* default case for non jump instructions */
5171 if (dc->npc == DYNAMIC_PC) {
5172 dc->pc = DYNAMIC_PC;
5173 gen_op_next_insn();
5174 } else if (dc->npc == JUMP_PC) {
5175 /* we can do a static jump */
5176 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5177 dc->is_br = 1;
5178 } else {
5179 dc->pc = dc->npc;
5180 dc->npc = dc->npc + 4;
5181 }
5182 jmp_insn:
5183 goto egress;
5184 illegal_insn:
5185 {
5186 TCGv_i32 r_const;
5187
5188 save_state(dc, cpu_cond);
5189 r_const = tcg_const_i32(TT_ILL_INSN);
5190 gen_helper_raise_exception(cpu_env, r_const);
5191 tcg_temp_free_i32(r_const);
5192 dc->is_br = 1;
5193 }
5194 goto egress;
5195 unimp_flush:
5196 {
5197 TCGv_i32 r_const;
5198
5199 save_state(dc, cpu_cond);
5200 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
5201 gen_helper_raise_exception(cpu_env, r_const);
5202 tcg_temp_free_i32(r_const);
5203 dc->is_br = 1;
5204 }
5205 goto egress;
5206 #if !defined(CONFIG_USER_ONLY)
5207 priv_insn:
5208 {
5209 TCGv_i32 r_const;
5210
5211 save_state(dc, cpu_cond);
5212 r_const = tcg_const_i32(TT_PRIV_INSN);
5213 gen_helper_raise_exception(cpu_env, r_const);
5214 tcg_temp_free_i32(r_const);
5215 dc->is_br = 1;
5216 }
5217 goto egress;
5218 #endif
5219 nfpu_insn:
5220 save_state(dc, cpu_cond);
5221 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
5222 dc->is_br = 1;
5223 goto egress;
5224 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5225 nfq_insn:
5226 save_state(dc, cpu_cond);
5227 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
5228 dc->is_br = 1;
5229 goto egress;
5230 #endif
5231 #ifndef TARGET_SPARC64
5232 ncp_insn:
5233 {
5234 TCGv r_const;
5235
5236 save_state(dc, cpu_cond);
5237 r_const = tcg_const_i32(TT_NCP_INSN);
5238 gen_helper_raise_exception(cpu_env, r_const);
5239 tcg_temp_free(r_const);
5240 dc->is_br = 1;
5241 }
5242 goto egress;
5243 #endif
5244 egress:
5245 tcg_temp_free(cpu_tmp1);
5246 tcg_temp_free(cpu_tmp2);
5247 if (dc->n_t32 != 0) {
5248 int i;
5249 for (i = dc->n_t32 - 1; i >= 0; --i) {
5250 tcg_temp_free_i32(dc->t32[i]);
5251 }
5252 dc->n_t32 = 0;
5253 }
5254 }
5255
5256 static inline void gen_intermediate_code_internal(TranslationBlock * tb,
5257 int spc, CPUSPARCState *env)
5258 {
5259 target_ulong pc_start, last_pc;
5260 uint16_t *gen_opc_end;
5261 DisasContext dc1, *dc = &dc1;
5262 CPUBreakpoint *bp;
5263 int j, lj = -1;
5264 int num_insns;
5265 int max_insns;
5266 unsigned int insn;
5267
5268 memset(dc, 0, sizeof(DisasContext));
5269 dc->tb = tb;
5270 pc_start = tb->pc;
5271 dc->pc = pc_start;
5272 last_pc = dc->pc;
5273 dc->npc = (target_ulong) tb->cs_base;
5274 dc->cc_op = CC_OP_DYNAMIC;
5275 dc->mem_idx = cpu_mmu_index(env);
5276 dc->def = env->def;
5277 dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5278 dc->address_mask_32bit = tb_am_enabled(tb->flags);
5279 dc->singlestep = (env->singlestep_enabled || singlestep);
5280 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
5281
5282 cpu_tmp0 = tcg_temp_new();
5283 cpu_tmp32 = tcg_temp_new_i32();
5284 cpu_tmp64 = tcg_temp_new_i64();
5285
5286 cpu_dst = tcg_temp_local_new();
5287
5288 // loads and stores
5289 cpu_val = tcg_temp_local_new();
5290 cpu_addr = tcg_temp_local_new();
5291
5292 num_insns = 0;
5293 max_insns = tb->cflags & CF_COUNT_MASK;
5294 if (max_insns == 0)
5295 max_insns = CF_COUNT_MASK;
5296 gen_icount_start();
5297 do {
5298 if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
5299 QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
5300 if (bp->pc == dc->pc) {
5301 if (dc->pc != pc_start)
5302 save_state(dc, cpu_cond);
5303 gen_helper_debug(cpu_env);
5304 tcg_gen_exit_tb(0);
5305 dc->is_br = 1;
5306 goto exit_gen_loop;
5307 }
5308 }
5309 }
5310 if (spc) {
5311 qemu_log("Search PC...\n");
5312 j = gen_opc_ptr - gen_opc_buf;
5313 if (lj < j) {
5314 lj++;
5315 while (lj < j)
5316 gen_opc_instr_start[lj++] = 0;
5317 gen_opc_pc[lj] = dc->pc;
5318 gen_opc_npc[lj] = dc->npc;
5319 gen_opc_instr_start[lj] = 1;
5320 gen_opc_icount[lj] = num_insns;
5321 }
5322 }
5323 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
5324 gen_io_start();
5325 last_pc = dc->pc;
5326 insn = cpu_ldl_code(env, dc->pc);
5327 disas_sparc_insn(dc, insn);
5328 num_insns++;
5329
5330 if (dc->is_br)
5331 break;
5332 /* if the next PC is different, we abort now */
5333 if (dc->pc != (last_pc + 4))
5334 break;
5335 /* if we reach a page boundary, we stop generation so that the
5336 PC of a TT_TFAULT exception is always in the right page */
5337 if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5338 break;
5339 /* if single step mode, we generate only one instruction and
5340 generate an exception */
5341 if (dc->singlestep) {
5342 break;
5343 }
5344 } while ((gen_opc_ptr < gen_opc_end) &&
5345 (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5346 num_insns < max_insns);
5347
5348 exit_gen_loop:
5349 tcg_temp_free(cpu_addr);
5350 tcg_temp_free(cpu_val);
5351 tcg_temp_free(cpu_dst);
5352 tcg_temp_free_i64(cpu_tmp64);
5353 tcg_temp_free_i32(cpu_tmp32);
5354 tcg_temp_free(cpu_tmp0);
5355
5356 if (tb->cflags & CF_LAST_IO)
5357 gen_io_end();
5358 if (!dc->is_br) {
5359 if (dc->pc != DYNAMIC_PC &&
5360 (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5361 /* static PC and NPC: we can use direct chaining */
5362 gen_goto_tb(dc, 0, dc->pc, dc->npc);
5363 } else {
5364 if (dc->pc != DYNAMIC_PC)
5365 tcg_gen_movi_tl(cpu_pc, dc->pc);
5366 save_npc(dc, cpu_cond);
5367 tcg_gen_exit_tb(0);
5368 }
5369 }
5370 gen_icount_end(tb, num_insns);
5371 *gen_opc_ptr = INDEX_op_end;
5372 if (spc) {
5373 j = gen_opc_ptr - gen_opc_buf;
5374 lj++;
5375 while (lj <= j)
5376 gen_opc_instr_start[lj++] = 0;
5377 #if 0
5378 log_page_dump();
5379 #endif
5380 gen_opc_jump_pc[0] = dc->jump_pc[0];
5381 gen_opc_jump_pc[1] = dc->jump_pc[1];
5382 } else {
5383 tb->size = last_pc + 4 - pc_start;
5384 tb->icount = num_insns;
5385 }
5386 #ifdef DEBUG_DISAS
5387 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
5388 qemu_log("--------------\n");
5389 qemu_log("IN: %s\n", lookup_symbol(pc_start));
5390 log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
5391 qemu_log("\n");
5392 }
5393 #endif
5394 }
5395
5396 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5397 {
5398 gen_intermediate_code_internal(tb, 0, env);
5399 }
5400
5401 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5402 {
5403 gen_intermediate_code_internal(tb, 1, env);
5404 }
5405
5406 void gen_intermediate_code_init(CPUSPARCState *env)
5407 {
5408 unsigned int i;
5409 static int inited;
5410 static const char * const gregnames[8] = {
5411 NULL, // g0 not used
5412 "g1",
5413 "g2",
5414 "g3",
5415 "g4",
5416 "g5",
5417 "g6",
5418 "g7",
5419 };
5420 static const char * const fregnames[32] = {
5421 "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5422 "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5423 "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5424 "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5425 };
5426
5427 /* init various static tables */
5428 if (!inited) {
5429 inited = 1;
5430
5431 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5432 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5433 offsetof(CPUSPARCState, regwptr),
5434 "regwptr");
5435 #ifdef TARGET_SPARC64
5436 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, xcc),
5437 "xcc");
5438 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, asi),
5439 "asi");
5440 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, fprs),
5441 "fprs");
5442 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, gsr),
5443 "gsr");
5444 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5445 offsetof(CPUSPARCState, tick_cmpr),
5446 "tick_cmpr");
5447 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5448 offsetof(CPUSPARCState, stick_cmpr),
5449 "stick_cmpr");
5450 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5451 offsetof(CPUSPARCState, hstick_cmpr),
5452 "hstick_cmpr");
5453 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hintp),
5454 "hintp");
5455 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, htba),
5456 "htba");
5457 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, hver),
5458 "hver");
5459 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5460 offsetof(CPUSPARCState, ssr), "ssr");
5461 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5462 offsetof(CPUSPARCState, version), "ver");
5463 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5464 offsetof(CPUSPARCState, softint),
5465 "softint");
5466 #else
5467 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, wim),
5468 "wim");
5469 #endif
5470 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cond),
5471 "cond");
5472 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_src),
5473 "cc_src");
5474 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5475 offsetof(CPUSPARCState, cc_src2),
5476 "cc_src2");
5477 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, cc_dst),
5478 "cc_dst");
5479 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, cc_op),
5480 "cc_op");
5481 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUSPARCState, psr),
5482 "psr");
5483 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, fsr),
5484 "fsr");
5485 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, pc),
5486 "pc");
5487 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, npc),
5488 "npc");
5489 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, y), "y");
5490 #ifndef CONFIG_USER_ONLY
5491 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUSPARCState, tbr),
5492 "tbr");
5493 #endif
5494 for (i = 1; i < 8; i++) {
5495 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5496 offsetof(CPUSPARCState, gregs[i]),
5497 gregnames[i]);
5498 }
5499 for (i = 0; i < TARGET_DPREGS; i++) {
5500 cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
5501 offsetof(CPUSPARCState, fpr[i]),
5502 fregnames[i]);
5503 }
5504
5505 /* register helpers */
5506
5507 #define GEN_HELPER 2
5508 #include "helper.h"
5509 }
5510 }
5511
5512 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb, int pc_pos)
5513 {
5514 target_ulong npc;
5515 env->pc = gen_opc_pc[pc_pos];
5516 npc = gen_opc_npc[pc_pos];
5517 if (npc == 1) {
5518 /* dynamic NPC: already stored */
5519 } else if (npc == 2) {
5520 /* jump PC: use 'cond' and the jump targets of the translation */
5521 if (env->cond) {
5522 env->npc = gen_opc_jump_pc[0];
5523 } else {
5524 env->npc = gen_opc_jump_pc[1];
5525 }
5526 } else {
5527 env->npc = npc;
5528 }
5529
5530 /* flush pending conditional evaluations before exposing cpu state */
5531 if (CC_OP != CC_OP_FLAGS) {
5532 helper_compute_psr(env);
5533 }
5534 }