/*
 * target-sparc/translate.c (from qemu.git)
 * Commit: "Sparc: avoid AREG0 for softint op helpers and Leon cache control"
 */
1 /*
2 SPARC translation
3
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
6
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
11
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
19 */
20
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
26
27 #include "cpu.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31
32 #define GEN_HELPER 1
33 #include "helper.h"
34
35 #define DEBUG_DISAS
36
37 #define DYNAMIC_PC 1 /* dynamic pc value */
38 #define JUMP_PC 2 /* dynamic pc value which takes only two values
39 according to jump_pc[T2] */
40
41 /* global register indexes */
/* TCG globals bound to fixed CPUSPARCState fields, created once at init. */
42 static TCGv_ptr cpu_env, cpu_regwptr;
/* Lazy condition-code state: last operands, result, and pending CC_OP. */
43 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
44 static TCGv_i32 cpu_cc_op;
45 static TCGv_i32 cpu_psr;
46 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
47 static TCGv cpu_y;
48 #ifndef CONFIG_USER_ONLY
49 static TCGv cpu_tbr;
50 #endif
/* Scratch values shared by the translator while decoding one insn. */
51 static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
52 #ifdef TARGET_SPARC64
53 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
54 static TCGv cpu_gsr;
55 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
56 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
57 static TCGv_i32 cpu_softint;
58 #else
59 static TCGv cpu_wim;
60 #endif
61 /* local register indexes (only used inside old micro ops) */
62 static TCGv cpu_tmp0;
63 static TCGv_i32 cpu_tmp32;
64 static TCGv_i64 cpu_tmp64;
65 /* Floating point registers */
66 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
67
/* NOTE(review): presumably per-opcode npc / jump-target bookkeeping used
   when restoring CPU state mid-TB; the consumers are outside this chunk. */
68 static target_ulong gen_opc_npc[OPC_BUF_SIZE];
69 static target_ulong gen_opc_jump_pc[2];
70
71 #include "gen-icount.h"
72
/* Per-translation-block decoder state. */
73 typedef struct DisasContext {
74 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
75 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
76 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
77 int is_br; /* NOTE(review): presumably set once a branch ends the TB; set outside this chunk */
78 int mem_idx; /* MMU index; also encodes privilege (see supervisor/hypervisor) */
79 int fpu_enabled; /* nonzero when FPU access is permitted */
80 int address_mask_32bit; /* sparc64: mask addresses to 32 bits (see AM_CHECK) */
81 int singlestep; /* inhibits direct TB chaining (see gen_goto_tb) */
82 uint32_t cc_op; /* current CC operation */
83 struct TranslationBlock *tb;
84 sparc_def_t *def; /* CPU model definition */
85 } DisasContext;
86
87 // This function uses non-native bit order
/* Extract bits FROM..TO of X, numbering bits MSB-first (bit 0 == bit 31). */
88 #define GET_FIELD(X, FROM, TO) \
89 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
90
91 // This function uses the order in the manuals, i.e. bit 0 is 2^0
92 #define GET_FIELD_SP(X, FROM, TO) \
93 GET_FIELD(X, 31 - (TO), 31 - (FROM))
94
/* Sign-extending variants of the two extractors above. */
95 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
96 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
97
/* Map an instruction FP register number to the cpu_fpr[] index of a
   double (DFPREG) or quad (QFPREG) register; on sparc64, register bit 0
   becomes bit 5 of the index to reach the upper bank of registers. */
98 #ifdef TARGET_SPARC64
99 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
100 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
101 #else
102 #define DFPREG(r) (r & 0x1e)
103 #define QFPREG(r) (r & 0x1c)
104 #endif
105
/* NOTE(review): masks presumably applied to software trap numbers (Tcc);
   the users are outside this chunk — confirm. */
106 #define UA2005_HTRAP_MASK 0xff
107 #define V8_TRAP_MASK 0x7f
108
/*
 * Sign-extend the low LEN bits of X to a full int (1 <= LEN <= 32).
 *
 * The previous implementation did `(x << (32 - len)) >> (32 - len)` on a
 * signed int, which is undefined behaviour whenever a set bit is shifted
 * into or out of the sign position (C99 6.5.7).  Mask and widen on an
 * unsigned type instead and extend with the xor/subtract identity, which
 * is fully defined arithmetic.  (The final value-preserving cast assumes
 * two's complement, as QEMU does throughout.)
 */
static int sign_extend(int x, int len)
{
    uint32_t sign_bit = 1U << (len - 1);
    uint32_t value = (uint32_t)x & (sign_bit | (sign_bit - 1));

    return (int)(int32_t)((value ^ sign_bit) - sign_bit);
}
114
115 #define IS_IMM (insn & (1<<13)) /* instruction bit 13: immediate (i) form */
116
117 /* floating point registers moves */
118 static void gen_op_load_fpr_DT0(unsigned int src)
119 {
120 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
121 offsetof(CPU_DoubleU, l.upper));
122 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
123 offsetof(CPU_DoubleU, l.lower));
124 }
125
126 static void gen_op_load_fpr_DT1(unsigned int src)
127 {
128 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
129 offsetof(CPU_DoubleU, l.upper));
130 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
131 offsetof(CPU_DoubleU, l.lower));
132 }
133
134 static void gen_op_store_DT0_fpr(unsigned int dst)
135 {
136 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
137 offsetof(CPU_DoubleU, l.upper));
138 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
139 offsetof(CPU_DoubleU, l.lower));
140 }
141
142 static void gen_op_load_fpr_QT0(unsigned int src)
143 {
144 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
145 offsetof(CPU_QuadU, l.upmost));
146 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
147 offsetof(CPU_QuadU, l.upper));
148 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
149 offsetof(CPU_QuadU, l.lower));
150 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
151 offsetof(CPU_QuadU, l.lowest));
152 }
153
154 static void gen_op_load_fpr_QT1(unsigned int src)
155 {
156 tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
157 offsetof(CPU_QuadU, l.upmost));
158 tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
159 offsetof(CPU_QuadU, l.upper));
160 tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
161 offsetof(CPU_QuadU, l.lower));
162 tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
163 offsetof(CPU_QuadU, l.lowest));
164 }
165
166 static void gen_op_store_QT0_fpr(unsigned int dst)
167 {
168 tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
169 offsetof(CPU_QuadU, l.upmost));
170 tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
171 offsetof(CPU_QuadU, l.upper));
172 tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
173 offsetof(CPU_QuadU, l.lower));
174 tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
175 offsetof(CPU_QuadU, l.lowest));
176 }
177
178 /* moves */
/* Privilege predicates derived from the context's MMU index; a user-only
   build can never be privileged. */
179 #ifdef CONFIG_USER_ONLY
180 #define supervisor(dc) 0
181 #ifdef TARGET_SPARC64
182 #define hypervisor(dc) 0
183 #endif
184 #else
185 #define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
186 #ifdef TARGET_SPARC64
187 #define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
188 #else
189 #endif
190 #endif
191
/* sparc64: should generated addresses be masked to 32 bits?  Always true
   when emulating the 32-bit ABI, otherwise taken from the context. */
192 #ifdef TARGET_SPARC64
193 #ifndef TARGET_ABI32
194 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
195 #else
196 #define AM_CHECK(dc) (1)
197 #endif
198 #endif
199
200 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
201 {
202 #ifdef TARGET_SPARC64
203 if (AM_CHECK(dc))
204 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
205 #endif
206 }
207
208 static inline void gen_movl_reg_TN(int reg, TCGv tn)
209 {
210 if (reg == 0)
211 tcg_gen_movi_tl(tn, 0);
212 else if (reg < 8)
213 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
214 else {
215 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216 }
217 }
218
219 static inline void gen_movl_TN_reg(int reg, TCGv tn)
220 {
221 if (reg == 0)
222 return;
223 else if (reg < 8)
224 tcg_gen_mov_tl(cpu_gregs[reg], tn);
225 else {
226 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
227 }
228 }
229
230 static inline void gen_goto_tb(DisasContext *s, int tb_num,
231 target_ulong pc, target_ulong npc)
232 {
233 TranslationBlock *tb;
234
235 tb = s->tb;
236 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
237 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
238 !s->singlestep) {
239 /* jump to same page: we can use a direct jump */
240 tcg_gen_goto_tb(tb_num);
241 tcg_gen_movi_tl(cpu_pc, pc);
242 tcg_gen_movi_tl(cpu_npc, npc);
243 tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
244 } else {
245 /* jump to another page: currently not optimized */
246 tcg_gen_movi_tl(cpu_pc, pc);
247 tcg_gen_movi_tl(cpu_npc, npc);
248 tcg_gen_exit_tb(0);
249 }
250 }
251
252 // XXX suboptimal
253 static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
254 {
255 tcg_gen_extu_i32_tl(reg, src);
256 tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
257 tcg_gen_andi_tl(reg, reg, 0x1);
258 }
259
260 static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
261 {
262 tcg_gen_extu_i32_tl(reg, src);
263 tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
264 tcg_gen_andi_tl(reg, reg, 0x1);
265 }
266
267 static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
268 {
269 tcg_gen_extu_i32_tl(reg, src);
270 tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
271 tcg_gen_andi_tl(reg, reg, 0x1);
272 }
273
274 static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
275 {
276 tcg_gen_extu_i32_tl(reg, src);
277 tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
278 tcg_gen_andi_tl(reg, reg, 0x1);
279 }
280
281 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
282 {
283 TCGv r_temp;
284 TCGv_i32 r_const;
285 int l1;
286
287 l1 = gen_new_label();
288
289 r_temp = tcg_temp_new();
290 tcg_gen_xor_tl(r_temp, src1, src2);
291 tcg_gen_not_tl(r_temp, r_temp);
292 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
293 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
294 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
295 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
296 r_const = tcg_const_i32(TT_TOVF);
297 gen_helper_raise_exception(cpu_env, r_const);
298 tcg_temp_free_i32(r_const);
299 gen_set_label(l1);
300 tcg_temp_free(r_temp);
301 }
302
303 static inline void gen_tag_tv(TCGv src1, TCGv src2)
304 {
305 int l1;
306 TCGv_i32 r_const;
307
308 l1 = gen_new_label();
309 tcg_gen_or_tl(cpu_tmp0, src1, src2);
310 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
311 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
312 r_const = tcg_const_i32(TT_TOVF);
313 gen_helper_raise_exception(cpu_env, r_const);
314 tcg_temp_free_i32(r_const);
315 gen_set_label(l1);
316 }
317
318 static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
319 {
320 tcg_gen_mov_tl(cpu_cc_src, src1);
321 tcg_gen_movi_tl(cpu_cc_src2, src2);
322 tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
323 tcg_gen_mov_tl(dst, cpu_cc_dst);
324 }
325
326 static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
327 {
328 tcg_gen_mov_tl(cpu_cc_src, src1);
329 tcg_gen_mov_tl(cpu_cc_src2, src2);
330 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
331 tcg_gen_mov_tl(dst, cpu_cc_dst);
332 }
333
334 static TCGv_i32 gen_add32_carry32(void)
335 {
336 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
337
338 /* Carry is computed from a previous add: (dst < src) */
339 #if TARGET_LONG_BITS == 64
340 cc_src1_32 = tcg_temp_new_i32();
341 cc_src2_32 = tcg_temp_new_i32();
342 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
343 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
344 #else
345 cc_src1_32 = cpu_cc_dst;
346 cc_src2_32 = cpu_cc_src;
347 #endif
348
349 carry_32 = tcg_temp_new_i32();
350 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
351
352 #if TARGET_LONG_BITS == 64
353 tcg_temp_free_i32(cc_src1_32);
354 tcg_temp_free_i32(cc_src2_32);
355 #endif
356
357 return carry_32;
358 }
359
360 static TCGv_i32 gen_sub32_carry32(void)
361 {
362 TCGv_i32 carry_32, cc_src1_32, cc_src2_32;
363
364 /* Carry is computed from a previous borrow: (src1 < src2) */
365 #if TARGET_LONG_BITS == 64
366 cc_src1_32 = tcg_temp_new_i32();
367 cc_src2_32 = tcg_temp_new_i32();
368 tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
369 tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
370 #else
371 cc_src1_32 = cpu_cc_src;
372 cc_src2_32 = cpu_cc_src2;
373 #endif
374
375 carry_32 = tcg_temp_new_i32();
376 tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);
377
378 #if TARGET_LONG_BITS == 64
379 tcg_temp_free_i32(cc_src1_32);
380 tcg_temp_free_i32(cc_src2_32);
381 #endif
382
383 return carry_32;
384 }
385
/* Generate DST = SRC1 + SRC2 + C (ADDX/ADDC).  The carry is recovered
   from the lazily tracked condition-code state in dc->cc_op where
   possible, falling back to the compute_C_icc helper.  When update_cc
   is set, the CC state is switched to CC_OP_ADDX. */
386 static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
387 TCGv src2, int update_cc)
388 {
389 TCGv_i32 carry_32;
390 TCGv carry;
391
392 switch (dc->cc_op) {
393 case CC_OP_DIV:
394 case CC_OP_LOGIC:
395 /* Carry is known to be zero. Fall back to plain ADD. */
396 if (update_cc) {
397 gen_op_add_cc(dst, src1, src2);
398 } else {
399 tcg_gen_add_tl(dst, src1, src2);
400 }
401 return;
402
403 case CC_OP_ADD:
404 case CC_OP_TADD:
405 case CC_OP_TADDTV:
406 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
407 {
408 /* For 32-bit hosts, we can re-use the host's hardware carry
409 generation by using an ADD2 opcode. We discard the low
410 part of the output. Ideally we'd combine this operation
411 with the add that generated the carry in the first place. */
412 TCGv dst_low = tcg_temp_new();
413 tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
414 cpu_cc_src, src1, cpu_cc_src2, src2);
415 tcg_temp_free(dst_low);
416 goto add_done;
417 }
418 #endif
419 carry_32 = gen_add32_carry32();
420 break;
421
422 case CC_OP_SUB:
423 case CC_OP_TSUB:
424 case CC_OP_TSUBTV:
425 carry_32 = gen_sub32_carry32();
426 break;
427
428 default:
429 /* We need external help to produce the carry. */
430 carry_32 = tcg_temp_new_i32();
431 gen_helper_compute_C_icc(carry_32, cpu_env);
432 break;
433 }
434
/* Widen the 32-bit carry to the target register width if needed. */
435 #if TARGET_LONG_BITS == 64
436 carry = tcg_temp_new();
437 tcg_gen_extu_i32_i64(carry, carry_32);
438 #else
439 carry = carry_32;
440 #endif
441
442 tcg_gen_add_tl(dst, src1, src2);
443 tcg_gen_add_tl(dst, dst, carry);
444
445 tcg_temp_free_i32(carry_32);
446 #if TARGET_LONG_BITS == 64
447 tcg_temp_free(carry);
448 #endif
449
450 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
451 add_done:
452 #endif
453 if (update_cc) {
454 tcg_gen_mov_tl(cpu_cc_src, src1);
455 tcg_gen_mov_tl(cpu_cc_src2, src2);
456 tcg_gen_mov_tl(cpu_cc_dst, dst);
457 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
458 dc->cc_op = CC_OP_ADDX;
459 }
460 }
461
462 static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
463 {
464 tcg_gen_mov_tl(cpu_cc_src, src1);
465 tcg_gen_mov_tl(cpu_cc_src2, src2);
466 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
467 tcg_gen_mov_tl(dst, cpu_cc_dst);
468 }
469
470 static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
471 {
472 tcg_gen_mov_tl(cpu_cc_src, src1);
473 tcg_gen_mov_tl(cpu_cc_src2, src2);
474 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
475 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
476 gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
477 tcg_gen_mov_tl(dst, cpu_cc_dst);
478 }
479
480 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
481 {
482 TCGv r_temp;
483 TCGv_i32 r_const;
484 int l1;
485
486 l1 = gen_new_label();
487
488 r_temp = tcg_temp_new();
489 tcg_gen_xor_tl(r_temp, src1, src2);
490 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
491 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
492 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
493 tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
494 r_const = tcg_const_i32(TT_TOVF);
495 gen_helper_raise_exception(cpu_env, r_const);
496 tcg_temp_free_i32(r_const);
497 gen_set_label(l1);
498 tcg_temp_free(r_temp);
499 }
500
501 static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
502 {
503 tcg_gen_mov_tl(cpu_cc_src, src1);
504 tcg_gen_movi_tl(cpu_cc_src2, src2);
505 if (src2 == 0) {
506 tcg_gen_mov_tl(cpu_cc_dst, src1);
507 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
508 dc->cc_op = CC_OP_LOGIC;
509 } else {
510 tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
511 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
512 dc->cc_op = CC_OP_SUB;
513 }
514 tcg_gen_mov_tl(dst, cpu_cc_dst);
515 }
516
517 static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
518 {
519 tcg_gen_mov_tl(cpu_cc_src, src1);
520 tcg_gen_mov_tl(cpu_cc_src2, src2);
521 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
522 tcg_gen_mov_tl(dst, cpu_cc_dst);
523 }
524
/* Generate DST = SRC1 - SRC2 - C (SUBX/SUBC).  The borrow is recovered
   from the lazily tracked condition-code state in dc->cc_op where
   possible, falling back to the compute_C_icc helper.  When update_cc
   is set, the CC state is switched to CC_OP_SUBX. */
525 static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
526 TCGv src2, int update_cc)
527 {
528 TCGv_i32 carry_32;
529 TCGv carry;
530
531 switch (dc->cc_op) {
532 case CC_OP_DIV:
533 case CC_OP_LOGIC:
534 /* Carry is known to be zero. Fall back to plain SUB. */
535 if (update_cc) {
536 gen_op_sub_cc(dst, src1, src2);
537 } else {
538 tcg_gen_sub_tl(dst, src1, src2);
539 }
540 return;
541
542 case CC_OP_ADD:
543 case CC_OP_TADD:
544 case CC_OP_TADDTV:
545 carry_32 = gen_add32_carry32();
546 break;
547
548 case CC_OP_SUB:
549 case CC_OP_TSUB:
550 case CC_OP_TSUBTV:
551 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
552 {
553 /* For 32-bit hosts, we can re-use the host's hardware carry
554 generation by using a SUB2 opcode. We discard the low
555 part of the output. Ideally we'd combine this operation
556 with the sub that generated the carry in the first place. */
557 TCGv dst_low = tcg_temp_new();
558 tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
559 cpu_cc_src, src1, cpu_cc_src2, src2);
560 tcg_temp_free(dst_low);
561 goto sub_done;
562 }
563 #endif
564 carry_32 = gen_sub32_carry32();
565 break;
566
567 default:
568 /* We need external help to produce the carry. */
569 carry_32 = tcg_temp_new_i32();
570 gen_helper_compute_C_icc(carry_32, cpu_env);
571 break;
572 }
573
/* Widen the 32-bit borrow to the target register width if needed. */
574 #if TARGET_LONG_BITS == 64
575 carry = tcg_temp_new();
576 tcg_gen_extu_i32_i64(carry, carry_32);
577 #else
578 carry = carry_32;
579 #endif
580
581 tcg_gen_sub_tl(dst, src1, src2);
582 tcg_gen_sub_tl(dst, dst, carry);
583
584 tcg_temp_free_i32(carry_32);
585 #if TARGET_LONG_BITS == 64
586 tcg_temp_free(carry);
587 #endif
588
589 #if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
590 sub_done:
591 #endif
592 if (update_cc) {
593 tcg_gen_mov_tl(cpu_cc_src, src1);
594 tcg_gen_mov_tl(cpu_cc_src2, src2);
595 tcg_gen_mov_tl(cpu_cc_dst, dst);
596 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
597 dc->cc_op = CC_OP_SUBX;
598 }
599 }
600
601 static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
602 {
603 tcg_gen_mov_tl(cpu_cc_src, src1);
604 tcg_gen_mov_tl(cpu_cc_src2, src2);
605 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
606 tcg_gen_mov_tl(dst, cpu_cc_dst);
607 }
608
609 static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
610 {
611 tcg_gen_mov_tl(cpu_cc_src, src1);
612 tcg_gen_mov_tl(cpu_cc_src2, src2);
613 gen_tag_tv(cpu_cc_src, cpu_cc_src2);
614 tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
615 gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
616 tcg_gen_mov_tl(dst, cpu_cc_dst);
617 }
618
/* One step of the MULScc multiply-step instruction: shifts the %y
   register and the running sum by one bit and conditionally adds the
   multiplicand, leaving the result in DST and the CC operands recorded.
   The commented "old op" C fragments document each emitted sequence. */
619 static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
620 {
621 TCGv r_temp;
622 int l1;
623
624 l1 = gen_new_label();
625 r_temp = tcg_temp_new();
626
627 /* old op:
628 if (!(env->y & 1))
629 T1 = 0;
630 */
631 tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
632 tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
633 tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
634 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
635 tcg_gen_movi_tl(cpu_cc_src2, 0);
636 gen_set_label(l1);
637
638 // b2 = T0 & 1;
639 // env->y = (b2 << 31) | (env->y >> 1);
640 tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
641 tcg_gen_shli_tl(r_temp, r_temp, 31);
642 tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
643 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
644 tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
645 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
646
647 // b1 = N ^ V;
648 gen_mov_reg_N(cpu_tmp0, cpu_psr);
649 gen_mov_reg_V(r_temp, cpu_psr);
650 tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
651 tcg_temp_free(r_temp);
652
653 // T0 = (b1 << 31) | (T0 >> 1);
654 // src1 = T0;
655 tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
656 tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
657 tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);
658
659 tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
660
661 tcg_gen_mov_tl(dst, cpu_cc_dst);
662 }
663
664 static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
665 {
666 TCGv_i32 r_src1, r_src2;
667 TCGv_i64 r_temp, r_temp2;
668
669 r_src1 = tcg_temp_new_i32();
670 r_src2 = tcg_temp_new_i32();
671
672 tcg_gen_trunc_tl_i32(r_src1, src1);
673 tcg_gen_trunc_tl_i32(r_src2, src2);
674
675 r_temp = tcg_temp_new_i64();
676 r_temp2 = tcg_temp_new_i64();
677
678 if (sign_ext) {
679 tcg_gen_ext_i32_i64(r_temp, r_src2);
680 tcg_gen_ext_i32_i64(r_temp2, r_src1);
681 } else {
682 tcg_gen_extu_i32_i64(r_temp, r_src2);
683 tcg_gen_extu_i32_i64(r_temp2, r_src1);
684 }
685
686 tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);
687
688 tcg_gen_shri_i64(r_temp, r_temp2, 32);
689 tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
690 tcg_temp_free_i64(r_temp);
691 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
692
693 tcg_gen_trunc_i64_tl(dst, r_temp2);
694
695 tcg_temp_free_i64(r_temp2);
696
697 tcg_temp_free_i32(r_src1);
698 tcg_temp_free_i32(r_src2);
699 }
700
701 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
702 {
703 /* zero-extend truncated operands before multiplication */
704 gen_op_multiply(dst, src1, src2, 0);
705 }
706
707 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
708 {
709 /* sign-extend truncated operands before multiplication */
710 gen_op_multiply(dst, src1, src2, 1);
711 }
712
#ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO if DIVISOR is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 trap_type;
    int lbl_nonzero;

    lbl_nonzero = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, lbl_nonzero);
    trap_type = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(cpu_env, trap_type);
    tcg_temp_free_i32(trap_type);
    gen_set_label(lbl_nonzero);
}

/* SDIVX: 64-bit signed divide.  Traps on a zero divisor; the one
   overflowing case INT64_MIN / -1 is pinned to INT64_MIN. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int lbl_divide, lbl_done;
    TCGv num, den;

    lbl_divide = gen_new_label();
    lbl_done = gen_new_label();
    /* Local temps: they must survive the branches below. */
    num = tcg_temp_local_new();
    den = tcg_temp_local_new();
    tcg_gen_mov_tl(num, src1);
    tcg_gen_mov_tl(den, src2);
    gen_trap_ifdivzero_tl(den);
    tcg_gen_brcondi_tl(TCG_COND_NE, num, INT64_MIN, lbl_divide);
    tcg_gen_brcondi_tl(TCG_COND_NE, den, -1, lbl_divide);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(lbl_done);
    gen_set_label(lbl_divide);
    tcg_gen_div_i64(dst, num, den);
    gen_set_label(lbl_done);
    tcg_temp_free(num);
    tcg_temp_free(den);
}
#endif
750
751 // 1
752 static inline void gen_op_eval_ba(TCGv dst)
753 {
754 tcg_gen_movi_tl(dst, 1);
755 }
756
757 // Z
758 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
759 {
760 gen_mov_reg_Z(dst, src);
761 }
762
763 // Z | (N ^ V)
764 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
765 {
766 gen_mov_reg_N(cpu_tmp0, src);
767 gen_mov_reg_V(dst, src);
768 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
769 gen_mov_reg_Z(cpu_tmp0, src);
770 tcg_gen_or_tl(dst, dst, cpu_tmp0);
771 }
772
773 // N ^ V
774 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
775 {
776 gen_mov_reg_V(cpu_tmp0, src);
777 gen_mov_reg_N(dst, src);
778 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
779 }
780
781 // C | Z
782 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
783 {
784 gen_mov_reg_Z(cpu_tmp0, src);
785 gen_mov_reg_C(dst, src);
786 tcg_gen_or_tl(dst, dst, cpu_tmp0);
787 }
788
789 // C
790 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
791 {
792 gen_mov_reg_C(dst, src);
793 }
794
795 // V
796 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
797 {
798 gen_mov_reg_V(dst, src);
799 }
800
801 // 0
802 static inline void gen_op_eval_bn(TCGv dst)
803 {
804 tcg_gen_movi_tl(dst, 0);
805 }
806
807 // N
808 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
809 {
810 gen_mov_reg_N(dst, src);
811 }
812
813 // !Z
814 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
815 {
816 gen_mov_reg_Z(dst, src);
817 tcg_gen_xori_tl(dst, dst, 0x1);
818 }
819
820 // !(Z | (N ^ V))
821 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
822 {
823 gen_mov_reg_N(cpu_tmp0, src);
824 gen_mov_reg_V(dst, src);
825 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
826 gen_mov_reg_Z(cpu_tmp0, src);
827 tcg_gen_or_tl(dst, dst, cpu_tmp0);
828 tcg_gen_xori_tl(dst, dst, 0x1);
829 }
830
831 // !(N ^ V)
832 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
833 {
834 gen_mov_reg_V(cpu_tmp0, src);
835 gen_mov_reg_N(dst, src);
836 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
837 tcg_gen_xori_tl(dst, dst, 0x1);
838 }
839
840 // !(C | Z)
841 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
842 {
843 gen_mov_reg_Z(cpu_tmp0, src);
844 gen_mov_reg_C(dst, src);
845 tcg_gen_or_tl(dst, dst, cpu_tmp0);
846 tcg_gen_xori_tl(dst, dst, 0x1);
847 }
848
849 // !C
850 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
851 {
852 gen_mov_reg_C(dst, src);
853 tcg_gen_xori_tl(dst, dst, 0x1);
854 }
855
856 // !N
857 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
858 {
859 gen_mov_reg_N(dst, src);
860 tcg_gen_xori_tl(dst, dst, 0x1);
861 }
862
863 // !V
864 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
865 {
866 gen_mov_reg_V(dst, src);
867 tcg_gen_xori_tl(dst, dst, 0x1);
868 }
869
870 /*
871 FPSR bit field FCC1 | FCC0:
872 0 =
873 1 <
874 2 >
875 3 unordered
876 */
877 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
878 unsigned int fcc_offset)
879 {
880 tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
881 tcg_gen_andi_tl(reg, reg, 0x1);
882 }
883
884 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
885 unsigned int fcc_offset)
886 {
887 tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
888 tcg_gen_andi_tl(reg, reg, 0x1);
889 }
890
891 // !0: FCC0 | FCC1
892 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
893 unsigned int fcc_offset)
894 {
895 gen_mov_reg_FCC0(dst, src, fcc_offset);
896 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
897 tcg_gen_or_tl(dst, dst, cpu_tmp0);
898 }
899
900 // 1 or 2: FCC0 ^ FCC1
901 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
902 unsigned int fcc_offset)
903 {
904 gen_mov_reg_FCC0(dst, src, fcc_offset);
905 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
906 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
907 }
908
909 // 1 or 3: FCC0
910 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
911 unsigned int fcc_offset)
912 {
913 gen_mov_reg_FCC0(dst, src, fcc_offset);
914 }
915
916 // 1: FCC0 & !FCC1
917 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
918 unsigned int fcc_offset)
919 {
920 gen_mov_reg_FCC0(dst, src, fcc_offset);
921 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
922 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
923 tcg_gen_and_tl(dst, dst, cpu_tmp0);
924 }
925
926 // 2 or 3: FCC1
927 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
928 unsigned int fcc_offset)
929 {
930 gen_mov_reg_FCC1(dst, src, fcc_offset);
931 }
932
933 // 2: !FCC0 & FCC1
934 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
935 unsigned int fcc_offset)
936 {
937 gen_mov_reg_FCC0(dst, src, fcc_offset);
938 tcg_gen_xori_tl(dst, dst, 0x1);
939 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
940 tcg_gen_and_tl(dst, dst, cpu_tmp0);
941 }
942
943 // 3: FCC0 & FCC1
944 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
945 unsigned int fcc_offset)
946 {
947 gen_mov_reg_FCC0(dst, src, fcc_offset);
948 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
949 tcg_gen_and_tl(dst, dst, cpu_tmp0);
950 }
951
952 // 0: !(FCC0 | FCC1)
953 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
954 unsigned int fcc_offset)
955 {
956 gen_mov_reg_FCC0(dst, src, fcc_offset);
957 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
958 tcg_gen_or_tl(dst, dst, cpu_tmp0);
959 tcg_gen_xori_tl(dst, dst, 0x1);
960 }
961
962 // 0 or 3: !(FCC0 ^ FCC1)
963 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
964 unsigned int fcc_offset)
965 {
966 gen_mov_reg_FCC0(dst, src, fcc_offset);
967 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
968 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
969 tcg_gen_xori_tl(dst, dst, 0x1);
970 }
971
972 // 0 or 2: !FCC0
973 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
974 unsigned int fcc_offset)
975 {
976 gen_mov_reg_FCC0(dst, src, fcc_offset);
977 tcg_gen_xori_tl(dst, dst, 0x1);
978 }
979
980 // !1: !(FCC0 & !FCC1)
981 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
982 unsigned int fcc_offset)
983 {
984 gen_mov_reg_FCC0(dst, src, fcc_offset);
985 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
986 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
987 tcg_gen_and_tl(dst, dst, cpu_tmp0);
988 tcg_gen_xori_tl(dst, dst, 0x1);
989 }
990
991 // 0 or 1: !FCC1
992 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
993 unsigned int fcc_offset)
994 {
995 gen_mov_reg_FCC1(dst, src, fcc_offset);
996 tcg_gen_xori_tl(dst, dst, 0x1);
997 }
998
999 // !2: !(!FCC0 & FCC1)
1000 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1001 unsigned int fcc_offset)
1002 {
1003 gen_mov_reg_FCC0(dst, src, fcc_offset);
1004 tcg_gen_xori_tl(dst, dst, 0x1);
1005 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1006 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1007 tcg_gen_xori_tl(dst, dst, 0x1);
1008 }
1009
1010 // !3: !(FCC0 & FCC1)
1011 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1012 unsigned int fcc_offset)
1013 {
1014 gen_mov_reg_FCC0(dst, src, fcc_offset);
1015 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1016 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1017 tcg_gen_xori_tl(dst, dst, 0x1);
1018 }
1019
1020 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1021 target_ulong pc2, TCGv r_cond)
1022 {
1023 int l1;
1024
1025 l1 = gen_new_label();
1026
1027 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1028
1029 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1030
1031 gen_set_label(l1);
1032 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1033 }
1034
1035 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1036 target_ulong pc2, TCGv r_cond)
1037 {
1038 int l1;
1039
1040 l1 = gen_new_label();
1041
1042 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1043
1044 gen_goto_tb(dc, 0, pc2, pc1);
1045
1046 gen_set_label(l1);
1047 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1048 }
1049
1050 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1051 TCGv r_cond)
1052 {
1053 int l1, l2;
1054
1055 l1 = gen_new_label();
1056 l2 = gen_new_label();
1057
1058 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1059
1060 tcg_gen_movi_tl(cpu_npc, npc1);
1061 tcg_gen_br(l2);
1062
1063 gen_set_label(l1);
1064 tcg_gen_movi_tl(cpu_npc, npc2);
1065 gen_set_label(l2);
1066 }
1067
1068 /* call this function before using the condition register as it may
1069 have been set for a jump */
1070 static inline void flush_cond(DisasContext *dc, TCGv cond)
1071 {
1072 if (dc->npc == JUMP_PC) {
1073 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1074 dc->npc = DYNAMIC_PC;
1075 }
1076 }
1077
1078 static inline void save_npc(DisasContext *dc, TCGv cond)
1079 {
1080 if (dc->npc == JUMP_PC) {
1081 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1082 dc->npc = DYNAMIC_PC;
1083 } else if (dc->npc != DYNAMIC_PC) {
1084 tcg_gen_movi_tl(cpu_npc, dc->npc);
1085 }
1086 }
1087
1088 static inline void save_state(DisasContext *dc, TCGv cond)
1089 {
1090 tcg_gen_movi_tl(cpu_pc, dc->pc);
1091 /* flush pending conditional evaluations before exposing cpu state */
1092 if (dc->cc_op != CC_OP_FLAGS) {
1093 dc->cc_op = CC_OP_FLAGS;
1094 gen_helper_compute_psr(cpu_env);
1095 }
1096 save_npc(dc, cond);
1097 }
1098
1099 static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
1100 {
1101 if (dc->npc == JUMP_PC) {
1102 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
1103 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1104 dc->pc = DYNAMIC_PC;
1105 } else if (dc->npc == DYNAMIC_PC) {
1106 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1107 dc->pc = DYNAMIC_PC;
1108 } else {
1109 dc->pc = dc->npc;
1110 }
1111 }
1112
1113 static inline void gen_op_next_insn(void)
1114 {
1115 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1116 tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
1117 }
1118
/* Evaluate integer condition code `cond` (the 4-bit Bicc/BPcc field)
   against the selected condition-code register and leave the boolean
   result (0/1) in r_dst.  `cc` selects xcc over icc on sparc64. */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    /* Flags must be up to date before they are read: flush any lazily
       deferred condition-code computation first. */
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:                   /* bn: never */
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:                   /* be: equal */
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:                   /* ble: less or equal */
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:                   /* bl: less */
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:                   /* bleu: less or equal unsigned */
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:                   /* bcs: carry set */
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:                   /* bneg: negative */
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:                   /* bvs: overflow set */
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:                   /* ba: always */
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:                   /* bne: not equal */
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:                   /* bg: greater */
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:                   /* bge: greater or equal */
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:                   /* bgu: greater unsigned */
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:                   /* bcc: carry clear */
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:                   /* bpos: positive */
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:                   /* bvc: overflow clear */
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1191
/* Evaluate floating-point condition `cond` against FSR condition field
   fcc[cc] and leave the boolean result (0/1) in r_dst.  `offset` is the
   bit offset the gen_op_eval_fb* helpers add when extracting the fcc
   field from cpu_fsr (fcc0 needs none; fcc1..3 live at higher bits,
   hence the "32 - 10" style arithmetic). */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:                   /* fbn: never */
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:                   /* fbne */
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:                   /* fblg */
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:                   /* fbul */
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:                   /* fbl */
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:                   /* fbug */
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:                   /* fbg */
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:                   /* fbu */
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:                   /* fba: always */
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:                   /* fbe */
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:                   /* fbue */
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:                   /* fbge */
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:                   /* fbuge */
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:                   /* fble */
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:                   /* fbule */
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:                   /* fbo */
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1263
1264 #ifdef TARGET_SPARC64
// Inverted logic: maps a BPr register-condition encoding to the TCG
// condition under which the branch is NOT taken (used by gen_cond_reg
// to skip the "set result to 1" store).  -1 marks reserved encodings.
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1276
1277 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1278 {
1279 int l1;
1280
1281 l1 = gen_new_label();
1282 tcg_gen_movi_tl(r_dst, 0);
1283 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1284 tcg_gen_movi_tl(r_dst, 1);
1285 gen_set_label(l1);
1286 }
1287 #endif
1288
/* Translate an integer conditional branch (Bicc/BPcc).  `offset` is the
   sign-extended, shifted displacement; `cc` selects icc/xcc; `a` is the
   annul bit which, for a conditional branch, annuls the delay slot only
   when the branch is not taken. */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: skip the delay slot entirely */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: branch immediately, skipping the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* truly conditional: evaluate the condition into r_cond */
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            /* annulling branch ends the TB */
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* record the two possible next PCs; resolved lazily */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
1333
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).  Same
   structure as do_branch but the condition comes from FSR.fcc[cc]. */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: skip the delay slot entirely */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: branch immediately, skipping the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* truly conditional: evaluate the FP condition into r_cond */
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            /* annulling branch ends the TB */
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* record the two possible next PCs; resolved lazily */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            if (unlikely(dc->npc == DYNAMIC_PC)) {
                dc->jump_pc[1] = DYNAMIC_PC;
                tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
            } else {
                dc->jump_pc[1] = dc->npc + 4;
                dc->npc = JUMP_PC;
            }
        }
    }
}
1378
1379 #ifdef TARGET_SPARC64
/* Translate a V9 branch-on-register (BPr): always conditional on the
   value of r_reg, never unconditional, annul handled like do_branch. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        /* annulling branch ends the TB */
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        /* record the two possible next PCs; resolved lazily */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        if (unlikely(dc->npc == DYNAMIC_PC)) {
            dc->jump_pc[1] = DYNAMIC_PC;
            tcg_gen_addi_tl(cpu_pc, cpu_npc, 4);
        } else {
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1403
1404 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1405 {
1406 switch (fccno) {
1407 case 0:
1408 gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
1409 break;
1410 case 1:
1411 gen_helper_fcmps_fcc1(cpu_env, r_rs1, r_rs2);
1412 break;
1413 case 2:
1414 gen_helper_fcmps_fcc2(cpu_env, r_rs1, r_rs2);
1415 break;
1416 case 3:
1417 gen_helper_fcmps_fcc3(cpu_env, r_rs1, r_rs2);
1418 break;
1419 }
1420 }
1421
1422 static inline void gen_op_fcmpd(int fccno)
1423 {
1424 switch (fccno) {
1425 case 0:
1426 gen_helper_fcmpd(cpu_env);
1427 break;
1428 case 1:
1429 gen_helper_fcmpd_fcc1(cpu_env);
1430 break;
1431 case 2:
1432 gen_helper_fcmpd_fcc2(cpu_env);
1433 break;
1434 case 3:
1435 gen_helper_fcmpd_fcc3(cpu_env);
1436 break;
1437 }
1438 }
1439
1440 static inline void gen_op_fcmpq(int fccno)
1441 {
1442 switch (fccno) {
1443 case 0:
1444 gen_helper_fcmpq(cpu_env);
1445 break;
1446 case 1:
1447 gen_helper_fcmpq_fcc1(cpu_env);
1448 break;
1449 case 2:
1450 gen_helper_fcmpq_fcc2(cpu_env);
1451 break;
1452 case 3:
1453 gen_helper_fcmpq_fcc3(cpu_env);
1454 break;
1455 }
1456 }
1457
1458 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1459 {
1460 switch (fccno) {
1461 case 0:
1462 gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
1463 break;
1464 case 1:
1465 gen_helper_fcmpes_fcc1(cpu_env, r_rs1, r_rs2);
1466 break;
1467 case 2:
1468 gen_helper_fcmpes_fcc2(cpu_env, r_rs1, r_rs2);
1469 break;
1470 case 3:
1471 gen_helper_fcmpes_fcc3(cpu_env, r_rs1, r_rs2);
1472 break;
1473 }
1474 }
1475
1476 static inline void gen_op_fcmped(int fccno)
1477 {
1478 switch (fccno) {
1479 case 0:
1480 gen_helper_fcmped(cpu_env);
1481 break;
1482 case 1:
1483 gen_helper_fcmped_fcc1(cpu_env);
1484 break;
1485 case 2:
1486 gen_helper_fcmped_fcc2(cpu_env);
1487 break;
1488 case 3:
1489 gen_helper_fcmped_fcc3(cpu_env);
1490 break;
1491 }
1492 }
1493
1494 static inline void gen_op_fcmpeq(int fccno)
1495 {
1496 switch (fccno) {
1497 case 0:
1498 gen_helper_fcmpeq(cpu_env);
1499 break;
1500 case 1:
1501 gen_helper_fcmpeq_fcc1(cpu_env);
1502 break;
1503 case 2:
1504 gen_helper_fcmpeq_fcc2(cpu_env);
1505 break;
1506 case 3:
1507 gen_helper_fcmpeq_fcc3(cpu_env);
1508 break;
1509 }
1510 }
1511
#else

/* sparc32 has a single fcc field, so fccno is ignored and every
   variant dispatches straight to the fcc0 helper. */

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd(cpu_env);
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_env);
}

/* "e" variants signal an exception on unordered operands. */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_env, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped(cpu_env);
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_env);
}
#endif
1544
1545 static inline void gen_op_fpexception_im(int fsr_flags)
1546 {
1547 TCGv_i32 r_const;
1548
1549 tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
1550 tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
1551 r_const = tcg_const_i32(TT_FP_EXCP);
1552 gen_helper_raise_exception(cpu_env, r_const);
1553 tcg_temp_free_i32(r_const);
1554 }
1555
/* If the FPU is disabled, raise TT_NFPU_INSN and end the TB.
   Returns 1 when the trap was emitted (caller must abort translation
   of this insn), 0 otherwise.  User-mode always has the FPU enabled. */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        /* Expose pc/npc/flags before raising the exception. */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(cpu_env, r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
1572
/* Mark the half of the FP register file containing rd as dirty in FPRS
   (sparc64 only; bit 0 for the lower 32 registers, bit 1 for the rest). */
static inline void gen_update_fprs_dirty(int rd)
{
#if defined(TARGET_SPARC64)
    int dirty_bit = (rd < 32) ? 1 : 2;

    tcg_gen_ori_i32(cpu_fprs, cpu_fprs, dirty_bit);
#endif
}
1579
/* Clear the FSR ftt and current IEEE exception (cexc) fields before a
   new FP operation. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1584
/* Reset the softfloat accumulated exception flags in env. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions(cpu_env);
}
1589
1590 /* asi moves */
1591 #ifdef TARGET_SPARC64
1592 static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
1593 {
1594 int asi;
1595 TCGv_i32 r_asi;
1596
1597 if (IS_IMM) {
1598 r_asi = tcg_temp_new_i32();
1599 tcg_gen_mov_i32(r_asi, cpu_asi);
1600 } else {
1601 asi = GET_FIELD(insn, 19, 26);
1602 r_asi = tcg_const_i32(asi);
1603 }
1604 return r_asi;
1605 }
1606
1607 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1608 int sign)
1609 {
1610 TCGv_i32 r_asi, r_size, r_sign;
1611
1612 r_asi = gen_get_asi(insn, addr);
1613 r_size = tcg_const_i32(size);
1614 r_sign = tcg_const_i32(sign);
1615 gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
1616 tcg_temp_free_i32(r_sign);
1617 tcg_temp_free_i32(r_size);
1618 tcg_temp_free_i32(r_asi);
1619 }
1620
1621 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1622 {
1623 TCGv_i32 r_asi, r_size;
1624
1625 r_asi = gen_get_asi(insn, addr);
1626 r_size = tcg_const_i32(size);
1627 gen_helper_st_asi(addr, src, r_asi, r_size);
1628 tcg_temp_free_i32(r_size);
1629 tcg_temp_free_i32(r_asi);
1630 }
1631
1632 static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
1633 {
1634 TCGv_i32 r_asi, r_size, r_rd;
1635
1636 r_asi = gen_get_asi(insn, addr);
1637 r_size = tcg_const_i32(size);
1638 r_rd = tcg_const_i32(rd);
1639 gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
1640 tcg_temp_free_i32(r_rd);
1641 tcg_temp_free_i32(r_size);
1642 tcg_temp_free_i32(r_asi);
1643 }
1644
1645 static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
1646 {
1647 TCGv_i32 r_asi, r_size, r_rd;
1648
1649 r_asi = gen_get_asi(insn, addr);
1650 r_size = tcg_const_i32(size);
1651 r_rd = tcg_const_i32(rd);
1652 gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
1653 tcg_temp_free_i32(r_rd);
1654 tcg_temp_free_i32(r_size);
1655 tcg_temp_free_i32(r_asi);
1656 }
1657
/* SWAPA: atomically (at translation level: load then store) exchange
   the 32-bit word at [addr] with dst via the ASI helpers. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    /* Old memory word into cpu_tmp64 first ... */
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    /* ... then write dst back to memory ... */
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    /* ... and finally return the old value in dst. */
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1672
1673 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1674 {
1675 TCGv_i32 r_asi, r_rd;
1676
1677 r_asi = gen_get_asi(insn, addr);
1678 r_rd = tcg_const_i32(rd);
1679 gen_helper_ldda_asi(addr, r_asi, r_rd);
1680 tcg_temp_free_i32(r_rd);
1681 tcg_temp_free_i32(r_asi);
1682 }
1683
/* STDA: store the even/odd register pair (hi = even register rd,
   cpu_tmp0 = odd register rd+1) as one 64-bit ASI store. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    /* Concatenate low word (rd+1) and high word (rd) into one i64. */
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1696
1697 static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
1698 int rd)
1699 {
1700 TCGv r_val1;
1701 TCGv_i32 r_asi;
1702
1703 r_val1 = tcg_temp_new();
1704 gen_movl_reg_TN(rd, r_val1);
1705 r_asi = gen_get_asi(insn, addr);
1706 gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
1707 tcg_temp_free_i32(r_asi);
1708 tcg_temp_free(r_val1);
1709 }
1710
/* CASXA: 64-bit compare-and-swap with register rd as the comparison
   value (staged through the shared cpu_tmp64 temporary). */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
1721
1722 #elif !defined(CONFIG_USER_ONLY)
1723
1724 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1725 int sign)
1726 {
1727 TCGv_i32 r_asi, r_size, r_sign;
1728
1729 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1730 r_size = tcg_const_i32(size);
1731 r_sign = tcg_const_i32(sign);
1732 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1733 tcg_temp_free(r_sign);
1734 tcg_temp_free(r_size);
1735 tcg_temp_free(r_asi);
1736 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1737 }
1738
1739 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1740 {
1741 TCGv_i32 r_asi, r_size;
1742
1743 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1744 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1745 r_size = tcg_const_i32(size);
1746 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1747 tcg_temp_free(r_size);
1748 tcg_temp_free(r_asi);
1749 }
1750
1751 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1752 {
1753 TCGv_i32 r_asi, r_size, r_sign;
1754 TCGv_i64 r_val;
1755
1756 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1757 r_size = tcg_const_i32(4);
1758 r_sign = tcg_const_i32(0);
1759 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1760 tcg_temp_free(r_sign);
1761 r_val = tcg_temp_new_i64();
1762 tcg_gen_extu_tl_i64(r_val, dst);
1763 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1764 tcg_temp_free_i64(r_val);
1765 tcg_temp_free(r_size);
1766 tcg_temp_free(r_asi);
1767 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1768 }
1769
1770 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1771 {
1772 TCGv_i32 r_asi, r_size, r_sign;
1773
1774 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1775 r_size = tcg_const_i32(8);
1776 r_sign = tcg_const_i32(0);
1777 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1778 tcg_temp_free(r_sign);
1779 tcg_temp_free(r_size);
1780 tcg_temp_free(r_asi);
1781 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1782 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1783 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1784 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1785 gen_movl_TN_reg(rd, hi);
1786 }
1787
1788 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1789 {
1790 TCGv_i32 r_asi, r_size;
1791
1792 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1793 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1794 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1795 r_size = tcg_const_i32(8);
1796 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1797 tcg_temp_free(r_size);
1798 tcg_temp_free(r_asi);
1799 }
1800 #endif
1801
1802 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1803 static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
1804 {
1805 TCGv_i64 r_val;
1806 TCGv_i32 r_asi, r_size;
1807
1808 gen_ld_asi(dst, addr, insn, 1, 0);
1809
1810 r_val = tcg_const_i64(0xffULL);
1811 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1812 r_size = tcg_const_i32(1);
1813 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1814 tcg_temp_free_i32(r_size);
1815 tcg_temp_free_i32(r_asi);
1816 tcg_temp_free_i64(r_val);
1817 }
1818 #endif
1819
1820 static inline TCGv get_src1(unsigned int insn, TCGv def)
1821 {
1822 TCGv r_rs1 = def;
1823 unsigned int rs1;
1824
1825 rs1 = GET_FIELD(insn, 13, 17);
1826 if (rs1 == 0) {
1827 tcg_gen_movi_tl(def, 0);
1828 } else if (rs1 < 8) {
1829 r_rs1 = cpu_gregs[rs1];
1830 } else {
1831 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1832 }
1833 return r_rs1;
1834 }
1835
1836 static inline TCGv get_src2(unsigned int insn, TCGv def)
1837 {
1838 TCGv r_rs2 = def;
1839
1840 if (IS_IMM) { /* immediate */
1841 target_long simm = GET_FIELDs(insn, 19, 31);
1842 tcg_gen_movi_tl(def, simm);
1843 } else { /* register */
1844 unsigned int rs2 = GET_FIELD(insn, 27, 31);
1845 if (rs2 == 0) {
1846 tcg_gen_movi_tl(def, 0);
1847 } else if (rs2 < 8) {
1848 r_rs2 = cpu_gregs[rs2];
1849 } else {
1850 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1851 }
1852 }
1853 return r_rs2;
1854 }
1855
1856 #ifdef TARGET_SPARC64
/* Compute a pointer to the trap-state entry for the current trap level:
   r_tsptr = env->ts + (env->tl & MAXTL_MASK) * sizeof(trap_state).
   NOTE(review): the parameter deliberately shadows the file-scope
   cpu_env global; the CPUState/CPUSPARCState offsetof mix below relies
   on the two being aliases in this tree — confirm if refactoring. */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
1881 #endif
1882
/* Bail out to the enclosing `illegal_insn` label when the CPU model
   lacks the named integer-unit feature. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Bail out to the enclosing `nfpu_insn` label when the CPU model lacks
   the named FPU feature. */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1889
1890 /* before an instruction, dc->pc must be static */
1891 static void disas_sparc_insn(DisasContext * dc)
1892 {
1893 unsigned int insn, opc, rs1, rs2, rd;
1894 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
1895 target_long simm;
1896
1897 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1898 tcg_gen_debug_insn_start(dc->pc);
1899 insn = ldl_code(dc->pc);
1900 opc = GET_FIELD(insn, 0, 1);
1901
1902 rd = GET_FIELD(insn, 2, 6);
1903
1904 cpu_tmp1 = cpu_src1 = tcg_temp_new();
1905 cpu_tmp2 = cpu_src2 = tcg_temp_new();
1906
1907 switch (opc) {
1908 case 0: /* branches/sethi */
1909 {
1910 unsigned int xop = GET_FIELD(insn, 7, 9);
1911 int32_t target;
1912 switch (xop) {
1913 #ifdef TARGET_SPARC64
1914 case 0x1: /* V9 BPcc */
1915 {
1916 int cc;
1917
1918 target = GET_FIELD_SP(insn, 0, 18);
1919 target = sign_extend(target, 19);
1920 target <<= 2;
1921 cc = GET_FIELD_SP(insn, 20, 21);
1922 if (cc == 0)
1923 do_branch(dc, target, insn, 0, cpu_cond);
1924 else if (cc == 2)
1925 do_branch(dc, target, insn, 1, cpu_cond);
1926 else
1927 goto illegal_insn;
1928 goto jmp_insn;
1929 }
1930 case 0x3: /* V9 BPr */
1931 {
1932 target = GET_FIELD_SP(insn, 0, 13) |
1933 (GET_FIELD_SP(insn, 20, 21) << 14);
1934 target = sign_extend(target, 16);
1935 target <<= 2;
1936 cpu_src1 = get_src1(insn, cpu_src1);
1937 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1938 goto jmp_insn;
1939 }
1940 case 0x5: /* V9 FBPcc */
1941 {
1942 int cc = GET_FIELD_SP(insn, 20, 21);
1943 if (gen_trap_ifnofpu(dc, cpu_cond))
1944 goto jmp_insn;
1945 target = GET_FIELD_SP(insn, 0, 18);
1946 target = sign_extend(target, 19);
1947 target <<= 2;
1948 do_fbranch(dc, target, insn, cc, cpu_cond);
1949 goto jmp_insn;
1950 }
1951 #else
1952 case 0x7: /* CBN+x */
1953 {
1954 goto ncp_insn;
1955 }
1956 #endif
1957 case 0x2: /* BN+x */
1958 {
1959 target = GET_FIELD(insn, 10, 31);
1960 target = sign_extend(target, 22);
1961 target <<= 2;
1962 do_branch(dc, target, insn, 0, cpu_cond);
1963 goto jmp_insn;
1964 }
1965 case 0x6: /* FBN+x */
1966 {
1967 if (gen_trap_ifnofpu(dc, cpu_cond))
1968 goto jmp_insn;
1969 target = GET_FIELD(insn, 10, 31);
1970 target = sign_extend(target, 22);
1971 target <<= 2;
1972 do_fbranch(dc, target, insn, 0, cpu_cond);
1973 goto jmp_insn;
1974 }
1975 case 0x4: /* SETHI */
1976 if (rd) { // nop
1977 uint32_t value = GET_FIELD(insn, 10, 31);
1978 TCGv r_const;
1979
1980 r_const = tcg_const_tl(value << 10);
1981 gen_movl_TN_reg(rd, r_const);
1982 tcg_temp_free(r_const);
1983 }
1984 break;
1985 case 0x0: /* UNIMPL */
1986 default:
1987 goto illegal_insn;
1988 }
1989 break;
1990 }
1991 break;
1992 case 1: /*CALL*/
1993 {
1994 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1995 TCGv r_const;
1996
1997 r_const = tcg_const_tl(dc->pc);
1998 gen_movl_TN_reg(15, r_const);
1999 tcg_temp_free(r_const);
2000 target += dc->pc;
2001 gen_mov_pc_npc(dc, cpu_cond);
2002 dc->npc = target;
2003 }
2004 goto jmp_insn;
2005 case 2: /* FPU & Logical Operations */
2006 {
2007 unsigned int xop = GET_FIELD(insn, 7, 12);
2008 if (xop == 0x3a) { /* generate trap */
2009 int cond;
2010
2011 cpu_src1 = get_src1(insn, cpu_src1);
2012 if (IS_IMM) {
2013 rs2 = GET_FIELD(insn, 25, 31);
2014 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2015 } else {
2016 rs2 = GET_FIELD(insn, 27, 31);
2017 if (rs2 != 0) {
2018 gen_movl_reg_TN(rs2, cpu_src2);
2019 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2020 } else
2021 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2022 }
2023
2024 cond = GET_FIELD(insn, 3, 6);
2025 if (cond == 0x8) { /* Trap Always */
2026 save_state(dc, cpu_cond);
2027 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2028 supervisor(dc))
2029 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2030 else
2031 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2032 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2033 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2034
2035 if (rs2 == 0 &&
2036 dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {
2037
2038 gen_helper_shutdown();
2039
2040 } else {
2041 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2042 }
2043 } else if (cond != 0) {
2044 TCGv r_cond = tcg_temp_new();
2045 int l1;
2046 #ifdef TARGET_SPARC64
2047 /* V9 icc/xcc */
2048 int cc = GET_FIELD_SP(insn, 11, 12);
2049
2050 save_state(dc, cpu_cond);
2051 if (cc == 0)
2052 gen_cond(r_cond, 0, cond, dc);
2053 else if (cc == 2)
2054 gen_cond(r_cond, 1, cond, dc);
2055 else
2056 goto illegal_insn;
2057 #else
2058 save_state(dc, cpu_cond);
2059 gen_cond(r_cond, 0, cond, dc);
2060 #endif
2061 l1 = gen_new_label();
2062 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2063
2064 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2065 supervisor(dc))
2066 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2067 else
2068 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2069 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2070 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2071 gen_helper_raise_exception(cpu_env, cpu_tmp32);
2072
2073 gen_set_label(l1);
2074 tcg_temp_free(r_cond);
2075 }
2076 gen_op_next_insn();
2077 tcg_gen_exit_tb(0);
2078 dc->is_br = 1;
2079 goto jmp_insn;
2080 } else if (xop == 0x28) {
2081 rs1 = GET_FIELD(insn, 13, 17);
2082 switch(rs1) {
2083 case 0: /* rdy */
2084 #ifndef TARGET_SPARC64
2085 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2086 manual, rdy on the microSPARC
2087 II */
2088 case 0x0f: /* stbar in the SPARCv8 manual,
2089 rdy on the microSPARC II */
2090 case 0x10 ... 0x1f: /* implementation-dependent in the
2091 SPARCv8 manual, rdy on the
2092 microSPARC II */
2093 /* Read Asr17 */
2094 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2095 TCGv r_const;
2096
2097 /* Read Asr17 for a Leon3 monoprocessor */
2098 r_const = tcg_const_tl((1 << 8)
2099 | (dc->def->nwindows - 1));
2100 gen_movl_TN_reg(rd, r_const);
2101 tcg_temp_free(r_const);
2102 break;
2103 }
2104 #endif
2105 gen_movl_TN_reg(rd, cpu_y);
2106 break;
2107 #ifdef TARGET_SPARC64
2108 case 0x2: /* V9 rdccr */
2109 gen_helper_compute_psr(cpu_env);
2110 gen_helper_rdccr(cpu_dst, cpu_env);
2111 gen_movl_TN_reg(rd, cpu_dst);
2112 break;
2113 case 0x3: /* V9 rdasi */
2114 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2115 gen_movl_TN_reg(rd, cpu_dst);
2116 break;
2117 case 0x4: /* V9 rdtick */
2118 {
2119 TCGv_ptr r_tickptr;
2120
2121 r_tickptr = tcg_temp_new_ptr();
2122 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2123 offsetof(CPUState, tick));
2124 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2125 tcg_temp_free_ptr(r_tickptr);
2126 gen_movl_TN_reg(rd, cpu_dst);
2127 }
2128 break;
2129 case 0x5: /* V9 rdpc */
2130 {
2131 TCGv r_const;
2132
2133 r_const = tcg_const_tl(dc->pc);
2134 gen_movl_TN_reg(rd, r_const);
2135 tcg_temp_free(r_const);
2136 }
2137 break;
2138 case 0x6: /* V9 rdfprs */
2139 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2140 gen_movl_TN_reg(rd, cpu_dst);
2141 break;
2142 case 0xf: /* V9 membar */
2143 break; /* no effect */
2144 case 0x13: /* Graphics Status */
2145 if (gen_trap_ifnofpu(dc, cpu_cond))
2146 goto jmp_insn;
2147 gen_movl_TN_reg(rd, cpu_gsr);
2148 break;
2149 case 0x16: /* Softint */
2150 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2151 gen_movl_TN_reg(rd, cpu_dst);
2152 break;
2153 case 0x17: /* Tick compare */
2154 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2155 break;
2156 case 0x18: /* System tick */
2157 {
2158 TCGv_ptr r_tickptr;
2159
2160 r_tickptr = tcg_temp_new_ptr();
2161 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2162 offsetof(CPUState, stick));
2163 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2164 tcg_temp_free_ptr(r_tickptr);
2165 gen_movl_TN_reg(rd, cpu_dst);
2166 }
2167 break;
2168 case 0x19: /* System tick compare */
2169 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2170 break;
2171 case 0x10: /* Performance Control */
2172 case 0x11: /* Performance Instrumentation Counter */
2173 case 0x12: /* Dispatch Control */
2174 case 0x14: /* Softint set, WO */
2175 case 0x15: /* Softint clear, WO */
2176 #endif
2177 default:
2178 goto illegal_insn;
2179 }
2180 #if !defined(CONFIG_USER_ONLY)
2181 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2182 #ifndef TARGET_SPARC64
2183 if (!supervisor(dc))
2184 goto priv_insn;
2185 gen_helper_compute_psr(cpu_env);
2186 dc->cc_op = CC_OP_FLAGS;
2187 gen_helper_rdpsr(cpu_dst, cpu_env);
2188 #else
2189 CHECK_IU_FEATURE(dc, HYPV);
2190 if (!hypervisor(dc))
2191 goto priv_insn;
2192 rs1 = GET_FIELD(insn, 13, 17);
2193 switch (rs1) {
2194 case 0: // hpstate
2195 // gen_op_rdhpstate();
2196 break;
2197 case 1: // htstate
2198 // gen_op_rdhtstate();
2199 break;
2200 case 3: // hintp
2201 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2202 break;
2203 case 5: // htba
2204 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2205 break;
2206 case 6: // hver
2207 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2208 break;
2209 case 31: // hstick_cmpr
2210 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2211 break;
2212 default:
2213 goto illegal_insn;
2214 }
2215 #endif
2216 gen_movl_TN_reg(rd, cpu_dst);
2217 break;
2218 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2219 if (!supervisor(dc))
2220 goto priv_insn;
2221 #ifdef TARGET_SPARC64
2222 rs1 = GET_FIELD(insn, 13, 17);
2223 switch (rs1) {
2224 case 0: // tpc
2225 {
2226 TCGv_ptr r_tsptr;
2227
2228 r_tsptr = tcg_temp_new_ptr();
2229 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2230 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2231 offsetof(trap_state, tpc));
2232 tcg_temp_free_ptr(r_tsptr);
2233 }
2234 break;
2235 case 1: // tnpc
2236 {
2237 TCGv_ptr r_tsptr;
2238
2239 r_tsptr = tcg_temp_new_ptr();
2240 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2241 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2242 offsetof(trap_state, tnpc));
2243 tcg_temp_free_ptr(r_tsptr);
2244 }
2245 break;
2246 case 2: // tstate
2247 {
2248 TCGv_ptr r_tsptr;
2249
2250 r_tsptr = tcg_temp_new_ptr();
2251 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2252 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2253 offsetof(trap_state, tstate));
2254 tcg_temp_free_ptr(r_tsptr);
2255 }
2256 break;
2257 case 3: // tt
2258 {
2259 TCGv_ptr r_tsptr;
2260
2261 r_tsptr = tcg_temp_new_ptr();
2262 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2263 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2264 offsetof(trap_state, tt));
2265 tcg_temp_free_ptr(r_tsptr);
2266 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2267 }
2268 break;
2269 case 4: // tick
2270 {
2271 TCGv_ptr r_tickptr;
2272
2273 r_tickptr = tcg_temp_new_ptr();
2274 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2275 offsetof(CPUState, tick));
2276 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2277 gen_movl_TN_reg(rd, cpu_tmp0);
2278 tcg_temp_free_ptr(r_tickptr);
2279 }
2280 break;
2281 case 5: // tba
2282 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2283 break;
2284 case 6: // pstate
2285 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2286 offsetof(CPUSPARCState, pstate));
2287 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2288 break;
2289 case 7: // tl
2290 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2291 offsetof(CPUSPARCState, tl));
2292 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2293 break;
2294 case 8: // pil
2295 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2296 offsetof(CPUSPARCState, psrpil));
2297 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2298 break;
2299 case 9: // cwp
2300 gen_helper_rdcwp(cpu_tmp0, cpu_env);
2301 break;
2302 case 10: // cansave
2303 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2304 offsetof(CPUSPARCState, cansave));
2305 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2306 break;
2307 case 11: // canrestore
2308 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2309 offsetof(CPUSPARCState, canrestore));
2310 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2311 break;
2312 case 12: // cleanwin
2313 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2314 offsetof(CPUSPARCState, cleanwin));
2315 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2316 break;
2317 case 13: // otherwin
2318 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2319 offsetof(CPUSPARCState, otherwin));
2320 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2321 break;
2322 case 14: // wstate
2323 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2324 offsetof(CPUSPARCState, wstate));
2325 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2326 break;
2327 case 16: // UA2005 gl
2328 CHECK_IU_FEATURE(dc, GL);
2329 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2330 offsetof(CPUSPARCState, gl));
2331 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2332 break;
2333 case 26: // UA2005 strand status
2334 CHECK_IU_FEATURE(dc, HYPV);
2335 if (!hypervisor(dc))
2336 goto priv_insn;
2337 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2338 break;
2339 case 31: // ver
2340 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2341 break;
2342 case 15: // fq
2343 default:
2344 goto illegal_insn;
2345 }
2346 #else
2347 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2348 #endif
2349 gen_movl_TN_reg(rd, cpu_tmp0);
2350 break;
2351 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2352 #ifdef TARGET_SPARC64
2353 save_state(dc, cpu_cond);
2354 gen_helper_flushw(cpu_env);
2355 #else
2356 if (!supervisor(dc))
2357 goto priv_insn;
2358 gen_movl_TN_reg(rd, cpu_tbr);
2359 #endif
2360 break;
2361 #endif
2362 } else if (xop == 0x34) { /* FPU Operations */
2363 if (gen_trap_ifnofpu(dc, cpu_cond))
2364 goto jmp_insn;
2365 gen_op_clear_ieee_excp_and_FTT();
2366 rs1 = GET_FIELD(insn, 13, 17);
2367 rs2 = GET_FIELD(insn, 27, 31);
2368 xop = GET_FIELD(insn, 18, 26);
2369 save_state(dc, cpu_cond);
2370 switch (xop) {
2371 case 0x1: /* fmovs */
2372 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2373 gen_update_fprs_dirty(rd);
2374 break;
2375 case 0x5: /* fnegs */
2376 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2377 gen_update_fprs_dirty(rd);
2378 break;
2379 case 0x9: /* fabss */
2380 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2381 gen_update_fprs_dirty(rd);
2382 break;
2383 case 0x29: /* fsqrts */
2384 CHECK_FPU_FEATURE(dc, FSQRT);
2385 gen_clear_float_exceptions();
2386 gen_helper_fsqrts(cpu_tmp32, cpu_env, cpu_fpr[rs2]);
2387 gen_helper_check_ieee_exceptions(cpu_env);
2388 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2389 gen_update_fprs_dirty(rd);
2390 break;
2391 case 0x2a: /* fsqrtd */
2392 CHECK_FPU_FEATURE(dc, FSQRT);
2393 gen_op_load_fpr_DT1(DFPREG(rs2));
2394 gen_clear_float_exceptions();
2395 gen_helper_fsqrtd(cpu_env);
2396 gen_helper_check_ieee_exceptions(cpu_env);
2397 gen_op_store_DT0_fpr(DFPREG(rd));
2398 gen_update_fprs_dirty(DFPREG(rd));
2399 break;
2400 case 0x2b: /* fsqrtq */
2401 CHECK_FPU_FEATURE(dc, FLOAT128);
2402 gen_op_load_fpr_QT1(QFPREG(rs2));
2403 gen_clear_float_exceptions();
2404 gen_helper_fsqrtq(cpu_env);
2405 gen_helper_check_ieee_exceptions(cpu_env);
2406 gen_op_store_QT0_fpr(QFPREG(rd));
2407 gen_update_fprs_dirty(QFPREG(rd));
2408 break;
2409 case 0x41: /* fadds */
2410 gen_clear_float_exceptions();
2411 gen_helper_fadds(cpu_tmp32, cpu_env, cpu_fpr[rs1],
2412 cpu_fpr[rs2]);
2413 gen_helper_check_ieee_exceptions(cpu_env);
2414 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2415 gen_update_fprs_dirty(rd);
2416 break;
2417 case 0x42: /* faddd */
2418 gen_op_load_fpr_DT0(DFPREG(rs1));
2419 gen_op_load_fpr_DT1(DFPREG(rs2));
2420 gen_clear_float_exceptions();
2421 gen_helper_faddd(cpu_env);
2422 gen_helper_check_ieee_exceptions(cpu_env);
2423 gen_op_store_DT0_fpr(DFPREG(rd));
2424 gen_update_fprs_dirty(DFPREG(rd));
2425 break;
2426 case 0x43: /* faddq */
2427 CHECK_FPU_FEATURE(dc, FLOAT128);
2428 gen_op_load_fpr_QT0(QFPREG(rs1));
2429 gen_op_load_fpr_QT1(QFPREG(rs2));
2430 gen_clear_float_exceptions();
2431 gen_helper_faddq(cpu_env);
2432 gen_helper_check_ieee_exceptions(cpu_env);
2433 gen_op_store_QT0_fpr(QFPREG(rd));
2434 gen_update_fprs_dirty(QFPREG(rd));
2435 break;
2436 case 0x45: /* fsubs */
2437 gen_clear_float_exceptions();
2438 gen_helper_fsubs(cpu_tmp32, cpu_env, cpu_fpr[rs1],
2439 cpu_fpr[rs2]);
2440 gen_helper_check_ieee_exceptions(cpu_env);
2441 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2442 gen_update_fprs_dirty(rd);
2443 break;
2444 case 0x46: /* fsubd */
2445 gen_op_load_fpr_DT0(DFPREG(rs1));
2446 gen_op_load_fpr_DT1(DFPREG(rs2));
2447 gen_clear_float_exceptions();
2448 gen_helper_fsubd(cpu_env);
2449 gen_helper_check_ieee_exceptions(cpu_env);
2450 gen_op_store_DT0_fpr(DFPREG(rd));
2451 gen_update_fprs_dirty(DFPREG(rd));
2452 break;
2453 case 0x47: /* fsubq */
2454 CHECK_FPU_FEATURE(dc, FLOAT128);
2455 gen_op_load_fpr_QT0(QFPREG(rs1));
2456 gen_op_load_fpr_QT1(QFPREG(rs2));
2457 gen_clear_float_exceptions();
2458 gen_helper_fsubq(cpu_env);
2459 gen_helper_check_ieee_exceptions(cpu_env);
2460 gen_op_store_QT0_fpr(QFPREG(rd));
2461 gen_update_fprs_dirty(QFPREG(rd));
2462 break;
2463 case 0x49: /* fmuls */
2464 CHECK_FPU_FEATURE(dc, FMUL);
2465 gen_clear_float_exceptions();
2466 gen_helper_fmuls(cpu_tmp32, cpu_env, cpu_fpr[rs1],
2467 cpu_fpr[rs2]);
2468 gen_helper_check_ieee_exceptions(cpu_env);
2469 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2470 gen_update_fprs_dirty(rd);
2471 break;
2472 case 0x4a: /* fmuld */
2473 CHECK_FPU_FEATURE(dc, FMUL);
2474 gen_op_load_fpr_DT0(DFPREG(rs1));
2475 gen_op_load_fpr_DT1(DFPREG(rs2));
2476 gen_clear_float_exceptions();
2477 gen_helper_fmuld(cpu_env);
2478 gen_helper_check_ieee_exceptions(cpu_env);
2479 gen_op_store_DT0_fpr(DFPREG(rd));
2480 gen_update_fprs_dirty(DFPREG(rd));
2481 break;
2482 case 0x4b: /* fmulq */
2483 CHECK_FPU_FEATURE(dc, FLOAT128);
2484 CHECK_FPU_FEATURE(dc, FMUL);
2485 gen_op_load_fpr_QT0(QFPREG(rs1));
2486 gen_op_load_fpr_QT1(QFPREG(rs2));
2487 gen_clear_float_exceptions();
2488 gen_helper_fmulq(cpu_env);
2489 gen_helper_check_ieee_exceptions(cpu_env);
2490 gen_op_store_QT0_fpr(QFPREG(rd));
2491 gen_update_fprs_dirty(QFPREG(rd));
2492 break;
2493 case 0x4d: /* fdivs */
2494 gen_clear_float_exceptions();
2495 gen_helper_fdivs(cpu_tmp32, cpu_env, cpu_fpr[rs1],
2496 cpu_fpr[rs2]);
2497 gen_helper_check_ieee_exceptions(cpu_env);
2498 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2499 gen_update_fprs_dirty(rd);
2500 break;
2501 case 0x4e: /* fdivd */
2502 gen_op_load_fpr_DT0(DFPREG(rs1));
2503 gen_op_load_fpr_DT1(DFPREG(rs2));
2504 gen_clear_float_exceptions();
2505 gen_helper_fdivd(cpu_env);
2506 gen_helper_check_ieee_exceptions(cpu_env);
2507 gen_op_store_DT0_fpr(DFPREG(rd));
2508 gen_update_fprs_dirty(DFPREG(rd));
2509 break;
2510 case 0x4f: /* fdivq */
2511 CHECK_FPU_FEATURE(dc, FLOAT128);
2512 gen_op_load_fpr_QT0(QFPREG(rs1));
2513 gen_op_load_fpr_QT1(QFPREG(rs2));
2514 gen_clear_float_exceptions();
2515 gen_helper_fdivq(cpu_env);
2516 gen_helper_check_ieee_exceptions(cpu_env);
2517 gen_op_store_QT0_fpr(QFPREG(rd));
2518 gen_update_fprs_dirty(QFPREG(rd));
2519 break;
2520 case 0x69: /* fsmuld */
2521 CHECK_FPU_FEATURE(dc, FSMULD);
2522 gen_clear_float_exceptions();
2523 gen_helper_fsmuld(cpu_env, cpu_fpr[rs1], cpu_fpr[rs2]);
2524 gen_helper_check_ieee_exceptions(cpu_env);
2525 gen_op_store_DT0_fpr(DFPREG(rd));
2526 gen_update_fprs_dirty(DFPREG(rd));
2527 break;
2528 case 0x6e: /* fdmulq */
2529 CHECK_FPU_FEATURE(dc, FLOAT128);
2530 gen_op_load_fpr_DT0(DFPREG(rs1));
2531 gen_op_load_fpr_DT1(DFPREG(rs2));
2532 gen_clear_float_exceptions();
2533 gen_helper_fdmulq(cpu_env);
2534 gen_helper_check_ieee_exceptions(cpu_env);
2535 gen_op_store_QT0_fpr(QFPREG(rd));
2536 gen_update_fprs_dirty(QFPREG(rd));
2537 break;
2538 case 0xc4: /* fitos */
2539 gen_clear_float_exceptions();
2540 gen_helper_fitos(cpu_tmp32, cpu_env, cpu_fpr[rs2]);
2541 gen_helper_check_ieee_exceptions(cpu_env);
2542 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2543 gen_update_fprs_dirty(rd);
2544 break;
2545 case 0xc6: /* fdtos */
2546 gen_op_load_fpr_DT1(DFPREG(rs2));
2547 gen_clear_float_exceptions();
2548 gen_helper_fdtos(cpu_tmp32, cpu_env);
2549 gen_helper_check_ieee_exceptions(cpu_env);
2550 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2551 gen_update_fprs_dirty(rd);
2552 break;
2553 case 0xc7: /* fqtos */
2554 CHECK_FPU_FEATURE(dc, FLOAT128);
2555 gen_op_load_fpr_QT1(QFPREG(rs2));
2556 gen_clear_float_exceptions();
2557 gen_helper_fqtos(cpu_tmp32, cpu_env);
2558 gen_helper_check_ieee_exceptions(cpu_env);
2559 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2560 gen_update_fprs_dirty(rd);
2561 break;
2562 case 0xc8: /* fitod */
2563 gen_helper_fitod(cpu_env, cpu_fpr[rs2]);
2564 gen_op_store_DT0_fpr(DFPREG(rd));
2565 gen_update_fprs_dirty(DFPREG(rd));
2566 break;
2567 case 0xc9: /* fstod */
2568 gen_helper_fstod(cpu_env, cpu_fpr[rs2]);
2569 gen_op_store_DT0_fpr(DFPREG(rd));
2570 gen_update_fprs_dirty(DFPREG(rd));
2571 break;
2572 case 0xcb: /* fqtod */
2573 CHECK_FPU_FEATURE(dc, FLOAT128);
2574 gen_op_load_fpr_QT1(QFPREG(rs2));
2575 gen_clear_float_exceptions();
2576 gen_helper_fqtod(cpu_env);
2577 gen_helper_check_ieee_exceptions(cpu_env);
2578 gen_op_store_DT0_fpr(DFPREG(rd));
2579 gen_update_fprs_dirty(DFPREG(rd));
2580 break;
2581 case 0xcc: /* fitoq */
2582 CHECK_FPU_FEATURE(dc, FLOAT128);
2583 gen_helper_fitoq(cpu_env, cpu_fpr[rs2]);
2584 gen_op_store_QT0_fpr(QFPREG(rd));
2585 gen_update_fprs_dirty(QFPREG(rd));
2586 break;
2587 case 0xcd: /* fstoq */
2588 CHECK_FPU_FEATURE(dc, FLOAT128);
2589 gen_helper_fstoq(cpu_env, cpu_fpr[rs2]);
2590 gen_op_store_QT0_fpr(QFPREG(rd));
2591 gen_update_fprs_dirty(QFPREG(rd));
2592 break;
2593 case 0xce: /* fdtoq */
2594 CHECK_FPU_FEATURE(dc, FLOAT128);
2595 gen_op_load_fpr_DT1(DFPREG(rs2));
2596 gen_helper_fdtoq(cpu_env);
2597 gen_op_store_QT0_fpr(QFPREG(rd));
2598 gen_update_fprs_dirty(QFPREG(rd));
2599 break;
2600 case 0xd1: /* fstoi */
2601 gen_clear_float_exceptions();
2602 gen_helper_fstoi(cpu_tmp32, cpu_env, cpu_fpr[rs2]);
2603 gen_helper_check_ieee_exceptions(cpu_env);
2604 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2605 gen_update_fprs_dirty(rd);
2606 break;
2607 case 0xd2: /* fdtoi */
2608 gen_op_load_fpr_DT1(DFPREG(rs2));
2609 gen_clear_float_exceptions();
2610 gen_helper_fdtoi(cpu_tmp32, cpu_env);
2611 gen_helper_check_ieee_exceptions(cpu_env);
2612 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2613 gen_update_fprs_dirty(rd);
2614 break;
2615 case 0xd3: /* fqtoi */
2616 CHECK_FPU_FEATURE(dc, FLOAT128);
2617 gen_op_load_fpr_QT1(QFPREG(rs2));
2618 gen_clear_float_exceptions();
2619 gen_helper_fqtoi(cpu_tmp32, cpu_env);
2620 gen_helper_check_ieee_exceptions(cpu_env);
2621 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2622 gen_update_fprs_dirty(rd);
2623 break;
2624 #ifdef TARGET_SPARC64
2625 case 0x2: /* V9 fmovd */
2626 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2627 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2628 cpu_fpr[DFPREG(rs2) + 1]);
2629 gen_update_fprs_dirty(DFPREG(rd));
2630 break;
2631 case 0x3: /* V9 fmovq */
2632 CHECK_FPU_FEATURE(dc, FLOAT128);
2633 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2634 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2635 cpu_fpr[QFPREG(rs2) + 1]);
2636 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2637 cpu_fpr[QFPREG(rs2) + 2]);
2638 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2639 cpu_fpr[QFPREG(rs2) + 3]);
2640 gen_update_fprs_dirty(QFPREG(rd));
2641 break;
2642 case 0x6: /* V9 fnegd */
2643 gen_op_load_fpr_DT1(DFPREG(rs2));
2644 gen_helper_fnegd(cpu_env);
2645 gen_op_store_DT0_fpr(DFPREG(rd));
2646 gen_update_fprs_dirty(DFPREG(rd));
2647 break;
2648 case 0x7: /* V9 fnegq */
2649 CHECK_FPU_FEATURE(dc, FLOAT128);
2650 gen_op_load_fpr_QT1(QFPREG(rs2));
2651 gen_helper_fnegq(cpu_env);
2652 gen_op_store_QT0_fpr(QFPREG(rd));
2653 gen_update_fprs_dirty(QFPREG(rd));
2654 break;
2655 case 0xa: /* V9 fabsd */
2656 gen_op_load_fpr_DT1(DFPREG(rs2));
2657 gen_helper_fabsd(cpu_env);
2658 gen_op_store_DT0_fpr(DFPREG(rd));
2659 gen_update_fprs_dirty(DFPREG(rd));
2660 break;
2661 case 0xb: /* V9 fabsq */
2662 CHECK_FPU_FEATURE(dc, FLOAT128);
2663 gen_op_load_fpr_QT1(QFPREG(rs2));
2664 gen_helper_fabsq(cpu_env);
2665 gen_op_store_QT0_fpr(QFPREG(rd));
2666 gen_update_fprs_dirty(QFPREG(rd));
2667 break;
2668 case 0x81: /* V9 fstox */
2669 gen_clear_float_exceptions();
2670 gen_helper_fstox(cpu_env, cpu_fpr[rs2]);
2671 gen_helper_check_ieee_exceptions(cpu_env);
2672 gen_op_store_DT0_fpr(DFPREG(rd));
2673 gen_update_fprs_dirty(DFPREG(rd));
2674 break;
2675 case 0x82: /* V9 fdtox */
2676 gen_op_load_fpr_DT1(DFPREG(rs2));
2677 gen_clear_float_exceptions();
2678 gen_helper_fdtox(cpu_env);
2679 gen_helper_check_ieee_exceptions(cpu_env);
2680 gen_op_store_DT0_fpr(DFPREG(rd));
2681 gen_update_fprs_dirty(DFPREG(rd));
2682 break;
2683 case 0x83: /* V9 fqtox */
2684 CHECK_FPU_FEATURE(dc, FLOAT128);
2685 gen_op_load_fpr_QT1(QFPREG(rs2));
2686 gen_clear_float_exceptions();
2687 gen_helper_fqtox(cpu_env);
2688 gen_helper_check_ieee_exceptions(cpu_env);
2689 gen_op_store_DT0_fpr(DFPREG(rd));
2690 gen_update_fprs_dirty(DFPREG(rd));
2691 break;
2692 case 0x84: /* V9 fxtos */
2693 gen_op_load_fpr_DT1(DFPREG(rs2));
2694 gen_clear_float_exceptions();
2695 gen_helper_fxtos(cpu_tmp32, cpu_env);
2696 gen_helper_check_ieee_exceptions(cpu_env);
2697 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2698 gen_update_fprs_dirty(rd);
2699 break;
2700 case 0x88: /* V9 fxtod */
2701 gen_op_load_fpr_DT1(DFPREG(rs2));
2702 gen_clear_float_exceptions();
2703 gen_helper_fxtod(cpu_env);
2704 gen_helper_check_ieee_exceptions(cpu_env);
2705 gen_op_store_DT0_fpr(DFPREG(rd));
2706 gen_update_fprs_dirty(DFPREG(rd));
2707 break;
2708 case 0x8c: /* V9 fxtoq */
2709 CHECK_FPU_FEATURE(dc, FLOAT128);
2710 gen_op_load_fpr_DT1(DFPREG(rs2));
2711 gen_clear_float_exceptions();
2712 gen_helper_fxtoq(cpu_env);
2713 gen_helper_check_ieee_exceptions(cpu_env);
2714 gen_op_store_QT0_fpr(QFPREG(rd));
2715 gen_update_fprs_dirty(QFPREG(rd));
2716 break;
2717 #endif
2718 default:
2719 goto illegal_insn;
2720 }
2721 } else if (xop == 0x35) { /* FPU Operations */
2722 #ifdef TARGET_SPARC64
2723 int cond;
2724 #endif
2725 if (gen_trap_ifnofpu(dc, cpu_cond))
2726 goto jmp_insn;
2727 gen_op_clear_ieee_excp_and_FTT();
2728 rs1 = GET_FIELD(insn, 13, 17);
2729 rs2 = GET_FIELD(insn, 27, 31);
2730 xop = GET_FIELD(insn, 18, 26);
2731 save_state(dc, cpu_cond);
2732 #ifdef TARGET_SPARC64
2733 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2734 int l1;
2735
2736 l1 = gen_new_label();
2737 cond = GET_FIELD_SP(insn, 14, 17);
2738 cpu_src1 = get_src1(insn, cpu_src1);
2739 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2740 0, l1);
2741 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2742 gen_update_fprs_dirty(rd);
2743 gen_set_label(l1);
2744 break;
2745 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2746 int l1;
2747
2748 l1 = gen_new_label();
2749 cond = GET_FIELD_SP(insn, 14, 17);
2750 cpu_src1 = get_src1(insn, cpu_src1);
2751 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2752 0, l1);
2753 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2754 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2755 gen_update_fprs_dirty(DFPREG(rd));
2756 gen_set_label(l1);
2757 break;
2758 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2759 int l1;
2760
2761 CHECK_FPU_FEATURE(dc, FLOAT128);
2762 l1 = gen_new_label();
2763 cond = GET_FIELD_SP(insn, 14, 17);
2764 cpu_src1 = get_src1(insn, cpu_src1);
2765 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2766 0, l1);
2767 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2768 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2769 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2770 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2771 gen_update_fprs_dirty(QFPREG(rd));
2772 gen_set_label(l1);
2773 break;
2774 }
2775 #endif
2776 switch (xop) {
2777 #ifdef TARGET_SPARC64
/* FMOVSCC(fcc): emit TCG code for V9 FMOVScc on %fcc'fcc' — conditionally
   move the single-precision FP register rs2 into rd.  Evaluates the FP
   condition field (insn bits 14-17, via GET_FIELD_SP) against the given
   %fcc with gen_fcond(); if the resulting value is zero the move is
   skipped by branching to label l1, otherwise cpu_fpr[rs2] is copied to
   cpu_fpr[rd] and rd is marked dirty.  Uses the enclosing disassembler's
   locals (insn, cond, rd, rs2), hence a macro rather than a function. */
2778 #define FMOVSCC(fcc) \
2779 { \
2780 TCGv r_cond; \
2781 int l1; \
2782 \
2783 l1 = gen_new_label(); \
2784 r_cond = tcg_temp_new(); \
2785 cond = GET_FIELD_SP(insn, 14, 17); \
2786 gen_fcond(r_cond, fcc, cond); \
2787 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2788 0, l1); \
2789 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2790 gen_update_fprs_dirty(rd); \
2791 gen_set_label(l1); \
2792 tcg_temp_free(r_cond); \
2793 }
/* FMOVDCC(fcc): as FMOVSCC but for a double-precision register pair —
   V9 FMOVDcc on %fcc'fcc'.  When the FP condition (gen_fcond on insn bits
   14-17) is non-zero, copies both 32-bit halves cpu_fpr[DFPREG(rs2)] and
   cpu_fpr[DFPREG(rs2)+1] into the corresponding DFPREG(rd) slots and
   marks the destination dirty; otherwise branches over the move to l1. */
2794 #define FMOVDCC(fcc) \
2795 { \
2796 TCGv r_cond; \
2797 int l1; \
2798 \
2799 l1 = gen_new_label(); \
2800 r_cond = tcg_temp_new(); \
2801 cond = GET_FIELD_SP(insn, 14, 17); \
2802 gen_fcond(r_cond, fcc, cond); \
2803 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2804 0, l1); \
2805 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2806 cpu_fpr[DFPREG(rs2)]); \
2807 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2808 cpu_fpr[DFPREG(rs2) + 1]); \
2809 gen_update_fprs_dirty(DFPREG(rd)); \
2810 gen_set_label(l1); \
2811 tcg_temp_free(r_cond); \
2812 }
/* FMOVQCC(fcc): quad-precision variant of FMOVSCC — V9 FMOVQcc on
   %fcc'fcc'.  On a non-zero FP condition, copies all four 32-bit words
   of the quad register (QFPREG(rs2) .. QFPREG(rs2)+3) into QFPREG(rd)
   and marks it dirty; on a zero condition, branches to l1 past the
   moves.  Callers gate this on CHECK_FPU_FEATURE(dc, FLOAT128). */
2813 #define FMOVQCC(fcc) \
2814 { \
2815 TCGv r_cond; \
2816 int l1; \
2817 \
2818 l1 = gen_new_label(); \
2819 r_cond = tcg_temp_new(); \
2820 cond = GET_FIELD_SP(insn, 14, 17); \
2821 gen_fcond(r_cond, fcc, cond); \
2822 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2823 0, l1); \
2824 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2825 cpu_fpr[QFPREG(rs2)]); \
2826 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2827 cpu_fpr[QFPREG(rs2) + 1]); \
2828 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2829 cpu_fpr[QFPREG(rs2) + 2]); \
2830 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2831 cpu_fpr[QFPREG(rs2) + 3]); \
2832 gen_update_fprs_dirty(QFPREG(rd)); \
2833 gen_set_label(l1); \
2834 tcg_temp_free(r_cond); \
2835 }
2836 case 0x001: /* V9 fmovscc %fcc0 */
2837 FMOVSCC(0);
2838 break;
2839 case 0x002: /* V9 fmovdcc %fcc0 */
2840 FMOVDCC(0);
2841 break;
2842 case 0x003: /* V9 fmovqcc %fcc0 */
2843 CHECK_FPU_FEATURE(dc, FLOAT128);
2844 FMOVQCC(0);
2845 break;
2846 case 0x041: /* V9 fmovscc %fcc1 */
2847 FMOVSCC(1);
2848 break;
2849 case 0x042: /* V9 fmovdcc %fcc1 */
2850 FMOVDCC(1);
2851 break;
2852 case 0x043: /* V9 fmovqcc %fcc1 */
2853 CHECK_FPU_FEATURE(dc, FLOAT128);
2854 FMOVQCC(1);
2855 break;
2856 case 0x081: /* V9 fmovscc %fcc2 */
2857 FMOVSCC(2);
2858 break;
2859 case 0x082: /* V9 fmovdcc %fcc2 */
2860 FMOVDCC(2);
2861 break;
2862 case 0x083: /* V9 fmovqcc %fcc2 */
2863 CHECK_FPU_FEATURE(dc, FLOAT128);
2864 FMOVQCC(2);
2865 break;
2866 case 0x0c1: /* V9 fmovscc %fcc3 */
2867 FMOVSCC(3);
2868 break;
2869 case 0x0c2: /* V9 fmovdcc %fcc3 */
2870 FMOVDCC(3);
2871 break;
2872 case 0x0c3: /* V9 fmovqcc %fcc3 */
2873 CHECK_FPU_FEATURE(dc, FLOAT128);
2874 FMOVQCC(3);
2875 break;
2876 #undef FMOVSCC
2877 #undef FMOVDCC
2878 #undef FMOVQCC
/* FMOVSCC(icc): integer-condition variant (redefined after the #undef of
   the %fcc forms above) — V9 FMOVScc on %icc (icc == 0) or %xcc
   (icc == 1), per the 0x101/0x181 case labels below.  Identical move
   logic to the FP-condition form, but the predicate comes from
   gen_cond() on the integer condition codes instead of gen_fcond(). */
2879 #define FMOVSCC(icc) \
2880 { \
2881 TCGv r_cond; \
2882 int l1; \
2883 \
2884 l1 = gen_new_label(); \
2885 r_cond = tcg_temp_new(); \
2886 cond = GET_FIELD_SP(insn, 14, 17); \
2887 gen_cond(r_cond, icc, cond, dc); \
2888 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2889 0, l1); \
2890 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2891 gen_update_fprs_dirty(rd); \
2892 gen_set_label(l1); \
2893 tcg_temp_free(r_cond); \
2894 }
/* FMOVDCC(icc): double-precision conditional move on integer condition
   codes (%icc when icc == 0, %xcc when icc == 1).  When gen_cond()
   yields non-zero, copies both 32-bit halves of DFPREG(rs2) into
   DFPREG(rd) and marks the destination dirty; otherwise skips to l1. */
2895 #define FMOVDCC(icc) \
2896 { \
2897 TCGv r_cond; \
2898 int l1; \
2899 \
2900 l1 = gen_new_label(); \
2901 r_cond = tcg_temp_new(); \
2902 cond = GET_FIELD_SP(insn, 14, 17); \
2903 gen_cond(r_cond, icc, cond, dc); \
2904 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2905 0, l1); \
2906 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2907 cpu_fpr[DFPREG(rs2)]); \
2908 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2909 cpu_fpr[DFPREG(rs2) + 1]); \
2910 gen_update_fprs_dirty(DFPREG(rd)); \
2911 gen_set_label(l1); \
2912 tcg_temp_free(r_cond); \
2913 }
/* FMOVQCC(icc): quad-precision conditional move on integer condition
   codes (%icc when icc == 0, %xcc when icc == 1).  When gen_cond()
   yields non-zero, copies all four 32-bit words of QFPREG(rs2) into
   QFPREG(rd) and marks it dirty; otherwise skips to l1.  Callers gate
   this on CHECK_FPU_FEATURE(dc, FLOAT128). */
2914 #define FMOVQCC(icc) \
2915 { \
2916 TCGv r_cond; \
2917 int l1; \
2918 \
2919 l1 = gen_new_label(); \
2920 r_cond = tcg_temp_new(); \
2921 cond = GET_FIELD_SP(insn, 14, 17); \
2922 gen_cond(r_cond, icc, cond, dc); \
2923 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2924 0, l1); \
2925 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2926 cpu_fpr[QFPREG(rs2)]); \
2927 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2928 cpu_fpr[QFPREG(rs2) + 1]); \
2929 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2930 cpu_fpr[QFPREG(rs2) + 2]); \
2931 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2932 cpu_fpr[QFPREG(rs2) + 3]); \
2933 gen_update_fprs_dirty(QFPREG(rd)); \
2934 gen_set_label(l1); \
2935 tcg_temp_free(r_cond); \
2936 }
2937
2938 case 0x101: /* V9 fmovscc %icc */
2939 FMOVSCC(0);
2940 break;
2941 case 0x102: /* V9 fmovdcc %icc */
2942 FMOVDCC(0);
2943 break;
2944 case 0x103: /* V9 fmovqcc %icc */
2945 CHECK_FPU_FEATURE(dc, FLOAT128);
2946 FMOVQCC(0);
2947 break;
2948 case 0x181: /* V9 fmovscc %xcc */
2949 FMOVSCC(1);
2950 break;
2951 case 0x182: /* V9 fmovdcc %xcc */
2952 FMOVDCC(1);
2953 break;
2954 case 0x183: /* V9 fmovqcc %xcc */
2955 CHECK_FPU_FEATURE(dc, FLOAT128);
2956 FMOVQCC(1);
2957 break;
2958 #undef FMOVSCC
2959 #undef FMOVDCC
2960 #undef FMOVQCC
2961 #endif
2962 case 0x51: /* fcmps, V9 %fcc */
2963 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2964 break;
2965 case 0x52: /* fcmpd, V9 %fcc */
2966 gen_op_load_fpr_DT0(DFPREG(rs1));
2967 gen_op_load_fpr_DT1(DFPREG(rs2));
2968 gen_op_fcmpd(rd & 3);
2969 break;
2970 case 0x53: /* fcmpq, V9 %fcc */
2971 CHECK_FPU_FEATURE(dc, FLOAT128);
2972 gen_op_load_fpr_QT0(QFPREG(rs1));
2973 gen_op_load_fpr_QT1(QFPREG(rs2));
2974 gen_op_fcmpq(rd & 3);
2975 break;
2976 case 0x55: /* fcmpes, V9 %fcc */
2977 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2978 break;
2979 case 0x56: /* fcmped, V9 %fcc */
2980 gen_op_load_fpr_DT0(DFPREG(rs1));
2981 gen_op_load_fpr_DT1(DFPREG(rs2));
2982 gen_op_fcmped(rd & 3);
2983 break;
2984 case 0x57: /* fcmpeq, V9 %fcc */
2985 CHECK_FPU_FEATURE(dc, FLOAT128);
2986 gen_op_load_fpr_QT0(QFPREG(rs1));
2987 gen_op_load_fpr_QT1(QFPREG(rs2));
2988 gen_op_fcmpeq(rd & 3);
2989 break;
2990 default:
2991 goto illegal_insn;
2992 }
2993 } else if (xop == 0x2) {
2994 // clr/mov shortcut
2995
2996 rs1 = GET_FIELD(insn, 13, 17);
2997 if (rs1 == 0) {
2998 // or %g0, x, y -> mov T0, x; mov y, T0
2999 if (IS_IMM) { /* immediate */
3000 TCGv r_const;
3001
3002 simm = GET_FIELDs(insn, 19, 31);
3003 r_const = tcg_const_tl(simm);
3004 gen_movl_TN_reg(rd, r_const);
3005 tcg_temp_free(r_const);
3006 } else { /* register */
3007 rs2 = GET_FIELD(insn, 27, 31);
3008 gen_movl_reg_TN(rs2, cpu_dst);
3009 gen_movl_TN_reg(rd, cpu_dst);
3010 }
3011 } else {
3012 cpu_src1 = get_src1(insn, cpu_src1);
3013 if (IS_IMM) { /* immediate */
3014 simm = GET_FIELDs(insn, 19, 31);
3015 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3016 gen_movl_TN_reg(rd, cpu_dst);
3017 } else { /* register */
3018 // or x, %g0, y -> mov T1, x; mov y, T1
3019 rs2 = GET_FIELD(insn, 27, 31);
3020 if (rs2 != 0) {
3021 gen_movl_reg_TN(rs2, cpu_src2);
3022 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3023 gen_movl_TN_reg(rd, cpu_dst);
3024 } else
3025 gen_movl_TN_reg(rd, cpu_src1);
3026 }
3027 }
3028 #ifdef TARGET_SPARC64
3029 } else if (xop == 0x25) { /* sll, V9 sllx */
3030 cpu_src1 = get_src1(insn, cpu_src1);
3031 if (IS_IMM) { /* immediate */
3032 simm = GET_FIELDs(insn, 20, 31);
3033 if (insn & (1 << 12)) {
3034 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3035 } else {
3036 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3037 }
3038 } else { /* register */
3039 rs2 = GET_FIELD(insn, 27, 31);
3040 gen_movl_reg_TN(rs2, cpu_src2);
3041 if (insn & (1 << 12)) {
3042 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3043 } else {
3044 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3045 }
3046 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3047 }
3048 gen_movl_TN_reg(rd, cpu_dst);
3049 } else if (xop == 0x26) { /* srl, V9 srlx */
3050 cpu_src1 = get_src1(insn, cpu_src1);
3051 if (IS_IMM) { /* immediate */
3052 simm = GET_FIELDs(insn, 20, 31);
3053 if (insn & (1 << 12)) {
3054 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3055 } else {
3056 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3057 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3058 }
3059 } else { /* register */
3060 rs2 = GET_FIELD(insn, 27, 31);
3061 gen_movl_reg_TN(rs2, cpu_src2);
3062 if (insn & (1 << 12)) {
3063 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3064 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3065 } else {
3066 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3067 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3068 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3069 }
3070 }
3071 gen_movl_TN_reg(rd, cpu_dst);
3072 } else if (xop == 0x27) { /* sra, V9 srax */
3073 cpu_src1 = get_src1(insn, cpu_src1);
3074 if (IS_IMM) { /* immediate */
3075 simm = GET_FIELDs(insn, 20, 31);
3076 if (insn & (1 << 12)) {
3077 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3078 } else {
3079 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3080 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3081 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3082 }
3083 } else { /* register */
3084 rs2 = GET_FIELD(insn, 27, 31);
3085 gen_movl_reg_TN(rs2, cpu_src2);
3086 if (insn & (1 << 12)) {
3087 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3088 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3089 } else {
3090 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3091 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3092 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3093 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3094 }
3095 }
3096 gen_movl_TN_reg(rd, cpu_dst);
3097 #endif
3098 } else if (xop < 0x36) {
3099 if (xop < 0x20) {
3100 cpu_src1 = get_src1(insn, cpu_src1);
3101 cpu_src2 = get_src2(insn, cpu_src2);
3102 switch (xop & ~0x10) {
3103 case 0x0: /* add */
3104 if (IS_IMM) {
3105 simm = GET_FIELDs(insn, 19, 31);
3106 if (xop & 0x10) {
3107 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3108 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3109 dc->cc_op = CC_OP_ADD;
3110 } else {
3111 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3112 }
3113 } else {
3114 if (xop & 0x10) {
3115 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3116 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3117 dc->cc_op = CC_OP_ADD;
3118 } else {
3119 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3120 }
3121 }
3122 break;
3123 case 0x1: /* and */
3124 if (IS_IMM) {
3125 simm = GET_FIELDs(insn, 19, 31);
3126 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3127 } else {
3128 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3129 }
3130 if (xop & 0x10) {
3131 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3132 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3133 dc->cc_op = CC_OP_LOGIC;
3134 }
3135 break;
3136 case 0x2: /* or */
3137 if (IS_IMM) {
3138 simm = GET_FIELDs(insn, 19, 31);
3139 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3140 } else {
3141 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3142 }
3143 if (xop & 0x10) {
3144 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3145 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3146 dc->cc_op = CC_OP_LOGIC;
3147 }
3148 break;
3149 case 0x3: /* xor */
3150 if (IS_IMM) {
3151 simm = GET_FIELDs(insn, 19, 31);
3152 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3153 } else {
3154 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3155 }
3156 if (xop & 0x10) {
3157 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3158 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3159 dc->cc_op = CC_OP_LOGIC;
3160 }
3161 break;
3162 case 0x4: /* sub */
3163 if (IS_IMM) {
3164 simm = GET_FIELDs(insn, 19, 31);
3165 if (xop & 0x10) {
3166 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3167 } else {
3168 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3169 }
3170 } else {
3171 if (xop & 0x10) {
3172 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3173 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3174 dc->cc_op = CC_OP_SUB;
3175 } else {
3176 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3177 }
3178 }
3179 break;
3180 case 0x5: /* andn */
3181 if (IS_IMM) {
3182 simm = GET_FIELDs(insn, 19, 31);
3183 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3184 } else {
3185 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3186 }
3187 if (xop & 0x10) {
3188 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3189 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3190 dc->cc_op = CC_OP_LOGIC;
3191 }
3192 break;
3193 case 0x6: /* orn */
3194 if (IS_IMM) {
3195 simm = GET_FIELDs(insn, 19, 31);
3196 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3197 } else {
3198 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3199 }
3200 if (xop & 0x10) {
3201 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3202 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3203 dc->cc_op = CC_OP_LOGIC;
3204 }
3205 break;
3206 case 0x7: /* xorn */
3207 if (IS_IMM) {
3208 simm = GET_FIELDs(insn, 19, 31);
3209 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3210 } else {
3211 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3212 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3213 }
3214 if (xop & 0x10) {
3215 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3216 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3217 dc->cc_op = CC_OP_LOGIC;
3218 }
3219 break;
3220 case 0x8: /* addx, V9 addc */
3221 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3222 (xop & 0x10));
3223 break;
3224 #ifdef TARGET_SPARC64
3225 case 0x9: /* V9 mulx */
3226 if (IS_IMM) {
3227 simm = GET_FIELDs(insn, 19, 31);
3228 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3229 } else {
3230 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3231 }
3232 break;
3233 #endif
3234 case 0xa: /* umul */
3235 CHECK_IU_FEATURE(dc, MUL);
3236 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3237 if (xop & 0x10) {
3238 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3239 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3240 dc->cc_op = CC_OP_LOGIC;
3241 }
3242 break;
3243 case 0xb: /* smul */
3244 CHECK_IU_FEATURE(dc, MUL);
3245 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3246 if (xop & 0x10) {
3247 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3248 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3249 dc->cc_op = CC_OP_LOGIC;
3250 }
3251 break;
3252 case 0xc: /* subx, V9 subc */
3253 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3254 (xop & 0x10));
3255 break;
3256 #ifdef TARGET_SPARC64
3257 case 0xd: /* V9 udivx */
3258 {
3259 TCGv r_temp1, r_temp2;
3260 r_temp1 = tcg_temp_local_new();
3261 r_temp2 = tcg_temp_local_new();
3262 tcg_gen_mov_tl(r_temp1, cpu_src1);
3263 tcg_gen_mov_tl(r_temp2, cpu_src2);
3264 gen_trap_ifdivzero_tl(r_temp2);
3265 tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
3266 tcg_temp_free(r_temp1);
3267 tcg_temp_free(r_temp2);
3268 }
3269 break;
3270 #endif
3271 case 0xe: /* udiv */
3272 CHECK_IU_FEATURE(dc, DIV);
3273 if (xop & 0x10) {
3274 gen_helper_udiv_cc(cpu_dst, cpu_src1, cpu_src2);
3275 dc->cc_op = CC_OP_DIV;
3276 } else {
3277 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3278 }
3279 break;
3280 case 0xf: /* sdiv */
3281 CHECK_IU_FEATURE(dc, DIV);
3282 if (xop & 0x10) {
3283 gen_helper_sdiv_cc(cpu_dst, cpu_src1, cpu_src2);
3284 dc->cc_op = CC_OP_DIV;
3285 } else {
3286 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3287 }
3288 break;
3289 default:
3290 goto illegal_insn;
3291 }
3292 gen_movl_TN_reg(rd, cpu_dst);
3293 } else {
3294 cpu_src1 = get_src1(insn, cpu_src1);
3295 cpu_src2 = get_src2(insn, cpu_src2);
3296 switch (xop) {
3297 case 0x20: /* taddcc */
3298 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3299 gen_movl_TN_reg(rd, cpu_dst);
3300 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3301 dc->cc_op = CC_OP_TADD;
3302 break;
3303 case 0x21: /* tsubcc */
3304 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3305 gen_movl_TN_reg(rd, cpu_dst);
3306 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3307 dc->cc_op = CC_OP_TSUB;
3308 break;
3309 case 0x22: /* taddcctv */
3310 save_state(dc, cpu_cond);
3311 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3312 gen_movl_TN_reg(rd, cpu_dst);
3313 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3314 dc->cc_op = CC_OP_TADDTV;
3315 break;
3316 case 0x23: /* tsubcctv */
3317 save_state(dc, cpu_cond);
3318 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3319 gen_movl_TN_reg(rd, cpu_dst);
3320 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3321 dc->cc_op = CC_OP_TSUBTV;
3322 break;
3323 case 0x24: /* mulscc */
3324 gen_helper_compute_psr(cpu_env);
3325 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3326 gen_movl_TN_reg(rd, cpu_dst);
3327 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3328 dc->cc_op = CC_OP_ADD;
3329 break;
3330 #ifndef TARGET_SPARC64
3331 case 0x25: /* sll */
3332 if (IS_IMM) { /* immediate */
3333 simm = GET_FIELDs(insn, 20, 31);
3334 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3335 } else { /* register */
3336 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3337 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3338 }
3339 gen_movl_TN_reg(rd, cpu_dst);
3340 break;
3341 case 0x26: /* srl */
3342 if (IS_IMM) { /* immediate */
3343 simm = GET_FIELDs(insn, 20, 31);
3344 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3345 } else { /* register */
3346 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3347 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3348 }
3349 gen_movl_TN_reg(rd, cpu_dst);
3350 break;
3351 case 0x27: /* sra */
3352 if (IS_IMM) { /* immediate */
3353 simm = GET_FIELDs(insn, 20, 31);
3354 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3355 } else { /* register */
3356 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3357 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3358 }
3359 gen_movl_TN_reg(rd, cpu_dst);
3360 break;
3361 #endif
3362 case 0x30:
3363 {
3364 switch(rd) {
3365 case 0: /* wry */
3366 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3367 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3368 break;
3369 #ifndef TARGET_SPARC64
3370 case 0x01 ... 0x0f: /* undefined in the
3371 SPARCv8 manual, nop
3372 on the microSPARC
3373 II */
3374 case 0x10 ... 0x1f: /* implementation-dependent
3375 in the SPARCv8
3376 manual, nop on the
3377 microSPARC II */
3378 break;
3379 #else
3380 case 0x2: /* V9 wrccr */
3381 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3382 gen_helper_wrccr(cpu_env, cpu_dst);
3383 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3384 dc->cc_op = CC_OP_FLAGS;
3385 break;
3386 case 0x3: /* V9 wrasi */
3387 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3388 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3389 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3390 break;
3391 case 0x6: /* V9 wrfprs */
3392 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3393 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3394 save_state(dc, cpu_cond);
3395 gen_op_next_insn();
3396 tcg_gen_exit_tb(0);
3397 dc->is_br = 1;
3398 break;
3399 case 0xf: /* V9 sir, nop if user */
3400 #if !defined(CONFIG_USER_ONLY)
3401 if (supervisor(dc)) {
3402 ; // XXX
3403 }
3404 #endif
3405 break;
3406 case 0x13: /* Graphics Status */
3407 if (gen_trap_ifnofpu(dc, cpu_cond))
3408 goto jmp_insn;
3409 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3410 break;
3411 case 0x14: /* Softint set */
3412 if (!supervisor(dc))
3413 goto illegal_insn;
3414 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3415 gen_helper_set_softint(cpu_env, cpu_tmp64);
3416 break;
3417 case 0x15: /* Softint clear */
3418 if (!supervisor(dc))
3419 goto illegal_insn;
3420 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3421 gen_helper_clear_softint(cpu_env, cpu_tmp64);
3422 break;
3423 case 0x16: /* Softint write */
3424 if (!supervisor(dc))
3425 goto illegal_insn;
3426 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3427 gen_helper_write_softint(cpu_env, cpu_tmp64);
3428 break;
3429 case 0x17: /* Tick compare */
3430 #if !defined(CONFIG_USER_ONLY)
3431 if (!supervisor(dc))
3432 goto illegal_insn;
3433 #endif
3434 {
3435 TCGv_ptr r_tickptr;
3436
3437 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3438 cpu_src2);
3439 r_tickptr = tcg_temp_new_ptr();
3440 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3441 offsetof(CPUState, tick));
3442 gen_helper_tick_set_limit(r_tickptr,
3443 cpu_tick_cmpr);
3444 tcg_temp_free_ptr(r_tickptr);
3445 }
3446 break;
3447 case 0x18: /* System tick */
3448 #if !defined(CONFIG_USER_ONLY)
3449 if (!supervisor(dc))
3450 goto illegal_insn;
3451 #endif
3452 {
3453 TCGv_ptr r_tickptr;
3454
3455 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3456 cpu_src2);
3457 r_tickptr = tcg_temp_new_ptr();
3458 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3459 offsetof(CPUState, stick));
3460 gen_helper_tick_set_count(r_tickptr,
3461 cpu_dst);
3462 tcg_temp_free_ptr(r_tickptr);
3463 }
3464 break;
3465 case 0x19: /* System tick compare */
3466 #if !defined(CONFIG_USER_ONLY)
3467 if (!supervisor(dc))
3468 goto illegal_insn;
3469 #endif
3470 {
3471 TCGv_ptr r_tickptr;
3472
3473 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3474 cpu_src2);
3475 r_tickptr = tcg_temp_new_ptr();
3476 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3477 offsetof(CPUState, stick));
3478 gen_helper_tick_set_limit(r_tickptr,
3479 cpu_stick_cmpr);
3480 tcg_temp_free_ptr(r_tickptr);
3481 }
3482 break;
3483
3484 case 0x10: /* Performance Control */
3485 case 0x11: /* Performance Instrumentation
3486 Counter */
3487 case 0x12: /* Dispatch Control */
3488 #endif
3489 default:
3490 goto illegal_insn;
3491 }
3492 }
3493 break;
3494 #if !defined(CONFIG_USER_ONLY)
3495 case 0x31: /* wrpsr, V9 saved, restored */
3496 {
3497 if (!supervisor(dc))
3498 goto priv_insn;
3499 #ifdef TARGET_SPARC64
3500 switch (rd) {
3501 case 0:
3502 gen_helper_saved(cpu_env);
3503 break;
3504 case 1:
3505 gen_helper_restored(cpu_env);
3506 break;
3507 case 2: /* UA2005 allclean */
3508 case 3: /* UA2005 otherw */
3509 case 4: /* UA2005 normalw */
3510 case 5: /* UA2005 invalw */
3511 // XXX
3512 default:
3513 goto illegal_insn;
3514 }
3515 #else
3516 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3517 gen_helper_wrpsr(cpu_env, cpu_dst);
3518 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3519 dc->cc_op = CC_OP_FLAGS;
3520 save_state(dc, cpu_cond);
3521 gen_op_next_insn();
3522 tcg_gen_exit_tb(0);
3523 dc->is_br = 1;
3524 #endif
3525 }
3526 break;
3527 case 0x32: /* wrwim, V9 wrpr */
3528 {
3529 if (!supervisor(dc))
3530 goto priv_insn;
3531 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3532 #ifdef TARGET_SPARC64
3533 switch (rd) {
3534 case 0: // tpc
3535 {
3536 TCGv_ptr r_tsptr;
3537
3538 r_tsptr = tcg_temp_new_ptr();
3539 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3540 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3541 offsetof(trap_state, tpc));
3542 tcg_temp_free_ptr(r_tsptr);
3543 }
3544 break;
3545 case 1: // tnpc
3546 {
3547 TCGv_ptr r_tsptr;
3548
3549 r_tsptr = tcg_temp_new_ptr();
3550 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3551 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3552 offsetof(trap_state, tnpc));
3553 tcg_temp_free_ptr(r_tsptr);
3554 }
3555 break;
3556 case 2: // tstate
3557 {
3558 TCGv_ptr r_tsptr;
3559
3560 r_tsptr = tcg_temp_new_ptr();
3561 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3562 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3563 offsetof(trap_state,
3564 tstate));
3565 tcg_temp_free_ptr(r_tsptr);
3566 }
3567 break;
3568 case 3: // tt
3569 {
3570 TCGv_ptr r_tsptr;
3571
3572 r_tsptr = tcg_temp_new_ptr();
3573 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3574 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3575 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3576 offsetof(trap_state, tt));
3577 tcg_temp_free_ptr(r_tsptr);
3578 }
3579 break;
3580 case 4: // tick
3581 {
3582 TCGv_ptr r_tickptr;
3583
3584 r_tickptr = tcg_temp_new_ptr();
3585 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3586 offsetof(CPUState, tick));
3587 gen_helper_tick_set_count(r_tickptr,
3588 cpu_tmp0);
3589 tcg_temp_free_ptr(r_tickptr);
3590 }
3591 break;
3592 case 5: // tba
3593 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3594 break;
3595 case 6: // pstate
3596 {
3597 TCGv r_tmp = tcg_temp_local_new();
3598
3599 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3600 save_state(dc, cpu_cond);
3601 gen_helper_wrpstate(cpu_env, r_tmp);
3602 tcg_temp_free(r_tmp);
3603 dc->npc = DYNAMIC_PC;
3604 }
3605 break;
3606 case 7: // tl
3607 {
3608 TCGv r_tmp = tcg_temp_local_new();
3609
3610 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3611 save_state(dc, cpu_cond);
3612 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3613 tcg_temp_free(r_tmp);
3614 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3615 offsetof(CPUSPARCState, tl));
3616 dc->npc = DYNAMIC_PC;
3617 }
3618 break;
3619 case 8: // pil
3620 gen_helper_wrpil(cpu_env, cpu_tmp0);
3621 break;
3622 case 9: // cwp
3623 gen_helper_wrcwp(cpu_env, cpu_tmp0);
3624 break;
3625 case 10: // cansave
3626 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3627 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3628 offsetof(CPUSPARCState,
3629 cansave));
3630 break;
3631 case 11: // canrestore
3632 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3633 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3634 offsetof(CPUSPARCState,
3635 canrestore));
3636 break;
3637 case 12: // cleanwin
3638 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3639 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3640 offsetof(CPUSPARCState,
3641 cleanwin));
3642 break;
3643 case 13: // otherwin
3644 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3645 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3646 offsetof(CPUSPARCState,
3647 otherwin));
3648 break;
3649 case 14: // wstate
3650 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3651 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3652 offsetof(CPUSPARCState,
3653 wstate));
3654 break;
3655 case 16: // UA2005 gl
3656 CHECK_IU_FEATURE(dc, GL);
3657 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3658 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3659 offsetof(CPUSPARCState, gl));
3660 break;
3661 case 26: // UA2005 strand status
3662 CHECK_IU_FEATURE(dc, HYPV);
3663 if (!hypervisor(dc))
3664 goto priv_insn;
3665 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3666 break;
3667 default:
3668 goto illegal_insn;
3669 }
3670 #else
3671 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3672 if (dc->def->nwindows != 32)
3673 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3674 (1 << dc->def->nwindows) - 1);
3675 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3676 #endif
3677 }
3678 break;
3679 case 0x33: /* wrtbr, UA2005 wrhpr */
3680 {
3681 #ifndef TARGET_SPARC64
3682 if (!supervisor(dc))
3683 goto priv_insn;
3684 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3685 #else
3686 CHECK_IU_FEATURE(dc, HYPV);
3687 if (!hypervisor(dc))
3688 goto priv_insn;
3689 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3690 switch (rd) {
3691 case 0: // hpstate
3692 // XXX gen_op_wrhpstate();
3693 save_state(dc, cpu_cond);
3694 gen_op_next_insn();
3695 tcg_gen_exit_tb(0);
3696 dc->is_br = 1;
3697 break;
3698 case 1: // htstate
3699 // XXX gen_op_wrhtstate();
3700 break;
3701 case 3: // hintp
3702 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3703 break;
3704 case 5: // htba
3705 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3706 break;
3707 case 31: // hstick_cmpr
3708 {
3709 TCGv_ptr r_tickptr;
3710
3711 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3712 r_tickptr = tcg_temp_new_ptr();
3713 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3714 offsetof(CPUState, hstick));
3715 gen_helper_tick_set_limit(r_tickptr,
3716 cpu_hstick_cmpr);
3717 tcg_temp_free_ptr(r_tickptr);
3718 }
3719 break;
3720 case 6: // hver readonly
3721 default:
3722 goto illegal_insn;
3723 }
3724 #endif
3725 }
3726 break;
3727 #endif
3728 #ifdef TARGET_SPARC64
3729 case 0x2c: /* V9 movcc */
3730 {
3731 int cc = GET_FIELD_SP(insn, 11, 12);
3732 int cond = GET_FIELD_SP(insn, 14, 17);
3733 TCGv r_cond;
3734 int l1;
3735
3736 r_cond = tcg_temp_new();
3737 if (insn & (1 << 18)) {
3738 if (cc == 0)
3739 gen_cond(r_cond, 0, cond, dc);
3740 else if (cc == 2)
3741 gen_cond(r_cond, 1, cond, dc);
3742 else
3743 goto illegal_insn;
3744 } else {
3745 gen_fcond(r_cond, cc, cond);
3746 }
3747
3748 l1 = gen_new_label();
3749
3750 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3751 if (IS_IMM) { /* immediate */
3752 TCGv r_const;
3753
3754 simm = GET_FIELD_SPs(insn, 0, 10);
3755 r_const = tcg_const_tl(simm);
3756 gen_movl_TN_reg(rd, r_const);
3757 tcg_temp_free(r_const);
3758 } else {
3759 rs2 = GET_FIELD_SP(insn, 0, 4);
3760 gen_movl_reg_TN(rs2, cpu_tmp0);
3761 gen_movl_TN_reg(rd, cpu_tmp0);
3762 }
3763 gen_set_label(l1);
3764 tcg_temp_free(r_cond);
3765 break;
3766 }
3767 case 0x2d: /* V9 sdivx */
3768 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3769 gen_movl_TN_reg(rd, cpu_dst);
3770 break;
3771 case 0x2e: /* V9 popc */
3772 {
3773 cpu_src2 = get_src2(insn, cpu_src2);
3774 gen_helper_popc(cpu_dst, cpu_src2);
3775 gen_movl_TN_reg(rd, cpu_dst);
3776 }
3777 case 0x2f: /* V9 movr */
3778 {
3779 int cond = GET_FIELD_SP(insn, 10, 12);
3780 int l1;
3781
3782 cpu_src1 = get_src1(insn, cpu_src1);
3783
3784 l1 = gen_new_label();
3785
3786 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3787 cpu_src1, 0, l1);
3788 if (IS_IMM) { /* immediate */
3789 TCGv r_const;
3790
3791 simm = GET_FIELD_SPs(insn, 0, 9);
3792 r_const = tcg_const_tl(simm);
3793 gen_movl_TN_reg(rd, r_const);
3794 tcg_temp_free(r_const);
3795 } else {
3796 rs2 = GET_FIELD_SP(insn, 0, 4);
3797 gen_movl_reg_TN(rs2, cpu_tmp0);
3798 gen_movl_TN_reg(rd, cpu_tmp0);
3799 }
3800 gen_set_label(l1);
3801 break;
3802 }
3803 #endif
3804 default:
3805 goto illegal_insn;
3806 }
3807 }
3808 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3809 #ifdef TARGET_SPARC64
3810 int opf = GET_FIELD_SP(insn, 5, 13);
3811 rs1 = GET_FIELD(insn, 13, 17);
3812 rs2 = GET_FIELD(insn, 27, 31);
3813 if (gen_trap_ifnofpu(dc, cpu_cond))
3814 goto jmp_insn;
3815
3816 switch (opf) {
3817 case 0x000: /* VIS I edge8cc */
3818 case 0x001: /* VIS II edge8n */
3819 case 0x002: /* VIS I edge8lcc */
3820 case 0x003: /* VIS II edge8ln */
3821 case 0x004: /* VIS I edge16cc */
3822 case 0x005: /* VIS II edge16n */
3823 case 0x006: /* VIS I edge16lcc */
3824 case 0x007: /* VIS II edge16ln */
3825 case 0x008: /* VIS I edge32cc */
3826 case 0x009: /* VIS II edge32n */
3827 case 0x00a: /* VIS I edge32lcc */
3828 case 0x00b: /* VIS II edge32ln */
3829 // XXX
3830 goto illegal_insn;
3831 case 0x010: /* VIS I array8 */
3832 CHECK_FPU_FEATURE(dc, VIS1);
3833 cpu_src1 = get_src1(insn, cpu_src1);
3834 gen_movl_reg_TN(rs2, cpu_src2);
3835 gen_helper_array8(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3836 gen_movl_TN_reg(rd, cpu_dst);
3837 break;
3838 case 0x012: /* VIS I array16 */
3839 CHECK_FPU_FEATURE(dc, VIS1);
3840 cpu_src1 = get_src1(insn, cpu_src1);
3841 gen_movl_reg_TN(rs2, cpu_src2);
3842 gen_helper_array8(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3843 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3844 gen_movl_TN_reg(rd, cpu_dst);
3845 break;
3846 case 0x014: /* VIS I array32 */
3847 CHECK_FPU_FEATURE(dc, VIS1);
3848 cpu_src1 = get_src1(insn, cpu_src1);
3849 gen_movl_reg_TN(rs2, cpu_src2);
3850 gen_helper_array8(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3851 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3852 gen_movl_TN_reg(rd, cpu_dst);
3853 break;
3854 case 0x018: /* VIS I alignaddr */
3855 CHECK_FPU_FEATURE(dc, VIS1);
3856 cpu_src1 = get_src1(insn, cpu_src1);
3857 gen_movl_reg_TN(rs2, cpu_src2);
3858 gen_helper_alignaddr(cpu_dst, cpu_env, cpu_src1, cpu_src2);
3859 gen_movl_TN_reg(rd, cpu_dst);
3860 break;
3861 case 0x019: /* VIS II bmask */
3862 case 0x01a: /* VIS I alignaddrl */
3863 // XXX
3864 goto illegal_insn;
3865 case 0x020: /* VIS I fcmple16 */
3866 CHECK_FPU_FEATURE(dc, VIS1);
3867 gen_op_load_fpr_DT0(DFPREG(rs1));
3868 gen_op_load_fpr_DT1(DFPREG(rs2));
3869 gen_helper_fcmple16(cpu_dst, cpu_env);
3870 gen_movl_TN_reg(rd, cpu_dst);
3871 break;
3872 case 0x022: /* VIS I fcmpne16 */
3873 CHECK_FPU_FEATURE(dc, VIS1);
3874 gen_op_load_fpr_DT0(DFPREG(rs1));
3875 gen_op_load_fpr_DT1(DFPREG(rs2));
3876 gen_helper_fcmpne16(cpu_dst, cpu_env);
3877 gen_movl_TN_reg(rd, cpu_dst);
3878 break;
3879 case 0x024: /* VIS I fcmple32 */
3880 CHECK_FPU_FEATURE(dc, VIS1);
3881 gen_op_load_fpr_DT0(DFPREG(rs1));
3882 gen_op_load_fpr_DT1(DFPREG(rs2));
3883 gen_helper_fcmple32(cpu_dst, cpu_env);
3884 gen_movl_TN_reg(rd, cpu_dst);
3885 break;
3886 case 0x026: /* VIS I fcmpne32 */
3887 CHECK_FPU_FEATURE(dc, VIS1);
3888 gen_op_load_fpr_DT0(DFPREG(rs1));
3889 gen_op_load_fpr_DT1(DFPREG(rs2));
3890 gen_helper_fcmpne32(cpu_dst, cpu_env);
3891 gen_movl_TN_reg(rd, cpu_dst);
3892 break;
3893 case 0x028: /* VIS I fcmpgt16 */
3894 CHECK_FPU_FEATURE(dc, VIS1);
3895 gen_op_load_fpr_DT0(DFPREG(rs1));
3896 gen_op_load_fpr_DT1(DFPREG(rs2));
3897 gen_helper_fcmpgt16(cpu_dst, cpu_env);
3898 gen_movl_TN_reg(rd, cpu_dst);
3899 break;
3900 case 0x02a: /* VIS I fcmpeq16 */
3901 CHECK_FPU_FEATURE(dc, VIS1);
3902 gen_op_load_fpr_DT0(DFPREG(rs1));
3903 gen_op_load_fpr_DT1(DFPREG(rs2));
3904 gen_helper_fcmpeq16(cpu_dst, cpu_env);
3905 gen_movl_TN_reg(rd, cpu_dst);
3906 break;
3907 case 0x02c: /* VIS I fcmpgt32 */
3908 CHECK_FPU_FEATURE(dc, VIS1);
3909 gen_op_load_fpr_DT0(DFPREG(rs1));
3910 gen_op_load_fpr_DT1(DFPREG(rs2));
3911 gen_helper_fcmpgt32(cpu_dst, cpu_env);
3912 gen_movl_TN_reg(rd, cpu_dst);
3913 break;
3914 case 0x02e: /* VIS I fcmpeq32 */
3915 CHECK_FPU_FEATURE(dc, VIS1);
3916 gen_op_load_fpr_DT0(DFPREG(rs1));
3917 gen_op_load_fpr_DT1(DFPREG(rs2));
3918 gen_helper_fcmpeq32(cpu_dst, cpu_env);
3919 gen_movl_TN_reg(rd, cpu_dst);
3920 break;
3921 case 0x031: /* VIS I fmul8x16 */
3922 CHECK_FPU_FEATURE(dc, VIS1);
3923 gen_op_load_fpr_DT0(DFPREG(rs1));
3924 gen_op_load_fpr_DT1(DFPREG(rs2));
3925 gen_helper_fmul8x16(cpu_env);
3926 gen_op_store_DT0_fpr(DFPREG(rd));
3927 gen_update_fprs_dirty(DFPREG(rd));
3928 break;
3929 case 0x033: /* VIS I fmul8x16au */
3930 CHECK_FPU_FEATURE(dc, VIS1);
3931 gen_op_load_fpr_DT0(DFPREG(rs1));
3932 gen_op_load_fpr_DT1(DFPREG(rs2));
3933 gen_helper_fmul8x16au(cpu_env);
3934 gen_op_store_DT0_fpr(DFPREG(rd));
3935 gen_update_fprs_dirty(DFPREG(rd));
3936 break;
3937 case 0x035: /* VIS I fmul8x16al */
3938 CHECK_FPU_FEATURE(dc, VIS1);
3939 gen_op_load_fpr_DT0(DFPREG(rs1));
3940 gen_op_load_fpr_DT1(DFPREG(rs2));
3941 gen_helper_fmul8x16al(cpu_env);
3942 gen_op_store_DT0_fpr(DFPREG(rd));
3943 gen_update_fprs_dirty(DFPREG(rd));
3944 break;
3945 case 0x036: /* VIS I fmul8sux16 */
3946 CHECK_FPU_FEATURE(dc, VIS1);
3947 gen_op_load_fpr_DT0(DFPREG(rs1));
3948 gen_op_load_fpr_DT1(DFPREG(rs2));
3949 gen_helper_fmul8sux16(cpu_env);
3950 gen_op_store_DT0_fpr(DFPREG(rd));
3951 gen_update_fprs_dirty(DFPREG(rd));
3952 break;
3953 case 0x037: /* VIS I fmul8ulx16 */
3954 CHECK_FPU_FEATURE(dc, VIS1);
3955 gen_op_load_fpr_DT0(DFPREG(rs1));
3956 gen_op_load_fpr_DT1(DFPREG(rs2));
3957 gen_helper_fmul8ulx16(cpu_env);
3958 gen_op_store_DT0_fpr(DFPREG(rd));
3959 gen_update_fprs_dirty(DFPREG(rd));
3960 break;
3961 case 0x038: /* VIS I fmuld8sux16 */
3962 CHECK_FPU_FEATURE(dc, VIS1);
3963 gen_op_load_fpr_DT0(DFPREG(rs1));
3964 gen_op_load_fpr_DT1(DFPREG(rs2));
3965 gen_helper_fmuld8sux16(cpu_env);
3966 gen_op_store_DT0_fpr(DFPREG(rd));
3967 gen_update_fprs_dirty(DFPREG(rd));
3968 break;
3969 case 0x039: /* VIS I fmuld8ulx16 */
3970 CHECK_FPU_FEATURE(dc, VIS1);
3971 gen_op_load_fpr_DT0(DFPREG(rs1));
3972 gen_op_load_fpr_DT1(DFPREG(rs2));
3973 gen_helper_fmuld8ulx16(cpu_env);
3974 gen_op_store_DT0_fpr(DFPREG(rd));
3975 gen_update_fprs_dirty(DFPREG(rd));
3976 break;
3977 case 0x03a: /* VIS I fpack32 */
3978 case 0x03b: /* VIS I fpack16 */
3979 case 0x03d: /* VIS I fpackfix */
3980 case 0x03e: /* VIS I pdist */
3981 // XXX
3982 goto illegal_insn;
3983 case 0x048: /* VIS I faligndata */
3984 CHECK_FPU_FEATURE(dc, VIS1);
3985 gen_op_load_fpr_DT0(DFPREG(rs1));
3986 gen_op_load_fpr_DT1(DFPREG(rs2));
3987 gen_helper_faligndata(cpu_env);
3988 gen_op_store_DT0_fpr(DFPREG(rd));
3989 gen_update_fprs_dirty(DFPREG(rd));
3990 break;
3991 case 0x04b: /* VIS I fpmerge */
3992 CHECK_FPU_FEATURE(dc, VIS1);
3993 gen_op_load_fpr_DT0(DFPREG(rs1));
3994 gen_op_load_fpr_DT1(DFPREG(rs2));
3995 gen_helper_fpmerge(cpu_env);
3996 gen_op_store_DT0_fpr(DFPREG(rd));
3997 gen_update_fprs_dirty(DFPREG(rd));
3998 break;
3999 case 0x04c: /* VIS II bshuffle */
4000 // XXX
4001 goto illegal_insn;
4002 case 0x04d: /* VIS I fexpand */
4003 CHECK_FPU_FEATURE(dc, VIS1);
4004 gen_op_load_fpr_DT0(DFPREG(rs1));
4005 gen_op_load_fpr_DT1(DFPREG(rs2));
4006 gen_helper_fexpand(cpu_env);
4007 gen_op_store_DT0_fpr(DFPREG(rd));
4008 gen_update_fprs_dirty(DFPREG(rd));
4009 break;
4010 case 0x050: /* VIS I fpadd16 */
4011 CHECK_FPU_FEATURE(dc, VIS1);
4012 gen_op_load_fpr_DT0(DFPREG(rs1));
4013 gen_op_load_fpr_DT1(DFPREG(rs2));
4014 gen_helper_fpadd16(cpu_env);
4015 gen_op_store_DT0_fpr(DFPREG(rd));
4016 gen_update_fprs_dirty(DFPREG(rd));
4017 break;
4018 case 0x051: /* VIS I fpadd16s */
4019 CHECK_FPU_FEATURE(dc, VIS1);
4020 gen_helper_fpadd16s(cpu_fpr[rd], cpu_env,
4021 cpu_fpr[rs1], cpu_fpr[rs2]);
4022 gen_update_fprs_dirty(rd);
4023 break;
4024 case 0x052: /* VIS I fpadd32 */
4025 CHECK_FPU_FEATURE(dc, VIS1);
4026 gen_op_load_fpr_DT0(DFPREG(rs1));
4027 gen_op_load_fpr_DT1(DFPREG(rs2));
4028 gen_helper_fpadd32(cpu_env);
4029 gen_op_store_DT0_fpr(DFPREG(rd));
4030 gen_update_fprs_dirty(DFPREG(rd));
4031 break;
4032 case 0x053: /* VIS I fpadd32s */
4033 CHECK_FPU_FEATURE(dc, VIS1);
4034 gen_helper_fpadd32s(cpu_fpr[rd], cpu_env,
4035 cpu_fpr[rs1], cpu_fpr[rs2]);
4036 gen_update_fprs_dirty(rd);
4037 break;
4038 case 0x054: /* VIS I fpsub16 */
4039 CHECK_FPU_FEATURE(dc, VIS1);
4040 gen_op_load_fpr_DT0(DFPREG(rs1));
4041 gen_op_load_fpr_DT1(DFPREG(rs2));
4042 gen_helper_fpsub16(cpu_env);
4043 gen_op_store_DT0_fpr(DFPREG(rd));
4044 gen_update_fprs_dirty(DFPREG(rd));
4045 break;
4046 case 0x055: /* VIS I fpsub16s */
4047 CHECK_FPU_FEATURE(dc, VIS1);
4048 gen_helper_fpsub16s(cpu_fpr[rd], cpu_env,
4049 cpu_fpr[rs1], cpu_fpr[rs2]);
4050 gen_update_fprs_dirty(rd);
4051 break;
4052 case 0x056: /* VIS I fpsub32 */
4053 CHECK_FPU_FEATURE(dc, VIS1);
4054 gen_op_load_fpr_DT0(DFPREG(rs1));
4055 gen_op_load_fpr_DT1(DFPREG(rs2));
4056 gen_helper_fpsub32(cpu_env);
4057 gen_op_store_DT0_fpr(DFPREG(rd));
4058 gen_update_fprs_dirty(DFPREG(rd));
4059 break;
4060 case 0x057: /* VIS I fpsub32s */
4061 CHECK_FPU_FEATURE(dc, VIS1);
4062 gen_helper_fpsub32s(cpu_fpr[rd], cpu_env,
4063 cpu_fpr[rs1], cpu_fpr[rs2]);
4064 gen_update_fprs_dirty(rd);
4065 break;
4066 case 0x060: /* VIS I fzero */
4067 CHECK_FPU_FEATURE(dc, VIS1);
4068 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4069 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4070 gen_update_fprs_dirty(DFPREG(rd));
4071 break;
4072 case 0x061: /* VIS I fzeros */
4073 CHECK_FPU_FEATURE(dc, VIS1);
4074 tcg_gen_movi_i32(cpu_fpr[rd], 0);
4075 gen_update_fprs_dirty(rd);
4076 break;
4077 case 0x062: /* VIS I fnor */
4078 CHECK_FPU_FEATURE(dc, VIS1);
4079 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4080 cpu_fpr[DFPREG(rs2)]);
4081 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1],
4082 cpu_fpr[DFPREG(rs1) + 1],
4083 cpu_fpr[DFPREG(rs2) + 1]);
4084 gen_update_fprs_dirty(DFPREG(rd));
4085 break;
4086 case 0x063: /* VIS I fnors */
4087 CHECK_FPU_FEATURE(dc, VIS1);
4088 tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4089 gen_update_fprs_dirty(rd);
4090 break;
4091 case 0x064: /* VIS I fandnot2 */
4092 CHECK_FPU_FEATURE(dc, VIS1);
4093 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4094 cpu_fpr[DFPREG(rs2)]);
4095 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4096 cpu_fpr[DFPREG(rs1) + 1],
4097 cpu_fpr[DFPREG(rs2) + 1]);
4098 gen_update_fprs_dirty(DFPREG(rd));
4099 break;
4100 case 0x065: /* VIS I fandnot2s */
4101 CHECK_FPU_FEATURE(dc, VIS1);
4102 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4103 gen_update_fprs_dirty(rd);
4104 break;
4105 case 0x066: /* VIS I fnot2 */
4106 CHECK_FPU_FEATURE(dc, VIS1);
4107 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4108 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4109 cpu_fpr[DFPREG(rs2) + 1]);
4110 gen_update_fprs_dirty(DFPREG(rd));
4111 break;
4112 case 0x067: /* VIS I fnot2s */
4113 CHECK_FPU_FEATURE(dc, VIS1);
4114 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4115 gen_update_fprs_dirty(rd);
4116 break;
4117 case 0x068: /* VIS I fandnot1 */
4118 CHECK_FPU_FEATURE(dc, VIS1);
4119 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4120 cpu_fpr[DFPREG(rs1)]);
4121 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4122 cpu_fpr[DFPREG(rs2) + 1],
4123 cpu_fpr[DFPREG(rs1) + 1]);
4124 gen_update_fprs_dirty(DFPREG(rd));
4125 break;
4126 case 0x069: /* VIS I fandnot1s */
4127 CHECK_FPU_FEATURE(dc, VIS1);
4128 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4129 gen_update_fprs_dirty(rd);
4130 break;
4131 case 0x06a: /* VIS I fnot1 */
4132 CHECK_FPU_FEATURE(dc, VIS1);
4133 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4134 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4135 cpu_fpr[DFPREG(rs1) + 1]);
4136 gen_update_fprs_dirty(DFPREG(rd));
4137 break;
4138 case 0x06b: /* VIS I fnot1s */
4139 CHECK_FPU_FEATURE(dc, VIS1);
4140 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4141 gen_update_fprs_dirty(rd);
4142 break;
4143 case 0x06c: /* VIS I fxor */
4144 CHECK_FPU_FEATURE(dc, VIS1);
4145 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4146 cpu_fpr[DFPREG(rs2)]);
4147 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4148 cpu_fpr[DFPREG(rs1) + 1],
4149 cpu_fpr[DFPREG(rs2) + 1]);
4150 gen_update_fprs_dirty(DFPREG(rd));
4151 break;
4152 case 0x06d: /* VIS I fxors */
4153 CHECK_FPU_FEATURE(dc, VIS1);
4154 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4155 gen_update_fprs_dirty(rd);
4156 break;
4157 case 0x06e: /* VIS I fnand */
4158 CHECK_FPU_FEATURE(dc, VIS1);
4159 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4160 cpu_fpr[DFPREG(rs2)]);
4161 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
4162 cpu_fpr[DFPREG(rs1) + 1],
4163 cpu_fpr[DFPREG(rs2) + 1]);
4164 gen_update_fprs_dirty(DFPREG(rd));
4165 break;
4166 case 0x06f: /* VIS I fnands */
4167 CHECK_FPU_FEATURE(dc, VIS1);
4168 tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4169 gen_update_fprs_dirty(rd);
4170 break;
4171 case 0x070: /* VIS I fand */
4172 CHECK_FPU_FEATURE(dc, VIS1);
4173 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4174 cpu_fpr[DFPREG(rs2)]);
4175 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4176 cpu_fpr[DFPREG(rs1) + 1],
4177 cpu_fpr[DFPREG(rs2) + 1]);
4178 gen_update_fprs_dirty(DFPREG(rd));
4179 break;
4180 case 0x071: /* VIS I fands */
4181 CHECK_FPU_FEATURE(dc, VIS1);
4182 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4183 gen_update_fprs_dirty(rd);
4184 break;
4185 case 0x072: /* VIS I fxnor */
4186 CHECK_FPU_FEATURE(dc, VIS1);
4187 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4188 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4189 cpu_fpr[DFPREG(rs1)]);
4190 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4191 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4192 cpu_fpr[DFPREG(rs1) + 1]);
4193 gen_update_fprs_dirty(DFPREG(rd));
4194 break;
4195 case 0x073: /* VIS I fxnors */
4196 CHECK_FPU_FEATURE(dc, VIS1);
4197 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4198 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4199 gen_update_fprs_dirty(rd);
4200 break;
4201 case 0x074: /* VIS I fsrc1 */
4202 CHECK_FPU_FEATURE(dc, VIS1);
4203 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4204 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4205 cpu_fpr[DFPREG(rs1) + 1]);
4206 gen_update_fprs_dirty(DFPREG(rd));
4207 break;
4208 case 0x075: /* VIS I fsrc1s */
4209 CHECK_FPU_FEATURE(dc, VIS1);
4210 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4211 gen_update_fprs_dirty(rd);
4212 break;
4213 case 0x076: /* VIS I fornot2 */
4214 CHECK_FPU_FEATURE(dc, VIS1);
4215 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4216 cpu_fpr[DFPREG(rs2)]);
4217 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4218 cpu_fpr[DFPREG(rs1) + 1],
4219 cpu_fpr[DFPREG(rs2) + 1]);
4220 gen_update_fprs_dirty(DFPREG(rd));
4221 break;
4222 case 0x077: /* VIS I fornot2s */
4223 CHECK_FPU_FEATURE(dc, VIS1);
4224 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4225 gen_update_fprs_dirty(rd);
4226 break;
4227 case 0x078: /* VIS I fsrc2 */
4228 CHECK_FPU_FEATURE(dc, VIS1);
4229 gen_op_load_fpr_DT0(DFPREG(rs2));
4230 gen_op_store_DT0_fpr(DFPREG(rd));
4231 gen_update_fprs_dirty(DFPREG(rd));
4232 break;
4233 case 0x079: /* VIS I fsrc2s */
4234 CHECK_FPU_FEATURE(dc, VIS1);
4235 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4236 gen_update_fprs_dirty(rd);
4237 break;
4238 case 0x07a: /* VIS I fornot1 */
4239 CHECK_FPU_FEATURE(dc, VIS1);
4240 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4241 cpu_fpr[DFPREG(rs1)]);
4242 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4243 cpu_fpr[DFPREG(rs2) + 1],
4244 cpu_fpr[DFPREG(rs1) + 1]);
4245 gen_update_fprs_dirty(DFPREG(rd));
4246 break;
4247 case 0x07b: /* VIS I fornot1s */
4248 CHECK_FPU_FEATURE(dc, VIS1);
4249 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4250 gen_update_fprs_dirty(rd);
4251 break;
4252 case 0x07c: /* VIS I for */
4253 CHECK_FPU_FEATURE(dc, VIS1);
4254 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4255 cpu_fpr[DFPREG(rs2)]);
4256 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4257 cpu_fpr[DFPREG(rs1) + 1],
4258 cpu_fpr[DFPREG(rs2) + 1]);
4259 gen_update_fprs_dirty(DFPREG(rd));
4260 break;
4261 case 0x07d: /* VIS I fors */
4262 CHECK_FPU_FEATURE(dc, VIS1);
4263 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4264 gen_update_fprs_dirty(rd);
4265 break;
4266 case 0x07e: /* VIS I fone */
4267 CHECK_FPU_FEATURE(dc, VIS1);
4268 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4269 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4270 gen_update_fprs_dirty(DFPREG(rd));
4271 break;
4272 case 0x07f: /* VIS I fones */
4273 CHECK_FPU_FEATURE(dc, VIS1);
4274 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4275 gen_update_fprs_dirty(rd);
4276 break;
4277 case 0x080: /* VIS I shutdown */
4278 case 0x081: /* VIS II siam */
4279 // XXX
4280 goto illegal_insn;
4281 default:
4282 goto illegal_insn;
4283 }
4284 #else
4285 goto ncp_insn;
4286 #endif
4287 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4288 #ifdef TARGET_SPARC64
4289 goto illegal_insn;
4290 #else
4291 goto ncp_insn;
4292 #endif
4293 #ifdef TARGET_SPARC64
4294 } else if (xop == 0x39) { /* V9 return */
4295 TCGv_i32 r_const;
4296
4297 save_state(dc, cpu_cond);
4298 cpu_src1 = get_src1(insn, cpu_src1);
4299 if (IS_IMM) { /* immediate */
4300 simm = GET_FIELDs(insn, 19, 31);
4301 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4302 } else { /* register */
4303 rs2 = GET_FIELD(insn, 27, 31);
4304 if (rs2) {
4305 gen_movl_reg_TN(rs2, cpu_src2);
4306 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4307 } else
4308 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4309 }
4310 gen_helper_restore(cpu_env);
4311 gen_mov_pc_npc(dc, cpu_cond);
4312 r_const = tcg_const_i32(3);
4313 gen_helper_check_align(cpu_dst, r_const);
4314 tcg_temp_free_i32(r_const);
4315 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4316 dc->npc = DYNAMIC_PC;
4317 goto jmp_insn;
4318 #endif
4319 } else {
4320 cpu_src1 = get_src1(insn, cpu_src1);
4321 if (IS_IMM) { /* immediate */
4322 simm = GET_FIELDs(insn, 19, 31);
4323 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4324 } else { /* register */
4325 rs2 = GET_FIELD(insn, 27, 31);
4326 if (rs2) {
4327 gen_movl_reg_TN(rs2, cpu_src2);
4328 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4329 } else
4330 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4331 }
4332 switch (xop) {
4333 case 0x38: /* jmpl */
4334 {
4335 TCGv r_pc;
4336 TCGv_i32 r_const;
4337
4338 r_pc = tcg_const_tl(dc->pc);
4339 gen_movl_TN_reg(rd, r_pc);
4340 tcg_temp_free(r_pc);
4341 gen_mov_pc_npc(dc, cpu_cond);
4342 r_const = tcg_const_i32(3);
4343 gen_helper_check_align(cpu_dst, r_const);
4344 tcg_temp_free_i32(r_const);
4345 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4346 dc->npc = DYNAMIC_PC;
4347 }
4348 goto jmp_insn;
4349 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4350 case 0x39: /* rett, V9 return */
4351 {
4352 TCGv_i32 r_const;
4353
4354 if (!supervisor(dc))
4355 goto priv_insn;
4356 gen_mov_pc_npc(dc, cpu_cond);
4357 r_const = tcg_const_i32(3);
4358 gen_helper_check_align(cpu_dst, r_const);
4359 tcg_temp_free_i32(r_const);
4360 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4361 dc->npc = DYNAMIC_PC;
4362 gen_helper_rett(cpu_env);
4363 }
4364 goto jmp_insn;
4365 #endif
4366 case 0x3b: /* flush */
4367 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4368 goto unimp_flush;
4369 /* nop */
4370 break;
4371 case 0x3c: /* save */
4372 save_state(dc, cpu_cond);
4373 gen_helper_save(cpu_env);
4374 gen_movl_TN_reg(rd, cpu_dst);
4375 break;
4376 case 0x3d: /* restore */
4377 save_state(dc, cpu_cond);
4378 gen_helper_restore(cpu_env);
4379 gen_movl_TN_reg(rd, cpu_dst);
4380 break;
4381 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4382 case 0x3e: /* V9 done/retry */
4383 {
4384 switch (rd) {
4385 case 0:
4386 if (!supervisor(dc))
4387 goto priv_insn;
4388 dc->npc = DYNAMIC_PC;
4389 dc->pc = DYNAMIC_PC;
4390 gen_helper_done(cpu_env);
4391 goto jmp_insn;
4392 case 1:
4393 if (!supervisor(dc))
4394 goto priv_insn;
4395 dc->npc = DYNAMIC_PC;
4396 dc->pc = DYNAMIC_PC;
4397 gen_helper_retry(cpu_env);
4398 goto jmp_insn;
4399 default:
4400 goto illegal_insn;
4401 }
4402 }
4403 break;
4404 #endif
4405 default:
4406 goto illegal_insn;
4407 }
4408 }
4409 break;
4410 }
4411 break;
4412 case 3: /* load/store instructions */
4413 {
4414 unsigned int xop = GET_FIELD(insn, 7, 12);
4415
4416 /* flush pending conditional evaluations before exposing
4417 cpu state */
4418 if (dc->cc_op != CC_OP_FLAGS) {
4419 dc->cc_op = CC_OP_FLAGS;
4420 gen_helper_compute_psr(cpu_env);
4421 }
4422 cpu_src1 = get_src1(insn, cpu_src1);
4423 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4424 rs2 = GET_FIELD(insn, 27, 31);
4425 gen_movl_reg_TN(rs2, cpu_src2);
4426 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4427 } else if (IS_IMM) { /* immediate */
4428 simm = GET_FIELDs(insn, 19, 31);
4429 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4430 } else { /* register */
4431 rs2 = GET_FIELD(insn, 27, 31);
4432 if (rs2 != 0) {
4433 gen_movl_reg_TN(rs2, cpu_src2);
4434 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4435 } else
4436 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4437 }
4438 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4439 (xop > 0x17 && xop <= 0x1d ) ||
4440 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4441 switch (xop) {
4442 case 0x0: /* ld, V9 lduw, load unsigned word */
4443 gen_address_mask(dc, cpu_addr);
4444 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4445 break;
4446 case 0x1: /* ldub, load unsigned byte */
4447 gen_address_mask(dc, cpu_addr);
4448 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4449 break;
4450 case 0x2: /* lduh, load unsigned halfword */
4451 gen_address_mask(dc, cpu_addr);
4452 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4453 break;
4454 case 0x3: /* ldd, load double word */
4455 if (rd & 1)
4456 goto illegal_insn;
4457 else {
4458 TCGv_i32 r_const;
4459
4460 save_state(dc, cpu_cond);
4461 r_const = tcg_const_i32(7);
4462 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4463 tcg_temp_free_i32(r_const);
4464 gen_address_mask(dc, cpu_addr);
4465 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4466 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4467 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4468 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4469 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4470 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4471 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4472 }
4473 break;
4474 case 0x9: /* ldsb, load signed byte */
4475 gen_address_mask(dc, cpu_addr);
4476 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4477 break;
4478 case 0xa: /* ldsh, load signed halfword */
4479 gen_address_mask(dc, cpu_addr);
4480 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4481 break;
4482 case 0xd: /* ldstub -- XXX: should be atomically */
4483 {
4484 TCGv r_const;
4485
4486 gen_address_mask(dc, cpu_addr);
4487 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4488 r_const = tcg_const_tl(0xff);
4489 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4490 tcg_temp_free(r_const);
4491 }
4492 break;
4493 case 0x0f: /* swap, swap register with memory. Also
4494 atomically */
4495 CHECK_IU_FEATURE(dc, SWAP);
4496 gen_movl_reg_TN(rd, cpu_val);
4497 gen_address_mask(dc, cpu_addr);
4498 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4499 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4500 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4501 break;
4502 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4503 case 0x10: /* lda, V9 lduwa, load word alternate */
4504 #ifndef TARGET_SPARC64
4505 if (IS_IMM)
4506 goto illegal_insn;
4507 if (!supervisor(dc))
4508 goto priv_insn;
4509 #endif
4510 save_state(dc, cpu_cond);
4511 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4512 break;
4513 case 0x11: /* lduba, load unsigned byte alternate */
4514 #ifndef TARGET_SPARC64
4515 if (IS_IMM)
4516 goto illegal_insn;
4517 if (!supervisor(dc))
4518 goto priv_insn;
4519 #endif
4520 save_state(dc, cpu_cond);
4521 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4522 break;
4523 case 0x12: /* lduha, load unsigned halfword alternate */
4524 #ifndef TARGET_SPARC64
4525 if (IS_IMM)
4526 goto illegal_insn;
4527 if (!supervisor(dc))
4528 goto priv_insn;
4529 #endif
4530 save_state(dc, cpu_cond);
4531 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4532 break;
4533 case 0x13: /* ldda, load double word alternate */
4534 #ifndef TARGET_SPARC64
4535 if (IS_IMM)
4536 goto illegal_insn;
4537 if (!supervisor(dc))
4538 goto priv_insn;
4539 #endif
4540 if (rd & 1)
4541 goto illegal_insn;
4542 save_state(dc, cpu_cond);
4543 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4544 goto skip_move;
4545 case 0x19: /* ldsba, load signed byte alternate */
4546 #ifndef TARGET_SPARC64
4547 if (IS_IMM)
4548 goto illegal_insn;
4549 if (!supervisor(dc))
4550 goto priv_insn;
4551 #endif
4552 save_state(dc, cpu_cond);
4553 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4554 break;
4555 case 0x1a: /* ldsha, load signed halfword alternate */
4556 #ifndef TARGET_SPARC64
4557 if (IS_IMM)
4558 goto illegal_insn;
4559 if (!supervisor(dc))
4560 goto priv_insn;
4561 #endif
4562 save_state(dc, cpu_cond);
4563 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4564 break;
4565 case 0x1d: /* ldstuba -- XXX: should be atomically */
4566 #ifndef TARGET_SPARC64
4567 if (IS_IMM)
4568 goto illegal_insn;
4569 if (!supervisor(dc))
4570 goto priv_insn;
4571 #endif
4572 save_state(dc, cpu_cond);
4573 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4574 break;
4575 case 0x1f: /* swapa, swap reg with alt. memory. Also
4576 atomically */
4577 CHECK_IU_FEATURE(dc, SWAP);
4578 #ifndef TARGET_SPARC64
4579 if (IS_IMM)
4580 goto illegal_insn;
4581 if (!supervisor(dc))
4582 goto priv_insn;
4583 #endif
4584 save_state(dc, cpu_cond);
4585 gen_movl_reg_TN(rd, cpu_val);
4586 gen_swap_asi(cpu_val, cpu_addr, insn);
4587 break;
4588
4589 #ifndef TARGET_SPARC64
4590 case 0x30: /* ldc */
4591 case 0x31: /* ldcsr */
4592 case 0x33: /* lddc */
4593 goto ncp_insn;
4594 #endif
4595 #endif
4596 #ifdef TARGET_SPARC64
4597 case 0x08: /* V9 ldsw */
4598 gen_address_mask(dc, cpu_addr);
4599 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4600 break;
4601 case 0x0b: /* V9 ldx */
4602 gen_address_mask(dc, cpu_addr);
4603 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4604 break;
4605 case 0x18: /* V9 ldswa */
4606 save_state(dc, cpu_cond);
4607 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4608 break;
4609 case 0x1b: /* V9 ldxa */
4610 save_state(dc, cpu_cond);
4611 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4612 break;
4613 case 0x2d: /* V9 prefetch, no effect */
4614 goto skip_move;
4615 case 0x30: /* V9 ldfa */
4616 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4617 goto jmp_insn;
4618 }
4619 save_state(dc, cpu_cond);
4620 gen_ldf_asi(cpu_addr, insn, 4, rd);
4621 gen_update_fprs_dirty(rd);
4622 goto skip_move;
4623 case 0x33: /* V9 lddfa */
4624 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4625 goto jmp_insn;
4626 }
4627 save_state(dc, cpu_cond);
4628 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4629 gen_update_fprs_dirty(DFPREG(rd));
4630 goto skip_move;
4631 case 0x3d: /* V9 prefetcha, no effect */
4632 goto skip_move;
4633 case 0x32: /* V9 ldqfa */
4634 CHECK_FPU_FEATURE(dc, FLOAT128);
4635 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4636 goto jmp_insn;
4637 }
4638 save_state(dc, cpu_cond);
4639 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4640 gen_update_fprs_dirty(QFPREG(rd));
4641 goto skip_move;
4642 #endif
4643 default:
4644 goto illegal_insn;
4645 }
4646 gen_movl_TN_reg(rd, cpu_val);
4647 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4648 skip_move: ;
4649 #endif
4650 } else if (xop >= 0x20 && xop < 0x24) {
4651 if (gen_trap_ifnofpu(dc, cpu_cond))
4652 goto jmp_insn;
4653 save_state(dc, cpu_cond);
4654 switch (xop) {
4655 case 0x20: /* ldf, load fpreg */
4656 gen_address_mask(dc, cpu_addr);
4657 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4658 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4659 gen_update_fprs_dirty(rd);
4660 break;
4661 case 0x21: /* ldfsr, V9 ldxfsr */
4662 #ifdef TARGET_SPARC64
4663 gen_address_mask(dc, cpu_addr);
4664 if (rd == 1) {
4665 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4666 gen_helper_ldxfsr(cpu_env, cpu_tmp64);
4667 } else {
4668 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4669 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4670 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4671 }
4672 #else
4673 {
4674 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4675 gen_helper_ldfsr(cpu_env, cpu_tmp32);
4676 }
4677 #endif
4678 break;
4679 case 0x22: /* ldqf, load quad fpreg */
4680 {
4681 TCGv_i32 r_const;
4682
4683 CHECK_FPU_FEATURE(dc, FLOAT128);
4684 r_const = tcg_const_i32(dc->mem_idx);
4685 gen_address_mask(dc, cpu_addr);
4686 gen_helper_ldqf(cpu_addr, r_const);
4687 tcg_temp_free_i32(r_const);
4688 gen_op_store_QT0_fpr(QFPREG(rd));
4689 gen_update_fprs_dirty(QFPREG(rd));
4690 }
4691 break;
4692 case 0x23: /* lddf, load double fpreg */
4693 {
4694 TCGv_i32 r_const;
4695
4696 r_const = tcg_const_i32(dc->mem_idx);
4697 gen_address_mask(dc, cpu_addr);
4698 gen_helper_lddf(cpu_addr, r_const);
4699 tcg_temp_free_i32(r_const);
4700 gen_op_store_DT0_fpr(DFPREG(rd));
4701 gen_update_fprs_dirty(DFPREG(rd));
4702 }
4703 break;
4704 default:
4705 goto illegal_insn;
4706 }
4707 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4708 xop == 0xe || xop == 0x1e) {
4709 gen_movl_reg_TN(rd, cpu_val);
4710 switch (xop) {
4711 case 0x4: /* st, store word */
4712 gen_address_mask(dc, cpu_addr);
4713 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4714 break;
4715 case 0x5: /* stb, store byte */
4716 gen_address_mask(dc, cpu_addr);
4717 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4718 break;
4719 case 0x6: /* sth, store halfword */
4720 gen_address_mask(dc, cpu_addr);
4721 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4722 break;
4723 case 0x7: /* std, store double word */
4724 if (rd & 1)
4725 goto illegal_insn;
4726 else {
4727 TCGv_i32 r_const;
4728
4729 save_state(dc, cpu_cond);
4730 gen_address_mask(dc, cpu_addr);
4731 r_const = tcg_const_i32(7);
4732 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4733 tcg_temp_free_i32(r_const);
4734 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4735 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4736 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4737 }
4738 break;
4739 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4740 case 0x14: /* sta, V9 stwa, store word alternate */
4741 #ifndef TARGET_SPARC64
4742 if (IS_IMM)
4743 goto illegal_insn;
4744 if (!supervisor(dc))
4745 goto priv_insn;
4746 #endif
4747 save_state(dc, cpu_cond);
4748 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4749 dc->npc = DYNAMIC_PC;
4750 break;
4751 case 0x15: /* stba, store byte alternate */
4752 #ifndef TARGET_SPARC64
4753 if (IS_IMM)
4754 goto illegal_insn;
4755 if (!supervisor(dc))
4756 goto priv_insn;
4757 #endif
4758 save_state(dc, cpu_cond);
4759 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4760 dc->npc = DYNAMIC_PC;
4761 break;
4762 case 0x16: /* stha, store halfword alternate */
4763 #ifndef TARGET_SPARC64
4764 if (IS_IMM)
4765 goto illegal_insn;
4766 if (!supervisor(dc))
4767 goto priv_insn;
4768 #endif
4769 save_state(dc, cpu_cond);
4770 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4771 dc->npc = DYNAMIC_PC;
4772 break;
4773 case 0x17: /* stda, store double word alternate */
4774 #ifndef TARGET_SPARC64
4775 if (IS_IMM)
4776 goto illegal_insn;
4777 if (!supervisor(dc))
4778 goto priv_insn;
4779 #endif
4780 if (rd & 1)
4781 goto illegal_insn;
4782 else {
4783 save_state(dc, cpu_cond);
4784 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4785 }
4786 break;
4787 #endif
4788 #ifdef TARGET_SPARC64
4789 case 0x0e: /* V9 stx */
4790 gen_address_mask(dc, cpu_addr);
4791 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4792 break;
4793 case 0x1e: /* V9 stxa */
4794 save_state(dc, cpu_cond);
4795 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4796 dc->npc = DYNAMIC_PC;
4797 break;
4798 #endif
4799 default:
4800 goto illegal_insn;
4801 }
4802 } else if (xop > 0x23 && xop < 0x28) {
4803 if (gen_trap_ifnofpu(dc, cpu_cond))
4804 goto jmp_insn;
4805 save_state(dc, cpu_cond);
4806 switch (xop) {
4807 case 0x24: /* stf, store fpreg */
4808 gen_address_mask(dc, cpu_addr);
4809 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4810 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4811 break;
4812 case 0x25: /* stfsr, V9 stxfsr */
4813 #ifdef TARGET_SPARC64
4814 gen_address_mask(dc, cpu_addr);
4815 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4816 if (rd == 1)
4817 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4818 else
4819 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4820 #else
4821 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4822 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4823 #endif
4824 break;
4825 case 0x26:
4826 #ifdef TARGET_SPARC64
4827 /* V9 stqf, store quad fpreg */
4828 {
4829 TCGv_i32 r_const;
4830
4831 CHECK_FPU_FEATURE(dc, FLOAT128);
4832 gen_op_load_fpr_QT0(QFPREG(rd));
4833 r_const = tcg_const_i32(dc->mem_idx);
4834 gen_address_mask(dc, cpu_addr);
4835 gen_helper_stqf(cpu_addr, r_const);
4836 tcg_temp_free_i32(r_const);
4837 }
4838 break;
4839 #else /* !TARGET_SPARC64 */
4840 /* stdfq, store floating point queue */
4841 #if defined(CONFIG_USER_ONLY)
4842 goto illegal_insn;
4843 #else
4844 if (!supervisor(dc))
4845 goto priv_insn;
4846 if (gen_trap_ifnofpu(dc, cpu_cond))
4847 goto jmp_insn;
4848 goto nfq_insn;
4849 #endif
4850 #endif
4851 case 0x27: /* stdf, store double fpreg */
4852 {
4853 TCGv_i32 r_const;
4854
4855 gen_op_load_fpr_DT0(DFPREG(rd));
4856 r_const = tcg_const_i32(dc->mem_idx);
4857 gen_address_mask(dc, cpu_addr);
4858 gen_helper_stdf(cpu_addr, r_const);
4859 tcg_temp_free_i32(r_const);
4860 }
4861 break;
4862 default:
4863 goto illegal_insn;
4864 }
4865 } else if (xop > 0x33 && xop < 0x3f) {
4866 save_state(dc, cpu_cond);
4867 switch (xop) {
4868 #ifdef TARGET_SPARC64
4869 case 0x34: /* V9 stfa */
4870 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4871 goto jmp_insn;
4872 }
4873 gen_stf_asi(cpu_addr, insn, 4, rd);
4874 break;
4875 case 0x36: /* V9 stqfa */
4876 {
4877 TCGv_i32 r_const;
4878
4879 CHECK_FPU_FEATURE(dc, FLOAT128);
4880 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4881 goto jmp_insn;
4882 }
4883 r_const = tcg_const_i32(7);
4884 gen_helper_check_align(cpu_addr, r_const);
4885 tcg_temp_free_i32(r_const);
4886 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4887 }
4888 break;
4889 case 0x37: /* V9 stdfa */
4890 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4891 goto jmp_insn;
4892 }
4893 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4894 break;
4895 case 0x3c: /* V9 casa */
4896 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4897 gen_movl_TN_reg(rd, cpu_val);
4898 break;
4899 case 0x3e: /* V9 casxa */
4900 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4901 gen_movl_TN_reg(rd, cpu_val);
4902 break;
4903 #else
4904 case 0x34: /* stc */
4905 case 0x35: /* stcsr */
4906 case 0x36: /* stdcq */
4907 case 0x37: /* stdc */
4908 goto ncp_insn;
4909 #endif
4910 default:
4911 goto illegal_insn;
4912 }
4913 } else
4914 goto illegal_insn;
4915 }
4916 break;
4917 }
4918 /* default case for non jump instructions */
4919 if (dc->npc == DYNAMIC_PC) {
4920 dc->pc = DYNAMIC_PC;
4921 gen_op_next_insn();
4922 } else if (dc->npc == JUMP_PC) {
4923 /* we can do a static jump */
4924 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4925 dc->is_br = 1;
4926 } else {
4927 dc->pc = dc->npc;
4928 dc->npc = dc->npc + 4;
4929 }
4930 jmp_insn:
4931 goto egress;
4932 illegal_insn:
4933 {
4934 TCGv_i32 r_const;
4935
4936 save_state(dc, cpu_cond);
4937 r_const = tcg_const_i32(TT_ILL_INSN);
4938 gen_helper_raise_exception(cpu_env, r_const);
4939 tcg_temp_free_i32(r_const);
4940 dc->is_br = 1;
4941 }
4942 goto egress;
4943 unimp_flush:
4944 {
4945 TCGv_i32 r_const;
4946
4947 save_state(dc, cpu_cond);
4948 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4949 gen_helper_raise_exception(cpu_env, r_const);
4950 tcg_temp_free_i32(r_const);
4951 dc->is_br = 1;
4952 }
4953 goto egress;
4954 #if !defined(CONFIG_USER_ONLY)
4955 priv_insn:
4956 {
4957 TCGv_i32 r_const;
4958
4959 save_state(dc, cpu_cond);
4960 r_const = tcg_const_i32(TT_PRIV_INSN);
4961 gen_helper_raise_exception(cpu_env, r_const);
4962 tcg_temp_free_i32(r_const);
4963 dc->is_br = 1;
4964 }
4965 goto egress;
4966 #endif
4967 nfpu_insn:
4968 save_state(dc, cpu_cond);
4969 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4970 dc->is_br = 1;
4971 goto egress;
4972 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4973 nfq_insn:
4974 save_state(dc, cpu_cond);
4975 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4976 dc->is_br = 1;
4977 goto egress;
4978 #endif
4979 #ifndef TARGET_SPARC64
4980 ncp_insn:
4981 {
4982 TCGv r_const;
4983
4984 save_state(dc, cpu_cond);
4985 r_const = tcg_const_i32(TT_NCP_INSN);
4986 gen_helper_raise_exception(cpu_env, r_const);
4987 tcg_temp_free(r_const);
4988 dc->is_br = 1;
4989 }
4990 goto egress;
4991 #endif
4992 egress:
4993 tcg_temp_free(cpu_tmp1);
4994 tcg_temp_free(cpu_tmp2);
4995 }
4996
/* Translate one guest translation block starting at tb->pc into TCG ops.
 *
 * spc != 0 selects "search PC" mode: re-translate an existing TB while
 * recording, per generated op, the guest pc/npc/icount into the
 * gen_opc_* side arrays so a host fault address can be mapped back to
 * guest CPU state (see restore_state_to_opc below).  spc == 0 is the
 * normal code-generation pass and instead fills in tb->size/tb->icount.
 */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* the npc of the first insn (delay-slot state) is passed in cs_base */
    dc->npc = (target_ulong) tb->cs_base;
    /* condition codes start out lazily evaluated */
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* scratch temporaries shared by disas_sparc_insn; freed at
       exit_gen_loop below */
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    /* "local" temps survive across branches within the generated code */
    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* emit a debug trap instead of the insn when a breakpoint is set
           on the current pc */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug(cpu_env);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            qemu_log("Search PC...\n");
            /* record guest pc/npc/icount for every op index generated
               so far; gaps get instr_start = 0 */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    /* free temps in reverse order of allocation */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        /* the TB ended without a branch insn: emit the epilogue ourselves */
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* search-PC pass: pad out the remaining op slots and publish the
           two possible branch targets for JUMP_PC restoration */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
5133
/* Normal code-generation entry point: translate the TB at tb->pc. */
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}
5138
/* Search-PC entry point: re-translate the TB while recording the
   guest pc/npc for each generated op (used for exception restore). */
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
5143
/* One-time translator initialization: register cpu_env and every TCG
 * global that mirrors a CPUState field, so generated code can refer to
 * guest registers and control state by name.  Safe to call multiple
 * times; the 'inited' latch makes all but the first call a no-op.
 *
 * NOTE(review): not thread-safe — assumes first call happens before any
 * concurrent translation; confirm against caller if that ever changes.
 */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        /* env pointer lives in the fixed host register AREG0 */
        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        /* SPARC64-only state: extended condition codes, ASI, FP state,
           tick comparators and hypervisor registers */
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        /* SPARC32 only: window invalid mask */
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        /* state common to both widths */
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                           "cc_op");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        /* g0 is hardwired to zero, so index 0 is skipped */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
5251
5252 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
5253 {
5254 target_ulong npc;
5255 env->pc = gen_opc_pc[pc_pos];
5256 npc = gen_opc_npc[pc_pos];
5257 if (npc == 1) {
5258 /* dynamic NPC: already stored */
5259 } else if (npc == 2) {
5260 /* jump PC: use 'cond' and the jump targets of the translation */
5261 if (env->cond) {
5262 env->npc = gen_opc_jump_pc[0];
5263 } else {
5264 env->npc = gen_opc_jump_pc[1];
5265 }
5266 } else {
5267 env->npc = npc;
5268 }
5269
5270 /* flush pending conditional evaluations before exposing cpu state */
5271 if (CC_OP != CC_OP_FLAGS) {
5272 helper_compute_psr(env);
5273 }
5274 }