]> git.proxmox.com Git - qemu.git/blob - target-sparc/translate.c
f68b3bcdd8646acfe16d20412b4e3fd5326eba8e
[qemu.git] / target-sparc / translate.c
1 /*
2 SPARC translation
3
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
6
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
11
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, see <http://www.gnu.org/licenses/>.
19 */
20
21 #include <stdarg.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24 #include <string.h>
25 #include <inttypes.h>
26
27 #include "cpu.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31
32 #define GEN_HELPER 1
33 #include "helper.h"
34
35 #define DEBUG_DISAS
36
37 #define DYNAMIC_PC 1 /* dynamic pc value */
38 #define JUMP_PC 2 /* dynamic pc value which takes only two values
39 according to jump_pc[T2] */
40
/* global register indexes */
static TCGv_ptr cpu_env, cpu_regwptr;
/* Lazy condition-code state: the operands and result of the last
   CC-setting operation, interpreted according to cpu_cc_op.  */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond, cpu_dst, cpu_addr, cpu_val;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
static TCGv_i32 cpu_softint;
#else
static TCGv cpu_wim;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
static TCGv_i32 cpu_tmp32;
static TCGv_i64 cpu_tmp64;
/* Floating point registers */
static TCGv_i32 cpu_fpr[TARGET_FPREGS];

/* Per-generated-op npc and delayed-branch targets, recorded so CPU
   state can be restored mid-TB (presumably used by the exception
   restore path elsewhere in this file).  */
static target_ulong gen_opc_npc[OPC_BUF_SIZE];
static target_ulong gen_opc_jump_pc[2];

#include "gen-icount.h"

typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;
    int mem_idx;
    int fpu_enabled;
    int address_mask_32bit;
    int singlestep;
    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
} DisasContext;

// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the extractors above.  */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Map an FP register number onto the index of the first single used
   to back a double/quad; on sparc64 bit 0 selects the upper bank.  */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
108
/* Sign-extend the low LEN bits of X to a full 32-bit signed value.
   The previous implementation shifted a signed int left, which is
   undefined behaviour once the sign bit is involved; do the work on
   unsigned values with the (v ^ m) - m trick instead.  Bits of X
   above LEN are ignored, matching the old shift pair.  LEN outside
   (0, 32) returns X unchanged (the old code returned X for LEN == 32). */
static int sign_extend(int x, int len)
{
    uint32_t v, m;

    if (len <= 0 || len >= 32) {
        return x;
    }
    v = (uint32_t)x & ((1u << len) - 1);
    m = 1u << (len - 1);
    return (int)((v ^ m) - m);
}
114
115 #define IS_IMM (insn & (1<<13))
116
117 /* floating point registers moves */
/* Copy the single-precision register pair src/src+1 into the dt0
   double-precision staging slot in CPUSPARCState (upper then lower word). */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}
125
/* Copy the single-precision register pair src/src+1 into the dt1
   double-precision staging slot in CPUSPARCState. */
static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}
133
/* Copy the dt0 double-precision staging slot back into the
   single-precision register pair dst/dst+1. */
static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}
141
/* Copy the four single-precision registers src..src+3 into the qt0
   quad-precision staging slot (most- to least-significant word). */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
153
/* Copy the four single-precision registers src..src+3 into the qt1
   quad-precision staging slot. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}
165
/* Copy the qt0 quad-precision staging slot back into the four
   single-precision registers dst..dst+3. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
177
/* moves */
/* Privilege-level tests derived from the MMU index in use: user-mode
   emulation is never privileged. */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= MMU_KERNEL_IDX)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == MMU_HYPV_IDX)
#else
#endif
#endif

/* Is 32-bit address masking in effect?  Under a 32-bit ABI it always
   is; otherwise follow the per-context flag. */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
199
/* On sparc64, truncate addr to 32 bits when address masking is in
   effect (see AM_CHECK); a no-op on 32-bit targets. */
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
207
208 static inline void gen_movl_reg_TN(int reg, TCGv tn)
209 {
210 if (reg == 0)
211 tcg_gen_movi_tl(tn, 0);
212 else if (reg < 8)
213 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
214 else {
215 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
216 }
217 }
218
219 static inline void gen_movl_TN_reg(int reg, TCGv tn)
220 {
221 if (reg == 0)
222 return;
223 else if (reg < 8)
224 tcg_gen_mov_tl(cpu_gregs[reg], tn);
225 else {
226 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
227 }
228 }
229
/* Finish the TB with a jump to (pc, npc).  When both targets lie on
   the same guest page as this TB and we are not single-stepping, the
   TB can be chained directly (goto_tb + exit_tb with the TB pointer
   tagged with tb_num); otherwise update cpu_pc/cpu_npc and return to
   the main loop unchained. */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        !s->singlestep) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        /* the low bits of the TB pointer encode which jump slot to patch */
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
251
// XXX suboptimal
/* Extract individual PSR condition-code bits (N/Z/V/C) from src into
   reg as 0 or 1: widen, shift the bit down, mask. */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
280
/* Raise TT_TOVF if dst = src1 + src2 overflowed as a signed 32-bit
   add: overflow iff the operands have the same sign (bit 31 of
   ~(src1^src2)) and the result's sign differs from src1's
   (bit 31 of src1^dst).  Clobbers the shared cpu_tmp0. */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    /* no overflow: skip the trap */
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
302
/* Tagged-operand check: raise TT_TOVF if either operand has a nonzero
   tag (either of its two low bits set).  Clobbers cpu_tmp0. */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
317
/* dst = src1 + immediate src2, latching operands and result into the
   cpu_cc_* globals for later lazy condition-code evaluation. */
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
325
/* dst = src1 + src2, latching operands and result into the cpu_cc_*
   globals for later lazy condition-code evaluation. */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
333
/* Return a new i32 temp holding the 32-bit carry out of the add that
   last set cpu_cc_*: carry = (cc_dst < cc_src) unsigned.  Caller frees
   the returned temp. */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src) */
#if TARGET_LONG_BITS == 64
    /* 64-bit target registers: compare only the low 32 bits */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
359
/* Return a new i32 temp holding the 32-bit borrow out of the subtract
   that last set cpu_cc_*: borrow = (cc_src < cc_src2) unsigned.
   Caller frees the returned temp. */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2) */
#if TARGET_LONG_BITS == 64
    /* 64-bit target registers: compare only the low 32 bits */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_trunc_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
385
/* Emit ADDX/ADDXcc: dst = src1 + src2 + icc.C.  The way the carry is
   obtained depends on how the flags were last set (dc->cc_op):
   known-zero, recomputable from cpu_cc_* with a setcond, or via the
   compute_C_icc helper as a last resort.  When update_cc is set the
   result is latched into cpu_cc_* and cc_op becomes CC_OP_ADDX. */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using an ADD2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_add2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto add_done;
        }
#endif
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 add_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
461
/* TADDcc: tagged add, latching operands/result into cpu_cc_* for lazy
   CC evaluation (tag checking itself is done at evaluation time). */
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
469
/* TADDccTV: tagged add that traps (TT_TOVF) on a set tag bit or on
   signed overflow, then latches operands/result into cpu_cc_*. */
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
479
/* Raise TT_TOVF if dst = src1 - src2 overflowed as a signed 32-bit
   subtract: overflow iff operand signs differ (bit 31 of src1^src2)
   and the result's sign differs from src1's.  Clobbers cpu_tmp0. */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    /* no overflow: skip the trap */
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
500
/* dst = src1 - immediate src2 with CC latching.  A zero immediate
   leaves dst == src1, so the cheaper CC_OP_LOGIC evaluation (N/Z from
   the result, C = V = 0) can be used instead of CC_OP_SUB. */
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2, DisasContext *dc)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    if (src2 == 0) {
        tcg_gen_mov_tl(cpu_cc_dst, src1);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
        dc->cc_op = CC_OP_LOGIC;
    } else {
        tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
516
/* dst = src1 - src2, latching operands and result into cpu_cc_* for
   later lazy condition-code evaluation. */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
524
/* Emit SUBX/SUBXcc: dst = src1 - src2 - icc.C.  Mirror image of
   gen_op_addx_int: the carry/borrow source depends on dc->cc_op, with
   a 32-bit-host fast path using SUB2 when the flags came from a
   subtract.  When update_cc is set, latches cpu_cc_* and switches
   cc_op to CC_OP_SUBX. */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
        {
            /* For 32-bit hosts, we can re-use the host's hardware carry
               generation by using a SUB2 opcode.  We discard the low
               part of the output.  Ideally we'd combine this operation
               with the add that generated the carry in the first place.  */
            TCGv dst_low = tcg_temp_new();
            tcg_gen_op6_i32(INDEX_op_sub2_i32, dst_low, dst,
                            cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(dst_low);
            goto sub_done;
        }
#endif
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

#if TCG_TARGET_REG_BITS == 32 && TARGET_LONG_BITS == 32
 sub_done:
#endif
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
600
/* TSUBcc: tagged subtract, latching operands/result into cpu_cc_* for
   lazy CC evaluation. */
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
608
/* TSUBccTV: tagged subtract that traps (TT_TOVF) on a set tag bit or
   on signed overflow, then latches operands/result into cpu_cc_*. */
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
618
/* MULScc multiply-step: one iteration of the V8 shift-and-add
   multiply.  If Y bit 0 is clear the addend is forced to zero; Y and
   the partial product are then shifted right one bit, with (N ^ V)
   from the current PSR shifted into the product's top bit.  Operands
   and result are latched in cpu_cc_* (caller sets cc_op).  Clobbers
   cpu_tmp0 and reads cpu_psr, so flags must be up to date on entry. */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
663
/* 32 x 32 -> 64 multiply for UMUL/SMUL: truncate both operands to 32
   bits, widen (signed or unsigned per sign_ext), multiply in 64 bits,
   store the high 32 bits into Y and the full product into dst.
   Clobbers cpu_tmp0. */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
    TCGv_i32 r_src1, r_src2;
    TCGv_i64 r_temp, r_temp2;

    r_src1 = tcg_temp_new_i32();
    r_src2 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(r_src1, src1);
    tcg_gen_trunc_tl_i32(r_src2, src2);

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext_i32_i64(r_temp, r_src2);
        tcg_gen_ext_i32_i64(r_temp2, r_src1);
    } else {
        tcg_gen_extu_i32_i64(r_temp, r_src2);
        tcg_gen_extu_i32_i64(r_temp2, r_src1);
    }

    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* Y <- high 32 bits of the product */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    tcg_gen_trunc_i64_tl(dst, r_temp2);

    tcg_temp_free_i64(r_temp2);

    tcg_temp_free_i32(r_src1);
    tcg_temp_free_i32(r_src2);
}
700
/* UMUL: unsigned 32x32->64 multiply (see gen_op_multiply). */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32->64 multiply (see gen_op_multiply). */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
712
#ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO if divisor is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

/* SDIVX: 64-bit signed division.  Traps on divide-by-zero, and maps
   the one overflowing case INT64_MIN / -1 to INT64_MIN explicitly
   (host div behaviour would be undefined there).  Local temps are
   required because the generated code branches across their uses. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;
    TCGv r_temp1, r_temp2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp1 = tcg_temp_local_new();
    r_temp2 = tcg_temp_local_new();
    tcg_gen_mov_tl(r_temp1, src1);
    tcg_gen_mov_tl(r_temp2, src2);
    gen_trap_ifdivzero_tl(r_temp2);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp1, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, r_temp1, r_temp2);
    gen_set_label(l2);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}
#endif
750
/* Integer condition-code evaluators: compute each Bicc condition as a
   0/1 value in dst from the PSR-format flags in src.  Several clobber
   the shared cpu_tmp0 temporary. */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
869
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered

  The FBfcc condition evaluators below compute each condition as a 0/1
  value in dst from the FCC field of cpu_fsr selected by fcc_offset.
  Most clobber the shared cpu_tmp0 temporary.
*/
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1019
/* Conditional branch, non-annulled form: if r_cond is nonzero go to
   pc1 (delay slot pc1+4), else fall through to pc2. */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
1034
/* Conditional branch with the annul bit set: when taken, execute the
   delay slot at pc2 then jump to pc1; when not taken, the delay slot
   is annulled and execution continues at pc2+4. */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
1049
/* Materialise a pending JUMP_PC: set cpu_npc to npc1 if r_cond is
   nonzero, else to npc2. */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
1067
/* call this function before using the condition register as it may
   have been set for a jump */
/* Resolves a pending JUMP_PC npc into a concrete cpu_npc value (which
   then becomes DYNAMIC_PC), freeing the condition register for reuse. */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
1077
/* Make cpu_npc match dc->npc: resolve a pending JUMP_PC via the
   condition, or store a known-static npc.  DYNAMIC_PC means cpu_npc
   is already current. */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1087
/* Synchronise the full guest state (pc, evaluated PSR flags, npc)
   before an operation that may fault or inspect CPU state. */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    /* flush pending conditional evaluations before exposing cpu state */
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr();
    }
    save_npc(dc, cond);
}
1098
/* Advance pc to the current npc, handling the three npc states:
   pending JUMP_PC (resolve it first), DYNAMIC_PC (copy at runtime),
   or a static value (track it at translation time only). */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
1112
/* Advance runtime pc/npc to the next sequential instruction:
   pc <- npc, npc <- npc + 4 (order matters). */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1118
/* Evaluate integer branch condition 'cond' (Bicc encoding 0x0-0xf)
   into r_dst as 0/1, reading icc (cpu_psr) or, on sparc64 when cc is
   set, xcc.  Forces lazy flags to CC_OP_FLAGS first so the flag
   register contents are valid. */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                            DisasContext *dc)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (dc->cc_op) {
    case CC_OP_FLAGS:
        break;
    default:
        /* flags are stale: materialise them into the PSR */
        gen_helper_compute_psr();
        dc->cc_op = CC_OP_FLAGS;
        break;
    }
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1191
/* Evaluate floating-point condition `cond` (FBfcc encoding, 0x0..0xf)
   against the fcc field selected by `cc` and set r_dst to the boolean
   outcome.  `offset` is the bit position of the selected fcc field in
   FSR relative to fcc0's position (the `- 10` in each arm). */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;       /* fcc0 */
        break;
    case 0x1:
        offset = 32 - 10; /* fcc1 */
        break;
    case 0x2:
        offset = 34 - 10; /* fcc2 */
        break;
    case 0x3:
        offset = 36 - 10; /* fcc3 */
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);                    /* fbn: never */
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset); /* not equal */
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset); /* less or greater */
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset); /* unordered or less */
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);  /* less */
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset); /* unordered or greater */
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);  /* greater */
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);  /* unordered */
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);                    /* fba: always */
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);  /* equal */
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset); /* unordered or equal */
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset); /* greater or equal */
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset); /* unordered/greater/equal */
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset); /* less or equal */
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset); /* unordered/less/equal */
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);  /* ordered */
        break;
    }
}
1263
1264 #ifdef TARGET_SPARC64
// Inverted logic: maps the BPr rcond encoding (1..7, 0 and 4 reserved)
// to the TCG condition under which the branch is NOT taken; used with
// brcondi in gen_cond_reg to skip setting the result to 1.
static const int gen_tcg_cond_reg[8] = {
    -1,          /* reserved */
    TCG_COND_NE, /* brz: taken when reg == 0 */
    TCG_COND_GT, /* brlez */
    TCG_COND_GE, /* brlz */
    -1,          /* reserved */
    TCG_COND_EQ, /* brnz */
    TCG_COND_LE, /* brgz */
    TCG_COND_LT, /* brgez */
};
1276
/* Evaluate a register-based condition (V9 BPr): r_dst <- 1 if the
   condition holds on r_src, else 0.  The table holds the inverted
   condition, so the branch skips the `movi 1` when it does not hold. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    /* inverted condition: jump over the `1` assignment when false */
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
1287 #endif
1288
/* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).  `offset` is the
   pre-scaled, sign-extended displacement; `cc` selects icc/xcc; the
   annul bit (bit 29) controls whether the delay slot executes. */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: the delay-slot instruction is skipped too */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: branch immediately, no delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* truly conditional: compute the condition value now */
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer the decision: npc becomes one of two values,
               resolved later via gen_generic_branch */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1329
/* XXX: potentially incorrect if dynamic npc */
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).  Same
   structure as do_branch, but the condition comes from an FSR fcc
   field selected by `cc`. */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: skip the delay-slot instruction as well */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: branch immediately, no delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* conditional: evaluate the fcc condition into r_cond */
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer: npc is one of two values until resolved */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1370
1371 #ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 branch-on-register-condition (BPr).  The condition is
   evaluated on r_reg itself rather than on the condition codes; note
   there are no unconditional encodings here. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        /* annulling form: emit the branch immediately */
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        /* defer the branch decision via the JUMP_PC mechanism */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
1391
/* Floating-point compare helpers (V9): dispatch on the destination fcc
   field (%fcc0..%fcc3).  The single-precision variants take the operand
   registers as TCG arguments; the double/quad variants take none — the
   callers load the operands into DT0/DT1 resp. QT0/QT1 beforehand (see
   the gen_op_load_fpr_DT*/QT* calls in disas_sparc_insn).  The fcmpe*
   variants are the signaling compares. */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

/* Compare doubles in DT0/DT1, result to %fcc[fccno]. */
static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

/* Compare quads in QT0/QT1, result to %fcc[fccno]. */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

/* Signaling single-precision compare, result to %fcc[fccno]. */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

/* Signaling double-precision compare (DT0/DT1), result to %fcc[fccno]. */
static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

/* Signaling quad-precision compare (QT0/QT1), result to %fcc[fccno]. */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
1499
1500 #else
1501
/* Pre-V9 targets have a single fcc field, so `fccno` is ignored and
   each wrapper forwards to the lone helper. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

/* Compare doubles (operands loaded into DT0/DT1 by the caller). */
static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

/* Compare quads (operands loaded into QT0/QT1 by the caller). */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

/* Signaling single-precision compare. */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

/* Signaling double-precision compare. */
static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

/* Signaling quad-precision compare. */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1531 #endif
1532
/* Raise a floating-point exception trap: replace the FSR ftt field
   with `fsr_flags`, then raise TT_FP_EXCP. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    /* clear the old ftt bits before or-ing in the new trap type */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
1543
/* If the FPU is disabled, raise a TT_NFPU_INSN trap and end the block.
   Returns 1 when the trap was emitted (caller must stop translating
   this insn), 0 otherwise.  User-mode emulation always has the FPU
   enabled, so the check is compiled out there. */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        /* expose pc/npc/flags before raising the exception */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
1560
/* Clear the FSR ftt and current-exception (cexc) fields. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1565
/* Reset accumulated softfloat exception flags before an FP operation. */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1570
1571 /* asi moves */
1572 #ifdef TARGET_SPARC64
/* Return a TCGv_i32 holding the ASI for this access: the current %asi
   register for the immediate form, otherwise the asi field encoded in
   the instruction.  Caller frees the returned temporary.
   NOTE(review): r_addr is unused here — kept for signature symmetry? */
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
1587
/* Emit an ASI load of `size` bytes into `dst` (sign-extended when
   `sign` is non-zero), using the ASI from the instruction or %asi. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1601
/* Emit an ASI store of the low `size` bytes of `src` to `addr`. */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1612
/* Emit an ASI load into FP register `rd` (`size` bytes); the helper
   writes the FP register file directly. */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1625
/* Emit an ASI store of FP register `rd` (`size` bytes); the helper
   reads the FP register file directly. */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1638
/* Emit a SWAPA: load the 32-bit word at `addr`, store the old `dst`
   value to the same location, and put the loaded word in `dst`.
   Implemented as a load followed by a store (not a single atomic op). */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    /* only now overwrite dst with the previously loaded value */
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1653
/* Emit an LDDA (load doubleword): the helper writes the register pair
   rd/rd+1 directly, so `hi` is unused here. */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}
1664
/* Emit an STDA (store doubleword): concatenate r[rd+1] (low half) with
   `hi` (high half) into a 64-bit value and store 8 bytes. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}
1677
/* Emit a 32-bit compare-and-swap (CASA): compare r[rd] with the word
   at `addr`; the helper performs the swap with `val2` and returns the
   old memory value in `dst`. */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}
1691
/* Emit a 64-bit compare-and-swap (CASXA); see gen_cas_asi. */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
1702
1703 #elif !defined(CONFIG_USER_ONLY)
1704
1705 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1706 int sign)
1707 {
1708 TCGv_i32 r_asi, r_size, r_sign;
1709
1710 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1711 r_size = tcg_const_i32(size);
1712 r_sign = tcg_const_i32(sign);
1713 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1714 tcg_temp_free(r_sign);
1715 tcg_temp_free(r_size);
1716 tcg_temp_free(r_asi);
1717 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1718 }
1719
1720 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1721 {
1722 TCGv_i32 r_asi, r_size;
1723
1724 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1725 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1726 r_size = tcg_const_i32(size);
1727 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1728 tcg_temp_free(r_size);
1729 tcg_temp_free(r_asi);
1730 }
1731
1732 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1733 {
1734 TCGv_i32 r_asi, r_size, r_sign;
1735 TCGv_i64 r_val;
1736
1737 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1738 r_size = tcg_const_i32(4);
1739 r_sign = tcg_const_i32(0);
1740 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1741 tcg_temp_free(r_sign);
1742 r_val = tcg_temp_new_i64();
1743 tcg_gen_extu_tl_i64(r_val, dst);
1744 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1745 tcg_temp_free_i64(r_val);
1746 tcg_temp_free(r_size);
1747 tcg_temp_free(r_asi);
1748 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1749 }
1750
1751 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1752 {
1753 TCGv_i32 r_asi, r_size, r_sign;
1754
1755 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1756 r_size = tcg_const_i32(8);
1757 r_sign = tcg_const_i32(0);
1758 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1759 tcg_temp_free(r_sign);
1760 tcg_temp_free(r_size);
1761 tcg_temp_free(r_asi);
1762 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1763 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1764 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1765 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1766 gen_movl_TN_reg(rd, hi);
1767 }
1768
1769 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1770 {
1771 TCGv_i32 r_asi, r_size;
1772
1773 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1774 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1775 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1776 r_size = tcg_const_i32(8);
1777 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1778 tcg_temp_free(r_size);
1779 tcg_temp_free(r_asi);
1780 }
1781 #endif
1782
1783 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Emit an LDSTUBA: load the byte at `addr` into `dst`, then store 0xff
   to the same location.  Implemented as a load followed by a store
   (not a single atomic op). */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
1799 #endif
1800
1801 static inline TCGv get_src1(unsigned int insn, TCGv def)
1802 {
1803 TCGv r_rs1 = def;
1804 unsigned int rs1;
1805
1806 rs1 = GET_FIELD(insn, 13, 17);
1807 if (rs1 == 0) {
1808 tcg_gen_movi_tl(def, 0);
1809 } else if (rs1 < 8) {
1810 r_rs1 = cpu_gregs[rs1];
1811 } else {
1812 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1813 }
1814 return r_rs1;
1815 }
1816
1817 static inline TCGv get_src2(unsigned int insn, TCGv def)
1818 {
1819 TCGv r_rs2 = def;
1820
1821 if (IS_IMM) { /* immediate */
1822 target_long simm = GET_FIELDs(insn, 19, 31);
1823 tcg_gen_movi_tl(def, simm);
1824 } else { /* register */
1825 unsigned int rs2 = GET_FIELD(insn, 27, 31);
1826 if (rs2 == 0) {
1827 tcg_gen_movi_tl(def, 0);
1828 } else if (rs2 < 8) {
1829 r_rs2 = cpu_gregs[rs2];
1830 } else {
1831 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1832 }
1833 }
1834 return r_rs2;
1835 }
1836
1837 #ifdef TARGET_SPARC64
/* Compute a pointer to the current trap-state entry:
   r_tsptr = env->ts[env->tl & MAXTL_MASK].
   NOTE(review): the parameter deliberately(?) shadows the file-scope
   `cpu_env` global — consider renaming to avoid confusion. */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_ptr cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
1862 #endif
1863
/* Abort translation of the current insn with an illegal-instruction
   trap when the CPU model lacks the given IU feature.  Only usable
   where the `illegal_insn` label is in scope (disas_sparc_insn). */
#define CHECK_IU_FEATURE(dc, FEATURE) \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
        goto illegal_insn;
/* Same, but for FPU features: jumps to the `nfpu_insn` label. */
#define CHECK_FPU_FEATURE(dc, FEATURE) \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
        goto nfpu_insn;
1870
1871 /* before an instruction, dc->pc must be static */
1872 static void disas_sparc_insn(DisasContext * dc)
1873 {
1874 unsigned int insn, opc, rs1, rs2, rd;
1875 TCGv cpu_src1, cpu_src2, cpu_tmp1, cpu_tmp2;
1876 target_long simm;
1877
1878 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
1879 tcg_gen_debug_insn_start(dc->pc);
1880 insn = ldl_code(dc->pc);
1881 opc = GET_FIELD(insn, 0, 1);
1882
1883 rd = GET_FIELD(insn, 2, 6);
1884
1885 cpu_tmp1 = cpu_src1 = tcg_temp_new();
1886 cpu_tmp2 = cpu_src2 = tcg_temp_new();
1887
1888 switch (opc) {
1889 case 0: /* branches/sethi */
1890 {
1891 unsigned int xop = GET_FIELD(insn, 7, 9);
1892 int32_t target;
1893 switch (xop) {
1894 #ifdef TARGET_SPARC64
1895 case 0x1: /* V9 BPcc */
1896 {
1897 int cc;
1898
1899 target = GET_FIELD_SP(insn, 0, 18);
1900 target = sign_extend(target, 19);
1901 target <<= 2;
1902 cc = GET_FIELD_SP(insn, 20, 21);
1903 if (cc == 0)
1904 do_branch(dc, target, insn, 0, cpu_cond);
1905 else if (cc == 2)
1906 do_branch(dc, target, insn, 1, cpu_cond);
1907 else
1908 goto illegal_insn;
1909 goto jmp_insn;
1910 }
1911 case 0x3: /* V9 BPr */
1912 {
1913 target = GET_FIELD_SP(insn, 0, 13) |
1914 (GET_FIELD_SP(insn, 20, 21) << 14);
1915 target = sign_extend(target, 16);
1916 target <<= 2;
1917 cpu_src1 = get_src1(insn, cpu_src1);
1918 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1919 goto jmp_insn;
1920 }
1921 case 0x5: /* V9 FBPcc */
1922 {
1923 int cc = GET_FIELD_SP(insn, 20, 21);
1924 if (gen_trap_ifnofpu(dc, cpu_cond))
1925 goto jmp_insn;
1926 target = GET_FIELD_SP(insn, 0, 18);
1927 target = sign_extend(target, 19);
1928 target <<= 2;
1929 do_fbranch(dc, target, insn, cc, cpu_cond);
1930 goto jmp_insn;
1931 }
1932 #else
1933 case 0x7: /* CBN+x */
1934 {
1935 goto ncp_insn;
1936 }
1937 #endif
1938 case 0x2: /* BN+x */
1939 {
1940 target = GET_FIELD(insn, 10, 31);
1941 target = sign_extend(target, 22);
1942 target <<= 2;
1943 do_branch(dc, target, insn, 0, cpu_cond);
1944 goto jmp_insn;
1945 }
1946 case 0x6: /* FBN+x */
1947 {
1948 if (gen_trap_ifnofpu(dc, cpu_cond))
1949 goto jmp_insn;
1950 target = GET_FIELD(insn, 10, 31);
1951 target = sign_extend(target, 22);
1952 target <<= 2;
1953 do_fbranch(dc, target, insn, 0, cpu_cond);
1954 goto jmp_insn;
1955 }
1956 case 0x4: /* SETHI */
1957 if (rd) { // nop
1958 uint32_t value = GET_FIELD(insn, 10, 31);
1959 TCGv r_const;
1960
1961 r_const = tcg_const_tl(value << 10);
1962 gen_movl_TN_reg(rd, r_const);
1963 tcg_temp_free(r_const);
1964 }
1965 break;
1966 case 0x0: /* UNIMPL */
1967 default:
1968 goto illegal_insn;
1969 }
1970 break;
1971 }
1972 break;
1973 case 1: /*CALL*/
1974 {
1975 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1976 TCGv r_const;
1977
1978 r_const = tcg_const_tl(dc->pc);
1979 gen_movl_TN_reg(15, r_const);
1980 tcg_temp_free(r_const);
1981 target += dc->pc;
1982 gen_mov_pc_npc(dc, cpu_cond);
1983 dc->npc = target;
1984 }
1985 goto jmp_insn;
1986 case 2: /* FPU & Logical Operations */
1987 {
1988 unsigned int xop = GET_FIELD(insn, 7, 12);
1989 if (xop == 0x3a) { /* generate trap */
1990 int cond;
1991
1992 cpu_src1 = get_src1(insn, cpu_src1);
1993 if (IS_IMM) {
1994 rs2 = GET_FIELD(insn, 25, 31);
1995 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1996 } else {
1997 rs2 = GET_FIELD(insn, 27, 31);
1998 if (rs2 != 0) {
1999 gen_movl_reg_TN(rs2, cpu_src2);
2000 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2001 } else
2002 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2003 }
2004
2005 cond = GET_FIELD(insn, 3, 6);
2006 if (cond == 0x8) { /* Trap Always */
2007 save_state(dc, cpu_cond);
2008 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2009 supervisor(dc))
2010 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2011 else
2012 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2013 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2014 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2015
2016 if (rs2 == 0 &&
2017 dc->def->features & CPU_FEATURE_TA0_SHUTDOWN) {
2018
2019 gen_helper_shutdown();
2020
2021 } else {
2022 gen_helper_raise_exception(cpu_tmp32);
2023 }
2024 } else if (cond != 0) {
2025 TCGv r_cond = tcg_temp_new();
2026 int l1;
2027 #ifdef TARGET_SPARC64
2028 /* V9 icc/xcc */
2029 int cc = GET_FIELD_SP(insn, 11, 12);
2030
2031 save_state(dc, cpu_cond);
2032 if (cc == 0)
2033 gen_cond(r_cond, 0, cond, dc);
2034 else if (cc == 2)
2035 gen_cond(r_cond, 1, cond, dc);
2036 else
2037 goto illegal_insn;
2038 #else
2039 save_state(dc, cpu_cond);
2040 gen_cond(r_cond, 0, cond, dc);
2041 #endif
2042 l1 = gen_new_label();
2043 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2044
2045 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2046 supervisor(dc))
2047 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2048 else
2049 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2050 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2051 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2052 gen_helper_raise_exception(cpu_tmp32);
2053
2054 gen_set_label(l1);
2055 tcg_temp_free(r_cond);
2056 }
2057 gen_op_next_insn();
2058 tcg_gen_exit_tb(0);
2059 dc->is_br = 1;
2060 goto jmp_insn;
2061 } else if (xop == 0x28) {
2062 rs1 = GET_FIELD(insn, 13, 17);
2063 switch(rs1) {
2064 case 0: /* rdy */
2065 #ifndef TARGET_SPARC64
2066 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2067 manual, rdy on the microSPARC
2068 II */
2069 case 0x0f: /* stbar in the SPARCv8 manual,
2070 rdy on the microSPARC II */
2071 case 0x10 ... 0x1f: /* implementation-dependent in the
2072 SPARCv8 manual, rdy on the
2073 microSPARC II */
2074 /* Read Asr17 */
2075 if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
2076 TCGv r_const;
2077
2078 /* Read Asr17 for a Leon3 monoprocessor */
2079 r_const = tcg_const_tl((1 << 8)
2080 | (dc->def->nwindows - 1));
2081 gen_movl_TN_reg(rd, r_const);
2082 tcg_temp_free(r_const);
2083 break;
2084 }
2085 #endif
2086 gen_movl_TN_reg(rd, cpu_y);
2087 break;
2088 #ifdef TARGET_SPARC64
2089 case 0x2: /* V9 rdccr */
2090 gen_helper_compute_psr();
2091 gen_helper_rdccr(cpu_dst);
2092 gen_movl_TN_reg(rd, cpu_dst);
2093 break;
2094 case 0x3: /* V9 rdasi */
2095 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2096 gen_movl_TN_reg(rd, cpu_dst);
2097 break;
2098 case 0x4: /* V9 rdtick */
2099 {
2100 TCGv_ptr r_tickptr;
2101
2102 r_tickptr = tcg_temp_new_ptr();
2103 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2104 offsetof(CPUState, tick));
2105 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2106 tcg_temp_free_ptr(r_tickptr);
2107 gen_movl_TN_reg(rd, cpu_dst);
2108 }
2109 break;
2110 case 0x5: /* V9 rdpc */
2111 {
2112 TCGv r_const;
2113
2114 r_const = tcg_const_tl(dc->pc);
2115 gen_movl_TN_reg(rd, r_const);
2116 tcg_temp_free(r_const);
2117 }
2118 break;
2119 case 0x6: /* V9 rdfprs */
2120 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2121 gen_movl_TN_reg(rd, cpu_dst);
2122 break;
2123 case 0xf: /* V9 membar */
2124 break; /* no effect */
2125 case 0x13: /* Graphics Status */
2126 if (gen_trap_ifnofpu(dc, cpu_cond))
2127 goto jmp_insn;
2128 gen_movl_TN_reg(rd, cpu_gsr);
2129 break;
2130 case 0x16: /* Softint */
2131 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2132 gen_movl_TN_reg(rd, cpu_dst);
2133 break;
2134 case 0x17: /* Tick compare */
2135 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2136 break;
2137 case 0x18: /* System tick */
2138 {
2139 TCGv_ptr r_tickptr;
2140
2141 r_tickptr = tcg_temp_new_ptr();
2142 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2143 offsetof(CPUState, stick));
2144 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2145 tcg_temp_free_ptr(r_tickptr);
2146 gen_movl_TN_reg(rd, cpu_dst);
2147 }
2148 break;
2149 case 0x19: /* System tick compare */
2150 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2151 break;
2152 case 0x10: /* Performance Control */
2153 case 0x11: /* Performance Instrumentation Counter */
2154 case 0x12: /* Dispatch Control */
2155 case 0x14: /* Softint set, WO */
2156 case 0x15: /* Softint clear, WO */
2157 #endif
2158 default:
2159 goto illegal_insn;
2160 }
2161 #if !defined(CONFIG_USER_ONLY)
2162 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2163 #ifndef TARGET_SPARC64
2164 if (!supervisor(dc))
2165 goto priv_insn;
2166 gen_helper_compute_psr();
2167 dc->cc_op = CC_OP_FLAGS;
2168 gen_helper_rdpsr(cpu_dst);
2169 #else
2170 CHECK_IU_FEATURE(dc, HYPV);
2171 if (!hypervisor(dc))
2172 goto priv_insn;
2173 rs1 = GET_FIELD(insn, 13, 17);
2174 switch (rs1) {
2175 case 0: // hpstate
2176 // gen_op_rdhpstate();
2177 break;
2178 case 1: // htstate
2179 // gen_op_rdhtstate();
2180 break;
2181 case 3: // hintp
2182 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2183 break;
2184 case 5: // htba
2185 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2186 break;
2187 case 6: // hver
2188 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2189 break;
2190 case 31: // hstick_cmpr
2191 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2192 break;
2193 default:
2194 goto illegal_insn;
2195 }
2196 #endif
2197 gen_movl_TN_reg(rd, cpu_dst);
2198 break;
2199 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2200 if (!supervisor(dc))
2201 goto priv_insn;
2202 #ifdef TARGET_SPARC64
2203 rs1 = GET_FIELD(insn, 13, 17);
2204 switch (rs1) {
2205 case 0: // tpc
2206 {
2207 TCGv_ptr r_tsptr;
2208
2209 r_tsptr = tcg_temp_new_ptr();
2210 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2211 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2212 offsetof(trap_state, tpc));
2213 tcg_temp_free_ptr(r_tsptr);
2214 }
2215 break;
2216 case 1: // tnpc
2217 {
2218 TCGv_ptr r_tsptr;
2219
2220 r_tsptr = tcg_temp_new_ptr();
2221 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2222 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2223 offsetof(trap_state, tnpc));
2224 tcg_temp_free_ptr(r_tsptr);
2225 }
2226 break;
2227 case 2: // tstate
2228 {
2229 TCGv_ptr r_tsptr;
2230
2231 r_tsptr = tcg_temp_new_ptr();
2232 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2233 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2234 offsetof(trap_state, tstate));
2235 tcg_temp_free_ptr(r_tsptr);
2236 }
2237 break;
2238 case 3: // tt
2239 {
2240 TCGv_ptr r_tsptr;
2241
2242 r_tsptr = tcg_temp_new_ptr();
2243 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
2244 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2245 offsetof(trap_state, tt));
2246 tcg_temp_free_ptr(r_tsptr);
2247 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2248 }
2249 break;
2250 case 4: // tick
2251 {
2252 TCGv_ptr r_tickptr;
2253
2254 r_tickptr = tcg_temp_new_ptr();
2255 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2256 offsetof(CPUState, tick));
2257 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2258 gen_movl_TN_reg(rd, cpu_tmp0);
2259 tcg_temp_free_ptr(r_tickptr);
2260 }
2261 break;
2262 case 5: // tba
2263 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2264 break;
2265 case 6: // pstate
2266 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2267 offsetof(CPUSPARCState, pstate));
2268 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2269 break;
2270 case 7: // tl
2271 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2272 offsetof(CPUSPARCState, tl));
2273 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2274 break;
2275 case 8: // pil
2276 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2277 offsetof(CPUSPARCState, psrpil));
2278 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2279 break;
2280 case 9: // cwp
2281 gen_helper_rdcwp(cpu_tmp0);
2282 break;
2283 case 10: // cansave
2284 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2285 offsetof(CPUSPARCState, cansave));
2286 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2287 break;
2288 case 11: // canrestore
2289 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2290 offsetof(CPUSPARCState, canrestore));
2291 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2292 break;
2293 case 12: // cleanwin
2294 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2295 offsetof(CPUSPARCState, cleanwin));
2296 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2297 break;
2298 case 13: // otherwin
2299 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2300 offsetof(CPUSPARCState, otherwin));
2301 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2302 break;
2303 case 14: // wstate
2304 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2305 offsetof(CPUSPARCState, wstate));
2306 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2307 break;
2308 case 16: // UA2005 gl
2309 CHECK_IU_FEATURE(dc, GL);
2310 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2311 offsetof(CPUSPARCState, gl));
2312 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2313 break;
2314 case 26: // UA2005 strand status
2315 CHECK_IU_FEATURE(dc, HYPV);
2316 if (!hypervisor(dc))
2317 goto priv_insn;
2318 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2319 break;
2320 case 31: // ver
2321 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2322 break;
2323 case 15: // fq
2324 default:
2325 goto illegal_insn;
2326 }
2327 #else
2328 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2329 #endif
2330 gen_movl_TN_reg(rd, cpu_tmp0);
2331 break;
2332 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2333 #ifdef TARGET_SPARC64
2334 save_state(dc, cpu_cond);
2335 gen_helper_flushw();
2336 #else
2337 if (!supervisor(dc))
2338 goto priv_insn;
2339 gen_movl_TN_reg(rd, cpu_tbr);
2340 #endif
2341 break;
2342 #endif
2343 } else if (xop == 0x34) { /* FPU Operations */
2344 if (gen_trap_ifnofpu(dc, cpu_cond))
2345 goto jmp_insn;
2346 gen_op_clear_ieee_excp_and_FTT();
2347 rs1 = GET_FIELD(insn, 13, 17);
2348 rs2 = GET_FIELD(insn, 27, 31);
2349 xop = GET_FIELD(insn, 18, 26);
2350 save_state(dc, cpu_cond);
2351 switch (xop) {
2352 case 0x1: /* fmovs */
2353 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2354 break;
2355 case 0x5: /* fnegs */
2356 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2357 break;
2358 case 0x9: /* fabss */
2359 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2360 break;
2361 case 0x29: /* fsqrts */
2362 CHECK_FPU_FEATURE(dc, FSQRT);
2363 gen_clear_float_exceptions();
2364 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2365 gen_helper_check_ieee_exceptions();
2366 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2367 break;
2368 case 0x2a: /* fsqrtd */
2369 CHECK_FPU_FEATURE(dc, FSQRT);
2370 gen_op_load_fpr_DT1(DFPREG(rs2));
2371 gen_clear_float_exceptions();
2372 gen_helper_fsqrtd();
2373 gen_helper_check_ieee_exceptions();
2374 gen_op_store_DT0_fpr(DFPREG(rd));
2375 break;
2376 case 0x2b: /* fsqrtq */
2377 CHECK_FPU_FEATURE(dc, FLOAT128);
2378 gen_op_load_fpr_QT1(QFPREG(rs2));
2379 gen_clear_float_exceptions();
2380 gen_helper_fsqrtq();
2381 gen_helper_check_ieee_exceptions();
2382 gen_op_store_QT0_fpr(QFPREG(rd));
2383 break;
2384 case 0x41: /* fadds */
2385 gen_clear_float_exceptions();
2386 gen_helper_fadds(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2387 gen_helper_check_ieee_exceptions();
2388 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2389 break;
2390 case 0x42: /* faddd */
2391 gen_op_load_fpr_DT0(DFPREG(rs1));
2392 gen_op_load_fpr_DT1(DFPREG(rs2));
2393 gen_clear_float_exceptions();
2394 gen_helper_faddd();
2395 gen_helper_check_ieee_exceptions();
2396 gen_op_store_DT0_fpr(DFPREG(rd));
2397 break;
2398 case 0x43: /* faddq */
2399 CHECK_FPU_FEATURE(dc, FLOAT128);
2400 gen_op_load_fpr_QT0(QFPREG(rs1));
2401 gen_op_load_fpr_QT1(QFPREG(rs2));
2402 gen_clear_float_exceptions();
2403 gen_helper_faddq();
2404 gen_helper_check_ieee_exceptions();
2405 gen_op_store_QT0_fpr(QFPREG(rd));
2406 break;
2407 case 0x45: /* fsubs */
2408 gen_clear_float_exceptions();
2409 gen_helper_fsubs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2410 gen_helper_check_ieee_exceptions();
2411 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2412 break;
2413 case 0x46: /* fsubd */
2414 gen_op_load_fpr_DT0(DFPREG(rs1));
2415 gen_op_load_fpr_DT1(DFPREG(rs2));
2416 gen_clear_float_exceptions();
2417 gen_helper_fsubd();
2418 gen_helper_check_ieee_exceptions();
2419 gen_op_store_DT0_fpr(DFPREG(rd));
2420 break;
2421 case 0x47: /* fsubq */
2422 CHECK_FPU_FEATURE(dc, FLOAT128);
2423 gen_op_load_fpr_QT0(QFPREG(rs1));
2424 gen_op_load_fpr_QT1(QFPREG(rs2));
2425 gen_clear_float_exceptions();
2426 gen_helper_fsubq();
2427 gen_helper_check_ieee_exceptions();
2428 gen_op_store_QT0_fpr(QFPREG(rd));
2429 break;
2430 case 0x49: /* fmuls */
2431 CHECK_FPU_FEATURE(dc, FMUL);
2432 gen_clear_float_exceptions();
2433 gen_helper_fmuls(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2434 gen_helper_check_ieee_exceptions();
2435 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2436 break;
2437 case 0x4a: /* fmuld */
2438 CHECK_FPU_FEATURE(dc, FMUL);
2439 gen_op_load_fpr_DT0(DFPREG(rs1));
2440 gen_op_load_fpr_DT1(DFPREG(rs2));
2441 gen_clear_float_exceptions();
2442 gen_helper_fmuld();
2443 gen_helper_check_ieee_exceptions();
2444 gen_op_store_DT0_fpr(DFPREG(rd));
2445 break;
2446 case 0x4b: /* fmulq */
2447 CHECK_FPU_FEATURE(dc, FLOAT128);
2448 CHECK_FPU_FEATURE(dc, FMUL);
2449 gen_op_load_fpr_QT0(QFPREG(rs1));
2450 gen_op_load_fpr_QT1(QFPREG(rs2));
2451 gen_clear_float_exceptions();
2452 gen_helper_fmulq();
2453 gen_helper_check_ieee_exceptions();
2454 gen_op_store_QT0_fpr(QFPREG(rd));
2455 break;
2456 case 0x4d: /* fdivs */
2457 gen_clear_float_exceptions();
2458 gen_helper_fdivs(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
2459 gen_helper_check_ieee_exceptions();
2460 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2461 break;
2462 case 0x4e: /* fdivd */
2463 gen_op_load_fpr_DT0(DFPREG(rs1));
2464 gen_op_load_fpr_DT1(DFPREG(rs2));
2465 gen_clear_float_exceptions();
2466 gen_helper_fdivd();
2467 gen_helper_check_ieee_exceptions();
2468 gen_op_store_DT0_fpr(DFPREG(rd));
2469 break;
2470 case 0x4f: /* fdivq */
2471 CHECK_FPU_FEATURE(dc, FLOAT128);
2472 gen_op_load_fpr_QT0(QFPREG(rs1));
2473 gen_op_load_fpr_QT1(QFPREG(rs2));
2474 gen_clear_float_exceptions();
2475 gen_helper_fdivq();
2476 gen_helper_check_ieee_exceptions();
2477 gen_op_store_QT0_fpr(QFPREG(rd));
2478 break;
2479 case 0x69: /* fsmuld */
2480 CHECK_FPU_FEATURE(dc, FSMULD);
2481 gen_clear_float_exceptions();
2482 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2483 gen_helper_check_ieee_exceptions();
2484 gen_op_store_DT0_fpr(DFPREG(rd));
2485 break;
2486 case 0x6e: /* fdmulq */
2487 CHECK_FPU_FEATURE(dc, FLOAT128);
2488 gen_op_load_fpr_DT0(DFPREG(rs1));
2489 gen_op_load_fpr_DT1(DFPREG(rs2));
2490 gen_clear_float_exceptions();
2491 gen_helper_fdmulq();
2492 gen_helper_check_ieee_exceptions();
2493 gen_op_store_QT0_fpr(QFPREG(rd));
2494 break;
2495 case 0xc4: /* fitos */
2496 gen_clear_float_exceptions();
2497 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2498 gen_helper_check_ieee_exceptions();
2499 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2500 break;
2501 case 0xc6: /* fdtos */
2502 gen_op_load_fpr_DT1(DFPREG(rs2));
2503 gen_clear_float_exceptions();
2504 gen_helper_fdtos(cpu_tmp32);
2505 gen_helper_check_ieee_exceptions();
2506 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2507 break;
2508 case 0xc7: /* fqtos */
2509 CHECK_FPU_FEATURE(dc, FLOAT128);
2510 gen_op_load_fpr_QT1(QFPREG(rs2));
2511 gen_clear_float_exceptions();
2512 gen_helper_fqtos(cpu_tmp32);
2513 gen_helper_check_ieee_exceptions();
2514 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2515 break;
2516 case 0xc8: /* fitod */
2517 gen_helper_fitod(cpu_fpr[rs2]);
2518 gen_op_store_DT0_fpr(DFPREG(rd));
2519 break;
2520 case 0xc9: /* fstod */
2521 gen_helper_fstod(cpu_fpr[rs2]);
2522 gen_op_store_DT0_fpr(DFPREG(rd));
2523 break;
2524 case 0xcb: /* fqtod */
2525 CHECK_FPU_FEATURE(dc, FLOAT128);
2526 gen_op_load_fpr_QT1(QFPREG(rs2));
2527 gen_clear_float_exceptions();
2528 gen_helper_fqtod();
2529 gen_helper_check_ieee_exceptions();
2530 gen_op_store_DT0_fpr(DFPREG(rd));
2531 break;
2532 case 0xcc: /* fitoq */
2533 CHECK_FPU_FEATURE(dc, FLOAT128);
2534 gen_helper_fitoq(cpu_fpr[rs2]);
2535 gen_op_store_QT0_fpr(QFPREG(rd));
2536 break;
2537 case 0xcd: /* fstoq */
2538 CHECK_FPU_FEATURE(dc, FLOAT128);
2539 gen_helper_fstoq(cpu_fpr[rs2]);
2540 gen_op_store_QT0_fpr(QFPREG(rd));
2541 break;
2542 case 0xce: /* fdtoq */
2543 CHECK_FPU_FEATURE(dc, FLOAT128);
2544 gen_op_load_fpr_DT1(DFPREG(rs2));
2545 gen_helper_fdtoq();
2546 gen_op_store_QT0_fpr(QFPREG(rd));
2547 break;
2548 case 0xd1: /* fstoi */
2549 gen_clear_float_exceptions();
2550 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2551 gen_helper_check_ieee_exceptions();
2552 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2553 break;
2554 case 0xd2: /* fdtoi */
2555 gen_op_load_fpr_DT1(DFPREG(rs2));
2556 gen_clear_float_exceptions();
2557 gen_helper_fdtoi(cpu_tmp32);
2558 gen_helper_check_ieee_exceptions();
2559 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2560 break;
2561 case 0xd3: /* fqtoi */
2562 CHECK_FPU_FEATURE(dc, FLOAT128);
2563 gen_op_load_fpr_QT1(QFPREG(rs2));
2564 gen_clear_float_exceptions();
2565 gen_helper_fqtoi(cpu_tmp32);
2566 gen_helper_check_ieee_exceptions();
2567 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2568 break;
2569 #ifdef TARGET_SPARC64
2570 case 0x2: /* V9 fmovd */
2571 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2572 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2573 cpu_fpr[DFPREG(rs2) + 1]);
2574 break;
2575 case 0x3: /* V9 fmovq */
2576 CHECK_FPU_FEATURE(dc, FLOAT128);
2577 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2578 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2579 cpu_fpr[QFPREG(rs2) + 1]);
2580 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2581 cpu_fpr[QFPREG(rs2) + 2]);
2582 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2583 cpu_fpr[QFPREG(rs2) + 3]);
2584 break;
2585 case 0x6: /* V9 fnegd */
2586 gen_op_load_fpr_DT1(DFPREG(rs2));
2587 gen_helper_fnegd();
2588 gen_op_store_DT0_fpr(DFPREG(rd));
2589 break;
2590 case 0x7: /* V9 fnegq */
2591 CHECK_FPU_FEATURE(dc, FLOAT128);
2592 gen_op_load_fpr_QT1(QFPREG(rs2));
2593 gen_helper_fnegq();
2594 gen_op_store_QT0_fpr(QFPREG(rd));
2595 break;
2596 case 0xa: /* V9 fabsd */
2597 gen_op_load_fpr_DT1(DFPREG(rs2));
2598 gen_helper_fabsd();
2599 gen_op_store_DT0_fpr(DFPREG(rd));
2600 break;
2601 case 0xb: /* V9 fabsq */
2602 CHECK_FPU_FEATURE(dc, FLOAT128);
2603 gen_op_load_fpr_QT1(QFPREG(rs2));
2604 gen_helper_fabsq();
2605 gen_op_store_QT0_fpr(QFPREG(rd));
2606 break;
2607 case 0x81: /* V9 fstox */
2608 gen_clear_float_exceptions();
2609 gen_helper_fstox(cpu_fpr[rs2]);
2610 gen_helper_check_ieee_exceptions();
2611 gen_op_store_DT0_fpr(DFPREG(rd));
2612 break;
2613 case 0x82: /* V9 fdtox */
2614 gen_op_load_fpr_DT1(DFPREG(rs2));
2615 gen_clear_float_exceptions();
2616 gen_helper_fdtox();
2617 gen_helper_check_ieee_exceptions();
2618 gen_op_store_DT0_fpr(DFPREG(rd));
2619 break;
2620 case 0x83: /* V9 fqtox */
2621 CHECK_FPU_FEATURE(dc, FLOAT128);
2622 gen_op_load_fpr_QT1(QFPREG(rs2));
2623 gen_clear_float_exceptions();
2624 gen_helper_fqtox();
2625 gen_helper_check_ieee_exceptions();
2626 gen_op_store_DT0_fpr(DFPREG(rd));
2627 break;
2628 case 0x84: /* V9 fxtos */
2629 gen_op_load_fpr_DT1(DFPREG(rs2));
2630 gen_clear_float_exceptions();
2631 gen_helper_fxtos(cpu_tmp32);
2632 gen_helper_check_ieee_exceptions();
2633 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2634 break;
2635 case 0x88: /* V9 fxtod */
2636 gen_op_load_fpr_DT1(DFPREG(rs2));
2637 gen_clear_float_exceptions();
2638 gen_helper_fxtod();
2639 gen_helper_check_ieee_exceptions();
2640 gen_op_store_DT0_fpr(DFPREG(rd));
2641 break;
2642 case 0x8c: /* V9 fxtoq */
2643 CHECK_FPU_FEATURE(dc, FLOAT128);
2644 gen_op_load_fpr_DT1(DFPREG(rs2));
2645 gen_clear_float_exceptions();
2646 gen_helper_fxtoq();
2647 gen_helper_check_ieee_exceptions();
2648 gen_op_store_QT0_fpr(QFPREG(rd));
2649 break;
2650 #endif
2651 default:
2652 goto illegal_insn;
2653 }
2654 } else if (xop == 0x35) { /* FPU Operations */
2655 #ifdef TARGET_SPARC64
2656 int cond;
2657 #endif
2658 if (gen_trap_ifnofpu(dc, cpu_cond))
2659 goto jmp_insn;
2660 gen_op_clear_ieee_excp_and_FTT();
2661 rs1 = GET_FIELD(insn, 13, 17);
2662 rs2 = GET_FIELD(insn, 27, 31);
2663 xop = GET_FIELD(insn, 18, 26);
2664 save_state(dc, cpu_cond);
2665 #ifdef TARGET_SPARC64
2666 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2667 int l1;
2668
2669 l1 = gen_new_label();
2670 cond = GET_FIELD_SP(insn, 14, 17);
2671 cpu_src1 = get_src1(insn, cpu_src1);
2672 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2673 0, l1);
2674 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2675 gen_set_label(l1);
2676 break;
2677 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2678 int l1;
2679
2680 l1 = gen_new_label();
2681 cond = GET_FIELD_SP(insn, 14, 17);
2682 cpu_src1 = get_src1(insn, cpu_src1);
2683 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2684 0, l1);
2685 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2686 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2687 gen_set_label(l1);
2688 break;
2689 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2690 int l1;
2691
2692 CHECK_FPU_FEATURE(dc, FLOAT128);
2693 l1 = gen_new_label();
2694 cond = GET_FIELD_SP(insn, 14, 17);
2695 cpu_src1 = get_src1(insn, cpu_src1);
2696 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2697 0, l1);
2698 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2699 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2700 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2701 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2702 gen_set_label(l1);
2703 break;
2704 }
2705 #endif
2706 switch (xop) {
2707 #ifdef TARGET_SPARC64
/* V9 FMOVScc on %fccN: if the FP condition 'cond' (insn bits 14-17)
 * holds for FP condition-code field 'fcc', copy the single-precision
 * register cpu_fpr[rs2] into cpu_fpr[rd].  Emitted as an inverted
 * branch: when the condition is false (r_cond == 0) we branch past
 * the move.  Relies on insn/cond/rd/rs2 from the enclosing scope. */
#define FMOVSCC(fcc)                                                \
    {                                                               \
        TCGv r_cond;                                                \
        int l1;                                                     \
                                                                    \
        l1 = gen_new_label();                                       \
        r_cond = tcg_temp_new();                                    \
        cond = GET_FIELD_SP(insn, 14, 17);                          \
        gen_fcond(r_cond, fcc, cond);                               \
        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,                     \
                           0, l1);                                  \
        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);                 \
        gen_set_label(l1);                                          \
        tcg_temp_free(r_cond);                                      \
    }
/* V9 FMOVDcc on %fccN: conditional move of a double-precision
 * register, stored here as two adjacent 32-bit halves at
 * DFPREG(rs2)/DFPREG(rs2)+1 -> DFPREG(rd)/DFPREG(rd)+1.  Same
 * inverted-branch scheme as FMOVSCC: skip both half-moves when the
 * FP condition on field 'fcc' is false. */
#define FMOVDCC(fcc)                                                \
    {                                                               \
        TCGv r_cond;                                                \
        int l1;                                                     \
                                                                    \
        l1 = gen_new_label();                                       \
        r_cond = tcg_temp_new();                                    \
        cond = GET_FIELD_SP(insn, 14, 17);                          \
        gen_fcond(r_cond, fcc, cond);                               \
        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,                     \
                           0, l1);                                  \
        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],                        \
                        cpu_fpr[DFPREG(rs2)]);                      \
        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],                    \
                        cpu_fpr[DFPREG(rs2) + 1]);                  \
        gen_set_label(l1);                                          \
        tcg_temp_free(r_cond);                                      \
    }
/* V9 FMOVQcc on %fccN: conditional move of a quad-precision
 * register, stored as four adjacent 32-bit halves starting at
 * QFPREG(rs2) -> QFPREG(rd).  All four half-moves are skipped with
 * a single inverted branch when the FP condition on field 'fcc'
 * is false. */
#define FMOVQCC(fcc)                                                \
    {                                                               \
        TCGv r_cond;                                                \
        int l1;                                                     \
                                                                    \
        l1 = gen_new_label();                                       \
        r_cond = tcg_temp_new();                                    \
        cond = GET_FIELD_SP(insn, 14, 17);                          \
        gen_fcond(r_cond, fcc, cond);                               \
        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,                     \
                           0, l1);                                  \
        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],                        \
                        cpu_fpr[QFPREG(rs2)]);                      \
        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],                    \
                        cpu_fpr[QFPREG(rs2) + 1]);                  \
        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],                    \
                        cpu_fpr[QFPREG(rs2) + 2]);                  \
        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],                    \
                        cpu_fpr[QFPREG(rs2) + 3]);                  \
        gen_set_label(l1);                                          \
        tcg_temp_free(r_cond);                                      \
    }
2763 case 0x001: /* V9 fmovscc %fcc0 */
2764 FMOVSCC(0);
2765 break;
2766 case 0x002: /* V9 fmovdcc %fcc0 */
2767 FMOVDCC(0);
2768 break;
2769 case 0x003: /* V9 fmovqcc %fcc0 */
2770 CHECK_FPU_FEATURE(dc, FLOAT128);
2771 FMOVQCC(0);
2772 break;
2773 case 0x041: /* V9 fmovscc %fcc1 */
2774 FMOVSCC(1);
2775 break;
2776 case 0x042: /* V9 fmovdcc %fcc1 */
2777 FMOVDCC(1);
2778 break;
2779 case 0x043: /* V9 fmovqcc %fcc1 */
2780 CHECK_FPU_FEATURE(dc, FLOAT128);
2781 FMOVQCC(1);
2782 break;
2783 case 0x081: /* V9 fmovscc %fcc2 */
2784 FMOVSCC(2);
2785 break;
2786 case 0x082: /* V9 fmovdcc %fcc2 */
2787 FMOVDCC(2);
2788 break;
2789 case 0x083: /* V9 fmovqcc %fcc2 */
2790 CHECK_FPU_FEATURE(dc, FLOAT128);
2791 FMOVQCC(2);
2792 break;
2793 case 0x0c1: /* V9 fmovscc %fcc3 */
2794 FMOVSCC(3);
2795 break;
2796 case 0x0c2: /* V9 fmovdcc %fcc3 */
2797 FMOVDCC(3);
2798 break;
2799 case 0x0c3: /* V9 fmovqcc %fcc3 */
2800 CHECK_FPU_FEATURE(dc, FLOAT128);
2801 FMOVQCC(3);
2802 break;
2803 #undef FMOVSCC
2804 #undef FMOVDCC
2805 #undef FMOVQCC
/* Integer-cc variant of FMOVSCC (the %fcc version was #undef'd
 * just above): evaluates the integer condition via gen_cond(),
 * where icc==0 selects %icc and icc==1 selects %xcc (see the
 * 0x101/0x181 case labels below).  Conditionally copies the
 * single-precision cpu_fpr[rs2] into cpu_fpr[rd] using the same
 * inverted-branch scheme. */
#define FMOVSCC(icc)                                                \
    {                                                               \
        TCGv r_cond;                                                \
        int l1;                                                     \
                                                                    \
        l1 = gen_new_label();                                       \
        r_cond = tcg_temp_new();                                    \
        cond = GET_FIELD_SP(insn, 14, 17);                          \
        gen_cond(r_cond, icc, cond, dc);                            \
        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,                     \
                           0, l1);                                  \
        tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);                 \
        gen_set_label(l1);                                          \
        tcg_temp_free(r_cond);                                      \
    }
/* Integer-cc variant of FMOVDCC: conditional move of a
 * double-precision register (two 32-bit halves) gated on the
 * integer condition codes (icc==0 -> %icc, icc==1 -> %xcc).
 * Branch over both half-moves when the condition is false. */
#define FMOVDCC(icc)                                                \
    {                                                               \
        TCGv r_cond;                                                \
        int l1;                                                     \
                                                                    \
        l1 = gen_new_label();                                       \
        r_cond = tcg_temp_new();                                    \
        cond = GET_FIELD_SP(insn, 14, 17);                          \
        gen_cond(r_cond, icc, cond, dc);                            \
        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,                     \
                           0, l1);                                  \
        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],                        \
                        cpu_fpr[DFPREG(rs2)]);                      \
        tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],                    \
                        cpu_fpr[DFPREG(rs2) + 1]);                  \
        gen_set_label(l1);                                          \
        tcg_temp_free(r_cond);                                      \
    }
/* Integer-cc variant of FMOVQCC: conditional move of a
 * quad-precision register (four 32-bit halves) gated on the
 * integer condition codes (icc==0 -> %icc, icc==1 -> %xcc).
 * A single inverted branch skips all four half-moves when the
 * condition is false. */
#define FMOVQCC(icc)                                                \
    {                                                               \
        TCGv r_cond;                                                \
        int l1;                                                     \
                                                                    \
        l1 = gen_new_label();                                       \
        r_cond = tcg_temp_new();                                    \
        cond = GET_FIELD_SP(insn, 14, 17);                          \
        gen_cond(r_cond, icc, cond, dc);                            \
        tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond,                     \
                           0, l1);                                  \
        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],                        \
                        cpu_fpr[QFPREG(rs2)]);                      \
        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],                    \
                        cpu_fpr[QFPREG(rs2) + 1]);                  \
        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],                    \
                        cpu_fpr[QFPREG(rs2) + 2]);                  \
        tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],                    \
                        cpu_fpr[QFPREG(rs2) + 3]);                  \
        gen_set_label(l1);                                          \
        tcg_temp_free(r_cond);                                      \
    }
2861
2862 case 0x101: /* V9 fmovscc %icc */
2863 FMOVSCC(0);
2864 break;
2865 case 0x102: /* V9 fmovdcc %icc */
2866 FMOVDCC(0);
2867 break;
2868 case 0x103: /* V9 fmovqcc %icc */
2869 CHECK_FPU_FEATURE(dc, FLOAT128);
2870 FMOVQCC(0);
2871 break;
2872 case 0x181: /* V9 fmovscc %xcc */
2873 FMOVSCC(1);
2874 break;
2875 case 0x182: /* V9 fmovdcc %xcc */
2876 FMOVDCC(1);
2877 break;
2878 case 0x183: /* V9 fmovqcc %xcc */
2879 CHECK_FPU_FEATURE(dc, FLOAT128);
2880 FMOVQCC(1);
2881 break;
2882 #undef FMOVSCC
2883 #undef FMOVDCC
2884 #undef FMOVQCC
2885 #endif
2886 case 0x51: /* fcmps, V9 %fcc */
2887 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2888 break;
2889 case 0x52: /* fcmpd, V9 %fcc */
2890 gen_op_load_fpr_DT0(DFPREG(rs1));
2891 gen_op_load_fpr_DT1(DFPREG(rs2));
2892 gen_op_fcmpd(rd & 3);
2893 break;
2894 case 0x53: /* fcmpq, V9 %fcc */
2895 CHECK_FPU_FEATURE(dc, FLOAT128);
2896 gen_op_load_fpr_QT0(QFPREG(rs1));
2897 gen_op_load_fpr_QT1(QFPREG(rs2));
2898 gen_op_fcmpq(rd & 3);
2899 break;
2900 case 0x55: /* fcmpes, V9 %fcc */
2901 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2902 break;
2903 case 0x56: /* fcmped, V9 %fcc */
2904 gen_op_load_fpr_DT0(DFPREG(rs1));
2905 gen_op_load_fpr_DT1(DFPREG(rs2));
2906 gen_op_fcmped(rd & 3);
2907 break;
2908 case 0x57: /* fcmpeq, V9 %fcc */
2909 CHECK_FPU_FEATURE(dc, FLOAT128);
2910 gen_op_load_fpr_QT0(QFPREG(rs1));
2911 gen_op_load_fpr_QT1(QFPREG(rs2));
2912 gen_op_fcmpeq(rd & 3);
2913 break;
2914 default:
2915 goto illegal_insn;
2916 }
2917 } else if (xop == 0x2) {
2918 // clr/mov shortcut
2919
2920 rs1 = GET_FIELD(insn, 13, 17);
2921 if (rs1 == 0) {
2922 // or %g0, x, y -> mov T0, x; mov y, T0
2923 if (IS_IMM) { /* immediate */
2924 TCGv r_const;
2925
2926 simm = GET_FIELDs(insn, 19, 31);
2927 r_const = tcg_const_tl(simm);
2928 gen_movl_TN_reg(rd, r_const);
2929 tcg_temp_free(r_const);
2930 } else { /* register */
2931 rs2 = GET_FIELD(insn, 27, 31);
2932 gen_movl_reg_TN(rs2, cpu_dst);
2933 gen_movl_TN_reg(rd, cpu_dst);
2934 }
2935 } else {
2936 cpu_src1 = get_src1(insn, cpu_src1);
2937 if (IS_IMM) { /* immediate */
2938 simm = GET_FIELDs(insn, 19, 31);
2939 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
2940 gen_movl_TN_reg(rd, cpu_dst);
2941 } else { /* register */
2942 // or x, %g0, y -> mov T1, x; mov y, T1
2943 rs2 = GET_FIELD(insn, 27, 31);
2944 if (rs2 != 0) {
2945 gen_movl_reg_TN(rs2, cpu_src2);
2946 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2947 gen_movl_TN_reg(rd, cpu_dst);
2948 } else
2949 gen_movl_TN_reg(rd, cpu_src1);
2950 }
2951 }
2952 #ifdef TARGET_SPARC64
2953 } else if (xop == 0x25) { /* sll, V9 sllx */
2954 cpu_src1 = get_src1(insn, cpu_src1);
2955 if (IS_IMM) { /* immediate */
2956 simm = GET_FIELDs(insn, 20, 31);
2957 if (insn & (1 << 12)) {
2958 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
2959 } else {
2960 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
2961 }
2962 } else { /* register */
2963 rs2 = GET_FIELD(insn, 27, 31);
2964 gen_movl_reg_TN(rs2, cpu_src2);
2965 if (insn & (1 << 12)) {
2966 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2967 } else {
2968 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2969 }
2970 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2971 }
2972 gen_movl_TN_reg(rd, cpu_dst);
2973 } else if (xop == 0x26) { /* srl, V9 srlx */
2974 cpu_src1 = get_src1(insn, cpu_src1);
2975 if (IS_IMM) { /* immediate */
2976 simm = GET_FIELDs(insn, 20, 31);
2977 if (insn & (1 << 12)) {
2978 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
2979 } else {
2980 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2981 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
2982 }
2983 } else { /* register */
2984 rs2 = GET_FIELD(insn, 27, 31);
2985 gen_movl_reg_TN(rs2, cpu_src2);
2986 if (insn & (1 << 12)) {
2987 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2988 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2989 } else {
2990 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2991 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2992 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2993 }
2994 }
2995 gen_movl_TN_reg(rd, cpu_dst);
2996 } else if (xop == 0x27) { /* sra, V9 srax */
2997 cpu_src1 = get_src1(insn, cpu_src1);
2998 if (IS_IMM) { /* immediate */
2999 simm = GET_FIELDs(insn, 20, 31);
3000 if (insn & (1 << 12)) {
3001 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3002 } else {
3003 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3004 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3005 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3006 }
3007 } else { /* register */
3008 rs2 = GET_FIELD(insn, 27, 31);
3009 gen_movl_reg_TN(rs2, cpu_src2);
3010 if (insn & (1 << 12)) {
3011 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3012 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3013 } else {
3014 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3015 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3016 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3017 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3018 }
3019 }
3020 gen_movl_TN_reg(rd, cpu_dst);
3021 #endif
3022 } else if (xop < 0x36) {
3023 if (xop < 0x20) {
3024 cpu_src1 = get_src1(insn, cpu_src1);
3025 cpu_src2 = get_src2(insn, cpu_src2);
3026 switch (xop & ~0x10) {
3027 case 0x0: /* add */
3028 if (IS_IMM) {
3029 simm = GET_FIELDs(insn, 19, 31);
3030 if (xop & 0x10) {
3031 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3032 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3033 dc->cc_op = CC_OP_ADD;
3034 } else {
3035 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3036 }
3037 } else {
3038 if (xop & 0x10) {
3039 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3040 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3041 dc->cc_op = CC_OP_ADD;
3042 } else {
3043 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3044 }
3045 }
3046 break;
3047 case 0x1: /* and */
3048 if (IS_IMM) {
3049 simm = GET_FIELDs(insn, 19, 31);
3050 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3051 } else {
3052 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3053 }
3054 if (xop & 0x10) {
3055 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3056 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3057 dc->cc_op = CC_OP_LOGIC;
3058 }
3059 break;
3060 case 0x2: /* or */
3061 if (IS_IMM) {
3062 simm = GET_FIELDs(insn, 19, 31);
3063 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3064 } else {
3065 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3066 }
3067 if (xop & 0x10) {
3068 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3069 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3070 dc->cc_op = CC_OP_LOGIC;
3071 }
3072 break;
3073 case 0x3: /* xor */
3074 if (IS_IMM) {
3075 simm = GET_FIELDs(insn, 19, 31);
3076 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3077 } else {
3078 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3079 }
3080 if (xop & 0x10) {
3081 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3082 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3083 dc->cc_op = CC_OP_LOGIC;
3084 }
3085 break;
3086 case 0x4: /* sub */
3087 if (IS_IMM) {
3088 simm = GET_FIELDs(insn, 19, 31);
3089 if (xop & 0x10) {
3090 gen_op_subi_cc(cpu_dst, cpu_src1, simm, dc);
3091 } else {
3092 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3093 }
3094 } else {
3095 if (xop & 0x10) {
3096 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3097 tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3098 dc->cc_op = CC_OP_SUB;
3099 } else {
3100 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3101 }
3102 }
3103 break;
3104 case 0x5: /* andn */
3105 if (IS_IMM) {
3106 simm = GET_FIELDs(insn, 19, 31);
3107 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3108 } else {
3109 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3110 }
3111 if (xop & 0x10) {
3112 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3113 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3114 dc->cc_op = CC_OP_LOGIC;
3115 }
3116 break;
3117 case 0x6: /* orn */
3118 if (IS_IMM) {
3119 simm = GET_FIELDs(insn, 19, 31);
3120 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3121 } else {
3122 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3123 }
3124 if (xop & 0x10) {
3125 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3126 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3127 dc->cc_op = CC_OP_LOGIC;
3128 }
3129 break;
3130 case 0x7: /* xorn */
3131 if (IS_IMM) {
3132 simm = GET_FIELDs(insn, 19, 31);
3133 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3134 } else {
3135 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3136 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3137 }
3138 if (xop & 0x10) {
3139 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3140 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3141 dc->cc_op = CC_OP_LOGIC;
3142 }
3143 break;
3144 case 0x8: /* addx, V9 addc */
3145 gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3146 (xop & 0x10));
3147 break;
3148 #ifdef TARGET_SPARC64
3149 case 0x9: /* V9 mulx */
3150 if (IS_IMM) {
3151 simm = GET_FIELDs(insn, 19, 31);
3152 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3153 } else {
3154 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3155 }
3156 break;
3157 #endif
3158 case 0xa: /* umul */
3159 CHECK_IU_FEATURE(dc, MUL);
3160 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3161 if (xop & 0x10) {
3162 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3163 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3164 dc->cc_op = CC_OP_LOGIC;
3165 }
3166 break;
3167 case 0xb: /* smul */
3168 CHECK_IU_FEATURE(dc, MUL);
3169 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3170 if (xop & 0x10) {
3171 tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3172 tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3173 dc->cc_op = CC_OP_LOGIC;
3174 }
3175 break;
3176 case 0xc: /* subx, V9 subc */
3177 gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3178 (xop & 0x10));
3179 break;
3180 #ifdef TARGET_SPARC64
3181 case 0xd: /* V9 udivx */
3182 {
3183 TCGv r_temp1, r_temp2;
3184 r_temp1 = tcg_temp_local_new();
3185 r_temp2 = tcg_temp_local_new();
3186 tcg_gen_mov_tl(r_temp1, cpu_src1);
3187 tcg_gen_mov_tl(r_temp2, cpu_src2);
3188 gen_trap_ifdivzero_tl(r_temp2);
3189 tcg_gen_divu_i64(cpu_dst, r_temp1, r_temp2);
3190 tcg_temp_free(r_temp1);
3191 tcg_temp_free(r_temp2);
3192 }
3193 break;
3194 #endif
3195 case 0xe: /* udiv */
3196 CHECK_IU_FEATURE(dc, DIV);
3197 if (xop & 0x10) {
3198 gen_helper_udiv_cc(cpu_dst, cpu_src1, cpu_src2);
3199 dc->cc_op = CC_OP_DIV;
3200 } else {
3201 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3202 }
3203 break;
3204 case 0xf: /* sdiv */
3205 CHECK_IU_FEATURE(dc, DIV);
3206 if (xop & 0x10) {
3207 gen_helper_sdiv_cc(cpu_dst, cpu_src1, cpu_src2);
3208 dc->cc_op = CC_OP_DIV;
3209 } else {
3210 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3211 }
3212 break;
3213 default:
3214 goto illegal_insn;
3215 }
3216 gen_movl_TN_reg(rd, cpu_dst);
3217 } else {
3218 cpu_src1 = get_src1(insn, cpu_src1);
3219 cpu_src2 = get_src2(insn, cpu_src2);
3220 switch (xop) {
3221 case 0x20: /* taddcc */
3222 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3223 gen_movl_TN_reg(rd, cpu_dst);
3224 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
3225 dc->cc_op = CC_OP_TADD;
3226 break;
3227 case 0x21: /* tsubcc */
3228 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3229 gen_movl_TN_reg(rd, cpu_dst);
3230 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
3231 dc->cc_op = CC_OP_TSUB;
3232 break;
3233 case 0x22: /* taddcctv */
3234 save_state(dc, cpu_cond);
3235 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3236 gen_movl_TN_reg(rd, cpu_dst);
3237 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADDTV);
3238 dc->cc_op = CC_OP_TADDTV;
3239 break;
3240 case 0x23: /* tsubcctv */
3241 save_state(dc, cpu_cond);
3242 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3243 gen_movl_TN_reg(rd, cpu_dst);
3244 tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUBTV);
3245 dc->cc_op = CC_OP_TSUBTV;
3246 break;
3247 case 0x24: /* mulscc */
3248 gen_helper_compute_psr();
3249 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3250 gen_movl_TN_reg(rd, cpu_dst);
3251 tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3252 dc->cc_op = CC_OP_ADD;
3253 break;
3254 #ifndef TARGET_SPARC64
3255 case 0x25: /* sll */
3256 if (IS_IMM) { /* immediate */
3257 simm = GET_FIELDs(insn, 20, 31);
3258 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3259 } else { /* register */
3260 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3261 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3262 }
3263 gen_movl_TN_reg(rd, cpu_dst);
3264 break;
3265 case 0x26: /* srl */
3266 if (IS_IMM) { /* immediate */
3267 simm = GET_FIELDs(insn, 20, 31);
3268 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3269 } else { /* register */
3270 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3271 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3272 }
3273 gen_movl_TN_reg(rd, cpu_dst);
3274 break;
3275 case 0x27: /* sra */
3276 if (IS_IMM) { /* immediate */
3277 simm = GET_FIELDs(insn, 20, 31);
3278 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3279 } else { /* register */
3280 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3281 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3282 }
3283 gen_movl_TN_reg(rd, cpu_dst);
3284 break;
3285 #endif
3286 case 0x30:
3287 {
3288 switch(rd) {
3289 case 0: /* wry */
3290 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3291 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3292 break;
3293 #ifndef TARGET_SPARC64
3294 case 0x01 ... 0x0f: /* undefined in the
3295 SPARCv8 manual, nop
3296 on the microSPARC
3297 II */
3298 case 0x10 ... 0x1f: /* implementation-dependent
3299 in the SPARCv8
3300 manual, nop on the
3301 microSPARC II */
3302 break;
3303 #else
3304 case 0x2: /* V9 wrccr */
3305 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3306 gen_helper_wrccr(cpu_dst);
3307 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3308 dc->cc_op = CC_OP_FLAGS;
3309 break;
3310 case 0x3: /* V9 wrasi */
3311 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3312 tcg_gen_andi_tl(cpu_dst, cpu_dst, 0xff);
3313 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3314 break;
3315 case 0x6: /* V9 wrfprs */
3316 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3317 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3318 save_state(dc, cpu_cond);
3319 gen_op_next_insn();
3320 tcg_gen_exit_tb(0);
3321 dc->is_br = 1;
3322 break;
3323 case 0xf: /* V9 sir, nop if user */
3324 #if !defined(CONFIG_USER_ONLY)
3325 if (supervisor(dc)) {
3326 ; // XXX
3327 }
3328 #endif
3329 break;
3330 case 0x13: /* Graphics Status */
3331 if (gen_trap_ifnofpu(dc, cpu_cond))
3332 goto jmp_insn;
3333 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3334 break;
3335 case 0x14: /* Softint set */
3336 if (!supervisor(dc))
3337 goto illegal_insn;
3338 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3339 gen_helper_set_softint(cpu_tmp64);
3340 break;
3341 case 0x15: /* Softint clear */
3342 if (!supervisor(dc))
3343 goto illegal_insn;
3344 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3345 gen_helper_clear_softint(cpu_tmp64);
3346 break;
3347 case 0x16: /* Softint write */
3348 if (!supervisor(dc))
3349 goto illegal_insn;
3350 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3351 gen_helper_write_softint(cpu_tmp64);
3352 break;
3353 case 0x17: /* Tick compare */
3354 #if !defined(CONFIG_USER_ONLY)
3355 if (!supervisor(dc))
3356 goto illegal_insn;
3357 #endif
3358 {
3359 TCGv_ptr r_tickptr;
3360
3361 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3362 cpu_src2);
3363 r_tickptr = tcg_temp_new_ptr();
3364 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3365 offsetof(CPUState, tick));
3366 gen_helper_tick_set_limit(r_tickptr,
3367 cpu_tick_cmpr);
3368 tcg_temp_free_ptr(r_tickptr);
3369 }
3370 break;
3371 case 0x18: /* System tick */
3372 #if !defined(CONFIG_USER_ONLY)
3373 if (!supervisor(dc))
3374 goto illegal_insn;
3375 #endif
3376 {
3377 TCGv_ptr r_tickptr;
3378
3379 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3380 cpu_src2);
3381 r_tickptr = tcg_temp_new_ptr();
3382 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3383 offsetof(CPUState, stick));
3384 gen_helper_tick_set_count(r_tickptr,
3385 cpu_dst);
3386 tcg_temp_free_ptr(r_tickptr);
3387 }
3388 break;
3389 case 0x19: /* System tick compare */
3390 #if !defined(CONFIG_USER_ONLY)
3391 if (!supervisor(dc))
3392 goto illegal_insn;
3393 #endif
3394 {
3395 TCGv_ptr r_tickptr;
3396
3397 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3398 cpu_src2);
3399 r_tickptr = tcg_temp_new_ptr();
3400 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3401 offsetof(CPUState, stick));
3402 gen_helper_tick_set_limit(r_tickptr,
3403 cpu_stick_cmpr);
3404 tcg_temp_free_ptr(r_tickptr);
3405 }
3406 break;
3407
3408 case 0x10: /* Performance Control */
3409 case 0x11: /* Performance Instrumentation
3410 Counter */
3411 case 0x12: /* Dispatch Control */
3412 #endif
3413 default:
3414 goto illegal_insn;
3415 }
3416 }
3417 break;
3418 #if !defined(CONFIG_USER_ONLY)
3419 case 0x31: /* wrpsr, V9 saved, restored */
3420 {
3421 if (!supervisor(dc))
3422 goto priv_insn;
3423 #ifdef TARGET_SPARC64
3424 switch (rd) {
3425 case 0:
3426 gen_helper_saved();
3427 break;
3428 case 1:
3429 gen_helper_restored();
3430 break;
3431 case 2: /* UA2005 allclean */
3432 case 3: /* UA2005 otherw */
3433 case 4: /* UA2005 normalw */
3434 case 5: /* UA2005 invalw */
3435 // XXX
3436 default:
3437 goto illegal_insn;
3438 }
3439 #else
3440 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3441 gen_helper_wrpsr(cpu_dst);
3442 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
3443 dc->cc_op = CC_OP_FLAGS;
3444 save_state(dc, cpu_cond);
3445 gen_op_next_insn();
3446 tcg_gen_exit_tb(0);
3447 dc->is_br = 1;
3448 #endif
3449 }
3450 break;
3451 case 0x32: /* wrwim, V9 wrpr */
3452 {
3453 if (!supervisor(dc))
3454 goto priv_insn;
3455 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3456 #ifdef TARGET_SPARC64
3457 switch (rd) {
3458 case 0: // tpc
3459 {
3460 TCGv_ptr r_tsptr;
3461
3462 r_tsptr = tcg_temp_new_ptr();
3463 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3464 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3465 offsetof(trap_state, tpc));
3466 tcg_temp_free_ptr(r_tsptr);
3467 }
3468 break;
3469 case 1: // tnpc
3470 {
3471 TCGv_ptr r_tsptr;
3472
3473 r_tsptr = tcg_temp_new_ptr();
3474 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3475 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3476 offsetof(trap_state, tnpc));
3477 tcg_temp_free_ptr(r_tsptr);
3478 }
3479 break;
3480 case 2: // tstate
3481 {
3482 TCGv_ptr r_tsptr;
3483
3484 r_tsptr = tcg_temp_new_ptr();
3485 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3486 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3487 offsetof(trap_state,
3488 tstate));
3489 tcg_temp_free_ptr(r_tsptr);
3490 }
3491 break;
3492 case 3: // tt
3493 {
3494 TCGv_ptr r_tsptr;
3495
3496 r_tsptr = tcg_temp_new_ptr();
3497 gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3498 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3499 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3500 offsetof(trap_state, tt));
3501 tcg_temp_free_ptr(r_tsptr);
3502 }
3503 break;
3504 case 4: // tick
3505 {
3506 TCGv_ptr r_tickptr;
3507
3508 r_tickptr = tcg_temp_new_ptr();
3509 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3510 offsetof(CPUState, tick));
3511 gen_helper_tick_set_count(r_tickptr,
3512 cpu_tmp0);
3513 tcg_temp_free_ptr(r_tickptr);
3514 }
3515 break;
3516 case 5: // tba
3517 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3518 break;
3519 case 6: // pstate
3520 {
3521 TCGv r_tmp = tcg_temp_local_new();
3522
3523 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3524 save_state(dc, cpu_cond);
3525 gen_helper_wrpstate(r_tmp);
3526 tcg_temp_free(r_tmp);
3527 dc->npc = DYNAMIC_PC;
3528 }
3529 break;
3530 case 7: // tl
3531 {
3532 TCGv r_tmp = tcg_temp_local_new();
3533
3534 tcg_gen_mov_tl(r_tmp, cpu_tmp0);
3535 save_state(dc, cpu_cond);
3536 tcg_gen_trunc_tl_i32(cpu_tmp32, r_tmp);
3537 tcg_temp_free(r_tmp);
3538 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3539 offsetof(CPUSPARCState, tl));
3540 dc->npc = DYNAMIC_PC;
3541 }
3542 break;
3543 case 8: // pil
3544 gen_helper_wrpil(cpu_tmp0);
3545 break;
3546 case 9: // cwp
3547 gen_helper_wrcwp(cpu_tmp0);
3548 break;
3549 case 10: // cansave
3550 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3551 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3552 offsetof(CPUSPARCState,
3553 cansave));
3554 break;
3555 case 11: // canrestore
3556 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3557 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3558 offsetof(CPUSPARCState,
3559 canrestore));
3560 break;
3561 case 12: // cleanwin
3562 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3563 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3564 offsetof(CPUSPARCState,
3565 cleanwin));
3566 break;
3567 case 13: // otherwin
3568 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3569 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3570 offsetof(CPUSPARCState,
3571 otherwin));
3572 break;
3573 case 14: // wstate
3574 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3575 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3576 offsetof(CPUSPARCState,
3577 wstate));
3578 break;
3579 case 16: // UA2005 gl
3580 CHECK_IU_FEATURE(dc, GL);
3581 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3582 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3583 offsetof(CPUSPARCState, gl));
3584 break;
3585 case 26: // UA2005 strand status
3586 CHECK_IU_FEATURE(dc, HYPV);
3587 if (!hypervisor(dc))
3588 goto priv_insn;
3589 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3590 break;
3591 default:
3592 goto illegal_insn;
3593 }
3594 #else
3595 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3596 if (dc->def->nwindows != 32)
3597 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3598 (1 << dc->def->nwindows) - 1);
3599 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3600 #endif
3601 }
3602 break;
3603 case 0x33: /* wrtbr, UA2005 wrhpr */
3604 {
3605 #ifndef TARGET_SPARC64
3606 if (!supervisor(dc))
3607 goto priv_insn;
3608 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3609 #else
3610 CHECK_IU_FEATURE(dc, HYPV);
3611 if (!hypervisor(dc))
3612 goto priv_insn;
3613 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3614 switch (rd) {
3615 case 0: // hpstate
3616 // XXX gen_op_wrhpstate();
3617 save_state(dc, cpu_cond);
3618 gen_op_next_insn();
3619 tcg_gen_exit_tb(0);
3620 dc->is_br = 1;
3621 break;
3622 case 1: // htstate
3623 // XXX gen_op_wrhtstate();
3624 break;
3625 case 3: // hintp
3626 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3627 break;
3628 case 5: // htba
3629 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3630 break;
3631 case 31: // hstick_cmpr
3632 {
3633 TCGv_ptr r_tickptr;
3634
3635 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3636 r_tickptr = tcg_temp_new_ptr();
3637 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3638 offsetof(CPUState, hstick));
3639 gen_helper_tick_set_limit(r_tickptr,
3640 cpu_hstick_cmpr);
3641 tcg_temp_free_ptr(r_tickptr);
3642 }
3643 break;
3644 case 6: // hver readonly
3645 default:
3646 goto illegal_insn;
3647 }
3648 #endif
3649 }
3650 break;
3651 #endif
3652 #ifdef TARGET_SPARC64
3653 case 0x2c: /* V9 movcc */
3654 {
3655 int cc = GET_FIELD_SP(insn, 11, 12);
3656 int cond = GET_FIELD_SP(insn, 14, 17);
3657 TCGv r_cond;
3658 int l1;
3659
3660 r_cond = tcg_temp_new();
3661 if (insn & (1 << 18)) {
3662 if (cc == 0)
3663 gen_cond(r_cond, 0, cond, dc);
3664 else if (cc == 2)
3665 gen_cond(r_cond, 1, cond, dc);
3666 else
3667 goto illegal_insn;
3668 } else {
3669 gen_fcond(r_cond, cc, cond);
3670 }
3671
3672 l1 = gen_new_label();
3673
3674 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3675 if (IS_IMM) { /* immediate */
3676 TCGv r_const;
3677
3678 simm = GET_FIELD_SPs(insn, 0, 10);
3679 r_const = tcg_const_tl(simm);
3680 gen_movl_TN_reg(rd, r_const);
3681 tcg_temp_free(r_const);
3682 } else {
3683 rs2 = GET_FIELD_SP(insn, 0, 4);
3684 gen_movl_reg_TN(rs2, cpu_tmp0);
3685 gen_movl_TN_reg(rd, cpu_tmp0);
3686 }
3687 gen_set_label(l1);
3688 tcg_temp_free(r_cond);
3689 break;
3690 }
3691 case 0x2d: /* V9 sdivx */
3692 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3693 gen_movl_TN_reg(rd, cpu_dst);
3694 break;
3695 case 0x2e: /* V9 popc */
3696 {
3697 cpu_src2 = get_src2(insn, cpu_src2);
3698 gen_helper_popc(cpu_dst, cpu_src2);
3699 gen_movl_TN_reg(rd, cpu_dst);
3700 }
3701 case 0x2f: /* V9 movr */
3702 {
3703 int cond = GET_FIELD_SP(insn, 10, 12);
3704 int l1;
3705
3706 cpu_src1 = get_src1(insn, cpu_src1);
3707
3708 l1 = gen_new_label();
3709
3710 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3711 cpu_src1, 0, l1);
3712 if (IS_IMM) { /* immediate */
3713 TCGv r_const;
3714
3715 simm = GET_FIELD_SPs(insn, 0, 9);
3716 r_const = tcg_const_tl(simm);
3717 gen_movl_TN_reg(rd, r_const);
3718 tcg_temp_free(r_const);
3719 } else {
3720 rs2 = GET_FIELD_SP(insn, 0, 4);
3721 gen_movl_reg_TN(rs2, cpu_tmp0);
3722 gen_movl_TN_reg(rd, cpu_tmp0);
3723 }
3724 gen_set_label(l1);
3725 break;
3726 }
3727 #endif
3728 default:
3729 goto illegal_insn;
3730 }
3731 }
3732 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3733 #ifdef TARGET_SPARC64
3734 int opf = GET_FIELD_SP(insn, 5, 13);
3735 rs1 = GET_FIELD(insn, 13, 17);
3736 rs2 = GET_FIELD(insn, 27, 31);
3737 if (gen_trap_ifnofpu(dc, cpu_cond))
3738 goto jmp_insn;
3739
3740 switch (opf) {
3741 case 0x000: /* VIS I edge8cc */
3742 case 0x001: /* VIS II edge8n */
3743 case 0x002: /* VIS I edge8lcc */
3744 case 0x003: /* VIS II edge8ln */
3745 case 0x004: /* VIS I edge16cc */
3746 case 0x005: /* VIS II edge16n */
3747 case 0x006: /* VIS I edge16lcc */
3748 case 0x007: /* VIS II edge16ln */
3749 case 0x008: /* VIS I edge32cc */
3750 case 0x009: /* VIS II edge32n */
3751 case 0x00a: /* VIS I edge32lcc */
3752 case 0x00b: /* VIS II edge32ln */
3753 // XXX
3754 goto illegal_insn;
3755 case 0x010: /* VIS I array8 */
3756 CHECK_FPU_FEATURE(dc, VIS1);
3757 cpu_src1 = get_src1(insn, cpu_src1);
3758 gen_movl_reg_TN(rs2, cpu_src2);
3759 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3760 gen_movl_TN_reg(rd, cpu_dst);
3761 break;
3762 case 0x012: /* VIS I array16 */
3763 CHECK_FPU_FEATURE(dc, VIS1);
3764 cpu_src1 = get_src1(insn, cpu_src1);
3765 gen_movl_reg_TN(rs2, cpu_src2);
3766 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3767 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3768 gen_movl_TN_reg(rd, cpu_dst);
3769 break;
3770 case 0x014: /* VIS I array32 */
3771 CHECK_FPU_FEATURE(dc, VIS1);
3772 cpu_src1 = get_src1(insn, cpu_src1);
3773 gen_movl_reg_TN(rs2, cpu_src2);
3774 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3775 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3776 gen_movl_TN_reg(rd, cpu_dst);
3777 break;
3778 case 0x018: /* VIS I alignaddr */
3779 CHECK_FPU_FEATURE(dc, VIS1);
3780 cpu_src1 = get_src1(insn, cpu_src1);
3781 gen_movl_reg_TN(rs2, cpu_src2);
3782 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3783 gen_movl_TN_reg(rd, cpu_dst);
3784 break;
3785 case 0x019: /* VIS II bmask */
3786 case 0x01a: /* VIS I alignaddrl */
3787 // XXX
3788 goto illegal_insn;
3789 case 0x020: /* VIS I fcmple16 */
3790 CHECK_FPU_FEATURE(dc, VIS1);
3791 gen_op_load_fpr_DT0(DFPREG(rs1));
3792 gen_op_load_fpr_DT1(DFPREG(rs2));
3793 gen_helper_fcmple16(cpu_dst);
3794 gen_movl_TN_reg(rd, cpu_dst);
3795 break;
3796 case 0x022: /* VIS I fcmpne16 */
3797 CHECK_FPU_FEATURE(dc, VIS1);
3798 gen_op_load_fpr_DT0(DFPREG(rs1));
3799 gen_op_load_fpr_DT1(DFPREG(rs2));
3800 gen_helper_fcmpne16(cpu_dst);
3801 gen_movl_TN_reg(rd, cpu_dst);
3802 break;
3803 case 0x024: /* VIS I fcmple32 */
3804 CHECK_FPU_FEATURE(dc, VIS1);
3805 gen_op_load_fpr_DT0(DFPREG(rs1));
3806 gen_op_load_fpr_DT1(DFPREG(rs2));
3807 gen_helper_fcmple32(cpu_dst);
3808 gen_movl_TN_reg(rd, cpu_dst);
3809 break;
3810 case 0x026: /* VIS I fcmpne32 */
3811 CHECK_FPU_FEATURE(dc, VIS1);
3812 gen_op_load_fpr_DT0(DFPREG(rs1));
3813 gen_op_load_fpr_DT1(DFPREG(rs2));
3814 gen_helper_fcmpne32(cpu_dst);
3815 gen_movl_TN_reg(rd, cpu_dst);
3816 break;
3817 case 0x028: /* VIS I fcmpgt16 */
3818 CHECK_FPU_FEATURE(dc, VIS1);
3819 gen_op_load_fpr_DT0(DFPREG(rs1));
3820 gen_op_load_fpr_DT1(DFPREG(rs2));
3821 gen_helper_fcmpgt16(cpu_dst);
3822 gen_movl_TN_reg(rd, cpu_dst);
3823 break;
3824 case 0x02a: /* VIS I fcmpeq16 */
3825 CHECK_FPU_FEATURE(dc, VIS1);
3826 gen_op_load_fpr_DT0(DFPREG(rs1));
3827 gen_op_load_fpr_DT1(DFPREG(rs2));
3828 gen_helper_fcmpeq16(cpu_dst);
3829 gen_movl_TN_reg(rd, cpu_dst);
3830 break;
3831 case 0x02c: /* VIS I fcmpgt32 */
3832 CHECK_FPU_FEATURE(dc, VIS1);
3833 gen_op_load_fpr_DT0(DFPREG(rs1));
3834 gen_op_load_fpr_DT1(DFPREG(rs2));
3835 gen_helper_fcmpgt32(cpu_dst);
3836 gen_movl_TN_reg(rd, cpu_dst);
3837 break;
3838 case 0x02e: /* VIS I fcmpeq32 */
3839 CHECK_FPU_FEATURE(dc, VIS1);
3840 gen_op_load_fpr_DT0(DFPREG(rs1));
3841 gen_op_load_fpr_DT1(DFPREG(rs2));
3842 gen_helper_fcmpeq32(cpu_dst);
3843 gen_movl_TN_reg(rd, cpu_dst);
3844 break;
3845 case 0x031: /* VIS I fmul8x16 */
3846 CHECK_FPU_FEATURE(dc, VIS1);
3847 gen_op_load_fpr_DT0(DFPREG(rs1));
3848 gen_op_load_fpr_DT1(DFPREG(rs2));
3849 gen_helper_fmul8x16();
3850 gen_op_store_DT0_fpr(DFPREG(rd));
3851 break;
3852 case 0x033: /* VIS I fmul8x16au */
3853 CHECK_FPU_FEATURE(dc, VIS1);
3854 gen_op_load_fpr_DT0(DFPREG(rs1));
3855 gen_op_load_fpr_DT1(DFPREG(rs2));
3856 gen_helper_fmul8x16au();
3857 gen_op_store_DT0_fpr(DFPREG(rd));
3858 break;
3859 case 0x035: /* VIS I fmul8x16al */
3860 CHECK_FPU_FEATURE(dc, VIS1);
3861 gen_op_load_fpr_DT0(DFPREG(rs1));
3862 gen_op_load_fpr_DT1(DFPREG(rs2));
3863 gen_helper_fmul8x16al();
3864 gen_op_store_DT0_fpr(DFPREG(rd));
3865 break;
3866 case 0x036: /* VIS I fmul8sux16 */
3867 CHECK_FPU_FEATURE(dc, VIS1);
3868 gen_op_load_fpr_DT0(DFPREG(rs1));
3869 gen_op_load_fpr_DT1(DFPREG(rs2));
3870 gen_helper_fmul8sux16();
3871 gen_op_store_DT0_fpr(DFPREG(rd));
3872 break;
3873 case 0x037: /* VIS I fmul8ulx16 */
3874 CHECK_FPU_FEATURE(dc, VIS1);
3875 gen_op_load_fpr_DT0(DFPREG(rs1));
3876 gen_op_load_fpr_DT1(DFPREG(rs2));
3877 gen_helper_fmul8ulx16();
3878 gen_op_store_DT0_fpr(DFPREG(rd));
3879 break;
3880 case 0x038: /* VIS I fmuld8sux16 */
3881 CHECK_FPU_FEATURE(dc, VIS1);
3882 gen_op_load_fpr_DT0(DFPREG(rs1));
3883 gen_op_load_fpr_DT1(DFPREG(rs2));
3884 gen_helper_fmuld8sux16();
3885 gen_op_store_DT0_fpr(DFPREG(rd));
3886 break;
3887 case 0x039: /* VIS I fmuld8ulx16 */
3888 CHECK_FPU_FEATURE(dc, VIS1);
3889 gen_op_load_fpr_DT0(DFPREG(rs1));
3890 gen_op_load_fpr_DT1(DFPREG(rs2));
3891 gen_helper_fmuld8ulx16();
3892 gen_op_store_DT0_fpr(DFPREG(rd));
3893 break;
3894 case 0x03a: /* VIS I fpack32 */
3895 case 0x03b: /* VIS I fpack16 */
3896 case 0x03d: /* VIS I fpackfix */
3897 case 0x03e: /* VIS I pdist */
3898 // XXX
3899 goto illegal_insn;
3900 case 0x048: /* VIS I faligndata */
3901 CHECK_FPU_FEATURE(dc, VIS1);
3902 gen_op_load_fpr_DT0(DFPREG(rs1));
3903 gen_op_load_fpr_DT1(DFPREG(rs2));
3904 gen_helper_faligndata();
3905 gen_op_store_DT0_fpr(DFPREG(rd));
3906 break;
3907 case 0x04b: /* VIS I fpmerge */
3908 CHECK_FPU_FEATURE(dc, VIS1);
3909 gen_op_load_fpr_DT0(DFPREG(rs1));
3910 gen_op_load_fpr_DT1(DFPREG(rs2));
3911 gen_helper_fpmerge();
3912 gen_op_store_DT0_fpr(DFPREG(rd));
3913 break;
3914 case 0x04c: /* VIS II bshuffle */
3915 // XXX
3916 goto illegal_insn;
3917 case 0x04d: /* VIS I fexpand */
3918 CHECK_FPU_FEATURE(dc, VIS1);
3919 gen_op_load_fpr_DT0(DFPREG(rs1));
3920 gen_op_load_fpr_DT1(DFPREG(rs2));
3921 gen_helper_fexpand();
3922 gen_op_store_DT0_fpr(DFPREG(rd));
3923 break;
3924 case 0x050: /* VIS I fpadd16 */
3925 CHECK_FPU_FEATURE(dc, VIS1);
3926 gen_op_load_fpr_DT0(DFPREG(rs1));
3927 gen_op_load_fpr_DT1(DFPREG(rs2));
3928 gen_helper_fpadd16();
3929 gen_op_store_DT0_fpr(DFPREG(rd));
3930 break;
3931 case 0x051: /* VIS I fpadd16s */
3932 CHECK_FPU_FEATURE(dc, VIS1);
3933 gen_helper_fpadd16s(cpu_fpr[rd],
3934 cpu_fpr[rs1], cpu_fpr[rs2]);
3935 break;
3936 case 0x052: /* VIS I fpadd32 */
3937 CHECK_FPU_FEATURE(dc, VIS1);
3938 gen_op_load_fpr_DT0(DFPREG(rs1));
3939 gen_op_load_fpr_DT1(DFPREG(rs2));
3940 gen_helper_fpadd32();
3941 gen_op_store_DT0_fpr(DFPREG(rd));
3942 break;
3943 case 0x053: /* VIS I fpadd32s */
3944 CHECK_FPU_FEATURE(dc, VIS1);
3945 gen_helper_fpadd32s(cpu_fpr[rd],
3946 cpu_fpr[rs1], cpu_fpr[rs2]);
3947 break;
3948 case 0x054: /* VIS I fpsub16 */
3949 CHECK_FPU_FEATURE(dc, VIS1);
3950 gen_op_load_fpr_DT0(DFPREG(rs1));
3951 gen_op_load_fpr_DT1(DFPREG(rs2));
3952 gen_helper_fpsub16();
3953 gen_op_store_DT0_fpr(DFPREG(rd));
3954 break;
3955 case 0x055: /* VIS I fpsub16s */
3956 CHECK_FPU_FEATURE(dc, VIS1);
3957 gen_helper_fpsub16s(cpu_fpr[rd],
3958 cpu_fpr[rs1], cpu_fpr[rs2]);
3959 break;
3960 case 0x056: /* VIS I fpsub32 */
3961 CHECK_FPU_FEATURE(dc, VIS1);
3962 gen_op_load_fpr_DT0(DFPREG(rs1));
3963 gen_op_load_fpr_DT1(DFPREG(rs2));
3964 gen_helper_fpsub32();
3965 gen_op_store_DT0_fpr(DFPREG(rd));
3966 break;
3967 case 0x057: /* VIS I fpsub32s */
3968 CHECK_FPU_FEATURE(dc, VIS1);
3969 gen_helper_fpsub32s(cpu_fpr[rd],
3970 cpu_fpr[rs1], cpu_fpr[rs2]);
3971 break;
3972 case 0x060: /* VIS I fzero */
3973 CHECK_FPU_FEATURE(dc, VIS1);
3974 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3975 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3976 break;
3977 case 0x061: /* VIS I fzeros */
3978 CHECK_FPU_FEATURE(dc, VIS1);
3979 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3980 break;
3981 case 0x062: /* VIS I fnor */
3982 CHECK_FPU_FEATURE(dc, VIS1);
3983 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3984 cpu_fpr[DFPREG(rs2)]);
3985 tcg_gen_nor_i32(cpu_fpr[DFPREG(rd) + 1],
3986 cpu_fpr[DFPREG(rs1) + 1],
3987 cpu_fpr[DFPREG(rs2) + 1]);
3988 break;
3989 case 0x063: /* VIS I fnors */
3990 CHECK_FPU_FEATURE(dc, VIS1);
3991 tcg_gen_nor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3992 break;
3993 case 0x064: /* VIS I fandnot2 */
3994 CHECK_FPU_FEATURE(dc, VIS1);
3995 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3996 cpu_fpr[DFPREG(rs2)]);
3997 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
3998 cpu_fpr[DFPREG(rs1) + 1],
3999 cpu_fpr[DFPREG(rs2) + 1]);
4000 break;
4001 case 0x065: /* VIS I fandnot2s */
4002 CHECK_FPU_FEATURE(dc, VIS1);
4003 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4004 break;
4005 case 0x066: /* VIS I fnot2 */
4006 CHECK_FPU_FEATURE(dc, VIS1);
4007 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4008 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4009 cpu_fpr[DFPREG(rs2) + 1]);
4010 break;
4011 case 0x067: /* VIS I fnot2s */
4012 CHECK_FPU_FEATURE(dc, VIS1);
4013 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4014 break;
4015 case 0x068: /* VIS I fandnot1 */
4016 CHECK_FPU_FEATURE(dc, VIS1);
4017 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4018 cpu_fpr[DFPREG(rs1)]);
4019 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4020 cpu_fpr[DFPREG(rs2) + 1],
4021 cpu_fpr[DFPREG(rs1) + 1]);
4022 break;
4023 case 0x069: /* VIS I fandnot1s */
4024 CHECK_FPU_FEATURE(dc, VIS1);
4025 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4026 break;
4027 case 0x06a: /* VIS I fnot1 */
4028 CHECK_FPU_FEATURE(dc, VIS1);
4029 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4030 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4031 cpu_fpr[DFPREG(rs1) + 1]);
4032 break;
4033 case 0x06b: /* VIS I fnot1s */
4034 CHECK_FPU_FEATURE(dc, VIS1);
4035 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4036 break;
4037 case 0x06c: /* VIS I fxor */
4038 CHECK_FPU_FEATURE(dc, VIS1);
4039 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4040 cpu_fpr[DFPREG(rs2)]);
4041 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4042 cpu_fpr[DFPREG(rs1) + 1],
4043 cpu_fpr[DFPREG(rs2) + 1]);
4044 break;
4045 case 0x06d: /* VIS I fxors */
4046 CHECK_FPU_FEATURE(dc, VIS1);
4047 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4048 break;
4049 case 0x06e: /* VIS I fnand */
4050 CHECK_FPU_FEATURE(dc, VIS1);
4051 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4052 cpu_fpr[DFPREG(rs2)]);
4053 tcg_gen_nand_i32(cpu_fpr[DFPREG(rd) + 1],
4054 cpu_fpr[DFPREG(rs1) + 1],
4055 cpu_fpr[DFPREG(rs2) + 1]);
4056 break;
4057 case 0x06f: /* VIS I fnands */
4058 CHECK_FPU_FEATURE(dc, VIS1);
4059 tcg_gen_nand_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4060 break;
4061 case 0x070: /* VIS I fand */
4062 CHECK_FPU_FEATURE(dc, VIS1);
4063 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4064 cpu_fpr[DFPREG(rs2)]);
4065 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4066 cpu_fpr[DFPREG(rs1) + 1],
4067 cpu_fpr[DFPREG(rs2) + 1]);
4068 break;
4069 case 0x071: /* VIS I fands */
4070 CHECK_FPU_FEATURE(dc, VIS1);
4071 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4072 break;
4073 case 0x072: /* VIS I fxnor */
4074 CHECK_FPU_FEATURE(dc, VIS1);
4075 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4076 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4077 cpu_fpr[DFPREG(rs1)]);
4078 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4079 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4080 cpu_fpr[DFPREG(rs1) + 1]);
4081 break;
4082 case 0x073: /* VIS I fxnors */
4083 CHECK_FPU_FEATURE(dc, VIS1);
4084 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4085 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4086 break;
4087 case 0x074: /* VIS I fsrc1 */
4088 CHECK_FPU_FEATURE(dc, VIS1);
4089 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4090 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4091 cpu_fpr[DFPREG(rs1) + 1]);
4092 break;
4093 case 0x075: /* VIS I fsrc1s */
4094 CHECK_FPU_FEATURE(dc, VIS1);
4095 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4096 break;
4097 case 0x076: /* VIS I fornot2 */
4098 CHECK_FPU_FEATURE(dc, VIS1);
4099 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4100 cpu_fpr[DFPREG(rs2)]);
4101 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4102 cpu_fpr[DFPREG(rs1) + 1],
4103 cpu_fpr[DFPREG(rs2) + 1]);
4104 break;
4105 case 0x077: /* VIS I fornot2s */
4106 CHECK_FPU_FEATURE(dc, VIS1);
4107 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4108 break;
4109 case 0x078: /* VIS I fsrc2 */
4110 CHECK_FPU_FEATURE(dc, VIS1);
4111 gen_op_load_fpr_DT0(DFPREG(rs2));
4112 gen_op_store_DT0_fpr(DFPREG(rd));
4113 break;
4114 case 0x079: /* VIS I fsrc2s */
4115 CHECK_FPU_FEATURE(dc, VIS1);
4116 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4117 break;
4118 case 0x07a: /* VIS I fornot1 */
4119 CHECK_FPU_FEATURE(dc, VIS1);
4120 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4121 cpu_fpr[DFPREG(rs1)]);
4122 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4123 cpu_fpr[DFPREG(rs2) + 1],
4124 cpu_fpr[DFPREG(rs1) + 1]);
4125 break;
4126 case 0x07b: /* VIS I fornot1s */
4127 CHECK_FPU_FEATURE(dc, VIS1);
4128 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4129 break;
4130 case 0x07c: /* VIS I for */
4131 CHECK_FPU_FEATURE(dc, VIS1);
4132 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4133 cpu_fpr[DFPREG(rs2)]);
4134 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4135 cpu_fpr[DFPREG(rs1) + 1],
4136 cpu_fpr[DFPREG(rs2) + 1]);
4137 break;
4138 case 0x07d: /* VIS I fors */
4139 CHECK_FPU_FEATURE(dc, VIS1);
4140 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4141 break;
4142 case 0x07e: /* VIS I fone */
4143 CHECK_FPU_FEATURE(dc, VIS1);
4144 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4145 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4146 break;
4147 case 0x07f: /* VIS I fones */
4148 CHECK_FPU_FEATURE(dc, VIS1);
4149 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4150 break;
4151 case 0x080: /* VIS I shutdown */
4152 case 0x081: /* VIS II siam */
4153 // XXX
4154 goto illegal_insn;
4155 default:
4156 goto illegal_insn;
4157 }
4158 #else
4159 goto ncp_insn;
4160 #endif
4161 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4162 #ifdef TARGET_SPARC64
4163 goto illegal_insn;
4164 #else
4165 goto ncp_insn;
4166 #endif
4167 #ifdef TARGET_SPARC64
4168 } else if (xop == 0x39) { /* V9 return */
4169 TCGv_i32 r_const;
4170
4171 save_state(dc, cpu_cond);
4172 cpu_src1 = get_src1(insn, cpu_src1);
4173 if (IS_IMM) { /* immediate */
4174 simm = GET_FIELDs(insn, 19, 31);
4175 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4176 } else { /* register */
4177 rs2 = GET_FIELD(insn, 27, 31);
4178 if (rs2) {
4179 gen_movl_reg_TN(rs2, cpu_src2);
4180 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4181 } else
4182 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4183 }
4184 gen_helper_restore();
4185 gen_mov_pc_npc(dc, cpu_cond);
4186 r_const = tcg_const_i32(3);
4187 gen_helper_check_align(cpu_dst, r_const);
4188 tcg_temp_free_i32(r_const);
4189 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4190 dc->npc = DYNAMIC_PC;
4191 goto jmp_insn;
4192 #endif
4193 } else {
4194 cpu_src1 = get_src1(insn, cpu_src1);
4195 if (IS_IMM) { /* immediate */
4196 simm = GET_FIELDs(insn, 19, 31);
4197 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4198 } else { /* register */
4199 rs2 = GET_FIELD(insn, 27, 31);
4200 if (rs2) {
4201 gen_movl_reg_TN(rs2, cpu_src2);
4202 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4203 } else
4204 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4205 }
4206 switch (xop) {
4207 case 0x38: /* jmpl */
4208 {
4209 TCGv r_pc;
4210 TCGv_i32 r_const;
4211
4212 r_pc = tcg_const_tl(dc->pc);
4213 gen_movl_TN_reg(rd, r_pc);
4214 tcg_temp_free(r_pc);
4215 gen_mov_pc_npc(dc, cpu_cond);
4216 r_const = tcg_const_i32(3);
4217 gen_helper_check_align(cpu_dst, r_const);
4218 tcg_temp_free_i32(r_const);
4219 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4220 dc->npc = DYNAMIC_PC;
4221 }
4222 goto jmp_insn;
4223 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4224 case 0x39: /* rett, V9 return */
4225 {
4226 TCGv_i32 r_const;
4227
4228 if (!supervisor(dc))
4229 goto priv_insn;
4230 gen_mov_pc_npc(dc, cpu_cond);
4231 r_const = tcg_const_i32(3);
4232 gen_helper_check_align(cpu_dst, r_const);
4233 tcg_temp_free_i32(r_const);
4234 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4235 dc->npc = DYNAMIC_PC;
4236 gen_helper_rett();
4237 }
4238 goto jmp_insn;
4239 #endif
4240 case 0x3b: /* flush */
4241 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4242 goto unimp_flush;
4243 /* nop */
4244 break;
4245 case 0x3c: /* save */
4246 save_state(dc, cpu_cond);
4247 gen_helper_save();
4248 gen_movl_TN_reg(rd, cpu_dst);
4249 break;
4250 case 0x3d: /* restore */
4251 save_state(dc, cpu_cond);
4252 gen_helper_restore();
4253 gen_movl_TN_reg(rd, cpu_dst);
4254 break;
4255 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4256 case 0x3e: /* V9 done/retry */
4257 {
4258 switch (rd) {
4259 case 0:
4260 if (!supervisor(dc))
4261 goto priv_insn;
4262 dc->npc = DYNAMIC_PC;
4263 dc->pc = DYNAMIC_PC;
4264 gen_helper_done();
4265 goto jmp_insn;
4266 case 1:
4267 if (!supervisor(dc))
4268 goto priv_insn;
4269 dc->npc = DYNAMIC_PC;
4270 dc->pc = DYNAMIC_PC;
4271 gen_helper_retry();
4272 goto jmp_insn;
4273 default:
4274 goto illegal_insn;
4275 }
4276 }
4277 break;
4278 #endif
4279 default:
4280 goto illegal_insn;
4281 }
4282 }
4283 break;
4284 }
4285 break;
4286 case 3: /* load/store instructions */
4287 {
4288 unsigned int xop = GET_FIELD(insn, 7, 12);
4289
4290 /* flush pending conditional evaluations before exposing
4291 cpu state */
4292 if (dc->cc_op != CC_OP_FLAGS) {
4293 dc->cc_op = CC_OP_FLAGS;
4294 gen_helper_compute_psr();
4295 }
4296 cpu_src1 = get_src1(insn, cpu_src1);
4297 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4298 rs2 = GET_FIELD(insn, 27, 31);
4299 gen_movl_reg_TN(rs2, cpu_src2);
4300 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4301 } else if (IS_IMM) { /* immediate */
4302 simm = GET_FIELDs(insn, 19, 31);
4303 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4304 } else { /* register */
4305 rs2 = GET_FIELD(insn, 27, 31);
4306 if (rs2 != 0) {
4307 gen_movl_reg_TN(rs2, cpu_src2);
4308 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4309 } else
4310 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4311 }
4312 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4313 (xop > 0x17 && xop <= 0x1d ) ||
4314 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4315 switch (xop) {
4316 case 0x0: /* ld, V9 lduw, load unsigned word */
4317 gen_address_mask(dc, cpu_addr);
4318 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4319 break;
4320 case 0x1: /* ldub, load unsigned byte */
4321 gen_address_mask(dc, cpu_addr);
4322 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4323 break;
4324 case 0x2: /* lduh, load unsigned halfword */
4325 gen_address_mask(dc, cpu_addr);
4326 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4327 break;
4328 case 0x3: /* ldd, load double word */
4329 if (rd & 1)
4330 goto illegal_insn;
4331 else {
4332 TCGv_i32 r_const;
4333
4334 save_state(dc, cpu_cond);
4335 r_const = tcg_const_i32(7);
4336 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4337 tcg_temp_free_i32(r_const);
4338 gen_address_mask(dc, cpu_addr);
4339 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4340 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4341 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4342 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4343 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4344 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4345 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4346 }
4347 break;
4348 case 0x9: /* ldsb, load signed byte */
4349 gen_address_mask(dc, cpu_addr);
4350 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4351 break;
4352 case 0xa: /* ldsh, load signed halfword */
4353 gen_address_mask(dc, cpu_addr);
4354 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4355 break;
4356 case 0xd: /* ldstub -- XXX: should be atomically */
4357 {
4358 TCGv r_const;
4359
4360 gen_address_mask(dc, cpu_addr);
4361 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4362 r_const = tcg_const_tl(0xff);
4363 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4364 tcg_temp_free(r_const);
4365 }
4366 break;
4367 case 0x0f: /* swap, swap register with memory. Also
4368 atomically */
4369 CHECK_IU_FEATURE(dc, SWAP);
4370 gen_movl_reg_TN(rd, cpu_val);
4371 gen_address_mask(dc, cpu_addr);
4372 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4373 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4374 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4375 break;
4376 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4377 case 0x10: /* lda, V9 lduwa, load word alternate */
4378 #ifndef TARGET_SPARC64
4379 if (IS_IMM)
4380 goto illegal_insn;
4381 if (!supervisor(dc))
4382 goto priv_insn;
4383 #endif
4384 save_state(dc, cpu_cond);
4385 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4386 break;
4387 case 0x11: /* lduba, load unsigned byte alternate */
4388 #ifndef TARGET_SPARC64
4389 if (IS_IMM)
4390 goto illegal_insn;
4391 if (!supervisor(dc))
4392 goto priv_insn;
4393 #endif
4394 save_state(dc, cpu_cond);
4395 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4396 break;
4397 case 0x12: /* lduha, load unsigned halfword alternate */
4398 #ifndef TARGET_SPARC64
4399 if (IS_IMM)
4400 goto illegal_insn;
4401 if (!supervisor(dc))
4402 goto priv_insn;
4403 #endif
4404 save_state(dc, cpu_cond);
4405 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4406 break;
4407 case 0x13: /* ldda, load double word alternate */
4408 #ifndef TARGET_SPARC64
4409 if (IS_IMM)
4410 goto illegal_insn;
4411 if (!supervisor(dc))
4412 goto priv_insn;
4413 #endif
4414 if (rd & 1)
4415 goto illegal_insn;
4416 save_state(dc, cpu_cond);
4417 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4418 goto skip_move;
4419 case 0x19: /* ldsba, load signed byte alternate */
4420 #ifndef TARGET_SPARC64
4421 if (IS_IMM)
4422 goto illegal_insn;
4423 if (!supervisor(dc))
4424 goto priv_insn;
4425 #endif
4426 save_state(dc, cpu_cond);
4427 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4428 break;
4429 case 0x1a: /* ldsha, load signed halfword alternate */
4430 #ifndef TARGET_SPARC64
4431 if (IS_IMM)
4432 goto illegal_insn;
4433 if (!supervisor(dc))
4434 goto priv_insn;
4435 #endif
4436 save_state(dc, cpu_cond);
4437 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4438 break;
4439 case 0x1d: /* ldstuba -- XXX: should be atomically */
4440 #ifndef TARGET_SPARC64
4441 if (IS_IMM)
4442 goto illegal_insn;
4443 if (!supervisor(dc))
4444 goto priv_insn;
4445 #endif
4446 save_state(dc, cpu_cond);
4447 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4448 break;
4449 case 0x1f: /* swapa, swap reg with alt. memory. Also
4450 atomically */
4451 CHECK_IU_FEATURE(dc, SWAP);
4452 #ifndef TARGET_SPARC64
4453 if (IS_IMM)
4454 goto illegal_insn;
4455 if (!supervisor(dc))
4456 goto priv_insn;
4457 #endif
4458 save_state(dc, cpu_cond);
4459 gen_movl_reg_TN(rd, cpu_val);
4460 gen_swap_asi(cpu_val, cpu_addr, insn);
4461 break;
4462
4463 #ifndef TARGET_SPARC64
4464 case 0x30: /* ldc */
4465 case 0x31: /* ldcsr */
4466 case 0x33: /* lddc */
4467 goto ncp_insn;
4468 #endif
4469 #endif
4470 #ifdef TARGET_SPARC64
4471 case 0x08: /* V9 ldsw */
4472 gen_address_mask(dc, cpu_addr);
4473 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4474 break;
4475 case 0x0b: /* V9 ldx */
4476 gen_address_mask(dc, cpu_addr);
4477 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4478 break;
4479 case 0x18: /* V9 ldswa */
4480 save_state(dc, cpu_cond);
4481 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4482 break;
4483 case 0x1b: /* V9 ldxa */
4484 save_state(dc, cpu_cond);
4485 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4486 break;
4487 case 0x2d: /* V9 prefetch, no effect */
4488 goto skip_move;
4489 case 0x30: /* V9 ldfa */
4490 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4491 goto jmp_insn;
4492 }
4493 save_state(dc, cpu_cond);
4494 gen_ldf_asi(cpu_addr, insn, 4, rd);
4495 goto skip_move;
4496 case 0x33: /* V9 lddfa */
4497 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4498 goto jmp_insn;
4499 }
4500 save_state(dc, cpu_cond);
4501 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4502 goto skip_move;
4503 case 0x3d: /* V9 prefetcha, no effect */
4504 goto skip_move;
4505 case 0x32: /* V9 ldqfa */
4506 CHECK_FPU_FEATURE(dc, FLOAT128);
4507 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4508 goto jmp_insn;
4509 }
4510 save_state(dc, cpu_cond);
4511 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4512 goto skip_move;
4513 #endif
4514 default:
4515 goto illegal_insn;
4516 }
4517 gen_movl_TN_reg(rd, cpu_val);
4518 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4519 skip_move: ;
4520 #endif
4521 } else if (xop >= 0x20 && xop < 0x24) {
4522 if (gen_trap_ifnofpu(dc, cpu_cond))
4523 goto jmp_insn;
4524 save_state(dc, cpu_cond);
4525 switch (xop) {
4526 case 0x20: /* ldf, load fpreg */
4527 gen_address_mask(dc, cpu_addr);
4528 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4529 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4530 break;
4531 case 0x21: /* ldfsr, V9 ldxfsr */
4532 #ifdef TARGET_SPARC64
4533 gen_address_mask(dc, cpu_addr);
4534 if (rd == 1) {
4535 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4536 gen_helper_ldxfsr(cpu_tmp64);
4537 } else {
4538 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4539 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
4540 gen_helper_ldfsr(cpu_tmp32);
4541 }
4542 #else
4543 {
4544 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4545 gen_helper_ldfsr(cpu_tmp32);
4546 }
4547 #endif
4548 break;
4549 case 0x22: /* ldqf, load quad fpreg */
4550 {
4551 TCGv_i32 r_const;
4552
4553 CHECK_FPU_FEATURE(dc, FLOAT128);
4554 r_const = tcg_const_i32(dc->mem_idx);
4555 gen_address_mask(dc, cpu_addr);
4556 gen_helper_ldqf(cpu_addr, r_const);
4557 tcg_temp_free_i32(r_const);
4558 gen_op_store_QT0_fpr(QFPREG(rd));
4559 }
4560 break;
4561 case 0x23: /* lddf, load double fpreg */
4562 {
4563 TCGv_i32 r_const;
4564
4565 r_const = tcg_const_i32(dc->mem_idx);
4566 gen_address_mask(dc, cpu_addr);
4567 gen_helper_lddf(cpu_addr, r_const);
4568 tcg_temp_free_i32(r_const);
4569 gen_op_store_DT0_fpr(DFPREG(rd));
4570 }
4571 break;
4572 default:
4573 goto illegal_insn;
4574 }
4575 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
4576 xop == 0xe || xop == 0x1e) {
4577 gen_movl_reg_TN(rd, cpu_val);
4578 switch (xop) {
4579 case 0x4: /* st, store word */
4580 gen_address_mask(dc, cpu_addr);
4581 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4582 break;
4583 case 0x5: /* stb, store byte */
4584 gen_address_mask(dc, cpu_addr);
4585 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4586 break;
4587 case 0x6: /* sth, store halfword */
4588 gen_address_mask(dc, cpu_addr);
4589 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4590 break;
4591 case 0x7: /* std, store double word */
4592 if (rd & 1)
4593 goto illegal_insn;
4594 else {
4595 TCGv_i32 r_const;
4596
4597 save_state(dc, cpu_cond);
4598 gen_address_mask(dc, cpu_addr);
4599 r_const = tcg_const_i32(7);
4600 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4601 tcg_temp_free_i32(r_const);
4602 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4603 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4604 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4605 }
4606 break;
4607 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4608 case 0x14: /* sta, V9 stwa, store word alternate */
4609 #ifndef TARGET_SPARC64
4610 if (IS_IMM)
4611 goto illegal_insn;
4612 if (!supervisor(dc))
4613 goto priv_insn;
4614 #endif
4615 save_state(dc, cpu_cond);
4616 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4617 dc->npc = DYNAMIC_PC;
4618 break;
4619 case 0x15: /* stba, store byte alternate */
4620 #ifndef TARGET_SPARC64
4621 if (IS_IMM)
4622 goto illegal_insn;
4623 if (!supervisor(dc))
4624 goto priv_insn;
4625 #endif
4626 save_state(dc, cpu_cond);
4627 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4628 dc->npc = DYNAMIC_PC;
4629 break;
4630 case 0x16: /* stha, store halfword alternate */
4631 #ifndef TARGET_SPARC64
4632 if (IS_IMM)
4633 goto illegal_insn;
4634 if (!supervisor(dc))
4635 goto priv_insn;
4636 #endif
4637 save_state(dc, cpu_cond);
4638 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4639 dc->npc = DYNAMIC_PC;
4640 break;
4641 case 0x17: /* stda, store double word alternate */
4642 #ifndef TARGET_SPARC64
4643 if (IS_IMM)
4644 goto illegal_insn;
4645 if (!supervisor(dc))
4646 goto priv_insn;
4647 #endif
4648 if (rd & 1)
4649 goto illegal_insn;
4650 else {
4651 save_state(dc, cpu_cond);
4652 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4653 }
4654 break;
4655 #endif
4656 #ifdef TARGET_SPARC64
4657 case 0x0e: /* V9 stx */
4658 gen_address_mask(dc, cpu_addr);
4659 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4660 break;
4661 case 0x1e: /* V9 stxa */
4662 save_state(dc, cpu_cond);
4663 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4664 dc->npc = DYNAMIC_PC;
4665 break;
4666 #endif
4667 default:
4668 goto illegal_insn;
4669 }
4670 } else if (xop > 0x23 && xop < 0x28) {
4671 if (gen_trap_ifnofpu(dc, cpu_cond))
4672 goto jmp_insn;
4673 save_state(dc, cpu_cond);
4674 switch (xop) {
4675 case 0x24: /* stf, store fpreg */
4676 gen_address_mask(dc, cpu_addr);
4677 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4678 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4679 break;
4680 case 0x25: /* stfsr, V9 stxfsr */
4681 #ifdef TARGET_SPARC64
4682 gen_address_mask(dc, cpu_addr);
4683 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4684 if (rd == 1)
4685 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4686 else
4687 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4688 #else
4689 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4690 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4691 #endif
4692 break;
4693 case 0x26:
4694 #ifdef TARGET_SPARC64
4695 /* V9 stqf, store quad fpreg */
4696 {
4697 TCGv_i32 r_const;
4698
4699 CHECK_FPU_FEATURE(dc, FLOAT128);
4700 gen_op_load_fpr_QT0(QFPREG(rd));
4701 r_const = tcg_const_i32(dc->mem_idx);
4702 gen_address_mask(dc, cpu_addr);
4703 gen_helper_stqf(cpu_addr, r_const);
4704 tcg_temp_free_i32(r_const);
4705 }
4706 break;
4707 #else /* !TARGET_SPARC64 */
4708 /* stdfq, store floating point queue */
4709 #if defined(CONFIG_USER_ONLY)
4710 goto illegal_insn;
4711 #else
4712 if (!supervisor(dc))
4713 goto priv_insn;
4714 if (gen_trap_ifnofpu(dc, cpu_cond))
4715 goto jmp_insn;
4716 goto nfq_insn;
4717 #endif
4718 #endif
4719 case 0x27: /* stdf, store double fpreg */
4720 {
4721 TCGv_i32 r_const;
4722
4723 gen_op_load_fpr_DT0(DFPREG(rd));
4724 r_const = tcg_const_i32(dc->mem_idx);
4725 gen_address_mask(dc, cpu_addr);
4726 gen_helper_stdf(cpu_addr, r_const);
4727 tcg_temp_free_i32(r_const);
4728 }
4729 break;
4730 default:
4731 goto illegal_insn;
4732 }
4733 } else if (xop > 0x33 && xop < 0x3f) {
4734 save_state(dc, cpu_cond);
4735 switch (xop) {
4736 #ifdef TARGET_SPARC64
4737 case 0x34: /* V9 stfa */
4738 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4739 goto jmp_insn;
4740 }
4741 gen_stf_asi(cpu_addr, insn, 4, rd);
4742 break;
4743 case 0x36: /* V9 stqfa */
4744 {
4745 TCGv_i32 r_const;
4746
4747 CHECK_FPU_FEATURE(dc, FLOAT128);
4748 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4749 goto jmp_insn;
4750 }
4751 r_const = tcg_const_i32(7);
4752 gen_helper_check_align(cpu_addr, r_const);
4753 tcg_temp_free_i32(r_const);
4754 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4755 }
4756 break;
4757 case 0x37: /* V9 stdfa */
4758 if (gen_trap_ifnofpu(dc, cpu_cond)) {
4759 goto jmp_insn;
4760 }
4761 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4762 break;
4763 case 0x3c: /* V9 casa */
4764 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4765 gen_movl_TN_reg(rd, cpu_val);
4766 break;
4767 case 0x3e: /* V9 casxa */
4768 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4769 gen_movl_TN_reg(rd, cpu_val);
4770 break;
4771 #else
4772 case 0x34: /* stc */
4773 case 0x35: /* stcsr */
4774 case 0x36: /* stdcq */
4775 case 0x37: /* stdc */
4776 goto ncp_insn;
4777 #endif
4778 default:
4779 goto illegal_insn;
4780 }
4781 } else
4782 goto illegal_insn;
4783 }
4784 break;
4785 }
4786 /* default case for non jump instructions */
4787 if (dc->npc == DYNAMIC_PC) {
4788 dc->pc = DYNAMIC_PC;
4789 gen_op_next_insn();
4790 } else if (dc->npc == JUMP_PC) {
4791 /* we can do a static jump */
4792 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4793 dc->is_br = 1;
4794 } else {
4795 dc->pc = dc->npc;
4796 dc->npc = dc->npc + 4;
4797 }
4798 jmp_insn:
4799 goto egress;
4800 illegal_insn:
4801 {
4802 TCGv_i32 r_const;
4803
4804 save_state(dc, cpu_cond);
4805 r_const = tcg_const_i32(TT_ILL_INSN);
4806 gen_helper_raise_exception(r_const);
4807 tcg_temp_free_i32(r_const);
4808 dc->is_br = 1;
4809 }
4810 goto egress;
4811 unimp_flush:
4812 {
4813 TCGv_i32 r_const;
4814
4815 save_state(dc, cpu_cond);
4816 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4817 gen_helper_raise_exception(r_const);
4818 tcg_temp_free_i32(r_const);
4819 dc->is_br = 1;
4820 }
4821 goto egress;
4822 #if !defined(CONFIG_USER_ONLY)
4823 priv_insn:
4824 {
4825 TCGv_i32 r_const;
4826
4827 save_state(dc, cpu_cond);
4828 r_const = tcg_const_i32(TT_PRIV_INSN);
4829 gen_helper_raise_exception(r_const);
4830 tcg_temp_free_i32(r_const);
4831 dc->is_br = 1;
4832 }
4833 goto egress;
4834 #endif
4835 nfpu_insn:
4836 save_state(dc, cpu_cond);
4837 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4838 dc->is_br = 1;
4839 goto egress;
4840 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4841 nfq_insn:
4842 save_state(dc, cpu_cond);
4843 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4844 dc->is_br = 1;
4845 goto egress;
4846 #endif
4847 #ifndef TARGET_SPARC64
4848 ncp_insn:
4849 {
4850 TCGv r_const;
4851
4852 save_state(dc, cpu_cond);
4853 r_const = tcg_const_i32(TT_NCP_INSN);
4854 gen_helper_raise_exception(r_const);
4855 tcg_temp_free(r_const);
4856 dc->is_br = 1;
4857 }
4858 goto egress;
4859 #endif
4860 egress:
4861 tcg_temp_free(cpu_tmp1);
4862 tcg_temp_free(cpu_tmp2);
4863 }
4864
/* Core translation loop: disassemble guest SPARC instructions starting at
   tb->pc and emit TCG ops for them, until a branch, a page boundary, the
   op buffer limit, or the icount budget stops the block.
   spc != 0 selects "search PC" mode, which additionally records per-insn
   pc/npc/icount entries in the gen_opc_* side tables so a host PC can later
   be mapped back to a guest PC (see restore_state_to_opc). */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    CPUBreakpoint *bp;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    /* Seed the disassembly context from the TB: pc/npc pair (SPARC delay
       slots make npc explicit state), MMU index, and feature flags. */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (env->singlestep_enabled || singlestep);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Scratch temporaries shared by the per-insn generators; freed again
       at exit_gen_loop below. */
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* Emit a debug trap instead of the guest insn when a breakpoint
           sits on the current PC. */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            /* Search-PC mode: record the guest pc/npc and insn count for
               every op index so exceptions can restore precise state. */
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    /* Release scratch temporaries in reverse order of allocation. */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        /* Block fell through without an explicit branch: finish it with
           either a direct-chained goto_tb or a dynamic exit. */
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Pad the remainder of the side tables and publish the two
           possible jump targets for JUMP_PC resolution. */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
5001
5002 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
5003 {
5004 gen_intermediate_code_internal(tb, 0, env);
5005 }
5006
5007 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
5008 {
5009 gen_intermediate_code_internal(tb, 1, env);
5010 }
5011
5012 void gen_intermediate_code_init(CPUSPARCState *env)
5013 {
5014 unsigned int i;
5015 static int inited;
5016 static const char * const gregnames[8] = {
5017 NULL, // g0 not used
5018 "g1",
5019 "g2",
5020 "g3",
5021 "g4",
5022 "g5",
5023 "g6",
5024 "g7",
5025 };
5026 static const char * const fregnames[64] = {
5027 "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
5028 "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
5029 "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
5030 "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
5031 "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
5032 "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
5033 "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
5034 "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
5035 };
5036
5037 /* init various static tables */
5038 if (!inited) {
5039 inited = 1;
5040
5041 cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5042 cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
5043 offsetof(CPUState, regwptr),
5044 "regwptr");
5045 #ifdef TARGET_SPARC64
5046 cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
5047 "xcc");
5048 cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
5049 "asi");
5050 cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
5051 "fprs");
5052 cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
5053 "gsr");
5054 cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
5055 offsetof(CPUState, tick_cmpr),
5056 "tick_cmpr");
5057 cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
5058 offsetof(CPUState, stick_cmpr),
5059 "stick_cmpr");
5060 cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
5061 offsetof(CPUState, hstick_cmpr),
5062 "hstick_cmpr");
5063 cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
5064 "hintp");
5065 cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
5066 "htba");
5067 cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
5068 "hver");
5069 cpu_ssr = tcg_global_mem_new(TCG_AREG0,
5070 offsetof(CPUState, ssr), "ssr");
5071 cpu_ver = tcg_global_mem_new(TCG_AREG0,
5072 offsetof(CPUState, version), "ver");
5073 cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
5074 offsetof(CPUState, softint),
5075 "softint");
5076 #else
5077 cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
5078 "wim");
5079 #endif
5080 cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
5081 "cond");
5082 cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
5083 "cc_src");
5084 cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
5085 offsetof(CPUState, cc_src2),
5086 "cc_src2");
5087 cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
5088 "cc_dst");
5089 cpu_cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
5090 "cc_op");
5091 cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
5092 "psr");
5093 cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
5094 "fsr");
5095 cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
5096 "pc");
5097 cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
5098 "npc");
5099 cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
5100 #ifndef CONFIG_USER_ONLY
5101 cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
5102 "tbr");
5103 #endif
5104 for (i = 1; i < 8; i++)
5105 cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
5106 offsetof(CPUState, gregs[i]),
5107 gregnames[i]);
5108 for (i = 0; i < TARGET_FPREGS; i++)
5109 cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
5110 offsetof(CPUState, fpr[i]),
5111 fregnames[i]);
5112
5113 /* register helpers */
5114
5115 #define GEN_HELPER 2
5116 #include "helper.h"
5117 }
5118 }
5119
5120 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
5121 {
5122 target_ulong npc;
5123 env->pc = gen_opc_pc[pc_pos];
5124 npc = gen_opc_npc[pc_pos];
5125 if (npc == 1) {
5126 /* dynamic NPC: already stored */
5127 } else if (npc == 2) {
5128 /* jump PC: use 'cond' and the jump targets of the translation */
5129 if (env->cond) {
5130 env->npc = gen_opc_jump_pc[0];
5131 } else {
5132 env->npc = gen_opc_jump_pc[1];
5133 }
5134 } else {
5135 env->npc = npc;
5136 }
5137
5138 /* flush pending conditional evaluations before exposing cpu state */
5139 if (CC_OP != CC_OP_FLAGS) {
5140 helper_compute_psr();
5141 }
5142 }