]> git.proxmox.com Git - qemu.git/blob - target-sparc/translate.c
Convert ldfsr and stfsr to TCG
[qemu.git] / target-sparc / translate.c
1 /*
2 SPARC translation
3
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
6
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
11
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 */
21
22 /*
23 TODO-list:
24
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
28 */
29
30 #include <stdarg.h>
31 #include <stdlib.h>
32 #include <stdio.h>
33 #include <string.h>
34 #include <inttypes.h>
35
36 #include "cpu.h"
37 #include "exec-all.h"
38 #include "disas.h"
39 #include "helper.h"
40 #include "tcg-op.h"
41
#define DEBUG_DISAS

#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */

/* global register indexes (allocated once, live for the whole translator) */
static TCGv cpu_env, cpu_T[3], cpu_regwptr, cpu_cc_src, cpu_cc_dst, cpu_psr;
static TCGv cpu_gregs[8];
#ifdef TARGET_SPARC64
/* 64-bit condition codes (xcc) exist only on sparc64 */
static TCGv cpu_xcc;
#endif
/* local register indexes (only used inside old micro ops) */
static TCGv cpu_tmp0;
56
/* Per-translation-block state threaded through the code generator
   while one TB is being disassembled. */
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;          /* NOTE(review): presumably set when the TB ends in a
                           branch — confirm against users outside this chunk */
    int mem_idx;        /* privilege level for memory accesses; see
                           supervisor()/hypervisor() macros below */
    int fpu_enabled;    /* nonzero when FPU instructions are permitted */
    struct TranslationBlock *tb; /* TB being generated */
} DisasContext;
66
typedef struct sparc_def_t sparc_def_t;

/* Description of one emulated SPARC CPU model, looked up by name. */
struct sparc_def_t {
    const unsigned char *name;   /* model name used for lookup */
    target_ulong iu_version;     /* integer unit version register value */
    uint32_t fpu_version;        /* FPU version register value */
    uint32_t mmu_version;        /* MMU version register value */
    uint32_t mmu_bm;             /* NOTE(review): MMU boot-mode mask,
                                    presumably — verify against cpu.h */
    uint32_t mmu_ctpr_mask;      /* context table pointer register mask */
    uint32_t mmu_cxr_mask;       /* context register mask */
    uint32_t mmu_sfsr_mask;      /* sync fault status register mask */
    uint32_t mmu_trcr_mask;      /* NOTE(review): TLB replacement control
                                    register mask — confirm */
};
80
81 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name);
82
83 extern FILE *logfile;
84 extern int loglevel;
85
// Extract bits FROM..TO of X using non-native bit order (bit 0 is the MSB,
// as in instruction-format diagrams).
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

// Sign-extended variants of the two extractors above.
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
// Map architectural FP register numbers onto backing-store indexes; for
// double/quad registers bit 0 of the register number selects the upper bank.
#define FFPREG(r) (r)
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define FFPREG(r) (r)
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
106
/* Sign-extend the low LEN bits of X to a full signed 32-bit value.
   Used by GET_FIELDs/GET_FIELD_SPs to widen immediate fields. */
static int sign_extend(int x, int len)
{
    /* Do the left shift on an unsigned value: shifting a signed int
       into (or past) the sign bit is undefined behavior.  The
       arithmetic right shift of the signed result then replicates
       the field's top bit, exactly as the original intended. */
    len = 32 - len;
    return (int32_t)((uint32_t)x << len) >> len;
}
112
/* Bit 13 of an instruction word selects the immediate operand form. */
#define IS_IMM (insn & (1<<13))

static void disas_sparc_insn(DisasContext * dc);
116
/* Define func(n): dispatch to the per-register micro-op NAME##n through a
   table indexed by FP register number.  On sparc64 the table has 64 slots
   but odd entries above 31 are unused (0) per the DFPREG/QFPREG mapping. */
#ifdef TARGET_SPARC64
#define GEN32(func, NAME) \
static GenOpFunc * const NAME ## _table [64] = {                              \
NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3,                                   \
NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7,                                   \
NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11,                                 \
NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15,                               \
NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19,                               \
NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23,                               \
NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27,                               \
NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31,                               \
NAME ## 32, 0, NAME ## 34, 0, NAME ## 36, 0, NAME ## 38, 0,                   \
NAME ## 40, 0, NAME ## 42, 0, NAME ## 44, 0, NAME ## 46, 0,                   \
NAME ## 48, 0, NAME ## 50, 0, NAME ## 52, 0, NAME ## 54, 0,                   \
NAME ## 56, 0, NAME ## 58, 0, NAME ## 60, 0, NAME ## 62, 0,                   \
};                                                                            \
static inline void func(int n)                                                \
{                                                                             \
    NAME ## _table[n]();                                                      \
}
#else
#define GEN32(func, NAME) \
static GenOpFunc *const NAME ## _table [32] = {                               \
NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3,                                   \
NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7,                                   \
NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11,                                 \
NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15,                               \
NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19,                               \
NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23,                               \
NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27,                               \
NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31,                               \
};                                                                            \
static inline void func(int n)                                                \
{                                                                             \
    NAME ## _table[n]();                                                      \
}
#endif
154
/* floating point registers moves: per-register micro-op dispatchers for
   loading/storing the FT0/FT1 (single), DT0/DT1 (double) and QT0/QT1
   (quad, user-only) working registers. */
GEN32(gen_op_load_fpr_FT0, gen_op_load_fpr_FT0_fprf);
GEN32(gen_op_load_fpr_FT1, gen_op_load_fpr_FT1_fprf);
GEN32(gen_op_store_FT0_fpr, gen_op_store_FT0_fpr_fprf);
GEN32(gen_op_store_FT1_fpr, gen_op_store_FT1_fpr_fprf);

GEN32(gen_op_load_fpr_DT0, gen_op_load_fpr_DT0_fprf);
GEN32(gen_op_load_fpr_DT1, gen_op_load_fpr_DT1_fprf);
GEN32(gen_op_store_DT0_fpr, gen_op_store_DT0_fpr_fprf);
GEN32(gen_op_store_DT1_fpr, gen_op_store_DT1_fpr_fprf);

#if defined(CONFIG_USER_ONLY)
GEN32(gen_op_load_fpr_QT0, gen_op_load_fpr_QT0_fprf);
GEN32(gen_op_load_fpr_QT1, gen_op_load_fpr_QT1_fprf);
GEN32(gen_op_store_QT0_fpr, gen_op_store_QT0_fpr_fprf);
GEN32(gen_op_store_QT1_fpr, gen_op_store_QT1_fpr_fprf);
#endif
172
/* moves */
#ifdef CONFIG_USER_ONLY
/* User-mode emulation: always unprivileged, single address space. */
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#define gen_op_ldst(name)        gen_op_##name##_raw()
#else
/* System emulation: mem_idx selects the privilege level
   (0 = user, 1 = kernel, 2 = hypervisor on sparc64). */
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
/* Build a table of load/store micro-ops indexed by privilege level. */
#define OP_LD_TABLE(width)                                              \
    static GenOpFunc * const gen_op_##width[] = {                       \
        &gen_op_##width##_user,                                         \
        &gen_op_##width##_kernel,                                       \
        &gen_op_##width##_hypv,                                         \
    };
#else
#define OP_LD_TABLE(width)                                              \
    static GenOpFunc * const gen_op_##width[] = {                       \
        &gen_op_##width##_user,                                         \
        &gen_op_##width##_kernel,                                       \
    };
#endif
#define gen_op_ldst(name)        (*gen_op_##name[dc->mem_idx])()
#endif

#ifndef CONFIG_USER_ONLY
#ifdef __i386__
OP_LD_TABLE(std);
#endif /* __i386__ */
OP_LD_TABLE(stf);
OP_LD_TABLE(stdf);
OP_LD_TABLE(ldf);
OP_LD_TABLE(lddf);
#endif

/* On 32-bit ABIs, truncate a 64-bit address temp to 32 bits. */
#ifdef TARGET_ABI32
#define ABI32_MASK(addr) tcg_gen_andi_i64(addr, addr, 0xffffffffULL);
#else
#define ABI32_MASK(addr)
#endif
215
/* Load a sign-extended immediate into T1. */
static inline void gen_movl_simm_T1(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}

/* Read SPARC register REG into temp TN.  %g0 always reads as zero,
   %g1-%g7 live in dedicated TCG globals, and windowed registers
   (%o/%l/%i) are loaded from memory through the current window
   pointer (cpu_regwptr). */
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_reg_T0(int reg)
{
    gen_movl_reg_TN(reg, cpu_T[0]);
}

static inline void gen_movl_reg_T1(int reg)
{
    gen_movl_reg_TN(reg, cpu_T[1]);
}

#ifdef __i386__
static inline void gen_movl_reg_T2(int reg)
{
    gen_movl_reg_TN(reg, cpu_T[2]);
}

#endif /* __i386__ */
/* Write temp TN back to SPARC register REG; writes to %g0 are discarded. */
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

static inline void gen_movl_T0_reg(int reg)
{
    gen_movl_TN_reg(reg, cpu_T[0]);
}

static inline void gen_movl_T1_reg(int reg)
{
    gen_movl_TN_reg(reg, cpu_T[1]);
}
269
/* 32-bit load of a CPU state field at OFFSET into T0. */
static inline void gen_op_movl_T0_env(size_t offset)
{
    tcg_gen_ld_i32(cpu_T[0], cpu_env, offset);
}

/* 32-bit store of T0 into the CPU state field at OFFSET. */
static inline void gen_op_movl_env_T0(size_t offset)
{
    tcg_gen_st_i32(cpu_T[0], cpu_env, offset);
}

/* target_ulong-sized load of a CPU state field at OFFSET into T0. */
static inline void gen_op_movtl_T0_env(size_t offset)
{
    tcg_gen_ld_tl(cpu_T[0], cpu_env, offset);
}

/* target_ulong-sized store of T0 into the CPU state field at OFFSET. */
static inline void gen_op_movtl_env_T0(size_t offset)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offset);
}

/* T0 += T1, T0 |= T1, T0 ^= T1 — no condition codes. */
static inline void gen_op_add_T1_T0(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_or_T1_T0(void)
{
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

static inline void gen_op_xor_T1_T0(void)
{
    tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}
304
/* Store the immediate PC into env->pc. */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_tmp0, pc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, pc));
}

/* Store the immediate NPC into env->npc. */
static inline void gen_movl_npc_im(target_ulong npc)
{
    tcg_gen_movi_tl(cpu_tmp0, npc);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUState, npc));
}
316
/* End the TB with a jump to (pc, npc).  When both targets lie in the
   same guest page as this TB, emit a chainable direct jump (goto_tb);
   otherwise fall back to an unchained exit. */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(pc);
        gen_movl_npc_im(npc);
        /* return (tb | tb_num) so cpu_exec can chain this exit */
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(pc);
        gen_movl_npc_im(npc);
        tcg_gen_exit_tb(0);
    }
}
337
338 // XXX suboptimal
339 static inline void gen_mov_reg_N(TCGv reg, TCGv src)
340 {
341 tcg_gen_shri_i32(reg, src, 23);
342 tcg_gen_andi_tl(reg, reg, 0x1);
343 }
344
345 static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
346 {
347 tcg_gen_shri_i32(reg, src, 22);
348 tcg_gen_andi_tl(reg, reg, 0x1);
349 }
350
351 static inline void gen_mov_reg_V(TCGv reg, TCGv src)
352 {
353 tcg_gen_shri_i32(reg, src, 21);
354 tcg_gen_andi_tl(reg, reg, 0x1);
355 }
356
357 static inline void gen_mov_reg_C(TCGv reg, TCGv src)
358 {
359 tcg_gen_shri_i32(reg, src, 20);
360 tcg_gen_andi_tl(reg, reg, 0x1);
361 }
362
/* Emit a call to the raise_exception helper with the given trap number. */
static inline void gen_op_exception(int exception)
{
    TCGv r_except;

    r_except = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_movi_i32(r_except, exception);
    tcg_gen_helper_0_1(raise_exception, r_except);
}

/* Clear all condition codes (psr, and xcc on sparc64) before the
   gen_cc_* helpers OR in the freshly computed flags. */
static inline void gen_cc_clear(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
#ifdef TARGET_SPARC64
    tcg_gen_movi_i32(cpu_xcc, 0);
#endif
}
379
/* old op:
   if (!T0)
        env->psr |= PSR_ZERO;
   if ((int32_t) T0 < 0)
        env->psr |= PSR_NEG;
*/
/* Set the N and Z flags from DST.  The 32-bit (psr) flags are set from
   the low word, and on sparc64 the 64-bit (xcc) flags from the full
   value.  Assumes gen_cc_clear() ran first: bits are only ORed in. */
static inline void gen_cc_NZ(TCGv dst)
{
    int l1, l2;
    TCGv r_zero;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_zero = tcg_const_tl(0);
    /* skip setting Z unless dst == 0 */
    tcg_gen_brcond_i32(TCG_COND_NE, dst, r_zero, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    /* skip setting N unless dst < 0 */
    tcg_gen_brcond_i32(TCG_COND_GE, dst, r_zero, l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
#ifdef TARGET_SPARC64
    {
        int l3, l4;

        l3 = gen_new_label();
        l4 = gen_new_label();
        tcg_gen_brcond_tl(TCG_COND_NE, dst, r_zero, l3);
        tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
        gen_set_label(l3);
        tcg_gen_brcond_tl(TCG_COND_GE, dst, r_zero, l4);
        tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
        gen_set_label(l4);
    }
#endif
}
415
/* old op:
   if (T0 < src1)
        env->psr |= PSR_CARRY;
*/
/* Set the C flag after an addition: unsigned carry out occurred iff
   the result DST is (unsigned) less than the operand SRC1. */
static inline void gen_cc_C_add(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_i32(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
#ifdef TARGET_SPARC64
    {
        int l2;

        l2 = gen_new_label();
        tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l2);
        tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
        gen_set_label(l2);
    }
#endif
}
439
440 /* old op:
441 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
442 env->psr |= PSR_OVF;
443 */
444 static inline void gen_cc_V_add(TCGv dst, TCGv src1, TCGv src2)
445 {
446 TCGv r_temp, r_temp2, r_temp3, r_zero;
447 int l1;
448
449 l1 = gen_new_label();
450
451 r_temp = tcg_temp_new(TCG_TYPE_TL);
452 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
453 r_temp3 = tcg_temp_new(TCG_TYPE_TL);
454 r_zero = tcg_const_tl(0);
455 tcg_gen_xor_tl(r_temp, src1, src2);
456 tcg_gen_xori_tl(r_temp, r_temp, -1);
457 tcg_gen_xor_tl(r_temp2, src1, dst);
458 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
459 tcg_gen_andi_tl(r_temp3, r_temp, (1 << 31));
460 tcg_gen_brcond_i32(TCG_COND_EQ, r_temp3, r_zero, l1);
461 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
462 gen_set_label(l1);
463 #ifdef TARGET_SPARC64
464 {
465 int l2;
466
467 l2 = gen_new_label();
468 tcg_gen_xor_tl(r_temp, src1, src2);
469 tcg_gen_xori_tl(r_temp, r_temp, -1);
470 tcg_gen_xor_tl(r_temp2, src1, dst);
471 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
472 tcg_gen_andi_tl(r_temp3, r_temp, (1ULL << 63));
473 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp3, r_zero, l2);
474 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_OVF);
475 gen_set_label(l2);
476 }
477 #endif
478 }
479
480 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
481 {
482 TCGv r_temp, r_temp2, r_temp3, r_zero;
483 int l1;
484
485 l1 = gen_new_label();
486
487 r_temp = tcg_temp_new(TCG_TYPE_TL);
488 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
489 r_temp3 = tcg_temp_new(TCG_TYPE_TL);
490 r_zero = tcg_const_tl(0);
491 tcg_gen_xor_tl(r_temp, src1, src2);
492 tcg_gen_xori_tl(r_temp, r_temp, -1);
493 tcg_gen_xor_tl(r_temp2, src1, dst);
494 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
495 tcg_gen_andi_tl(r_temp3, r_temp, (1 << 31));
496 tcg_gen_brcond_i32(TCG_COND_EQ, r_temp3, r_zero, l1);
497 gen_op_exception(TT_TOVF);
498 gen_set_label(l1);
499 #ifdef TARGET_SPARC64
500 {
501 int l2;
502
503 l2 = gen_new_label();
504 tcg_gen_xor_tl(r_temp, src1, src2);
505 tcg_gen_xori_tl(r_temp, r_temp, -1);
506 tcg_gen_xor_tl(r_temp2, src1, dst);
507 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
508 tcg_gen_andi_tl(r_temp3, r_temp, (1ULL << 63));
509 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp3, r_zero, l2);
510 gen_op_exception(TT_TOVF);
511 gen_set_label(l2);
512 }
513 #endif
514 }
515
/* Tagged arithmetic: set V if either operand has nonzero tag bits
   (the low two bits must be 00 for a valid tagged value). */
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;
    TCGv r_zero, r_temp;

    l1 = gen_new_label();
    r_zero = tcg_const_tl(0);
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_or_tl(r_temp, src1, src2);
    tcg_gen_andi_tl(r_temp, r_temp, 0x3);
    tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, r_zero, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

/* Tagged arithmetic, trapping variant: raise TT_TOVF instead of
   setting V when either operand has nonzero tag bits. */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv r_zero, r_temp;

    l1 = gen_new_label();
    r_zero = tcg_const_tl(0);
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_or_tl(r_temp, src1, src2);
    tcg_gen_andi_tl(r_temp, r_temp, 0x3);
    tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, r_zero, l1);
    gen_op_exception(TT_TOVF);
    gen_set_label(l1);
}
545
/* T0 = T0 + T1, setting all condition codes (ADDcc). */
static inline void gen_op_add_T1_T0_cc(void)
{
    /* keep the original T0 in cc_src: the flag helpers need it */
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
    gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
}

/* T0 = T0 + T1 + C, setting condition codes (ADDXcc).  Carry is
   checked after each of the two additions since either may carry. */
static inline void gen_op_addx_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    gen_cc_clear();
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    /* NOTE(review): the second carry check compares against the
       original T0, not the intermediate sum — verify this matches
       the architectural ADDXcc carry definition */
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
    gen_cc_NZ(cpu_T[0]);
    gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
}

/* Tagged add with condition codes (TADDcc): like ADDcc plus V from
   the operand tag bits. */
static inline void gen_op_tadd_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
    gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
    gen_cc_V_tag(cpu_cc_src, cpu_T[1]);
}

/* Tagged add, trapping on tag mismatch or overflow (TADDccTV). */
static inline void gen_op_tadd_T1_T0_ccTV(void)
{
    gen_tag_tv(cpu_T[0], cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_add_tv(cpu_T[0], cpu_cc_src, cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
}
590
/* old op:
   if (src1 < T1)
        env->psr |= PSR_CARRY;
*/
/* Set the C flag after a subtraction: unsigned borrow occurred iff
   SRC1 is (unsigned) less than SRC2. */
static inline void gen_cc_C_sub(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_i32(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
#ifdef TARGET_SPARC64
    {
        int l2;

        l2 = gen_new_label();
        tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l2);
        tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
        gen_set_label(l2);
    }
#endif
}
614
615 /* old op:
616 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
617 env->psr |= PSR_OVF;
618 */
619 static inline void gen_cc_V_sub(TCGv dst, TCGv src1, TCGv src2)
620 {
621 TCGv r_temp, r_temp2, r_temp3, r_zero;
622 int l1;
623
624 l1 = gen_new_label();
625
626 r_temp = tcg_temp_new(TCG_TYPE_TL);
627 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
628 r_temp3 = tcg_temp_new(TCG_TYPE_TL);
629 r_zero = tcg_const_tl(0);
630 tcg_gen_xor_tl(r_temp, src1, src2);
631 tcg_gen_xor_tl(r_temp2, src1, dst);
632 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
633 tcg_gen_andi_tl(r_temp3, r_temp, (1 << 31));
634 tcg_gen_brcond_i32(TCG_COND_EQ, r_temp3, r_zero, l1);
635 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
636 gen_set_label(l1);
637 #ifdef TARGET_SPARC64
638 {
639 int l2;
640
641 l2 = gen_new_label();
642 tcg_gen_xor_tl(r_temp, src1, src2);
643 tcg_gen_xor_tl(r_temp2, src1, dst);
644 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
645 tcg_gen_andi_tl(r_temp3, r_temp, (1ULL << 63));
646 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp3, r_zero, l2);
647 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_OVF);
648 gen_set_label(l2);
649 }
650 #endif
651 }
652
653 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
654 {
655 TCGv r_temp, r_temp2, r_temp3, r_zero;
656 int l1;
657
658 l1 = gen_new_label();
659
660 r_temp = tcg_temp_new(TCG_TYPE_TL);
661 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
662 r_temp3 = tcg_temp_new(TCG_TYPE_TL);
663 r_zero = tcg_const_tl(0);
664 tcg_gen_xor_tl(r_temp, src1, src2);
665 tcg_gen_xor_tl(r_temp2, src1, dst);
666 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
667 tcg_gen_andi_tl(r_temp3, r_temp, (1 << 31));
668 tcg_gen_brcond_i32(TCG_COND_EQ, r_temp3, r_zero, l1);
669 gen_op_exception(TT_TOVF);
670 gen_set_label(l1);
671 #ifdef TARGET_SPARC64
672 {
673 int l2;
674
675 l2 = gen_new_label();
676 tcg_gen_xor_tl(r_temp, src1, src2);
677 tcg_gen_xor_tl(r_temp2, src1, dst);
678 tcg_gen_and_tl(r_temp, r_temp, r_temp2);
679 tcg_gen_andi_tl(r_temp3, r_temp, (1ULL << 63));
680 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp3, r_zero, l2);
681 gen_op_exception(TT_TOVF);
682 gen_set_label(l2);
683 }
684 #endif
685 }
686
/* T0 = T0 - T1, setting all condition codes (SUBcc). */
static inline void gen_op_sub_T1_T0_cc(void)
{
    /* keep the original T0 in cc_src: the flag helpers need it */
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
    gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
}

/* T0 = T0 - T1 - C, setting condition codes (SUBXcc).  Borrow is
   checked after each of the two subtractions since either may borrow. */
static inline void gen_op_subx_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    gen_cc_clear();
    /* NOTE(review): argument order here is (result, original), the
       reverse of gen_op_sub_T1_T0_cc's (original, subtrahend) — verify
       the borrow computation against the architectural SUBXcc */
    gen_cc_C_sub(cpu_T[0], cpu_cc_src);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_C_sub(cpu_T[0], cpu_cc_src);
    gen_cc_NZ(cpu_T[0]);
    gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
}

/* Tagged subtract with condition codes (TSUBcc). */
static inline void gen_op_tsub_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
    gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
    gen_cc_V_tag(cpu_cc_src, cpu_T[1]);
}

/* Tagged subtract, trapping on tag mismatch or overflow (TSUBccTV). */
static inline void gen_op_tsub_T1_T0_ccTV(void)
{
    gen_tag_tv(cpu_T[0], cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_sub_tv(cpu_T[0], cpu_cc_src, cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
}
731
#ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO if the 64-bit divisor is zero. */
static inline void gen_trap_ifdivzero_i64(TCGv divisor)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, divisor, tcg_const_tl(0), l1);
    gen_op_exception(TT_DIV_ZERO);
    gen_set_label(l1);
}

/* T0 = T0 / T1 (signed 64-bit, SDIVX).  The INT64_MIN / -1 case is
   special-cased to yield INT64_MIN, since the host division would
   overflow (and can fault on some hosts). */
static inline void gen_op_sdivx_T1_T0(void)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_trap_ifdivzero_i64(cpu_T[1]);
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_T[0], tcg_const_i64(INT64_MIN), l1);
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_T[1], tcg_const_i64(-1), l1);
    tcg_gen_movi_i64(cpu_T[0], INT64_MIN);
    gen_op_jmp_label(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_set_label(l2);
}
#endif
759
/* Set condition codes after a division: N/Z from the result, and V
   when the division overflowed (T1, written by the divide helper,
   is nonzero). */
static inline void gen_op_div_cc(void)
{
    int l1;
    TCGv r_zero;

    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    l1 = gen_new_label();
    r_zero = tcg_const_tl(0);
    tcg_gen_brcond_i32(TCG_COND_EQ, cpu_T[1], r_zero, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

/* Set condition codes after a logical op: only N and Z are affected. */
static inline void gen_op_logic_T0_cc(void)
{
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
}
779
/* Integer condition evaluators: each computes a 0/1 truth value in DST
   for one branch condition, from the PSR (or xcc) image in SRC.  The
   comment above each gives the boolean formula over the N/Z/V/C bits. */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv src)
{
    TCGv r_flag;

    r_flag = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_N(r_flag, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, r_flag);
    gen_mov_reg_Z(r_flag, src);
    tcg_gen_or_tl(dst, dst, r_flag);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv src)
{
    TCGv r_V;

    r_V = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_V(r_V, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, r_V);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
{
    TCGv r_Z;

    r_Z = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_Z(r_Z, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, r_Z);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv src)
{
    TCGv r_flag;

    r_flag = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_N(r_flag, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, r_flag);
    gen_mov_reg_Z(r_flag, src);
    tcg_gen_or_tl(dst, dst, r_flag);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv src)
{
    TCGv r_V;

    r_V = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_V(r_V, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, r_V);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
{
    TCGv r_Z;

    r_Z = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_Z(r_Z, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, r_Z);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
916
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered

  The evaluators below compute a 0/1 truth value for each FP branch
  condition from the FCC pair at fcc_offset within the FSR image.
*/
/* Extract FCC0 (bit 10 + fcc_offset) of SRC into REG as 0/1. */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_i32(reg, src, 10 + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract FCC1 (bit 11 + fcc_offset) of SRC into REG as 0/1. */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_i32(reg, src, 11 + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, r_fcc1);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, r_fcc1);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_xori_tl(r_fcc1, r_fcc1, 0x1);
    tcg_gen_and_tl(dst, dst, r_fcc1);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, r_fcc1);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, r_fcc1);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, r_fcc1);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, r_fcc1);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_xori_tl(r_fcc1, r_fcc1, 0x1);
    tcg_gen_and_tl(dst, dst, r_fcc1);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, r_fcc1);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    TCGv r_fcc1;

    r_fcc1 = tcg_temp_new(TCG_TYPE_TL);
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(r_fcc1, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, r_fcc1);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1096
/* Conditional branch ending the TB: if R_COND is true go to pc1,
   otherwise to pc2; each side falls through to its own delay slot. */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGv r_zero;
    int l1;

    l1 = gen_new_label();
    r_zero = tcg_const_tl(0);

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

/* Annulling conditional branch: when R_COND is true execute the delay
   slot at pc2 then the target pc1; when false, skip (annul) the delay
   slot and continue at pc2 + 4. */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    TCGv r_zero;
    int l1;

    l1 = gen_new_label();
    r_zero = tcg_const_tl(0);

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}

/* Unconditional branch to (pc, npc). */
static inline void gen_branch(DisasContext *dc, target_ulong pc,
                              target_ulong npc)
{
    gen_goto_tb(dc, 0, pc, npc);
}
1136
/* Resolve a pending JUMP_PC at run time: store npc1 into env->npc when
   R_COND is true, npc2 otherwise, without ending the TB. */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    TCGv r_zero;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_zero = tcg_const_tl(0);

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1);

    gen_movl_npc_im(npc1);
    gen_op_jmp_label(l2);

    gen_set_label(l1);
    gen_movl_npc_im(npc2);
    gen_set_label(l2);
}
1156
1157 /* call this function before using T2 as it may have been set for a jump */
1158 static inline void flush_T2(DisasContext * dc)
1159 {
1160 if (dc->npc == JUMP_PC) {
1161 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
1162 dc->npc = DYNAMIC_PC;
1163 }
1164 }
1165
1166 static inline void save_npc(DisasContext * dc)
1167 {
1168 if (dc->npc == JUMP_PC) {
1169 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
1170 dc->npc = DYNAMIC_PC;
1171 } else if (dc->npc != DYNAMIC_PC) {
1172 gen_movl_npc_im(dc->npc);
1173 }
1174 }
1175
1176 static inline void save_state(DisasContext * dc)
1177 {
1178 gen_jmp_im(dc->pc);
1179 save_npc(dc);
1180 }
1181
1182 static inline void gen_mov_pc_npc(DisasContext * dc)
1183 {
1184 if (dc->npc == JUMP_PC) {
1185 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
1186 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
1187 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
1188 dc->pc = DYNAMIC_PC;
1189 } else if (dc->npc == DYNAMIC_PC) {
1190 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
1191 tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
1192 dc->pc = DYNAMIC_PC;
1193 } else {
1194 dc->pc = dc->npc;
1195 }
1196 }
1197
/* Advance the run-time PCs: pc <- npc, npc <- npc + 4. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, pc));
    tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, 4);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, npc));
}
1205
/* r_dst <- evaluation of integer condition `cond` (Bicc/BPcc cond field)
   against the selected condition codes.  On sparc64, cc selects icc
   (cc == 0, from cpu_psr) vs xcc (cc != 0); pre-V9 always uses psr. */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);              /* never */
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);              /* always */
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1269
/* r_dst <- evaluation of FP condition `cond` against FSR field fcc[cc].
   offset is the bit distance from fcc0 to the selected field: fcc0 sits
   around bit 10 of the FSR, fcc1/fcc2/fcc3 at 32/34/36 on V9, hence the
   "32 - 10" style constants.  (Assumes the gen_op_eval_fb* helpers shift
   the FSR right by `offset` before testing — they are not visible here;
   TODO confirm against op_helper.) */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;
    unsigned int offset;

    r_src = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_ld_tl(r_src, cpu_env, offsetof(CPUSPARCState, fsr));

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);              /* never */
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, r_src, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, r_src, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, r_src, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, r_src, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, r_src, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, r_src, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, r_src, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);              /* always */
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, r_src, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, r_src, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, r_src, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, r_src, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, r_src, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, r_src, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, r_src, offset);
        break;
    }
}
1345
#ifdef TARGET_SPARC64
/* Map a V9 BPr register-condition encoding (1..7) to a TCG condition.
   The logic is INVERTED: each entry is the condition under which the
   branch is NOT taken — gen_cond_reg branches over the "result = 1"
   store when it holds.  -1 entries are reserved encodings. */
static const int gen_tcg_cond_reg[8] = {
    -1,                 /* reserved */
    TCG_COND_NE,        /* 1: brz  — taken if reg == 0 */
    TCG_COND_GT,        /* 2: brlez */
    TCG_COND_GE,        /* 3: brlz */
    -1,                 /* reserved */
    TCG_COND_EQ,        /* 5: brnz — taken if reg != 0 */
    TCG_COND_LE,        /* 6: brgz */
    TCG_COND_LT,        /* 7: brgez */
};
1358
/* r_dst <- 1 if cpu_T[0] satisfies register condition `cond`, else 0.
   Uses the inverted-condition table above: the brcond skips the
   "r_dst = 1" store when the branch would not be taken. */
static inline void gen_cond_reg(TCGv r_dst, int cond)
{
    TCGv r_zero;
    int l1;

    l1 = gen_new_label();
    r_zero = tcg_const_tl(0);
    tcg_gen_mov_tl(r_dst, r_zero);      /* default: condition false */
    tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
1371 #endif
1372
/* XXX: potentially incorrect if dynamic npc */
/* Translate a Bicc/BPcc instruction.  `a` is the annul bit; `cc` selects
   icc/xcc on sparc64.  Conditional cases either end the TB (annulled) or
   record a two-way npc in dc->jump_pc[] resolved later via cpu_T[2]. */
static void do_branch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: the delay slot at npc is skipped entirely */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled taken branch: jump directly, no delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_T2(dc);
        gen_cond(cpu_T[2], cc, cond);
        if (a) {
            /* annulled conditional: resolved at run time, TB ends here */
            gen_branch_a(dc, target, dc->npc, cpu_T[2]);
            dc->is_br = 1;
        } else {
            /* record both possible npc values; cpu_T[2] decides later */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1411
/* XXX: potentially incorrect if dynamic npc */
/* Translate an FBfcc/FBPfcc instruction.  Mirrors do_branch but
   evaluates the FP condition codes via gen_fcond. */
static void do_fbranch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: the delay slot at npc is skipped entirely */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled taken branch: jump directly, no delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_T2(dc);
        gen_fcond(cpu_T[2], cc, cond);
        if (a) {
            /* annulled conditional: resolved at run time, TB ends here */
            gen_branch_a(dc, target, dc->npc, cpu_T[2]);
            dc->is_br = 1;
        } else {
            /* record both possible npc values; cpu_T[2] decides later */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1450
1451 #ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 BPr (branch on integer register) instruction.
   The register value is expected in cpu_T[0] (loaded by the caller);
   BPr has no "branch never/always" encodings, so the condition is
   always evaluated. */
static void do_branch_reg(DisasContext * dc, int32_t offset, uint32_t insn)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_T2(dc);
    gen_cond_reg(cpu_T[2], cond);
    if (a) {
        /* annulled: resolved at run time, TB ends here */
        gen_branch_a(dc, target, dc->npc, cpu_T[2]);
        dc->is_br = 1;
    } else {
        /* record both possible npc values; cpu_T[2] decides later */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
1470
/* Dispatch tables mapping a V9 fcc field number (0..3) to the matching
   FP compare helper.  Quad (128-bit) variants are only built for
   user-mode (CONFIG_USER_ONLY), matching the #if guards around their
   call sites elsewhere in this file. */
static GenOpFunc * const gen_fcmps[4] = {
    helper_fcmps,
    helper_fcmps_fcc1,
    helper_fcmps_fcc2,
    helper_fcmps_fcc3,
};

static GenOpFunc * const gen_fcmpd[4] = {
    helper_fcmpd,
    helper_fcmpd_fcc1,
    helper_fcmpd_fcc2,
    helper_fcmpd_fcc3,
};

#if defined(CONFIG_USER_ONLY)
static GenOpFunc * const gen_fcmpq[4] = {
    helper_fcmpq,
    helper_fcmpq_fcc1,
    helper_fcmpq_fcc2,
    helper_fcmpq_fcc3,
};
#endif

static GenOpFunc * const gen_fcmpes[4] = {
    helper_fcmpes,
    helper_fcmpes_fcc1,
    helper_fcmpes_fcc2,
    helper_fcmpes_fcc3,
};

static GenOpFunc * const gen_fcmped[4] = {
    helper_fcmped,
    helper_fcmped_fcc1,
    helper_fcmped_fcc2,
    helper_fcmped_fcc3,
};

#if defined(CONFIG_USER_ONLY)
static GenOpFunc * const gen_fcmpeq[4] = {
    helper_fcmpeq,
    helper_fcmpeq_fcc1,
    helper_fcmpeq_fcc2,
    helper_fcmpeq_fcc3,
};
#endif
1516
/* Emit an FP compare targeting fcc field `fccno` (sparc64: four fields).
   The fcmpe* variants are presumably the signaling compares — confirm
   against the helper implementations. */
static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmps[fccno]);
}

static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpd[fccno]);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpq[fccno]);
}
#endif

static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpes[fccno]);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmped[fccno]);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
}
#endif
1550
1551 #else
1552
/* Pre-V9 variants: there is only one fcc field, so fccno is ignored
   (kept in the signature so call sites are target-independent). */
static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmps);
}

static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpd);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpq);
}
#endif

static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpes);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmped);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpeq);
}
#endif
1586
1587 #endif
1588
/* Replace the FSR.ftt field with fsr_flags and raise a floating-point
   exception trap. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, ~FSR_FTT_MASK);
    tcg_gen_ori_tl(cpu_tmp0, cpu_tmp0, fsr_flags);
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
    gen_op_exception(TT_FP_EXCP);
}
1597
1598 static int gen_trap_ifnofpu(DisasContext * dc)
1599 {
1600 #if !defined(CONFIG_USER_ONLY)
1601 if (!dc->fpu_enabled) {
1602 save_state(dc);
1603 gen_op_exception(TT_NFPU_INSN);
1604 dc->is_br = 1;
1605 return 1;
1606 }
1607 #endif
1608 return 0;
1609 }
1610
/* Clear the FSR trap-type (ftt) and current-exception (cexc) fields
   before starting a new FP operation. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, ~(FSR_FTT_MASK | FSR_CEXC_MASK));
    tcg_gen_st_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, fsr));
}
1617
/* Reset the softfloat accrued exception flags before an FP operation. */
static inline void gen_clear_float_exceptions(void)
{
    tcg_gen_helper_0_0(helper_clear_float_exceptions);
}
1622
1623 /* asi moves */
1624 #ifdef TARGET_SPARC64
/* sparc64 alternate-space load: address in cpu_T[0], result in cpu_T[1].
   Immediate form adds the offset to the address and takes the ASI from
   the %asi register (env->asi); otherwise the ASI is an insn field.
   cpu_T[1] is used both to carry the ASI into the helper and to receive
   the loaded value. */
static inline void gen_ld_asi(int insn, int size, int sign)
{
    int asi, offset;
    TCGv r_size, r_sign;

    r_size = tcg_temp_new(TCG_TYPE_I32);
    r_sign = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_movi_i32(r_size, size);
    tcg_gen_movi_i32(r_sign, sign);
    if (IS_IMM) {
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
        tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        tcg_gen_movi_i32(cpu_T[1], asi);
    }
    tcg_gen_helper_1_4(helper_ld_asi, cpu_T[1], cpu_T[0], cpu_T[1], r_size,
                       r_sign);
}
1645
1646 static inline void gen_st_asi(int insn, int size)
1647 {
1648 int asi, offset;
1649 TCGv r_asi, r_size;
1650
1651 r_asi = tcg_temp_new(TCG_TYPE_I32);
1652 r_size = tcg_temp_new(TCG_TYPE_I32);
1653 tcg_gen_movi_i32(r_size, size);
1654 if (IS_IMM) {
1655 offset = GET_FIELD(insn, 25, 31);
1656 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1657 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1658 } else {
1659 asi = GET_FIELD(insn, 19, 26);
1660 tcg_gen_movi_i32(r_asi, asi);
1661 }
1662 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_asi, r_size);
1663 }
1664
1665 static inline void gen_ldf_asi(int insn, int size, int rd)
1666 {
1667 int asi, offset;
1668 TCGv r_asi, r_size, r_rd;
1669
1670 r_asi = tcg_temp_new(TCG_TYPE_I32);
1671 r_size = tcg_temp_new(TCG_TYPE_I32);
1672 r_rd = tcg_temp_new(TCG_TYPE_I32);
1673 tcg_gen_movi_i32(r_size, size);
1674 tcg_gen_movi_i32(r_rd, rd);
1675 if (IS_IMM) {
1676 offset = GET_FIELD(insn, 25, 31);
1677 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1678 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1679 } else {
1680 asi = GET_FIELD(insn, 19, 26);
1681 tcg_gen_movi_i32(r_asi, asi);
1682 }
1683 tcg_gen_helper_0_4(helper_ldf_asi, cpu_T[0], r_asi, r_size, r_rd);
1684 }
1685
1686 static inline void gen_stf_asi(int insn, int size, int rd)
1687 {
1688 int asi, offset;
1689 TCGv r_asi, r_size, r_rd;
1690
1691 r_asi = tcg_temp_new(TCG_TYPE_I32);
1692 r_size = tcg_temp_new(TCG_TYPE_I32);
1693 r_rd = tcg_temp_new(TCG_TYPE_I32);
1694 tcg_gen_movi_i32(r_size, size);
1695 tcg_gen_movi_i32(r_rd, rd);
1696 if (IS_IMM) {
1697 offset = GET_FIELD(insn, 25, 31);
1698 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1699 tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
1700 } else {
1701 asi = GET_FIELD(insn, 19, 26);
1702 tcg_gen_movi_i32(r_asi, asi);
1703 }
1704 tcg_gen_helper_0_4(helper_stf_asi, cpu_T[0], r_asi, r_size, r_rd);
1705 }
1706
1707 static inline void gen_swap_asi(int insn)
1708 {
1709 int asi, offset;
1710 TCGv r_size, r_sign, r_temp;
1711
1712 r_size = tcg_temp_new(TCG_TYPE_I32);
1713 r_sign = tcg_temp_new(TCG_TYPE_I32);
1714 r_temp = tcg_temp_new(TCG_TYPE_I32);
1715 tcg_gen_movi_i32(r_size, 4);
1716 tcg_gen_movi_i32(r_sign, 0);
1717 if (IS_IMM) {
1718 offset = GET_FIELD(insn, 25, 31);
1719 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
1720 tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
1721 } else {
1722 asi = GET_FIELD(insn, 19, 26);
1723 tcg_gen_movi_i32(cpu_T[1], asi);
1724 }
1725 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
1726 r_sign);
1727 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
1728 tcg_gen_mov_i32(cpu_T[1], r_temp);
1729 }
1730
/* sparc64 LDDA: 64-bit alternate-space load, split into two 32-bit
   halves — low word into cpu_T[0], high word into cpu_T[1].
   NOTE(review): confirm against the caller which half lands in rd vs
   rd+1.  cpu_T[1] carries the ASI into the helper before being
   overwritten with the high word. */
static inline void gen_ldda_asi(int insn)
{
    int asi, offset;
    TCGv r_size, r_sign, r_dword;

    r_size = tcg_temp_new(TCG_TYPE_I32);
    r_sign = tcg_temp_new(TCG_TYPE_I32);
    r_dword = tcg_temp_new(TCG_TYPE_I64);
    tcg_gen_movi_i32(r_size, 8);
    tcg_gen_movi_i32(r_sign, 0);
    if (IS_IMM) {
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
        tcg_gen_ld_i32(cpu_T[1], cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        tcg_gen_movi_i32(cpu_T[1], asi);
    }
    tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
                       r_sign);
    tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
    tcg_gen_shri_i64(r_dword, r_dword, 32);
    tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
}
1755
/* sparc64 CASA (32-bit compare-and-swap in alternate space):
   address in cpu_T[0], swap value in cpu_T[1], compare value taken from
   register rd; the helper's result lands in cpu_T[1].
   NOTE(review): r_val1 is an I32 temp but gen_movl_reg_TN moves a
   target_ulong (64-bit here) into it — looks suspect; confirm
   helper_cas_asi really takes a 32-bit compare value. */
static inline void gen_cas_asi(int insn, int rd)
{
    int asi, offset;
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_I32);
    r_asi = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd, r_val1);
    if (IS_IMM) {
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
        tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        tcg_gen_movi_i32(r_asi, asi);
    }
    tcg_gen_helper_1_4(helper_cas_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
                       r_asi);
}
1775
/* sparc64 CASXA (64-bit compare-and-swap in alternate space):
   address in cpu_T[0], swap value in cpu_T[1], compare value taken from
   register rd (as a 64-bit temp); the helper's result lands in
   cpu_T[1]. */
static inline void gen_casx_asi(int insn, int rd)
{
    int asi, offset;
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_I64);
    r_asi = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd, r_val1);
    if (IS_IMM) {
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
        tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        tcg_gen_movi_i32(r_asi, asi);
    }
    tcg_gen_helper_1_4(helper_casx_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
                       r_asi);
}
1795
1796 #elif !defined(CONFIG_USER_ONLY)
1797
/* Pre-V9 system-mode alternate-space load: address in cpu_T[0], result
   in cpu_T[1].  The helper returns a 64-bit value that is truncated to
   32 bits here; cpu_T[1] also carries the ASI into the helper. */
static inline void gen_ld_asi(int insn, int size, int sign)
{
    int asi;
    TCGv r_size, r_sign, r_dword;

    r_size = tcg_temp_new(TCG_TYPE_I32);
    r_sign = tcg_temp_new(TCG_TYPE_I32);
    r_dword = tcg_temp_new(TCG_TYPE_I64);
    tcg_gen_movi_i32(r_size, size);
    tcg_gen_movi_i32(r_sign, sign);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_movi_i32(cpu_T[1], asi);
    tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
                       r_sign);
    tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
}
1814
1815 static inline void gen_st_asi(int insn, int size)
1816 {
1817 int asi;
1818 TCGv r_dword, r_asi, r_size;
1819
1820 r_dword = tcg_temp_new(TCG_TYPE_I64);
1821 tcg_gen_extu_i32_i64(r_dword, cpu_T[1]);
1822 r_asi = tcg_temp_new(TCG_TYPE_I32);
1823 r_size = tcg_temp_new(TCG_TYPE_I32);
1824 asi = GET_FIELD(insn, 19, 26);
1825 tcg_gen_movi_i32(r_asi, asi);
1826 tcg_gen_movi_i32(r_size, size);
1827 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
1828 }
1829
1830 static inline void gen_swap_asi(int insn)
1831 {
1832 int asi;
1833 TCGv r_size, r_sign, r_temp;
1834
1835 r_size = tcg_temp_new(TCG_TYPE_I32);
1836 r_sign = tcg_temp_new(TCG_TYPE_I32);
1837 r_temp = tcg_temp_new(TCG_TYPE_I32);
1838 tcg_gen_movi_i32(r_size, 4);
1839 tcg_gen_movi_i32(r_sign, 0);
1840 asi = GET_FIELD(insn, 19, 26);
1841 tcg_gen_movi_i32(cpu_T[1], asi);
1842 tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], cpu_T[1], r_size,
1843 r_sign);
1844 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_size, r_sign);
1845 tcg_gen_mov_i32(cpu_T[1], r_temp);
1846 }
1847
/* Pre-V9 LDDA: 64-bit alternate-space load, split into two 32-bit
   halves — low word into cpu_T[0], high word into cpu_T[1].
   NOTE(review): confirm against the caller which half lands in rd vs
   rd+1.  cpu_T[1] carries the ASI into the helper before being
   overwritten with the high word. */
static inline void gen_ldda_asi(int insn)
{
    int asi;
    TCGv r_size, r_sign, r_dword;

    r_size = tcg_temp_new(TCG_TYPE_I32);
    r_sign = tcg_temp_new(TCG_TYPE_I32);
    r_dword = tcg_temp_new(TCG_TYPE_I64);
    tcg_gen_movi_i32(r_size, 8);
    tcg_gen_movi_i32(r_sign, 0);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_movi_i32(cpu_T[1], asi);
    tcg_gen_helper_1_4(helper_ld_asi, r_dword, cpu_T[0], cpu_T[1], r_size,
                       r_sign);
    tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
    tcg_gen_shri_i64(r_dword, r_dword, 32);
    tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
}
1866 #endif
1867
1868 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
1869 static inline void gen_ldstub_asi(int insn)
1870 {
1871 int asi;
1872 TCGv r_dword, r_asi, r_size;
1873
1874 gen_ld_asi(insn, 1, 0);
1875
1876 r_dword = tcg_temp_new(TCG_TYPE_I64);
1877 r_asi = tcg_temp_new(TCG_TYPE_I32);
1878 r_size = tcg_temp_new(TCG_TYPE_I32);
1879 asi = GET_FIELD(insn, 19, 26);
1880 tcg_gen_movi_i32(r_dword, 0xff);
1881 tcg_gen_movi_i32(r_asi, asi);
1882 tcg_gen_movi_i32(r_size, 1);
1883 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_asi, r_size);
1884 }
1885 #endif
1886
1887 /* before an instruction, dc->pc must be static */
1888 static void disas_sparc_insn(DisasContext * dc)
1889 {
1890 unsigned int insn, opc, rs1, rs2, rd;
1891
1892 insn = ldl_code(dc->pc);
1893 opc = GET_FIELD(insn, 0, 1);
1894
1895 rd = GET_FIELD(insn, 2, 6);
1896 switch (opc) {
1897 case 0: /* branches/sethi */
1898 {
1899 unsigned int xop = GET_FIELD(insn, 7, 9);
1900 int32_t target;
1901 switch (xop) {
1902 #ifdef TARGET_SPARC64
1903 case 0x1: /* V9 BPcc */
1904 {
1905 int cc;
1906
1907 target = GET_FIELD_SP(insn, 0, 18);
1908 target = sign_extend(target, 18);
1909 target <<= 2;
1910 cc = GET_FIELD_SP(insn, 20, 21);
1911 if (cc == 0)
1912 do_branch(dc, target, insn, 0);
1913 else if (cc == 2)
1914 do_branch(dc, target, insn, 1);
1915 else
1916 goto illegal_insn;
1917 goto jmp_insn;
1918 }
1919 case 0x3: /* V9 BPr */
1920 {
1921 target = GET_FIELD_SP(insn, 0, 13) |
1922 (GET_FIELD_SP(insn, 20, 21) << 14);
1923 target = sign_extend(target, 16);
1924 target <<= 2;
1925 rs1 = GET_FIELD(insn, 13, 17);
1926 gen_movl_reg_T0(rs1);
1927 do_branch_reg(dc, target, insn);
1928 goto jmp_insn;
1929 }
1930 case 0x5: /* V9 FBPcc */
1931 {
1932 int cc = GET_FIELD_SP(insn, 20, 21);
1933 if (gen_trap_ifnofpu(dc))
1934 goto jmp_insn;
1935 target = GET_FIELD_SP(insn, 0, 18);
1936 target = sign_extend(target, 19);
1937 target <<= 2;
1938 do_fbranch(dc, target, insn, cc);
1939 goto jmp_insn;
1940 }
1941 #else
1942 case 0x7: /* CBN+x */
1943 {
1944 goto ncp_insn;
1945 }
1946 #endif
1947 case 0x2: /* BN+x */
1948 {
1949 target = GET_FIELD(insn, 10, 31);
1950 target = sign_extend(target, 22);
1951 target <<= 2;
1952 do_branch(dc, target, insn, 0);
1953 goto jmp_insn;
1954 }
1955 case 0x6: /* FBN+x */
1956 {
1957 if (gen_trap_ifnofpu(dc))
1958 goto jmp_insn;
1959 target = GET_FIELD(insn, 10, 31);
1960 target = sign_extend(target, 22);
1961 target <<= 2;
1962 do_fbranch(dc, target, insn, 0);
1963 goto jmp_insn;
1964 }
1965 case 0x4: /* SETHI */
1966 #define OPTIM
1967 #if defined(OPTIM)
1968 if (rd) { // nop
1969 #endif
1970 uint32_t value = GET_FIELD(insn, 10, 31);
1971 tcg_gen_movi_tl(cpu_T[0], value << 10);
1972 gen_movl_T0_reg(rd);
1973 #if defined(OPTIM)
1974 }
1975 #endif
1976 break;
1977 case 0x0: /* UNIMPL */
1978 default:
1979 goto illegal_insn;
1980 }
1981 break;
1982 }
1983 break;
1984 case 1:
1985 /*CALL*/ {
1986 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1987
1988 tcg_gen_movi_tl(cpu_T[0], dc->pc);
1989 gen_movl_T0_reg(15);
1990 target += dc->pc;
1991 gen_mov_pc_npc(dc);
1992 dc->npc = target;
1993 }
1994 goto jmp_insn;
1995 case 2: /* FPU & Logical Operations */
1996 {
1997 unsigned int xop = GET_FIELD(insn, 7, 12);
1998 if (xop == 0x3a) { /* generate trap */
1999 int cond;
2000
2001 rs1 = GET_FIELD(insn, 13, 17);
2002 gen_movl_reg_T0(rs1);
2003 if (IS_IMM) {
2004 rs2 = GET_FIELD(insn, 25, 31);
2005 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], rs2);
2006 } else {
2007 rs2 = GET_FIELD(insn, 27, 31);
2008 #if defined(OPTIM)
2009 if (rs2 != 0) {
2010 #endif
2011 gen_movl_reg_T1(rs2);
2012 gen_op_add_T1_T0();
2013 #if defined(OPTIM)
2014 }
2015 #endif
2016 }
2017 cond = GET_FIELD(insn, 3, 6);
2018 if (cond == 0x8) {
2019 save_state(dc);
2020 tcg_gen_helper_0_1(helper_trap, cpu_T[0]);
2021 } else if (cond != 0) {
2022 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2023 #ifdef TARGET_SPARC64
2024 /* V9 icc/xcc */
2025 int cc = GET_FIELD_SP(insn, 11, 12);
2026
2027 save_state(dc);
2028 if (cc == 0)
2029 gen_cond(r_cond, 0, cond);
2030 else if (cc == 2)
2031 gen_cond(r_cond, 1, cond);
2032 else
2033 goto illegal_insn;
2034 #else
2035 save_state(dc);
2036 gen_cond(r_cond, 0, cond);
2037 #endif
2038 tcg_gen_helper_0_2(helper_trapcc, cpu_T[0], r_cond);
2039 }
2040 gen_op_next_insn();
2041 tcg_gen_exit_tb(0);
2042 dc->is_br = 1;
2043 goto jmp_insn;
2044 } else if (xop == 0x28) {
2045 rs1 = GET_FIELD(insn, 13, 17);
2046 switch(rs1) {
2047 case 0: /* rdy */
2048 #ifndef TARGET_SPARC64
2049 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2050 manual, rdy on the microSPARC
2051 II */
2052 case 0x0f: /* stbar in the SPARCv8 manual,
2053 rdy on the microSPARC II */
2054 case 0x10 ... 0x1f: /* implementation-dependent in the
2055 SPARCv8 manual, rdy on the
2056 microSPARC II */
2057 #endif
2058 gen_op_movtl_T0_env(offsetof(CPUSPARCState, y));
2059 gen_movl_T0_reg(rd);
2060 break;
2061 #ifdef TARGET_SPARC64
2062 case 0x2: /* V9 rdccr */
2063 gen_op_rdccr();
2064 gen_movl_T0_reg(rd);
2065 break;
2066 case 0x3: /* V9 rdasi */
2067 gen_op_movl_T0_env(offsetof(CPUSPARCState, asi));
2068 gen_movl_T0_reg(rd);
2069 break;
2070 case 0x4: /* V9 rdtick */
2071 {
2072 TCGv r_tickptr;
2073
2074 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2075 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2076 offsetof(CPUState, tick));
2077 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2078 r_tickptr);
2079 gen_movl_T0_reg(rd);
2080 }
2081 break;
2082 case 0x5: /* V9 rdpc */
2083 tcg_gen_movi_tl(cpu_T[0], dc->pc);
2084 gen_movl_T0_reg(rd);
2085 break;
2086 case 0x6: /* V9 rdfprs */
2087 gen_op_movl_T0_env(offsetof(CPUSPARCState, fprs));
2088 gen_movl_T0_reg(rd);
2089 break;
2090 case 0xf: /* V9 membar */
2091 break; /* no effect */
2092 case 0x13: /* Graphics Status */
2093 if (gen_trap_ifnofpu(dc))
2094 goto jmp_insn;
2095 gen_op_movtl_T0_env(offsetof(CPUSPARCState, gsr));
2096 gen_movl_T0_reg(rd);
2097 break;
2098 case 0x17: /* Tick compare */
2099 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tick_cmpr));
2100 gen_movl_T0_reg(rd);
2101 break;
2102 case 0x18: /* System tick */
2103 {
2104 TCGv r_tickptr;
2105
2106 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2107 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2108 offsetof(CPUState, stick));
2109 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2110 r_tickptr);
2111 gen_movl_T0_reg(rd);
2112 }
2113 break;
2114 case 0x19: /* System tick compare */
2115 gen_op_movtl_T0_env(offsetof(CPUSPARCState, stick_cmpr));
2116 gen_movl_T0_reg(rd);
2117 break;
2118 case 0x10: /* Performance Control */
2119 case 0x11: /* Performance Instrumentation Counter */
2120 case 0x12: /* Dispatch Control */
2121 case 0x14: /* Softint set, WO */
2122 case 0x15: /* Softint clear, WO */
2123 case 0x16: /* Softint write */
2124 #endif
2125 default:
2126 goto illegal_insn;
2127 }
2128 #if !defined(CONFIG_USER_ONLY)
2129 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2130 #ifndef TARGET_SPARC64
2131 if (!supervisor(dc))
2132 goto priv_insn;
2133 tcg_gen_helper_1_0(helper_rdpsr, cpu_T[0]);
2134 #else
2135 if (!hypervisor(dc))
2136 goto priv_insn;
2137 rs1 = GET_FIELD(insn, 13, 17);
2138 switch (rs1) {
2139 case 0: // hpstate
2140 // gen_op_rdhpstate();
2141 break;
2142 case 1: // htstate
2143 // gen_op_rdhtstate();
2144 break;
2145 case 3: // hintp
2146 gen_op_movl_T0_env(offsetof(CPUSPARCState, hintp));
2147 break;
2148 case 5: // htba
2149 gen_op_movl_T0_env(offsetof(CPUSPARCState, htba));
2150 break;
2151 case 6: // hver
2152 gen_op_movl_T0_env(offsetof(CPUSPARCState, hver));
2153 break;
2154 case 31: // hstick_cmpr
2155 gen_op_movl_env_T0(offsetof(CPUSPARCState, hstick_cmpr));
2156 break;
2157 default:
2158 goto illegal_insn;
2159 }
2160 #endif
2161 gen_movl_T0_reg(rd);
2162 break;
2163 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2164 if (!supervisor(dc))
2165 goto priv_insn;
2166 #ifdef TARGET_SPARC64
2167 rs1 = GET_FIELD(insn, 13, 17);
2168 switch (rs1) {
2169 case 0: // tpc
2170 {
2171 TCGv r_tsptr;
2172
2173 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2174 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2175 offsetof(CPUState, tsptr));
2176 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2177 offsetof(trap_state, tpc));
2178 }
2179 break;
2180 case 1: // tnpc
2181 {
2182 TCGv r_tsptr;
2183
2184 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2185 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2186 offsetof(CPUState, tsptr));
2187 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2188 offsetof(trap_state, tnpc));
2189 }
2190 break;
2191 case 2: // tstate
2192 {
2193 TCGv r_tsptr;
2194
2195 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2196 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2197 offsetof(CPUState, tsptr));
2198 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2199 offsetof(trap_state, tstate));
2200 }
2201 break;
2202 case 3: // tt
2203 {
2204 TCGv r_tsptr;
2205
2206 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2207 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2208 offsetof(CPUState, tsptr));
2209 tcg_gen_ld_i32(cpu_T[0], r_tsptr,
2210 offsetof(trap_state, tt));
2211 }
2212 break;
2213 case 4: // tick
2214 {
2215 TCGv r_tickptr;
2216
2217 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2218 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2219 offsetof(CPUState, tick));
2220 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2221 r_tickptr);
2222 gen_movl_T0_reg(rd);
2223 }
2224 break;
2225 case 5: // tba
2226 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
2227 break;
2228 case 6: // pstate
2229 gen_op_movl_T0_env(offsetof(CPUSPARCState, pstate));
2230 break;
2231 case 7: // tl
2232 gen_op_movl_T0_env(offsetof(CPUSPARCState, tl));
2233 break;
2234 case 8: // pil
2235 gen_op_movl_T0_env(offsetof(CPUSPARCState, psrpil));
2236 break;
2237 case 9: // cwp
2238 gen_op_rdcwp();
2239 break;
2240 case 10: // cansave
2241 gen_op_movl_T0_env(offsetof(CPUSPARCState, cansave));
2242 break;
2243 case 11: // canrestore
2244 gen_op_movl_T0_env(offsetof(CPUSPARCState, canrestore));
2245 break;
2246 case 12: // cleanwin
2247 gen_op_movl_T0_env(offsetof(CPUSPARCState, cleanwin));
2248 break;
2249 case 13: // otherwin
2250 gen_op_movl_T0_env(offsetof(CPUSPARCState, otherwin));
2251 break;
2252 case 14: // wstate
2253 gen_op_movl_T0_env(offsetof(CPUSPARCState, wstate));
2254 break;
2255 case 16: // UA2005 gl
2256 gen_op_movl_T0_env(offsetof(CPUSPARCState, gl));
2257 break;
2258 case 26: // UA2005 strand status
2259 if (!hypervisor(dc))
2260 goto priv_insn;
2261 gen_op_movl_T0_env(offsetof(CPUSPARCState, ssr));
2262 break;
2263 case 31: // ver
2264 gen_op_movtl_T0_env(offsetof(CPUSPARCState, version));
2265 break;
2266 case 15: // fq
2267 default:
2268 goto illegal_insn;
2269 }
2270 #else
2271 gen_op_movl_T0_env(offsetof(CPUSPARCState, wim));
2272 #endif
2273 gen_movl_T0_reg(rd);
2274 break;
2275 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2276 #ifdef TARGET_SPARC64
2277 gen_op_flushw();
2278 #else
2279 if (!supervisor(dc))
2280 goto priv_insn;
2281 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
2282 gen_movl_T0_reg(rd);
2283 #endif
2284 break;
2285 #endif
2286 } else if (xop == 0x34) { /* FPU Operations */
2287 if (gen_trap_ifnofpu(dc))
2288 goto jmp_insn;
2289 gen_op_clear_ieee_excp_and_FTT();
2290 rs1 = GET_FIELD(insn, 13, 17);
2291 rs2 = GET_FIELD(insn, 27, 31);
2292 xop = GET_FIELD(insn, 18, 26);
2293 switch (xop) {
2294 case 0x1: /* fmovs */
2295 gen_op_load_fpr_FT0(rs2);
2296 gen_op_store_FT0_fpr(rd);
2297 break;
2298 case 0x5: /* fnegs */
2299 gen_op_load_fpr_FT1(rs2);
2300 gen_op_fnegs();
2301 gen_op_store_FT0_fpr(rd);
2302 break;
2303 case 0x9: /* fabss */
2304 gen_op_load_fpr_FT1(rs2);
2305 tcg_gen_helper_0_0(helper_fabss);
2306 gen_op_store_FT0_fpr(rd);
2307 break;
2308 case 0x29: /* fsqrts */
2309 gen_op_load_fpr_FT1(rs2);
2310 gen_clear_float_exceptions();
2311 tcg_gen_helper_0_0(helper_fsqrts);
2312 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2313 gen_op_store_FT0_fpr(rd);
2314 break;
2315 case 0x2a: /* fsqrtd */
2316 gen_op_load_fpr_DT1(DFPREG(rs2));
2317 gen_clear_float_exceptions();
2318 tcg_gen_helper_0_0(helper_fsqrtd);
2319 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2320 gen_op_store_DT0_fpr(DFPREG(rd));
2321 break;
2322 case 0x2b: /* fsqrtq */
2323 #if defined(CONFIG_USER_ONLY)
2324 gen_op_load_fpr_QT1(QFPREG(rs2));
2325 gen_clear_float_exceptions();
2326 tcg_gen_helper_0_0(helper_fsqrtq);
2327 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2328 gen_op_store_QT0_fpr(QFPREG(rd));
2329 break;
2330 #else
2331 goto nfpu_insn;
2332 #endif
2333 case 0x41:
2334 gen_op_load_fpr_FT0(rs1);
2335 gen_op_load_fpr_FT1(rs2);
2336 gen_clear_float_exceptions();
2337 gen_op_fadds();
2338 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2339 gen_op_store_FT0_fpr(rd);
2340 break;
2341 case 0x42:
2342 gen_op_load_fpr_DT0(DFPREG(rs1));
2343 gen_op_load_fpr_DT1(DFPREG(rs2));
2344 gen_clear_float_exceptions();
2345 gen_op_faddd();
2346 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2347 gen_op_store_DT0_fpr(DFPREG(rd));
2348 break;
2349 case 0x43: /* faddq */
2350 #if defined(CONFIG_USER_ONLY)
2351 gen_op_load_fpr_QT0(QFPREG(rs1));
2352 gen_op_load_fpr_QT1(QFPREG(rs2));
2353 gen_clear_float_exceptions();
2354 gen_op_faddq();
2355 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2356 gen_op_store_QT0_fpr(QFPREG(rd));
2357 break;
2358 #else
2359 goto nfpu_insn;
2360 #endif
2361 case 0x45:
2362 gen_op_load_fpr_FT0(rs1);
2363 gen_op_load_fpr_FT1(rs2);
2364 gen_clear_float_exceptions();
2365 gen_op_fsubs();
2366 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2367 gen_op_store_FT0_fpr(rd);
2368 break;
2369 case 0x46:
2370 gen_op_load_fpr_DT0(DFPREG(rs1));
2371 gen_op_load_fpr_DT1(DFPREG(rs2));
2372 gen_clear_float_exceptions();
2373 gen_op_fsubd();
2374 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2375 gen_op_store_DT0_fpr(DFPREG(rd));
2376 break;
2377 case 0x47: /* fsubq */
2378 #if defined(CONFIG_USER_ONLY)
2379 gen_op_load_fpr_QT0(QFPREG(rs1));
2380 gen_op_load_fpr_QT1(QFPREG(rs2));
2381 gen_clear_float_exceptions();
2382 gen_op_fsubq();
2383 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2384 gen_op_store_QT0_fpr(QFPREG(rd));
2385 break;
2386 #else
2387 goto nfpu_insn;
2388 #endif
2389 case 0x49:
2390 gen_op_load_fpr_FT0(rs1);
2391 gen_op_load_fpr_FT1(rs2);
2392 gen_clear_float_exceptions();
2393 gen_op_fmuls();
2394 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2395 gen_op_store_FT0_fpr(rd);
2396 break;
2397 case 0x4a:
2398 gen_op_load_fpr_DT0(DFPREG(rs1));
2399 gen_op_load_fpr_DT1(DFPREG(rs2));
2400 gen_clear_float_exceptions();
2401 gen_op_fmuld();
2402 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2403 gen_op_store_DT0_fpr(DFPREG(rd));
2404 break;
2405 case 0x4b: /* fmulq */
2406 #if defined(CONFIG_USER_ONLY)
2407 gen_op_load_fpr_QT0(QFPREG(rs1));
2408 gen_op_load_fpr_QT1(QFPREG(rs2));
2409 gen_clear_float_exceptions();
2410 gen_op_fmulq();
2411 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2412 gen_op_store_QT0_fpr(QFPREG(rd));
2413 break;
2414 #else
2415 goto nfpu_insn;
2416 #endif
2417 case 0x4d:
2418 gen_op_load_fpr_FT0(rs1);
2419 gen_op_load_fpr_FT1(rs2);
2420 gen_clear_float_exceptions();
2421 gen_op_fdivs();
2422 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2423 gen_op_store_FT0_fpr(rd);
2424 break;
2425 case 0x4e:
2426 gen_op_load_fpr_DT0(DFPREG(rs1));
2427 gen_op_load_fpr_DT1(DFPREG(rs2));
2428 gen_clear_float_exceptions();
2429 gen_op_fdivd();
2430 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2431 gen_op_store_DT0_fpr(DFPREG(rd));
2432 break;
2433 case 0x4f: /* fdivq */
2434 #if defined(CONFIG_USER_ONLY)
2435 gen_op_load_fpr_QT0(QFPREG(rs1));
2436 gen_op_load_fpr_QT1(QFPREG(rs2));
2437 gen_clear_float_exceptions();
2438 gen_op_fdivq();
2439 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2440 gen_op_store_QT0_fpr(QFPREG(rd));
2441 break;
2442 #else
2443 goto nfpu_insn;
2444 #endif
2445 case 0x69:
2446 gen_op_load_fpr_FT0(rs1);
2447 gen_op_load_fpr_FT1(rs2);
2448 gen_clear_float_exceptions();
2449 gen_op_fsmuld();
2450 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2451 gen_op_store_DT0_fpr(DFPREG(rd));
2452 break;
2453 case 0x6e: /* fdmulq */
2454 #if defined(CONFIG_USER_ONLY)
2455 gen_op_load_fpr_DT0(DFPREG(rs1));
2456 gen_op_load_fpr_DT1(DFPREG(rs2));
2457 gen_clear_float_exceptions();
2458 gen_op_fdmulq();
2459 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2460 gen_op_store_QT0_fpr(QFPREG(rd));
2461 break;
2462 #else
2463 goto nfpu_insn;
2464 #endif
2465 case 0xc4:
2466 gen_op_load_fpr_FT1(rs2);
2467 gen_clear_float_exceptions();
2468 gen_op_fitos();
2469 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2470 gen_op_store_FT0_fpr(rd);
2471 break;
2472 case 0xc6:
2473 gen_op_load_fpr_DT1(DFPREG(rs2));
2474 gen_clear_float_exceptions();
2475 gen_op_fdtos();
2476 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2477 gen_op_store_FT0_fpr(rd);
2478 break;
2479 case 0xc7: /* fqtos */
2480 #if defined(CONFIG_USER_ONLY)
2481 gen_op_load_fpr_QT1(QFPREG(rs2));
2482 gen_clear_float_exceptions();
2483 gen_op_fqtos();
2484 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2485 gen_op_store_FT0_fpr(rd);
2486 break;
2487 #else
2488 goto nfpu_insn;
2489 #endif
2490 case 0xc8:
2491 gen_op_load_fpr_FT1(rs2);
2492 gen_op_fitod();
2493 gen_op_store_DT0_fpr(DFPREG(rd));
2494 break;
2495 case 0xc9:
2496 gen_op_load_fpr_FT1(rs2);
2497 gen_op_fstod();
2498 gen_op_store_DT0_fpr(DFPREG(rd));
2499 break;
2500 case 0xcb: /* fqtod */
2501 #if defined(CONFIG_USER_ONLY)
2502 gen_op_load_fpr_QT1(QFPREG(rs2));
2503 gen_clear_float_exceptions();
2504 gen_op_fqtod();
2505 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2506 gen_op_store_DT0_fpr(DFPREG(rd));
2507 break;
2508 #else
2509 goto nfpu_insn;
2510 #endif
2511 case 0xcc: /* fitoq */
2512 #if defined(CONFIG_USER_ONLY)
2513 gen_op_load_fpr_FT1(rs2);
2514 gen_op_fitoq();
2515 gen_op_store_QT0_fpr(QFPREG(rd));
2516 break;
2517 #else
2518 goto nfpu_insn;
2519 #endif
2520 case 0xcd: /* fstoq */
2521 #if defined(CONFIG_USER_ONLY)
2522 gen_op_load_fpr_FT1(rs2);
2523 gen_op_fstoq();
2524 gen_op_store_QT0_fpr(QFPREG(rd));
2525 break;
2526 #else
2527 goto nfpu_insn;
2528 #endif
2529 case 0xce: /* fdtoq */
2530 #if defined(CONFIG_USER_ONLY)
2531 gen_op_load_fpr_DT1(DFPREG(rs2));
2532 gen_op_fdtoq();
2533 gen_op_store_QT0_fpr(QFPREG(rd));
2534 break;
2535 #else
2536 goto nfpu_insn;
2537 #endif
2538 case 0xd1:
2539 gen_op_load_fpr_FT1(rs2);
2540 gen_clear_float_exceptions();
2541 gen_op_fstoi();
2542 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2543 gen_op_store_FT0_fpr(rd);
2544 break;
2545 case 0xd2:
2546 gen_op_load_fpr_DT1(DFPREG(rs2));
2547 gen_clear_float_exceptions();
2548 gen_op_fdtoi();
2549 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2550 gen_op_store_FT0_fpr(rd);
2551 break;
2552 case 0xd3: /* fqtoi */
2553 #if defined(CONFIG_USER_ONLY)
2554 gen_op_load_fpr_QT1(QFPREG(rs2));
2555 gen_clear_float_exceptions();
2556 gen_op_fqtoi();
2557 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2558 gen_op_store_FT0_fpr(rd);
2559 break;
2560 #else
2561 goto nfpu_insn;
2562 #endif
2563 #ifdef TARGET_SPARC64
2564 case 0x2: /* V9 fmovd */
2565 gen_op_load_fpr_DT0(DFPREG(rs2));
2566 gen_op_store_DT0_fpr(DFPREG(rd));
2567 break;
2568 case 0x3: /* V9 fmovq */
2569 #if defined(CONFIG_USER_ONLY)
2570 gen_op_load_fpr_QT0(QFPREG(rs2));
2571 gen_op_store_QT0_fpr(QFPREG(rd));
2572 break;
2573 #else
2574 goto nfpu_insn;
2575 #endif
2576 case 0x6: /* V9 fnegd */
2577 gen_op_load_fpr_DT1(DFPREG(rs2));
2578 gen_op_fnegd();
2579 gen_op_store_DT0_fpr(DFPREG(rd));
2580 break;
2581 case 0x7: /* V9 fnegq */
2582 #if defined(CONFIG_USER_ONLY)
2583 gen_op_load_fpr_QT1(QFPREG(rs2));
2584 gen_op_fnegq();
2585 gen_op_store_QT0_fpr(QFPREG(rd));
2586 break;
2587 #else
2588 goto nfpu_insn;
2589 #endif
2590 case 0xa: /* V9 fabsd */
2591 gen_op_load_fpr_DT1(DFPREG(rs2));
2592 tcg_gen_helper_0_0(helper_fabsd);
2593 gen_op_store_DT0_fpr(DFPREG(rd));
2594 break;
2595 case 0xb: /* V9 fabsq */
2596 #if defined(CONFIG_USER_ONLY)
2597 gen_op_load_fpr_QT1(QFPREG(rs2));
2598 tcg_gen_helper_0_0(helper_fabsq);
2599 gen_op_store_QT0_fpr(QFPREG(rd));
2600 break;
2601 #else
2602 goto nfpu_insn;
2603 #endif
2604 case 0x81: /* V9 fstox */
2605 gen_op_load_fpr_FT1(rs2);
2606 gen_clear_float_exceptions();
2607 gen_op_fstox();
2608 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2609 gen_op_store_DT0_fpr(DFPREG(rd));
2610 break;
2611 case 0x82: /* V9 fdtox */
2612 gen_op_load_fpr_DT1(DFPREG(rs2));
2613 gen_clear_float_exceptions();
2614 gen_op_fdtox();
2615 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2616 gen_op_store_DT0_fpr(DFPREG(rd));
2617 break;
2618 case 0x83: /* V9 fqtox */
2619 #if defined(CONFIG_USER_ONLY)
2620 gen_op_load_fpr_QT1(QFPREG(rs2));
2621 gen_clear_float_exceptions();
2622 gen_op_fqtox();
2623 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2624 gen_op_store_DT0_fpr(DFPREG(rd));
2625 break;
2626 #else
2627 goto nfpu_insn;
2628 #endif
2629 case 0x84: /* V9 fxtos */
2630 gen_op_load_fpr_DT1(DFPREG(rs2));
2631 gen_clear_float_exceptions();
2632 gen_op_fxtos();
2633 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2634 gen_op_store_FT0_fpr(rd);
2635 break;
2636 case 0x88: /* V9 fxtod */
2637 gen_op_load_fpr_DT1(DFPREG(rs2));
2638 gen_clear_float_exceptions();
2639 gen_op_fxtod();
2640 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2641 gen_op_store_DT0_fpr(DFPREG(rd));
2642 break;
2643 case 0x8c: /* V9 fxtoq */
2644 #if defined(CONFIG_USER_ONLY)
2645 gen_op_load_fpr_DT1(DFPREG(rs2));
2646 gen_clear_float_exceptions();
2647 gen_op_fxtoq();
2648 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2649 gen_op_store_QT0_fpr(QFPREG(rd));
2650 break;
2651 #else
2652 goto nfpu_insn;
2653 #endif
2654 #endif
2655 default:
2656 goto illegal_insn;
2657 }
2658 } else if (xop == 0x35) { /* FPU Operations */
2659 #ifdef TARGET_SPARC64
2660 int cond;
2661 #endif
2662 if (gen_trap_ifnofpu(dc))
2663 goto jmp_insn;
2664 gen_op_clear_ieee_excp_and_FTT();
2665 rs1 = GET_FIELD(insn, 13, 17);
2666 rs2 = GET_FIELD(insn, 27, 31);
2667 xop = GET_FIELD(insn, 18, 26);
2668 #ifdef TARGET_SPARC64
2669 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2670 TCGv r_zero;
2671 int l1;
2672
2673 l1 = gen_new_label();
2674 r_zero = tcg_const_tl(0);
2675 cond = GET_FIELD_SP(insn, 14, 17);
2676 rs1 = GET_FIELD(insn, 13, 17);
2677 gen_movl_reg_T0(rs1);
2678 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
2679 gen_op_load_fpr_FT0(rs2);
2680 gen_op_store_FT0_fpr(rd);
2681 gen_set_label(l1);
2682 break;
2683 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2684 TCGv r_zero;
2685 int l1;
2686
2687 l1 = gen_new_label();
2688 r_zero = tcg_const_tl(0);
2689 cond = GET_FIELD_SP(insn, 14, 17);
2690 rs1 = GET_FIELD(insn, 13, 17);
2691 gen_movl_reg_T0(rs1);
2692 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
2693 gen_op_load_fpr_DT0(DFPREG(rs2));
2694 gen_op_store_DT0_fpr(DFPREG(rd));
2695 gen_set_label(l1);
2696 break;
2697 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2698 #if defined(CONFIG_USER_ONLY)
2699 TCGv r_zero;
2700 int l1;
2701
2702 l1 = gen_new_label();
2703 r_zero = tcg_const_tl(0);
2704 cond = GET_FIELD_SP(insn, 14, 17);
2705 rs1 = GET_FIELD(insn, 13, 17);
2706 gen_movl_reg_T0(rs1);
2707 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
2708 gen_op_load_fpr_QT0(QFPREG(rs2));
2709 gen_op_store_QT0_fpr(QFPREG(rd));
2710 gen_set_label(l1);
2711 break;
2712 #else
2713 goto nfpu_insn;
2714 #endif
2715 }
2716 #endif
2717 switch (xop) {
2718 #ifdef TARGET_SPARC64
2719 #define FMOVCC(size_FDQ, fcc) \
2720 { \
2721 TCGv r_zero, r_cond; \
2722 int l1; \
2723 \
2724 l1 = gen_new_label(); \
2725 r_zero = tcg_const_tl(0); \
2726 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2727 cond = GET_FIELD_SP(insn, 14, 17); \
2728 gen_fcond(r_cond, fcc, cond); \
2729 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1); \
2730 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2731 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2732 gen_set_label(l1); \
2733 }
2734 case 0x001: /* V9 fmovscc %fcc0 */
2735 FMOVCC(F, 0);
2736 break;
2737 case 0x002: /* V9 fmovdcc %fcc0 */
2738 FMOVCC(D, 0);
2739 break;
2740 case 0x003: /* V9 fmovqcc %fcc0 */
2741 #if defined(CONFIG_USER_ONLY)
2742 FMOVCC(Q, 0);
2743 break;
2744 #else
2745 goto nfpu_insn;
2746 #endif
2747 case 0x041: /* V9 fmovscc %fcc1 */
2748 FMOVCC(F, 1);
2749 break;
2750 case 0x042: /* V9 fmovdcc %fcc1 */
2751 FMOVCC(D, 1);
2752 break;
2753 case 0x043: /* V9 fmovqcc %fcc1 */
2754 #if defined(CONFIG_USER_ONLY)
2755 FMOVCC(Q, 1);
2756 break;
2757 #else
2758 goto nfpu_insn;
2759 #endif
2760 case 0x081: /* V9 fmovscc %fcc2 */
2761 FMOVCC(F, 2);
2762 break;
2763 case 0x082: /* V9 fmovdcc %fcc2 */
2764 FMOVCC(D, 2);
2765 break;
2766 case 0x083: /* V9 fmovqcc %fcc2 */
2767 #if defined(CONFIG_USER_ONLY)
2768 FMOVCC(Q, 2);
2769 break;
2770 #else
2771 goto nfpu_insn;
2772 #endif
2773 case 0x0c1: /* V9 fmovscc %fcc3 */
2774 FMOVCC(F, 3);
2775 break;
2776 case 0x0c2: /* V9 fmovdcc %fcc3 */
2777 FMOVCC(D, 3);
2778 break;
2779 case 0x0c3: /* V9 fmovqcc %fcc3 */
2780 #if defined(CONFIG_USER_ONLY)
2781 FMOVCC(Q, 3);
2782 break;
2783 #else
2784 goto nfpu_insn;
2785 #endif
2786 #undef FMOVCC
2787 #define FMOVCC(size_FDQ, icc) \
2788 { \
2789 TCGv r_zero, r_cond; \
2790 int l1; \
2791 \
2792 l1 = gen_new_label(); \
2793 r_zero = tcg_const_tl(0); \
2794 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2795 cond = GET_FIELD_SP(insn, 14, 17); \
2796 gen_cond(r_cond, icc, cond); \
2797 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, r_zero, l1); \
2798 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2799 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2800 gen_set_label(l1); \
2801 }
2802
2803 case 0x101: /* V9 fmovscc %icc */
2804 FMOVCC(F, 0);
2805 break;
2806 case 0x102: /* V9 fmovdcc %icc */
2807 FMOVCC(D, 0);
2808 case 0x103: /* V9 fmovqcc %icc */
2809 #if defined(CONFIG_USER_ONLY)
2810 FMOVCC(D, 0);
2811 break;
2812 #else
2813 goto nfpu_insn;
2814 #endif
2815 case 0x181: /* V9 fmovscc %xcc */
2816 FMOVCC(F, 1);
2817 break;
2818 case 0x182: /* V9 fmovdcc %xcc */
2819 FMOVCC(D, 1);
2820 break;
2821 case 0x183: /* V9 fmovqcc %xcc */
2822 #if defined(CONFIG_USER_ONLY)
2823 FMOVCC(Q, 1);
2824 break;
2825 #else
2826 goto nfpu_insn;
2827 #endif
2828 #undef FMOVCC
2829 #endif
2830 case 0x51: /* fcmps, V9 %fcc */
2831 gen_op_load_fpr_FT0(rs1);
2832 gen_op_load_fpr_FT1(rs2);
2833 gen_op_fcmps(rd & 3);
2834 break;
2835 case 0x52: /* fcmpd, V9 %fcc */
2836 gen_op_load_fpr_DT0(DFPREG(rs1));
2837 gen_op_load_fpr_DT1(DFPREG(rs2));
2838 gen_op_fcmpd(rd & 3);
2839 break;
2840 case 0x53: /* fcmpq, V9 %fcc */
2841 #if defined(CONFIG_USER_ONLY)
2842 gen_op_load_fpr_QT0(QFPREG(rs1));
2843 gen_op_load_fpr_QT1(QFPREG(rs2));
2844 gen_op_fcmpq(rd & 3);
2845 break;
2846 #else /* !defined(CONFIG_USER_ONLY) */
2847 goto nfpu_insn;
2848 #endif
2849 case 0x55: /* fcmpes, V9 %fcc */
2850 gen_op_load_fpr_FT0(rs1);
2851 gen_op_load_fpr_FT1(rs2);
2852 gen_op_fcmpes(rd & 3);
2853 break;
2854 case 0x56: /* fcmped, V9 %fcc */
2855 gen_op_load_fpr_DT0(DFPREG(rs1));
2856 gen_op_load_fpr_DT1(DFPREG(rs2));
2857 gen_op_fcmped(rd & 3);
2858 break;
2859 case 0x57: /* fcmpeq, V9 %fcc */
2860 #if defined(CONFIG_USER_ONLY)
2861 gen_op_load_fpr_QT0(QFPREG(rs1));
2862 gen_op_load_fpr_QT1(QFPREG(rs2));
2863 gen_op_fcmpeq(rd & 3);
2864 break;
2865 #else/* !defined(CONFIG_USER_ONLY) */
2866 goto nfpu_insn;
2867 #endif
2868 default:
2869 goto illegal_insn;
2870 }
2871 #if defined(OPTIM)
2872 } else if (xop == 0x2) {
2873 // clr/mov shortcut
2874
2875 rs1 = GET_FIELD(insn, 13, 17);
2876 if (rs1 == 0) {
2877 // or %g0, x, y -> mov T0, x; mov y, T0
2878 if (IS_IMM) { /* immediate */
2879 rs2 = GET_FIELDs(insn, 19, 31);
2880 tcg_gen_movi_tl(cpu_T[0], (int)rs2);
2881 } else { /* register */
2882 rs2 = GET_FIELD(insn, 27, 31);
2883 gen_movl_reg_T0(rs2);
2884 }
2885 } else {
2886 gen_movl_reg_T0(rs1);
2887 if (IS_IMM) { /* immediate */
2888 rs2 = GET_FIELDs(insn, 19, 31);
2889 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], (int)rs2);
2890 } else { /* register */
2891 // or x, %g0, y -> mov T1, x; mov y, T1
2892 rs2 = GET_FIELD(insn, 27, 31);
2893 if (rs2 != 0) {
2894 gen_movl_reg_T1(rs2);
2895 gen_op_or_T1_T0();
2896 }
2897 }
2898 }
2899 gen_movl_T0_reg(rd);
2900 #endif
2901 #ifdef TARGET_SPARC64
2902 } else if (xop == 0x25) { /* sll, V9 sllx */
2903 rs1 = GET_FIELD(insn, 13, 17);
2904 gen_movl_reg_T0(rs1);
2905 if (IS_IMM) { /* immediate */
2906 rs2 = GET_FIELDs(insn, 20, 31);
2907 if (insn & (1 << 12)) {
2908 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2909 } else {
2910 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2911 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2912 }
2913 } else { /* register */
2914 rs2 = GET_FIELD(insn, 27, 31);
2915 gen_movl_reg_T1(rs2);
2916 if (insn & (1 << 12)) {
2917 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2918 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2919 } else {
2920 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2921 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2922 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2923 }
2924 }
2925 gen_movl_T0_reg(rd);
2926 } else if (xop == 0x26) { /* srl, V9 srlx */
2927 rs1 = GET_FIELD(insn, 13, 17);
2928 gen_movl_reg_T0(rs1);
2929 if (IS_IMM) { /* immediate */
2930 rs2 = GET_FIELDs(insn, 20, 31);
2931 if (insn & (1 << 12)) {
2932 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2933 } else {
2934 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2935 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2936 }
2937 } else { /* register */
2938 rs2 = GET_FIELD(insn, 27, 31);
2939 gen_movl_reg_T1(rs2);
2940 if (insn & (1 << 12)) {
2941 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2942 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2943 } else {
2944 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2945 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2946 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2947 }
2948 }
2949 gen_movl_T0_reg(rd);
2950 } else if (xop == 0x27) { /* sra, V9 srax */
2951 rs1 = GET_FIELD(insn, 13, 17);
2952 gen_movl_reg_T0(rs1);
2953 if (IS_IMM) { /* immediate */
2954 rs2 = GET_FIELDs(insn, 20, 31);
2955 if (insn & (1 << 12)) {
2956 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2957 } else {
2958 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2959 tcg_gen_ext_i32_i64(cpu_T[0], cpu_T[0]);
2960 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2961 }
2962 } else { /* register */
2963 rs2 = GET_FIELD(insn, 27, 31);
2964 gen_movl_reg_T1(rs2);
2965 if (insn & (1 << 12)) {
2966 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2967 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2968 } else {
2969 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2970 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2971 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2972 }
2973 }
2974 gen_movl_T0_reg(rd);
2975 #endif
2976 } else if (xop < 0x36) {
2977 rs1 = GET_FIELD(insn, 13, 17);
2978 gen_movl_reg_T0(rs1);
2979 if (IS_IMM) { /* immediate */
2980 rs2 = GET_FIELDs(insn, 19, 31);
2981 gen_movl_simm_T1(rs2);
2982 } else { /* register */
2983 rs2 = GET_FIELD(insn, 27, 31);
2984 gen_movl_reg_T1(rs2);
2985 }
2986 if (xop < 0x20) {
2987 switch (xop & ~0x10) {
2988 case 0x0:
2989 if (xop & 0x10)
2990 gen_op_add_T1_T0_cc();
2991 else
2992 gen_op_add_T1_T0();
2993 break;
2994 case 0x1:
2995 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
2996 if (xop & 0x10)
2997 gen_op_logic_T0_cc();
2998 break;
2999 case 0x2:
3000 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3001 if (xop & 0x10)
3002 gen_op_logic_T0_cc();
3003 break;
3004 case 0x3:
3005 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3006 if (xop & 0x10)
3007 gen_op_logic_T0_cc();
3008 break;
3009 case 0x4:
3010 if (xop & 0x10)
3011 gen_op_sub_T1_T0_cc();
3012 else
3013 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3014 break;
3015 case 0x5:
3016 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3017 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3018 if (xop & 0x10)
3019 gen_op_logic_T0_cc();
3020 break;
3021 case 0x6:
3022 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3023 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3024 if (xop & 0x10)
3025 gen_op_logic_T0_cc();
3026 break;
3027 case 0x7:
3028 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3029 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3030 if (xop & 0x10)
3031 gen_op_logic_T0_cc();
3032 break;
3033 case 0x8:
3034 if (xop & 0x10)
3035 gen_op_addx_T1_T0_cc();
3036 else {
3037 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3038 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
3039 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3040 }
3041 break;
3042 #ifdef TARGET_SPARC64
3043 case 0x9: /* V9 mulx */
3044 tcg_gen_mul_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
3045 break;
3046 #endif
3047 case 0xa:
3048 gen_op_umul_T1_T0();
3049 if (xop & 0x10)
3050 gen_op_logic_T0_cc();
3051 break;
3052 case 0xb:
3053 gen_op_smul_T1_T0();
3054 if (xop & 0x10)
3055 gen_op_logic_T0_cc();
3056 break;
3057 case 0xc:
3058 if (xop & 0x10)
3059 gen_op_subx_T1_T0_cc();
3060 else {
3061 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3062 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
3063 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3064 }
3065 break;
3066 #ifdef TARGET_SPARC64
3067 case 0xd: /* V9 udivx */
3068 gen_trap_ifdivzero_i64(cpu_T[1]);
3069 tcg_gen_divu_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
3070 break;
3071 #endif
3072 case 0xe:
3073 gen_op_udiv_T1_T0();
3074 if (xop & 0x10)
3075 gen_op_div_cc();
3076 break;
3077 case 0xf:
3078 gen_op_sdiv_T1_T0();
3079 if (xop & 0x10)
3080 gen_op_div_cc();
3081 break;
3082 default:
3083 goto illegal_insn;
3084 }
3085 gen_movl_T0_reg(rd);
3086 } else {
3087 switch (xop) {
3088 case 0x20: /* taddcc */
3089 gen_op_tadd_T1_T0_cc();
3090 gen_movl_T0_reg(rd);
3091 break;
3092 case 0x21: /* tsubcc */
3093 gen_op_tsub_T1_T0_cc();
3094 gen_movl_T0_reg(rd);
3095 break;
3096 case 0x22: /* taddcctv */
3097 save_state(dc);
3098 gen_op_tadd_T1_T0_ccTV();
3099 gen_movl_T0_reg(rd);
3100 break;
3101 case 0x23: /* tsubcctv */
3102 save_state(dc);
3103 gen_op_tsub_T1_T0_ccTV();
3104 gen_movl_T0_reg(rd);
3105 break;
3106 case 0x24: /* mulscc */
3107 gen_op_mulscc_T1_T0();
3108 gen_movl_T0_reg(rd);
3109 break;
3110 #ifndef TARGET_SPARC64
3111 case 0x25: /* sll */
3112 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
3113 tcg_gen_shl_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
3114 gen_movl_T0_reg(rd);
3115 break;
3116 case 0x26: /* srl */
3117 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
3118 tcg_gen_shr_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
3119 gen_movl_T0_reg(rd);
3120 break;
3121 case 0x27: /* sra */
3122 tcg_gen_andi_i32(cpu_T[1], cpu_T[1], 0x1f);
3123 tcg_gen_sar_i32(cpu_T[0], cpu_T[0], cpu_T[1]);
3124 gen_movl_T0_reg(rd);
3125 break;
3126 #endif
3127 case 0x30:
3128 {
3129 switch(rd) {
3130 case 0: /* wry */
3131 gen_op_xor_T1_T0();
3132 gen_op_movtl_env_T0(offsetof(CPUSPARCState, y));
3133 break;
3134 #ifndef TARGET_SPARC64
3135 case 0x01 ... 0x0f: /* undefined in the
3136 SPARCv8 manual, nop
3137 on the microSPARC
3138 II */
3139 case 0x10 ... 0x1f: /* implementation-dependent
3140 in the SPARCv8
3141 manual, nop on the
3142 microSPARC II */
3143 break;
3144 #else
3145 case 0x2: /* V9 wrccr */
3146 gen_op_xor_T1_T0();
3147 gen_op_wrccr();
3148 break;
3149 case 0x3: /* V9 wrasi */
3150 gen_op_xor_T1_T0();
3151 gen_op_movl_env_T0(offsetof(CPUSPARCState, asi));
3152 break;
3153 case 0x6: /* V9 wrfprs */
3154 gen_op_xor_T1_T0();
3155 gen_op_movl_env_T0(offsetof(CPUSPARCState, fprs));
3156 save_state(dc);
3157 gen_op_next_insn();
3158 tcg_gen_exit_tb(0);
3159 dc->is_br = 1;
3160 break;
3161 case 0xf: /* V9 sir, nop if user */
3162 #if !defined(CONFIG_USER_ONLY)
3163 if (supervisor(dc))
3164 ; // XXX
3165 #endif
3166 break;
3167 case 0x13: /* Graphics Status */
3168 if (gen_trap_ifnofpu(dc))
3169 goto jmp_insn;
3170 gen_op_xor_T1_T0();
3171 gen_op_movtl_env_T0(offsetof(CPUSPARCState, gsr));
3172 break;
3173 case 0x17: /* Tick compare */
3174 #if !defined(CONFIG_USER_ONLY)
3175 if (!supervisor(dc))
3176 goto illegal_insn;
3177 #endif
3178 {
3179 TCGv r_tickptr;
3180
3181 gen_op_xor_T1_T0();
3182 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3183 tick_cmpr));
3184 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3185 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3186 offsetof(CPUState, tick));
3187 tcg_gen_helper_0_2(helper_tick_set_limit,
3188 r_tickptr, cpu_T[0]);
3189 }
3190 break;
3191 case 0x18: /* System tick */
3192 #if !defined(CONFIG_USER_ONLY)
3193 if (!supervisor(dc))
3194 goto illegal_insn;
3195 #endif
3196 {
3197 TCGv r_tickptr;
3198
3199 gen_op_xor_T1_T0();
3200 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3201 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3202 offsetof(CPUState, stick));
3203 tcg_gen_helper_0_2(helper_tick_set_count,
3204 r_tickptr, cpu_T[0]);
3205 }
3206 break;
3207 case 0x19: /* System tick compare */
3208 #if !defined(CONFIG_USER_ONLY)
3209 if (!supervisor(dc))
3210 goto illegal_insn;
3211 #endif
3212 {
3213 TCGv r_tickptr;
3214
3215 gen_op_xor_T1_T0();
3216 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3217 stick_cmpr));
3218 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3219 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3220 offsetof(CPUState, stick));
3221 tcg_gen_helper_0_2(helper_tick_set_limit,
3222 r_tickptr, cpu_T[0]);
3223 }
3224 break;
3225
3226 case 0x10: /* Performance Control */
3227 case 0x11: /* Performance Instrumentation Counter */
3228 case 0x12: /* Dispatch Control */
3229 case 0x14: /* Softint set */
3230 case 0x15: /* Softint clear */
3231 case 0x16: /* Softint write */
3232 #endif
3233 default:
3234 goto illegal_insn;
3235 }
3236 }
3237 break;
3238 #if !defined(CONFIG_USER_ONLY)
3239 case 0x31: /* wrpsr, V9 saved, restored */
3240 {
3241 if (!supervisor(dc))
3242 goto priv_insn;
3243 #ifdef TARGET_SPARC64
3244 switch (rd) {
3245 case 0:
3246 gen_op_saved();
3247 break;
3248 case 1:
3249 gen_op_restored();
3250 break;
3251 case 2: /* UA2005 allclean */
3252 case 3: /* UA2005 otherw */
3253 case 4: /* UA2005 normalw */
3254 case 5: /* UA2005 invalw */
3255 // XXX
3256 default:
3257 goto illegal_insn;
3258 }
3259 #else
3260 gen_op_xor_T1_T0();
3261 tcg_gen_helper_0_1(helper_wrpsr, cpu_T[0]);
3262 save_state(dc);
3263 gen_op_next_insn();
3264 tcg_gen_exit_tb(0);
3265 dc->is_br = 1;
3266 #endif
3267 }
3268 break;
3269 case 0x32: /* wrwim, V9 wrpr */
3270 {
3271 if (!supervisor(dc))
3272 goto priv_insn;
3273 gen_op_xor_T1_T0();
3274 #ifdef TARGET_SPARC64
3275 switch (rd) {
3276 case 0: // tpc
3277 {
3278 TCGv r_tsptr;
3279
3280 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3281 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3282 offsetof(CPUState, tsptr));
3283 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3284 offsetof(trap_state, tpc));
3285 }
3286 break;
3287 case 1: // tnpc
3288 {
3289 TCGv r_tsptr;
3290
3291 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3292 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3293 offsetof(CPUState, tsptr));
3294 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3295 offsetof(trap_state, tnpc));
3296 }
3297 break;
3298 case 2: // tstate
3299 {
3300 TCGv r_tsptr;
3301
3302 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3303 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3304 offsetof(CPUState, tsptr));
3305 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3306 offsetof(trap_state, tstate));
3307 }
3308 break;
3309 case 3: // tt
3310 {
3311 TCGv r_tsptr;
3312
3313 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3314 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3315 offsetof(CPUState, tsptr));
3316 tcg_gen_st_i32(cpu_T[0], r_tsptr,
3317 offsetof(trap_state, tt));
3318 }
3319 break;
3320 case 4: // tick
3321 {
3322 TCGv r_tickptr;
3323
3324 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3325 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3326 offsetof(CPUState, tick));
3327 tcg_gen_helper_0_2(helper_tick_set_count,
3328 r_tickptr, cpu_T[0]);
3329 }
3330 break;
3331 case 5: // tba
3332 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
3333 break;
3334 case 6: // pstate
3335 save_state(dc);
3336 tcg_gen_helper_0_1(helper_wrpstate, cpu_T[0]);
3337 gen_op_next_insn();
3338 tcg_gen_exit_tb(0);
3339 dc->is_br = 1;
3340 break;
3341 case 7: // tl
3342 gen_op_movl_env_T0(offsetof(CPUSPARCState, tl));
3343 break;
3344 case 8: // pil
3345 gen_op_movl_env_T0(offsetof(CPUSPARCState, psrpil));
3346 break;
3347 case 9: // cwp
3348 gen_op_wrcwp();
3349 break;
3350 case 10: // cansave
3351 gen_op_movl_env_T0(offsetof(CPUSPARCState, cansave));
3352 break;
3353 case 11: // canrestore
3354 gen_op_movl_env_T0(offsetof(CPUSPARCState, canrestore));
3355 break;
3356 case 12: // cleanwin
3357 gen_op_movl_env_T0(offsetof(CPUSPARCState, cleanwin));
3358 break;
3359 case 13: // otherwin
3360 gen_op_movl_env_T0(offsetof(CPUSPARCState, otherwin));
3361 break;
3362 case 14: // wstate
3363 gen_op_movl_env_T0(offsetof(CPUSPARCState, wstate));
3364 break;
3365 case 16: // UA2005 gl
3366 gen_op_movl_env_T0(offsetof(CPUSPARCState, gl));
3367 break;
3368 case 26: // UA2005 strand status
3369 if (!hypervisor(dc))
3370 goto priv_insn;
3371 gen_op_movl_env_T0(offsetof(CPUSPARCState, ssr));
3372 break;
3373 default:
3374 goto illegal_insn;
3375 }
3376 #else
3377 tcg_gen_andi_i32(cpu_T[0], cpu_T[0], ((1 << NWINDOWS) - 1));
3378 gen_op_movl_env_T0(offsetof(CPUSPARCState, wim));
3379 #endif
3380 }
3381 break;
3382 case 0x33: /* wrtbr, UA2005 wrhpr */
3383 {
3384 #ifndef TARGET_SPARC64
3385 if (!supervisor(dc))
3386 goto priv_insn;
3387 gen_op_xor_T1_T0();
3388 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
3389 #else
3390 if (!hypervisor(dc))
3391 goto priv_insn;
3392 gen_op_xor_T1_T0();
3393 switch (rd) {
3394 case 0: // hpstate
3395 // XXX gen_op_wrhpstate();
3396 save_state(dc);
3397 gen_op_next_insn();
3398 tcg_gen_exit_tb(0);
3399 dc->is_br = 1;
3400 break;
3401 case 1: // htstate
3402 // XXX gen_op_wrhtstate();
3403 break;
3404 case 3: // hintp
3405 gen_op_movl_env_T0(offsetof(CPUSPARCState, hintp));
3406 break;
3407 case 5: // htba
3408 gen_op_movl_env_T0(offsetof(CPUSPARCState, htba));
3409 break;
3410 case 31: // hstick_cmpr
3411 {
3412 TCGv r_tickptr;
3413
3414 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3415 hstick_cmpr));
3416 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3417 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3418 offsetof(CPUState, hstick));
3419 tcg_gen_helper_0_2(helper_tick_set_limit,
3420 r_tickptr, cpu_T[0]);
3421 }
3422 break;
3423 case 6: // hver readonly
3424 default:
3425 goto illegal_insn;
3426 }
3427 #endif
3428 }
3429 break;
3430 #endif
3431 #ifdef TARGET_SPARC64
3432 case 0x2c: /* V9 movcc */
3433 {
3434 int cc = GET_FIELD_SP(insn, 11, 12);
3435 int cond = GET_FIELD_SP(insn, 14, 17);
3436 TCGv r_cond;
3437 int l1;
3438
3439 r_cond = tcg_temp_new(TCG_TYPE_TL);
3440 if (insn & (1 << 18)) {
3441 if (cc == 0)
3442 gen_cond(r_cond, 0, cond);
3443 else if (cc == 2)
3444 gen_cond(r_cond, 1, cond);
3445 else
3446 goto illegal_insn;
3447 } else {
3448 gen_fcond(r_cond, cc, cond);
3449 }
3450
3451 l1 = gen_new_label();
3452
3453 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond,
3454 tcg_const_tl(0), l1);
3455 if (IS_IMM) { /* immediate */
3456 rs2 = GET_FIELD_SPs(insn, 0, 10);
3457 gen_movl_simm_T1(rs2);
3458 } else {
3459 rs2 = GET_FIELD_SP(insn, 0, 4);
3460 gen_movl_reg_T1(rs2);
3461 }
3462 gen_movl_T1_reg(rd);
3463 gen_set_label(l1);
3464 break;
3465 }
3466 case 0x2d: /* V9 sdivx */
3467 gen_op_sdivx_T1_T0();
3468 gen_movl_T0_reg(rd);
3469 break;
3470 case 0x2e: /* V9 popc */
3471 {
3472 if (IS_IMM) { /* immediate */
3473 rs2 = GET_FIELD_SPs(insn, 0, 12);
3474 gen_movl_simm_T1(rs2);
3475 // XXX optimize: popc(constant)
3476 }
3477 else {
3478 rs2 = GET_FIELD_SP(insn, 0, 4);
3479 gen_movl_reg_T1(rs2);
3480 }
3481 tcg_gen_helper_1_1(helper_popc, cpu_T[0],
3482 cpu_T[1]);
3483 gen_movl_T0_reg(rd);
3484 }
3485 case 0x2f: /* V9 movr */
3486 {
3487 int cond = GET_FIELD_SP(insn, 10, 12);
3488 TCGv r_zero;
3489 int l1;
3490
3491 rs1 = GET_FIELD(insn, 13, 17);
3492 gen_movl_reg_T0(rs1);
3493
3494 l1 = gen_new_label();
3495
3496 r_zero = tcg_const_tl(0);
3497 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], r_zero, l1);
3498 if (IS_IMM) { /* immediate */
3499 rs2 = GET_FIELD_SPs(insn, 0, 9);
3500 gen_movl_simm_T1(rs2);
3501 } else {
3502 rs2 = GET_FIELD_SP(insn, 0, 4);
3503 gen_movl_reg_T1(rs2);
3504 }
3505 gen_movl_T1_reg(rd);
3506 gen_set_label(l1);
3507 break;
3508 }
3509 #endif
3510 default:
3511 goto illegal_insn;
3512 }
3513 }
3514 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3515 #ifdef TARGET_SPARC64
3516 int opf = GET_FIELD_SP(insn, 5, 13);
3517 rs1 = GET_FIELD(insn, 13, 17);
3518 rs2 = GET_FIELD(insn, 27, 31);
3519 if (gen_trap_ifnofpu(dc))
3520 goto jmp_insn;
3521
3522 switch (opf) {
3523 case 0x000: /* VIS I edge8cc */
3524 case 0x001: /* VIS II edge8n */
3525 case 0x002: /* VIS I edge8lcc */
3526 case 0x003: /* VIS II edge8ln */
3527 case 0x004: /* VIS I edge16cc */
3528 case 0x005: /* VIS II edge16n */
3529 case 0x006: /* VIS I edge16lcc */
3530 case 0x007: /* VIS II edge16ln */
3531 case 0x008: /* VIS I edge32cc */
3532 case 0x009: /* VIS II edge32n */
3533 case 0x00a: /* VIS I edge32lcc */
3534 case 0x00b: /* VIS II edge32ln */
3535 // XXX
3536 goto illegal_insn;
3537 case 0x010: /* VIS I array8 */
3538 gen_movl_reg_T0(rs1);
3539 gen_movl_reg_T1(rs2);
3540 gen_op_array8();
3541 gen_movl_T0_reg(rd);
3542 break;
3543 case 0x012: /* VIS I array16 */
3544 gen_movl_reg_T0(rs1);
3545 gen_movl_reg_T1(rs2);
3546 gen_op_array16();
3547 gen_movl_T0_reg(rd);
3548 break;
3549 case 0x014: /* VIS I array32 */
3550 gen_movl_reg_T0(rs1);
3551 gen_movl_reg_T1(rs2);
3552 gen_op_array32();
3553 gen_movl_T0_reg(rd);
3554 break;
3555 case 0x018: /* VIS I alignaddr */
3556 gen_movl_reg_T0(rs1);
3557 gen_movl_reg_T1(rs2);
3558 gen_op_alignaddr();
3559 gen_movl_T0_reg(rd);
3560 break;
3561 case 0x019: /* VIS II bmask */
3562 case 0x01a: /* VIS I alignaddrl */
3563 // XXX
3564 goto illegal_insn;
3565 case 0x020: /* VIS I fcmple16 */
3566 gen_op_load_fpr_DT0(DFPREG(rs1));
3567 gen_op_load_fpr_DT1(DFPREG(rs2));
3568 gen_op_fcmple16();
3569 gen_op_store_DT0_fpr(DFPREG(rd));
3570 break;
3571 case 0x022: /* VIS I fcmpne16 */
3572 gen_op_load_fpr_DT0(DFPREG(rs1));
3573 gen_op_load_fpr_DT1(DFPREG(rs2));
3574 gen_op_fcmpne16();
3575 gen_op_store_DT0_fpr(DFPREG(rd));
3576 break;
3577 case 0x024: /* VIS I fcmple32 */
3578 gen_op_load_fpr_DT0(DFPREG(rs1));
3579 gen_op_load_fpr_DT1(DFPREG(rs2));
3580 gen_op_fcmple32();
3581 gen_op_store_DT0_fpr(DFPREG(rd));
3582 break;
3583 case 0x026: /* VIS I fcmpne32 */
3584 gen_op_load_fpr_DT0(DFPREG(rs1));
3585 gen_op_load_fpr_DT1(DFPREG(rs2));
3586 gen_op_fcmpne32();
3587 gen_op_store_DT0_fpr(DFPREG(rd));
3588 break;
3589 case 0x028: /* VIS I fcmpgt16 */
3590 gen_op_load_fpr_DT0(DFPREG(rs1));
3591 gen_op_load_fpr_DT1(DFPREG(rs2));
3592 gen_op_fcmpgt16();
3593 gen_op_store_DT0_fpr(DFPREG(rd));
3594 break;
3595 case 0x02a: /* VIS I fcmpeq16 */
3596 gen_op_load_fpr_DT0(DFPREG(rs1));
3597 gen_op_load_fpr_DT1(DFPREG(rs2));
3598 gen_op_fcmpeq16();
3599 gen_op_store_DT0_fpr(DFPREG(rd));
3600 break;
3601 case 0x02c: /* VIS I fcmpgt32 */
3602 gen_op_load_fpr_DT0(DFPREG(rs1));
3603 gen_op_load_fpr_DT1(DFPREG(rs2));
3604 gen_op_fcmpgt32();
3605 gen_op_store_DT0_fpr(DFPREG(rd));
3606 break;
3607 case 0x02e: /* VIS I fcmpeq32 */
3608 gen_op_load_fpr_DT0(DFPREG(rs1));
3609 gen_op_load_fpr_DT1(DFPREG(rs2));
3610 gen_op_fcmpeq32();
3611 gen_op_store_DT0_fpr(DFPREG(rd));
3612 break;
3613 case 0x031: /* VIS I fmul8x16 */
3614 gen_op_load_fpr_DT0(DFPREG(rs1));
3615 gen_op_load_fpr_DT1(DFPREG(rs2));
3616 gen_op_fmul8x16();
3617 gen_op_store_DT0_fpr(DFPREG(rd));
3618 break;
3619 case 0x033: /* VIS I fmul8x16au */
3620 gen_op_load_fpr_DT0(DFPREG(rs1));
3621 gen_op_load_fpr_DT1(DFPREG(rs2));
3622 gen_op_fmul8x16au();
3623 gen_op_store_DT0_fpr(DFPREG(rd));
3624 break;
3625 case 0x035: /* VIS I fmul8x16al */
3626 gen_op_load_fpr_DT0(DFPREG(rs1));
3627 gen_op_load_fpr_DT1(DFPREG(rs2));
3628 gen_op_fmul8x16al();
3629 gen_op_store_DT0_fpr(DFPREG(rd));
3630 break;
3631 case 0x036: /* VIS I fmul8sux16 */
3632 gen_op_load_fpr_DT0(DFPREG(rs1));
3633 gen_op_load_fpr_DT1(DFPREG(rs2));
3634 gen_op_fmul8sux16();
3635 gen_op_store_DT0_fpr(DFPREG(rd));
3636 break;
3637 case 0x037: /* VIS I fmul8ulx16 */
3638 gen_op_load_fpr_DT0(DFPREG(rs1));
3639 gen_op_load_fpr_DT1(DFPREG(rs2));
3640 gen_op_fmul8ulx16();
3641 gen_op_store_DT0_fpr(DFPREG(rd));
3642 break;
3643 case 0x038: /* VIS I fmuld8sux16 */
3644 gen_op_load_fpr_DT0(DFPREG(rs1));
3645 gen_op_load_fpr_DT1(DFPREG(rs2));
3646 gen_op_fmuld8sux16();
3647 gen_op_store_DT0_fpr(DFPREG(rd));
3648 break;
3649 case 0x039: /* VIS I fmuld8ulx16 */
3650 gen_op_load_fpr_DT0(DFPREG(rs1));
3651 gen_op_load_fpr_DT1(DFPREG(rs2));
3652 gen_op_fmuld8ulx16();
3653 gen_op_store_DT0_fpr(DFPREG(rd));
3654 break;
3655 case 0x03a: /* VIS I fpack32 */
3656 case 0x03b: /* VIS I fpack16 */
3657 case 0x03d: /* VIS I fpackfix */
3658 case 0x03e: /* VIS I pdist */
3659 // XXX
3660 goto illegal_insn;
3661 case 0x048: /* VIS I faligndata */
3662 gen_op_load_fpr_DT0(DFPREG(rs1));
3663 gen_op_load_fpr_DT1(DFPREG(rs2));
3664 gen_op_faligndata();
3665 gen_op_store_DT0_fpr(DFPREG(rd));
3666 break;
3667 case 0x04b: /* VIS I fpmerge */
3668 gen_op_load_fpr_DT0(DFPREG(rs1));
3669 gen_op_load_fpr_DT1(DFPREG(rs2));
3670 gen_op_fpmerge();
3671 gen_op_store_DT0_fpr(DFPREG(rd));
3672 break;
3673 case 0x04c: /* VIS II bshuffle */
3674 // XXX
3675 goto illegal_insn;
3676 case 0x04d: /* VIS I fexpand */
3677 gen_op_load_fpr_DT0(DFPREG(rs1));
3678 gen_op_load_fpr_DT1(DFPREG(rs2));
3679 gen_op_fexpand();
3680 gen_op_store_DT0_fpr(DFPREG(rd));
3681 break;
3682 case 0x050: /* VIS I fpadd16 */
3683 gen_op_load_fpr_DT0(DFPREG(rs1));
3684 gen_op_load_fpr_DT1(DFPREG(rs2));
3685 gen_op_fpadd16();
3686 gen_op_store_DT0_fpr(DFPREG(rd));
3687 break;
3688 case 0x051: /* VIS I fpadd16s */
3689 gen_op_load_fpr_FT0(rs1);
3690 gen_op_load_fpr_FT1(rs2);
3691 gen_op_fpadd16s();
3692 gen_op_store_FT0_fpr(rd);
3693 break;
3694 case 0x052: /* VIS I fpadd32 */
3695 gen_op_load_fpr_DT0(DFPREG(rs1));
3696 gen_op_load_fpr_DT1(DFPREG(rs2));
3697 gen_op_fpadd32();
3698 gen_op_store_DT0_fpr(DFPREG(rd));
3699 break;
3700 case 0x053: /* VIS I fpadd32s */
3701 gen_op_load_fpr_FT0(rs1);
3702 gen_op_load_fpr_FT1(rs2);
3703 gen_op_fpadd32s();
3704 gen_op_store_FT0_fpr(rd);
3705 break;
3706 case 0x054: /* VIS I fpsub16 */
3707 gen_op_load_fpr_DT0(DFPREG(rs1));
3708 gen_op_load_fpr_DT1(DFPREG(rs2));
3709 gen_op_fpsub16();
3710 gen_op_store_DT0_fpr(DFPREG(rd));
3711 break;
3712 case 0x055: /* VIS I fpsub16s */
3713 gen_op_load_fpr_FT0(rs1);
3714 gen_op_load_fpr_FT1(rs2);
3715 gen_op_fpsub16s();
3716 gen_op_store_FT0_fpr(rd);
3717 break;
3718 case 0x056: /* VIS I fpsub32 */
3719 gen_op_load_fpr_DT0(DFPREG(rs1));
3720 gen_op_load_fpr_DT1(DFPREG(rs2));
3721 gen_op_fpadd32();
3722 gen_op_store_DT0_fpr(DFPREG(rd));
3723 break;
3724 case 0x057: /* VIS I fpsub32s */
3725 gen_op_load_fpr_FT0(rs1);
3726 gen_op_load_fpr_FT1(rs2);
3727 gen_op_fpsub32s();
3728 gen_op_store_FT0_fpr(rd);
3729 break;
3730 case 0x060: /* VIS I fzero */
3731 gen_op_movl_DT0_0();
3732 gen_op_store_DT0_fpr(DFPREG(rd));
3733 break;
3734 case 0x061: /* VIS I fzeros */
3735 gen_op_movl_FT0_0();
3736 gen_op_store_FT0_fpr(rd);
3737 break;
3738 case 0x062: /* VIS I fnor */
3739 gen_op_load_fpr_DT0(DFPREG(rs1));
3740 gen_op_load_fpr_DT1(DFPREG(rs2));
3741 gen_op_fnor();
3742 gen_op_store_DT0_fpr(DFPREG(rd));
3743 break;
3744 case 0x063: /* VIS I fnors */
3745 gen_op_load_fpr_FT0(rs1);
3746 gen_op_load_fpr_FT1(rs2);
3747 gen_op_fnors();
3748 gen_op_store_FT0_fpr(rd);
3749 break;
3750 case 0x064: /* VIS I fandnot2 */
3751 gen_op_load_fpr_DT1(DFPREG(rs1));
3752 gen_op_load_fpr_DT0(DFPREG(rs2));
3753 gen_op_fandnot();
3754 gen_op_store_DT0_fpr(DFPREG(rd));
3755 break;
3756 case 0x065: /* VIS I fandnot2s */
3757 gen_op_load_fpr_FT1(rs1);
3758 gen_op_load_fpr_FT0(rs2);
3759 gen_op_fandnots();
3760 gen_op_store_FT0_fpr(rd);
3761 break;
3762 case 0x066: /* VIS I fnot2 */
3763 gen_op_load_fpr_DT1(DFPREG(rs2));
3764 gen_op_fnot();
3765 gen_op_store_DT0_fpr(DFPREG(rd));
3766 break;
3767 case 0x067: /* VIS I fnot2s */
3768 gen_op_load_fpr_FT1(rs2);
3769 gen_op_fnot();
3770 gen_op_store_FT0_fpr(rd);
3771 break;
3772 case 0x068: /* VIS I fandnot1 */
3773 gen_op_load_fpr_DT0(DFPREG(rs1));
3774 gen_op_load_fpr_DT1(DFPREG(rs2));
3775 gen_op_fandnot();
3776 gen_op_store_DT0_fpr(DFPREG(rd));
3777 break;
3778 case 0x069: /* VIS I fandnot1s */
3779 gen_op_load_fpr_FT0(rs1);
3780 gen_op_load_fpr_FT1(rs2);
3781 gen_op_fandnots();
3782 gen_op_store_FT0_fpr(rd);
3783 break;
3784 case 0x06a: /* VIS I fnot1 */
3785 gen_op_load_fpr_DT1(DFPREG(rs1));
3786 gen_op_fnot();
3787 gen_op_store_DT0_fpr(DFPREG(rd));
3788 break;
3789 case 0x06b: /* VIS I fnot1s */
3790 gen_op_load_fpr_FT1(rs1);
3791 gen_op_fnot();
3792 gen_op_store_FT0_fpr(rd);
3793 break;
3794 case 0x06c: /* VIS I fxor */
3795 gen_op_load_fpr_DT0(DFPREG(rs1));
3796 gen_op_load_fpr_DT1(DFPREG(rs2));
3797 gen_op_fxor();
3798 gen_op_store_DT0_fpr(DFPREG(rd));
3799 break;
3800 case 0x06d: /* VIS I fxors */
3801 gen_op_load_fpr_FT0(rs1);
3802 gen_op_load_fpr_FT1(rs2);
3803 gen_op_fxors();
3804 gen_op_store_FT0_fpr(rd);
3805 break;
3806 case 0x06e: /* VIS I fnand */
3807 gen_op_load_fpr_DT0(DFPREG(rs1));
3808 gen_op_load_fpr_DT1(DFPREG(rs2));
3809 gen_op_fnand();
3810 gen_op_store_DT0_fpr(DFPREG(rd));
3811 break;
3812 case 0x06f: /* VIS I fnands */
3813 gen_op_load_fpr_FT0(rs1);
3814 gen_op_load_fpr_FT1(rs2);
3815 gen_op_fnands();
3816 gen_op_store_FT0_fpr(rd);
3817 break;
3818 case 0x070: /* VIS I fand */
3819 gen_op_load_fpr_DT0(DFPREG(rs1));
3820 gen_op_load_fpr_DT1(DFPREG(rs2));
3821 gen_op_fand();
3822 gen_op_store_DT0_fpr(DFPREG(rd));
3823 break;
3824 case 0x071: /* VIS I fands */
3825 gen_op_load_fpr_FT0(rs1);
3826 gen_op_load_fpr_FT1(rs2);
3827 gen_op_fands();
3828 gen_op_store_FT0_fpr(rd);
3829 break;
3830 case 0x072: /* VIS I fxnor */
3831 gen_op_load_fpr_DT0(DFPREG(rs1));
3832 gen_op_load_fpr_DT1(DFPREG(rs2));
3833 gen_op_fxnor();
3834 gen_op_store_DT0_fpr(DFPREG(rd));
3835 break;
3836 case 0x073: /* VIS I fxnors */
3837 gen_op_load_fpr_FT0(rs1);
3838 gen_op_load_fpr_FT1(rs2);
3839 gen_op_fxnors();
3840 gen_op_store_FT0_fpr(rd);
3841 break;
3842 case 0x074: /* VIS I fsrc1 */
3843 gen_op_load_fpr_DT0(DFPREG(rs1));
3844 gen_op_store_DT0_fpr(DFPREG(rd));
3845 break;
3846 case 0x075: /* VIS I fsrc1s */
3847 gen_op_load_fpr_FT0(rs1);
3848 gen_op_store_FT0_fpr(rd);
3849 break;
3850 case 0x076: /* VIS I fornot2 */
3851 gen_op_load_fpr_DT1(DFPREG(rs1));
3852 gen_op_load_fpr_DT0(DFPREG(rs2));
3853 gen_op_fornot();
3854 gen_op_store_DT0_fpr(DFPREG(rd));
3855 break;
3856 case 0x077: /* VIS I fornot2s */
3857 gen_op_load_fpr_FT1(rs1);
3858 gen_op_load_fpr_FT0(rs2);
3859 gen_op_fornots();
3860 gen_op_store_FT0_fpr(rd);
3861 break;
3862 case 0x078: /* VIS I fsrc2 */
3863 gen_op_load_fpr_DT0(DFPREG(rs2));
3864 gen_op_store_DT0_fpr(DFPREG(rd));
3865 break;
3866 case 0x079: /* VIS I fsrc2s */
3867 gen_op_load_fpr_FT0(rs2);
3868 gen_op_store_FT0_fpr(rd);
3869 break;
3870 case 0x07a: /* VIS I fornot1 */
3871 gen_op_load_fpr_DT0(DFPREG(rs1));
3872 gen_op_load_fpr_DT1(DFPREG(rs2));
3873 gen_op_fornot();
3874 gen_op_store_DT0_fpr(DFPREG(rd));
3875 break;
3876 case 0x07b: /* VIS I fornot1s */
3877 gen_op_load_fpr_FT0(rs1);
3878 gen_op_load_fpr_FT1(rs2);
3879 gen_op_fornots();
3880 gen_op_store_FT0_fpr(rd);
3881 break;
3882 case 0x07c: /* VIS I for */
3883 gen_op_load_fpr_DT0(DFPREG(rs1));
3884 gen_op_load_fpr_DT1(DFPREG(rs2));
3885 gen_op_for();
3886 gen_op_store_DT0_fpr(DFPREG(rd));
3887 break;
3888 case 0x07d: /* VIS I fors */
3889 gen_op_load_fpr_FT0(rs1);
3890 gen_op_load_fpr_FT1(rs2);
3891 gen_op_fors();
3892 gen_op_store_FT0_fpr(rd);
3893 break;
3894 case 0x07e: /* VIS I fone */
3895 gen_op_movl_DT0_1();
3896 gen_op_store_DT0_fpr(DFPREG(rd));
3897 break;
3898 case 0x07f: /* VIS I fones */
3899 gen_op_movl_FT0_1();
3900 gen_op_store_FT0_fpr(rd);
3901 break;
3902 case 0x080: /* VIS I shutdown */
3903 case 0x081: /* VIS II siam */
3904 // XXX
3905 goto illegal_insn;
3906 default:
3907 goto illegal_insn;
3908 }
3909 #else
3910 goto ncp_insn;
3911 #endif
3912 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3913 #ifdef TARGET_SPARC64
3914 goto illegal_insn;
3915 #else
3916 goto ncp_insn;
3917 #endif
3918 #ifdef TARGET_SPARC64
3919 } else if (xop == 0x39) { /* V9 return */
3920 rs1 = GET_FIELD(insn, 13, 17);
3921 save_state(dc);
3922 gen_movl_reg_T0(rs1);
3923 if (IS_IMM) { /* immediate */
3924 rs2 = GET_FIELDs(insn, 19, 31);
3925 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3926 } else { /* register */
3927 rs2 = GET_FIELD(insn, 27, 31);
3928 #if defined(OPTIM)
3929 if (rs2) {
3930 #endif
3931 gen_movl_reg_T1(rs2);
3932 gen_op_add_T1_T0();
3933 #if defined(OPTIM)
3934 }
3935 #endif
3936 }
3937 gen_op_restore();
3938 gen_mov_pc_npc(dc);
3939 gen_op_check_align_T0_3();
3940 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3941 dc->npc = DYNAMIC_PC;
3942 goto jmp_insn;
3943 #endif
3944 } else {
3945 rs1 = GET_FIELD(insn, 13, 17);
3946 gen_movl_reg_T0(rs1);
3947 if (IS_IMM) { /* immediate */
3948 rs2 = GET_FIELDs(insn, 19, 31);
3949 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3950 } else { /* register */
3951 rs2 = GET_FIELD(insn, 27, 31);
3952 #if defined(OPTIM)
3953 if (rs2) {
3954 #endif
3955 gen_movl_reg_T1(rs2);
3956 gen_op_add_T1_T0();
3957 #if defined(OPTIM)
3958 }
3959 #endif
3960 }
3961 switch (xop) {
3962 case 0x38: /* jmpl */
3963 {
3964 if (rd != 0) {
3965 tcg_gen_movi_tl(cpu_T[1], dc->pc);
3966 gen_movl_T1_reg(rd);
3967 }
3968 gen_mov_pc_npc(dc);
3969 gen_op_check_align_T0_3();
3970 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3971 dc->npc = DYNAMIC_PC;
3972 }
3973 goto jmp_insn;
3974 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
3975 case 0x39: /* rett, V9 return */
3976 {
3977 if (!supervisor(dc))
3978 goto priv_insn;
3979 gen_mov_pc_npc(dc);
3980 gen_op_check_align_T0_3();
3981 tcg_gen_st_tl(cpu_T[0], cpu_env, offsetof(CPUSPARCState, npc));
3982 dc->npc = DYNAMIC_PC;
3983 tcg_gen_helper_0_0(helper_rett);
3984 }
3985 goto jmp_insn;
3986 #endif
3987 case 0x3b: /* flush */
3988 tcg_gen_helper_0_1(helper_flush, cpu_T[0]);
3989 break;
3990 case 0x3c: /* save */
3991 save_state(dc);
3992 gen_op_save();
3993 gen_movl_T0_reg(rd);
3994 break;
3995 case 0x3d: /* restore */
3996 save_state(dc);
3997 gen_op_restore();
3998 gen_movl_T0_reg(rd);
3999 break;
4000 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4001 case 0x3e: /* V9 done/retry */
4002 {
4003 switch (rd) {
4004 case 0:
4005 if (!supervisor(dc))
4006 goto priv_insn;
4007 dc->npc = DYNAMIC_PC;
4008 dc->pc = DYNAMIC_PC;
4009 tcg_gen_helper_0_0(helper_done);
4010 goto jmp_insn;
4011 case 1:
4012 if (!supervisor(dc))
4013 goto priv_insn;
4014 dc->npc = DYNAMIC_PC;
4015 dc->pc = DYNAMIC_PC;
4016 tcg_gen_helper_0_0(helper_retry);
4017 goto jmp_insn;
4018 default:
4019 goto illegal_insn;
4020 }
4021 }
4022 break;
4023 #endif
4024 default:
4025 goto illegal_insn;
4026 }
4027 }
4028 break;
4029 }
4030 break;
4031 case 3: /* load/store instructions */
4032 {
4033 unsigned int xop = GET_FIELD(insn, 7, 12);
4034 rs1 = GET_FIELD(insn, 13, 17);
4035 save_state(dc);
4036 gen_movl_reg_T0(rs1);
4037 if (xop == 0x3c || xop == 0x3e)
4038 {
4039 rs2 = GET_FIELD(insn, 27, 31);
4040 gen_movl_reg_T1(rs2);
4041 }
4042 else if (IS_IMM) { /* immediate */
4043 rs2 = GET_FIELDs(insn, 19, 31);
4044 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
4045 } else { /* register */
4046 rs2 = GET_FIELD(insn, 27, 31);
4047 #if defined(OPTIM)
4048 if (rs2 != 0) {
4049 #endif
4050 gen_movl_reg_T1(rs2);
4051 gen_op_add_T1_T0();
4052 #if defined(OPTIM)
4053 }
4054 #endif
4055 }
4056 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4057 (xop > 0x17 && xop <= 0x1d ) ||
4058 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4059 switch (xop) {
4060 case 0x0: /* load unsigned word */
4061 gen_op_check_align_T0_3();
4062 ABI32_MASK(cpu_T[0]);
4063 tcg_gen_qemu_ld32u(cpu_T[1], cpu_T[0], dc->mem_idx);
4064 break;
4065 case 0x1: /* load unsigned byte */
4066 ABI32_MASK(cpu_T[0]);
4067 tcg_gen_qemu_ld8u(cpu_T[1], cpu_T[0], dc->mem_idx);
4068 break;
4069 case 0x2: /* load unsigned halfword */
4070 gen_op_check_align_T0_1();
4071 ABI32_MASK(cpu_T[0]);
4072 tcg_gen_qemu_ld16u(cpu_T[1], cpu_T[0], dc->mem_idx);
4073 break;
4074 case 0x3: /* load double word */
4075 if (rd & 1)
4076 goto illegal_insn;
4077 else {
4078 TCGv r_dword;
4079
4080 r_dword = tcg_temp_new(TCG_TYPE_I64);
4081 gen_op_check_align_T0_7();
4082 ABI32_MASK(cpu_T[0]);
4083 tcg_gen_qemu_ld64(r_dword, cpu_T[0], dc->mem_idx);
4084 tcg_gen_trunc_i64_i32(cpu_T[0], r_dword);
4085 gen_movl_T0_reg(rd + 1);
4086 tcg_gen_shri_i64(r_dword, r_dword, 32);
4087 tcg_gen_trunc_i64_i32(cpu_T[1], r_dword);
4088 }
4089 break;
4090 case 0x9: /* load signed byte */
4091 ABI32_MASK(cpu_T[0]);
4092 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
4093 break;
4094 case 0xa: /* load signed halfword */
4095 gen_op_check_align_T0_1();
4096 ABI32_MASK(cpu_T[0]);
4097 tcg_gen_qemu_ld16s(cpu_T[1], cpu_T[0], dc->mem_idx);
4098 break;
4099 case 0xd: /* ldstub -- XXX: should be atomically */
4100 tcg_gen_movi_i32(cpu_tmp0, 0xff);
4101 ABI32_MASK(cpu_T[0]);
4102 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
4103 tcg_gen_qemu_st8(cpu_tmp0, cpu_T[0], dc->mem_idx);
4104 break;
4105 case 0x0f: /* swap register with memory. Also atomically */
4106 gen_op_check_align_T0_3();
4107 gen_movl_reg_T1(rd);
4108 ABI32_MASK(cpu_T[0]);
4109 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_T[0], dc->mem_idx);
4110 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
4111 tcg_gen_mov_i32(cpu_T[1], cpu_tmp0);
4112 break;
4113 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4114 case 0x10: /* load word alternate */
4115 #ifndef TARGET_SPARC64
4116 if (IS_IMM)
4117 goto illegal_insn;
4118 if (!supervisor(dc))
4119 goto priv_insn;
4120 #endif
4121 gen_op_check_align_T0_3();
4122 gen_ld_asi(insn, 4, 0);
4123 break;
4124 case 0x11: /* load unsigned byte alternate */
4125 #ifndef TARGET_SPARC64
4126 if (IS_IMM)
4127 goto illegal_insn;
4128 if (!supervisor(dc))
4129 goto priv_insn;
4130 #endif
4131 gen_ld_asi(insn, 1, 0);
4132 break;
4133 case 0x12: /* load unsigned halfword alternate */
4134 #ifndef TARGET_SPARC64
4135 if (IS_IMM)
4136 goto illegal_insn;
4137 if (!supervisor(dc))
4138 goto priv_insn;
4139 #endif
4140 gen_op_check_align_T0_1();
4141 gen_ld_asi(insn, 2, 0);
4142 break;
4143 case 0x13: /* load double word alternate */
4144 #ifndef TARGET_SPARC64
4145 if (IS_IMM)
4146 goto illegal_insn;
4147 if (!supervisor(dc))
4148 goto priv_insn;
4149 #endif
4150 if (rd & 1)
4151 goto illegal_insn;
4152 gen_op_check_align_T0_7();
4153 gen_ldda_asi(insn);
4154 gen_movl_T0_reg(rd + 1);
4155 break;
4156 case 0x19: /* load signed byte alternate */
4157 #ifndef TARGET_SPARC64
4158 if (IS_IMM)
4159 goto illegal_insn;
4160 if (!supervisor(dc))
4161 goto priv_insn;
4162 #endif
4163 gen_ld_asi(insn, 1, 1);
4164 break;
4165 case 0x1a: /* load signed halfword alternate */
4166 #ifndef TARGET_SPARC64
4167 if (IS_IMM)
4168 goto illegal_insn;
4169 if (!supervisor(dc))
4170 goto priv_insn;
4171 #endif
4172 gen_op_check_align_T0_1();
4173 gen_ld_asi(insn, 2, 1);
4174 break;
4175 case 0x1d: /* ldstuba -- XXX: should be atomically */
4176 #ifndef TARGET_SPARC64
4177 if (IS_IMM)
4178 goto illegal_insn;
4179 if (!supervisor(dc))
4180 goto priv_insn;
4181 #endif
4182 gen_ldstub_asi(insn);
4183 break;
4184 case 0x1f: /* swap reg with alt. memory. Also atomically */
4185 #ifndef TARGET_SPARC64
4186 if (IS_IMM)
4187 goto illegal_insn;
4188 if (!supervisor(dc))
4189 goto priv_insn;
4190 #endif
4191 gen_op_check_align_T0_3();
4192 gen_movl_reg_T1(rd);
4193 gen_swap_asi(insn);
4194 break;
4195
4196 #ifndef TARGET_SPARC64
4197 case 0x30: /* ldc */
4198 case 0x31: /* ldcsr */
4199 case 0x33: /* lddc */
4200 goto ncp_insn;
4201 #endif
4202 #endif
4203 #ifdef TARGET_SPARC64
4204 case 0x08: /* V9 ldsw */
4205 gen_op_check_align_T0_3();
4206 ABI32_MASK(cpu_T[0]);
4207 tcg_gen_qemu_ld32s(cpu_T[1], cpu_T[0], dc->mem_idx);
4208 break;
4209 case 0x0b: /* V9 ldx */
4210 gen_op_check_align_T0_7();
4211 ABI32_MASK(cpu_T[0]);
4212 tcg_gen_qemu_ld64(cpu_T[1], cpu_T[0], dc->mem_idx);
4213 break;
4214 case 0x18: /* V9 ldswa */
4215 gen_op_check_align_T0_3();
4216 gen_ld_asi(insn, 4, 1);
4217 break;
4218 case 0x1b: /* V9 ldxa */
4219 gen_op_check_align_T0_7();
4220 gen_ld_asi(insn, 8, 0);
4221 break;
4222 case 0x2d: /* V9 prefetch, no effect */
4223 goto skip_move;
4224 case 0x30: /* V9 ldfa */
4225 gen_op_check_align_T0_3();
4226 gen_ldf_asi(insn, 4, rd);
4227 goto skip_move;
4228 case 0x33: /* V9 lddfa */
4229 gen_op_check_align_T0_3();
4230 gen_ldf_asi(insn, 8, DFPREG(rd));
4231 goto skip_move;
4232 case 0x3d: /* V9 prefetcha, no effect */
4233 goto skip_move;
4234 case 0x32: /* V9 ldqfa */
4235 #if defined(CONFIG_USER_ONLY)
4236 gen_op_check_align_T0_3();
4237 gen_ldf_asi(insn, 16, QFPREG(rd));
4238 goto skip_move;
4239 #else
4240 goto nfpu_insn;
4241 #endif
4242 #endif
4243 default:
4244 goto illegal_insn;
4245 }
4246 gen_movl_T1_reg(rd);
4247 #ifdef TARGET_SPARC64
4248 skip_move: ;
4249 #endif
4250 } else if (xop >= 0x20 && xop < 0x24) {
4251 if (gen_trap_ifnofpu(dc))
4252 goto jmp_insn;
4253 switch (xop) {
4254 case 0x20: /* load fpreg */
4255 gen_op_check_align_T0_3();
4256 gen_op_ldst(ldf);
4257 gen_op_store_FT0_fpr(rd);
4258 break;
4259 case 0x21: /* load fsr */
4260 gen_op_check_align_T0_3();
4261 gen_op_ldst(ldf);
4262 tcg_gen_helper_0_0(helper_ldfsr);
4263 break;
4264 case 0x22: /* load quad fpreg */
4265 #if defined(CONFIG_USER_ONLY)
4266 gen_op_check_align_T0_7();
4267 gen_op_ldst(ldqf);
4268 gen_op_store_QT0_fpr(QFPREG(rd));
4269 break;
4270 #else
4271 goto nfpu_insn;
4272 #endif
4273 case 0x23: /* load double fpreg */
4274 gen_op_check_align_T0_7();
4275 gen_op_ldst(lddf);
4276 gen_op_store_DT0_fpr(DFPREG(rd));
4277 break;
4278 default:
4279 goto illegal_insn;
4280 }
4281 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4282 xop == 0xe || xop == 0x1e) {
4283 gen_movl_reg_T1(rd);
4284 switch (xop) {
4285 case 0x4: /* store word */
4286 gen_op_check_align_T0_3();
4287 ABI32_MASK(cpu_T[0]);
4288 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
4289 break;
4290 case 0x5: /* store byte */
4291 ABI32_MASK(cpu_T[0]);
4292 tcg_gen_qemu_st8(cpu_T[1], cpu_T[0], dc->mem_idx);
4293 break;
4294 case 0x6: /* store halfword */
4295 gen_op_check_align_T0_1();
4296 ABI32_MASK(cpu_T[0]);
4297 tcg_gen_qemu_st16(cpu_T[1], cpu_T[0], dc->mem_idx);
4298 break;
4299 case 0x7: /* store double word */
4300 if (rd & 1)
4301 goto illegal_insn;
4302 #ifndef __i386__
4303 else {
4304 TCGv r_dword, r_low;
4305
4306 gen_op_check_align_T0_7();
4307 r_dword = tcg_temp_new(TCG_TYPE_I64);
4308 r_low = tcg_temp_new(TCG_TYPE_I32);
4309 gen_movl_reg_TN(rd + 1, r_low);
4310 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
4311 r_low);
4312 tcg_gen_qemu_st64(r_dword, cpu_T[0], dc->mem_idx);
4313 }
4314 #else /* __i386__ */
4315 gen_op_check_align_T0_7();
4316 flush_T2(dc);
4317 gen_movl_reg_T2(rd + 1);
4318 gen_op_ldst(std);
4319 #endif /* __i386__ */
4320 break;
4321 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4322 case 0x14: /* store word alternate */
4323 #ifndef TARGET_SPARC64
4324 if (IS_IMM)
4325 goto illegal_insn;
4326 if (!supervisor(dc))
4327 goto priv_insn;
4328 #endif
4329 gen_op_check_align_T0_3();
4330 gen_st_asi(insn, 4);
4331 break;
4332 case 0x15: /* store byte alternate */
4333 #ifndef TARGET_SPARC64
4334 if (IS_IMM)
4335 goto illegal_insn;
4336 if (!supervisor(dc))
4337 goto priv_insn;
4338 #endif
4339 gen_st_asi(insn, 1);
4340 break;
4341 case 0x16: /* store halfword alternate */
4342 #ifndef TARGET_SPARC64
4343 if (IS_IMM)
4344 goto illegal_insn;
4345 if (!supervisor(dc))
4346 goto priv_insn;
4347 #endif
4348 gen_op_check_align_T0_1();
4349 gen_st_asi(insn, 2);
4350 break;
4351 case 0x17: /* store double word alternate */
4352 #ifndef TARGET_SPARC64
4353 if (IS_IMM)
4354 goto illegal_insn;
4355 if (!supervisor(dc))
4356 goto priv_insn;
4357 #endif
4358 if (rd & 1)
4359 goto illegal_insn;
4360 else {
4361 int asi;
4362 TCGv r_dword, r_temp, r_size;
4363
4364 gen_op_check_align_T0_7();
4365 r_dword = tcg_temp_new(TCG_TYPE_I64);
4366 r_temp = tcg_temp_new(TCG_TYPE_I32);
4367 r_size = tcg_temp_new(TCG_TYPE_I32);
4368 gen_movl_reg_TN(rd + 1, r_temp);
4369 tcg_gen_helper_1_2(helper_pack64, r_dword, cpu_T[1],
4370 r_temp);
4371 #ifdef TARGET_SPARC64
4372 if (IS_IMM) {
4373 int offset;
4374
4375 offset = GET_FIELD(insn, 25, 31);
4376 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], offset);
4377 tcg_gen_ld_i32(r_dword, cpu_env, offsetof(CPUSPARCState, asi));
4378 } else {
4379 #endif
4380 asi = GET_FIELD(insn, 19, 26);
4381 tcg_gen_movi_i32(r_temp, asi);
4382 #ifdef TARGET_SPARC64
4383 }
4384 #endif
4385 tcg_gen_movi_i32(r_size, 8);
4386 tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_dword, r_temp, r_size);
4387 }
4388 break;
4389 #endif
4390 #ifdef TARGET_SPARC64
4391 case 0x0e: /* V9 stx */
4392 gen_op_check_align_T0_7();
4393 ABI32_MASK(cpu_T[0]);
4394 tcg_gen_qemu_st64(cpu_T[1], cpu_T[0], dc->mem_idx);
4395 break;
4396 case 0x1e: /* V9 stxa */
4397 gen_op_check_align_T0_7();
4398 gen_st_asi(insn, 8);
4399 break;
4400 #endif
4401 default:
4402 goto illegal_insn;
4403 }
4404 } else if (xop > 0x23 && xop < 0x28) {
4405 if (gen_trap_ifnofpu(dc))
4406 goto jmp_insn;
4407 switch (xop) {
4408 case 0x24:
4409 gen_op_check_align_T0_3();
4410 gen_op_load_fpr_FT0(rd);
4411 gen_op_ldst(stf);
4412 break;
4413 case 0x25: /* stfsr, V9 stxfsr */
4414 #ifdef CONFIG_USER_ONLY
4415 gen_op_check_align_T0_3();
4416 #endif
4417 tcg_gen_helper_0_0(helper_stfsr);
4418 gen_op_ldst(stf);
4419 break;
4420 case 0x26:
4421 #ifdef TARGET_SPARC64
4422 #if defined(CONFIG_USER_ONLY)
4423 /* V9 stqf, store quad fpreg */
4424 gen_op_check_align_T0_7();
4425 gen_op_load_fpr_QT0(QFPREG(rd));
4426 gen_op_ldst(stqf);
4427 break;
4428 #else
4429 goto nfpu_insn;
4430 #endif
4431 #else /* !TARGET_SPARC64 */
4432 /* stdfq, store floating point queue */
4433 #if defined(CONFIG_USER_ONLY)
4434 goto illegal_insn;
4435 #else
4436 if (!supervisor(dc))
4437 goto priv_insn;
4438 if (gen_trap_ifnofpu(dc))
4439 goto jmp_insn;
4440 goto nfq_insn;
4441 #endif
4442 #endif
4443 case 0x27:
4444 gen_op_check_align_T0_7();
4445 gen_op_load_fpr_DT0(DFPREG(rd));
4446 gen_op_ldst(stdf);
4447 break;
4448 default:
4449 goto illegal_insn;
4450 }
4451 } else if (xop > 0x33 && xop < 0x3f) {
4452 switch (xop) {
4453 #ifdef TARGET_SPARC64
4454 case 0x34: /* V9 stfa */
4455 gen_op_check_align_T0_3();
4456 gen_op_load_fpr_FT0(rd);
4457 gen_stf_asi(insn, 4, rd);
4458 break;
4459 case 0x36: /* V9 stqfa */
4460 #if defined(CONFIG_USER_ONLY)
4461 gen_op_check_align_T0_7();
4462 gen_op_load_fpr_QT0(QFPREG(rd));
4463 gen_stf_asi(insn, 16, QFPREG(rd));
4464 break;
4465 #else
4466 goto nfpu_insn;
4467 #endif
4468 case 0x37: /* V9 stdfa */
4469 gen_op_check_align_T0_3();
4470 gen_op_load_fpr_DT0(DFPREG(rd));
4471 gen_stf_asi(insn, 8, DFPREG(rd));
4472 break;
4473 case 0x3c: /* V9 casa */
4474 gen_op_check_align_T0_3();
4475 gen_cas_asi(insn, rd);
4476 gen_movl_T1_reg(rd);
4477 break;
4478 case 0x3e: /* V9 casxa */
4479 gen_op_check_align_T0_7();
4480 gen_casx_asi(insn, rd);
4481 gen_movl_T1_reg(rd);
4482 break;
4483 #else
4484 case 0x34: /* stc */
4485 case 0x35: /* stcsr */
4486 case 0x36: /* stdcq */
4487 case 0x37: /* stdc */
4488 goto ncp_insn;
4489 #endif
4490 default:
4491 goto illegal_insn;
4492 }
4493 }
4494 else
4495 goto illegal_insn;
4496 }
4497 break;
4498 }
4499 /* default case for non jump instructions */
4500 if (dc->npc == DYNAMIC_PC) {
4501 dc->pc = DYNAMIC_PC;
4502 gen_op_next_insn();
4503 } else if (dc->npc == JUMP_PC) {
4504 /* we can do a static jump */
4505 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
4506 dc->is_br = 1;
4507 } else {
4508 dc->pc = dc->npc;
4509 dc->npc = dc->npc + 4;
4510 }
4511 jmp_insn:
4512 return;
4513 illegal_insn:
4514 save_state(dc);
4515 gen_op_exception(TT_ILL_INSN);
4516 dc->is_br = 1;
4517 return;
4518 #if !defined(CONFIG_USER_ONLY)
4519 priv_insn:
4520 save_state(dc);
4521 gen_op_exception(TT_PRIV_INSN);
4522 dc->is_br = 1;
4523 return;
4524 nfpu_insn:
4525 save_state(dc);
4526 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4527 dc->is_br = 1;
4528 return;
4529 #ifndef TARGET_SPARC64
4530 nfq_insn:
4531 save_state(dc);
4532 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4533 dc->is_br = 1;
4534 return;
4535 #endif
4536 #endif
4537 #ifndef TARGET_SPARC64
4538 ncp_insn:
4539 save_state(dc);
4540 gen_op_exception(TT_NCP_INSN);
4541 dc->is_br = 1;
4542 return;
4543 #endif
4544 }
4545
/* TCG macro-expansion callback registered via tcg_set_macro_func().
   Intentionally a no-op: this target emits no macro ops, so there is
   never anything to expand. */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
}
4549
/* Translate a block of guest SPARC code starting at tb->pc into TCG ops.
 *
 * tb  - translation block to fill in
 * spc - non-zero when called to "search pc": record the guest pc/npc for
 *       every generated op so a host pc can be mapped back to guest state
 * env - CPU state (breakpoints, MMU index, FPU enable, single-step flag)
 *
 * Translation stops on a branch, a page boundary, single-step mode, a
 * breakpoint hit, or when the op buffer is nearly full.  Always returns 0.
 */
static inline int gen_intermediate_code_internal(TranslationBlock * tb,
                                                 int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    int j, lj = -1;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* SPARC has delayed branches, so the entry npc is part of the TB key;
       it is carried in cs_base. */
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->fpu_enabled = cpu_fpu_enabled(env);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);

    do {
        /* Stop and raise a debug exception if a breakpoint matches the
           current guest pc. */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc);
                    tcg_gen_helper_0_0(helper_debug);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            /* Record guest pc/npc per generated op; pad the gap since the
               last recorded op with zero markers. */
            if (loglevel > 0)
                fprintf(logfile, "Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
            }
        }
        last_pc = dc->pc;
        disas_sparc_insn(dc);

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled) {
            gen_jmp_im(dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32));

 exit_gen_loop:
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_branch(dc, dc->pc, dc->npc);
        } else {
            /* dynamic pc and/or npc: store what is known and exit to the
               main loop without chaining */
            if (dc->pc != DYNAMIC_PC)
                gen_jmp_im(dc->pc);
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Zero-fill instr-start markers for any trailing ops, and expose
           the conditional-branch targets for JUMP_PC restoration. */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        if (loglevel > 0) {
            page_dump(logfile);
        }
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
    }
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "--------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
    return 0;
}
4657
4658 int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4659 {
4660 return gen_intermediate_code_internal(tb, 0, env);
4661 }
4662
4663 int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4664 {
4665 return gen_intermediate_code_internal(tb, 1, env);
4666 }
4667
/* Reset the CPU to its power-on state: flush the TLB, reset the register
   window state, then set up privilege level and reset PC according to the
   build configuration (user-only vs system, sparc32 vs sparc64). */
void cpu_reset(CPUSPARCState *env)
{
    tlb_flush(env, 1);
    env->cwp = 0;
    env->wim = 1;
    /* point the window register pointer at window 0 */
    env->regwptr = env->regbase + (env->cwp * 16);
#if defined(CONFIG_USER_ONLY)
    env->user_mode_only = 1;
#ifdef TARGET_SPARC64
    env->cleanwin = NWINDOWS - 2;
    env->cansave = NWINDOWS - 2;
    env->pstate = PS_RMO | PS_PEF | PS_IE;
    env->asi = 0x82; // Primary no-fault
#endif
#else
    /* system emulation: start in supervisor mode with traps disabled */
    env->psret = 0;
    env->psrs = 1;
    env->psrps = 1;
#ifdef TARGET_SPARC64
    env->pstate = PS_PRIV;
    env->hpstate = HS_PRIV;
    env->pc = 0x1fff0000000ULL;
    env->tsptr = &env->ts[env->tl];
#else
    env->pc = 0;
    /* disable MMU, re-apply the boot-mode bits from the CPU model */
    env->mmuregs[0] &= ~(MMU_E | MMU_NF);
    env->mmuregs[0] |= env->mmu_bm;
#endif
    env->npc = env->pc + 4;
#endif
}
4699
/* Allocate and initialize a SPARC CPU of the given model.
 *
 * cpu_model - model name looked up in the built-in definition table
 *
 * Returns the new CPU state, reset and ready to run, or NULL if the
 * model is unknown or allocation fails.  On the first call it also
 * registers the TCG global registers/variables shared by all CPUs.
 */
CPUSPARCState *cpu_sparc_init(const char *cpu_model)
{
    CPUSPARCState *env;
    const sparc_def_t *def;
    static int inited;
    unsigned int i;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };

    def = cpu_sparc_find_by_name(cpu_model);
    if (!def)
        return NULL;

    env = qemu_mallocz(sizeof(CPUSPARCState));
    if (!env)
        return NULL;
    cpu_exec_init(env);
    env->cpu_model_str = cpu_model;
    env->version = def->iu_version;
    env->fsr = def->fpu_version;
#if !defined(TARGET_SPARC64)
    /* sparc32: per-model MMU configuration */
    env->mmu_bm = def->mmu_bm;
    env->mmu_ctpr_mask = def->mmu_ctpr_mask;
    env->mmu_cxr_mask = def->mmu_cxr_mask;
    env->mmu_sfsr_mask = def->mmu_sfsr_mask;
    env->mmu_trcr_mask = def->mmu_trcr_mask;
    env->mmuregs[0] |= def->mmu_version;
    cpu_sparc_set_id(env, 0);
#endif

    /* init various static tables */
    if (!inited) {
        inited = 1;

        tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
        cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
        //#if TARGET_LONG_BITS > HOST_LONG_BITS
#ifdef TARGET_SPARC64
        /* sparc64: T0..T2 do not fit host registers, keep them in memory */
        cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t0), "T0");
        cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t1), "T1");
        cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t2), "T2");
        cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
#else
        /* sparc32: map T0..T2 to fixed host registers */
        cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
        cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
        cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
#endif
        cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
        /* %g0 is hardwired to zero, so only g1..g7 get TCG globals */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
    }

    cpu_reset(env);

    return env;
}
4782
4783 void cpu_sparc_set_id(CPUSPARCState *env, unsigned int cpu)
4784 {
4785 #if !defined(TARGET_SPARC64)
4786 env->mxccregs[7] = ((cpu + 8) & 0xf) << 24;
4787 #endif
4788 }
4789
/* Table of supported CPU models, matched by name (case-insensitively)
   in cpu_sparc_find_by_name().  iu_version is the value the guest reads
   from the version register; fpu_version seeds the FSR; mmu_version is
   ORed into MMU control register 0 (sparc32).  The mmu_*_mask fields
   give the writable/implemented bits of the corresponding SRMMU
   registers, and mmu_bm is the boot-mode address mapping bit pattern
   applied at reset.  */
static const sparc_def_t sparc_defs[] = {
#ifdef TARGET_SPARC64
    /* sparc64 models: the 64-bit VER register packs manufacturer
       (bits 63..48), implementation (47..32) and mask (31..24), plus
       the MAXTL and NWINDOWS-1 fields common to every entry.  */
    {
        .name = "Fujitsu Sparc64",
        .iu_version = ((0x04ULL << 48) | (0x02ULL << 32) | (0ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Fujitsu Sparc64 III",
        .iu_version = ((0x04ULL << 48) | (0x03ULL << 32) | (0ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Fujitsu Sparc64 IV",
        .iu_version = ((0x04ULL << 48) | (0x04ULL << 32) | (0ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Fujitsu Sparc64 V",
        .iu_version = ((0x04ULL << 48) | (0x05ULL << 32) | (0x51ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "TI UltraSparc I",
        .iu_version = ((0x17ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "TI UltraSparc II",
        .iu_version = ((0x17ULL << 48) | (0x11ULL << 32) | (0x20ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "TI UltraSparc IIi",
        .iu_version = ((0x17ULL << 48) | (0x12ULL << 32) | (0x91ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "TI UltraSparc IIe",
        .iu_version = ((0x17ULL << 48) | (0x13ULL << 32) | (0x14ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Sun UltraSparc III",
        .iu_version = ((0x3eULL << 48) | (0x14ULL << 32) | (0x34ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Sun UltraSparc III Cu",
        .iu_version = ((0x3eULL << 48) | (0x15ULL << 32) | (0x41ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Sun UltraSparc IIIi",
        .iu_version = ((0x3eULL << 48) | (0x16ULL << 32) | (0x34ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Sun UltraSparc IV",
        .iu_version = ((0x3eULL << 48) | (0x18ULL << 32) | (0x31ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Sun UltraSparc IV+",
        .iu_version = ((0x3eULL << 48) | (0x19ULL << 32) | (0x22ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Sun UltraSparc IIIi+",
        .iu_version = ((0x3eULL << 48) | (0x22ULL << 32) | (0ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "NEC UltraSparc I",
        .iu_version = ((0x22ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
#else
    /* sparc32 models: the 32-bit PSR/MMU version fields encode
       implementation in bits 31..28 and version in bits 27..24.  */
    {
        .name = "Fujitsu MB86900",
        .iu_version = 0x00 << 24, /* Impl 0, ver 0 */
        .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
        .mmu_version = 0x00 << 24, /* Impl 0, ver 0 */
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Fujitsu MB86904",
        .iu_version = 0x04 << 24, /* Impl 0, ver 4 */
        .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
        .mmu_version = 0x04 << 24, /* Impl 0, ver 4 */
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x00ffffc0,
        .mmu_cxr_mask = 0x000000ff,
        .mmu_sfsr_mask = 0x00016fff,
        .mmu_trcr_mask = 0x00ffffff,
    },
    {
        .name = "Fujitsu MB86907",
        .iu_version = 0x05 << 24, /* Impl 0, ver 5 */
        .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
        .mmu_version = 0x05 << 24, /* Impl 0, ver 5 */
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0xffffffc0,
        .mmu_cxr_mask = 0x000000ff,
        .mmu_sfsr_mask = 0x00016fff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "LSI L64811",
        .iu_version = 0x10 << 24, /* Impl 1, ver 0 */
        .fpu_version = 1 << 17, /* FPU version 1 (LSI L64814) */
        .mmu_version = 0x10 << 24,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Cypress CY7C601",
        .iu_version = 0x11 << 24, /* Impl 1, ver 1 */
        .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
        .mmu_version = 0x10 << 24,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Cypress CY7C611",
        .iu_version = 0x13 << 24, /* Impl 1, ver 3 */
        .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
        .mmu_version = 0x10 << 24,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "TI SuperSparc II",
        .iu_version = 0x40000000,
        .fpu_version = 0 << 17,
        .mmu_version = 0x04000000,
        .mmu_bm = 0x00002000,
        .mmu_ctpr_mask = 0xffffffc0,
        .mmu_cxr_mask = 0x0000ffff,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "TI MicroSparc I",
        .iu_version = 0x41000000,
        .fpu_version = 4 << 17,
        .mmu_version = 0x41000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0x00016fff,
        .mmu_trcr_mask = 0x0000003f,
    },
    {
        .name = "TI MicroSparc II",
        .iu_version = 0x42000000,
        .fpu_version = 4 << 17,
        .mmu_version = 0x02000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x00ffffc0,
        .mmu_cxr_mask = 0x000000ff,
        .mmu_sfsr_mask = 0x00016fff,
        .mmu_trcr_mask = 0x00ffffff,
    },
    {
        .name = "TI MicroSparc IIep",
        .iu_version = 0x42000000,
        .fpu_version = 4 << 17,
        .mmu_version = 0x04000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x00ffffc0,
        .mmu_cxr_mask = 0x000000ff,
        .mmu_sfsr_mask = 0x00016bff,
        .mmu_trcr_mask = 0x00ffffff,
    },
    {
        .name = "TI SuperSparc 51",
        .iu_version = 0x43000000,
        .fpu_version = 0 << 17,
        .mmu_version = 0x04000000,
        .mmu_bm = 0x00002000,
        .mmu_ctpr_mask = 0xffffffc0,
        .mmu_cxr_mask = 0x0000ffff,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "TI SuperSparc 61",
        .iu_version = 0x44000000,
        .fpu_version = 0 << 17,
        .mmu_version = 0x04000000,
        .mmu_bm = 0x00002000,
        .mmu_ctpr_mask = 0xffffffc0,
        .mmu_cxr_mask = 0x0000ffff,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Ross RT625",
        .iu_version = 0x1e000000,
        .fpu_version = 1 << 17,
        .mmu_version = 0x1e000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Ross RT620",
        .iu_version = 0x1f000000,
        .fpu_version = 1 << 17,
        .mmu_version = 0x1f000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "BIT B5010",
        .iu_version = 0x20000000,
        .fpu_version = 0 << 17, /* B5010/B5110/B5120/B5210 */
        .mmu_version = 0x20000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Matsushita MN10501",
        .iu_version = 0x50000000,
        .fpu_version = 0 << 17,
        .mmu_version = 0x50000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Weitek W8601",
        .iu_version = 0x90 << 24, /* Impl 9, ver 0 */
        .fpu_version = 3 << 17, /* FPU version 3 (Weitek WTL3170/2) */
        .mmu_version = 0x10 << 24,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "LEON2",
        .iu_version = 0xf2000000,
        .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
        .mmu_version = 0xf2000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "LEON3",
        .iu_version = 0xf3000000,
        .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
        .mmu_version = 0xf3000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
#endif
};
5109
5110 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name)
5111 {
5112 unsigned int i;
5113
5114 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
5115 if (strcasecmp(name, sparc_defs[i].name) == 0) {
5116 return &sparc_defs[i];
5117 }
5118 }
5119 return NULL;
5120 }
5121
5122 void sparc_cpu_list (FILE *f, int (*cpu_fprintf)(FILE *f, const char *fmt, ...))
5123 {
5124 unsigned int i;
5125
5126 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
5127 (*cpu_fprintf)(f, "Sparc %16s IU " TARGET_FMT_lx " FPU %08x MMU %08x\n",
5128 sparc_defs[i].name,
5129 sparc_defs[i].iu_version,
5130 sparc_defs[i].fpu_version,
5131 sparc_defs[i].mmu_version);
5132 }
5133 }
5134
/* Yield flag character (b) when PSR mask (a) is set in env->psr, '-'
   otherwise.  Relies on a variable named `env' being in scope at the
   expansion site.  Arguments are parenthesized so that an argument
   containing a lower-precedence operator (e.g. `A | B') cannot
   mis-associate with the surrounding `&' and `?:'.  */
#define GET_FLAG(a,b) ((env->psr & (a)) ? (b) : '-')
5136
/* Dump CPU state (PC/NPC, globals, the current register window, FP
   registers and status/control registers) to stream f via cpu_fprintf.
   The `flags' argument is currently ignored.  "%%g" in the format
   strings is a literal '%g' — not a conversion.  */
void cpu_dump_state(CPUState *env, FILE *f,
                    int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
                    int flags)
{
    int i, x;

    cpu_fprintf(f, "pc: " TARGET_FMT_lx " npc: " TARGET_FMT_lx "\n", env->pc, env->npc);
    cpu_fprintf(f, "General Registers:\n");
    /* %g0..%g3 on the first line, %g4..%g7 on the second.  */
    for (i = 0; i < 4; i++)
        cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
    cpu_fprintf(f, "\n");
    for (; i < 8; i++)
        cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
    cpu_fprintf(f, "\nCurrent Register Window:\n");
    /* Three groups of eight window registers, read through regwptr:
       x == 0 prints %o0-%o7, x == 1 %l0-%l7, x == 2 %i0-%i7;
       each group split 4 + 4 across two lines like the globals.  */
    for (x = 0; x < 3; x++) {
        for (i = 0; i < 4; i++)
            cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
                        (x == 0 ? 'o' : (x == 1 ? 'l' : 'i')), i,
                        env->regwptr[i + x * 8]);
        cpu_fprintf(f, "\n");
        for (; i < 8; i++)
            cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
                        (x == 0 ? 'o' : x == 1 ? 'l' : 'i'), i,
                        env->regwptr[i + x * 8]);
        cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "\nFloating Point Registers:\n");
    /* Four registers per output line.  NOTE(review): "%016lf" is an odd
       specifier for what looks like a floating-point fpr[] element —
       confirm the element type and whether a hex/width format was
       intended.  */
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0)
            cpu_fprintf(f, "%%f%02d:", i);
        cpu_fprintf(f, " %016lf", env->fpr[i]);
        if ((i & 3) == 3)
            cpu_fprintf(f, "\n");
    }
#ifdef TARGET_SPARC64
    /* sparc64 window-management state; cwp is printed inverted
       (NWINDOWS - 1 - cwp) — presumably to match the architectural
       numbering rather than the internal one; confirm against the
       window helpers.  */
    cpu_fprintf(f, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d fprs: %d\n",
                env->pstate, GET_CCR(env), env->asi, env->tl, env->fprs);
    cpu_fprintf(f, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
                env->cansave, env->canrestore, env->otherwin, env->wstate,
                env->cleanwin, NWINDOWS - 1 - env->cwp);
#else
    /* sparc32: reassembled PSR plus icc flag characters and the
       supervisor / prev-supervisor / enable-traps bits.  */
    cpu_fprintf(f, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env),
                GET_FLAG(PSR_ZERO, 'Z'), GET_FLAG(PSR_OVF, 'V'),
                GET_FLAG(PSR_NEG, 'N'), GET_FLAG(PSR_CARRY, 'C'),
                env->psrs?'S':'-', env->psrps?'P':'-',
                env->psret?'E':'-', env->wim);
#endif
    cpu_fprintf(f, "fsr: 0x%08x\n", GET_FSR32(env));
}
5186
#if defined(CONFIG_USER_ONLY)
/* User-mode emulation: guest virtual addresses are used directly, so
   the "physical" address of a page is the address itself.  */
target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
{
    return addr;
}

#else
extern int get_physical_address (CPUState *env, target_phys_addr_t *physical, int *prot,
                          int *access_index, target_ulong address, int rw,
                          int mmu_idx);

/* Translate a guest virtual address for the debugger (gdbstub/monitor).
   Returns the physical address, or -1 (all-ones) if the address does
   not translate or maps to unassigned memory.  */
target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
{
    target_phys_addr_t phys_addr;
    int prot, access_index;

    /* Try the translation first with rw == 2, then retry with rw == 0;
       only when both fail is the lookup abandoned.  NOTE(review): the
       rw values appear to distinguish code fetch (2) from data read (0)
       — confirm against get_physical_address().  */
    if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 2,
                             MMU_KERNEL_IDX) != 0)
        if (get_physical_address(env, &phys_addr, &prot, &access_index, addr,
                                 0, MMU_KERNEL_IDX) != 0)
            return -1;
    /* Translation succeeded but nothing is mapped there: report failure
       rather than letting the debugger touch unassigned memory.  */
    if (cpu_get_physical_page_desc(phys_addr) == IO_MEM_UNASSIGNED)
        return -1;
    return phys_addr;
}
#endif
5213
5214 void helper_flush(target_ulong addr)
5215 {
5216 addr &= ~7;
5217 tb_invalidate_page_range(addr, addr + 8);
5218 }