]> git.proxmox.com Git - mirror_qemu.git/blob - target-sparc/translate.c
Convert ldf/ldfsr and stf/stfsr to TCG
[mirror_qemu.git] / target-sparc / translate.c
1 /*
2 SPARC translation
3
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
6
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
11
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 */
21
22 /*
23 TODO-list:
24
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
28 */
29
30 #include <stdarg.h>
31 #include <stdlib.h>
32 #include <stdio.h>
33 #include <string.h>
34 #include <inttypes.h>
35
36 #include "cpu.h"
37 #include "exec-all.h"
38 #include "disas.h"
39 #include "helper.h"
40 #include "tcg-op.h"
41
42 #define DEBUG_DISAS
43
44 #define DYNAMIC_PC 1 /* dynamic pc value */
45 #define JUMP_PC 2 /* dynamic pc value which takes only two values
46 according to jump_pc[T2] */
47
48 /* global register indexes */
49 static TCGv cpu_env, cpu_T[3], cpu_regwptr, cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
50 static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
51 #ifdef TARGET_SPARC64
52 static TCGv cpu_xcc;
53 #endif
54 /* local register indexes (only used inside old micro ops) */
55 static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
56
57 typedef struct DisasContext {
58 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
59 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
60 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
61 int is_br;
62 int mem_idx;
63 int fpu_enabled;
64 struct TranslationBlock *tb;
65 } DisasContext;
66
67 typedef struct sparc_def_t sparc_def_t;
68
69 struct sparc_def_t {
70 const unsigned char *name;
71 target_ulong iu_version;
72 uint32_t fpu_version;
73 uint32_t mmu_version;
74 uint32_t mmu_bm;
75 uint32_t mmu_ctpr_mask;
76 uint32_t mmu_cxr_mask;
77 uint32_t mmu_sfsr_mask;
78 uint32_t mmu_trcr_mask;
79 };
80
81 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name);
82
83 extern FILE *logfile;
84 extern int loglevel;
85
86 // This function uses non-native bit order
87 #define GET_FIELD(X, FROM, TO) \
88 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
89
90 // This function uses the order in the manuals, i.e. bit 0 is 2^0
91 #define GET_FIELD_SP(X, FROM, TO) \
92 GET_FIELD(X, 31 - (TO), 31 - (FROM))
93
94 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
95 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
96
97 #ifdef TARGET_SPARC64
98 #define FFPREG(r) (r)
99 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
100 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
101 #else
102 #define FFPREG(r) (r)
103 #define DFPREG(r) (r & 0x1e)
104 #define QFPREG(r) (r & 0x1c)
105 #endif
106
/* Sign-extend the low LEN bits of X (1 <= len <= 32).
   The previous implementation shifted a possibly-negative int left
   ((x << len) is undefined behavior for negative x); use the portable
   mask-and-xor idiom instead.  For len >= 32 the value is returned
   unchanged, matching the old behavior of a zero-length shift. */
static int sign_extend(int x, int len)
{
    uint32_t v, sign;

    if (len <= 0 || len >= 32)
        return x;
    sign = 1U << (len - 1);
    v = (uint32_t)x & (2 * sign - 1); /* keep only the low len bits */
    /* flip the sign bit, subtract it back: extends bit len-1 upward */
    return (int)(v ^ sign) - (int)sign;
}
112
113 #define IS_IMM (insn & (1<<13))
114
115 static void disas_sparc_insn(DisasContext * dc);
116
117 /* floating point registers moves */
/* Copy one 32-bit FP register into the ft0 scratch operand in env. */
static void gen_op_load_fpr_FT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft0));
}

/* Copy one 32-bit FP register into the ft1 scratch operand in env. */
static void gen_op_load_fpr_FT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft1));
}

/* Write the ft0 scratch result back into a 32-bit FP register. */
static void gen_op_store_FT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft0));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
}

/* Load an FP register pair into the dt0 double scratch; fpr[src] is
   the upper word, fpr[src + 1] the lower (callers pass DFPREG(rs)). */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) + offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) + offsetof(CPU_DoubleU, l.lower));
}

/* Load an FP register pair into the dt1 double scratch. */
static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt1) + offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt1) + offsetof(CPU_DoubleU, l.lower));
}

/* Write the dt0 double scratch back into an FP register pair. */
static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) + offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) + offsetof(CPU_DoubleU, l.lower));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 1]));
}
159
160 #ifdef CONFIG_USER_ONLY
/* Load four consecutive FP registers into the qt0 quad scratch;
   fpr[src] is the most significant word (callers pass QFPREG(rs)). */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 2]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 3]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.lowest));
}

/* Load four consecutive FP registers into the qt1 quad scratch. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) + offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) + offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 2]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) + offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 3]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) + offsetof(CPU_QuadU, l.lowest));
}

/* Write the qt0 quad scratch back into four consecutive FP registers. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 1]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 2]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.lowest));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 3]));
}
196 #endif
197
198 /* moves */
199 #ifdef CONFIG_USER_ONLY
200 #define supervisor(dc) 0
201 #ifdef TARGET_SPARC64
202 #define hypervisor(dc) 0
203 #endif
204 #define gen_op_ldst(name) gen_op_##name##_raw()
205 #else
206 #define supervisor(dc) (dc->mem_idx >= 1)
207 #ifdef TARGET_SPARC64
208 #define hypervisor(dc) (dc->mem_idx == 2)
209 #define OP_LD_TABLE(width) \
210 static GenOpFunc * const gen_op_##width[] = { \
211 &gen_op_##width##_user, \
212 &gen_op_##width##_kernel, \
213 &gen_op_##width##_hypv, \
214 };
215 #else
216 #define OP_LD_TABLE(width) \
217 static GenOpFunc * const gen_op_##width[] = { \
218 &gen_op_##width##_user, \
219 &gen_op_##width##_kernel, \
220 };
221 #endif
222 #define gen_op_ldst(name) (*gen_op_##name[dc->mem_idx])()
223 #endif
224
225 #ifndef CONFIG_USER_ONLY
226 #ifdef __i386__
227 OP_LD_TABLE(std);
228 #endif /* __i386__ */
229 OP_LD_TABLE(stdf);
230 OP_LD_TABLE(lddf);
231 #endif
232
233 #ifdef TARGET_ABI32
234 #define ABI32_MASK(addr) tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
235 #else
236 #define ABI32_MASK(addr)
237 #endif
238
/* T1 = constant immediate (caller has already sign-extended it). */
static inline void gen_movl_simm_T1(int32_t val)
{
    tcg_gen_movi_tl(cpu_T[1], val);
}
243
244 static inline void gen_movl_reg_TN(int reg, TCGv tn)
245 {
246 if (reg == 0)
247 tcg_gen_movi_tl(tn, 0);
248 else if (reg < 8)
249 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
250 else {
251 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
252 }
253 }
254
/* T0 = %reg */
static inline void gen_movl_reg_T0(int reg)
{
    gen_movl_reg_TN(reg, cpu_T[0]);
}

/* T1 = %reg */
static inline void gen_movl_reg_T1(int reg)
{
    gen_movl_reg_TN(reg, cpu_T[1]);
}
264
#ifdef __i386__
/* T2 = %reg (only compiled for i386 hosts) */
static inline void gen_movl_reg_T2(int reg)
{
    gen_movl_reg_TN(reg, cpu_T[2]);
}

#endif /* __i386__ */
272 static inline void gen_movl_TN_reg(int reg, TCGv tn)
273 {
274 if (reg == 0)
275 return;
276 else if (reg < 8)
277 tcg_gen_mov_tl(cpu_gregs[reg], tn);
278 else {
279 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
280 }
281 }
282
/* %reg = T0 */
static inline void gen_movl_T0_reg(int reg)
{
    gen_movl_TN_reg(reg, cpu_T[0]);
}

/* %reg = T1 */
static inline void gen_movl_T1_reg(int reg)
{
    gen_movl_TN_reg(reg, cpu_T[1]);
}
292
/* T0 = sign-extended 32-bit env field at the given byte offset. */
static inline void gen_op_movl_T0_env(size_t offset)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offset);
    tcg_gen_ext_i32_tl(cpu_T[0], cpu_tmp32);
}

/* 32-bit env field at offset = low 32 bits of T0. */
static inline void gen_op_movl_env_T0(size_t offset)
{
    tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_T[0]);
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offset);
}

/* T0 = target_ulong-sized env field at offset. */
static inline void gen_op_movtl_T0_env(size_t offset)
{
    tcg_gen_ld_tl(cpu_T[0], cpu_env, offset);
}

/* target_ulong-sized env field at offset = T0. */
static inline void gen_op_movtl_env_T0(size_t offset)
{
    tcg_gen_st_tl(cpu_T[0], cpu_env, offset);
}

/* T0 += T1 (no condition codes) */
static inline void gen_op_add_T1_T0(void)
{
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* T0 |= T1 */
static inline void gen_op_or_T1_T0(void)
{
    tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* T0 ^= T1 */
static inline void gen_op_xor_T1_T0(void)
{
    tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* Set the guest PC to a known constant. */
static inline void gen_jmp_im(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_pc, pc);
}

/* Set the guest nPC to a known constant. */
static inline void gen_movl_npc_im(target_ulong npc)
{
    tcg_gen_movi_tl(cpu_npc, npc);
}
339
/* End the TB with a jump to (pc, npc).  When both targets are in the
   same guest page as this TB we may chain TBs directly: emit a
   patchable goto_tb and return (tb pointer | slot) so the main loop
   can link the successor in.  Otherwise set pc/npc and return 0 to
   force a full lookup. */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        gen_jmp_im(pc);
        gen_movl_npc_im(npc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        gen_jmp_im(pc);
        gen_movl_npc_im(npc);
        tcg_gen_exit_tb(0);
    }
}
360
361 // XXX suboptimal
/* reg = PSR negative flag (icc bit 23) as 0/1 */
static inline void gen_mov_reg_N(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, 23);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* reg = PSR zero flag (icc bit 22) as 0/1 */
static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, 22);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* reg = PSR overflow flag (icc bit 21) as 0/1 */
static inline void gen_mov_reg_V(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, 21);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* reg = PSR carry flag (icc bit 20) as 0/1 */
static inline void gen_mov_reg_C(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, 20);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Emit a call to the raise_exception helper with the given trap type. */
static inline void gen_op_exception(int exception)
{
    tcg_gen_movi_i32(cpu_tmp32, exception);
    tcg_gen_helper_0_1(raise_exception, cpu_tmp32);
}

/* Zero the condition-code registers before the gen_cc_* helpers OR in
   the freshly computed flag bits. */
static inline void gen_cc_clear(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
#ifdef TARGET_SPARC64
    tcg_gen_movi_i32(cpu_xcc, 0);
#endif
}
403
404 /* old op:
405 if (!T0)
406 env->psr |= PSR_ZERO;
407 if ((int32_t) T0 < 0)
408 env->psr |= PSR_NEG;
409 */
410 static inline void gen_cc_NZ(TCGv dst)
411 {
412 TCGv r_temp;
413 int l1, l2;
414
415 l1 = gen_new_label();
416 l2 = gen_new_label();
417 r_temp = tcg_temp_new(TCG_TYPE_TL);
418 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
419 tcg_gen_brcond_tl(TCG_COND_NE, r_temp, tcg_const_tl(0), l1);
420 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
421 gen_set_label(l1);
422 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
423 tcg_gen_brcond_tl(TCG_COND_GE, r_temp, tcg_const_tl(0), l2);
424 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
425 gen_set_label(l2);
426 #ifdef TARGET_SPARC64
427 {
428 int l3, l4;
429
430 l3 = gen_new_label();
431 l4 = gen_new_label();
432 tcg_gen_brcond_tl(TCG_COND_NE, dst, tcg_const_tl(0), l3);
433 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
434 gen_set_label(l3);
435 tcg_gen_brcond_tl(TCG_COND_GE, dst, tcg_const_tl(0), l4);
436 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
437 gen_set_label(l4);
438 }
439 #endif
440 }
441
442 /* old op:
443 if (T0 < src1)
444 env->psr |= PSR_CARRY;
445 */
446 static inline void gen_cc_C_add(TCGv dst, TCGv src1)
447 {
448 TCGv r_temp;
449 int l1;
450
451 l1 = gen_new_label();
452 r_temp = tcg_temp_new(TCG_TYPE_TL);
453 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
454 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
455 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
456 gen_set_label(l1);
457 #ifdef TARGET_SPARC64
458 {
459 int l2;
460
461 l2 = gen_new_label();
462 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l2);
463 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
464 gen_set_label(l2);
465 }
466 #endif
467 }
468
469 /* old op:
470 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
471 env->psr |= PSR_OVF;
472 */
473 static inline void gen_cc_V_add(TCGv dst, TCGv src1, TCGv src2)
474 {
475 TCGv r_temp;
476 int l1;
477
478 l1 = gen_new_label();
479
480 r_temp = tcg_temp_new(TCG_TYPE_TL);
481 tcg_gen_xor_tl(r_temp, src1, src2);
482 tcg_gen_xori_tl(r_temp, r_temp, -1);
483 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
484 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
485 tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
486 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l1);
487 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
488 gen_set_label(l1);
489 #ifdef TARGET_SPARC64
490 {
491 int l2;
492
493 l2 = gen_new_label();
494 tcg_gen_xor_tl(r_temp, src1, src2);
495 tcg_gen_xori_tl(r_temp, r_temp, -1);
496 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
497 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
498 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
499 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l2);
500 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_OVF);
501 gen_set_label(l2);
502 }
503 #endif
504 tcg_gen_discard_tl(r_temp);
505 }
506
507 static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
508 {
509 TCGv r_temp;
510 int l1;
511
512 l1 = gen_new_label();
513
514 r_temp = tcg_temp_new(TCG_TYPE_TL);
515 tcg_gen_xor_tl(r_temp, src1, src2);
516 tcg_gen_xori_tl(r_temp, r_temp, -1);
517 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
518 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
519 tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
520 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l1);
521 gen_op_exception(TT_TOVF);
522 gen_set_label(l1);
523 #ifdef TARGET_SPARC64
524 {
525 int l2;
526
527 l2 = gen_new_label();
528 tcg_gen_xor_tl(r_temp, src1, src2);
529 tcg_gen_xori_tl(r_temp, r_temp, -1);
530 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
531 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
532 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
533 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l2);
534 gen_op_exception(TT_TOVF);
535 gen_set_label(l2);
536 }
537 #endif
538 tcg_gen_discard_tl(r_temp);
539 }
540
/* Tagged arithmetic: set PSR_OVF if either operand has a non-zero tag
   (either of its low two bits set). */
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

/* Trapping variant: raise TT_TOVF on a non-zero tag instead of
   setting PSR_OVF (used by taddcctv/tsubcctv). */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1);
    gen_op_exception(TT_TOVF);
    gen_set_label(l1);
}
564
/* addcc: T0 += T1, updating N/Z/C/V. */
static inline void gen_op_add_T1_T0_cc(void)
{
    /* save the original T0 for the carry/overflow computations */
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
    gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
}

/* addxcc: T0 += T1 + C, updating N/Z/C/V. */
static inline void gen_op_addx_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    gen_cc_clear();
    /* carry out may be produced by either the +C or the +T1 step, so
       the carry check runs after each partial addition */
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
    gen_cc_NZ(cpu_T[0]);
    gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
}

/* taddcc: tagged add; additionally sets PSR_OVF on a tag violation. */
static inline void gen_op_tadd_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
    gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_T[1]);
    gen_cc_V_tag(cpu_cc_src, cpu_T[1]);
}

/* taddcctv: tagged add; traps (TT_TOVF) on tag violation or signed
   overflow instead of setting PSR_OVF. */
static inline void gen_op_tadd_T1_T0_ccTV(void)
{
    gen_tag_tv(cpu_T[0], cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_add_tv(cpu_T[0], cpu_cc_src, cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
}
609
610 /* old op:
611 if (src1 < T1)
612 env->psr |= PSR_CARRY;
613 */
614 static inline void gen_cc_C_sub(TCGv src1, TCGv src2)
615 {
616 TCGv r_temp1, r_temp2;
617 int l1;
618
619 l1 = gen_new_label();
620 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
621 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
622 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
623 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
624 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
625 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
626 gen_set_label(l1);
627 #ifdef TARGET_SPARC64
628 {
629 int l2;
630
631 l2 = gen_new_label();
632 tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l2);
633 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
634 gen_set_label(l2);
635 }
636 #endif
637 }
638
639 /* old op:
640 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
641 env->psr |= PSR_OVF;
642 */
643 static inline void gen_cc_V_sub(TCGv dst, TCGv src1, TCGv src2)
644 {
645 TCGv r_temp;
646 int l1;
647
648 l1 = gen_new_label();
649
650 r_temp = tcg_temp_new(TCG_TYPE_TL);
651 tcg_gen_xor_tl(r_temp, src1, src2);
652 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
653 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
654 tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
655 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l1);
656 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
657 gen_set_label(l1);
658 #ifdef TARGET_SPARC64
659 {
660 int l2;
661
662 l2 = gen_new_label();
663 tcg_gen_xor_tl(r_temp, src1, src2);
664 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
665 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
666 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
667 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l2);
668 tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_OVF);
669 gen_set_label(l2);
670 }
671 #endif
672 tcg_gen_discard_tl(r_temp);
673 }
674
675 static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
676 {
677 TCGv r_temp;
678 int l1;
679
680 l1 = gen_new_label();
681
682 r_temp = tcg_temp_new(TCG_TYPE_TL);
683 tcg_gen_xor_tl(r_temp, src1, src2);
684 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
685 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
686 tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
687 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l1);
688 gen_op_exception(TT_TOVF);
689 gen_set_label(l1);
690 #ifdef TARGET_SPARC64
691 {
692 int l2;
693
694 l2 = gen_new_label();
695 tcg_gen_xor_tl(r_temp, src1, src2);
696 tcg_gen_xor_tl(cpu_tmp0, src1, dst);
697 tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
698 tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
699 tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l2);
700 gen_op_exception(TT_TOVF);
701 gen_set_label(l2);
702 }
703 #endif
704 tcg_gen_discard_tl(r_temp);
705 }
706
/* subcc: T0 -= T1, updating N/Z/C/V. */
static inline void gen_op_sub_T1_T0_cc(void)
{
    /* save the original T0 for the borrow/overflow computations */
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
    gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
}

/* subxcc: T0 -= T1 + C, updating N/Z/C/V. */
static inline void gen_op_subx_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_tmp0);
    gen_cc_clear();
    /* borrow may occur on either the -C or the -T1 step, so the
       borrow check runs after each partial subtraction */
    gen_cc_C_sub(cpu_T[0], cpu_cc_src);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_C_sub(cpu_T[0], cpu_cc_src);
    gen_cc_NZ(cpu_T[0]);
    gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
}

/* tsubcc: tagged subtract; additionally sets PSR_OVF on a tag
   violation. */
static inline void gen_op_tsub_T1_T0_cc(void)
{
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
    gen_cc_V_sub(cpu_T[0], cpu_cc_src, cpu_T[1]);
    gen_cc_V_tag(cpu_cc_src, cpu_T[1]);
}

/* tsubcctv: tagged subtract; traps (TT_TOVF) on tag violation or
   signed overflow instead of setting PSR_OVF. */
static inline void gen_op_tsub_T1_T0_ccTV(void)
{
    gen_tag_tv(cpu_T[0], cpu_T[1]);
    tcg_gen_mov_tl(cpu_cc_src, cpu_T[0]);
    tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_sub_tv(cpu_T[0], cpu_cc_src, cpu_T[1]);
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_C_sub(cpu_cc_src, cpu_T[1]);
}
751
/* mulscc: one step of the SPARC multiply-step instruction.  Shifts
   N ^ V into T0 from the left, shifts the Y register right by one,
   conditionally adds T1 (when the old Y bit 0 was set), and updates
   the condition codes from the addition. */
static inline void gen_op_mulscc_T1_T0(void)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, y));
    tcg_gen_extu_i32_tl(r_temp, cpu_tmp32);
    tcg_gen_andi_tl(r_temp, r_temp, 0x1);
    tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l1);
    tcg_gen_mov_tl(cpu_cc_src2, cpu_T[1]);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_shli_tl(r_temp, cpu_T[0], 31);
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, y));
    tcg_gen_shri_i32(cpu_tmp32, cpu_tmp32, 1);
    /* NOTE(review): r_temp is a target_long-sized temp used here in an
       i32 op; only the low 32 bits matter for the result, but confirm
       the mixed-width usage is acceptable to the TCG backends. */
    tcg_gen_or_i32(cpu_tmp32, cpu_tmp32, r_temp);
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, y));

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_T[0], 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_T[0], cpu_cc_src, cpu_cc_src2);
    tcg_gen_discard_tl(r_temp);

    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    gen_cc_V_add(cpu_T[0], cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add(cpu_T[0], cpu_cc_src);
}
803
/* umul: T0 = zero-extended T0 * T1 as a full 64-bit product; the high
   32 bits of the product are also written to env->y. */
static inline void gen_op_umul_T1_T0(void)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_extu_tl_i64(r_temp, cpu_T[1]);
    tcg_gen_extu_tl_i64(r_temp2, cpu_T[0]);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* y = high 32 bits of the product */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_i32(r_temp, r_temp);
    tcg_gen_st_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
#ifdef TARGET_SPARC64
    /* 64-bit target: keep the whole product in T0 */
    tcg_gen_mov_i64(cpu_T[0], r_temp2);
#else
    tcg_gen_trunc_i64_tl(cpu_T[0], r_temp2);
#endif

    tcg_gen_discard_i64(r_temp);
    tcg_gen_discard_i64(r_temp2);
}

/* smul: same as umul but with sign-extended operands. */
static inline void gen_op_smul_T1_T0(void)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_ext_tl_i64(r_temp, cpu_T[1]);
    tcg_gen_ext_tl_i64(r_temp2, cpu_T[0]);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* y = high 32 bits of the product */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_i32(r_temp, r_temp);
    tcg_gen_st_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(cpu_T[0], r_temp2);
#else
    tcg_gen_trunc_i64_tl(cpu_T[0], r_temp2);
#endif

    tcg_gen_discard_i64(r_temp);
    tcg_gen_discard_i64(r_temp2);
}
851
/* udiv: T0 = helper_udiv(T0, T1); division semantics (including the
   divide-by-zero trap) are implemented in the runtime helper. */
static inline void gen_op_udiv_T1_T0(void)
{
    tcg_gen_helper_1_2(helper_udiv, cpu_T[0], cpu_T[0], cpu_T[1]);
}

/* sdiv: T0 = helper_sdiv(T0, T1); see helper for trap semantics. */
static inline void gen_op_sdiv_T1_T0(void)
{
    tcg_gen_helper_1_2(helper_sdiv, cpu_T[0], cpu_T[0], cpu_T[1]);
}
861
862 #ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO if divisor is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_NE, divisor, tcg_const_tl(0), l1);
    gen_op_exception(TT_DIV_ZERO);
    gen_set_label(l1);
}

/* sdivx: 64-bit signed division T0 / T1.  Traps on a zero divisor and
   special-cases INT64_MIN / -1 (which would overflow, and faults on
   many hosts) to yield INT64_MIN. */
static inline void gen_op_sdivx_T1_T0(void)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    gen_trap_ifdivzero_tl(cpu_T[1]);
    /* take the normal-division path unless T0 == INT64_MIN && T1 == -1 */
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_T[0], tcg_const_tl(INT64_MIN), l1);
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_T[1], tcg_const_tl(-1), l1);
    tcg_gen_movi_i64(cpu_T[0], INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
    gen_set_label(l2);
}
888 #endif
889
/* Set condition codes after a udivcc/sdivcc: N and Z from the result,
   V from env->cc_src2 (presumably set non-zero by the division helper
   on overflow — confirm against op_helper), C left clear. */
static inline void gen_op_div_cc(void)
{
    int l1;

    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
    l1 = gen_new_label();
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, cc_src2));
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

/* Logical ops (andcc etc.): only N and Z are set; C and V are clear. */
static inline void gen_op_logic_T0_cc(void)
{
    gen_cc_clear();
    gen_cc_NZ(cpu_T[0]);
}
908
/* Integer condition evaluators: each computes dst = 0/1 from the
   PSR-layout flag word in src (see gen_mov_reg_N/Z/V/C). */

// 1: branch always
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z: equal
static inline void gen_op_eval_be(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V): less or equal (signed)
static inline void gen_op_eval_ble(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V: less (signed)
static inline void gen_op_eval_bl(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z: less or equal (unsigned)
static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C: carry set (less, unsigned)
static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
}

// V: overflow set
static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
}

// 0: branch never
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N: negative
static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
}

// !Z: not equal
static inline void gen_op_eval_bne(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V)): greater (signed)
static inline void gen_op_eval_bg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V): greater or equal (signed)
static inline void gen_op_eval_bge(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z): greater (unsigned)
static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C: carry clear (greater or equal, unsigned)
static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N: positive or zero
static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V: overflow clear
static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1027
1028 /*
1029 FPSR bit field FCC1 | FCC0:
1030 0 =
1031 1 <
1032 2 >
1033 3 unordered
1034 */
1035 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
1036 unsigned int fcc_offset)
1037 {
1038 tcg_gen_extu_i32_tl(reg, src);
1039 tcg_gen_shri_tl(reg, reg, 10 + fcc_offset);
1040 tcg_gen_andi_tl(reg, reg, 0x1);
1041 }
1042
1043 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
1044 unsigned int fcc_offset)
1045 {
1046 tcg_gen_extu_i32_tl(reg, src);
1047 tcg_gen_shri_tl(reg, reg, 11 + fcc_offset);
1048 tcg_gen_andi_tl(reg, reg, 0x1);
1049 }
1050
1051 // !0: FCC0 | FCC1
1052 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
1053 unsigned int fcc_offset)
1054 {
1055 gen_mov_reg_FCC0(dst, src, fcc_offset);
1056 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1057 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1058 }
1059
1060 // 1 or 2: FCC0 ^ FCC1
1061 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1062 unsigned int fcc_offset)
1063 {
1064 gen_mov_reg_FCC0(dst, src, fcc_offset);
1065 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1066 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1067 }
1068
1069 // 1 or 3: FCC0
1070 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1071 unsigned int fcc_offset)
1072 {
1073 gen_mov_reg_FCC0(dst, src, fcc_offset);
1074 }
1075
1076 // 1: FCC0 & !FCC1
1077 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1078 unsigned int fcc_offset)
1079 {
1080 gen_mov_reg_FCC0(dst, src, fcc_offset);
1081 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1082 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1083 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1084 }
1085
1086 // 2 or 3: FCC1
1087 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1088 unsigned int fcc_offset)
1089 {
1090 gen_mov_reg_FCC1(dst, src, fcc_offset);
1091 }
1092
// 2: !FCC0 & FCC1
/* fbg: taken only on ">" (encoding 2). */
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
1102
// 3: FCC0 & FCC1
/* fbu: taken only when the comparison was unordered (encoding 3). */
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
1111
// 0: !(FCC0 | FCC1)
/* fbe: taken only on "=" (encoding 0). */
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1121
// 0 or 3: !(FCC0 ^ FCC1)
/* fbue: taken on "=" or unordered. */
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1131
// 0 or 2: !FCC0
/* fbge: taken on "=" or ">"; both encodings have FCC0 clear. */
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1139
// !1: !(FCC0 & !FCC1)
/* fbuge: taken on anything except "<" (complement of fbl). */
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1150
// 0 or 1: !FCC1
/* fble: taken on "=" or "<"; both encodings have FCC1 clear. */
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1158
// !2: !(!FCC0 & FCC1)
/* fbule: taken on anything except ">" (complement of fbg). */
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1169
// !3: !(FCC0 & FCC1)
/* fbo: taken whenever the comparison was ordered (complement of fbu). */
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1179
/* End the TB with a two-way static branch: when r_cond is non-zero go to
   (pc1, pc1 + 4), otherwise to (pc2, pc2 + 4).  Each arm exits via
   gen_goto_tb with a distinct chaining slot (0/1). */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    /* r_cond == 0 skips over the taken arm to the label below */
    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
1194
/* End the TB for an annulling conditional branch: if r_cond is non-zero
   the delay slot at pc2 is executed with npc = pc1 (the branch target);
   otherwise the delay slot is annulled and execution resumes at pc2 + 4. */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1);

    /* taken: run the delay slot, then jump to the target */
    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    /* not taken + annul: skip the delay slot entirely */
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
1209
/* End the TB with an unconditional jump to the static (pc, npc) pair. */
static inline void gen_branch(DisasContext *dc, target_ulong pc,
                              target_ulong npc)
{
    gen_goto_tb(dc, 0, pc, npc);
}
1215
/* Emit code that selects between two candidate npc values at run time:
   npc <- npc1 when r_cond is non-zero, else npc2.  Unlike gen_branch2
   this does not end the TB; it only resolves a pending JUMP_PC state. */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1);

    gen_movl_npc_im(npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    gen_movl_npc_im(npc2);
    gen_set_label(l2);
}
1233
/* call this function before using T2 as it may have been set for a jump */
/* If a conditional branch left its condition in T2 (npc == JUMP_PC),
   resolve npc now via gen_generic_branch so T2 can be reused; npc then
   becomes plain DYNAMIC_PC. */
static inline void flush_T2(DisasContext * dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
        dc->npc = DYNAMIC_PC;
    }
}
1242
/* Make the architectural npc register match the translator's view:
   resolve a pending JUMP_PC, or store a static npc; a plain DYNAMIC_PC
   is already up to date and needs nothing. */
static inline void save_npc(DisasContext * dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        gen_movl_npc_im(dc->npc);
    }
}
1252
/* Synchronize both pc and npc with the CPU state before a helper that
   may raise an exception or otherwise needs precise state. */
static inline void save_state(DisasContext * dc)
{
    gen_jmp_im(dc->pc);
    save_npc(dc);
}
1258
1259 static inline void gen_mov_pc_npc(DisasContext * dc)
1260 {
1261 if (dc->npc == JUMP_PC) {
1262 gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
1263 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1264 dc->pc = DYNAMIC_PC;
1265 } else if (dc->npc == DYNAMIC_PC) {
1266 tcg_gen_mov_tl(cpu_pc, cpu_npc);
1267 dc->pc = DYNAMIC_PC;
1268 } else {
1269 dc->pc = dc->npc;
1270 }
1271 }
1272
/* Emit the sequential advance: pc <- npc, npc <- npc + 4. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1278
/* Evaluate integer condition `cond` (Bicc/BPcc 4-bit encoding) into
   r_dst, reading the icc flags (cpu_psr) or, on sparc64 when cc != 0,
   the xcc flags (cpu_xcc). */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:                   /* bn: never */
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:                   /* be */
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:                   /* ble */
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:                   /* bl */
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:                   /* bleu */
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:                   /* bcs */
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:                   /* bneg */
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:                   /* bvs */
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:                   /* ba: always */
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:                   /* bne */
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:                   /* bg */
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:                   /* bge */
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:                   /* bgu */
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:                   /* bcc */
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:                   /* bpos */
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:                   /* bvc */
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1342
1343 static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1344 {
1345 unsigned int offset;
1346
1347 switch (cc) {
1348 default:
1349 case 0x0:
1350 offset = 0;
1351 break;
1352 case 0x1:
1353 offset = 32 - 10;
1354 break;
1355 case 0x2:
1356 offset = 34 - 10;
1357 break;
1358 case 0x3:
1359 offset = 36 - 10;
1360 break;
1361 }
1362
1363 switch (cond) {
1364 case 0x0:
1365 gen_op_eval_bn(r_dst);
1366 break;
1367 case 0x1:
1368 gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1369 break;
1370 case 0x2:
1371 gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1372 break;
1373 case 0x3:
1374 gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1375 break;
1376 case 0x4:
1377 gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1378 break;
1379 case 0x5:
1380 gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1381 break;
1382 case 0x6:
1383 gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1384 break;
1385 case 0x7:
1386 gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1387 break;
1388 case 0x8:
1389 gen_op_eval_ba(r_dst);
1390 break;
1391 case 0x9:
1392 gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1393 break;
1394 case 0xa:
1395 gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1396 break;
1397 case 0xb:
1398 gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1399 break;
1400 case 0xc:
1401 gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1402 break;
1403 case 0xd:
1404 gen_op_eval_fble(r_dst, cpu_fsr, offset);
1405 break;
1406 case 0xe:
1407 gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1408 break;
1409 case 0xf:
1410 gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1411 break;
1412 }
1413 }
1414
#ifdef TARGET_SPARC64
// Inverted logic
/* Map the 3-bit BPr rcond encoding to the TCG condition for the
   *complement* of the test; entries -1 are reserved encodings. */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

/* Evaluate a register-based condition (BPr) on T0 into r_dst:
   r_dst = 1 when the condition holds, 0 otherwise.  Uses the inverted
   table above so the "false" path can branch over the movi 1. */
static inline void gen_cond_reg(TCGv r_dst, int cond)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0], tcg_const_tl(0), l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
1439
1440 /* XXX: potentially incorrect if dynamic npc */
1441 static void do_branch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
1442 {
1443 unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1444 target_ulong target = dc->pc + offset;
1445
1446 if (cond == 0x0) {
1447 /* unconditional not taken */
1448 if (a) {
1449 dc->pc = dc->npc + 4;
1450 dc->npc = dc->pc + 4;
1451 } else {
1452 dc->pc = dc->npc;
1453 dc->npc = dc->pc + 4;
1454 }
1455 } else if (cond == 0x8) {
1456 /* unconditional taken */
1457 if (a) {
1458 dc->pc = target;
1459 dc->npc = dc->pc + 4;
1460 } else {
1461 dc->pc = dc->npc;
1462 dc->npc = target;
1463 }
1464 } else {
1465 flush_T2(dc);
1466 gen_cond(cpu_T[2], cc, cond);
1467 if (a) {
1468 gen_branch_a(dc, target, dc->npc, cpu_T[2]);
1469 dc->is_br = 1;
1470 } else {
1471 dc->pc = dc->npc;
1472 dc->jump_pc[0] = target;
1473 dc->jump_pc[1] = dc->npc + 4;
1474 dc->npc = JUMP_PC;
1475 }
1476 }
1477 }
1478
/* XXX: potentially incorrect if dynamic npc */
/* Translate an FBfcc/FBPfcc instruction.  Structurally identical to
   do_branch above, but the condition is evaluated from the FSR fcc
   field selected by `cc` via gen_fcond. */
static void do_fbranch(DisasContext * dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_T2(dc);
        gen_fcond(cpu_T[2], cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, cpu_T[2]);
            dc->is_br = 1;
        } else {
            /* defer the npc choice: record both candidates */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1517
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 BPr (branch on register contents) instruction; the
   register value has already been loaded into T0 by the caller.  BPr
   has no unconditional encodings, so the condition is always emitted. */
static void do_branch_reg(DisasContext * dc, int32_t offset, uint32_t insn)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_T2(dc);
    gen_cond_reg(cpu_T[2], cond);
    if (a) {
        gen_branch_a(dc, target, dc->npc, cpu_T[2]);
        dc->is_br = 1;
    } else {
        /* defer the npc choice: record both candidates */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
1537
/* Per-fcc-field helper tables for the V9 fcmp* instructions: index n
   selects the helper that writes its result into FCCn.  The quad
   (128-bit) variants are only built for user mode. */
static GenOpFunc * const gen_fcmps[4] = {
    helper_fcmps,
    helper_fcmps_fcc1,
    helper_fcmps_fcc2,
    helper_fcmps_fcc3,
};

static GenOpFunc * const gen_fcmpd[4] = {
    helper_fcmpd,
    helper_fcmpd_fcc1,
    helper_fcmpd_fcc2,
    helper_fcmpd_fcc3,
};

#if defined(CONFIG_USER_ONLY)
static GenOpFunc * const gen_fcmpq[4] = {
    helper_fcmpq,
    helper_fcmpq_fcc1,
    helper_fcmpq_fcc2,
    helper_fcmpq_fcc3,
};
#endif

/* "e" variants additionally signal on quiet NaNs (compare-and-exception) */
static GenOpFunc * const gen_fcmpes[4] = {
    helper_fcmpes,
    helper_fcmpes_fcc1,
    helper_fcmpes_fcc2,
    helper_fcmpes_fcc3,
};

static GenOpFunc * const gen_fcmped[4] = {
    helper_fcmped,
    helper_fcmped_fcc1,
    helper_fcmped_fcc2,
    helper_fcmped_fcc3,
};

#if defined(CONFIG_USER_ONLY)
static GenOpFunc * const gen_fcmpeq[4] = {
    helper_fcmpeq,
    helper_fcmpeq_fcc1,
    helper_fcmpeq_fcc2,
    helper_fcmpeq_fcc3,
};
#endif
1583
/* fcmp* emission wrappers: on sparc64 the fccno argument selects the
   destination FCC field via the tables above; on sparc32 only fcc0
   exists, so fccno is ignored and the base helper is always called. */
static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmps[fccno]);
}

static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpd[fccno]);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpq[fccno]);
}
#endif

static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpes[fccno]);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmped[fccno]);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
}
#endif

#else

/* sparc32: single fcc field, fccno unused */
static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmps);
}

static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpd);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpq);
}
#endif

static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpes);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmped);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpeq);
}
#endif

#endif
1655
/* Record the given ftt cause in the FSR (replacing any previous FTT
   value) and raise a floating-point exception trap. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~FSR_FTT_MASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_op_exception(TT_FP_EXCP);
}
1662
/* If the FPU is disabled (system emulation only), emit an fp-disabled
   trap, end the TB and return 1 so the caller skips the instruction.
   Returns 0 when translation may proceed; in user mode the FPU is
   always considered enabled. */
static int gen_trap_ifnofpu(DisasContext * dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        save_state(dc);
        gen_op_exception(TT_NFPU_INSN);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
1675
/* Clear the FSR current-exception (cexc) and trap-type (ftt) fields
   before executing an FPop. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~(FSR_FTT_MASK | FSR_CEXC_MASK));
}
1680
/* Reset the softfloat accrued-exception state via a helper call. */
static inline void gen_clear_float_exceptions(void)
{
    tcg_gen_helper_0_0(helper_clear_float_exceptions);
}
1685
/* Emit an alignment check: the helper traps when (r_addr & align) != 0,
   so `align` is a mask of the low bits that must be clear. */
static inline void gen_check_align(TCGv r_addr, int align)
{
    tcg_gen_helper_0_2(helper_check_align, r_addr, tcg_const_i32(align));
}
1690
/* Check T0 for 2-byte (halfword) alignment. */
static inline void gen_op_check_align_T0_1(void)
{
    gen_check_align(cpu_T[0], 1);
}
1695
/* Check T0 for 4-byte (word) alignment. */
static inline void gen_op_check_align_T0_3(void)
{
    gen_check_align(cpu_T[0], 3);
}
1700
/* Check T0 for 8-byte (doubleword) alignment. */
static inline void gen_op_check_align_T0_7(void)
{
    gen_check_align(cpu_T[0], 7);
}
1705
/* asi moves */
#ifdef TARGET_SPARC64
/* Return a TCG value holding the ASI for this memory instruction.
   Immediate form (i = 1): the ASI comes from the %asi register and the
   13-bit simm becomes an extra displacement added to r_addr in place.
   Register form: the 8-bit asi field of the instruction is used. */
static inline TCGv gen_get_asi(int insn, TCGv r_addr)
{
    int asi, offset;
    TCGv r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new(TCG_TYPE_I32);
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(r_addr, r_addr, offset);
        tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
1724
/* Load `size` bytes (sign-extended when `sign` is set) from the
   alternate space at address T0 into T1 via the ld_asi helper. */
static inline void gen_ld_asi(int insn, int size, int sign)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, cpu_T[0]);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_T[1], cpu_T[0], r_asi,
                       tcg_const_i32(size), tcg_const_i32(sign));
    tcg_gen_discard_i32(r_asi);
}
1734
/* Store the low `size` bytes of T1 to the alternate space at address T0
   via the st_asi helper. */
static inline void gen_st_asi(int insn, int size)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, cpu_T[0]);
    tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], r_asi,
                       tcg_const_i32(size));
    tcg_gen_discard_i32(r_asi);
}
1744
/* FP load from an alternate space: the helper writes the loaded data
   directly into FP register `rd`; `size` is in bytes. */
static inline void gen_ldf_asi(int insn, int size, int rd)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, cpu_T[0]);
    tcg_gen_helper_0_4(helper_ldf_asi, cpu_T[0], r_asi, tcg_const_i32(size),
                       tcg_const_i32(rd));
    tcg_gen_discard_i32(r_asi);
}
1754
/* FP store to an alternate space: the helper reads FP register `rd`
   and stores `size` bytes at address T0. */
static inline void gen_stf_asi(int insn, int size, int rd)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, cpu_T[0]);
    tcg_gen_helper_0_4(helper_stf_asi, cpu_T[0], r_asi, tcg_const_i32(size),
                       tcg_const_i32(rd));
    tcg_gen_discard_i32(r_asi);
}
1764
/* SWAPA: atomically (at the instruction level) load the old 32-bit
   value at [T0] into a temp, store T1 there, then return the old value
   in T1. */
static inline void gen_swap_asi(int insn)
{
    TCGv r_temp, r_asi;

    r_temp = tcg_temp_new(TCG_TYPE_I32);
    r_asi = gen_get_asi(insn, cpu_T[0]);
    tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], r_asi,
                       tcg_const_i32(4), tcg_const_i32(0));
    tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], r_temp, r_asi,
                       tcg_const_i32(4));
    tcg_gen_extu_i32_tl(cpu_T[1], r_temp);
    tcg_gen_discard_i32(r_asi);
    tcg_gen_discard_i32(r_temp);
}
1779
/* LDDA: load 8 bytes from the alternate space and split them into two
   32-bit halves -- low word into T0, high word into T1. */
static inline void gen_ldda_asi(int insn)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, cpu_T[0]);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, cpu_T[0], r_asi,
                       tcg_const_i32(8), tcg_const_i32(0));
    tcg_gen_andi_i64(cpu_T[0], cpu_tmp64, 0xffffffffULL);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_andi_i64(cpu_T[1], cpu_tmp64, 0xffffffffULL);
    tcg_gen_discard_i32(r_asi);
}
1792
/* STDA: pack T1 and register rd+1 into a 64-bit value with
   helper_pack64 and store the 8 bytes to the alternate space at [T0]. */
static inline void gen_stda_asi(int insn, int rd)
{
    TCGv r_temp, r_asi;

    r_temp = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_T[1],
                       r_temp);
    r_asi = gen_get_asi(insn, cpu_T[0]);
    tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_tmp64, r_asi,
                       tcg_const_i32(8));
    tcg_gen_discard_i32(r_asi);
    tcg_gen_discard_i32(r_temp);
}
1807
/* CASA: 32-bit compare-and-swap in the alternate space.  The helper
   compares [T0] with the value of register rd, conditionally stores T1,
   and returns the previous memory value in T1. */
static inline void gen_cas_asi(int insn, int rd)
{
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, cpu_T[0]);
    tcg_gen_helper_1_4(helper_cas_asi, cpu_T[1], cpu_T[0], r_val1, cpu_T[1],
                       r_asi);
    tcg_gen_discard_i32(r_asi);
    tcg_gen_discard_i32(r_val1);
}
1820
/* CASXA: 64-bit variant of gen_cas_asi; the comparison value (register
   rd) goes through cpu_tmp64. */
static inline void gen_casx_asi(int insn, int rd)
{
    TCGv r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, cpu_T[0]);
    tcg_gen_helper_1_4(helper_casx_asi, cpu_T[1], cpu_T[0], cpu_tmp64, cpu_T[1],
                       r_asi);
    tcg_gen_discard_i32(r_asi);
}
1831
#elif !defined(CONFIG_USER_ONLY)

/* sparc32 system emulation: the ASI always comes from the 8-bit
   instruction field (no %asi register, no immediate displacement). */

/* Load `size` bytes (sign-extended when `sign` is set) from the
   alternate space at [T0] into T1; the helper returns 64 bits which are
   truncated to the target width. */
static inline void gen_ld_asi(int insn, int size, int sign)
{
    int asi;

    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, cpu_T[0], tcg_const_i32(asi),
                       tcg_const_i32(size), tcg_const_i32(sign));
    tcg_gen_trunc_i64_tl(cpu_T[1], cpu_tmp64);
}
1843
/* Store the low `size` bytes of T1 (zero-extended to 64 bits for the
   helper) to the alternate space at [T0]. */
static inline void gen_st_asi(int insn, int size)
{
    int asi;

    tcg_gen_extu_tl_i64(cpu_tmp64, cpu_T[1]);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_tmp64, tcg_const_i32(asi),
                       tcg_const_i32(size));
}
1853
/* SWAPA (sparc32): load the old 32-bit value at [T0] into a temp, store
   T1 there, then return the old value in T1. */
static inline void gen_swap_asi(int insn)
{
    int asi;
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_I32);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_1_4(helper_ld_asi, r_temp, cpu_T[0], tcg_const_i32(asi),
                       tcg_const_i32(4), tcg_const_i32(0));
    tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_T[1], tcg_const_i32(asi),
                       tcg_const_i32(4));
    tcg_gen_extu_i32_tl(cpu_T[1], r_temp);
    tcg_gen_discard_i32(r_temp);
}
1868
/* LDDA (sparc32): load 8 bytes from the alternate space and split them
   into two 32-bit halves -- low word into T0, high word into T1. */
static inline void gen_ldda_asi(int insn)
{
    int asi;

    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, cpu_T[0], tcg_const_i32(asi),
                       tcg_const_i32(8), tcg_const_i32(0));
    tcg_gen_trunc_i64_tl(cpu_T[0], cpu_tmp64);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(cpu_T[1], cpu_tmp64);
}
1880
/* STDA (sparc32): pack T1 and register rd+1 into 64 bits with
   helper_pack64 and store the 8 bytes at [T0]. */
static inline void gen_stda_asi(int insn, int rd)
{
    int asi;
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_T[1], r_temp);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], cpu_tmp64, tcg_const_i32(asi),
                       tcg_const_i32(8));
}
#endif
1894
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the byte at [T0] into T1, then store 0xff back to the
   same location.
   NOTE(review): on sparc64 the store below always takes the ASI from
   the instruction field, while gen_ld_asi goes through gen_get_asi and
   honors the immediate form's %asi register (and adjusts the address);
   the two halves look inconsistent for the i=1 encoding -- confirm. */
static inline void gen_ldstub_asi(int insn)
{
    int asi;

    gen_ld_asi(insn, 1, 0);

    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_0_4(helper_st_asi, cpu_T[0], tcg_const_i64(0xffULL),
                       tcg_const_i32(asi), tcg_const_i32(1));
}
#endif
1907
1908 /* before an instruction, dc->pc must be static */
1909 static void disas_sparc_insn(DisasContext * dc)
1910 {
1911 unsigned int insn, opc, rs1, rs2, rd;
1912
1913 insn = ldl_code(dc->pc);
1914 opc = GET_FIELD(insn, 0, 1);
1915
1916 rd = GET_FIELD(insn, 2, 6);
1917 switch (opc) {
1918 case 0: /* branches/sethi */
1919 {
1920 unsigned int xop = GET_FIELD(insn, 7, 9);
1921 int32_t target;
1922 switch (xop) {
1923 #ifdef TARGET_SPARC64
1924 case 0x1: /* V9 BPcc */
1925 {
1926 int cc;
1927
1928 target = GET_FIELD_SP(insn, 0, 18);
1929 target = sign_extend(target, 18);
1930 target <<= 2;
1931 cc = GET_FIELD_SP(insn, 20, 21);
1932 if (cc == 0)
1933 do_branch(dc, target, insn, 0);
1934 else if (cc == 2)
1935 do_branch(dc, target, insn, 1);
1936 else
1937 goto illegal_insn;
1938 goto jmp_insn;
1939 }
1940 case 0x3: /* V9 BPr */
1941 {
1942 target = GET_FIELD_SP(insn, 0, 13) |
1943 (GET_FIELD_SP(insn, 20, 21) << 14);
1944 target = sign_extend(target, 16);
1945 target <<= 2;
1946 rs1 = GET_FIELD(insn, 13, 17);
1947 gen_movl_reg_T0(rs1);
1948 do_branch_reg(dc, target, insn);
1949 goto jmp_insn;
1950 }
1951 case 0x5: /* V9 FBPcc */
1952 {
1953 int cc = GET_FIELD_SP(insn, 20, 21);
1954 if (gen_trap_ifnofpu(dc))
1955 goto jmp_insn;
1956 target = GET_FIELD_SP(insn, 0, 18);
1957 target = sign_extend(target, 19);
1958 target <<= 2;
1959 do_fbranch(dc, target, insn, cc);
1960 goto jmp_insn;
1961 }
1962 #else
1963 case 0x7: /* CBN+x */
1964 {
1965 goto ncp_insn;
1966 }
1967 #endif
1968 case 0x2: /* BN+x */
1969 {
1970 target = GET_FIELD(insn, 10, 31);
1971 target = sign_extend(target, 22);
1972 target <<= 2;
1973 do_branch(dc, target, insn, 0);
1974 goto jmp_insn;
1975 }
1976 case 0x6: /* FBN+x */
1977 {
1978 if (gen_trap_ifnofpu(dc))
1979 goto jmp_insn;
1980 target = GET_FIELD(insn, 10, 31);
1981 target = sign_extend(target, 22);
1982 target <<= 2;
1983 do_fbranch(dc, target, insn, 0);
1984 goto jmp_insn;
1985 }
1986 case 0x4: /* SETHI */
1987 #define OPTIM
1988 #if defined(OPTIM)
1989 if (rd) { // nop
1990 #endif
1991 uint32_t value = GET_FIELD(insn, 10, 31);
1992 tcg_gen_movi_tl(cpu_T[0], value << 10);
1993 gen_movl_T0_reg(rd);
1994 #if defined(OPTIM)
1995 }
1996 #endif
1997 break;
1998 case 0x0: /* UNIMPL */
1999 default:
2000 goto illegal_insn;
2001 }
2002 break;
2003 }
2004 break;
2005 case 1:
2006 /*CALL*/ {
2007 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2008
2009 gen_movl_TN_reg(15, tcg_const_tl(dc->pc));
2010 target += dc->pc;
2011 gen_mov_pc_npc(dc);
2012 dc->npc = target;
2013 }
2014 goto jmp_insn;
2015 case 2: /* FPU & Logical Operations */
2016 {
2017 unsigned int xop = GET_FIELD(insn, 7, 12);
2018 if (xop == 0x3a) { /* generate trap */
2019 int cond;
2020
2021 rs1 = GET_FIELD(insn, 13, 17);
2022 gen_movl_reg_T0(rs1);
2023 if (IS_IMM) {
2024 rs2 = GET_FIELD(insn, 25, 31);
2025 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], rs2);
2026 } else {
2027 rs2 = GET_FIELD(insn, 27, 31);
2028 #if defined(OPTIM)
2029 if (rs2 != 0) {
2030 #endif
2031 gen_movl_reg_T1(rs2);
2032 gen_op_add_T1_T0();
2033 #if defined(OPTIM)
2034 }
2035 #endif
2036 }
2037 cond = GET_FIELD(insn, 3, 6);
2038 if (cond == 0x8) {
2039 save_state(dc);
2040 tcg_gen_helper_0_1(helper_trap, cpu_T[0]);
2041 } else if (cond != 0) {
2042 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2043 #ifdef TARGET_SPARC64
2044 /* V9 icc/xcc */
2045 int cc = GET_FIELD_SP(insn, 11, 12);
2046
2047 save_state(dc);
2048 if (cc == 0)
2049 gen_cond(r_cond, 0, cond);
2050 else if (cc == 2)
2051 gen_cond(r_cond, 1, cond);
2052 else
2053 goto illegal_insn;
2054 #else
2055 save_state(dc);
2056 gen_cond(r_cond, 0, cond);
2057 #endif
2058 tcg_gen_helper_0_2(helper_trapcc, cpu_T[0], r_cond);
2059 tcg_gen_discard_tl(r_cond);
2060 }
2061 gen_op_next_insn();
2062 tcg_gen_exit_tb(0);
2063 dc->is_br = 1;
2064 goto jmp_insn;
2065 } else if (xop == 0x28) {
2066 rs1 = GET_FIELD(insn, 13, 17);
2067 switch(rs1) {
2068 case 0: /* rdy */
2069 #ifndef TARGET_SPARC64
2070 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2071 manual, rdy on the microSPARC
2072 II */
2073 case 0x0f: /* stbar in the SPARCv8 manual,
2074 rdy on the microSPARC II */
2075 case 0x10 ... 0x1f: /* implementation-dependent in the
2076 SPARCv8 manual, rdy on the
2077 microSPARC II */
2078 #endif
2079 gen_op_movtl_T0_env(offsetof(CPUSPARCState, y));
2080 gen_movl_T0_reg(rd);
2081 break;
2082 #ifdef TARGET_SPARC64
2083 case 0x2: /* V9 rdccr */
2084 tcg_gen_helper_1_0(helper_rdccr, cpu_T[0]);
2085 gen_movl_T0_reg(rd);
2086 break;
2087 case 0x3: /* V9 rdasi */
2088 gen_op_movl_T0_env(offsetof(CPUSPARCState, asi));
2089 gen_movl_T0_reg(rd);
2090 break;
2091 case 0x4: /* V9 rdtick */
2092 {
2093 TCGv r_tickptr;
2094
2095 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2096 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2097 offsetof(CPUState, tick));
2098 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2099 r_tickptr);
2100 gen_movl_T0_reg(rd);
2101 tcg_gen_discard_ptr(r_tickptr);
2102 }
2103 break;
2104 case 0x5: /* V9 rdpc */
2105 tcg_gen_movi_tl(cpu_T[0], dc->pc);
2106 gen_movl_T0_reg(rd);
2107 break;
2108 case 0x6: /* V9 rdfprs */
2109 gen_op_movl_T0_env(offsetof(CPUSPARCState, fprs));
2110 gen_movl_T0_reg(rd);
2111 break;
2112 case 0xf: /* V9 membar */
2113 break; /* no effect */
2114 case 0x13: /* Graphics Status */
2115 if (gen_trap_ifnofpu(dc))
2116 goto jmp_insn;
2117 gen_op_movtl_T0_env(offsetof(CPUSPARCState, gsr));
2118 gen_movl_T0_reg(rd);
2119 break;
2120 case 0x17: /* Tick compare */
2121 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tick_cmpr));
2122 gen_movl_T0_reg(rd);
2123 break;
2124 case 0x18: /* System tick */
2125 {
2126 TCGv r_tickptr;
2127
2128 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2129 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2130 offsetof(CPUState, stick));
2131 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2132 r_tickptr);
2133 gen_movl_T0_reg(rd);
2134 tcg_gen_discard_ptr(r_tickptr);
2135 }
2136 break;
2137 case 0x19: /* System tick compare */
2138 gen_op_movtl_T0_env(offsetof(CPUSPARCState, stick_cmpr));
2139 gen_movl_T0_reg(rd);
2140 break;
2141 case 0x10: /* Performance Control */
2142 case 0x11: /* Performance Instrumentation Counter */
2143 case 0x12: /* Dispatch Control */
2144 case 0x14: /* Softint set, WO */
2145 case 0x15: /* Softint clear, WO */
2146 case 0x16: /* Softint write */
2147 #endif
2148 default:
2149 goto illegal_insn;
2150 }
2151 #if !defined(CONFIG_USER_ONLY)
2152 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2153 #ifndef TARGET_SPARC64
2154 if (!supervisor(dc))
2155 goto priv_insn;
2156 tcg_gen_helper_1_0(helper_rdpsr, cpu_T[0]);
2157 #else
2158 if (!hypervisor(dc))
2159 goto priv_insn;
2160 rs1 = GET_FIELD(insn, 13, 17);
2161 switch (rs1) {
2162 case 0: // hpstate
2163 // gen_op_rdhpstate();
2164 break;
2165 case 1: // htstate
2166 // gen_op_rdhtstate();
2167 break;
2168 case 3: // hintp
2169 gen_op_movl_T0_env(offsetof(CPUSPARCState, hintp));
2170 break;
2171 case 5: // htba
2172 gen_op_movl_T0_env(offsetof(CPUSPARCState, htba));
2173 break;
2174 case 6: // hver
2175 gen_op_movl_T0_env(offsetof(CPUSPARCState, hver));
2176 break;
2177 case 31: // hstick_cmpr
2178 gen_op_movl_env_T0(offsetof(CPUSPARCState, hstick_cmpr));
2179 break;
2180 default:
2181 goto illegal_insn;
2182 }
2183 #endif
2184 gen_movl_T0_reg(rd);
2185 break;
2186 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2187 if (!supervisor(dc))
2188 goto priv_insn;
2189 #ifdef TARGET_SPARC64
2190 rs1 = GET_FIELD(insn, 13, 17);
2191 switch (rs1) {
2192 case 0: // tpc
2193 {
2194 TCGv r_tsptr;
2195
2196 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2197 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2198 offsetof(CPUState, tsptr));
2199 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2200 offsetof(trap_state, tpc));
2201 tcg_gen_discard_ptr(r_tsptr);
2202 }
2203 break;
2204 case 1: // tnpc
2205 {
2206 TCGv r_tsptr;
2207
2208 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2209 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2210 offsetof(CPUState, tsptr));
2211 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2212 offsetof(trap_state, tnpc));
2213 tcg_gen_discard_ptr(r_tsptr);
2214 }
2215 break;
2216 case 2: // tstate
2217 {
2218 TCGv r_tsptr;
2219
2220 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2221 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2222 offsetof(CPUState, tsptr));
2223 tcg_gen_ld_tl(cpu_T[0], r_tsptr,
2224 offsetof(trap_state, tstate));
2225 tcg_gen_discard_ptr(r_tsptr);
2226 }
2227 break;
2228 case 3: // tt
2229 {
2230 TCGv r_tsptr;
2231
2232 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2233 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2234 offsetof(CPUState, tsptr));
2235 tcg_gen_ld_i32(cpu_T[0], r_tsptr,
2236 offsetof(trap_state, tt));
2237 tcg_gen_discard_ptr(r_tsptr);
2238 }
2239 break;
2240 case 4: // tick
2241 {
2242 TCGv r_tickptr;
2243
2244 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2245 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2246 offsetof(CPUState, tick));
2247 tcg_gen_helper_1_1(helper_tick_get_count, cpu_T[0],
2248 r_tickptr);
2249 gen_movl_T0_reg(rd);
2250 tcg_gen_discard_ptr(r_tickptr);
2251 }
2252 break;
2253 case 5: // tba
2254 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
2255 break;
2256 case 6: // pstate
2257 gen_op_movl_T0_env(offsetof(CPUSPARCState, pstate));
2258 break;
2259 case 7: // tl
2260 gen_op_movl_T0_env(offsetof(CPUSPARCState, tl));
2261 break;
2262 case 8: // pil
2263 gen_op_movl_T0_env(offsetof(CPUSPARCState, psrpil));
2264 break;
2265 case 9: // cwp
2266 tcg_gen_helper_1_0(helper_rdcwp, cpu_T[0]);
2267 break;
2268 case 10: // cansave
2269 gen_op_movl_T0_env(offsetof(CPUSPARCState, cansave));
2270 break;
2271 case 11: // canrestore
2272 gen_op_movl_T0_env(offsetof(CPUSPARCState, canrestore));
2273 break;
2274 case 12: // cleanwin
2275 gen_op_movl_T0_env(offsetof(CPUSPARCState, cleanwin));
2276 break;
2277 case 13: // otherwin
2278 gen_op_movl_T0_env(offsetof(CPUSPARCState, otherwin));
2279 break;
2280 case 14: // wstate
2281 gen_op_movl_T0_env(offsetof(CPUSPARCState, wstate));
2282 break;
2283 case 16: // UA2005 gl
2284 gen_op_movl_T0_env(offsetof(CPUSPARCState, gl));
2285 break;
2286 case 26: // UA2005 strand status
2287 if (!hypervisor(dc))
2288 goto priv_insn;
2289 gen_op_movl_T0_env(offsetof(CPUSPARCState, ssr));
2290 break;
2291 case 31: // ver
2292 gen_op_movtl_T0_env(offsetof(CPUSPARCState, version));
2293 break;
2294 case 15: // fq
2295 default:
2296 goto illegal_insn;
2297 }
2298 #else
2299 gen_op_movl_T0_env(offsetof(CPUSPARCState, wim));
2300 #endif
2301 gen_movl_T0_reg(rd);
2302 break;
2303 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2304 #ifdef TARGET_SPARC64
2305 tcg_gen_helper_0_0(helper_flushw);
2306 #else
2307 if (!supervisor(dc))
2308 goto priv_insn;
2309 gen_op_movtl_T0_env(offsetof(CPUSPARCState, tbr));
2310 gen_movl_T0_reg(rd);
2311 #endif
2312 break;
2313 #endif
2314 } else if (xop == 0x34) { /* FPU Operations */
2315 if (gen_trap_ifnofpu(dc))
2316 goto jmp_insn;
2317 gen_op_clear_ieee_excp_and_FTT();
2318 rs1 = GET_FIELD(insn, 13, 17);
2319 rs2 = GET_FIELD(insn, 27, 31);
2320 xop = GET_FIELD(insn, 18, 26);
2321 switch (xop) {
2322 case 0x1: /* fmovs */
2323 gen_op_load_fpr_FT0(rs2);
2324 gen_op_store_FT0_fpr(rd);
2325 break;
2326 case 0x5: /* fnegs */
2327 gen_op_load_fpr_FT1(rs2);
2328 tcg_gen_helper_0_0(helper_fnegs);
2329 gen_op_store_FT0_fpr(rd);
2330 break;
2331 case 0x9: /* fabss */
2332 gen_op_load_fpr_FT1(rs2);
2333 tcg_gen_helper_0_0(helper_fabss);
2334 gen_op_store_FT0_fpr(rd);
2335 break;
2336 case 0x29: /* fsqrts */
2337 gen_op_load_fpr_FT1(rs2);
2338 gen_clear_float_exceptions();
2339 tcg_gen_helper_0_0(helper_fsqrts);
2340 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2341 gen_op_store_FT0_fpr(rd);
2342 break;
2343 case 0x2a: /* fsqrtd */
2344 gen_op_load_fpr_DT1(DFPREG(rs2));
2345 gen_clear_float_exceptions();
2346 tcg_gen_helper_0_0(helper_fsqrtd);
2347 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2348 gen_op_store_DT0_fpr(DFPREG(rd));
2349 break;
2350 case 0x2b: /* fsqrtq */
2351 #if defined(CONFIG_USER_ONLY)
2352 gen_op_load_fpr_QT1(QFPREG(rs2));
2353 gen_clear_float_exceptions();
2354 tcg_gen_helper_0_0(helper_fsqrtq);
2355 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2356 gen_op_store_QT0_fpr(QFPREG(rd));
2357 break;
2358 #else
2359 goto nfpu_insn;
2360 #endif
2361 case 0x41:
2362 gen_op_load_fpr_FT0(rs1);
2363 gen_op_load_fpr_FT1(rs2);
2364 gen_clear_float_exceptions();
2365 tcg_gen_helper_0_0(helper_fadds);
2366 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2367 gen_op_store_FT0_fpr(rd);
2368 break;
2369 case 0x42:
2370 gen_op_load_fpr_DT0(DFPREG(rs1));
2371 gen_op_load_fpr_DT1(DFPREG(rs2));
2372 gen_clear_float_exceptions();
2373 tcg_gen_helper_0_0(helper_faddd);
2374 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2375 gen_op_store_DT0_fpr(DFPREG(rd));
2376 break;
2377 case 0x43: /* faddq */
2378 #if defined(CONFIG_USER_ONLY)
2379 gen_op_load_fpr_QT0(QFPREG(rs1));
2380 gen_op_load_fpr_QT1(QFPREG(rs2));
2381 gen_clear_float_exceptions();
2382 tcg_gen_helper_0_0(helper_faddq);
2383 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2384 gen_op_store_QT0_fpr(QFPREG(rd));
2385 break;
2386 #else
2387 goto nfpu_insn;
2388 #endif
2389 case 0x45:
2390 gen_op_load_fpr_FT0(rs1);
2391 gen_op_load_fpr_FT1(rs2);
2392 gen_clear_float_exceptions();
2393 tcg_gen_helper_0_0(helper_fsubs);
2394 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2395 gen_op_store_FT0_fpr(rd);
2396 break;
2397 case 0x46:
2398 gen_op_load_fpr_DT0(DFPREG(rs1));
2399 gen_op_load_fpr_DT1(DFPREG(rs2));
2400 gen_clear_float_exceptions();
2401 tcg_gen_helper_0_0(helper_fsubd);
2402 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2403 gen_op_store_DT0_fpr(DFPREG(rd));
2404 break;
2405 case 0x47: /* fsubq */
2406 #if defined(CONFIG_USER_ONLY)
2407 gen_op_load_fpr_QT0(QFPREG(rs1));
2408 gen_op_load_fpr_QT1(QFPREG(rs2));
2409 gen_clear_float_exceptions();
2410 tcg_gen_helper_0_0(helper_fsubq);
2411 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2412 gen_op_store_QT0_fpr(QFPREG(rd));
2413 break;
2414 #else
2415 goto nfpu_insn;
2416 #endif
2417 case 0x49:
2418 gen_op_load_fpr_FT0(rs1);
2419 gen_op_load_fpr_FT1(rs2);
2420 gen_clear_float_exceptions();
2421 tcg_gen_helper_0_0(helper_fmuls);
2422 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2423 gen_op_store_FT0_fpr(rd);
2424 break;
2425 case 0x4a:
2426 gen_op_load_fpr_DT0(DFPREG(rs1));
2427 gen_op_load_fpr_DT1(DFPREG(rs2));
2428 gen_clear_float_exceptions();
2429 tcg_gen_helper_0_0(helper_fmuld);
2430 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2431 gen_op_store_DT0_fpr(DFPREG(rd));
2432 break;
2433 case 0x4b: /* fmulq */
2434 #if defined(CONFIG_USER_ONLY)
2435 gen_op_load_fpr_QT0(QFPREG(rs1));
2436 gen_op_load_fpr_QT1(QFPREG(rs2));
2437 gen_clear_float_exceptions();
2438 tcg_gen_helper_0_0(helper_fmulq);
2439 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2440 gen_op_store_QT0_fpr(QFPREG(rd));
2441 break;
2442 #else
2443 goto nfpu_insn;
2444 #endif
2445 case 0x4d:
2446 gen_op_load_fpr_FT0(rs1);
2447 gen_op_load_fpr_FT1(rs2);
2448 gen_clear_float_exceptions();
2449 tcg_gen_helper_0_0(helper_fdivs);
2450 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2451 gen_op_store_FT0_fpr(rd);
2452 break;
2453 case 0x4e:
2454 gen_op_load_fpr_DT0(DFPREG(rs1));
2455 gen_op_load_fpr_DT1(DFPREG(rs2));
2456 gen_clear_float_exceptions();
2457 tcg_gen_helper_0_0(helper_fdivd);
2458 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2459 gen_op_store_DT0_fpr(DFPREG(rd));
2460 break;
2461 case 0x4f: /* fdivq */
2462 #if defined(CONFIG_USER_ONLY)
2463 gen_op_load_fpr_QT0(QFPREG(rs1));
2464 gen_op_load_fpr_QT1(QFPREG(rs2));
2465 gen_clear_float_exceptions();
2466 tcg_gen_helper_0_0(helper_fdivq);
2467 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2468 gen_op_store_QT0_fpr(QFPREG(rd));
2469 break;
2470 #else
2471 goto nfpu_insn;
2472 #endif
2473 case 0x69:
2474 gen_op_load_fpr_FT0(rs1);
2475 gen_op_load_fpr_FT1(rs2);
2476 gen_clear_float_exceptions();
2477 tcg_gen_helper_0_0(helper_fsmuld);
2478 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2479 gen_op_store_DT0_fpr(DFPREG(rd));
2480 break;
2481 case 0x6e: /* fdmulq */
2482 #if defined(CONFIG_USER_ONLY)
2483 gen_op_load_fpr_DT0(DFPREG(rs1));
2484 gen_op_load_fpr_DT1(DFPREG(rs2));
2485 gen_clear_float_exceptions();
2486 tcg_gen_helper_0_0(helper_fdmulq);
2487 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2488 gen_op_store_QT0_fpr(QFPREG(rd));
2489 break;
2490 #else
2491 goto nfpu_insn;
2492 #endif
2493 case 0xc4:
2494 gen_op_load_fpr_FT1(rs2);
2495 gen_clear_float_exceptions();
2496 tcg_gen_helper_0_0(helper_fitos);
2497 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2498 gen_op_store_FT0_fpr(rd);
2499 break;
2500 case 0xc6:
2501 gen_op_load_fpr_DT1(DFPREG(rs2));
2502 gen_clear_float_exceptions();
2503 tcg_gen_helper_0_0(helper_fdtos);
2504 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2505 gen_op_store_FT0_fpr(rd);
2506 break;
2507 case 0xc7: /* fqtos */
2508 #if defined(CONFIG_USER_ONLY)
2509 gen_op_load_fpr_QT1(QFPREG(rs2));
2510 gen_clear_float_exceptions();
2511 tcg_gen_helper_0_0(helper_fqtos);
2512 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2513 gen_op_store_FT0_fpr(rd);
2514 break;
2515 #else
2516 goto nfpu_insn;
2517 #endif
2518 case 0xc8:
2519 gen_op_load_fpr_FT1(rs2);
2520 tcg_gen_helper_0_0(helper_fitod);
2521 gen_op_store_DT0_fpr(DFPREG(rd));
2522 break;
2523 case 0xc9:
2524 gen_op_load_fpr_FT1(rs2);
2525 tcg_gen_helper_0_0(helper_fstod);
2526 gen_op_store_DT0_fpr(DFPREG(rd));
2527 break;
2528 case 0xcb: /* fqtod */
2529 #if defined(CONFIG_USER_ONLY)
2530 gen_op_load_fpr_QT1(QFPREG(rs2));
2531 gen_clear_float_exceptions();
2532 tcg_gen_helper_0_0(helper_fqtod);
2533 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2534 gen_op_store_DT0_fpr(DFPREG(rd));
2535 break;
2536 #else
2537 goto nfpu_insn;
2538 #endif
2539 case 0xcc: /* fitoq */
2540 #if defined(CONFIG_USER_ONLY)
2541 gen_op_load_fpr_FT1(rs2);
2542 tcg_gen_helper_0_0(helper_fitoq);
2543 gen_op_store_QT0_fpr(QFPREG(rd));
2544 break;
2545 #else
2546 goto nfpu_insn;
2547 #endif
2548 case 0xcd: /* fstoq */
2549 #if defined(CONFIG_USER_ONLY)
2550 gen_op_load_fpr_FT1(rs2);
2551 tcg_gen_helper_0_0(helper_fstoq);
2552 gen_op_store_QT0_fpr(QFPREG(rd));
2553 break;
2554 #else
2555 goto nfpu_insn;
2556 #endif
2557 case 0xce: /* fdtoq */
2558 #if defined(CONFIG_USER_ONLY)
2559 gen_op_load_fpr_DT1(DFPREG(rs2));
2560 tcg_gen_helper_0_0(helper_fdtoq);
2561 gen_op_store_QT0_fpr(QFPREG(rd));
2562 break;
2563 #else
2564 goto nfpu_insn;
2565 #endif
2566 case 0xd1:
2567 gen_op_load_fpr_FT1(rs2);
2568 gen_clear_float_exceptions();
2569 tcg_gen_helper_0_0(helper_fstoi);
2570 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2571 gen_op_store_FT0_fpr(rd);
2572 break;
2573 case 0xd2:
2574 gen_op_load_fpr_DT1(DFPREG(rs2));
2575 gen_clear_float_exceptions();
2576 tcg_gen_helper_0_0(helper_fdtoi);
2577 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2578 gen_op_store_FT0_fpr(rd);
2579 break;
2580 case 0xd3: /* fqtoi */
2581 #if defined(CONFIG_USER_ONLY)
2582 gen_op_load_fpr_QT1(QFPREG(rs2));
2583 gen_clear_float_exceptions();
2584 tcg_gen_helper_0_0(helper_fqtoi);
2585 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2586 gen_op_store_FT0_fpr(rd);
2587 break;
2588 #else
2589 goto nfpu_insn;
2590 #endif
2591 #ifdef TARGET_SPARC64
2592 case 0x2: /* V9 fmovd */
2593 gen_op_load_fpr_DT0(DFPREG(rs2));
2594 gen_op_store_DT0_fpr(DFPREG(rd));
2595 break;
2596 case 0x3: /* V9 fmovq */
2597 #if defined(CONFIG_USER_ONLY)
2598 gen_op_load_fpr_QT0(QFPREG(rs2));
2599 gen_op_store_QT0_fpr(QFPREG(rd));
2600 break;
2601 #else
2602 goto nfpu_insn;
2603 #endif
2604 case 0x6: /* V9 fnegd */
2605 gen_op_load_fpr_DT1(DFPREG(rs2));
2606 tcg_gen_helper_0_0(helper_fnegd);
2607 gen_op_store_DT0_fpr(DFPREG(rd));
2608 break;
2609 case 0x7: /* V9 fnegq */
2610 #if defined(CONFIG_USER_ONLY)
2611 gen_op_load_fpr_QT1(QFPREG(rs2));
2612 tcg_gen_helper_0_0(helper_fnegq);
2613 gen_op_store_QT0_fpr(QFPREG(rd));
2614 break;
2615 #else
2616 goto nfpu_insn;
2617 #endif
2618 case 0xa: /* V9 fabsd */
2619 gen_op_load_fpr_DT1(DFPREG(rs2));
2620 tcg_gen_helper_0_0(helper_fabsd);
2621 gen_op_store_DT0_fpr(DFPREG(rd));
2622 break;
2623 case 0xb: /* V9 fabsq */
2624 #if defined(CONFIG_USER_ONLY)
2625 gen_op_load_fpr_QT1(QFPREG(rs2));
2626 tcg_gen_helper_0_0(helper_fabsq);
2627 gen_op_store_QT0_fpr(QFPREG(rd));
2628 break;
2629 #else
2630 goto nfpu_insn;
2631 #endif
2632 case 0x81: /* V9 fstox */
2633 gen_op_load_fpr_FT1(rs2);
2634 gen_clear_float_exceptions();
2635 tcg_gen_helper_0_0(helper_fstox);
2636 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2637 gen_op_store_DT0_fpr(DFPREG(rd));
2638 break;
2639 case 0x82: /* V9 fdtox */
2640 gen_op_load_fpr_DT1(DFPREG(rs2));
2641 gen_clear_float_exceptions();
2642 tcg_gen_helper_0_0(helper_fdtox);
2643 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2644 gen_op_store_DT0_fpr(DFPREG(rd));
2645 break;
2646 case 0x83: /* V9 fqtox */
2647 #if defined(CONFIG_USER_ONLY)
2648 gen_op_load_fpr_QT1(QFPREG(rs2));
2649 gen_clear_float_exceptions();
2650 tcg_gen_helper_0_0(helper_fqtox);
2651 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2652 gen_op_store_DT0_fpr(DFPREG(rd));
2653 break;
2654 #else
2655 goto nfpu_insn;
2656 #endif
2657 case 0x84: /* V9 fxtos */
2658 gen_op_load_fpr_DT1(DFPREG(rs2));
2659 gen_clear_float_exceptions();
2660 tcg_gen_helper_0_0(helper_fxtos);
2661 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2662 gen_op_store_FT0_fpr(rd);
2663 break;
2664 case 0x88: /* V9 fxtod */
2665 gen_op_load_fpr_DT1(DFPREG(rs2));
2666 gen_clear_float_exceptions();
2667 tcg_gen_helper_0_0(helper_fxtod);
2668 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2669 gen_op_store_DT0_fpr(DFPREG(rd));
2670 break;
2671 case 0x8c: /* V9 fxtoq */
2672 #if defined(CONFIG_USER_ONLY)
2673 gen_op_load_fpr_DT1(DFPREG(rs2));
2674 gen_clear_float_exceptions();
2675 tcg_gen_helper_0_0(helper_fxtoq);
2676 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2677 gen_op_store_QT0_fpr(QFPREG(rd));
2678 break;
2679 #else
2680 goto nfpu_insn;
2681 #endif
2682 #endif
2683 default:
2684 goto illegal_insn;
2685 }
2686 } else if (xop == 0x35) { /* FPU Operations */
2687 #ifdef TARGET_SPARC64
2688 int cond;
2689 #endif
2690 if (gen_trap_ifnofpu(dc))
2691 goto jmp_insn;
2692 gen_op_clear_ieee_excp_and_FTT();
2693 rs1 = GET_FIELD(insn, 13, 17);
2694 rs2 = GET_FIELD(insn, 27, 31);
2695 xop = GET_FIELD(insn, 18, 26);
2696 #ifdef TARGET_SPARC64
2697 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2698 int l1;
2699
2700 l1 = gen_new_label();
2701 cond = GET_FIELD_SP(insn, 14, 17);
2702 rs1 = GET_FIELD(insn, 13, 17);
2703 gen_movl_reg_T0(rs1);
2704 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0],
2705 tcg_const_tl(0), l1);
2706 gen_op_load_fpr_FT0(rs2);
2707 gen_op_store_FT0_fpr(rd);
2708 gen_set_label(l1);
2709 break;
2710 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2711 int l1;
2712
2713 l1 = gen_new_label();
2714 cond = GET_FIELD_SP(insn, 14, 17);
2715 rs1 = GET_FIELD(insn, 13, 17);
2716 gen_movl_reg_T0(rs1);
2717 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0],
2718 tcg_const_tl(0), l1);
2719 gen_op_load_fpr_DT0(DFPREG(rs2));
2720 gen_op_store_DT0_fpr(DFPREG(rd));
2721 gen_set_label(l1);
2722 break;
2723 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2724 #if defined(CONFIG_USER_ONLY)
2725 int l1;
2726
2727 l1 = gen_new_label();
2728 cond = GET_FIELD_SP(insn, 14, 17);
2729 rs1 = GET_FIELD(insn, 13, 17);
2730 gen_movl_reg_T0(rs1);
2731 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0],
2732 tcg_const_tl(0), l1);
2733 gen_op_load_fpr_QT0(QFPREG(rs2));
2734 gen_op_store_QT0_fpr(QFPREG(rd));
2735 gen_set_label(l1);
2736 break;
2737 #else
2738 goto nfpu_insn;
2739 #endif
2740 }
2741 #endif
2742 switch (xop) {
2743 #ifdef TARGET_SPARC64
2744 #define FMOVCC(size_FDQ, fcc) \
2745 { \
2746 TCGv r_cond; \
2747 int l1; \
2748 \
2749 l1 = gen_new_label(); \
2750 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2751 cond = GET_FIELD_SP(insn, 14, 17); \
2752 gen_fcond(r_cond, fcc, cond); \
2753 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, \
2754 tcg_const_tl(0), l1); \
2755 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2756 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2757 gen_set_label(l1); \
2758 tcg_gen_discard_tl(r_cond); \
2759 }
2760 case 0x001: /* V9 fmovscc %fcc0 */
2761 FMOVCC(F, 0);
2762 break;
2763 case 0x002: /* V9 fmovdcc %fcc0 */
2764 FMOVCC(D, 0);
2765 break;
2766 case 0x003: /* V9 fmovqcc %fcc0 */
2767 #if defined(CONFIG_USER_ONLY)
2768 FMOVCC(Q, 0);
2769 break;
2770 #else
2771 goto nfpu_insn;
2772 #endif
2773 case 0x041: /* V9 fmovscc %fcc1 */
2774 FMOVCC(F, 1);
2775 break;
2776 case 0x042: /* V9 fmovdcc %fcc1 */
2777 FMOVCC(D, 1);
2778 break;
2779 case 0x043: /* V9 fmovqcc %fcc1 */
2780 #if defined(CONFIG_USER_ONLY)
2781 FMOVCC(Q, 1);
2782 break;
2783 #else
2784 goto nfpu_insn;
2785 #endif
2786 case 0x081: /* V9 fmovscc %fcc2 */
2787 FMOVCC(F, 2);
2788 break;
2789 case 0x082: /* V9 fmovdcc %fcc2 */
2790 FMOVCC(D, 2);
2791 break;
2792 case 0x083: /* V9 fmovqcc %fcc2 */
2793 #if defined(CONFIG_USER_ONLY)
2794 FMOVCC(Q, 2);
2795 break;
2796 #else
2797 goto nfpu_insn;
2798 #endif
2799 case 0x0c1: /* V9 fmovscc %fcc3 */
2800 FMOVCC(F, 3);
2801 break;
2802 case 0x0c2: /* V9 fmovdcc %fcc3 */
2803 FMOVCC(D, 3);
2804 break;
2805 case 0x0c3: /* V9 fmovqcc %fcc3 */
2806 #if defined(CONFIG_USER_ONLY)
2807 FMOVCC(Q, 3);
2808 break;
2809 #else
2810 goto nfpu_insn;
2811 #endif
2812 #undef FMOVCC
2813 #define FMOVCC(size_FDQ, icc) \
2814 { \
2815 TCGv r_cond; \
2816 int l1; \
2817 \
2818 l1 = gen_new_label(); \
2819 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2820 cond = GET_FIELD_SP(insn, 14, 17); \
2821 gen_cond(r_cond, icc, cond); \
2822 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, \
2823 tcg_const_tl(0), l1); \
2824 glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
2825 glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
2826 gen_set_label(l1); \
2827 tcg_gen_discard_tl(r_cond); \
2828 }
2829
2830 case 0x101: /* V9 fmovscc %icc */
2831 FMOVCC(F, 0);
2832 break;
2833 case 0x102: /* V9 fmovdcc %icc */
2834 FMOVCC(D, 0);
2835 case 0x103: /* V9 fmovqcc %icc */
2836 #if defined(CONFIG_USER_ONLY)
2837 FMOVCC(D, 0);
2838 break;
2839 #else
2840 goto nfpu_insn;
2841 #endif
2842 case 0x181: /* V9 fmovscc %xcc */
2843 FMOVCC(F, 1);
2844 break;
2845 case 0x182: /* V9 fmovdcc %xcc */
2846 FMOVCC(D, 1);
2847 break;
2848 case 0x183: /* V9 fmovqcc %xcc */
2849 #if defined(CONFIG_USER_ONLY)
2850 FMOVCC(Q, 1);
2851 break;
2852 #else
2853 goto nfpu_insn;
2854 #endif
2855 #undef FMOVCC
2856 #endif
2857 case 0x51: /* fcmps, V9 %fcc */
2858 gen_op_load_fpr_FT0(rs1);
2859 gen_op_load_fpr_FT1(rs2);
2860 gen_op_fcmps(rd & 3);
2861 break;
2862 case 0x52: /* fcmpd, V9 %fcc */
2863 gen_op_load_fpr_DT0(DFPREG(rs1));
2864 gen_op_load_fpr_DT1(DFPREG(rs2));
2865 gen_op_fcmpd(rd & 3);
2866 break;
2867 case 0x53: /* fcmpq, V9 %fcc */
2868 #if defined(CONFIG_USER_ONLY)
2869 gen_op_load_fpr_QT0(QFPREG(rs1));
2870 gen_op_load_fpr_QT1(QFPREG(rs2));
2871 gen_op_fcmpq(rd & 3);
2872 break;
2873 #else /* !defined(CONFIG_USER_ONLY) */
2874 goto nfpu_insn;
2875 #endif
2876 case 0x55: /* fcmpes, V9 %fcc */
2877 gen_op_load_fpr_FT0(rs1);
2878 gen_op_load_fpr_FT1(rs2);
2879 gen_op_fcmpes(rd & 3);
2880 break;
2881 case 0x56: /* fcmped, V9 %fcc */
2882 gen_op_load_fpr_DT0(DFPREG(rs1));
2883 gen_op_load_fpr_DT1(DFPREG(rs2));
2884 gen_op_fcmped(rd & 3);
2885 break;
2886 case 0x57: /* fcmpeq, V9 %fcc */
2887 #if defined(CONFIG_USER_ONLY)
2888 gen_op_load_fpr_QT0(QFPREG(rs1));
2889 gen_op_load_fpr_QT1(QFPREG(rs2));
2890 gen_op_fcmpeq(rd & 3);
2891 break;
2892 #else/* !defined(CONFIG_USER_ONLY) */
2893 goto nfpu_insn;
2894 #endif
2895 default:
2896 goto illegal_insn;
2897 }
2898 #if defined(OPTIM)
2899 } else if (xop == 0x2) {
2900 // clr/mov shortcut
2901
2902 rs1 = GET_FIELD(insn, 13, 17);
2903 if (rs1 == 0) {
2904 // or %g0, x, y -> mov T0, x; mov y, T0
2905 if (IS_IMM) { /* immediate */
2906 rs2 = GET_FIELDs(insn, 19, 31);
2907 tcg_gen_movi_tl(cpu_T[0], (int)rs2);
2908 } else { /* register */
2909 rs2 = GET_FIELD(insn, 27, 31);
2910 gen_movl_reg_T0(rs2);
2911 }
2912 } else {
2913 gen_movl_reg_T0(rs1);
2914 if (IS_IMM) { /* immediate */
2915 rs2 = GET_FIELDs(insn, 19, 31);
2916 tcg_gen_ori_tl(cpu_T[0], cpu_T[0], (int)rs2);
2917 } else { /* register */
2918 // or x, %g0, y -> mov T1, x; mov y, T1
2919 rs2 = GET_FIELD(insn, 27, 31);
2920 if (rs2 != 0) {
2921 gen_movl_reg_T1(rs2);
2922 gen_op_or_T1_T0();
2923 }
2924 }
2925 }
2926 gen_movl_T0_reg(rd);
2927 #endif
2928 #ifdef TARGET_SPARC64
2929 } else if (xop == 0x25) { /* sll, V9 sllx */
2930 rs1 = GET_FIELD(insn, 13, 17);
2931 gen_movl_reg_T0(rs1);
2932 if (IS_IMM) { /* immediate */
2933 rs2 = GET_FIELDs(insn, 20, 31);
2934 if (insn & (1 << 12)) {
2935 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2936 } else {
2937 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2938 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2939 }
2940 } else { /* register */
2941 rs2 = GET_FIELD(insn, 27, 31);
2942 gen_movl_reg_T1(rs2);
2943 if (insn & (1 << 12)) {
2944 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2945 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2946 } else {
2947 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2948 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2949 tcg_gen_shl_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2950 }
2951 }
2952 gen_movl_T0_reg(rd);
2953 } else if (xop == 0x26) { /* srl, V9 srlx */
2954 rs1 = GET_FIELD(insn, 13, 17);
2955 gen_movl_reg_T0(rs1);
2956 if (IS_IMM) { /* immediate */
2957 rs2 = GET_FIELDs(insn, 20, 31);
2958 if (insn & (1 << 12)) {
2959 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2960 } else {
2961 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2962 tcg_gen_shri_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2963 }
2964 } else { /* register */
2965 rs2 = GET_FIELD(insn, 27, 31);
2966 gen_movl_reg_T1(rs2);
2967 if (insn & (1 << 12)) {
2968 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2969 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2970 } else {
2971 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2972 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2973 tcg_gen_shr_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2974 }
2975 }
2976 gen_movl_T0_reg(rd);
2977 } else if (xop == 0x27) { /* sra, V9 srax */
2978 rs1 = GET_FIELD(insn, 13, 17);
2979 gen_movl_reg_T0(rs1);
2980 if (IS_IMM) { /* immediate */
2981 rs2 = GET_FIELDs(insn, 20, 31);
2982 if (insn & (1 << 12)) {
2983 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x3f);
2984 } else {
2985 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2986 tcg_gen_ext_i32_i64(cpu_T[0], cpu_T[0]);
2987 tcg_gen_sari_i64(cpu_T[0], cpu_T[0], rs2 & 0x1f);
2988 }
2989 } else { /* register */
2990 rs2 = GET_FIELD(insn, 27, 31);
2991 gen_movl_reg_T1(rs2);
2992 if (insn & (1 << 12)) {
2993 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x3f);
2994 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2995 } else {
2996 tcg_gen_andi_i64(cpu_T[1], cpu_T[1], 0x1f);
2997 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], 0xffffffffULL);
2998 tcg_gen_sar_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2999 }
3000 }
3001 gen_movl_T0_reg(rd);
3002 #endif
3003 } else if (xop < 0x36) {
3004 rs1 = GET_FIELD(insn, 13, 17);
3005 gen_movl_reg_T0(rs1);
3006 if (IS_IMM) { /* immediate */
3007 rs2 = GET_FIELDs(insn, 19, 31);
3008 gen_movl_simm_T1(rs2);
3009 } else { /* register */
3010 rs2 = GET_FIELD(insn, 27, 31);
3011 gen_movl_reg_T1(rs2);
3012 }
3013 if (xop < 0x20) {
3014 switch (xop & ~0x10) {
3015 case 0x0:
3016 if (xop & 0x10)
3017 gen_op_add_T1_T0_cc();
3018 else
3019 gen_op_add_T1_T0();
3020 break;
3021 case 0x1:
3022 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3023 if (xop & 0x10)
3024 gen_op_logic_T0_cc();
3025 break;
3026 case 0x2:
3027 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3028 if (xop & 0x10)
3029 gen_op_logic_T0_cc();
3030 break;
3031 case 0x3:
3032 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3033 if (xop & 0x10)
3034 gen_op_logic_T0_cc();
3035 break;
3036 case 0x4:
3037 if (xop & 0x10)
3038 gen_op_sub_T1_T0_cc();
3039 else
3040 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3041 break;
3042 case 0x5:
3043 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3044 tcg_gen_and_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3045 if (xop & 0x10)
3046 gen_op_logic_T0_cc();
3047 break;
3048 case 0x6:
3049 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3050 tcg_gen_or_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3051 if (xop & 0x10)
3052 gen_op_logic_T0_cc();
3053 break;
3054 case 0x7:
3055 tcg_gen_xori_tl(cpu_T[1], cpu_T[1], -1);
3056 tcg_gen_xor_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3057 if (xop & 0x10)
3058 gen_op_logic_T0_cc();
3059 break;
3060 case 0x8:
3061 if (xop & 0x10)
3062 gen_op_addx_T1_T0_cc();
3063 else {
3064 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3065 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
3066 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3067 }
3068 break;
3069 #ifdef TARGET_SPARC64
3070 case 0x9: /* V9 mulx */
3071 tcg_gen_mul_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
3072 break;
3073 #endif
3074 case 0xa:
3075 gen_op_umul_T1_T0();
3076 if (xop & 0x10)
3077 gen_op_logic_T0_cc();
3078 break;
3079 case 0xb:
3080 gen_op_smul_T1_T0();
3081 if (xop & 0x10)
3082 gen_op_logic_T0_cc();
3083 break;
3084 case 0xc:
3085 if (xop & 0x10)
3086 gen_op_subx_T1_T0_cc();
3087 else {
3088 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3089 tcg_gen_add_tl(cpu_T[1], cpu_T[1], cpu_tmp0);
3090 tcg_gen_sub_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3091 }
3092 break;
3093 #ifdef TARGET_SPARC64
3094 case 0xd: /* V9 udivx */
3095 gen_trap_ifdivzero_tl(cpu_T[1]);
3096 tcg_gen_divu_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
3097 break;
3098 #endif
3099 case 0xe:
3100 gen_op_udiv_T1_T0();
3101 if (xop & 0x10)
3102 gen_op_div_cc();
3103 break;
3104 case 0xf:
3105 gen_op_sdiv_T1_T0();
3106 if (xop & 0x10)
3107 gen_op_div_cc();
3108 break;
3109 default:
3110 goto illegal_insn;
3111 }
3112 gen_movl_T0_reg(rd);
3113 } else {
3114 switch (xop) {
3115 case 0x20: /* taddcc */
3116 gen_op_tadd_T1_T0_cc();
3117 gen_movl_T0_reg(rd);
3118 break;
3119 case 0x21: /* tsubcc */
3120 gen_op_tsub_T1_T0_cc();
3121 gen_movl_T0_reg(rd);
3122 break;
3123 case 0x22: /* taddcctv */
3124 save_state(dc);
3125 gen_op_tadd_T1_T0_ccTV();
3126 gen_movl_T0_reg(rd);
3127 break;
3128 case 0x23: /* tsubcctv */
3129 save_state(dc);
3130 gen_op_tsub_T1_T0_ccTV();
3131 gen_movl_T0_reg(rd);
3132 break;
3133 case 0x24: /* mulscc */
3134 gen_op_mulscc_T1_T0();
3135 gen_movl_T0_reg(rd);
3136 break;
3137 #ifndef TARGET_SPARC64
3138 case 0x25: /* sll */
3139 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 0x1f);
3140 tcg_gen_shl_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3141 gen_movl_T0_reg(rd);
3142 break;
3143 case 0x26: /* srl */
3144 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 0x1f);
3145 tcg_gen_shr_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3146 gen_movl_T0_reg(rd);
3147 break;
3148 case 0x27: /* sra */
3149 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 0x1f);
3150 tcg_gen_sar_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
3151 gen_movl_T0_reg(rd);
3152 break;
3153 #endif
3154 case 0x30:
3155 {
3156 switch(rd) {
3157 case 0: /* wry */
3158 gen_op_xor_T1_T0();
3159 gen_op_movtl_env_T0(offsetof(CPUSPARCState, y));
3160 break;
3161 #ifndef TARGET_SPARC64
3162 case 0x01 ... 0x0f: /* undefined in the
3163 SPARCv8 manual, nop
3164 on the microSPARC
3165 II */
3166 case 0x10 ... 0x1f: /* implementation-dependent
3167 in the SPARCv8
3168 manual, nop on the
3169 microSPARC II */
3170 break;
3171 #else
3172 case 0x2: /* V9 wrccr */
3173 gen_op_xor_T1_T0();
3174 tcg_gen_helper_0_1(helper_wrccr, cpu_T[0]);
3175 break;
3176 case 0x3: /* V9 wrasi */
3177 gen_op_xor_T1_T0();
3178 gen_op_movl_env_T0(offsetof(CPUSPARCState, asi));
3179 break;
3180 case 0x6: /* V9 wrfprs */
3181 gen_op_xor_T1_T0();
3182 gen_op_movl_env_T0(offsetof(CPUSPARCState, fprs));
3183 save_state(dc);
3184 gen_op_next_insn();
3185 tcg_gen_exit_tb(0);
3186 dc->is_br = 1;
3187 break;
3188 case 0xf: /* V9 sir, nop if user */
3189 #if !defined(CONFIG_USER_ONLY)
3190 if (supervisor(dc))
3191 ; // XXX
3192 #endif
3193 break;
3194 case 0x13: /* Graphics Status */
3195 if (gen_trap_ifnofpu(dc))
3196 goto jmp_insn;
3197 gen_op_xor_T1_T0();
3198 gen_op_movtl_env_T0(offsetof(CPUSPARCState, gsr));
3199 break;
3200 case 0x17: /* Tick compare */
3201 #if !defined(CONFIG_USER_ONLY)
3202 if (!supervisor(dc))
3203 goto illegal_insn;
3204 #endif
3205 {
3206 TCGv r_tickptr;
3207
3208 gen_op_xor_T1_T0();
3209 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3210 tick_cmpr));
3211 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3212 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3213 offsetof(CPUState, tick));
3214 tcg_gen_helper_0_2(helper_tick_set_limit,
3215 r_tickptr, cpu_T[0]);
3216 tcg_gen_discard_ptr(r_tickptr);
3217 }
3218 break;
3219 case 0x18: /* System tick */
3220 #if !defined(CONFIG_USER_ONLY)
3221 if (!supervisor(dc))
3222 goto illegal_insn;
3223 #endif
3224 {
3225 TCGv r_tickptr;
3226
3227 gen_op_xor_T1_T0();
3228 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3229 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3230 offsetof(CPUState, stick));
3231 tcg_gen_helper_0_2(helper_tick_set_count,
3232 r_tickptr, cpu_T[0]);
3233 tcg_gen_discard_ptr(r_tickptr);
3234 }
3235 break;
3236 case 0x19: /* System tick compare */
3237 #if !defined(CONFIG_USER_ONLY)
3238 if (!supervisor(dc))
3239 goto illegal_insn;
3240 #endif
3241 {
3242 TCGv r_tickptr;
3243
3244 gen_op_xor_T1_T0();
3245 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3246 stick_cmpr));
3247 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3248 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3249 offsetof(CPUState, stick));
3250 tcg_gen_helper_0_2(helper_tick_set_limit,
3251 r_tickptr, cpu_T[0]);
3252 tcg_gen_discard_ptr(r_tickptr);
3253 }
3254 break;
3255
3256 case 0x10: /* Performance Control */
3257 case 0x11: /* Performance Instrumentation Counter */
3258 case 0x12: /* Dispatch Control */
3259 case 0x14: /* Softint set */
3260 case 0x15: /* Softint clear */
3261 case 0x16: /* Softint write */
3262 #endif
3263 default:
3264 goto illegal_insn;
3265 }
3266 }
3267 break;
3268 #if !defined(CONFIG_USER_ONLY)
3269 case 0x31: /* wrpsr, V9 saved, restored */
3270 {
3271 if (!supervisor(dc))
3272 goto priv_insn;
3273 #ifdef TARGET_SPARC64
3274 switch (rd) {
3275 case 0:
3276 tcg_gen_helper_0_0(helper_saved);
3277 break;
3278 case 1:
3279 tcg_gen_helper_0_0(helper_restored);
3280 break;
3281 case 2: /* UA2005 allclean */
3282 case 3: /* UA2005 otherw */
3283 case 4: /* UA2005 normalw */
3284 case 5: /* UA2005 invalw */
3285 // XXX
3286 default:
3287 goto illegal_insn;
3288 }
3289 #else
3290 gen_op_xor_T1_T0();
3291 tcg_gen_helper_0_1(helper_wrpsr, cpu_T[0]);
3292 save_state(dc);
3293 gen_op_next_insn();
3294 tcg_gen_exit_tb(0);
3295 dc->is_br = 1;
3296 #endif
3297 }
3298 break;
3299 case 0x32: /* wrwim, V9 wrpr */
3300 {
3301 if (!supervisor(dc))
3302 goto priv_insn;
3303 gen_op_xor_T1_T0();
3304 #ifdef TARGET_SPARC64
3305 switch (rd) {
3306 case 0: // tpc
3307 {
3308 TCGv r_tsptr;
3309
3310 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3311 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3312 offsetof(CPUState, tsptr));
3313 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3314 offsetof(trap_state, tpc));
3315 tcg_gen_discard_ptr(r_tsptr);
3316 }
3317 break;
3318 case 1: // tnpc
3319 {
3320 TCGv r_tsptr;
3321
3322 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3323 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3324 offsetof(CPUState, tsptr));
3325 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3326 offsetof(trap_state, tnpc));
3327 tcg_gen_discard_ptr(r_tsptr);
3328 }
3329 break;
3330 case 2: // tstate
3331 {
3332 TCGv r_tsptr;
3333
3334 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3335 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3336 offsetof(CPUState, tsptr));
3337 tcg_gen_st_tl(cpu_T[0], r_tsptr,
3338 offsetof(trap_state, tstate));
3339 tcg_gen_discard_ptr(r_tsptr);
3340 }
3341 break;
3342 case 3: // tt
3343 {
3344 TCGv r_tsptr;
3345
3346 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3347 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3348 offsetof(CPUState, tsptr));
3349 tcg_gen_st_i32(cpu_T[0], r_tsptr,
3350 offsetof(trap_state, tt));
3351 tcg_gen_discard_ptr(r_tsptr);
3352 }
3353 break;
3354 case 4: // tick
3355 {
3356 TCGv r_tickptr;
3357
3358 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3359 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3360 offsetof(CPUState, tick));
3361 tcg_gen_helper_0_2(helper_tick_set_count,
3362 r_tickptr, cpu_T[0]);
3363 tcg_gen_discard_ptr(r_tickptr);
3364 }
3365 break;
3366 case 5: // tba
3367 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
3368 break;
3369 case 6: // pstate
3370 save_state(dc);
3371 tcg_gen_helper_0_1(helper_wrpstate, cpu_T[0]);
3372 gen_op_next_insn();
3373 tcg_gen_exit_tb(0);
3374 dc->is_br = 1;
3375 break;
3376 case 7: // tl
3377 gen_op_movl_env_T0(offsetof(CPUSPARCState, tl));
3378 break;
3379 case 8: // pil
3380 gen_op_movl_env_T0(offsetof(CPUSPARCState, psrpil));
3381 break;
3382 case 9: // cwp
3383 tcg_gen_helper_0_1(helper_wrcwp, cpu_T[0]);
3384 break;
3385 case 10: // cansave
3386 gen_op_movl_env_T0(offsetof(CPUSPARCState, cansave));
3387 break;
3388 case 11: // canrestore
3389 gen_op_movl_env_T0(offsetof(CPUSPARCState, canrestore));
3390 break;
3391 case 12: // cleanwin
3392 gen_op_movl_env_T0(offsetof(CPUSPARCState, cleanwin));
3393 break;
3394 case 13: // otherwin
3395 gen_op_movl_env_T0(offsetof(CPUSPARCState, otherwin));
3396 break;
3397 case 14: // wstate
3398 gen_op_movl_env_T0(offsetof(CPUSPARCState, wstate));
3399 break;
3400 case 16: // UA2005 gl
3401 gen_op_movl_env_T0(offsetof(CPUSPARCState, gl));
3402 break;
3403 case 26: // UA2005 strand status
3404 if (!hypervisor(dc))
3405 goto priv_insn;
3406 gen_op_movl_env_T0(offsetof(CPUSPARCState, ssr));
3407 break;
3408 default:
3409 goto illegal_insn;
3410 }
3411 #else
3412 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], ((1 << NWINDOWS) - 1));
3413 gen_op_movl_env_T0(offsetof(CPUSPARCState, wim));
3414 #endif
3415 }
3416 break;
3417 case 0x33: /* wrtbr, UA2005 wrhpr */
3418 {
3419 #ifndef TARGET_SPARC64
3420 if (!supervisor(dc))
3421 goto priv_insn;
3422 gen_op_xor_T1_T0();
3423 gen_op_movtl_env_T0(offsetof(CPUSPARCState, tbr));
3424 #else
3425 if (!hypervisor(dc))
3426 goto priv_insn;
3427 gen_op_xor_T1_T0();
3428 switch (rd) {
3429 case 0: // hpstate
3430 // XXX gen_op_wrhpstate();
3431 save_state(dc);
3432 gen_op_next_insn();
3433 tcg_gen_exit_tb(0);
3434 dc->is_br = 1;
3435 break;
3436 case 1: // htstate
3437 // XXX gen_op_wrhtstate();
3438 break;
3439 case 3: // hintp
3440 gen_op_movl_env_T0(offsetof(CPUSPARCState, hintp));
3441 break;
3442 case 5: // htba
3443 gen_op_movl_env_T0(offsetof(CPUSPARCState, htba));
3444 break;
3445 case 31: // hstick_cmpr
3446 {
3447 TCGv r_tickptr;
3448
3449 gen_op_movtl_env_T0(offsetof(CPUSPARCState,
3450 hstick_cmpr));
3451 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3452 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3453 offsetof(CPUState, hstick));
3454 tcg_gen_helper_0_2(helper_tick_set_limit,
3455 r_tickptr, cpu_T[0]);
3456 tcg_gen_discard_ptr(r_tickptr);
3457 }
3458 break;
3459 case 6: // hver readonly
3460 default:
3461 goto illegal_insn;
3462 }
3463 #endif
3464 }
3465 break;
3466 #endif
3467 #ifdef TARGET_SPARC64
3468 case 0x2c: /* V9 movcc */
3469 {
3470 int cc = GET_FIELD_SP(insn, 11, 12);
3471 int cond = GET_FIELD_SP(insn, 14, 17);
3472 TCGv r_cond;
3473 int l1;
3474
3475 r_cond = tcg_temp_new(TCG_TYPE_TL);
3476 if (insn & (1 << 18)) {
3477 if (cc == 0)
3478 gen_cond(r_cond, 0, cond);
3479 else if (cc == 2)
3480 gen_cond(r_cond, 1, cond);
3481 else
3482 goto illegal_insn;
3483 } else {
3484 gen_fcond(r_cond, cc, cond);
3485 }
3486
3487 l1 = gen_new_label();
3488
3489 tcg_gen_brcond_tl(TCG_COND_EQ, r_cond,
3490 tcg_const_tl(0), l1);
3491 if (IS_IMM) { /* immediate */
3492 rs2 = GET_FIELD_SPs(insn, 0, 10);
3493 gen_movl_simm_T1(rs2);
3494 } else {
3495 rs2 = GET_FIELD_SP(insn, 0, 4);
3496 gen_movl_reg_T1(rs2);
3497 }
3498 gen_movl_T1_reg(rd);
3499 gen_set_label(l1);
3500 tcg_gen_discard_tl(r_cond);
3501 break;
3502 }
3503 case 0x2d: /* V9 sdivx */
3504 gen_op_sdivx_T1_T0();
3505 gen_movl_T0_reg(rd);
3506 break;
3507 case 0x2e: /* V9 popc */
3508 {
3509 if (IS_IMM) { /* immediate */
3510 rs2 = GET_FIELD_SPs(insn, 0, 12);
3511 gen_movl_simm_T1(rs2);
3512 // XXX optimize: popc(constant)
3513 }
3514 else {
3515 rs2 = GET_FIELD_SP(insn, 0, 4);
3516 gen_movl_reg_T1(rs2);
3517 }
3518 tcg_gen_helper_1_1(helper_popc, cpu_T[0],
3519 cpu_T[1]);
3520 gen_movl_T0_reg(rd);
3521 }
3522 case 0x2f: /* V9 movr */
3523 {
3524 int cond = GET_FIELD_SP(insn, 10, 12);
3525 int l1;
3526
3527 rs1 = GET_FIELD(insn, 13, 17);
3528 gen_movl_reg_T0(rs1);
3529
3530 l1 = gen_new_label();
3531
3532 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_T[0],
3533 tcg_const_tl(0), l1);
3534 if (IS_IMM) { /* immediate */
3535 rs2 = GET_FIELD_SPs(insn, 0, 9);
3536 gen_movl_simm_T1(rs2);
3537 } else {
3538 rs2 = GET_FIELD_SP(insn, 0, 4);
3539 gen_movl_reg_T1(rs2);
3540 }
3541 gen_movl_T1_reg(rd);
3542 gen_set_label(l1);
3543 break;
3544 }
3545 #endif
3546 default:
3547 goto illegal_insn;
3548 }
3549 }
3550 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3551 #ifdef TARGET_SPARC64
3552 int opf = GET_FIELD_SP(insn, 5, 13);
3553 rs1 = GET_FIELD(insn, 13, 17);
3554 rs2 = GET_FIELD(insn, 27, 31);
3555 if (gen_trap_ifnofpu(dc))
3556 goto jmp_insn;
3557
3558 switch (opf) {
3559 case 0x000: /* VIS I edge8cc */
3560 case 0x001: /* VIS II edge8n */
3561 case 0x002: /* VIS I edge8lcc */
3562 case 0x003: /* VIS II edge8ln */
3563 case 0x004: /* VIS I edge16cc */
3564 case 0x005: /* VIS II edge16n */
3565 case 0x006: /* VIS I edge16lcc */
3566 case 0x007: /* VIS II edge16ln */
3567 case 0x008: /* VIS I edge32cc */
3568 case 0x009: /* VIS II edge32n */
3569 case 0x00a: /* VIS I edge32lcc */
3570 case 0x00b: /* VIS II edge32ln */
3571 // XXX
3572 goto illegal_insn;
3573 case 0x010: /* VIS I array8 */
3574 gen_movl_reg_T0(rs1);
3575 gen_movl_reg_T1(rs2);
3576 tcg_gen_helper_1_2(helper_array8, cpu_T[0], cpu_T[0],
3577 cpu_T[1]);
3578 gen_movl_T0_reg(rd);
3579 break;
3580 case 0x012: /* VIS I array16 */
3581 gen_movl_reg_T0(rs1);
3582 gen_movl_reg_T1(rs2);
3583 tcg_gen_helper_1_2(helper_array8, cpu_T[0], cpu_T[0],
3584 cpu_T[1]);
3585 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 1);
3586 gen_movl_T0_reg(rd);
3587 break;
3588 case 0x014: /* VIS I array32 */
3589 gen_movl_reg_T0(rs1);
3590 gen_movl_reg_T1(rs2);
3591 tcg_gen_helper_1_2(helper_array8, cpu_T[0], cpu_T[0],
3592 cpu_T[1]);
3593 tcg_gen_shli_i64(cpu_T[0], cpu_T[0], 2);
3594 gen_movl_T0_reg(rd);
3595 break;
3596 case 0x018: /* VIS I alignaddr */
3597 gen_movl_reg_T0(rs1);
3598 gen_movl_reg_T1(rs2);
3599 tcg_gen_helper_1_2(helper_alignaddr, cpu_T[0], cpu_T[0],
3600 cpu_T[1]);
3601 gen_movl_T0_reg(rd);
3602 break;
3603 case 0x019: /* VIS II bmask */
3604 case 0x01a: /* VIS I alignaddrl */
3605 // XXX
3606 goto illegal_insn;
3607 case 0x020: /* VIS I fcmple16 */
3608 gen_op_load_fpr_DT0(DFPREG(rs1));
3609 gen_op_load_fpr_DT1(DFPREG(rs2));
3610 tcg_gen_helper_0_0(helper_fcmple16);
3611 gen_op_store_DT0_fpr(DFPREG(rd));
3612 break;
3613 case 0x022: /* VIS I fcmpne16 */
3614 gen_op_load_fpr_DT0(DFPREG(rs1));
3615 gen_op_load_fpr_DT1(DFPREG(rs2));
3616 tcg_gen_helper_0_0(helper_fcmpne16);
3617 gen_op_store_DT0_fpr(DFPREG(rd));
3618 break;
3619 case 0x024: /* VIS I fcmple32 */
3620 gen_op_load_fpr_DT0(DFPREG(rs1));
3621 gen_op_load_fpr_DT1(DFPREG(rs2));
3622 tcg_gen_helper_0_0(helper_fcmple32);
3623 gen_op_store_DT0_fpr(DFPREG(rd));
3624 break;
3625 case 0x026: /* VIS I fcmpne32 */
3626 gen_op_load_fpr_DT0(DFPREG(rs1));
3627 gen_op_load_fpr_DT1(DFPREG(rs2));
3628 tcg_gen_helper_0_0(helper_fcmpne32);
3629 gen_op_store_DT0_fpr(DFPREG(rd));
3630 break;
3631 case 0x028: /* VIS I fcmpgt16 */
3632 gen_op_load_fpr_DT0(DFPREG(rs1));
3633 gen_op_load_fpr_DT1(DFPREG(rs2));
3634 tcg_gen_helper_0_0(helper_fcmpgt16);
3635 gen_op_store_DT0_fpr(DFPREG(rd));
3636 break;
3637 case 0x02a: /* VIS I fcmpeq16 */
3638 gen_op_load_fpr_DT0(DFPREG(rs1));
3639 gen_op_load_fpr_DT1(DFPREG(rs2));
3640 tcg_gen_helper_0_0(helper_fcmpeq16);
3641 gen_op_store_DT0_fpr(DFPREG(rd));
3642 break;
3643 case 0x02c: /* VIS I fcmpgt32 */
3644 gen_op_load_fpr_DT0(DFPREG(rs1));
3645 gen_op_load_fpr_DT1(DFPREG(rs2));
3646 tcg_gen_helper_0_0(helper_fcmpgt32);
3647 gen_op_store_DT0_fpr(DFPREG(rd));
3648 break;
3649 case 0x02e: /* VIS I fcmpeq32 */
3650 gen_op_load_fpr_DT0(DFPREG(rs1));
3651 gen_op_load_fpr_DT1(DFPREG(rs2));
3652 tcg_gen_helper_0_0(helper_fcmpeq32);
3653 gen_op_store_DT0_fpr(DFPREG(rd));
3654 break;
3655 case 0x031: /* VIS I fmul8x16 */
3656 gen_op_load_fpr_DT0(DFPREG(rs1));
3657 gen_op_load_fpr_DT1(DFPREG(rs2));
3658 tcg_gen_helper_0_0(helper_fmul8x16);
3659 gen_op_store_DT0_fpr(DFPREG(rd));
3660 break;
3661 case 0x033: /* VIS I fmul8x16au */
3662 gen_op_load_fpr_DT0(DFPREG(rs1));
3663 gen_op_load_fpr_DT1(DFPREG(rs2));
3664 tcg_gen_helper_0_0(helper_fmul8x16au);
3665 gen_op_store_DT0_fpr(DFPREG(rd));
3666 break;
3667 case 0x035: /* VIS I fmul8x16al */
3668 gen_op_load_fpr_DT0(DFPREG(rs1));
3669 gen_op_load_fpr_DT1(DFPREG(rs2));
3670 tcg_gen_helper_0_0(helper_fmul8x16al);
3671 gen_op_store_DT0_fpr(DFPREG(rd));
3672 break;
3673 case 0x036: /* VIS I fmul8sux16 */
3674 gen_op_load_fpr_DT0(DFPREG(rs1));
3675 gen_op_load_fpr_DT1(DFPREG(rs2));
3676 tcg_gen_helper_0_0(helper_fmul8sux16);
3677 gen_op_store_DT0_fpr(DFPREG(rd));
3678 break;
3679 case 0x037: /* VIS I fmul8ulx16 */
3680 gen_op_load_fpr_DT0(DFPREG(rs1));
3681 gen_op_load_fpr_DT1(DFPREG(rs2));
3682 tcg_gen_helper_0_0(helper_fmul8ulx16);
3683 gen_op_store_DT0_fpr(DFPREG(rd));
3684 break;
3685 case 0x038: /* VIS I fmuld8sux16 */
3686 gen_op_load_fpr_DT0(DFPREG(rs1));
3687 gen_op_load_fpr_DT1(DFPREG(rs2));
3688 tcg_gen_helper_0_0(helper_fmuld8sux16);
3689 gen_op_store_DT0_fpr(DFPREG(rd));
3690 break;
3691 case 0x039: /* VIS I fmuld8ulx16 */
3692 gen_op_load_fpr_DT0(DFPREG(rs1));
3693 gen_op_load_fpr_DT1(DFPREG(rs2));
3694 tcg_gen_helper_0_0(helper_fmuld8ulx16);
3695 gen_op_store_DT0_fpr(DFPREG(rd));
3696 break;
3697 case 0x03a: /* VIS I fpack32 */
3698 case 0x03b: /* VIS I fpack16 */
3699 case 0x03d: /* VIS I fpackfix */
3700 case 0x03e: /* VIS I pdist */
3701 // XXX
3702 goto illegal_insn;
3703 case 0x048: /* VIS I faligndata */
3704 gen_op_load_fpr_DT0(DFPREG(rs1));
3705 gen_op_load_fpr_DT1(DFPREG(rs2));
3706 tcg_gen_helper_0_0(helper_faligndata);
3707 gen_op_store_DT0_fpr(DFPREG(rd));
3708 break;
3709 case 0x04b: /* VIS I fpmerge */
3710 gen_op_load_fpr_DT0(DFPREG(rs1));
3711 gen_op_load_fpr_DT1(DFPREG(rs2));
3712 tcg_gen_helper_0_0(helper_fpmerge);
3713 gen_op_store_DT0_fpr(DFPREG(rd));
3714 break;
3715 case 0x04c: /* VIS II bshuffle */
3716 // XXX
3717 goto illegal_insn;
3718 case 0x04d: /* VIS I fexpand */
3719 gen_op_load_fpr_DT0(DFPREG(rs1));
3720 gen_op_load_fpr_DT1(DFPREG(rs2));
3721 tcg_gen_helper_0_0(helper_fexpand);
3722 gen_op_store_DT0_fpr(DFPREG(rd));
3723 break;
3724 case 0x050: /* VIS I fpadd16 */
3725 gen_op_load_fpr_DT0(DFPREG(rs1));
3726 gen_op_load_fpr_DT1(DFPREG(rs2));
3727 tcg_gen_helper_0_0(helper_fpadd16);
3728 gen_op_store_DT0_fpr(DFPREG(rd));
3729 break;
3730 case 0x051: /* VIS I fpadd16s */
3731 gen_op_load_fpr_FT0(rs1);
3732 gen_op_load_fpr_FT1(rs2);
3733 tcg_gen_helper_0_0(helper_fpadd16s);
3734 gen_op_store_FT0_fpr(rd);
3735 break;
3736 case 0x052: /* VIS I fpadd32 */
3737 gen_op_load_fpr_DT0(DFPREG(rs1));
3738 gen_op_load_fpr_DT1(DFPREG(rs2));
3739 tcg_gen_helper_0_0(helper_fpadd32);
3740 gen_op_store_DT0_fpr(DFPREG(rd));
3741 break;
3742 case 0x053: /* VIS I fpadd32s */
3743 gen_op_load_fpr_FT0(rs1);
3744 gen_op_load_fpr_FT1(rs2);
3745 tcg_gen_helper_0_0(helper_fpadd32s);
3746 gen_op_store_FT0_fpr(rd);
3747 break;
3748 case 0x054: /* VIS I fpsub16 */
3749 gen_op_load_fpr_DT0(DFPREG(rs1));
3750 gen_op_load_fpr_DT1(DFPREG(rs2));
3751 tcg_gen_helper_0_0(helper_fpsub16);
3752 gen_op_store_DT0_fpr(DFPREG(rd));
3753 break;
3754 case 0x055: /* VIS I fpsub16s */
3755 gen_op_load_fpr_FT0(rs1);
3756 gen_op_load_fpr_FT1(rs2);
3757 tcg_gen_helper_0_0(helper_fpsub16s);
3758 gen_op_store_FT0_fpr(rd);
3759 break;
3760 case 0x056: /* VIS I fpsub32 */
3761 gen_op_load_fpr_DT0(DFPREG(rs1));
3762 gen_op_load_fpr_DT1(DFPREG(rs2));
3763 tcg_gen_helper_0_0(helper_fpadd32);
3764 gen_op_store_DT0_fpr(DFPREG(rd));
3765 break;
3766 case 0x057: /* VIS I fpsub32s */
3767 gen_op_load_fpr_FT0(rs1);
3768 gen_op_load_fpr_FT1(rs2);
3769 tcg_gen_helper_0_0(helper_fpsub32s);
3770 gen_op_store_FT0_fpr(rd);
3771 break;
3772 case 0x060: /* VIS I fzero */
3773 tcg_gen_helper_0_0(helper_movl_DT0_0);
3774 gen_op_store_DT0_fpr(DFPREG(rd));
3775 break;
3776 case 0x061: /* VIS I fzeros */
3777 tcg_gen_helper_0_0(helper_movl_FT0_0);
3778 gen_op_store_FT0_fpr(rd);
3779 break;
3780 case 0x062: /* VIS I fnor */
3781 gen_op_load_fpr_DT0(DFPREG(rs1));
3782 gen_op_load_fpr_DT1(DFPREG(rs2));
3783 tcg_gen_helper_0_0(helper_fnor);
3784 gen_op_store_DT0_fpr(DFPREG(rd));
3785 break;
3786 case 0x063: /* VIS I fnors */
3787 gen_op_load_fpr_FT0(rs1);
3788 gen_op_load_fpr_FT1(rs2);
3789 tcg_gen_helper_0_0(helper_fnors);
3790 gen_op_store_FT0_fpr(rd);
3791 break;
3792 case 0x064: /* VIS I fandnot2 */
3793 gen_op_load_fpr_DT1(DFPREG(rs1));
3794 gen_op_load_fpr_DT0(DFPREG(rs2));
3795 tcg_gen_helper_0_0(helper_fandnot);
3796 gen_op_store_DT0_fpr(DFPREG(rd));
3797 break;
3798 case 0x065: /* VIS I fandnot2s */
3799 gen_op_load_fpr_FT1(rs1);
3800 gen_op_load_fpr_FT0(rs2);
3801 tcg_gen_helper_0_0(helper_fandnots);
3802 gen_op_store_FT0_fpr(rd);
3803 break;
3804 case 0x066: /* VIS I fnot2 */
3805 gen_op_load_fpr_DT1(DFPREG(rs2));
3806 tcg_gen_helper_0_0(helper_fnot);
3807 gen_op_store_DT0_fpr(DFPREG(rd));
3808 break;
3809 case 0x067: /* VIS I fnot2s */
3810 gen_op_load_fpr_FT1(rs2);
3811 tcg_gen_helper_0_0(helper_fnot);
3812 gen_op_store_FT0_fpr(rd);
3813 break;
3814 case 0x068: /* VIS I fandnot1 */
3815 gen_op_load_fpr_DT0(DFPREG(rs1));
3816 gen_op_load_fpr_DT1(DFPREG(rs2));
3817 tcg_gen_helper_0_0(helper_fandnot);
3818 gen_op_store_DT0_fpr(DFPREG(rd));
3819 break;
3820 case 0x069: /* VIS I fandnot1s */
3821 gen_op_load_fpr_FT0(rs1);
3822 gen_op_load_fpr_FT1(rs2);
3823 tcg_gen_helper_0_0(helper_fandnots);
3824 gen_op_store_FT0_fpr(rd);
3825 break;
3826 case 0x06a: /* VIS I fnot1 */
3827 gen_op_load_fpr_DT1(DFPREG(rs1));
3828 tcg_gen_helper_0_0(helper_fnot);
3829 gen_op_store_DT0_fpr(DFPREG(rd));
3830 break;
3831 case 0x06b: /* VIS I fnot1s */
3832 gen_op_load_fpr_FT1(rs1);
3833 tcg_gen_helper_0_0(helper_fnot);
3834 gen_op_store_FT0_fpr(rd);
3835 break;
3836 case 0x06c: /* VIS I fxor */
3837 gen_op_load_fpr_DT0(DFPREG(rs1));
3838 gen_op_load_fpr_DT1(DFPREG(rs2));
3839 tcg_gen_helper_0_0(helper_fxor);
3840 gen_op_store_DT0_fpr(DFPREG(rd));
3841 break;
3842 case 0x06d: /* VIS I fxors */
3843 gen_op_load_fpr_FT0(rs1);
3844 gen_op_load_fpr_FT1(rs2);
3845 tcg_gen_helper_0_0(helper_fxors);
3846 gen_op_store_FT0_fpr(rd);
3847 break;
3848 case 0x06e: /* VIS I fnand */
3849 gen_op_load_fpr_DT0(DFPREG(rs1));
3850 gen_op_load_fpr_DT1(DFPREG(rs2));
3851 tcg_gen_helper_0_0(helper_fnand);
3852 gen_op_store_DT0_fpr(DFPREG(rd));
3853 break;
3854 case 0x06f: /* VIS I fnands */
3855 gen_op_load_fpr_FT0(rs1);
3856 gen_op_load_fpr_FT1(rs2);
3857 tcg_gen_helper_0_0(helper_fnands);
3858 gen_op_store_FT0_fpr(rd);
3859 break;
3860 case 0x070: /* VIS I fand */
3861 gen_op_load_fpr_DT0(DFPREG(rs1));
3862 gen_op_load_fpr_DT1(DFPREG(rs2));
3863 tcg_gen_helper_0_0(helper_fand);
3864 gen_op_store_DT0_fpr(DFPREG(rd));
3865 break;
3866 case 0x071: /* VIS I fands */
3867 gen_op_load_fpr_FT0(rs1);
3868 gen_op_load_fpr_FT1(rs2);
3869 tcg_gen_helper_0_0(helper_fands);
3870 gen_op_store_FT0_fpr(rd);
3871 break;
3872 case 0x072: /* VIS I fxnor */
3873 gen_op_load_fpr_DT0(DFPREG(rs1));
3874 gen_op_load_fpr_DT1(DFPREG(rs2));
3875 tcg_gen_helper_0_0(helper_fxnor);
3876 gen_op_store_DT0_fpr(DFPREG(rd));
3877 break;
3878 case 0x073: /* VIS I fxnors */
3879 gen_op_load_fpr_FT0(rs1);
3880 gen_op_load_fpr_FT1(rs2);
3881 tcg_gen_helper_0_0(helper_fxnors);
3882 gen_op_store_FT0_fpr(rd);
3883 break;
3884 case 0x074: /* VIS I fsrc1 */
3885 gen_op_load_fpr_DT0(DFPREG(rs1));
3886 gen_op_store_DT0_fpr(DFPREG(rd));
3887 break;
3888 case 0x075: /* VIS I fsrc1s */
3889 gen_op_load_fpr_FT0(rs1);
3890 gen_op_store_FT0_fpr(rd);
3891 break;
3892 case 0x076: /* VIS I fornot2 */
3893 gen_op_load_fpr_DT1(DFPREG(rs1));
3894 gen_op_load_fpr_DT0(DFPREG(rs2));
3895 tcg_gen_helper_0_0(helper_fornot);
3896 gen_op_store_DT0_fpr(DFPREG(rd));
3897 break;
3898 case 0x077: /* VIS I fornot2s */
3899 gen_op_load_fpr_FT1(rs1);
3900 gen_op_load_fpr_FT0(rs2);
3901 tcg_gen_helper_0_0(helper_fornots);
3902 gen_op_store_FT0_fpr(rd);
3903 break;
3904 case 0x078: /* VIS I fsrc2 */
3905 gen_op_load_fpr_DT0(DFPREG(rs2));
3906 gen_op_store_DT0_fpr(DFPREG(rd));
3907 break;
3908 case 0x079: /* VIS I fsrc2s */
3909 gen_op_load_fpr_FT0(rs2);
3910 gen_op_store_FT0_fpr(rd);
3911 break;
3912 case 0x07a: /* VIS I fornot1 */
3913 gen_op_load_fpr_DT0(DFPREG(rs1));
3914 gen_op_load_fpr_DT1(DFPREG(rs2));
3915 tcg_gen_helper_0_0(helper_fornot);
3916 gen_op_store_DT0_fpr(DFPREG(rd));
3917 break;
3918 case 0x07b: /* VIS I fornot1s */
3919 gen_op_load_fpr_FT0(rs1);
3920 gen_op_load_fpr_FT1(rs2);
3921 tcg_gen_helper_0_0(helper_fornots);
3922 gen_op_store_FT0_fpr(rd);
3923 break;
3924 case 0x07c: /* VIS I for */
3925 gen_op_load_fpr_DT0(DFPREG(rs1));
3926 gen_op_load_fpr_DT1(DFPREG(rs2));
3927 tcg_gen_helper_0_0(helper_for);
3928 gen_op_store_DT0_fpr(DFPREG(rd));
3929 break;
3930 case 0x07d: /* VIS I fors */
3931 gen_op_load_fpr_FT0(rs1);
3932 gen_op_load_fpr_FT1(rs2);
3933 tcg_gen_helper_0_0(helper_fors);
3934 gen_op_store_FT0_fpr(rd);
3935 break;
3936 case 0x07e: /* VIS I fone */
3937 tcg_gen_helper_0_0(helper_movl_DT0_1);
3938 gen_op_store_DT0_fpr(DFPREG(rd));
3939 break;
3940 case 0x07f: /* VIS I fones */
3941 tcg_gen_helper_0_0(helper_movl_FT0_1);
3942 gen_op_store_FT0_fpr(rd);
3943 break;
3944 case 0x080: /* VIS I shutdown */
3945 case 0x081: /* VIS II siam */
3946 // XXX
3947 goto illegal_insn;
3948 default:
3949 goto illegal_insn;
3950 }
3951 #else
3952 goto ncp_insn;
3953 #endif
3954 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3955 #ifdef TARGET_SPARC64
3956 goto illegal_insn;
3957 #else
3958 goto ncp_insn;
3959 #endif
3960 #ifdef TARGET_SPARC64
3961 } else if (xop == 0x39) { /* V9 return */
3962 rs1 = GET_FIELD(insn, 13, 17);
3963 save_state(dc);
3964 gen_movl_reg_T0(rs1);
3965 if (IS_IMM) { /* immediate */
3966 rs2 = GET_FIELDs(insn, 19, 31);
3967 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3968 } else { /* register */
3969 rs2 = GET_FIELD(insn, 27, 31);
3970 #if defined(OPTIM)
3971 if (rs2) {
3972 #endif
3973 gen_movl_reg_T1(rs2);
3974 gen_op_add_T1_T0();
3975 #if defined(OPTIM)
3976 }
3977 #endif
3978 }
3979 tcg_gen_helper_0_0(helper_restore);
3980 gen_mov_pc_npc(dc);
3981 gen_op_check_align_T0_3();
3982 tcg_gen_mov_tl(cpu_npc, cpu_T[0]);
3983 dc->npc = DYNAMIC_PC;
3984 goto jmp_insn;
3985 #endif
3986 } else {
3987 rs1 = GET_FIELD(insn, 13, 17);
3988 gen_movl_reg_T0(rs1);
3989 if (IS_IMM) { /* immediate */
3990 rs2 = GET_FIELDs(insn, 19, 31);
3991 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
3992 } else { /* register */
3993 rs2 = GET_FIELD(insn, 27, 31);
3994 #if defined(OPTIM)
3995 if (rs2) {
3996 #endif
3997 gen_movl_reg_T1(rs2);
3998 gen_op_add_T1_T0();
3999 #if defined(OPTIM)
4000 }
4001 #endif
4002 }
4003 switch (xop) {
4004 case 0x38: /* jmpl */
4005 {
4006 if (rd != 0) {
4007 tcg_gen_movi_tl(cpu_T[1], dc->pc);
4008 gen_movl_T1_reg(rd);
4009 }
4010 gen_mov_pc_npc(dc);
4011 gen_op_check_align_T0_3();
4012 tcg_gen_mov_tl(cpu_npc, cpu_T[0]);
4013 dc->npc = DYNAMIC_PC;
4014 }
4015 goto jmp_insn;
4016 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4017 case 0x39: /* rett, V9 return */
4018 {
4019 if (!supervisor(dc))
4020 goto priv_insn;
4021 gen_mov_pc_npc(dc);
4022 gen_op_check_align_T0_3();
4023 tcg_gen_mov_tl(cpu_npc, cpu_T[0]);
4024 dc->npc = DYNAMIC_PC;
4025 tcg_gen_helper_0_0(helper_rett);
4026 }
4027 goto jmp_insn;
4028 #endif
4029 case 0x3b: /* flush */
4030 tcg_gen_helper_0_1(helper_flush, cpu_T[0]);
4031 break;
4032 case 0x3c: /* save */
4033 save_state(dc);
4034 tcg_gen_helper_0_0(helper_save);
4035 gen_movl_T0_reg(rd);
4036 break;
4037 case 0x3d: /* restore */
4038 save_state(dc);
4039 tcg_gen_helper_0_0(helper_restore);
4040 gen_movl_T0_reg(rd);
4041 break;
4042 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4043 case 0x3e: /* V9 done/retry */
4044 {
4045 switch (rd) {
4046 case 0:
4047 if (!supervisor(dc))
4048 goto priv_insn;
4049 dc->npc = DYNAMIC_PC;
4050 dc->pc = DYNAMIC_PC;
4051 tcg_gen_helper_0_0(helper_done);
4052 goto jmp_insn;
4053 case 1:
4054 if (!supervisor(dc))
4055 goto priv_insn;
4056 dc->npc = DYNAMIC_PC;
4057 dc->pc = DYNAMIC_PC;
4058 tcg_gen_helper_0_0(helper_retry);
4059 goto jmp_insn;
4060 default:
4061 goto illegal_insn;
4062 }
4063 }
4064 break;
4065 #endif
4066 default:
4067 goto illegal_insn;
4068 }
4069 }
4070 break;
4071 }
4072 break;
4073 case 3: /* load/store instructions */
4074 {
4075 unsigned int xop = GET_FIELD(insn, 7, 12);
4076 rs1 = GET_FIELD(insn, 13, 17);
4077 save_state(dc);
4078 gen_movl_reg_T0(rs1);
4079 if (xop == 0x3c || xop == 0x3e)
4080 {
4081 rs2 = GET_FIELD(insn, 27, 31);
4082 gen_movl_reg_T1(rs2);
4083 }
4084 else if (IS_IMM) { /* immediate */
4085 rs2 = GET_FIELDs(insn, 19, 31);
4086 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], (int)rs2);
4087 } else { /* register */
4088 rs2 = GET_FIELD(insn, 27, 31);
4089 #if defined(OPTIM)
4090 if (rs2 != 0) {
4091 #endif
4092 gen_movl_reg_T1(rs2);
4093 gen_op_add_T1_T0();
4094 #if defined(OPTIM)
4095 }
4096 #endif
4097 }
4098 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4099 (xop > 0x17 && xop <= 0x1d ) ||
4100 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4101 switch (xop) {
4102 case 0x0: /* load unsigned word */
4103 gen_op_check_align_T0_3();
4104 ABI32_MASK(cpu_T[0]);
4105 tcg_gen_qemu_ld32u(cpu_T[1], cpu_T[0], dc->mem_idx);
4106 break;
4107 case 0x1: /* load unsigned byte */
4108 ABI32_MASK(cpu_T[0]);
4109 tcg_gen_qemu_ld8u(cpu_T[1], cpu_T[0], dc->mem_idx);
4110 break;
4111 case 0x2: /* load unsigned halfword */
4112 gen_op_check_align_T0_1();
4113 ABI32_MASK(cpu_T[0]);
4114 tcg_gen_qemu_ld16u(cpu_T[1], cpu_T[0], dc->mem_idx);
4115 break;
4116 case 0x3: /* load double word */
4117 if (rd & 1)
4118 goto illegal_insn;
4119 else {
4120 gen_op_check_align_T0_7();
4121 ABI32_MASK(cpu_T[0]);
4122 tcg_gen_qemu_ld64(cpu_tmp64, cpu_T[0], dc->mem_idx);
4123 tcg_gen_trunc_i64_tl(cpu_T[0], cpu_tmp64);
4124 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], 0xffffffffULL);
4125 gen_movl_T0_reg(rd + 1);
4126 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4127 tcg_gen_trunc_i64_tl(cpu_T[1], cpu_tmp64);
4128 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], 0xffffffffULL);
4129 }
4130 break;
4131 case 0x9: /* load signed byte */
4132 ABI32_MASK(cpu_T[0]);
4133 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
4134 break;
4135 case 0xa: /* load signed halfword */
4136 gen_op_check_align_T0_1();
4137 ABI32_MASK(cpu_T[0]);
4138 tcg_gen_qemu_ld16s(cpu_T[1], cpu_T[0], dc->mem_idx);
4139 break;
4140 case 0xd: /* ldstub -- XXX: should be atomically */
4141 ABI32_MASK(cpu_T[0]);
4142 tcg_gen_qemu_ld8s(cpu_T[1], cpu_T[0], dc->mem_idx);
4143 tcg_gen_qemu_st8(tcg_const_tl(0xff), cpu_T[0], dc->mem_idx);
4144 break;
4145 case 0x0f: /* swap register with memory. Also atomically */
4146 gen_op_check_align_T0_3();
4147 gen_movl_reg_T1(rd);
4148 ABI32_MASK(cpu_T[0]);
4149 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_T[0], dc->mem_idx);
4150 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
4151 tcg_gen_extu_i32_tl(cpu_T[1], cpu_tmp32);
4152 break;
4153 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4154 case 0x10: /* load word alternate */
4155 #ifndef TARGET_SPARC64
4156 if (IS_IMM)
4157 goto illegal_insn;
4158 if (!supervisor(dc))
4159 goto priv_insn;
4160 #endif
4161 gen_op_check_align_T0_3();
4162 gen_ld_asi(insn, 4, 0);
4163 break;
4164 case 0x11: /* load unsigned byte alternate */
4165 #ifndef TARGET_SPARC64
4166 if (IS_IMM)
4167 goto illegal_insn;
4168 if (!supervisor(dc))
4169 goto priv_insn;
4170 #endif
4171 gen_ld_asi(insn, 1, 0);
4172 break;
4173 case 0x12: /* load unsigned halfword alternate */
4174 #ifndef TARGET_SPARC64
4175 if (IS_IMM)
4176 goto illegal_insn;
4177 if (!supervisor(dc))
4178 goto priv_insn;
4179 #endif
4180 gen_op_check_align_T0_1();
4181 gen_ld_asi(insn, 2, 0);
4182 break;
4183 case 0x13: /* load double word alternate */
4184 #ifndef TARGET_SPARC64
4185 if (IS_IMM)
4186 goto illegal_insn;
4187 if (!supervisor(dc))
4188 goto priv_insn;
4189 #endif
4190 if (rd & 1)
4191 goto illegal_insn;
4192 gen_op_check_align_T0_7();
4193 gen_ldda_asi(insn);
4194 gen_movl_T0_reg(rd + 1);
4195 break;
4196 case 0x19: /* load signed byte alternate */
4197 #ifndef TARGET_SPARC64
4198 if (IS_IMM)
4199 goto illegal_insn;
4200 if (!supervisor(dc))
4201 goto priv_insn;
4202 #endif
4203 gen_ld_asi(insn, 1, 1);
4204 break;
4205 case 0x1a: /* load signed halfword alternate */
4206 #ifndef TARGET_SPARC64
4207 if (IS_IMM)
4208 goto illegal_insn;
4209 if (!supervisor(dc))
4210 goto priv_insn;
4211 #endif
4212 gen_op_check_align_T0_1();
4213 gen_ld_asi(insn, 2, 1);
4214 break;
4215 case 0x1d: /* ldstuba -- XXX: should be atomically */
4216 #ifndef TARGET_SPARC64
4217 if (IS_IMM)
4218 goto illegal_insn;
4219 if (!supervisor(dc))
4220 goto priv_insn;
4221 #endif
4222 gen_ldstub_asi(insn);
4223 break;
4224 case 0x1f: /* swap reg with alt. memory. Also atomically */
4225 #ifndef TARGET_SPARC64
4226 if (IS_IMM)
4227 goto illegal_insn;
4228 if (!supervisor(dc))
4229 goto priv_insn;
4230 #endif
4231 gen_op_check_align_T0_3();
4232 gen_movl_reg_T1(rd);
4233 gen_swap_asi(insn);
4234 break;
4235
4236 #ifndef TARGET_SPARC64
4237 case 0x30: /* ldc */
4238 case 0x31: /* ldcsr */
4239 case 0x33: /* lddc */
4240 goto ncp_insn;
4241 #endif
4242 #endif
4243 #ifdef TARGET_SPARC64
4244 case 0x08: /* V9 ldsw */
4245 gen_op_check_align_T0_3();
4246 ABI32_MASK(cpu_T[0]);
4247 tcg_gen_qemu_ld32s(cpu_T[1], cpu_T[0], dc->mem_idx);
4248 break;
4249 case 0x0b: /* V9 ldx */
4250 gen_op_check_align_T0_7();
4251 ABI32_MASK(cpu_T[0]);
4252 tcg_gen_qemu_ld64(cpu_T[1], cpu_T[0], dc->mem_idx);
4253 break;
4254 case 0x18: /* V9 ldswa */
4255 gen_op_check_align_T0_3();
4256 gen_ld_asi(insn, 4, 1);
4257 break;
4258 case 0x1b: /* V9 ldxa */
4259 gen_op_check_align_T0_7();
4260 gen_ld_asi(insn, 8, 0);
4261 break;
4262 case 0x2d: /* V9 prefetch, no effect */
4263 goto skip_move;
4264 case 0x30: /* V9 ldfa */
4265 gen_op_check_align_T0_3();
4266 gen_ldf_asi(insn, 4, rd);
4267 goto skip_move;
4268 case 0x33: /* V9 lddfa */
4269 gen_op_check_align_T0_3();
4270 gen_ldf_asi(insn, 8, DFPREG(rd));
4271 goto skip_move;
4272 case 0x3d: /* V9 prefetcha, no effect */
4273 goto skip_move;
4274 case 0x32: /* V9 ldqfa */
4275 #if defined(CONFIG_USER_ONLY)
4276 gen_op_check_align_T0_3();
4277 gen_ldf_asi(insn, 16, QFPREG(rd));
4278 goto skip_move;
4279 #else
4280 goto nfpu_insn;
4281 #endif
4282 #endif
4283 default:
4284 goto illegal_insn;
4285 }
4286 gen_movl_T1_reg(rd);
4287 #ifdef TARGET_SPARC64
4288 skip_move: ;
4289 #endif
4290 } else if (xop >= 0x20 && xop < 0x24) {
4291 if (gen_trap_ifnofpu(dc))
4292 goto jmp_insn;
4293 switch (xop) {
4294 case 0x20: /* load fpreg */
4295 gen_op_check_align_T0_3();
4296 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_T[0], dc->mem_idx);
4297 tcg_gen_st_i32(cpu_tmp32, cpu_env,
4298 offsetof(CPUState, fpr[rd]));
4299 break;
4300 case 0x21: /* load fsr */
4301 gen_op_check_align_T0_3();
4302 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_T[0], dc->mem_idx);
4303 tcg_gen_st_i32(cpu_tmp32, cpu_env,
4304 offsetof(CPUState, ft0));
4305 tcg_gen_helper_0_0(helper_ldfsr);
4306 break;
4307 case 0x22: /* load quad fpreg */
4308 #if defined(CONFIG_USER_ONLY)
4309 gen_op_check_align_T0_7();
4310 gen_op_ldst(ldqf);
4311 gen_op_store_QT0_fpr(QFPREG(rd));
4312 break;
4313 #else
4314 goto nfpu_insn;
4315 #endif
4316 case 0x23: /* load double fpreg */
4317 gen_op_check_align_T0_7();
4318 gen_op_ldst(lddf);
4319 gen_op_store_DT0_fpr(DFPREG(rd));
4320 break;
4321 default:
4322 goto illegal_insn;
4323 }
4324 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4325 xop == 0xe || xop == 0x1e) {
4326 gen_movl_reg_T1(rd);
4327 switch (xop) {
4328 case 0x4: /* store word */
4329 gen_op_check_align_T0_3();
4330 ABI32_MASK(cpu_T[0]);
4331 tcg_gen_qemu_st32(cpu_T[1], cpu_T[0], dc->mem_idx);
4332 break;
4333 case 0x5: /* store byte */
4334 ABI32_MASK(cpu_T[0]);
4335 tcg_gen_qemu_st8(cpu_T[1], cpu_T[0], dc->mem_idx);
4336 break;
4337 case 0x6: /* store halfword */
4338 gen_op_check_align_T0_1();
4339 ABI32_MASK(cpu_T[0]);
4340 tcg_gen_qemu_st16(cpu_T[1], cpu_T[0], dc->mem_idx);
4341 break;
4342 case 0x7: /* store double word */
4343 if (rd & 1)
4344 goto illegal_insn;
4345 #ifndef __i386__
4346 else {
4347 TCGv r_low;
4348
4349 gen_op_check_align_T0_7();
4350 r_low = tcg_temp_new(TCG_TYPE_I32);
4351 gen_movl_reg_TN(rd + 1, r_low);
4352 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_T[1],
4353 r_low);
4354 tcg_gen_qemu_st64(cpu_tmp64, cpu_T[0], dc->mem_idx);
4355 }
4356 #else /* __i386__ */
4357 gen_op_check_align_T0_7();
4358 flush_T2(dc);
4359 gen_movl_reg_T2(rd + 1);
4360 gen_op_ldst(std);
4361 #endif /* __i386__ */
4362 break;
4363 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4364 case 0x14: /* store word alternate */
4365 #ifndef TARGET_SPARC64
4366 if (IS_IMM)
4367 goto illegal_insn;
4368 if (!supervisor(dc))
4369 goto priv_insn;
4370 #endif
4371 gen_op_check_align_T0_3();
4372 gen_st_asi(insn, 4);
4373 break;
4374 case 0x15: /* store byte alternate */
4375 #ifndef TARGET_SPARC64
4376 if (IS_IMM)
4377 goto illegal_insn;
4378 if (!supervisor(dc))
4379 goto priv_insn;
4380 #endif
4381 gen_st_asi(insn, 1);
4382 break;
4383 case 0x16: /* store halfword alternate */
4384 #ifndef TARGET_SPARC64
4385 if (IS_IMM)
4386 goto illegal_insn;
4387 if (!supervisor(dc))
4388 goto priv_insn;
4389 #endif
4390 gen_op_check_align_T0_1();
4391 gen_st_asi(insn, 2);
4392 break;
4393 case 0x17: /* store double word alternate */
4394 #ifndef TARGET_SPARC64
4395 if (IS_IMM)
4396 goto illegal_insn;
4397 if (!supervisor(dc))
4398 goto priv_insn;
4399 #endif
4400 if (rd & 1)
4401 goto illegal_insn;
4402 else {
4403 gen_op_check_align_T0_7();
4404 gen_stda_asi(insn, rd);
4405 }
4406 break;
4407 #endif
4408 #ifdef TARGET_SPARC64
4409 case 0x0e: /* V9 stx */
4410 gen_op_check_align_T0_7();
4411 ABI32_MASK(cpu_T[0]);
4412 tcg_gen_qemu_st64(cpu_T[1], cpu_T[0], dc->mem_idx);
4413 break;
4414 case 0x1e: /* V9 stxa */
4415 gen_op_check_align_T0_7();
4416 gen_st_asi(insn, 8);
4417 break;
4418 #endif
4419 default:
4420 goto illegal_insn;
4421 }
4422 } else if (xop > 0x23 && xop < 0x28) {
4423 if (gen_trap_ifnofpu(dc))
4424 goto jmp_insn;
4425 switch (xop) {
4426 case 0x24: /* store fpreg */
4427 gen_op_check_align_T0_3();
4428 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
4429 offsetof(CPUState, fpr[rd]));
4430 tcg_gen_qemu_st32(cpu_tmp32, cpu_T[0], dc->mem_idx);
4431 break;
4432 case 0x25: /* stfsr, V9 stxfsr */
4433 #ifdef CONFIG_USER_ONLY
4434 gen_op_check_align_T0_3();
4435 #endif
4436 tcg_gen_helper_0_0(helper_stfsr);
4437 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
4438 offsetof(CPUState, ft0));
4439 tcg_gen_qemu_st32(cpu_tmp32, cpu_T[0], dc->mem_idx);
4440 break;
4441 case 0x26:
4442 #ifdef TARGET_SPARC64
4443 #if defined(CONFIG_USER_ONLY)
4444 /* V9 stqf, store quad fpreg */
4445 gen_op_check_align_T0_7();
4446 gen_op_load_fpr_QT0(QFPREG(rd));
4447 gen_op_ldst(stqf);
4448 break;
4449 #else
4450 goto nfpu_insn;
4451 #endif
4452 #else /* !TARGET_SPARC64 */
4453 /* stdfq, store floating point queue */
4454 #if defined(CONFIG_USER_ONLY)
4455 goto illegal_insn;
4456 #else
4457 if (!supervisor(dc))
4458 goto priv_insn;
4459 if (gen_trap_ifnofpu(dc))
4460 goto jmp_insn;
4461 goto nfq_insn;
4462 #endif
4463 #endif
4464 case 0x27:
4465 gen_op_check_align_T0_7();
4466 gen_op_load_fpr_DT0(DFPREG(rd));
4467 gen_op_ldst(stdf);
4468 break;
4469 default:
4470 goto illegal_insn;
4471 }
4472 } else if (xop > 0x33 && xop < 0x3f) {
4473 switch (xop) {
4474 #ifdef TARGET_SPARC64
4475 case 0x34: /* V9 stfa */
4476 gen_op_check_align_T0_3();
4477 gen_op_load_fpr_FT0(rd);
4478 gen_stf_asi(insn, 4, rd);
4479 break;
4480 case 0x36: /* V9 stqfa */
4481 #if defined(CONFIG_USER_ONLY)
4482 gen_op_check_align_T0_7();
4483 gen_op_load_fpr_QT0(QFPREG(rd));
4484 gen_stf_asi(insn, 16, QFPREG(rd));
4485 break;
4486 #else
4487 goto nfpu_insn;
4488 #endif
4489 case 0x37: /* V9 stdfa */
4490 gen_op_check_align_T0_3();
4491 gen_op_load_fpr_DT0(DFPREG(rd));
4492 gen_stf_asi(insn, 8, DFPREG(rd));
4493 break;
4494 case 0x3c: /* V9 casa */
4495 gen_op_check_align_T0_3();
4496 gen_cas_asi(insn, rd);
4497 gen_movl_T1_reg(rd);
4498 break;
4499 case 0x3e: /* V9 casxa */
4500 gen_op_check_align_T0_7();
4501 gen_casx_asi(insn, rd);
4502 gen_movl_T1_reg(rd);
4503 break;
4504 #else
4505 case 0x34: /* stc */
4506 case 0x35: /* stcsr */
4507 case 0x36: /* stdcq */
4508 case 0x37: /* stdc */
4509 goto ncp_insn;
4510 #endif
4511 default:
4512 goto illegal_insn;
4513 }
4514 }
4515 else
4516 goto illegal_insn;
4517 }
4518 break;
4519 }
4520 /* default case for non jump instructions */
4521 if (dc->npc == DYNAMIC_PC) {
4522 dc->pc = DYNAMIC_PC;
4523 gen_op_next_insn();
4524 } else if (dc->npc == JUMP_PC) {
4525 /* we can do a static jump */
4526 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_T[2]);
4527 dc->is_br = 1;
4528 } else {
4529 dc->pc = dc->npc;
4530 dc->npc = dc->npc + 4;
4531 }
4532 jmp_insn:
4533 return;
4534 illegal_insn:
4535 save_state(dc);
4536 gen_op_exception(TT_ILL_INSN);
4537 dc->is_br = 1;
4538 return;
4539 #if !defined(CONFIG_USER_ONLY)
4540 priv_insn:
4541 save_state(dc);
4542 gen_op_exception(TT_PRIV_INSN);
4543 dc->is_br = 1;
4544 return;
4545 nfpu_insn:
4546 save_state(dc);
4547 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4548 dc->is_br = 1;
4549 return;
4550 #ifndef TARGET_SPARC64
4551 nfq_insn:
4552 save_state(dc);
4553 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4554 dc->is_br = 1;
4555 return;
4556 #endif
4557 #endif
4558 #ifndef TARGET_SPARC64
4559 ncp_insn:
4560 save_state(dc);
4561 gen_op_exception(TT_NCP_INSN);
4562 dc->is_br = 1;
4563 return;
4564 #endif
4565 }
4566
/* NOTE(review): intentionally empty.  Appears to exist only to satisfy the
   TCG macro-expansion hook interface (no SPARC-specific macro ops are
   generated) — confirm where this callback is registered before removing. */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
}
4570
/* Translate one block of SPARC guest code starting at tb->pc into TCG ops.
 *
 * tb:  translation block being filled.
 * spc: non-zero on the "search PC" path: per-op guest pc/npc tables
 *      (gen_opc_pc / gen_opc_npc / gen_opc_instr_start / gen_opc_jump_pc)
 *      are recorded instead of setting tb->size.
 * env: CPU state, consulted for breakpoints, MMU index, FPU-enabled flag
 *      and single-step mode.
 *
 * Always returns 0.
 */
static inline int gen_intermediate_code_internal(TranslationBlock * tb,
                                                 int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    int j, lj = -1;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* npc of the first insn is carried in via cs_base (delay-slot state) */
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->fpu_enabled = cpu_fpu_enabled(env);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* scratch temporaries shared by the per-insn generator functions */
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);

    do {
        /* stop and raise a debug exception when a breakpoint is hit */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc);
                    tcg_gen_helper_0_0(helper_debug);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            if (loglevel > 0)
                fprintf(logfile, "Search PC...\n");
            /* record guest pc/npc for every generated op index up to here,
               zero-filling the gap since the previous instruction start */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
            }
        }
        last_pc = dc->pc;
        disas_sparc_insn(dc);

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled) {
            gen_jmp_im(dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32));

 exit_gen_loop:
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_branch(dc, dc->pc, dc->npc);
        } else {
            /* dynamic pc/npc: store state and return to the main loop */
            if (dc->pc != DYNAMIC_PC)
                gen_jmp_im(dc->pc);
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* zero-fill the instr-start table out to the last generated op */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        if (loglevel > 0) {
            page_dump(logfile);
        }
#endif
        /* expose the two possible jump targets for JUMP_PC restoration */
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
    }
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "--------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
    return 0;
}
4680
4681 int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4682 {
4683 return gen_intermediate_code_internal(tb, 0, env);
4684 }
4685
4686 int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4687 {
4688 return gen_intermediate_code_internal(tb, 1, env);
4689 }
4690
/* Hard-reset the CPU: flush the TLB and set the window and control
   registers to their power-on values.  */
void cpu_reset(CPUSPARCState *env)
{
    tlb_flush(env, 1);
    env->cwp = 0;
    env->wim = 1;
    /* regwptr must track cwp, which was just cleared.  */
    env->regwptr = env->regbase + (env->cwp * 16);
#if defined(CONFIG_USER_ONLY)
    env->user_mode_only = 1;
#ifdef TARGET_SPARC64
    /* User-mode defaults: most windows usable, FPU enabled,
       interrupts on.  */
    env->cleanwin = NWINDOWS - 2;
    env->cansave = NWINDOWS - 2;
    env->pstate = PS_RMO | PS_PEF | PS_IE;
    env->asi = 0x82; // Primary no-fault
#endif
#else
    /* System emulation: come up in privileged mode with traps
       disabled (psret = 0).  */
    env->psret = 0;
    env->psrs = 1;
    env->psrps = 1;
#ifdef TARGET_SPARC64
    env->pstate = PS_PRIV;
    env->hpstate = HS_PRIV;
    /* NOTE(review): presumably the UltraSPARC reset vector -- confirm
       against the hardware manual.  */
    env->pc = 0x1fff0000000ULL;
    env->tsptr = &env->ts[env->tl];
#else
    env->pc = 0;
    /* MMU disabled and no-fault off; restore the model's boot-mode
       bits (mmu_bm, set from the CPU definition in cpu_sparc_init).  */
    env->mmuregs[0] &= ~(MMU_E | MMU_NF);
    env->mmuregs[0] |= env->mmu_bm;
#endif
    env->npc = env->pc + 4;
#endif
}
4722
/* Allocate and initialise a CPUSPARCState for the named CPU model.
   Returns NULL if the model is unknown or allocation fails.  On the
   first call it also registers all TCG global variables used by the
   translator (guarded by 'inited', so registration happens once even
   with multiple CPUs).  */
CPUSPARCState *cpu_sparc_init(const char *cpu_model)
{
    CPUSPARCState *env;
    const sparc_def_t *def;
    static int inited;
    unsigned int i;
    /* Names for the TCG globals backing %g1..%g7.  */
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };

    def = cpu_sparc_find_by_name(cpu_model);
    if (!def)
        return NULL;

    env = qemu_mallocz(sizeof(CPUSPARCState));
    if (!env)
        return NULL;
    cpu_exec_init(env);
    env->cpu_model_str = cpu_model;
    env->version = def->iu_version;
    env->fsr = def->fpu_version;
#if !defined(TARGET_SPARC64)
    /* sparc32: copy the model's MMU register masks and version.  */
    env->mmu_bm = def->mmu_bm;
    env->mmu_ctpr_mask = def->mmu_ctpr_mask;
    env->mmu_cxr_mask = def->mmu_cxr_mask;
    env->mmu_sfsr_mask = def->mmu_sfsr_mask;
    env->mmu_trcr_mask = def->mmu_trcr_mask;
    env->mmuregs[0] |= def->mmu_version;
    cpu_sparc_set_id(env, 0);
#endif

    /* init various static tables */
    if (!inited) {
        inited = 1;

        tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
        cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
        //#if TARGET_LONG_BITS > HOST_LONG_BITS
#ifdef TARGET_SPARC64
        /* 64-bit target: T0-T2 live in the CPU state structure.  */
        cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t0), "T0");
        cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t1), "T1");
        cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t2), "T2");
        cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
#else
        /* 32-bit target: T0-T2 are mapped to fixed host registers.  */
        cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
        cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
        cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
#endif
        cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
        cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        /* %g0 is hardwired to zero, so only %g1..%g7 get TCG globals.  */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
    }

    cpu_reset(env);

    return env;
}
4817
4818 void cpu_sparc_set_id(CPUSPARCState *env, unsigned int cpu)
4819 {
4820 #if !defined(TARGET_SPARC64)
4821 env->mxccregs[7] = ((cpu + 8) & 0xf) << 24;
4822 #endif
4823 }
4824
/* Table of supported CPU models.  iu_version is copied to env->version,
   fpu_version to env->fsr and mmu_version is OR-ed into env->mmuregs[0]
   (see cpu_sparc_init); the remaining members are sparc32 MMU register
   masks.  NOTE(review): the individual version constants presumably
   match the real chips' ID registers -- confirm against vendor
   manuals.  */
static const sparc_def_t sparc_defs[] = {
#ifdef TARGET_SPARC64
    /* V9 models: iu_version also encodes MAXTL and the window count.  */
    {
        .name = "Fujitsu Sparc64",
        .iu_version = ((0x04ULL << 48) | (0x02ULL << 32) | (0ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Fujitsu Sparc64 III",
        .iu_version = ((0x04ULL << 48) | (0x03ULL << 32) | (0ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Fujitsu Sparc64 IV",
        .iu_version = ((0x04ULL << 48) | (0x04ULL << 32) | (0ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Fujitsu Sparc64 V",
        .iu_version = ((0x04ULL << 48) | (0x05ULL << 32) | (0x51ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "TI UltraSparc I",
        .iu_version = ((0x17ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "TI UltraSparc II",
        .iu_version = ((0x17ULL << 48) | (0x11ULL << 32) | (0x20ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "TI UltraSparc IIi",
        .iu_version = ((0x17ULL << 48) | (0x12ULL << 32) | (0x91ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "TI UltraSparc IIe",
        .iu_version = ((0x17ULL << 48) | (0x13ULL << 32) | (0x14ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Sun UltraSparc III",
        .iu_version = ((0x3eULL << 48) | (0x14ULL << 32) | (0x34ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Sun UltraSparc III Cu",
        .iu_version = ((0x3eULL << 48) | (0x15ULL << 32) | (0x41ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Sun UltraSparc IIIi",
        .iu_version = ((0x3eULL << 48) | (0x16ULL << 32) | (0x34ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Sun UltraSparc IV",
        .iu_version = ((0x3eULL << 48) | (0x18ULL << 32) | (0x31ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Sun UltraSparc IV+",
        .iu_version = ((0x3eULL << 48) | (0x19ULL << 32) | (0x22ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "Sun UltraSparc IIIi+",
        .iu_version = ((0x3eULL << 48) | (0x22ULL << 32) | (0ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
    {
        .name = "NEC UltraSparc I",
        .iu_version = ((0x22ULL << 48) | (0x10ULL << 32) | (0x40ULL << 24)
                       | (MAXTL << 8) | (NWINDOWS - 1)),
        .fpu_version = 0x00000000,
        .mmu_version = 0,
    },
#else
    /* sparc32 models, including per-model MMU register masks.  */
    {
        .name = "Fujitsu MB86900",
        .iu_version = 0x00 << 24, /* Impl 0, ver 0 */
        .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
        .mmu_version = 0x00 << 24, /* Impl 0, ver 0 */
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Fujitsu MB86904",
        .iu_version = 0x04 << 24, /* Impl 0, ver 4 */
        .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
        .mmu_version = 0x04 << 24, /* Impl 0, ver 4 */
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x00ffffc0,
        .mmu_cxr_mask = 0x000000ff,
        .mmu_sfsr_mask = 0x00016fff,
        .mmu_trcr_mask = 0x00ffffff,
    },
    {
        .name = "Fujitsu MB86907",
        .iu_version = 0x05 << 24, /* Impl 0, ver 5 */
        .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
        .mmu_version = 0x05 << 24, /* Impl 0, ver 5 */
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0xffffffc0,
        .mmu_cxr_mask = 0x000000ff,
        .mmu_sfsr_mask = 0x00016fff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "LSI L64811",
        .iu_version = 0x10 << 24, /* Impl 1, ver 0 */
        .fpu_version = 1 << 17, /* FPU version 1 (LSI L64814) */
        .mmu_version = 0x10 << 24,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Cypress CY7C601",
        .iu_version = 0x11 << 24, /* Impl 1, ver 1 */
        .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
        .mmu_version = 0x10 << 24,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Cypress CY7C611",
        .iu_version = 0x13 << 24, /* Impl 1, ver 3 */
        .fpu_version = 3 << 17, /* FPU version 3 (Cypress CY7C602) */
        .mmu_version = 0x10 << 24,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "TI SuperSparc II",
        .iu_version = 0x40000000,
        .fpu_version = 0 << 17,
        .mmu_version = 0x04000000,
        .mmu_bm = 0x00002000,
        .mmu_ctpr_mask = 0xffffffc0,
        .mmu_cxr_mask = 0x0000ffff,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "TI MicroSparc I",
        .iu_version = 0x41000000,
        .fpu_version = 4 << 17,
        .mmu_version = 0x41000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0x00016fff,
        .mmu_trcr_mask = 0x0000003f,
    },
    {
        .name = "TI MicroSparc II",
        .iu_version = 0x42000000,
        .fpu_version = 4 << 17,
        .mmu_version = 0x02000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x00ffffc0,
        .mmu_cxr_mask = 0x000000ff,
        .mmu_sfsr_mask = 0x00016fff,
        .mmu_trcr_mask = 0x00ffffff,
    },
    {
        .name = "TI MicroSparc IIep",
        .iu_version = 0x42000000,
        .fpu_version = 4 << 17,
        .mmu_version = 0x04000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x00ffffc0,
        .mmu_cxr_mask = 0x000000ff,
        .mmu_sfsr_mask = 0x00016bff,
        .mmu_trcr_mask = 0x00ffffff,
    },
    {
        .name = "TI SuperSparc 51",
        .iu_version = 0x43000000,
        .fpu_version = 0 << 17,
        .mmu_version = 0x04000000,
        .mmu_bm = 0x00002000,
        .mmu_ctpr_mask = 0xffffffc0,
        .mmu_cxr_mask = 0x0000ffff,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "TI SuperSparc 61",
        .iu_version = 0x44000000,
        .fpu_version = 0 << 17,
        .mmu_version = 0x04000000,
        .mmu_bm = 0x00002000,
        .mmu_ctpr_mask = 0xffffffc0,
        .mmu_cxr_mask = 0x0000ffff,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Ross RT625",
        .iu_version = 0x1e000000,
        .fpu_version = 1 << 17,
        .mmu_version = 0x1e000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Ross RT620",
        .iu_version = 0x1f000000,
        .fpu_version = 1 << 17,
        .mmu_version = 0x1f000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "BIT B5010",
        .iu_version = 0x20000000,
        .fpu_version = 0 << 17, /* B5010/B5110/B5120/B5210 */
        .mmu_version = 0x20000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Matsushita MN10501",
        .iu_version = 0x50000000,
        .fpu_version = 0 << 17,
        .mmu_version = 0x50000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "Weitek W8601",
        .iu_version = 0x90 << 24, /* Impl 9, ver 0 */
        .fpu_version = 3 << 17, /* FPU version 3 (Weitek WTL3170/2) */
        .mmu_version = 0x10 << 24,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "LEON2",
        .iu_version = 0xf2000000,
        .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
        .mmu_version = 0xf2000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
    {
        .name = "LEON3",
        .iu_version = 0xf3000000,
        .fpu_version = 4 << 17, /* FPU version 4 (Meiko) */
        .mmu_version = 0xf3000000,
        .mmu_bm = 0x00004000,
        .mmu_ctpr_mask = 0x007ffff0,
        .mmu_cxr_mask = 0x0000003f,
        .mmu_sfsr_mask = 0xffffffff,
        .mmu_trcr_mask = 0xffffffff,
    },
#endif
};
5144
5145 static const sparc_def_t *cpu_sparc_find_by_name(const unsigned char *name)
5146 {
5147 unsigned int i;
5148
5149 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
5150 if (strcasecmp(name, sparc_defs[i].name) == 0) {
5151 return &sparc_defs[i];
5152 }
5153 }
5154 return NULL;
5155 }
5156
5157 void sparc_cpu_list (FILE *f, int (*cpu_fprintf)(FILE *f, const char *fmt, ...))
5158 {
5159 unsigned int i;
5160
5161 for (i = 0; i < sizeof(sparc_defs) / sizeof(sparc_def_t); i++) {
5162 (*cpu_fprintf)(f, "Sparc %16s IU " TARGET_FMT_lx " FPU %08x MMU %08x\n",
5163 sparc_defs[i].name,
5164 sparc_defs[i].iu_version,
5165 sparc_defs[i].fpu_version,
5166 sparc_defs[i].mmu_version);
5167 }
5168 }
5169
/* Yield flag character 'b' when PSR mask 'a' is set, '-' otherwise.
   'env' is taken from the caller's scope.  Arguments and the whole
   expansion are parenthesized so compound arguments (e.g. a|b, x?y:z)
   expand safely.  */
#define GET_FLAG(a,b) ((env->psr & (a)) ? (b) : '-')
5171
/* Dump the CPU register state (pc/npc, globals, the current register
   window, FP registers and status/flag registers) to 'f' using the
   supplied fprintf-like callback.  'flags' is currently unused.  */
void cpu_dump_state(CPUState *env, FILE *f,
                    int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
                    int flags)
{
    int i, x;

    cpu_fprintf(f, "pc: " TARGET_FMT_lx " npc: " TARGET_FMT_lx "\n", env->pc, env->npc);
    cpu_fprintf(f, "General Registers:\n");
    /* %g0..%g7, four per line.  */
    for (i = 0; i < 4; i++)
        cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
    cpu_fprintf(f, "\n");
    for (; i < 8; i++)
        cpu_fprintf(f, "%%g%c: " TARGET_FMT_lx "\t", i + '0', env->gregs[i]);
    cpu_fprintf(f, "\nCurrent Register Window:\n");
    /* x selects the bank within the window: 0 = %o, 1 = %l, 2 = %i.  */
    for (x = 0; x < 3; x++) {
        for (i = 0; i < 4; i++)
            cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
                        (x == 0 ? 'o' : (x == 1 ? 'l' : 'i')), i,
                        env->regwptr[i + x * 8]);
        cpu_fprintf(f, "\n");
        for (; i < 8; i++)
            cpu_fprintf(f, "%%%c%d: " TARGET_FMT_lx "\t",
                        (x == 0 ? 'o' : x == 1 ? 'l' : 'i'), i,
                        env->regwptr[i + x * 8]);
        cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "\nFloating Point Registers:\n");
    for (i = 0; i < 32; i++) {
        if ((i & 3) == 0)
            cpu_fprintf(f, "%%f%02d:", i);
        cpu_fprintf(f, " %016lf", env->fpr[i]);
        if ((i & 3) == 3)
            cpu_fprintf(f, "\n");
    }
#ifdef TARGET_SPARC64
    cpu_fprintf(f, "pstate: 0x%08x ccr: 0x%02x asi: 0x%02x tl: %d fprs: %d\n",
                env->pstate, GET_CCR(env), env->asi, env->tl, env->fprs);
    cpu_fprintf(f, "cansave: %d canrestore: %d otherwin: %d wstate %d cleanwin %d cwp %d\n",
                env->cansave, env->canrestore, env->otherwin, env->wstate,
                env->cleanwin, NWINDOWS - 1 - env->cwp);
#else
    cpu_fprintf(f, "psr: 0x%08x -> %c%c%c%c %c%c%c wim: 0x%08x\n", GET_PSR(env),
                GET_FLAG(PSR_ZERO, 'Z'), GET_FLAG(PSR_OVF, 'V'),
                GET_FLAG(PSR_NEG, 'N'), GET_FLAG(PSR_CARRY, 'C'),
                env->psrs?'S':'-', env->psrps?'P':'-',
                env->psret?'E':'-', env->wim);
#endif
    cpu_fprintf(f, "fsr: 0x%08x\n", GET_FSR32(env));
}
5221
5222 #if defined(CONFIG_USER_ONLY)
/* User-mode emulation has no guest MMU: virtual addresses map 1:1 to
   "physical" addresses.  */
target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
{
    return addr;
}
5227
5228 #else
5229 extern int get_physical_address (CPUState *env, target_phys_addr_t *physical, int *prot,
5230 int *access_index, target_ulong address, int rw,
5231 int mmu_idx);
5232
/* Debugger address translation: resolve a guest virtual address to a
   physical address without raising a fault.  Tries rw == 2 first and
   falls back to rw == 0; NOTE(review): rw == 2 presumably selects an
   instruction-fetch lookup and rw == 0 a data read -- confirm against
   get_physical_address() in helper.c.  Returns -1 when the address
   cannot be translated or maps to no backing RAM/ROM.  */
target_phys_addr_t cpu_get_phys_page_debug(CPUState *env, target_ulong addr)
{
    target_phys_addr_t phys_addr;
    int prot, access_index;

    if (get_physical_address(env, &phys_addr, &prot, &access_index, addr, 2,
                             MMU_KERNEL_IDX) != 0)
        if (get_physical_address(env, &phys_addr, &prot, &access_index, addr,
                                 0, MMU_KERNEL_IDX) != 0)
            return -1;
    if (cpu_get_physical_page_desc(phys_addr) == IO_MEM_UNASSIGNED)
        return -1;
    return phys_addr;
}
5247 #endif
5248
5249 void helper_flush(target_ulong addr)
5250 {
5251 addr &= ~7;
5252 tb_invalidate_page_range(addr, addr + 8);
5253 }