/*
 * Source: qemu.git (git.proxmox.com mirror) - target-sparc/translate.c
 * Commit subject: "Improve instruction name comments for easier searching"
 */
1 /*
2 SPARC translation
3
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
6
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
11
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
20 */
21
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
34 #define GEN_HELPER 1
35 #include "helper.h"
36
37 #define DEBUG_DISAS
38
39 #define DYNAMIC_PC 1 /* dynamic pc value */
40 #define JUMP_PC 2 /* dynamic pc value which takes only two values
41 according to jump_pc[T2] */
42
43 /* global register indexes */
44 static TCGv_ptr cpu_env, cpu_regwptr;
45 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
46 static TCGv_i32 cpu_psr;
47 static TCGv cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
48 static TCGv cpu_y;
49 #ifndef CONFIG_USER_ONLY
50 static TCGv cpu_tbr;
51 #endif
52 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
53 #ifdef TARGET_SPARC64
54 static TCGv_i32 cpu_xcc, cpu_asi, cpu_fprs;
55 static TCGv cpu_gsr;
56 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
57 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
58 static TCGv_i32 cpu_softint;
59 #else
60 static TCGv cpu_wim;
61 #endif
62 /* local register indexes (only used inside old micro ops) */
63 static TCGv cpu_tmp0;
64 static TCGv_i32 cpu_tmp32;
65 static TCGv_i64 cpu_tmp64;
66 /* Floating point registers */
67 static TCGv_i32 cpu_fpr[TARGET_FPREGS];
68
69 #include "gen-icount.h"
70
/* Per-translation-block disassembly state, threaded through the
   instruction decoder while one TB is being translated. */
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;               /* NOTE(review): appears to flag "TB ended in a
                                branch"; confirm against the decode loop */
    int mem_idx;             /* MMU index; also encodes privilege level
                                (see supervisor()/hypervisor() below) */
    int fpu_enabled;         /* nonzero when FPU access is permitted */
    int address_mask_32bit;  /* truncate effective addresses to 32 bits
                                (consumed via AM_CHECK) */
    struct TranslationBlock *tb; /* TB being generated */
    sparc_def_t *def;        /* CPU model definition */
} DisasContext;
82
// This function uses non-native bit order
/* Extract bits FROM..TO of X, numbering bits big-endian style
   (bit 0 is the MSB, as instruction fields are drawn in the manuals'
   diagrams read left to right). */
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extended variants of the two extractors above. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Map an instruction FP register number to the index of the first
   32-bit half: V9 encodes double/quad regs >= 32 via the low bit. */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
/* Pre-V9: doubles/quads are simply even/multiple-of-4 aligned pairs. */
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

/* Trap-number masks for the Tcc instruction. */
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
104
/* Sign-extend the low LEN bits of X to a full 32-bit signed value.
 * LEN is the field width in bits (1..32).
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    /* Do the left shift on an unsigned value: left-shifting a negative
       int is undefined behaviour in C.  The arithmetic right shift of a
       negative int (implementation-defined) is relied on here, as it is
       throughout QEMU. */
    return (int)((unsigned int)x << len) >> len;
}

/* insn bit 13: set when the second ALU operand is an immediate (i field) */
#define IS_IMM (insn & (1<<13))
112
/* floating point registers moves */

/* Copy the single-precision pair F[src], F[src+1] into the CPU-state
   scratch double dt0, used as an implicit operand by FP helpers. */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

/* Same as above, into the second scratch double dt1. */
static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

/* Copy the scratch double dt0 (typically a helper's result) back into
   the register pair F[dst], F[dst+1]. */
static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

/* Copy the four single-precision regs F[src..src+3] into scratch quad qt0. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

/* Same as above, into the second scratch quad qt1. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

/* Copy scratch quad qt0 back into the register quad F[dst..dst+3]. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
173
/* moves */
#ifdef CONFIG_USER_ONLY
/* User-mode emulation: never privileged. */
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
/* System emulation: mem_idx doubles as the privilege level —
   >= 1 means supervisor (or higher), == 2 means hypervisor. */
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
/* Mask addresses to 32 bits only when the CPU requests it. */
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
/* 32-bit ABI: addresses are always masked. */
#define AM_CHECK(dc) (1)
#endif
#endif
195
/* Truncate ADDR to 32 bits when 32-bit address masking is in effect
   (see AM_CHECK).  No-op when translating for a 32-bit target. */
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
203
204 static inline void gen_movl_reg_TN(int reg, TCGv tn)
205 {
206 if (reg == 0)
207 tcg_gen_movi_tl(tn, 0);
208 else if (reg < 8)
209 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
210 else {
211 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
212 }
213 }
214
215 static inline void gen_movl_TN_reg(int reg, TCGv tn)
216 {
217 if (reg == 0)
218 return;
219 else if (reg < 8)
220 tcg_gen_mov_tl(cpu_gregs[reg], tn);
221 else {
222 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
223 }
224 }
225
/* End the current TB jumping to PC/NPC.  When both targets lie on the
   same guest page as this TB we can use a direct, patchable TB link
   (tcg_gen_goto_tb); otherwise exit to the main loop.  TB_NUM selects
   which of the two goto_tb link slots is used. */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        /* return (tb | tb_num) so the caller can patch the link */
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
246
// XXX suboptimal
/* Extract PSR.N from the 32-bit flags image SRC into REG as 0 or 1. */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract PSR.Z from SRC into REG as 0 or 1. */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract PSR.V from SRC into REG as 0 or 1. */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract PSR.C from SRC into REG as 0 or 1. */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
275
/* Zero the icc flags image.  This zeroes the whole cpu_psr value; the
   gen_cc_* helpers below then OR individual flag bits back in, so
   cpu_psr is evidently used as a flags-only image at translation time. */
static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}

#ifdef TARGET_SPARC64
/* Same for the 64-bit condition codes (xcc). */
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
287
/* old op:
   if (!T0)
      env->psr |= PSR_ZERO;
   if ((int32_t) T0 < 0)
      env->psr |= PSR_NEG;
*/
/* OR PSR.Z and PSR.N into cpu_psr according to the low 32 bits of DST.
   Callers in this file clear the flags first (gen_cc_clear_icc). */
static inline void gen_cc_NZ_icc(TCGv dst)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new();
    /* Z: test only the low 32 bits */
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    /* N: sign of the value seen as a 32-bit integer */
    tcg_gen_ext32s_tl(r_temp, dst);
    tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
    tcg_temp_free(r_temp);
}

#ifdef TARGET_SPARC64
/* As gen_cc_NZ_icc, but on the full 64-bit value, into cpu_xcc. */
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif
328
/* old op:
   if (T0 < src1)
      env->psr |= PSR_CARRY;
*/
/* OR PSR.C into cpu_psr for an addition: unsigned 32-bit carry out
   occurred iff the (truncated) sum DST is below the operand SRC1. */
static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new();
    r_temp2 = tcg_temp_new();
    /* compare only the low 32 bits */
    tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}

#ifdef TARGET_SPARC64
/* Same carry test on the full 64-bit values, into cpu_xcc. */
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
361
/* old op:
   if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
      env->psr |= PSR_OVF;
*/
/* OR PSR.V into cpu_psr for an addition: signed overflow occurred iff
   the operands agree in sign (bit 31) and the result differs. */
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);          /* ~(src1 ^ src2): same sign */
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);     /* result sign changed */
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    /* move bit 31 down to the PSR.V position, then OR it in */
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}

#ifdef TARGET_SPARC64
/* Same overflow test on bit 63, into cpu_xcc. */
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
399
/* Trap on overflow for tagged add: raise TT_TOVF if the 32-bit signed
   addition DST = SRC1 + SRC2 overflowed (same sign test as
   gen_cc_V_add_icc, but trapping instead of setting PSR.V). */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_not_tl(r_temp, r_temp);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
421
/* Tagged arithmetic: set PSR.V if either operand has a nonzero tag
   (either of its two low bits set). */
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

/* Trapping variant: raise TT_TOVF immediately if either operand has a
   nonzero tag (used by TADDccTV/TSUBccTV). */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv_i32 r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}
448
/* Finish an ADDcc: cpu_cc_src/src2/dst already hold the operands and
   the sum; compute all icc (and, for 64-bit, xcc) flags and copy the
   result to DST. */
static inline void gen_op_add_cc2(TCGv dst)
{
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* ADDcc with an immediate second operand. */
static inline void gen_op_addi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_src, src2);
    gen_op_add_cc2(dst);
}

/* ADDcc with a register second operand. */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_op_add_cc2(dst);
}
479
/* Finish an ADDXcc (add with carry-in).  Unlike gen_op_add_cc2 this
   does NOT clear the flags: the caller has already cleared them and
   OR-ed in the carry of the intermediate src1+C step, so the carry
   bits of the two partial additions accumulate here. */
static inline void gen_op_addx_cc2(TCGv dst)
{
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* ADDXcc with an immediate second operand: dst = src1 + C + src2. */
static inline void gen_op_addxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    /* first step: src1 + carry-in */
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    /* second step: add the immediate, remaining flags in addx_cc2 */
    tcg_gen_addi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    gen_op_addx_cc2(dst);
}

/* ADDXcc with a register second operand: dst = src1 + C + src2. */
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    /* first step: src1 + carry-in */
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    /* second step: add the register operand */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_op_addx_cc2(dst);
}
524
/* TADDcc: tagged add; like ADDcc but PSR.V is additionally set when
   either operand carries a nonzero tag (low two bits). */
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* TADDccTV: tagged add that traps (TT_TOVF) instead of setting PSR.V,
   both on a nonzero tag and on signed 32-bit overflow. */
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
562
/* old op:
   if (src1 < T1)
      env->psr |= PSR_CARRY;
*/
/* OR PSR.C (borrow) into cpu_psr for a subtraction: borrow occurred
   iff SRC1 < SRC2 as unsigned 32-bit values. */
static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new();
    r_temp2 = tcg_temp_new();
    /* compare only the low 32 bits */
    tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}

#ifdef TARGET_SPARC64
/* Same borrow test on the full 64-bit values, into cpu_xcc. */
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
595
/* old op:
   if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
      env->psr |= PSR_OVF;
*/
/* OR PSR.V into cpu_psr for a subtraction: signed overflow occurred
   iff the operands differ in sign (bit 31) and the result's sign
   differs from SRC1's. */
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    /* move bit 31 down to the PSR.V position, then OR it in */
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
    tcg_temp_free(r_temp);
}

#ifdef TARGET_SPARC64
/* Same overflow test on bit 63, into cpu_xcc. */
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#endif
631
/* Trap on overflow for tagged subtract: raise TT_TOVF if the 32-bit
   signed subtraction DST = SRC1 - SRC2 overflowed (same test as
   gen_cc_V_sub_icc, but trapping instead of setting PSR.V). */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new();
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
652
/* Finish a SUBcc: cpu_cc_src/src2/dst already hold the operands and
   the difference; compute all icc (and xcc) flags and copy the result
   to DST.  Note C (borrow) is derived from the operands, not the
   result. */
static inline void gen_op_sub_cc2(TCGv dst)
{
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* SUBcc with an immediate second operand. */
static inline void gen_op_subi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_src, src2);
    gen_op_sub_cc2(dst);
}

/* SUBcc with a register second operand. */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_op_sub_cc2(dst);
}
683
/* Finish a SUBXcc (subtract with borrow-in).  Does NOT clear the
   flags: the caller cleared them and OR-ed in the borrow of the
   intermediate src1-C step, so the borrow bits of the two partial
   subtractions accumulate here. */
static inline void gen_op_subx_cc2(TCGv dst)
{
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* SUBXcc with an immediate second operand: dst = src1 - C - src2. */
static inline void gen_op_subxi_cc(TCGv dst, TCGv src1, target_long src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_movi_tl(cpu_cc_src2, src2);
    /* first step: src1 - borrow-in */
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    /* second step: subtract the immediate, remaining flags in subx_cc2 */
    tcg_gen_subi_tl(cpu_cc_dst, cpu_cc_dst, src2);
    gen_op_subx_cc2(dst);
}

/* SUBXcc with a register second operand: dst = src1 - C - src2. */
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    /* first step: src1 - borrow-in */
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    /* second step: subtract the register operand */
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_op_subx_cc2(dst);
}
728
/* TSUBcc: tagged subtract; like SUBcc but PSR.V is additionally set
   when either operand carries a nonzero tag (low two bits). */
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* TSUBccTV: tagged subtract that traps (TT_TOVF) instead of setting
   PSR.V, both on a nonzero tag and on signed 32-bit overflow. */
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
766
/* MULScc: one step of the V8 multiply-step instruction.  Shifts %y
   right one bit (inserting bit 0 of src1), shifts src1 right one bit
   (inserting N^V), conditionally zeroes src2 depending on %y bit 0,
   then performs an ADDcc-style addition and flag update. */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new();

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x7fffffff);
    tcg_gen_or_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
816
/* UMUL: 32x32 -> 64-bit unsigned multiply.  The high 32 bits of the
   product are also written to %y; DST receives the full 64-bit product
   on sparc64, its low 32 bits otherwise. */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* high half of the product -> %y */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}

/* SMUL: same as gen_op_umul but with sign-extended operands. */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv_i64 r_temp, r_temp2;

    r_temp = tcg_temp_new_i64();
    r_temp2 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* high half of the product -> %y */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free_i64(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free_i64(r_temp2);
}
862
#ifdef TARGET_SPARC64
/* Emit a divide-by-zero trap (TT_DIV_ZERO) if DIVISOR is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv_i32 r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
    gen_set_label(l1);
}

/* SDIVX: 64-bit signed divide.  Traps on a zero divisor, and
   special-cases INT64_MIN / -1 to yield INT64_MIN directly, since
   that quotient overflows the host division. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    /* INT64_MIN / -1 cannot be computed by the host div op */
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
895
/* Update icc after a UDIVcc/SDIVcc: N and Z from the result in DST;
   PSR.V is set when cpu_cc_src2 is nonzero (the division code left an
   overflow indicator there — NOTE(review): confirm against the
   division helper).  C is left clear by gen_cc_clear_icc. */
static inline void gen_op_div_cc(TCGv dst)
{
    int l1;

    tcg_gen_mov_tl(cpu_cc_dst, dst);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
908
/* Update condition codes after a logical operation: N and Z from DST,
   C and V left clear (gen_cc_clear_* zeroes them). */
static inline void gen_op_logic_cc(TCGv dst)
{
    tcg_gen_mov_tl(cpu_cc_dst, dst);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
#endif
}
920
/* Integer branch condition evaluators: each computes its condition as
   a 0/1 value in DST from the PSR-format flags image SRC.  The comment
   above each function gives the flag expression it implements. */

// 1
static inline void gen_op_eval_ba(TCGv dst)         /* branch always */
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)   /* equal */
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)  /* less or equal */
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)   /* less */
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src) /* less or equal, unsigned */
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)  /* carry set */
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)  /* overflow set */
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)         /* branch never */
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src) /* negative */
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)  /* not equal */
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)   /* greater */
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)  /* greater or equal */
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)  /* greater, unsigned */
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)  /* carry clear */
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src) /* positive */
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)  /* overflow clear */
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1039
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* Extract FCC0 of the selected FCC field from the FSR image SRC into
   REG as 0 or 1.  FCC_OFFSET selects which of the four FCC fields. */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Same for FCC1. */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* FP branch condition evaluators: each computes its condition as a
   0/1 value in DST from the FSR image SRC.  The comment above each
   function lists the FCC encodings (see table above) that satisfy it
   and the boolean expression used. */

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1160
1161 // 0 or 1: !FCC1
1162 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
1163 unsigned int fcc_offset)
1164 {
1165 gen_mov_reg_FCC1(dst, src, fcc_offset);
1166 tcg_gen_xori_tl(dst, dst, 0x1);
1167 }
1168
1169 // !2: !(!FCC0 & FCC1)
1170 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
1171 unsigned int fcc_offset)
1172 {
1173 gen_mov_reg_FCC0(dst, src, fcc_offset);
1174 tcg_gen_xori_tl(dst, dst, 0x1);
1175 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1176 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1177 tcg_gen_xori_tl(dst, dst, 0x1);
1178 }
1179
1180 // !3: !(FCC0 & FCC1)
1181 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
1182 unsigned int fcc_offset)
1183 {
1184 gen_mov_reg_FCC0(dst, src, fcc_offset);
1185 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1186 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1187 tcg_gen_xori_tl(dst, dst, 0x1);
1188 }
1189
/* End the TB with a two-way conditional goto: if r_cond is non-zero
   continue at pc1, otherwise at pc2 (each with npc = pc + 4).  */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    /* condition true: taken path */
    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
1204
/* Conditional branch with the annul bit set: pc1 is the branch target,
   pc2 is the address of the delay slot.  If r_cond is non-zero, execute
   the delay slot then branch (pc = pc2, npc = pc1); otherwise the delay
   slot is annulled and execution resumes at pc2 + 4.  */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
1219
/* Materialize a pending two-valued npc: set cpu_npc to npc1 if r_cond
   is non-zero, else to npc2.  Used to resolve the JUMP_PC state.  */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    tcg_gen_movi_tl(cpu_npc, npc1);
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);
    gen_set_label(l2);
}
1237
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        /* Resolve the pending two-valued npc into cpu_npc.  */
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}

/* Make cpu_npc match the translation-time npc state, resolving a
   pending JUMP_PC via cond if necessary.  */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

/* Flush both cpu_pc and cpu_npc so an exception/helper sees the
   architectural state for the current instruction.  */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}
1263
/* Advance pc to the current npc (used before a control transfer whose
   delay slot starts at npc).  Handles the JUMP_PC and DYNAMIC_PC cases
   by copying the runtime cpu_npc into cpu_pc.  */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}

/* Emit pc = npc; npc += 4 (sequential instruction advance).  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1283
/* Evaluate an integer condition code (Bicc/BPcc encoding 0x0-0xf)
   into r_dst (0/1).  On sparc64, cc selects xcc (non-zero) vs icc.  */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv_i32 r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:                   /* bn: never */
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:                   /* be */
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:                   /* ble */
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:                   /* bl */
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:                   /* bleu */
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:                   /* bcs */
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:                   /* bneg */
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:                   /* bvs */
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:                   /* ba: always */
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:                   /* bne */
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:                   /* bg */
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:                   /* bge */
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:                   /* bgu */
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:                   /* bcc */
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:                   /* bpos */
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:                   /* bvc */
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1347
/* Evaluate an FP condition code (FBfcc/FBPfcc encoding 0x0-0xf) into
   r_dst (0/1).  cc selects the fcc field; offset is the bit distance
   from fcc0 (FSR bits 10-11) to the chosen field: fcc1 lives at FSR
   bits 32-33 (offset 32-10), fcc2 at 34-35, fcc3 at 36-37.  */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:                   /* fbn: never */
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:                   /* fba: always */
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1419
1420 #ifdef TARGET_SPARC64
// Inverted logic: indexed by the BPr rcond field; the stored TCG
// condition is the *negation* of the architectural one, because
// gen_cond_reg branches away (leaving r_dst = 0) when it holds.
// Entries -1 are reserved/illegal encodings.
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

/* r_dst = 1 if r_src satisfies the register condition `cond`, else 0.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    /* branch (skipping the movi 1) when the inverse condition holds */
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
1443 #endif
1444
/* Translate an integer conditional branch (Bicc/BPcc).  offset is the
   sign-extended, word-aligned displacement; `a` is the annul bit.
   XXX: potentially incorrect if dynamic npc */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul: skip the delay slot as well */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annul: branch without executing the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer: npc becomes one of two values, resolved later */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1484
/* Translate an FP conditional branch (FBfcc/FBPfcc); same structure as
   do_branch but conditions come from the FSR fcc fields via gen_fcond.
   XXX: potentially incorrect if dynamic npc */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul: skip the delay slot as well */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annul: branch without executing the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer: npc becomes one of two values, resolved later */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1524
1525 #ifdef TARGET_SPARC64
/* Translate a V9 branch-on-register (BPr): condition is computed from
   r_reg via gen_cond_reg.  No unconditional encodings exist here.
   XXX: potentially incorrect if dynamic npc */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        /* defer: npc becomes one of two values, resolved later */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
1545
/* FP compare dispatchers (sparc64): each routes to the helper that
   updates the fcc field selected by fccno (0-3).  */

static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(r_rs1, r_rs2);
        break;
    }
}

/* Double-precision compare; operands are the implicit DT0/DT1 regs.  */
static inline void gen_op_fcmpd(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd();
        break;
    case 1:
        gen_helper_fcmpd_fcc1();
        break;
    case 2:
        gen_helper_fcmpd_fcc2();
        break;
    case 3:
        gen_helper_fcmpd_fcc3();
        break;
    }
}

/* Quad-precision compare; operands are the implicit QT0/QT1 regs.  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq();
        break;
    case 1:
        gen_helper_fcmpq_fcc1();
        break;
    case 2:
        gen_helper_fcmpq_fcc2();
        break;
    case 3:
        gen_helper_fcmpq_fcc3();
        break;
    }
}

/* FCMPEs: compare that also signals on unordered operands.  */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(r_rs1, r_rs2);
        break;
    }
}

/* FCMPEd variant of gen_op_fcmpd.  */
static inline void gen_op_fcmped(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped();
        break;
    case 1:
        gen_helper_fcmped_fcc1();
        break;
    case 2:
        gen_helper_fcmped_fcc2();
        break;
    case 3:
        gen_helper_fcmped_fcc3();
        break;
    }
}

/* FCMPEq variant of gen_op_fcmpq.  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq();
        break;
    case 1:
        gen_helper_fcmpeq_fcc1();
        break;
    case 2:
        gen_helper_fcmpeq_fcc2();
        break;
    case 3:
        gen_helper_fcmpeq_fcc3();
        break;
    }
}
1653
1654 #else
1655
/* sparc32 has a single fcc field, so fccno is ignored and every
   compare targets fcc0 directly.  */

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    gen_helper_fcmpd();
}

static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq();
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    gen_helper_fcmped();
}

static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq();
}
1685 #endif
1686
/* Raise an FP exception: replace the FSR FTT field with fsr_flags and
   raise TT_FP_EXCP.  */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv_i32 r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    gen_helper_raise_exception(r_const);
    tcg_temp_free_i32(r_const);
}
1697
/* If the FPU is disabled (system emulation only), raise TT_NFPU_INSN
   and end the TB.  Returns 1 when the trap was emitted (caller must
   skip the insn), 0 otherwise.  */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv_i32 r_const;

        /* flush pc/npc so the trap sees correct state */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        gen_helper_raise_exception(r_const);
        tcg_temp_free_i32(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
1714
/* Clear the FSR FTT and current-exception (cexc) fields.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

/* Clear the softfloat accrued exception flags via helper.  */
static inline void gen_clear_float_exceptions(void)
{
    gen_helper_clear_float_exceptions();
}
1724
1725 /* asi moves */
1726 #ifdef TARGET_SPARC64
/* Return the ASI for a load/store-alternate: the %asi register when the
   insn uses the immediate (i=1) form, otherwise the asi field of the
   instruction.  Caller frees the returned temporary.  */
static inline TCGv_i32 gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv_i32 r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new_i32();
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
1741
/* Load `size` bytes (sign- or zero-extended) from an alternate space.  */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    gen_helper_ld_asi(dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

/* Store `size` bytes to an alternate space.  */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv_i32 r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    gen_helper_st_asi(addr, src, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

/* FP load from an alternate space; rd selects the destination freg.  */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

/* FP store to an alternate space; rd selects the source freg.  */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv_i32 r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    gen_helper_stf_asi(addr, r_asi, r_size, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

/* SWAPA: atomic-style 32-bit exchange — load old word, store dst,
   return old value in dst.  */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i32 r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free_i32(r_sign);
    gen_helper_st_asi(addr, dst, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

/* LDDA: 128-bit load; the helper writes the register pair itself.  */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    gen_helper_ldda_asi(addr, r_asi, r_rd);
    tcg_temp_free_i32(r_rd);
    tcg_temp_free_i32(r_asi);
}

/* STDA: store the rd/rd+1 pair as one 64-bit value.  */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv_i32 r_asi, r_size;

    gen_movl_reg_TN(rd + 1, cpu_tmp0);
    tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
}

/* CASA: 32-bit compare-and-swap with r[rd] as the compare value.  */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1;
    TCGv_i32 r_asi;

    r_val1 = tcg_temp_new();
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_cas_asi(dst, addr, r_val1, val2, r_asi);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free(r_val1);
}

/* CASXA: 64-bit compare-and-swap with r[rd] as the compare value.  */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv_i32 r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    gen_helper_casx_asi(dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free_i32(r_asi);
}
1856
1857 #elif !defined(CONFIG_USER_ONLY)
1858
1859 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1860 int sign)
1861 {
1862 TCGv_i32 r_asi, r_size, r_sign;
1863
1864 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1865 r_size = tcg_const_i32(size);
1866 r_sign = tcg_const_i32(sign);
1867 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1868 tcg_temp_free(r_sign);
1869 tcg_temp_free(r_size);
1870 tcg_temp_free(r_asi);
1871 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1872 }
1873
1874 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1875 {
1876 TCGv_i32 r_asi, r_size;
1877
1878 tcg_gen_extu_tl_i64(cpu_tmp64, src);
1879 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1880 r_size = tcg_const_i32(size);
1881 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1882 tcg_temp_free(r_size);
1883 tcg_temp_free(r_asi);
1884 }
1885
1886 static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
1887 {
1888 TCGv_i32 r_asi, r_size, r_sign;
1889 TCGv_i64 r_val;
1890
1891 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1892 r_size = tcg_const_i32(4);
1893 r_sign = tcg_const_i32(0);
1894 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1895 tcg_temp_free(r_sign);
1896 r_val = tcg_temp_new_i64();
1897 tcg_gen_extu_tl_i64(r_val, dst);
1898 gen_helper_st_asi(addr, r_val, r_asi, r_size);
1899 tcg_temp_free_i64(r_val);
1900 tcg_temp_free(r_size);
1901 tcg_temp_free(r_asi);
1902 tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
1903 }
1904
1905 static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
1906 {
1907 TCGv_i32 r_asi, r_size, r_sign;
1908
1909 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1910 r_size = tcg_const_i32(8);
1911 r_sign = tcg_const_i32(0);
1912 gen_helper_ld_asi(cpu_tmp64, addr, r_asi, r_size, r_sign);
1913 tcg_temp_free(r_sign);
1914 tcg_temp_free(r_size);
1915 tcg_temp_free(r_asi);
1916 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
1917 gen_movl_TN_reg(rd + 1, cpu_tmp0);
1918 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
1919 tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
1920 gen_movl_TN_reg(rd, hi);
1921 }
1922
1923 static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
1924 {
1925 TCGv_i32 r_asi, r_size;
1926
1927 gen_movl_reg_TN(rd + 1, cpu_tmp0);
1928 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, hi);
1929 r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
1930 r_size = tcg_const_i32(8);
1931 gen_helper_st_asi(addr, cpu_tmp64, r_asi, r_size);
1932 tcg_temp_free(r_size);
1933 tcg_temp_free(r_asi);
1934 }
1935 #endif
1936
1937 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load an unsigned byte from the alternate space into dst,
   then store 0xff back to the same address.  */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv_i64 r_val;
    TCGv_i32 r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    gen_helper_st_asi(addr, r_val, r_asi, r_size);
    tcg_temp_free_i32(r_size);
    tcg_temp_free_i32(r_asi);
    tcg_temp_free_i64(r_val);
}
1953 #endif
1954
/* Return a TCGv holding rs1: %g0 reads as constant 0, globals come
   straight from cpu_gregs, window registers are loaded through
   cpu_regwptr into the caller-supplied scratch `def`.  */
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}
1969
/* Return a TCGv holding the second operand: a sign-extended simm13
   when the i bit is set, otherwise rs2 with the same %g0/global/window
   handling as get_src1.  */
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;

    if (IS_IMM) { /* immediate */
        target_long simm;

        simm = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl(simm); // XXX how to free?
    } else { /* register */
        unsigned int rs2;

        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
1992
1993 #define CHECK_IU_FEATURE(dc, FEATURE) \
1994 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1995 goto illegal_insn;
1996 #define CHECK_FPU_FEATURE(dc, FEATURE) \
1997 if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) \
1998 goto nfpu_insn;
1999
2000 /* before an instruction, dc->pc must be static */
2001 static void disas_sparc_insn(DisasContext * dc)
2002 {
2003 unsigned int insn, opc, rs1, rs2, rd;
2004 target_long simm;
2005
2006 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
2007 tcg_gen_debug_insn_start(dc->pc);
2008 insn = ldl_code(dc->pc);
2009 opc = GET_FIELD(insn, 0, 1);
2010
2011 rd = GET_FIELD(insn, 2, 6);
2012
2013 cpu_src1 = tcg_temp_new(); // const
2014 cpu_src2 = tcg_temp_new(); // const
2015
2016 switch (opc) {
2017 case 0: /* branches/sethi */
2018 {
2019 unsigned int xop = GET_FIELD(insn, 7, 9);
2020 int32_t target;
2021 switch (xop) {
2022 #ifdef TARGET_SPARC64
2023 case 0x1: /* V9 BPcc */
2024 {
2025 int cc;
2026
2027 target = GET_FIELD_SP(insn, 0, 18);
2028 target = sign_extend(target, 18);
2029 target <<= 2;
2030 cc = GET_FIELD_SP(insn, 20, 21);
2031 if (cc == 0)
2032 do_branch(dc, target, insn, 0, cpu_cond);
2033 else if (cc == 2)
2034 do_branch(dc, target, insn, 1, cpu_cond);
2035 else
2036 goto illegal_insn;
2037 goto jmp_insn;
2038 }
2039 case 0x3: /* V9 BPr */
2040 {
2041 target = GET_FIELD_SP(insn, 0, 13) |
2042 (GET_FIELD_SP(insn, 20, 21) << 14);
2043 target = sign_extend(target, 16);
2044 target <<= 2;
2045 cpu_src1 = get_src1(insn, cpu_src1);
2046 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
2047 goto jmp_insn;
2048 }
2049 case 0x5: /* V9 FBPcc */
2050 {
2051 int cc = GET_FIELD_SP(insn, 20, 21);
2052 if (gen_trap_ifnofpu(dc, cpu_cond))
2053 goto jmp_insn;
2054 target = GET_FIELD_SP(insn, 0, 18);
2055 target = sign_extend(target, 19);
2056 target <<= 2;
2057 do_fbranch(dc, target, insn, cc, cpu_cond);
2058 goto jmp_insn;
2059 }
2060 #else
2061 case 0x7: /* CBN+x */
2062 {
2063 goto ncp_insn;
2064 }
2065 #endif
2066 case 0x2: /* BN+x */
2067 {
2068 target = GET_FIELD(insn, 10, 31);
2069 target = sign_extend(target, 22);
2070 target <<= 2;
2071 do_branch(dc, target, insn, 0, cpu_cond);
2072 goto jmp_insn;
2073 }
2074 case 0x6: /* FBN+x */
2075 {
2076 if (gen_trap_ifnofpu(dc, cpu_cond))
2077 goto jmp_insn;
2078 target = GET_FIELD(insn, 10, 31);
2079 target = sign_extend(target, 22);
2080 target <<= 2;
2081 do_fbranch(dc, target, insn, 0, cpu_cond);
2082 goto jmp_insn;
2083 }
2084 case 0x4: /* SETHI */
2085 if (rd) { // nop
2086 uint32_t value = GET_FIELD(insn, 10, 31);
2087 TCGv r_const;
2088
2089 r_const = tcg_const_tl(value << 10);
2090 gen_movl_TN_reg(rd, r_const);
2091 tcg_temp_free(r_const);
2092 }
2093 break;
2094 case 0x0: /* UNIMPL */
2095 default:
2096 goto illegal_insn;
2097 }
2098 break;
2099 }
2100 break;
2101 case 1:
2102 /*CALL*/ {
2103 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2104 TCGv r_const;
2105
2106 r_const = tcg_const_tl(dc->pc);
2107 gen_movl_TN_reg(15, r_const);
2108 tcg_temp_free(r_const);
2109 target += dc->pc;
2110 gen_mov_pc_npc(dc, cpu_cond);
2111 dc->npc = target;
2112 }
2113 goto jmp_insn;
2114 case 2: /* FPU & Logical Operations */
2115 {
2116 unsigned int xop = GET_FIELD(insn, 7, 12);
2117 if (xop == 0x3a) { /* generate trap */
2118 int cond;
2119
2120 cpu_src1 = get_src1(insn, cpu_src1);
2121 if (IS_IMM) {
2122 rs2 = GET_FIELD(insn, 25, 31);
2123 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2124 } else {
2125 rs2 = GET_FIELD(insn, 27, 31);
2126 if (rs2 != 0) {
2127 gen_movl_reg_TN(rs2, cpu_src2);
2128 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2129 } else
2130 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2131 }
2132 cond = GET_FIELD(insn, 3, 6);
2133 if (cond == 0x8) {
2134 save_state(dc, cpu_cond);
2135 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2136 supervisor(dc))
2137 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2138 else
2139 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2140 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2141 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2142 gen_helper_raise_exception(cpu_tmp32);
2143 } else if (cond != 0) {
2144 TCGv r_cond = tcg_temp_new();
2145 int l1;
2146 #ifdef TARGET_SPARC64
2147 /* V9 icc/xcc */
2148 int cc = GET_FIELD_SP(insn, 11, 12);
2149
2150 save_state(dc, cpu_cond);
2151 if (cc == 0)
2152 gen_cond(r_cond, 0, cond);
2153 else if (cc == 2)
2154 gen_cond(r_cond, 1, cond);
2155 else
2156 goto illegal_insn;
2157 #else
2158 save_state(dc, cpu_cond);
2159 gen_cond(r_cond, 0, cond);
2160 #endif
2161 l1 = gen_new_label();
2162 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
2163
2164 if ((dc->def->features & CPU_FEATURE_HYPV) &&
2165 supervisor(dc))
2166 tcg_gen_andi_tl(cpu_dst, cpu_dst, UA2005_HTRAP_MASK);
2167 else
2168 tcg_gen_andi_tl(cpu_dst, cpu_dst, V8_TRAP_MASK);
2169 tcg_gen_addi_tl(cpu_dst, cpu_dst, TT_TRAP);
2170 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2171 gen_helper_raise_exception(cpu_tmp32);
2172
2173 gen_set_label(l1);
2174 tcg_temp_free(r_cond);
2175 }
2176 gen_op_next_insn();
2177 tcg_gen_exit_tb(0);
2178 dc->is_br = 1;
2179 goto jmp_insn;
2180 } else if (xop == 0x28) {
2181 rs1 = GET_FIELD(insn, 13, 17);
2182 switch(rs1) {
2183 case 0: /* rdy */
2184 #ifndef TARGET_SPARC64
2185 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2186 manual, rdy on the microSPARC
2187 II */
2188 case 0x0f: /* stbar in the SPARCv8 manual,
2189 rdy on the microSPARC II */
2190 case 0x10 ... 0x1f: /* implementation-dependent in the
2191 SPARCv8 manual, rdy on the
2192 microSPARC II */
2193 #endif
2194 gen_movl_TN_reg(rd, cpu_y);
2195 break;
2196 #ifdef TARGET_SPARC64
2197 case 0x2: /* V9 rdccr */
2198 gen_helper_rdccr(cpu_dst);
2199 gen_movl_TN_reg(rd, cpu_dst);
2200 break;
2201 case 0x3: /* V9 rdasi */
2202 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2203 gen_movl_TN_reg(rd, cpu_dst);
2204 break;
2205 case 0x4: /* V9 rdtick */
2206 {
2207 TCGv_ptr r_tickptr;
2208
2209 r_tickptr = tcg_temp_new_ptr();
2210 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2211 offsetof(CPUState, tick));
2212 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2213 tcg_temp_free_ptr(r_tickptr);
2214 gen_movl_TN_reg(rd, cpu_dst);
2215 }
2216 break;
2217 case 0x5: /* V9 rdpc */
2218 {
2219 TCGv r_const;
2220
2221 r_const = tcg_const_tl(dc->pc);
2222 gen_movl_TN_reg(rd, r_const);
2223 tcg_temp_free(r_const);
2224 }
2225 break;
2226 case 0x6: /* V9 rdfprs */
2227 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2228 gen_movl_TN_reg(rd, cpu_dst);
2229 break;
2230 case 0xf: /* V9 membar */
2231 break; /* no effect */
2232 case 0x13: /* Graphics Status */
2233 if (gen_trap_ifnofpu(dc, cpu_cond))
2234 goto jmp_insn;
2235 gen_movl_TN_reg(rd, cpu_gsr);
2236 break;
2237 case 0x16: /* Softint */
2238 tcg_gen_ext_i32_tl(cpu_dst, cpu_softint);
2239 gen_movl_TN_reg(rd, cpu_dst);
2240 break;
2241 case 0x17: /* Tick compare */
2242 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2243 break;
2244 case 0x18: /* System tick */
2245 {
2246 TCGv_ptr r_tickptr;
2247
2248 r_tickptr = tcg_temp_new_ptr();
2249 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2250 offsetof(CPUState, stick));
2251 gen_helper_tick_get_count(cpu_dst, r_tickptr);
2252 tcg_temp_free_ptr(r_tickptr);
2253 gen_movl_TN_reg(rd, cpu_dst);
2254 }
2255 break;
2256 case 0x19: /* System tick compare */
2257 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2258 break;
2259 case 0x10: /* Performance Control */
2260 case 0x11: /* Performance Instrumentation Counter */
2261 case 0x12: /* Dispatch Control */
2262 case 0x14: /* Softint set, WO */
2263 case 0x15: /* Softint clear, WO */
2264 #endif
2265 default:
2266 goto illegal_insn;
2267 }
2268 #if !defined(CONFIG_USER_ONLY)
2269 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2270 #ifndef TARGET_SPARC64
2271 if (!supervisor(dc))
2272 goto priv_insn;
2273 gen_helper_rdpsr(cpu_dst);
2274 #else
2275 CHECK_IU_FEATURE(dc, HYPV);
2276 if (!hypervisor(dc))
2277 goto priv_insn;
2278 rs1 = GET_FIELD(insn, 13, 17);
2279 switch (rs1) {
2280 case 0: // hpstate
2281 // gen_op_rdhpstate();
2282 break;
2283 case 1: // htstate
2284 // gen_op_rdhtstate();
2285 break;
2286 case 3: // hintp
2287 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2288 break;
2289 case 5: // htba
2290 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2291 break;
2292 case 6: // hver
2293 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2294 break;
2295 case 31: // hstick_cmpr
2296 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2297 break;
2298 default:
2299 goto illegal_insn;
2300 }
2301 #endif
2302 gen_movl_TN_reg(rd, cpu_dst);
2303 break;
2304 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2305 if (!supervisor(dc))
2306 goto priv_insn;
2307 #ifdef TARGET_SPARC64
2308 rs1 = GET_FIELD(insn, 13, 17);
2309 switch (rs1) {
2310 case 0: // tpc
2311 {
2312 TCGv_ptr r_tsptr;
2313
2314 r_tsptr = tcg_temp_new_ptr();
2315 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2316 offsetof(CPUState, tsptr));
2317 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2318 offsetof(trap_state, tpc));
2319 tcg_temp_free_ptr(r_tsptr);
2320 }
2321 break;
2322 case 1: // tnpc
2323 {
2324 TCGv_ptr r_tsptr;
2325
2326 r_tsptr = tcg_temp_new_ptr();
2327 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2328 offsetof(CPUState, tsptr));
2329 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2330 offsetof(trap_state, tnpc));
2331 tcg_temp_free_ptr(r_tsptr);
2332 }
2333 break;
2334 case 2: // tstate
2335 {
2336 TCGv_ptr r_tsptr;
2337
2338 r_tsptr = tcg_temp_new_ptr();
2339 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2340 offsetof(CPUState, tsptr));
2341 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2342 offsetof(trap_state, tstate));
2343 tcg_temp_free_ptr(r_tsptr);
2344 }
2345 break;
2346 case 3: // tt
2347 {
2348 TCGv_ptr r_tsptr;
2349
2350 r_tsptr = tcg_temp_new_ptr();
2351 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2352 offsetof(CPUState, tsptr));
2353 tcg_gen_ld_i32(cpu_tmp32, r_tsptr,
2354 offsetof(trap_state, tt));
2355 tcg_temp_free_ptr(r_tsptr);
2356 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2357 }
2358 break;
2359 case 4: // tick
2360 {
2361 TCGv_ptr r_tickptr;
2362
2363 r_tickptr = tcg_temp_new_ptr();
2364 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2365 offsetof(CPUState, tick));
2366 gen_helper_tick_get_count(cpu_tmp0, r_tickptr);
2367 gen_movl_TN_reg(rd, cpu_tmp0);
2368 tcg_temp_free_ptr(r_tickptr);
2369 }
2370 break;
2371 case 5: // tba
2372 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2373 break;
2374 case 6: // pstate
2375 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2376 offsetof(CPUSPARCState, pstate));
2377 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2378 break;
2379 case 7: // tl
2380 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2381 offsetof(CPUSPARCState, tl));
2382 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2383 break;
2384 case 8: // pil
2385 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2386 offsetof(CPUSPARCState, psrpil));
2387 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2388 break;
2389 case 9: // cwp
2390 gen_helper_rdcwp(cpu_tmp0);
2391 break;
2392 case 10: // cansave
2393 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2394 offsetof(CPUSPARCState, cansave));
2395 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2396 break;
2397 case 11: // canrestore
2398 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2399 offsetof(CPUSPARCState, canrestore));
2400 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2401 break;
2402 case 12: // cleanwin
2403 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2404 offsetof(CPUSPARCState, cleanwin));
2405 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2406 break;
2407 case 13: // otherwin
2408 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2409 offsetof(CPUSPARCState, otherwin));
2410 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2411 break;
2412 case 14: // wstate
2413 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2414 offsetof(CPUSPARCState, wstate));
2415 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2416 break;
2417 case 16: // UA2005 gl
2418 CHECK_IU_FEATURE(dc, GL);
2419 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2420 offsetof(CPUSPARCState, gl));
2421 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2422 break;
2423 case 26: // UA2005 strand status
2424 CHECK_IU_FEATURE(dc, HYPV);
2425 if (!hypervisor(dc))
2426 goto priv_insn;
2427 tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
2428 break;
2429 case 31: // ver
2430 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2431 break;
2432 case 15: // fq
2433 default:
2434 goto illegal_insn;
2435 }
2436 #else
2437 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2438 #endif
2439 gen_movl_TN_reg(rd, cpu_tmp0);
2440 break;
2441 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2442 #ifdef TARGET_SPARC64
2443 save_state(dc, cpu_cond);
2444 gen_helper_flushw();
2445 #else
2446 if (!supervisor(dc))
2447 goto priv_insn;
2448 gen_movl_TN_reg(rd, cpu_tbr);
2449 #endif
2450 break;
2451 #endif
2452 } else if (xop == 0x34) { /* FPU Operations */
2453 if (gen_trap_ifnofpu(dc, cpu_cond))
2454 goto jmp_insn;
2455 gen_op_clear_ieee_excp_and_FTT();
2456 rs1 = GET_FIELD(insn, 13, 17);
2457 rs2 = GET_FIELD(insn, 27, 31);
2458 xop = GET_FIELD(insn, 18, 26);
2459 switch (xop) {
2460 case 0x1: /* fmovs */
2461 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2462 break;
2463 case 0x5: /* fnegs */
2464 gen_helper_fnegs(cpu_fpr[rd], cpu_fpr[rs2]);
2465 break;
2466 case 0x9: /* fabss */
2467 gen_helper_fabss(cpu_fpr[rd], cpu_fpr[rs2]);
2468 break;
2469 case 0x29: /* fsqrts */
2470 CHECK_FPU_FEATURE(dc, FSQRT);
2471 gen_clear_float_exceptions();
2472 gen_helper_fsqrts(cpu_tmp32, cpu_fpr[rs2]);
2473 gen_helper_check_ieee_exceptions();
2474 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2475 break;
2476 case 0x2a: /* fsqrtd */
2477 CHECK_FPU_FEATURE(dc, FSQRT);
2478 gen_op_load_fpr_DT1(DFPREG(rs2));
2479 gen_clear_float_exceptions();
2480 gen_helper_fsqrtd();
2481 gen_helper_check_ieee_exceptions();
2482 gen_op_store_DT0_fpr(DFPREG(rd));
2483 break;
2484 case 0x2b: /* fsqrtq */
2485 CHECK_FPU_FEATURE(dc, FLOAT128);
2486 gen_op_load_fpr_QT1(QFPREG(rs2));
2487 gen_clear_float_exceptions();
2488 gen_helper_fsqrtq();
2489 gen_helper_check_ieee_exceptions();
2490 gen_op_store_QT0_fpr(QFPREG(rd));
2491 break;
2492 case 0x41: /* fadds */
2493 gen_clear_float_exceptions();
2494 gen_helper_fadds(cpu_tmp32,
2495 cpu_fpr[rs1], cpu_fpr[rs2]);
2496 gen_helper_check_ieee_exceptions();
2497 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2498 break;
2499 case 0x42: /* faddd */
2500 gen_op_load_fpr_DT0(DFPREG(rs1));
2501 gen_op_load_fpr_DT1(DFPREG(rs2));
2502 gen_clear_float_exceptions();
2503 gen_helper_faddd();
2504 gen_helper_check_ieee_exceptions();
2505 gen_op_store_DT0_fpr(DFPREG(rd));
2506 break;
2507 case 0x43: /* faddq */
2508 CHECK_FPU_FEATURE(dc, FLOAT128);
2509 gen_op_load_fpr_QT0(QFPREG(rs1));
2510 gen_op_load_fpr_QT1(QFPREG(rs2));
2511 gen_clear_float_exceptions();
2512 gen_helper_faddq();
2513 gen_helper_check_ieee_exceptions();
2514 gen_op_store_QT0_fpr(QFPREG(rd));
2515 break;
2516 case 0x45: /* fsubs */
2517 gen_clear_float_exceptions();
2518 gen_helper_fsubs(cpu_tmp32,
2519 cpu_fpr[rs1], cpu_fpr[rs2]);
2520 gen_helper_check_ieee_exceptions();
2521 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2522 break;
2523 case 0x46: /* fsubd */
2524 gen_op_load_fpr_DT0(DFPREG(rs1));
2525 gen_op_load_fpr_DT1(DFPREG(rs2));
2526 gen_clear_float_exceptions();
2527 gen_helper_fsubd();
2528 gen_helper_check_ieee_exceptions();
2529 gen_op_store_DT0_fpr(DFPREG(rd));
2530 break;
2531 case 0x47: /* fsubq */
2532 CHECK_FPU_FEATURE(dc, FLOAT128);
2533 gen_op_load_fpr_QT0(QFPREG(rs1));
2534 gen_op_load_fpr_QT1(QFPREG(rs2));
2535 gen_clear_float_exceptions();
2536 gen_helper_fsubq();
2537 gen_helper_check_ieee_exceptions();
2538 gen_op_store_QT0_fpr(QFPREG(rd));
2539 break;
2540 case 0x49: /* fmuls */
2541 CHECK_FPU_FEATURE(dc, FMUL);
2542 gen_clear_float_exceptions();
2543 gen_helper_fmuls(cpu_tmp32,
2544 cpu_fpr[rs1], cpu_fpr[rs2]);
2545 gen_helper_check_ieee_exceptions();
2546 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2547 break;
2548 case 0x4a: /* fmuld */
2549 CHECK_FPU_FEATURE(dc, FMUL);
2550 gen_op_load_fpr_DT0(DFPREG(rs1));
2551 gen_op_load_fpr_DT1(DFPREG(rs2));
2552 gen_clear_float_exceptions();
2553 gen_helper_fmuld();
2554 gen_helper_check_ieee_exceptions();
2555 gen_op_store_DT0_fpr(DFPREG(rd));
2556 break;
2557 case 0x4b: /* fmulq */
2558 CHECK_FPU_FEATURE(dc, FLOAT128);
2559 CHECK_FPU_FEATURE(dc, FMUL);
2560 gen_op_load_fpr_QT0(QFPREG(rs1));
2561 gen_op_load_fpr_QT1(QFPREG(rs2));
2562 gen_clear_float_exceptions();
2563 gen_helper_fmulq();
2564 gen_helper_check_ieee_exceptions();
2565 gen_op_store_QT0_fpr(QFPREG(rd));
2566 break;
2567 case 0x4d: /* fdivs */
2568 gen_clear_float_exceptions();
2569 gen_helper_fdivs(cpu_tmp32,
2570 cpu_fpr[rs1], cpu_fpr[rs2]);
2571 gen_helper_check_ieee_exceptions();
2572 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2573 break;
2574 case 0x4e: /* fdivd */
2575 gen_op_load_fpr_DT0(DFPREG(rs1));
2576 gen_op_load_fpr_DT1(DFPREG(rs2));
2577 gen_clear_float_exceptions();
2578 gen_helper_fdivd();
2579 gen_helper_check_ieee_exceptions();
2580 gen_op_store_DT0_fpr(DFPREG(rd));
2581 break;
2582 case 0x4f: /* fdivq */
2583 CHECK_FPU_FEATURE(dc, FLOAT128);
2584 gen_op_load_fpr_QT0(QFPREG(rs1));
2585 gen_op_load_fpr_QT1(QFPREG(rs2));
2586 gen_clear_float_exceptions();
2587 gen_helper_fdivq();
2588 gen_helper_check_ieee_exceptions();
2589 gen_op_store_QT0_fpr(QFPREG(rd));
2590 break;
2591 case 0x69: /* fsmuld */
2592 CHECK_FPU_FEATURE(dc, FSMULD);
2593 gen_clear_float_exceptions();
2594 gen_helper_fsmuld(cpu_fpr[rs1], cpu_fpr[rs2]);
2595 gen_helper_check_ieee_exceptions();
2596 gen_op_store_DT0_fpr(DFPREG(rd));
2597 break;
2598 case 0x6e: /* fdmulq */
2599 CHECK_FPU_FEATURE(dc, FLOAT128);
2600 gen_op_load_fpr_DT0(DFPREG(rs1));
2601 gen_op_load_fpr_DT1(DFPREG(rs2));
2602 gen_clear_float_exceptions();
2603 gen_helper_fdmulq();
2604 gen_helper_check_ieee_exceptions();
2605 gen_op_store_QT0_fpr(QFPREG(rd));
2606 break;
2607 case 0xc4: /* fitos */
2608 gen_clear_float_exceptions();
2609 gen_helper_fitos(cpu_tmp32, cpu_fpr[rs2]);
2610 gen_helper_check_ieee_exceptions();
2611 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2612 break;
2613 case 0xc6: /* fdtos */
2614 gen_op_load_fpr_DT1(DFPREG(rs2));
2615 gen_clear_float_exceptions();
2616 gen_helper_fdtos(cpu_tmp32);
2617 gen_helper_check_ieee_exceptions();
2618 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2619 break;
2620 case 0xc7: /* fqtos */
2621 CHECK_FPU_FEATURE(dc, FLOAT128);
2622 gen_op_load_fpr_QT1(QFPREG(rs2));
2623 gen_clear_float_exceptions();
2624 gen_helper_fqtos(cpu_tmp32);
2625 gen_helper_check_ieee_exceptions();
2626 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2627 break;
2628 case 0xc8: /* fitod */
2629 gen_helper_fitod(cpu_fpr[rs2]);
2630 gen_op_store_DT0_fpr(DFPREG(rd));
2631 break;
2632 case 0xc9: /* fstod */
2633 gen_helper_fstod(cpu_fpr[rs2]);
2634 gen_op_store_DT0_fpr(DFPREG(rd));
2635 break;
2636 case 0xcb: /* fqtod */
2637 CHECK_FPU_FEATURE(dc, FLOAT128);
2638 gen_op_load_fpr_QT1(QFPREG(rs2));
2639 gen_clear_float_exceptions();
2640 gen_helper_fqtod();
2641 gen_helper_check_ieee_exceptions();
2642 gen_op_store_DT0_fpr(DFPREG(rd));
2643 break;
2644 case 0xcc: /* fitoq */
2645 CHECK_FPU_FEATURE(dc, FLOAT128);
2646 gen_helper_fitoq(cpu_fpr[rs2]);
2647 gen_op_store_QT0_fpr(QFPREG(rd));
2648 break;
2649 case 0xcd: /* fstoq */
2650 CHECK_FPU_FEATURE(dc, FLOAT128);
2651 gen_helper_fstoq(cpu_fpr[rs2]);
2652 gen_op_store_QT0_fpr(QFPREG(rd));
2653 break;
2654 case 0xce: /* fdtoq */
2655 CHECK_FPU_FEATURE(dc, FLOAT128);
2656 gen_op_load_fpr_DT1(DFPREG(rs2));
2657 gen_helper_fdtoq();
2658 gen_op_store_QT0_fpr(QFPREG(rd));
2659 break;
2660 case 0xd1: /* fstoi */
2661 gen_clear_float_exceptions();
2662 gen_helper_fstoi(cpu_tmp32, cpu_fpr[rs2]);
2663 gen_helper_check_ieee_exceptions();
2664 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2665 break;
2666 case 0xd2: /* fdtoi */
2667 gen_op_load_fpr_DT1(DFPREG(rs2));
2668 gen_clear_float_exceptions();
2669 gen_helper_fdtoi(cpu_tmp32);
2670 gen_helper_check_ieee_exceptions();
2671 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2672 break;
2673 case 0xd3: /* fqtoi */
2674 CHECK_FPU_FEATURE(dc, FLOAT128);
2675 gen_op_load_fpr_QT1(QFPREG(rs2));
2676 gen_clear_float_exceptions();
2677 gen_helper_fqtoi(cpu_tmp32);
2678 gen_helper_check_ieee_exceptions();
2679 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2680 break;
2681 #ifdef TARGET_SPARC64
2682 case 0x2: /* V9 fmovd */
2683 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
2684 cpu_fpr[DFPREG(rs2)]);
2685 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2686 cpu_fpr[DFPREG(rs2) + 1]);
2687 break;
2688 case 0x3: /* V9 fmovq */
2689 CHECK_FPU_FEATURE(dc, FLOAT128);
2690 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
2691 cpu_fpr[QFPREG(rs2)]);
2692 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2693 cpu_fpr[QFPREG(rs2) + 1]);
2694 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2695 cpu_fpr[QFPREG(rs2) + 2]);
2696 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2697 cpu_fpr[QFPREG(rs2) + 3]);
2698 break;
2699 case 0x6: /* V9 fnegd */
2700 gen_op_load_fpr_DT1(DFPREG(rs2));
2701 gen_helper_fnegd();
2702 gen_op_store_DT0_fpr(DFPREG(rd));
2703 break;
2704 case 0x7: /* V9 fnegq */
2705 CHECK_FPU_FEATURE(dc, FLOAT128);
2706 gen_op_load_fpr_QT1(QFPREG(rs2));
2707 gen_helper_fnegq();
2708 gen_op_store_QT0_fpr(QFPREG(rd));
2709 break;
2710 case 0xa: /* V9 fabsd */
2711 gen_op_load_fpr_DT1(DFPREG(rs2));
2712 gen_helper_fabsd();
2713 gen_op_store_DT0_fpr(DFPREG(rd));
2714 break;
2715 case 0xb: /* V9 fabsq */
2716 CHECK_FPU_FEATURE(dc, FLOAT128);
2717 gen_op_load_fpr_QT1(QFPREG(rs2));
2718 gen_helper_fabsq();
2719 gen_op_store_QT0_fpr(QFPREG(rd));
2720 break;
2721 case 0x81: /* V9 fstox */
2722 gen_clear_float_exceptions();
2723 gen_helper_fstox(cpu_fpr[rs2]);
2724 gen_helper_check_ieee_exceptions();
2725 gen_op_store_DT0_fpr(DFPREG(rd));
2726 break;
2727 case 0x82: /* V9 fdtox */
2728 gen_op_load_fpr_DT1(DFPREG(rs2));
2729 gen_clear_float_exceptions();
2730 gen_helper_fdtox();
2731 gen_helper_check_ieee_exceptions();
2732 gen_op_store_DT0_fpr(DFPREG(rd));
2733 break;
2734 case 0x83: /* V9 fqtox */
2735 CHECK_FPU_FEATURE(dc, FLOAT128);
2736 gen_op_load_fpr_QT1(QFPREG(rs2));
2737 gen_clear_float_exceptions();
2738 gen_helper_fqtox();
2739 gen_helper_check_ieee_exceptions();
2740 gen_op_store_DT0_fpr(DFPREG(rd));
2741 break;
2742 case 0x84: /* V9 fxtos */
2743 gen_op_load_fpr_DT1(DFPREG(rs2));
2744 gen_clear_float_exceptions();
2745 gen_helper_fxtos(cpu_tmp32);
2746 gen_helper_check_ieee_exceptions();
2747 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2748 break;
2749 case 0x88: /* V9 fxtod */
2750 gen_op_load_fpr_DT1(DFPREG(rs2));
2751 gen_clear_float_exceptions();
2752 gen_helper_fxtod();
2753 gen_helper_check_ieee_exceptions();
2754 gen_op_store_DT0_fpr(DFPREG(rd));
2755 break;
2756 case 0x8c: /* V9 fxtoq */
2757 CHECK_FPU_FEATURE(dc, FLOAT128);
2758 gen_op_load_fpr_DT1(DFPREG(rs2));
2759 gen_clear_float_exceptions();
2760 gen_helper_fxtoq();
2761 gen_helper_check_ieee_exceptions();
2762 gen_op_store_QT0_fpr(QFPREG(rd));
2763 break;
2764 #endif
2765 default:
2766 goto illegal_insn;
2767 }
2768 } else if (xop == 0x35) { /* FPU Operations */
2769 #ifdef TARGET_SPARC64
2770 int cond;
2771 #endif
2772 if (gen_trap_ifnofpu(dc, cpu_cond))
2773 goto jmp_insn;
2774 gen_op_clear_ieee_excp_and_FTT();
2775 rs1 = GET_FIELD(insn, 13, 17);
2776 rs2 = GET_FIELD(insn, 27, 31);
2777 xop = GET_FIELD(insn, 18, 26);
2778 #ifdef TARGET_SPARC64
2779 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2780 int l1;
2781
2782 l1 = gen_new_label();
2783 cond = GET_FIELD_SP(insn, 14, 17);
2784 cpu_src1 = get_src1(insn, cpu_src1);
2785 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2786 0, l1);
2787 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2788 gen_set_label(l1);
2789 break;
2790 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2791 int l1;
2792
2793 l1 = gen_new_label();
2794 cond = GET_FIELD_SP(insn, 14, 17);
2795 cpu_src1 = get_src1(insn, cpu_src1);
2796 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2797 0, l1);
2798 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2799 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2800 gen_set_label(l1);
2801 break;
2802 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2803 int l1;
2804
2805 CHECK_FPU_FEATURE(dc, FLOAT128);
2806 l1 = gen_new_label();
2807 cond = GET_FIELD_SP(insn, 14, 17);
2808 cpu_src1 = get_src1(insn, cpu_src1);
2809 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2810 0, l1);
2811 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2812 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2813 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2814 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2815 gen_set_label(l1);
2816 break;
2817 }
2818 #endif
2819 switch (xop) {
2820 #ifdef TARGET_SPARC64
2821 #define FMOVSCC(fcc) \
2822 { \
2823 TCGv r_cond; \
2824 int l1; \
2825 \
2826 l1 = gen_new_label(); \
2827 r_cond = tcg_temp_new(); \
2828 cond = GET_FIELD_SP(insn, 14, 17); \
2829 gen_fcond(r_cond, fcc, cond); \
2830 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2831 0, l1); \
2832 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2833 gen_set_label(l1); \
2834 tcg_temp_free(r_cond); \
2835 }
2836 #define FMOVDCC(fcc) \
2837 { \
2838 TCGv r_cond; \
2839 int l1; \
2840 \
2841 l1 = gen_new_label(); \
2842 r_cond = tcg_temp_new(); \
2843 cond = GET_FIELD_SP(insn, 14, 17); \
2844 gen_fcond(r_cond, fcc, cond); \
2845 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2846 0, l1); \
2847 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2848 cpu_fpr[DFPREG(rs2)]); \
2849 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2850 cpu_fpr[DFPREG(rs2) + 1]); \
2851 gen_set_label(l1); \
2852 tcg_temp_free(r_cond); \
2853 }
2854 #define FMOVQCC(fcc) \
2855 { \
2856 TCGv r_cond; \
2857 int l1; \
2858 \
2859 l1 = gen_new_label(); \
2860 r_cond = tcg_temp_new(); \
2861 cond = GET_FIELD_SP(insn, 14, 17); \
2862 gen_fcond(r_cond, fcc, cond); \
2863 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2864 0, l1); \
2865 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2866 cpu_fpr[QFPREG(rs2)]); \
2867 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2868 cpu_fpr[QFPREG(rs2) + 1]); \
2869 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2870 cpu_fpr[QFPREG(rs2) + 2]); \
2871 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2872 cpu_fpr[QFPREG(rs2) + 3]); \
2873 gen_set_label(l1); \
2874 tcg_temp_free(r_cond); \
2875 }
2876 case 0x001: /* V9 fmovscc %fcc0 */
2877 FMOVSCC(0);
2878 break;
2879 case 0x002: /* V9 fmovdcc %fcc0 */
2880 FMOVDCC(0);
2881 break;
2882 case 0x003: /* V9 fmovqcc %fcc0 */
2883 CHECK_FPU_FEATURE(dc, FLOAT128);
2884 FMOVQCC(0);
2885 break;
2886 case 0x041: /* V9 fmovscc %fcc1 */
2887 FMOVSCC(1);
2888 break;
2889 case 0x042: /* V9 fmovdcc %fcc1 */
2890 FMOVDCC(1);
2891 break;
2892 case 0x043: /* V9 fmovqcc %fcc1 */
2893 CHECK_FPU_FEATURE(dc, FLOAT128);
2894 FMOVQCC(1);
2895 break;
2896 case 0x081: /* V9 fmovscc %fcc2 */
2897 FMOVSCC(2);
2898 break;
2899 case 0x082: /* V9 fmovdcc %fcc2 */
2900 FMOVDCC(2);
2901 break;
2902 case 0x083: /* V9 fmovqcc %fcc2 */
2903 CHECK_FPU_FEATURE(dc, FLOAT128);
2904 FMOVQCC(2);
2905 break;
2906 case 0x0c1: /* V9 fmovscc %fcc3 */
2907 FMOVSCC(3);
2908 break;
2909 case 0x0c2: /* V9 fmovdcc %fcc3 */
2910 FMOVDCC(3);
2911 break;
2912 case 0x0c3: /* V9 fmovqcc %fcc3 */
2913 CHECK_FPU_FEATURE(dc, FLOAT128);
2914 FMOVQCC(3);
2915 break;
2916 #undef FMOVSCC
2917 #undef FMOVDCC
2918 #undef FMOVQCC
2919 #define FMOVSCC(icc) \
2920 { \
2921 TCGv r_cond; \
2922 int l1; \
2923 \
2924 l1 = gen_new_label(); \
2925 r_cond = tcg_temp_new(); \
2926 cond = GET_FIELD_SP(insn, 14, 17); \
2927 gen_cond(r_cond, icc, cond); \
2928 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2929 0, l1); \
2930 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2931 gen_set_label(l1); \
2932 tcg_temp_free(r_cond); \
2933 }
2934 #define FMOVDCC(icc) \
2935 { \
2936 TCGv r_cond; \
2937 int l1; \
2938 \
2939 l1 = gen_new_label(); \
2940 r_cond = tcg_temp_new(); \
2941 cond = GET_FIELD_SP(insn, 14, 17); \
2942 gen_cond(r_cond, icc, cond); \
2943 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2944 0, l1); \
2945 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2946 cpu_fpr[DFPREG(rs2)]); \
2947 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2948 cpu_fpr[DFPREG(rs2) + 1]); \
2949 gen_set_label(l1); \
2950 tcg_temp_free(r_cond); \
2951 }
2952 #define FMOVQCC(icc) \
2953 { \
2954 TCGv r_cond; \
2955 int l1; \
2956 \
2957 l1 = gen_new_label(); \
2958 r_cond = tcg_temp_new(); \
2959 cond = GET_FIELD_SP(insn, 14, 17); \
2960 gen_cond(r_cond, icc, cond); \
2961 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2962 0, l1); \
2963 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2964 cpu_fpr[QFPREG(rs2)]); \
2965 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2966 cpu_fpr[QFPREG(rs2) + 1]); \
2967 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2968 cpu_fpr[QFPREG(rs2) + 2]); \
2969 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2970 cpu_fpr[QFPREG(rs2) + 3]); \
2971 gen_set_label(l1); \
2972 tcg_temp_free(r_cond); \
2973 }
2974
2975 case 0x101: /* V9 fmovscc %icc */
2976 FMOVSCC(0);
2977 break;
2978 case 0x102: /* V9 fmovdcc %icc */
2979 FMOVDCC(0);
2980 case 0x103: /* V9 fmovqcc %icc */
2981 CHECK_FPU_FEATURE(dc, FLOAT128);
2982 FMOVQCC(0);
2983 break;
2984 case 0x181: /* V9 fmovscc %xcc */
2985 FMOVSCC(1);
2986 break;
2987 case 0x182: /* V9 fmovdcc %xcc */
2988 FMOVDCC(1);
2989 break;
2990 case 0x183: /* V9 fmovqcc %xcc */
2991 CHECK_FPU_FEATURE(dc, FLOAT128);
2992 FMOVQCC(1);
2993 break;
2994 #undef FMOVSCC
2995 #undef FMOVDCC
2996 #undef FMOVQCC
2997 #endif
2998 case 0x51: /* fcmps, V9 %fcc */
2999 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
3000 break;
3001 case 0x52: /* fcmpd, V9 %fcc */
3002 gen_op_load_fpr_DT0(DFPREG(rs1));
3003 gen_op_load_fpr_DT1(DFPREG(rs2));
3004 gen_op_fcmpd(rd & 3);
3005 break;
3006 case 0x53: /* fcmpq, V9 %fcc */
3007 CHECK_FPU_FEATURE(dc, FLOAT128);
3008 gen_op_load_fpr_QT0(QFPREG(rs1));
3009 gen_op_load_fpr_QT1(QFPREG(rs2));
3010 gen_op_fcmpq(rd & 3);
3011 break;
3012 case 0x55: /* fcmpes, V9 %fcc */
3013 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
3014 break;
3015 case 0x56: /* fcmped, V9 %fcc */
3016 gen_op_load_fpr_DT0(DFPREG(rs1));
3017 gen_op_load_fpr_DT1(DFPREG(rs2));
3018 gen_op_fcmped(rd & 3);
3019 break;
3020 case 0x57: /* fcmpeq, V9 %fcc */
3021 CHECK_FPU_FEATURE(dc, FLOAT128);
3022 gen_op_load_fpr_QT0(QFPREG(rs1));
3023 gen_op_load_fpr_QT1(QFPREG(rs2));
3024 gen_op_fcmpeq(rd & 3);
3025 break;
3026 default:
3027 goto illegal_insn;
3028 }
3029 } else if (xop == 0x2) {
3030 // clr/mov shortcut
3031
3032 rs1 = GET_FIELD(insn, 13, 17);
3033 if (rs1 == 0) {
3034 // or %g0, x, y -> mov T0, x; mov y, T0
3035 if (IS_IMM) { /* immediate */
3036 TCGv r_const;
3037
3038 simm = GET_FIELDs(insn, 19, 31);
3039 r_const = tcg_const_tl(simm);
3040 gen_movl_TN_reg(rd, r_const);
3041 tcg_temp_free(r_const);
3042 } else { /* register */
3043 rs2 = GET_FIELD(insn, 27, 31);
3044 gen_movl_reg_TN(rs2, cpu_dst);
3045 gen_movl_TN_reg(rd, cpu_dst);
3046 }
3047 } else {
3048 cpu_src1 = get_src1(insn, cpu_src1);
3049 if (IS_IMM) { /* immediate */
3050 simm = GET_FIELDs(insn, 19, 31);
3051 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3052 gen_movl_TN_reg(rd, cpu_dst);
3053 } else { /* register */
3054 // or x, %g0, y -> mov T1, x; mov y, T1
3055 rs2 = GET_FIELD(insn, 27, 31);
3056 if (rs2 != 0) {
3057 gen_movl_reg_TN(rs2, cpu_src2);
3058 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3059 gen_movl_TN_reg(rd, cpu_dst);
3060 } else
3061 gen_movl_TN_reg(rd, cpu_src1);
3062 }
3063 }
3064 #ifdef TARGET_SPARC64
3065 } else if (xop == 0x25) { /* sll, V9 sllx */
3066 cpu_src1 = get_src1(insn, cpu_src1);
3067 if (IS_IMM) { /* immediate */
3068 simm = GET_FIELDs(insn, 20, 31);
3069 if (insn & (1 << 12)) {
3070 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3071 } else {
3072 tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3073 }
3074 } else { /* register */
3075 rs2 = GET_FIELD(insn, 27, 31);
3076 gen_movl_reg_TN(rs2, cpu_src2);
3077 if (insn & (1 << 12)) {
3078 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3079 } else {
3080 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3081 }
3082 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3083 }
3084 gen_movl_TN_reg(rd, cpu_dst);
3085 } else if (xop == 0x26) { /* srl, V9 srlx */
3086 cpu_src1 = get_src1(insn, cpu_src1);
3087 if (IS_IMM) { /* immediate */
3088 simm = GET_FIELDs(insn, 20, 31);
3089 if (insn & (1 << 12)) {
3090 tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3091 } else {
3092 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3093 tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3094 }
3095 } else { /* register */
3096 rs2 = GET_FIELD(insn, 27, 31);
3097 gen_movl_reg_TN(rs2, cpu_src2);
3098 if (insn & (1 << 12)) {
3099 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3100 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3101 } else {
3102 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3103 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3104 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3105 }
3106 }
3107 gen_movl_TN_reg(rd, cpu_dst);
3108 } else if (xop == 0x27) { /* sra, V9 srax */
3109 cpu_src1 = get_src1(insn, cpu_src1);
3110 if (IS_IMM) { /* immediate */
3111 simm = GET_FIELDs(insn, 20, 31);
3112 if (insn & (1 << 12)) {
3113 tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3114 } else {
3115 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3116 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3117 tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3118 }
3119 } else { /* register */
3120 rs2 = GET_FIELD(insn, 27, 31);
3121 gen_movl_reg_TN(rs2, cpu_src2);
3122 if (insn & (1 << 12)) {
3123 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3124 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3125 } else {
3126 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3127 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3128 tcg_gen_ext32s_i64(cpu_dst, cpu_dst);
3129 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3130 }
3131 }
3132 gen_movl_TN_reg(rd, cpu_dst);
3133 #endif
3134 } else if (xop < 0x36) {
3135 if (xop < 0x20) {
3136 cpu_src1 = get_src1(insn, cpu_src1);
3137 cpu_src2 = get_src2(insn, cpu_src2);
3138 switch (xop & ~0x10) {
3139 case 0x0: /* add */
3140 if (IS_IMM) {
3141 simm = GET_FIELDs(insn, 19, 31);
3142 if (xop & 0x10) {
3143 gen_op_addi_cc(cpu_dst, cpu_src1, simm);
3144 } else {
3145 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
3146 }
3147 } else {
3148 if (xop & 0x10) {
3149 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3150 } else {
3151 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3152 }
3153 }
3154 break;
3155 case 0x1: /* and */
3156 if (IS_IMM) {
3157 simm = GET_FIELDs(insn, 19, 31);
3158 tcg_gen_andi_tl(cpu_dst, cpu_src1, simm);
3159 } else {
3160 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3161 }
3162 if (xop & 0x10) {
3163 gen_op_logic_cc(cpu_dst);
3164 }
3165 break;
3166 case 0x2: /* or */
3167 if (IS_IMM) {
3168 simm = GET_FIELDs(insn, 19, 31);
3169 tcg_gen_ori_tl(cpu_dst, cpu_src1, simm);
3170 } else {
3171 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3172 }
3173 if (xop & 0x10)
3174 gen_op_logic_cc(cpu_dst);
3175 break;
3176 case 0x3: /* xor */
3177 if (IS_IMM) {
3178 simm = GET_FIELDs(insn, 19, 31);
3179 tcg_gen_xori_tl(cpu_dst, cpu_src1, simm);
3180 } else {
3181 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3182 }
3183 if (xop & 0x10)
3184 gen_op_logic_cc(cpu_dst);
3185 break;
3186 case 0x4: /* sub */
3187 if (IS_IMM) {
3188 simm = GET_FIELDs(insn, 19, 31);
3189 if (xop & 0x10) {
3190 gen_op_subi_cc(cpu_dst, cpu_src1, simm);
3191 } else {
3192 tcg_gen_subi_tl(cpu_dst, cpu_src1, simm);
3193 }
3194 } else {
3195 if (xop & 0x10) {
3196 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3197 } else {
3198 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3199 }
3200 }
3201 break;
3202 case 0x5: /* andn */
3203 if (IS_IMM) {
3204 simm = GET_FIELDs(insn, 19, 31);
3205 tcg_gen_andi_tl(cpu_dst, cpu_src1, ~simm);
3206 } else {
3207 tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3208 }
3209 if (xop & 0x10)
3210 gen_op_logic_cc(cpu_dst);
3211 break;
3212 case 0x6: /* orn */
3213 if (IS_IMM) {
3214 simm = GET_FIELDs(insn, 19, 31);
3215 tcg_gen_ori_tl(cpu_dst, cpu_src1, ~simm);
3216 } else {
3217 tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3218 }
3219 if (xop & 0x10)
3220 gen_op_logic_cc(cpu_dst);
3221 break;
3222 case 0x7: /* xorn */
3223 if (IS_IMM) {
3224 simm = GET_FIELDs(insn, 19, 31);
3225 tcg_gen_xori_tl(cpu_dst, cpu_src1, ~simm);
3226 } else {
3227 tcg_gen_not_tl(cpu_tmp0, cpu_src2);
3228 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3229 }
3230 if (xop & 0x10)
3231 gen_op_logic_cc(cpu_dst);
3232 break;
3233 case 0x8: /* addx, V9 addc */
3234 if (IS_IMM) {
3235 simm = GET_FIELDs(insn, 19, 31);
3236 if (xop & 0x10)
3237 gen_op_addxi_cc(cpu_dst, cpu_src1, simm);
3238 else {
3239 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3240 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3241 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3242 }
3243 } else {
3244 if (xop & 0x10)
3245 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3246 else {
3247 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3248 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3249 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3250 }
3251 }
3252 break;
3253 #ifdef TARGET_SPARC64
3254 case 0x9: /* V9 mulx */
3255 if (IS_IMM) {
3256 simm = GET_FIELDs(insn, 19, 31);
3257 tcg_gen_muli_i64(cpu_dst, cpu_src1, simm);
3258 } else {
3259 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3260 }
3261 break;
3262 #endif
3263 case 0xa: /* umul */
3264 CHECK_IU_FEATURE(dc, MUL);
3265 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3266 if (xop & 0x10)
3267 gen_op_logic_cc(cpu_dst);
3268 break;
3269 case 0xb: /* smul */
3270 CHECK_IU_FEATURE(dc, MUL);
3271 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3272 if (xop & 0x10)
3273 gen_op_logic_cc(cpu_dst);
3274 break;
3275 case 0xc: /* subx, V9 subc */
3276 if (IS_IMM) {
3277 simm = GET_FIELDs(insn, 19, 31);
3278 if (xop & 0x10) {
3279 gen_op_subxi_cc(cpu_dst, cpu_src1, simm);
3280 } else {
3281 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3282 tcg_gen_addi_tl(cpu_tmp0, cpu_tmp0, simm);
3283 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3284 }
3285 } else {
3286 if (xop & 0x10) {
3287 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3288 } else {
3289 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3290 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3291 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3292 }
3293 }
3294 break;
3295 #ifdef TARGET_SPARC64
3296 case 0xd: /* V9 udivx */
3297 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3298 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3299 gen_trap_ifdivzero_tl(cpu_cc_src2);
3300 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3301 break;
3302 #endif
3303 case 0xe: /* udiv */
3304 CHECK_IU_FEATURE(dc, DIV);
3305 gen_helper_udiv(cpu_dst, cpu_src1, cpu_src2);
3306 if (xop & 0x10)
3307 gen_op_div_cc(cpu_dst);
3308 break;
3309 case 0xf: /* sdiv */
3310 CHECK_IU_FEATURE(dc, DIV);
3311 gen_helper_sdiv(cpu_dst, cpu_src1, cpu_src2);
3312 if (xop & 0x10)
3313 gen_op_div_cc(cpu_dst);
3314 break;
3315 default:
3316 goto illegal_insn;
3317 }
3318 gen_movl_TN_reg(rd, cpu_dst);
3319 } else {
3320 cpu_src1 = get_src1(insn, cpu_src1);
3321 cpu_src2 = get_src2(insn, cpu_src2);
3322 switch (xop) {
3323 case 0x20: /* taddcc */
3324 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3325 gen_movl_TN_reg(rd, cpu_dst);
3326 break;
3327 case 0x21: /* tsubcc */
3328 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3329 gen_movl_TN_reg(rd, cpu_dst);
3330 break;
3331 case 0x22: /* taddcctv */
3332 save_state(dc, cpu_cond);
3333 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3334 gen_movl_TN_reg(rd, cpu_dst);
3335 break;
3336 case 0x23: /* tsubcctv */
3337 save_state(dc, cpu_cond);
3338 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3339 gen_movl_TN_reg(rd, cpu_dst);
3340 break;
3341 case 0x24: /* mulscc */
3342 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3343 gen_movl_TN_reg(rd, cpu_dst);
3344 break;
3345 #ifndef TARGET_SPARC64
3346 case 0x25: /* sll */
3347 if (IS_IMM) { /* immediate */
3348 simm = GET_FIELDs(insn, 20, 31);
3349 tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
3350 } else { /* register */
3351 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3352 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3353 }
3354 gen_movl_TN_reg(rd, cpu_dst);
3355 break;
3356 case 0x26: /* srl */
3357 if (IS_IMM) { /* immediate */
3358 simm = GET_FIELDs(insn, 20, 31);
3359 tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
3360 } else { /* register */
3361 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3362 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3363 }
3364 gen_movl_TN_reg(rd, cpu_dst);
3365 break;
3366 case 0x27: /* sra */
3367 if (IS_IMM) { /* immediate */
3368 simm = GET_FIELDs(insn, 20, 31);
3369 tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
3370 } else { /* register */
3371 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3372 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3373 }
3374 gen_movl_TN_reg(rd, cpu_dst);
3375 break;
3376 #endif
3377 case 0x30:
3378 {
3379 switch(rd) {
3380 case 0: /* wry */
3381 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3382 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
3383 break;
3384 #ifndef TARGET_SPARC64
3385 case 0x01 ... 0x0f: /* undefined in the
3386 SPARCv8 manual, nop
3387 on the microSPARC
3388 II */
3389 case 0x10 ... 0x1f: /* implementation-dependent
3390 in the SPARCv8
3391 manual, nop on the
3392 microSPARC II */
3393 break;
3394 #else
3395 case 0x2: /* V9 wrccr */
3396 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3397 gen_helper_wrccr(cpu_dst);
3398 break;
3399 case 0x3: /* V9 wrasi */
3400 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3401 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3402 break;
3403 case 0x6: /* V9 wrfprs */
3404 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3405 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3406 save_state(dc, cpu_cond);
3407 gen_op_next_insn();
3408 tcg_gen_exit_tb(0);
3409 dc->is_br = 1;
3410 break;
3411 case 0xf: /* V9 sir, nop if user */
3412 #if !defined(CONFIG_USER_ONLY)
3413 if (supervisor(dc))
3414 ; // XXX
3415 #endif
3416 break;
3417 case 0x13: /* Graphics Status */
3418 if (gen_trap_ifnofpu(dc, cpu_cond))
3419 goto jmp_insn;
3420 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3421 break;
3422 case 0x14: /* Softint set */
3423 if (!supervisor(dc))
3424 goto illegal_insn;
3425 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3426 gen_helper_set_softint(cpu_tmp64);
3427 break;
3428 case 0x15: /* Softint clear */
3429 if (!supervisor(dc))
3430 goto illegal_insn;
3431 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3432 gen_helper_clear_softint(cpu_tmp64);
3433 break;
3434 case 0x16: /* Softint write */
3435 if (!supervisor(dc))
3436 goto illegal_insn;
3437 tcg_gen_xor_tl(cpu_tmp64, cpu_src1, cpu_src2);
3438 gen_helper_write_softint(cpu_tmp64);
3439 break;
3440 case 0x17: /* Tick compare */
3441 #if !defined(CONFIG_USER_ONLY)
3442 if (!supervisor(dc))
3443 goto illegal_insn;
3444 #endif
3445 {
3446 TCGv_ptr r_tickptr;
3447
3448 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3449 cpu_src2);
3450 r_tickptr = tcg_temp_new_ptr();
3451 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3452 offsetof(CPUState, tick));
3453 gen_helper_tick_set_limit(r_tickptr,
3454 cpu_tick_cmpr);
3455 tcg_temp_free_ptr(r_tickptr);
3456 }
3457 break;
3458 case 0x18: /* System tick */
3459 #if !defined(CONFIG_USER_ONLY)
3460 if (!supervisor(dc))
3461 goto illegal_insn;
3462 #endif
3463 {
3464 TCGv_ptr r_tickptr;
3465
3466 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3467 cpu_src2);
3468 r_tickptr = tcg_temp_new_ptr();
3469 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3470 offsetof(CPUState, stick));
3471 gen_helper_tick_set_count(r_tickptr,
3472 cpu_dst);
3473 tcg_temp_free_ptr(r_tickptr);
3474 }
3475 break;
3476 case 0x19: /* System tick compare */
3477 #if !defined(CONFIG_USER_ONLY)
3478 if (!supervisor(dc))
3479 goto illegal_insn;
3480 #endif
3481 {
3482 TCGv_ptr r_tickptr;
3483
3484 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3485 cpu_src2);
3486 r_tickptr = tcg_temp_new_ptr();
3487 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3488 offsetof(CPUState, stick));
3489 gen_helper_tick_set_limit(r_tickptr,
3490 cpu_stick_cmpr);
3491 tcg_temp_free_ptr(r_tickptr);
3492 }
3493 break;
3494
3495 case 0x10: /* Performance Control */
3496 case 0x11: /* Performance Instrumentation
3497 Counter */
3498 case 0x12: /* Dispatch Control */
3499 #endif
3500 default:
3501 goto illegal_insn;
3502 }
3503 }
3504 break;
3505 #if !defined(CONFIG_USER_ONLY)
3506 case 0x31: /* wrpsr, V9 saved, restored */
3507 {
3508 if (!supervisor(dc))
3509 goto priv_insn;
3510 #ifdef TARGET_SPARC64
3511 switch (rd) {
3512 case 0:
3513 gen_helper_saved();
3514 break;
3515 case 1:
3516 gen_helper_restored();
3517 break;
3518 case 2: /* UA2005 allclean */
3519 case 3: /* UA2005 otherw */
3520 case 4: /* UA2005 normalw */
3521 case 5: /* UA2005 invalw */
3522 // XXX
3523 default:
3524 goto illegal_insn;
3525 }
3526 #else
3527 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3528 gen_helper_wrpsr(cpu_dst);
3529 save_state(dc, cpu_cond);
3530 gen_op_next_insn();
3531 tcg_gen_exit_tb(0);
3532 dc->is_br = 1;
3533 #endif
3534 }
3535 break;
3536 case 0x32: /* wrwim, V9 wrpr */
3537 {
3538 if (!supervisor(dc))
3539 goto priv_insn;
3540 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3541 #ifdef TARGET_SPARC64
3542 switch (rd) {
3543 case 0: // tpc
3544 {
3545 TCGv_ptr r_tsptr;
3546
3547 r_tsptr = tcg_temp_new_ptr();
3548 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3549 offsetof(CPUState, tsptr));
3550 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3551 offsetof(trap_state, tpc));
3552 tcg_temp_free_ptr(r_tsptr);
3553 }
3554 break;
3555 case 1: // tnpc
3556 {
3557 TCGv_ptr r_tsptr;
3558
3559 r_tsptr = tcg_temp_new_ptr();
3560 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3561 offsetof(CPUState, tsptr));
3562 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3563 offsetof(trap_state, tnpc));
3564 tcg_temp_free_ptr(r_tsptr);
3565 }
3566 break;
3567 case 2: // tstate
3568 {
3569 TCGv_ptr r_tsptr;
3570
3571 r_tsptr = tcg_temp_new_ptr();
3572 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3573 offsetof(CPUState, tsptr));
3574 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3575 offsetof(trap_state,
3576 tstate));
3577 tcg_temp_free_ptr(r_tsptr);
3578 }
3579 break;
3580 case 3: // tt
3581 {
3582 TCGv_ptr r_tsptr;
3583
3584 r_tsptr = tcg_temp_new_ptr();
3585 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3586 offsetof(CPUState, tsptr));
3587 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3588 tcg_gen_st_i32(cpu_tmp32, r_tsptr,
3589 offsetof(trap_state, tt));
3590 tcg_temp_free_ptr(r_tsptr);
3591 }
3592 break;
3593 case 4: // tick
3594 {
3595 TCGv_ptr r_tickptr;
3596
3597 r_tickptr = tcg_temp_new_ptr();
3598 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3599 offsetof(CPUState, tick));
3600 gen_helper_tick_set_count(r_tickptr,
3601 cpu_tmp0);
3602 tcg_temp_free_ptr(r_tickptr);
3603 }
3604 break;
3605 case 5: // tba
3606 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3607 break;
3608 case 6: // pstate
3609 save_state(dc, cpu_cond);
3610 gen_helper_wrpstate(cpu_tmp0);
3611 gen_op_next_insn();
3612 tcg_gen_exit_tb(0);
3613 dc->is_br = 1;
3614 break;
3615 case 7: // tl
3616 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3617 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3618 offsetof(CPUSPARCState, tl));
3619 break;
3620 case 8: // pil
3621 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3622 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3623 offsetof(CPUSPARCState,
3624 psrpil));
3625 break;
3626 case 9: // cwp
3627 gen_helper_wrcwp(cpu_tmp0);
3628 break;
3629 case 10: // cansave
3630 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3631 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3632 offsetof(CPUSPARCState,
3633 cansave));
3634 break;
3635 case 11: // canrestore
3636 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3637 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3638 offsetof(CPUSPARCState,
3639 canrestore));
3640 break;
3641 case 12: // cleanwin
3642 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3643 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3644 offsetof(CPUSPARCState,
3645 cleanwin));
3646 break;
3647 case 13: // otherwin
3648 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3649 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3650 offsetof(CPUSPARCState,
3651 otherwin));
3652 break;
3653 case 14: // wstate
3654 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3655 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3656 offsetof(CPUSPARCState,
3657 wstate));
3658 break;
3659 case 16: // UA2005 gl
3660 CHECK_IU_FEATURE(dc, GL);
3661 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3662 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3663 offsetof(CPUSPARCState, gl));
3664 break;
3665 case 26: // UA2005 strand status
3666 CHECK_IU_FEATURE(dc, HYPV);
3667 if (!hypervisor(dc))
3668 goto priv_insn;
3669 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
3670 break;
3671 default:
3672 goto illegal_insn;
3673 }
3674 #else
3675 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3676 if (dc->def->nwindows != 32)
3677 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3678 (1 << dc->def->nwindows) - 1);
3679 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3680 #endif
3681 }
3682 break;
3683 case 0x33: /* wrtbr, UA2005 wrhpr */
3684 {
3685 #ifndef TARGET_SPARC64
3686 if (!supervisor(dc))
3687 goto priv_insn;
3688 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3689 #else
3690 CHECK_IU_FEATURE(dc, HYPV);
3691 if (!hypervisor(dc))
3692 goto priv_insn;
3693 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3694 switch (rd) {
3695 case 0: // hpstate
3696 // XXX gen_op_wrhpstate();
3697 save_state(dc, cpu_cond);
3698 gen_op_next_insn();
3699 tcg_gen_exit_tb(0);
3700 dc->is_br = 1;
3701 break;
3702 case 1: // htstate
3703 // XXX gen_op_wrhtstate();
3704 break;
3705 case 3: // hintp
3706 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3707 break;
3708 case 5: // htba
3709 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3710 break;
3711 case 31: // hstick_cmpr
3712 {
3713 TCGv_ptr r_tickptr;
3714
3715 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3716 r_tickptr = tcg_temp_new_ptr();
3717 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3718 offsetof(CPUState, hstick));
3719 gen_helper_tick_set_limit(r_tickptr,
3720 cpu_hstick_cmpr);
3721 tcg_temp_free_ptr(r_tickptr);
3722 }
3723 break;
3724 case 6: // hver readonly
3725 default:
3726 goto illegal_insn;
3727 }
3728 #endif
3729 }
3730 break;
3731 #endif
3732 #ifdef TARGET_SPARC64
3733 case 0x2c: /* V9 movcc */
3734 {
3735 int cc = GET_FIELD_SP(insn, 11, 12);
3736 int cond = GET_FIELD_SP(insn, 14, 17);
3737 TCGv r_cond;
3738 int l1;
3739
3740 r_cond = tcg_temp_new();
3741 if (insn & (1 << 18)) {
3742 if (cc == 0)
3743 gen_cond(r_cond, 0, cond);
3744 else if (cc == 2)
3745 gen_cond(r_cond, 1, cond);
3746 else
3747 goto illegal_insn;
3748 } else {
3749 gen_fcond(r_cond, cc, cond);
3750 }
3751
3752 l1 = gen_new_label();
3753
3754 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3755 if (IS_IMM) { /* immediate */
3756 TCGv r_const;
3757
3758 simm = GET_FIELD_SPs(insn, 0, 10);
3759 r_const = tcg_const_tl(simm);
3760 gen_movl_TN_reg(rd, r_const);
3761 tcg_temp_free(r_const);
3762 } else {
3763 rs2 = GET_FIELD_SP(insn, 0, 4);
3764 gen_movl_reg_TN(rs2, cpu_tmp0);
3765 gen_movl_TN_reg(rd, cpu_tmp0);
3766 }
3767 gen_set_label(l1);
3768 tcg_temp_free(r_cond);
3769 break;
3770 }
3771 case 0x2d: /* V9 sdivx */
3772 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3773 gen_movl_TN_reg(rd, cpu_dst);
3774 break;
3775 case 0x2e: /* V9 popc */
3776 {
3777 cpu_src2 = get_src2(insn, cpu_src2);
3778 gen_helper_popc(cpu_dst, cpu_src2);
3779 gen_movl_TN_reg(rd, cpu_dst);
3780 }
3781 case 0x2f: /* V9 movr */
3782 {
3783 int cond = GET_FIELD_SP(insn, 10, 12);
3784 int l1;
3785
3786 cpu_src1 = get_src1(insn, cpu_src1);
3787
3788 l1 = gen_new_label();
3789
3790 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3791 cpu_src1, 0, l1);
3792 if (IS_IMM) { /* immediate */
3793 TCGv r_const;
3794
3795 simm = GET_FIELD_SPs(insn, 0, 9);
3796 r_const = tcg_const_tl(simm);
3797 gen_movl_TN_reg(rd, r_const);
3798 tcg_temp_free(r_const);
3799 } else {
3800 rs2 = GET_FIELD_SP(insn, 0, 4);
3801 gen_movl_reg_TN(rs2, cpu_tmp0);
3802 gen_movl_TN_reg(rd, cpu_tmp0);
3803 }
3804 gen_set_label(l1);
3805 break;
3806 }
3807 #endif
3808 default:
3809 goto illegal_insn;
3810 }
3811 }
3812 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3813 #ifdef TARGET_SPARC64
3814 int opf = GET_FIELD_SP(insn, 5, 13);
3815 rs1 = GET_FIELD(insn, 13, 17);
3816 rs2 = GET_FIELD(insn, 27, 31);
3817 if (gen_trap_ifnofpu(dc, cpu_cond))
3818 goto jmp_insn;
3819
3820 switch (opf) {
3821 case 0x000: /* VIS I edge8cc */
3822 case 0x001: /* VIS II edge8n */
3823 case 0x002: /* VIS I edge8lcc */
3824 case 0x003: /* VIS II edge8ln */
3825 case 0x004: /* VIS I edge16cc */
3826 case 0x005: /* VIS II edge16n */
3827 case 0x006: /* VIS I edge16lcc */
3828 case 0x007: /* VIS II edge16ln */
3829 case 0x008: /* VIS I edge32cc */
3830 case 0x009: /* VIS II edge32n */
3831 case 0x00a: /* VIS I edge32lcc */
3832 case 0x00b: /* VIS II edge32ln */
3833 // XXX
3834 goto illegal_insn;
3835 case 0x010: /* VIS I array8 */
3836 CHECK_FPU_FEATURE(dc, VIS1);
3837 cpu_src1 = get_src1(insn, cpu_src1);
3838 gen_movl_reg_TN(rs2, cpu_src2);
3839 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3840 gen_movl_TN_reg(rd, cpu_dst);
3841 break;
3842 case 0x012: /* VIS I array16 */
3843 CHECK_FPU_FEATURE(dc, VIS1);
3844 cpu_src1 = get_src1(insn, cpu_src1);
3845 gen_movl_reg_TN(rs2, cpu_src2);
3846 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3847 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3848 gen_movl_TN_reg(rd, cpu_dst);
3849 break;
3850 case 0x014: /* VIS I array32 */
3851 CHECK_FPU_FEATURE(dc, VIS1);
3852 cpu_src1 = get_src1(insn, cpu_src1);
3853 gen_movl_reg_TN(rs2, cpu_src2);
3854 gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
3855 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3856 gen_movl_TN_reg(rd, cpu_dst);
3857 break;
3858 case 0x018: /* VIS I alignaddr */
3859 CHECK_FPU_FEATURE(dc, VIS1);
3860 cpu_src1 = get_src1(insn, cpu_src1);
3861 gen_movl_reg_TN(rs2, cpu_src2);
3862 gen_helper_alignaddr(cpu_dst, cpu_src1, cpu_src2);
3863 gen_movl_TN_reg(rd, cpu_dst);
3864 break;
3865 case 0x019: /* VIS II bmask */
3866 case 0x01a: /* VIS I alignaddrl */
3867 // XXX
3868 goto illegal_insn;
3869 case 0x020: /* VIS I fcmple16 */
3870 CHECK_FPU_FEATURE(dc, VIS1);
3871 gen_op_load_fpr_DT0(DFPREG(rs1));
3872 gen_op_load_fpr_DT1(DFPREG(rs2));
3873 gen_helper_fcmple16();
3874 gen_op_store_DT0_fpr(DFPREG(rd));
3875 break;
3876 case 0x022: /* VIS I fcmpne16 */
3877 CHECK_FPU_FEATURE(dc, VIS1);
3878 gen_op_load_fpr_DT0(DFPREG(rs1));
3879 gen_op_load_fpr_DT1(DFPREG(rs2));
3880 gen_helper_fcmpne16();
3881 gen_op_store_DT0_fpr(DFPREG(rd));
3882 break;
3883 case 0x024: /* VIS I fcmple32 */
3884 CHECK_FPU_FEATURE(dc, VIS1);
3885 gen_op_load_fpr_DT0(DFPREG(rs1));
3886 gen_op_load_fpr_DT1(DFPREG(rs2));
3887 gen_helper_fcmple32();
3888 gen_op_store_DT0_fpr(DFPREG(rd));
3889 break;
3890 case 0x026: /* VIS I fcmpne32 */
3891 CHECK_FPU_FEATURE(dc, VIS1);
3892 gen_op_load_fpr_DT0(DFPREG(rs1));
3893 gen_op_load_fpr_DT1(DFPREG(rs2));
3894 gen_helper_fcmpne32();
3895 gen_op_store_DT0_fpr(DFPREG(rd));
3896 break;
3897 case 0x028: /* VIS I fcmpgt16 */
3898 CHECK_FPU_FEATURE(dc, VIS1);
3899 gen_op_load_fpr_DT0(DFPREG(rs1));
3900 gen_op_load_fpr_DT1(DFPREG(rs2));
3901 gen_helper_fcmpgt16();
3902 gen_op_store_DT0_fpr(DFPREG(rd));
3903 break;
3904 case 0x02a: /* VIS I fcmpeq16 */
3905 CHECK_FPU_FEATURE(dc, VIS1);
3906 gen_op_load_fpr_DT0(DFPREG(rs1));
3907 gen_op_load_fpr_DT1(DFPREG(rs2));
3908 gen_helper_fcmpeq16();
3909 gen_op_store_DT0_fpr(DFPREG(rd));
3910 break;
3911 case 0x02c: /* VIS I fcmpgt32 */
3912 CHECK_FPU_FEATURE(dc, VIS1);
3913 gen_op_load_fpr_DT0(DFPREG(rs1));
3914 gen_op_load_fpr_DT1(DFPREG(rs2));
3915 gen_helper_fcmpgt32();
3916 gen_op_store_DT0_fpr(DFPREG(rd));
3917 break;
3918 case 0x02e: /* VIS I fcmpeq32 */
3919 CHECK_FPU_FEATURE(dc, VIS1);
3920 gen_op_load_fpr_DT0(DFPREG(rs1));
3921 gen_op_load_fpr_DT1(DFPREG(rs2));
3922 gen_helper_fcmpeq32();
3923 gen_op_store_DT0_fpr(DFPREG(rd));
3924 break;
3925 case 0x031: /* VIS I fmul8x16 */
3926 CHECK_FPU_FEATURE(dc, VIS1);
3927 gen_op_load_fpr_DT0(DFPREG(rs1));
3928 gen_op_load_fpr_DT1(DFPREG(rs2));
3929 gen_helper_fmul8x16();
3930 gen_op_store_DT0_fpr(DFPREG(rd));
3931 break;
3932 case 0x033: /* VIS I fmul8x16au */
3933 CHECK_FPU_FEATURE(dc, VIS1);
3934 gen_op_load_fpr_DT0(DFPREG(rs1));
3935 gen_op_load_fpr_DT1(DFPREG(rs2));
3936 gen_helper_fmul8x16au();
3937 gen_op_store_DT0_fpr(DFPREG(rd));
3938 break;
3939 case 0x035: /* VIS I fmul8x16al */
3940 CHECK_FPU_FEATURE(dc, VIS1);
3941 gen_op_load_fpr_DT0(DFPREG(rs1));
3942 gen_op_load_fpr_DT1(DFPREG(rs2));
3943 gen_helper_fmul8x16al();
3944 gen_op_store_DT0_fpr(DFPREG(rd));
3945 break;
3946 case 0x036: /* VIS I fmul8sux16 */
3947 CHECK_FPU_FEATURE(dc, VIS1);
3948 gen_op_load_fpr_DT0(DFPREG(rs1));
3949 gen_op_load_fpr_DT1(DFPREG(rs2));
3950 gen_helper_fmul8sux16();
3951 gen_op_store_DT0_fpr(DFPREG(rd));
3952 break;
3953 case 0x037: /* VIS I fmul8ulx16 */
3954 CHECK_FPU_FEATURE(dc, VIS1);
3955 gen_op_load_fpr_DT0(DFPREG(rs1));
3956 gen_op_load_fpr_DT1(DFPREG(rs2));
3957 gen_helper_fmul8ulx16();
3958 gen_op_store_DT0_fpr(DFPREG(rd));
3959 break;
3960 case 0x038: /* VIS I fmuld8sux16 */
3961 CHECK_FPU_FEATURE(dc, VIS1);
3962 gen_op_load_fpr_DT0(DFPREG(rs1));
3963 gen_op_load_fpr_DT1(DFPREG(rs2));
3964 gen_helper_fmuld8sux16();
3965 gen_op_store_DT0_fpr(DFPREG(rd));
3966 break;
3967 case 0x039: /* VIS I fmuld8ulx16 */
3968 CHECK_FPU_FEATURE(dc, VIS1);
3969 gen_op_load_fpr_DT0(DFPREG(rs1));
3970 gen_op_load_fpr_DT1(DFPREG(rs2));
3971 gen_helper_fmuld8ulx16();
3972 gen_op_store_DT0_fpr(DFPREG(rd));
3973 break;
3974 case 0x03a: /* VIS I fpack32 */
3975 case 0x03b: /* VIS I fpack16 */
3976 case 0x03d: /* VIS I fpackfix */
3977 case 0x03e: /* VIS I pdist */
3978 // XXX
3979 goto illegal_insn;
3980 case 0x048: /* VIS I faligndata */
3981 CHECK_FPU_FEATURE(dc, VIS1);
3982 gen_op_load_fpr_DT0(DFPREG(rs1));
3983 gen_op_load_fpr_DT1(DFPREG(rs2));
3984 gen_helper_faligndata();
3985 gen_op_store_DT0_fpr(DFPREG(rd));
3986 break;
3987 case 0x04b: /* VIS I fpmerge */
3988 CHECK_FPU_FEATURE(dc, VIS1);
3989 gen_op_load_fpr_DT0(DFPREG(rs1));
3990 gen_op_load_fpr_DT1(DFPREG(rs2));
3991 gen_helper_fpmerge();
3992 gen_op_store_DT0_fpr(DFPREG(rd));
3993 break;
3994 case 0x04c: /* VIS II bshuffle */
3995 // XXX
3996 goto illegal_insn;
3997 case 0x04d: /* VIS I fexpand */
3998 CHECK_FPU_FEATURE(dc, VIS1);
3999 gen_op_load_fpr_DT0(DFPREG(rs1));
4000 gen_op_load_fpr_DT1(DFPREG(rs2));
4001 gen_helper_fexpand();
4002 gen_op_store_DT0_fpr(DFPREG(rd));
4003 break;
4004 case 0x050: /* VIS I fpadd16 */
4005 CHECK_FPU_FEATURE(dc, VIS1);
4006 gen_op_load_fpr_DT0(DFPREG(rs1));
4007 gen_op_load_fpr_DT1(DFPREG(rs2));
4008 gen_helper_fpadd16();
4009 gen_op_store_DT0_fpr(DFPREG(rd));
4010 break;
4011 case 0x051: /* VIS I fpadd16s */
4012 CHECK_FPU_FEATURE(dc, VIS1);
4013 gen_helper_fpadd16s(cpu_fpr[rd],
4014 cpu_fpr[rs1], cpu_fpr[rs2]);
4015 break;
4016 case 0x052: /* VIS I fpadd32 */
4017 CHECK_FPU_FEATURE(dc, VIS1);
4018 gen_op_load_fpr_DT0(DFPREG(rs1));
4019 gen_op_load_fpr_DT1(DFPREG(rs2));
4020 gen_helper_fpadd32();
4021 gen_op_store_DT0_fpr(DFPREG(rd));
4022 break;
4023 case 0x053: /* VIS I fpadd32s */
4024 CHECK_FPU_FEATURE(dc, VIS1);
4025 gen_helper_fpadd32s(cpu_fpr[rd],
4026 cpu_fpr[rs1], cpu_fpr[rs2]);
4027 break;
4028 case 0x054: /* VIS I fpsub16 */
4029 CHECK_FPU_FEATURE(dc, VIS1);
4030 gen_op_load_fpr_DT0(DFPREG(rs1));
4031 gen_op_load_fpr_DT1(DFPREG(rs2));
4032 gen_helper_fpsub16();
4033 gen_op_store_DT0_fpr(DFPREG(rd));
4034 break;
4035 case 0x055: /* VIS I fpsub16s */
4036 CHECK_FPU_FEATURE(dc, VIS1);
4037 gen_helper_fpsub16s(cpu_fpr[rd],
4038 cpu_fpr[rs1], cpu_fpr[rs2]);
4039 break;
4040 case 0x056: /* VIS I fpsub32 */
4041 CHECK_FPU_FEATURE(dc, VIS1);
4042 gen_op_load_fpr_DT0(DFPREG(rs1));
4043 gen_op_load_fpr_DT1(DFPREG(rs2));
4044 gen_helper_fpsub32();
4045 gen_op_store_DT0_fpr(DFPREG(rd));
4046 break;
4047 case 0x057: /* VIS I fpsub32s */
4048 CHECK_FPU_FEATURE(dc, VIS1);
4049 gen_helper_fpsub32s(cpu_fpr[rd],
4050 cpu_fpr[rs1], cpu_fpr[rs2]);
4051 break;
4052 case 0x060: /* VIS I fzero */
4053 CHECK_FPU_FEATURE(dc, VIS1);
4054 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
4055 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
4056 break;
4057 case 0x061: /* VIS I fzeros */
4058 CHECK_FPU_FEATURE(dc, VIS1);
4059 tcg_gen_movi_i32(cpu_fpr[rd], 0);
4060 break;
4061 case 0x062: /* VIS I fnor */
4062 CHECK_FPU_FEATURE(dc, VIS1);
4063 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4064 cpu_fpr[DFPREG(rs2)]);
4065 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4066 cpu_fpr[DFPREG(rs2) + 1]);
4067 break;
4068 case 0x063: /* VIS I fnors */
4069 CHECK_FPU_FEATURE(dc, VIS1);
4070 tcg_gen_nor_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4071 break;
4072 case 0x064: /* VIS I fandnot2 */
4073 CHECK_FPU_FEATURE(dc, VIS1);
4074 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4075 cpu_fpr[DFPREG(rs2)]);
4076 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4077 cpu_fpr[DFPREG(rs1) + 1],
4078 cpu_fpr[DFPREG(rs2) + 1]);
4079 break;
4080 case 0x065: /* VIS I fandnot2s */
4081 CHECK_FPU_FEATURE(dc, VIS1);
4082 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4083 break;
4084 case 0x066: /* VIS I fnot2 */
4085 CHECK_FPU_FEATURE(dc, VIS1);
4086 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
4087 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4088 cpu_fpr[DFPREG(rs2) + 1]);
4089 break;
4090 case 0x067: /* VIS I fnot2s */
4091 CHECK_FPU_FEATURE(dc, VIS1);
4092 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4093 break;
4094 case 0x068: /* VIS I fandnot1 */
4095 CHECK_FPU_FEATURE(dc, VIS1);
4096 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4097 cpu_fpr[DFPREG(rs1)]);
4098 tcg_gen_andc_i32(cpu_fpr[DFPREG(rd) + 1],
4099 cpu_fpr[DFPREG(rs2) + 1],
4100 cpu_fpr[DFPREG(rs1) + 1]);
4101 break;
4102 case 0x069: /* VIS I fandnot1s */
4103 CHECK_FPU_FEATURE(dc, VIS1);
4104 tcg_gen_andc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4105 break;
4106 case 0x06a: /* VIS I fnot1 */
4107 CHECK_FPU_FEATURE(dc, VIS1);
4108 tcg_gen_not_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4109 tcg_gen_not_i32(cpu_fpr[DFPREG(rd) + 1],
4110 cpu_fpr[DFPREG(rs1) + 1]);
4111 break;
4112 case 0x06b: /* VIS I fnot1s */
4113 CHECK_FPU_FEATURE(dc, VIS1);
4114 tcg_gen_not_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4115 break;
4116 case 0x06c: /* VIS I fxor */
4117 CHECK_FPU_FEATURE(dc, VIS1);
4118 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4119 cpu_fpr[DFPREG(rs2)]);
4120 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
4121 cpu_fpr[DFPREG(rs1) + 1],
4122 cpu_fpr[DFPREG(rs2) + 1]);
4123 break;
4124 case 0x06d: /* VIS I fxors */
4125 CHECK_FPU_FEATURE(dc, VIS1);
4126 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4127 break;
4128 case 0x06e: /* VIS I fnand */
4129 CHECK_FPU_FEATURE(dc, VIS1);
4130 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
4131 cpu_fpr[DFPREG(rs2)]);
4132 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
4133 cpu_fpr[DFPREG(rs2) + 1]);
4134 break;
4135 case 0x06f: /* VIS I fnands */
4136 CHECK_FPU_FEATURE(dc, VIS1);
4137 tcg_gen_nand_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
4138 break;
4139 case 0x070: /* VIS I fand */
4140 CHECK_FPU_FEATURE(dc, VIS1);
4141 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4142 cpu_fpr[DFPREG(rs2)]);
4143 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
4144 cpu_fpr[DFPREG(rs1) + 1],
4145 cpu_fpr[DFPREG(rs2) + 1]);
4146 break;
4147 case 0x071: /* VIS I fands */
4148 CHECK_FPU_FEATURE(dc, VIS1);
4149 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4150 break;
4151 case 0x072: /* VIS I fxnor */
4152 CHECK_FPU_FEATURE(dc, VIS1);
4153 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4154 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4155 cpu_fpr[DFPREG(rs1)]);
4156 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4157 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4158 cpu_fpr[DFPREG(rs1) + 1]);
4159 break;
4160 case 0x073: /* VIS I fxnors */
4161 CHECK_FPU_FEATURE(dc, VIS1);
4162 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4163 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4164 break;
4165 case 0x074: /* VIS I fsrc1 */
4166 CHECK_FPU_FEATURE(dc, VIS1);
4167 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4168 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4169 cpu_fpr[DFPREG(rs1) + 1]);
4170 break;
4171 case 0x075: /* VIS I fsrc1s */
4172 CHECK_FPU_FEATURE(dc, VIS1);
4173 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4174 break;
4175 case 0x076: /* VIS I fornot2 */
4176 CHECK_FPU_FEATURE(dc, VIS1);
4177 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4178 cpu_fpr[DFPREG(rs2)]);
4179 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4180 cpu_fpr[DFPREG(rs1) + 1],
4181 cpu_fpr[DFPREG(rs2) + 1]);
4182 break;
4183 case 0x077: /* VIS I fornot2s */
4184 CHECK_FPU_FEATURE(dc, VIS1);
4185 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4186 break;
4187 case 0x078: /* VIS I fsrc2 */
4188 CHECK_FPU_FEATURE(dc, VIS1);
4189 gen_op_load_fpr_DT0(DFPREG(rs2));
4190 gen_op_store_DT0_fpr(DFPREG(rd));
4191 break;
4192 case 0x079: /* VIS I fsrc2s */
4193 CHECK_FPU_FEATURE(dc, VIS1);
4194 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4195 break;
4196 case 0x07a: /* VIS I fornot1 */
4197 CHECK_FPU_FEATURE(dc, VIS1);
4198 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
4199 cpu_fpr[DFPREG(rs1)]);
4200 tcg_gen_orc_i32(cpu_fpr[DFPREG(rd) + 1],
4201 cpu_fpr[DFPREG(rs2) + 1],
4202 cpu_fpr[DFPREG(rs1) + 1]);
4203 break;
4204 case 0x07b: /* VIS I fornot1s */
4205 CHECK_FPU_FEATURE(dc, VIS1);
4206 tcg_gen_orc_i32(cpu_fpr[rd], cpu_fpr[rs2], cpu_fpr[rs1]);
4207 break;
4208 case 0x07c: /* VIS I for */
4209 CHECK_FPU_FEATURE(dc, VIS1);
4210 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4211 cpu_fpr[DFPREG(rs2)]);
4212 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4213 cpu_fpr[DFPREG(rs1) + 1],
4214 cpu_fpr[DFPREG(rs2) + 1]);
4215 break;
4216 case 0x07d: /* VIS I fors */
4217 CHECK_FPU_FEATURE(dc, VIS1);
4218 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4219 break;
4220 case 0x07e: /* VIS I fone */
4221 CHECK_FPU_FEATURE(dc, VIS1);
4222 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4223 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4224 break;
4225 case 0x07f: /* VIS I fones */
4226 CHECK_FPU_FEATURE(dc, VIS1);
4227 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4228 break;
4229 case 0x080: /* VIS I shutdown */
4230 case 0x081: /* VIS II siam */
4231 // XXX
4232 goto illegal_insn;
4233 default:
4234 goto illegal_insn;
4235 }
4236 #else
4237 goto ncp_insn;
4238 #endif
4239 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4240 #ifdef TARGET_SPARC64
4241 goto illegal_insn;
4242 #else
4243 goto ncp_insn;
4244 #endif
4245 #ifdef TARGET_SPARC64
4246 } else if (xop == 0x39) { /* V9 return */
4247 TCGv_i32 r_const;
4248
4249 save_state(dc, cpu_cond);
4250 cpu_src1 = get_src1(insn, cpu_src1);
4251 if (IS_IMM) { /* immediate */
4252 simm = GET_FIELDs(insn, 19, 31);
4253 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4254 } else { /* register */
4255 rs2 = GET_FIELD(insn, 27, 31);
4256 if (rs2) {
4257 gen_movl_reg_TN(rs2, cpu_src2);
4258 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4259 } else
4260 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4261 }
4262 gen_helper_restore();
4263 gen_mov_pc_npc(dc, cpu_cond);
4264 r_const = tcg_const_i32(3);
4265 gen_helper_check_align(cpu_dst, r_const);
4266 tcg_temp_free_i32(r_const);
4267 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4268 dc->npc = DYNAMIC_PC;
4269 goto jmp_insn;
4270 #endif
4271 } else {
4272 cpu_src1 = get_src1(insn, cpu_src1);
4273 if (IS_IMM) { /* immediate */
4274 simm = GET_FIELDs(insn, 19, 31);
4275 tcg_gen_addi_tl(cpu_dst, cpu_src1, simm);
4276 } else { /* register */
4277 rs2 = GET_FIELD(insn, 27, 31);
4278 if (rs2) {
4279 gen_movl_reg_TN(rs2, cpu_src2);
4280 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4281 } else
4282 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4283 }
4284 switch (xop) {
4285 case 0x38: /* jmpl */
4286 {
4287 TCGv r_pc;
4288 TCGv_i32 r_const;
4289
4290 r_pc = tcg_const_tl(dc->pc);
4291 gen_movl_TN_reg(rd, r_pc);
4292 tcg_temp_free(r_pc);
4293 gen_mov_pc_npc(dc, cpu_cond);
4294 r_const = tcg_const_i32(3);
4295 gen_helper_check_align(cpu_dst, r_const);
4296 tcg_temp_free_i32(r_const);
4297 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4298 dc->npc = DYNAMIC_PC;
4299 }
4300 goto jmp_insn;
4301 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4302 case 0x39: /* rett, V9 return */
4303 {
4304 TCGv_i32 r_const;
4305
4306 if (!supervisor(dc))
4307 goto priv_insn;
4308 gen_mov_pc_npc(dc, cpu_cond);
4309 r_const = tcg_const_i32(3);
4310 gen_helper_check_align(cpu_dst, r_const);
4311 tcg_temp_free_i32(r_const);
4312 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4313 dc->npc = DYNAMIC_PC;
4314 gen_helper_rett();
4315 }
4316 goto jmp_insn;
4317 #endif
4318 case 0x3b: /* flush */
4319 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4320 goto unimp_flush;
4321 gen_helper_flush(cpu_dst);
4322 break;
4323 case 0x3c: /* save */
4324 save_state(dc, cpu_cond);
4325 gen_helper_save();
4326 gen_movl_TN_reg(rd, cpu_dst);
4327 break;
4328 case 0x3d: /* restore */
4329 save_state(dc, cpu_cond);
4330 gen_helper_restore();
4331 gen_movl_TN_reg(rd, cpu_dst);
4332 break;
4333 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4334 case 0x3e: /* V9 done/retry */
4335 {
4336 switch (rd) {
4337 case 0:
4338 if (!supervisor(dc))
4339 goto priv_insn;
4340 dc->npc = DYNAMIC_PC;
4341 dc->pc = DYNAMIC_PC;
4342 gen_helper_done();
4343 goto jmp_insn;
4344 case 1:
4345 if (!supervisor(dc))
4346 goto priv_insn;
4347 dc->npc = DYNAMIC_PC;
4348 dc->pc = DYNAMIC_PC;
4349 gen_helper_retry();
4350 goto jmp_insn;
4351 default:
4352 goto illegal_insn;
4353 }
4354 }
4355 break;
4356 #endif
4357 default:
4358 goto illegal_insn;
4359 }
4360 }
4361 break;
4362 }
4363 break;
4364 case 3: /* load/store instructions */
4365 {
4366 unsigned int xop = GET_FIELD(insn, 7, 12);
4367
4368 cpu_src1 = get_src1(insn, cpu_src1);
4369 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4370 rs2 = GET_FIELD(insn, 27, 31);
4371 gen_movl_reg_TN(rs2, cpu_src2);
4372 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4373 } else if (IS_IMM) { /* immediate */
4374 simm = GET_FIELDs(insn, 19, 31);
4375 tcg_gen_addi_tl(cpu_addr, cpu_src1, simm);
4376 } else { /* register */
4377 rs2 = GET_FIELD(insn, 27, 31);
4378 if (rs2 != 0) {
4379 gen_movl_reg_TN(rs2, cpu_src2);
4380 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4381 } else
4382 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4383 }
4384 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4385 (xop > 0x17 && xop <= 0x1d ) ||
4386 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4387 switch (xop) {
4388 case 0x0: /* ld, V9 lduw, load unsigned word */
4389 gen_address_mask(dc, cpu_addr);
4390 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4391 break;
4392 case 0x1: /* ldub, load unsigned byte */
4393 gen_address_mask(dc, cpu_addr);
4394 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4395 break;
4396 case 0x2: /* lduh, load unsigned halfword */
4397 gen_address_mask(dc, cpu_addr);
4398 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4399 break;
4400 case 0x3: /* ldd, load double word */
4401 if (rd & 1)
4402 goto illegal_insn;
4403 else {
4404 TCGv_i32 r_const;
4405
4406 save_state(dc, cpu_cond);
4407 r_const = tcg_const_i32(7);
4408 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4409 tcg_temp_free_i32(r_const);
4410 gen_address_mask(dc, cpu_addr);
4411 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4412 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4413 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4414 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4415 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4416 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4417 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4418 }
4419 break;
4420 case 0x9: /* ldsb, load signed byte */
4421 gen_address_mask(dc, cpu_addr);
4422 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4423 break;
4424 case 0xa: /* ldsh, load signed halfword */
4425 gen_address_mask(dc, cpu_addr);
4426 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4427 break;
4428 case 0xd: /* ldstub -- XXX: should be atomically */
4429 {
4430 TCGv r_const;
4431
4432 gen_address_mask(dc, cpu_addr);
4433 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4434 r_const = tcg_const_tl(0xff);
4435 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4436 tcg_temp_free(r_const);
4437 }
4438 break;
4439 case 0x0f: /* swap, swap register with memory. Also
4440 atomically */
4441 CHECK_IU_FEATURE(dc, SWAP);
4442 gen_movl_reg_TN(rd, cpu_val);
4443 gen_address_mask(dc, cpu_addr);
4444 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4445 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4446 tcg_gen_mov_tl(cpu_val, cpu_tmp0);
4447 break;
4448 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4449 case 0x10: /* lda, V9 lduwa, load word alternate */
4450 #ifndef TARGET_SPARC64
4451 if (IS_IMM)
4452 goto illegal_insn;
4453 if (!supervisor(dc))
4454 goto priv_insn;
4455 #endif
4456 save_state(dc, cpu_cond);
4457 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4458 break;
4459 case 0x11: /* lduba, load unsigned byte alternate */
4460 #ifndef TARGET_SPARC64
4461 if (IS_IMM)
4462 goto illegal_insn;
4463 if (!supervisor(dc))
4464 goto priv_insn;
4465 #endif
4466 save_state(dc, cpu_cond);
4467 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4468 break;
4469 case 0x12: /* lduha, load unsigned halfword alternate */
4470 #ifndef TARGET_SPARC64
4471 if (IS_IMM)
4472 goto illegal_insn;
4473 if (!supervisor(dc))
4474 goto priv_insn;
4475 #endif
4476 save_state(dc, cpu_cond);
4477 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4478 break;
4479 case 0x13: /* ldda, load double word alternate */
4480 #ifndef TARGET_SPARC64
4481 if (IS_IMM)
4482 goto illegal_insn;
4483 if (!supervisor(dc))
4484 goto priv_insn;
4485 #endif
4486 if (rd & 1)
4487 goto illegal_insn;
4488 save_state(dc, cpu_cond);
4489 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4490 goto skip_move;
4491 case 0x19: /* ldsba, load signed byte alternate */
4492 #ifndef TARGET_SPARC64
4493 if (IS_IMM)
4494 goto illegal_insn;
4495 if (!supervisor(dc))
4496 goto priv_insn;
4497 #endif
4498 save_state(dc, cpu_cond);
4499 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4500 break;
4501 case 0x1a: /* ldsha, load signed halfword alternate */
4502 #ifndef TARGET_SPARC64
4503 if (IS_IMM)
4504 goto illegal_insn;
4505 if (!supervisor(dc))
4506 goto priv_insn;
4507 #endif
4508 save_state(dc, cpu_cond);
4509 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4510 break;
4511 case 0x1d: /* ldstuba -- XXX: should be atomically */
4512 #ifndef TARGET_SPARC64
4513 if (IS_IMM)
4514 goto illegal_insn;
4515 if (!supervisor(dc))
4516 goto priv_insn;
4517 #endif
4518 save_state(dc, cpu_cond);
4519 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4520 break;
4521 case 0x1f: /* swapa, swap reg with alt. memory. Also
4522 atomically */
4523 CHECK_IU_FEATURE(dc, SWAP);
4524 #ifndef TARGET_SPARC64
4525 if (IS_IMM)
4526 goto illegal_insn;
4527 if (!supervisor(dc))
4528 goto priv_insn;
4529 #endif
4530 save_state(dc, cpu_cond);
4531 gen_movl_reg_TN(rd, cpu_val);
4532 gen_swap_asi(cpu_val, cpu_addr, insn);
4533 break;
4534
4535 #ifndef TARGET_SPARC64
4536 case 0x30: /* ldc */
4537 case 0x31: /* ldcsr */
4538 case 0x33: /* lddc */
4539 goto ncp_insn;
4540 #endif
4541 #endif
4542 #ifdef TARGET_SPARC64
4543 case 0x08: /* V9 ldsw */
4544 gen_address_mask(dc, cpu_addr);
4545 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4546 break;
4547 case 0x0b: /* V9 ldx */
4548 gen_address_mask(dc, cpu_addr);
4549 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4550 break;
4551 case 0x18: /* V9 ldswa */
4552 save_state(dc, cpu_cond);
4553 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4554 break;
4555 case 0x1b: /* V9 ldxa */
4556 save_state(dc, cpu_cond);
4557 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4558 break;
4559 case 0x2d: /* V9 prefetch, no effect */
4560 goto skip_move;
4561 case 0x30: /* V9 ldfa */
4562 save_state(dc, cpu_cond);
4563 gen_ldf_asi(cpu_addr, insn, 4, rd);
4564 goto skip_move;
4565 case 0x33: /* V9 lddfa */
4566 save_state(dc, cpu_cond);
4567 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4568 goto skip_move;
4569 case 0x3d: /* V9 prefetcha, no effect */
4570 goto skip_move;
4571 case 0x32: /* V9 ldqfa */
4572 CHECK_FPU_FEATURE(dc, FLOAT128);
4573 save_state(dc, cpu_cond);
4574 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4575 goto skip_move;
4576 #endif
4577 default:
4578 goto illegal_insn;
4579 }
4580 gen_movl_TN_reg(rd, cpu_val);
4581 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4582 skip_move: ;
4583 #endif
4584 } else if (xop >= 0x20 && xop < 0x24) {
4585 if (gen_trap_ifnofpu(dc, cpu_cond))
4586 goto jmp_insn;
4587 save_state(dc, cpu_cond);
4588 switch (xop) {
4589 case 0x20: /* ldf, load fpreg */
4590 gen_address_mask(dc, cpu_addr);
4591 tcg_gen_qemu_ld32u(cpu_tmp0, cpu_addr, dc->mem_idx);
4592 tcg_gen_trunc_tl_i32(cpu_fpr[rd], cpu_tmp0);
4593 break;
4594 case 0x21: /* ldfsr, V9 ldxfsr */
4595 #ifdef TARGET_SPARC64
4596 gen_address_mask(dc, cpu_addr);
4597 if (rd == 1) {
4598 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4599 gen_helper_ldxfsr(cpu_tmp64);
4600 } else
4601 #else
4602 {
4603 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4604 gen_helper_ldfsr(cpu_tmp32);
4605 }
4606 #endif
4607 break;
4608 case 0x22: /* ldqf, load quad fpreg */
4609 {
4610 TCGv_i32 r_const;
4611
4612 CHECK_FPU_FEATURE(dc, FLOAT128);
4613 r_const = tcg_const_i32(dc->mem_idx);
4614 gen_helper_ldqf(cpu_addr, r_const);
4615 tcg_temp_free_i32(r_const);
4616 gen_op_store_QT0_fpr(QFPREG(rd));
4617 }
4618 break;
4619 case 0x23: /* lddf, load double fpreg */
4620 {
4621 TCGv_i32 r_const;
4622
4623 r_const = tcg_const_i32(dc->mem_idx);
4624 gen_helper_lddf(cpu_addr, r_const);
4625 tcg_temp_free_i32(r_const);
4626 gen_op_store_DT0_fpr(DFPREG(rd));
4627 }
4628 break;
4629 default:
4630 goto illegal_insn;
4631 }
4632 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4633 xop == 0xe || xop == 0x1e) {
4634 gen_movl_reg_TN(rd, cpu_val);
4635 switch (xop) {
4636 case 0x4: /* st, store word */
4637 gen_address_mask(dc, cpu_addr);
4638 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4639 break;
4640 case 0x5: /* stb, store byte */
4641 gen_address_mask(dc, cpu_addr);
4642 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4643 break;
4644 case 0x6: /* sth, store halfword */
4645 gen_address_mask(dc, cpu_addr);
4646 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4647 break;
4648 case 0x7: /* std, store double word */
4649 if (rd & 1)
4650 goto illegal_insn;
4651 else {
4652 TCGv_i32 r_const;
4653
4654 save_state(dc, cpu_cond);
4655 gen_address_mask(dc, cpu_addr);
4656 r_const = tcg_const_i32(7);
4657 gen_helper_check_align(cpu_addr, r_const); // XXX remove
4658 tcg_temp_free_i32(r_const);
4659 gen_movl_reg_TN(rd + 1, cpu_tmp0);
4660 tcg_gen_concat_tl_i64(cpu_tmp64, cpu_tmp0, cpu_val);
4661 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4662 }
4663 break;
4664 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4665 case 0x14: /* sta, V9 stwa, store word alternate */
4666 #ifndef TARGET_SPARC64
4667 if (IS_IMM)
4668 goto illegal_insn;
4669 if (!supervisor(dc))
4670 goto priv_insn;
4671 #endif
4672 save_state(dc, cpu_cond);
4673 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4674 break;
4675 case 0x15: /* stba, store byte alternate */
4676 #ifndef TARGET_SPARC64
4677 if (IS_IMM)
4678 goto illegal_insn;
4679 if (!supervisor(dc))
4680 goto priv_insn;
4681 #endif
4682 save_state(dc, cpu_cond);
4683 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4684 break;
4685 case 0x16: /* stha, store halfword alternate */
4686 #ifndef TARGET_SPARC64
4687 if (IS_IMM)
4688 goto illegal_insn;
4689 if (!supervisor(dc))
4690 goto priv_insn;
4691 #endif
4692 save_state(dc, cpu_cond);
4693 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4694 break;
4695 case 0x17: /* stda, store double word alternate */
4696 #ifndef TARGET_SPARC64
4697 if (IS_IMM)
4698 goto illegal_insn;
4699 if (!supervisor(dc))
4700 goto priv_insn;
4701 #endif
4702 if (rd & 1)
4703 goto illegal_insn;
4704 else {
4705 save_state(dc, cpu_cond);
4706 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4707 }
4708 break;
4709 #endif
4710 #ifdef TARGET_SPARC64
4711 case 0x0e: /* V9 stx */
4712 gen_address_mask(dc, cpu_addr);
4713 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4714 break;
4715 case 0x1e: /* V9 stxa */
4716 save_state(dc, cpu_cond);
4717 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4718 break;
4719 #endif
4720 default:
4721 goto illegal_insn;
4722 }
4723 } else if (xop > 0x23 && xop < 0x28) {
4724 if (gen_trap_ifnofpu(dc, cpu_cond))
4725 goto jmp_insn;
4726 save_state(dc, cpu_cond);
4727 switch (xop) {
4728 case 0x24: /* stf, store fpreg */
4729 gen_address_mask(dc, cpu_addr);
4730 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_fpr[rd]);
4731 tcg_gen_qemu_st32(cpu_tmp0, cpu_addr, dc->mem_idx);
4732 break;
4733 case 0x25: /* stfsr, V9 stxfsr */
4734 #ifdef TARGET_SPARC64
4735 gen_address_mask(dc, cpu_addr);
4736 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4737 if (rd == 1)
4738 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4739 else
4740 tcg_gen_qemu_st32(cpu_tmp64, cpu_addr, dc->mem_idx);
4741 #else
4742 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4743 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4744 #endif
4745 break;
4746 case 0x26:
4747 #ifdef TARGET_SPARC64
4748 /* V9 stqf, store quad fpreg */
4749 {
4750 TCGv_i32 r_const;
4751
4752 CHECK_FPU_FEATURE(dc, FLOAT128);
4753 gen_op_load_fpr_QT0(QFPREG(rd));
4754 r_const = tcg_const_i32(dc->mem_idx);
4755 gen_helper_stqf(cpu_addr, r_const);
4756 tcg_temp_free_i32(r_const);
4757 }
4758 break;
4759 #else /* !TARGET_SPARC64 */
4760 /* stdfq, store floating point queue */
4761 #if defined(CONFIG_USER_ONLY)
4762 goto illegal_insn;
4763 #else
4764 if (!supervisor(dc))
4765 goto priv_insn;
4766 if (gen_trap_ifnofpu(dc, cpu_cond))
4767 goto jmp_insn;
4768 goto nfq_insn;
4769 #endif
4770 #endif
4771 case 0x27: /* stdf, store double fpreg */
4772 {
4773 TCGv_i32 r_const;
4774
4775 gen_op_load_fpr_DT0(DFPREG(rd));
4776 r_const = tcg_const_i32(dc->mem_idx);
4777 gen_helper_stdf(cpu_addr, r_const);
4778 tcg_temp_free_i32(r_const);
4779 }
4780 break;
4781 default:
4782 goto illegal_insn;
4783 }
4784 } else if (xop > 0x33 && xop < 0x3f) {
4785 save_state(dc, cpu_cond);
4786 switch (xop) {
4787 #ifdef TARGET_SPARC64
4788 case 0x34: /* V9 stfa */
4789 gen_stf_asi(cpu_addr, insn, 4, rd);
4790 break;
4791 case 0x36: /* V9 stqfa */
4792 {
4793 TCGv_i32 r_const;
4794
4795 CHECK_FPU_FEATURE(dc, FLOAT128);
4796 r_const = tcg_const_i32(7);
4797 gen_helper_check_align(cpu_addr, r_const);
4798 tcg_temp_free_i32(r_const);
4799 gen_op_load_fpr_QT0(QFPREG(rd));
4800 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4801 }
4802 break;
4803 case 0x37: /* V9 stdfa */
4804 gen_op_load_fpr_DT0(DFPREG(rd));
4805 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4806 break;
4807 case 0x3c: /* V9 casa */
4808 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4809 gen_movl_TN_reg(rd, cpu_val);
4810 break;
4811 case 0x3e: /* V9 casxa */
4812 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4813 gen_movl_TN_reg(rd, cpu_val);
4814 break;
4815 #else
4816 case 0x34: /* stc */
4817 case 0x35: /* stcsr */
4818 case 0x36: /* stdcq */
4819 case 0x37: /* stdc */
4820 goto ncp_insn;
4821 #endif
4822 default:
4823 goto illegal_insn;
4824 }
4825 }
4826 else
4827 goto illegal_insn;
4828 }
4829 break;
4830 }
4831 /* default case for non jump instructions */
4832 if (dc->npc == DYNAMIC_PC) {
4833 dc->pc = DYNAMIC_PC;
4834 gen_op_next_insn();
4835 } else if (dc->npc == JUMP_PC) {
4836 /* we can do a static jump */
4837 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4838 dc->is_br = 1;
4839 } else {
4840 dc->pc = dc->npc;
4841 dc->npc = dc->npc + 4;
4842 }
4843 jmp_insn:
4844 return;
4845 illegal_insn:
4846 {
4847 TCGv_i32 r_const;
4848
4849 save_state(dc, cpu_cond);
4850 r_const = tcg_const_i32(TT_ILL_INSN);
4851 gen_helper_raise_exception(r_const);
4852 tcg_temp_free_i32(r_const);
4853 dc->is_br = 1;
4854 }
4855 return;
4856 unimp_flush:
4857 {
4858 TCGv_i32 r_const;
4859
4860 save_state(dc, cpu_cond);
4861 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4862 gen_helper_raise_exception(r_const);
4863 tcg_temp_free_i32(r_const);
4864 dc->is_br = 1;
4865 }
4866 return;
4867 #if !defined(CONFIG_USER_ONLY)
4868 priv_insn:
4869 {
4870 TCGv_i32 r_const;
4871
4872 save_state(dc, cpu_cond);
4873 r_const = tcg_const_i32(TT_PRIV_INSN);
4874 gen_helper_raise_exception(r_const);
4875 tcg_temp_free_i32(r_const);
4876 dc->is_br = 1;
4877 }
4878 return;
4879 #endif
4880 nfpu_insn:
4881 save_state(dc, cpu_cond);
4882 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4883 dc->is_br = 1;
4884 return;
4885 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4886 nfq_insn:
4887 save_state(dc, cpu_cond);
4888 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4889 dc->is_br = 1;
4890 return;
4891 #endif
4892 #ifndef TARGET_SPARC64
4893 ncp_insn:
4894 {
4895 TCGv r_const;
4896
4897 save_state(dc, cpu_cond);
4898 r_const = tcg_const_i32(TT_NCP_INSN);
4899 gen_helper_raise_exception(r_const);
4900 tcg_temp_free(r_const);
4901 dc->is_br = 1;
4902 }
4903 return;
4904 #endif
4905 }
4906
/* Translate one block of guest SPARC code starting at tb->pc into TCG ops.
   When 'spc' is non-zero we are re-translating an existing TB in order to
   locate a searched PC: per-instruction bookkeeping (gen_opc_pc, gen_opc_npc,
   gen_opc_instr_start, gen_opc_icount, gen_opc_jump_pc) is recorded so that
   gen_pc_load() can later recover the guest pc/npc for a host PC. */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;   /* per-TB disassembly state lives on the stack */
    CPUBreakpoint *bp;
    int j, lj = -1;                 /* lj: last gen_opc_* slot written (search-PC mode) */
    int num_insns;
    int max_insns;

    /* Initialize the disassembly context from the TB and CPU state. */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* On SPARC the npc is part of the TB key and arrives via cs_base. */
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    /* PSTATE.AM forces 32-bit address masking on V9. */
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Scratch temporaries shared by disas_sparc_insn(); freed at exit_gen_loop. */
    cpu_tmp0 = tcg_temp_new();
    cpu_tmp32 = tcg_temp_new_i32();
    cpu_tmp64 = tcg_temp_new_i64();

    cpu_dst = tcg_temp_local_new();

    // loads and stores
    cpu_val = tcg_temp_local_new();
    cpu_addr = tcg_temp_local_new();

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* Emit a debug exception instead of the insn if a breakpoint is set here. */
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    gen_helper_debug();
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            /* Search-PC mode: record op-index -> (pc, npc) for this insn,
               zero-filling any gap left by ops with no insn start. */
            qemu_log("Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled || singlestep) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    /* Release the scratch temporaries allocated above (reverse order). */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free_i64(cpu_tmp64);
    tcg_temp_free_i32(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        /* Fell off the end without a branch: emit the TB epilogue. */
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Pad the tail of the bookkeeping tables and export the delayed
           branch targets so gen_pc_load() can resolve a JUMP_PC npc. */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        log_page_dump();
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        /* Normal translation: record the TB's guest-code size and insn count. */
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
5048
/* Public entry point: translate the TB at tb->pc normally (spc = 0,
   no per-instruction search-PC bookkeeping). */
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}
5053
/* Re-translate 'tb' with search-PC bookkeeping enabled (spc = 1), filling
   the gen_opc_* tables that gen_pc_load() uses to recover guest pc/npc. */
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
5058
/* One-time translator initialization: register every TCG global that maps a
   CPUState field (condition codes, pc/npc, window/trap registers, the eight
   global integer registers and all FP registers) and register the helpers.
   Guarded by a static flag so repeated calls are no-ops.
   NOTE(review): the guard is a plain static int — presumably only called
   during single-threaded CPU init; confirm if that ever changes. */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    /* Names shown in TCG dumps; index 0 (g0) is never allocated because
       the register loop below starts at 1. */
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        /* env pointer lives in a fixed host register; regwptr points at the
           current register window inside CPUState. */
        cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new_ptr(TCG_AREG0,
                                             offsetof(CPUState, regwptr),
                                             "regwptr");
#ifdef TARGET_SPARC64
        /* V9-only state: extended condition codes, ASI, FP enable bits,
           graphics status, tick comparators and hypervisor registers. */
        cpu_xcc = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, xcc),
                                         "xcc");
        cpu_asi = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, asi),
                                         "asi");
        cpu_fprs = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, fprs),
                                          "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
        cpu_softint = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, softint),
                                             "softint");
#else
        /* V8-only: window invalid mask. */
        cpu_wim = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        /* State common to V8 and V9. */
        cpu_cond = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, psr),
                                         "psr");
        cpu_fsr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        /* g1..g7 (g0 is hardwired elsewhere and gets no TCG global). */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                                offsetof(CPUState, fpr[i]),
                                                fregnames[i]);

        /* register helpers */

#define GEN_HELPER 2
#include "helper.h"
    }
}
5164
5165 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5166 unsigned long searched_pc, int pc_pos, void *puc)
5167 {
5168 target_ulong npc;
5169 env->pc = gen_opc_pc[pc_pos];
5170 npc = gen_opc_npc[pc_pos];
5171 if (npc == 1) {
5172 /* dynamic NPC: already stored */
5173 } else if (npc == 2) {
5174 target_ulong t2 = (target_ulong)(unsigned long)puc;
5175 /* jump PC: use T2 and the jump targets of the translation */
5176 if (t2)
5177 env->npc = gen_opc_jump_pc[0];
5178 else
5179 env->npc = gen_opc_jump_pc[1];
5180 } else {
5181 env->npc = npc;
5182 }
5183 }