]> git.proxmox.com Git - mirror_qemu.git/blob - target-sparc/translate.c
Partially convert float128 conversion ops to TCG
[mirror_qemu.git] / target-sparc / translate.c
1 /*
2 SPARC translation
3
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
6
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
11
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 */
21
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
34 #define DEBUG_DISAS
35
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values
38 according to jump_pc[T2] */
39
40 /* global register indexes */
41 static TCGv cpu_env, cpu_regwptr;
42 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
43 static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
44 static TCGv cpu_y;
45 #ifndef CONFIG_USER_ONLY
46 static TCGv cpu_tbr;
47 #endif
48 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
49 #ifdef TARGET_SPARC64
50 static TCGv cpu_xcc, cpu_asi, cpu_fprs, cpu_gsr;
51 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
52 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
53 #else
54 static TCGv cpu_wim;
55 #endif
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
58 /* Floating point registers */
59 static TCGv cpu_fpr[TARGET_FPREGS];
60
61 #include "gen-icount.h"
62
63 typedef struct DisasContext {
64 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
65 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
66 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
67 int is_br;
68 int mem_idx;
69 int fpu_enabled;
70 int address_mask_32bit;
71 struct TranslationBlock *tb;
72 sparc_def_t *def;
73 } DisasContext;
74
// This macro uses non-native bit order, i.e. bit 0 is the MSB (the
// convention used in the SPARC instruction-format diagrams).
// Hygiene fix: fully parenthesize the expansion so the >> binds before
// the & explicitly, and so expression arguments expand correctly.
#define GET_FIELD(X, FROM, TO) \
    (((X) >> (31 - (TO))) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

// Sign-extending variants of the two extractors above.
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Map an instruction register field to a float register index.  On
   sparc64, bit 0 of a double/quad field selects the upper register bank
   (regs 32..63); the argument is parenthesized so expressions work. */
#define FFPREG(r) (r)
#define DFPREG(r) ((((r) & 1) << 5) | ((r) & 0x1e))
#define QFPREG(r) ((((r) & 1) << 5) | ((r) & 0x1c))
#else
#define FFPREG(r) (r)
#define DFPREG(r) ((r) & 0x1e)
#define QFPREG(r) ((r) & 0x1c)
#endif
95
/* Sign-extend the low `len` bits of `x` to a full int.
 *
 * The original shifted the signed value left, which is undefined
 * behaviour when sign-relevant bits are shifted out (C99 6.5.7).  Do the
 * left shift on an unsigned copy instead; the arithmetic right shift on
 * the signed result is implementation-defined but is relied upon as
 * two's-complement arithmetic shift throughout QEMU.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return ((int)((uint32_t)x << len)) >> len;
}
101
/* Bit 13 of the instruction word (`i`) selects the immediate operand form. */
#define IS_IMM (insn & (1<<13))
103
/* floating point registers moves */
/* The helpers below copy values between the per-register TCG globals
   cpu_fpr[] and the scratch slots ft0/ft1 (32-bit), dt0/dt1 (64-bit)
   and qt0/qt1 (128-bit) inside CPUSPARCState, 32 bits at a time.
   Names follow the guest view: "load_fpr_FT0" loads FT0 *from* an
   fpr (a store into env), "store_FT0_fpr" writes FT0 back to an fpr. */
static void gen_op_load_fpr_FT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, ft0));
}

static void gen_op_load_fpr_FT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, ft1));
}

static void gen_op_store_FT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, ft0));
}

/* Doubles occupy an even/odd fpr pair: src holds the upper word,
   src + 1 the lower word. */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}

static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}

/* Quads use four consecutive fprs: upmost, upper, lower, lowest. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}

static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
179
/* moves */
#ifdef CONFIG_USER_ONLY
/* User-mode emulation: the guest is never privileged. */
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
/* mem_idx doubles as the privilege level: 0 = user, >= 1 = supervisor,
   2 = hypervisor (sparc64 only). */
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#else
#endif
#endif

#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
/* Whether 32-bit address masking is in effect for this TB. */
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
/* 32-bit ABI: addresses are always masked. */
#define AM_CHECK(dc) (1)
#endif
#endif
201
/* Truncate `addr` to 32 bits when sparc64 address masking applies;
   a no-op on sparc32 builds. */
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
209
210 static inline void gen_movl_reg_TN(int reg, TCGv tn)
211 {
212 if (reg == 0)
213 tcg_gen_movi_tl(tn, 0);
214 else if (reg < 8)
215 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
216 else {
217 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
218 }
219 }
220
221 static inline void gen_movl_TN_reg(int reg, TCGv tn)
222 {
223 if (reg == 0)
224 return;
225 else if (reg < 8)
226 tcg_gen_mov_tl(cpu_gregs[reg], tn);
227 else {
228 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
229 }
230 }
231
/* Emit a jump to (pc, npc).  If both targets lie in the same guest page
   as this TB, chain directly to the next TB via goto_tb; otherwise exit
   to the main loop, which will look the target up. */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        /* tb pointer + slot index identifies the chained jump to patch */
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
252
// XXX suboptimal
/* Extract a single condition-code bit (as 0 or 1) from the 32-bit flag
   word `src` into `reg`: one helper each for N, Z, V and C. */
static inline void gen_mov_reg_N(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
281
/* Zero cpu_psr; the flag generators below then OR in N/Z/V/C bits. */
static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}

#ifdef TARGET_SPARC64
/* Same for the 64-bit condition codes cached in cpu_xcc. */
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
293
/* old op:
   if (!T0)
       env->psr |= PSR_ZERO;
   if ((int32_t) T0 < 0)
       env->psr |= PSR_NEG;
*/
/* Set the icc N and Z flags in cpu_psr from the low 32 bits of `dst`.
   Assumes the flags were cleared beforehand (gen_cc_clear_icc). */
static inline void gen_cc_NZ_icc(TCGv dst)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    /* Z: test the 32-bit truncation of the result. */
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    /* N: test the sign of the 32-bit result. */
    tcg_gen_ext_i32_tl(r_temp, dst);
    tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
    tcg_temp_free(r_temp);
}

#ifdef TARGET_SPARC64
/* As above for xcc, but on the full 64-bit value — no truncation needed. */
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif
334
/* old op:
   if (T0 < src1)
       env->psr |= PSR_CARRY;
*/
/* Set the icc carry flag after an addition: an unsigned 32-bit result
   smaller than one addend means the add wrapped. */
static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new(TCG_TYPE_TL);
    r_temp2 = tcg_temp_new(TCG_TYPE_TL);
    /* compare the 32-bit truncations, unsigned */
    tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}

#ifdef TARGET_SPARC64
/* Same carry test for xcc on the full 64-bit values. */
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
367
/* old op:
   if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
       env->psr |= PSR_OVF;
*/
/* Set the icc overflow flag after an addition: overflow occurred when
   both operands had the same sign (bit 31) and the result's sign
   differs.  The selected bit is shifted into the PSR_OVF position and
   ORed into cpu_psr. */
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}

#ifdef TARGET_SPARC64
/* Same computation for xcc, testing the sign at bit 63. */
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
405
/* Raise a tag-overflow trap (TT_TOVF) if the 32-bit signed addition
   dst = src1 + src2 overflowed; used by the trapping tagged-add ops. */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_const;
    int l1;

    l1 = gen_new_label();

    /* same sign-overflow predicate as gen_cc_V_add_icc */
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
426
/* Tagged arithmetic: set the icc overflow flag if either operand has
   a nonzero tag (either of its two low bits set). */
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

/* As above, but raise a TT_TOVF trap instead of setting the flag
   (trapping tagged-arithmetic variants). */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
}
453
/* ADDcc: dst = src1 + src2, updating the icc (and on sparc64 the xcc)
   N/Z/C/V flags.  Operands are latched in cpu_cc_src/cpu_cc_src2 and
   the result in cpu_cc_dst so the flag helpers can inspect them. */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
471
/* ADDXcc: dst = src1 + src2 + C, updating the flags.  The addition is
   done in two steps (src1 + C, then + src2) with the carry flag
   accumulated after each partial sum, since either step may wrap.
   NOTE(review): the carry from the first step is ORed in before icc is
   fully rebuilt for the final result — verify the flag sequencing
   against the SPARC ADDXcc definition. */
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    /* read the incoming carry bit from cpu_psr */
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
495
/* TADDcc: tagged add — like ADDcc but additionally sets V if either
   operand carries a nonzero 2-bit tag. */
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* TADDccTV: trapping tagged add — raises TT_TOVF on a tagged operand
   (gen_tag_tv) or on signed overflow (gen_add_tv) instead of setting V. */
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
533
/* old op:
   if (src1 < T1)
       env->psr |= PSR_CARRY;
*/
/* Set the icc carry (borrow) flag after a subtraction: borrow occurred
   when, compared unsigned over 32 bits, src1 < src2. */
static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new(TCG_TYPE_TL);
    r_temp2 = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
    tcg_temp_free(r_temp1);
    tcg_temp_free(r_temp2);
}

#ifdef TARGET_SPARC64
/* Same borrow test for xcc on the full 64-bit values. */
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
566
/* old op:
   if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
       env->psr |= PSR_OVF;
*/
/* Set the icc overflow flag after a subtraction: overflow occurred when
   the operands' signs differ and the result's sign differs from src1. */
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
    tcg_temp_free(r_temp);
}

#ifdef TARGET_SPARC64
/* Same computation for xcc, testing the sign at bit 63. */
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#endif
602
/* Raise a tag-overflow trap (TT_TOVF) if the 32-bit signed subtraction
   dst = src1 - src2 overflowed; used by the trapping tagged-sub ops. */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_const;
    int l1;

    l1 = gen_new_label();

    /* same sign-overflow predicate as gen_cc_V_sub_icc */
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
622
/* SUBcc: dst = src1 - src2, updating the icc (and on sparc64 the xcc)
   N/Z/C/V flags via the latched cc registers. */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* SUBXcc: dst = src1 - src2 - C, performed in two steps (src1 - C, then
   - src2) with borrow accumulated after each partial difference.
   NOTE(review): the borrow tests compare the running difference against
   cpu_cc_src rather than the step's own operands — verify against the
   SPARC SUBXcc definition. */
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    /* read the incoming carry bit from cpu_psr */
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
664
/* TSUBcc: tagged subtract — like SUBcc but additionally sets V if either
   operand carries a nonzero 2-bit tag. */
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/* TSUBccTV: trapping tagged subtract — raises TT_TOVF on a tagged
   operand (gen_tag_tv) or on signed overflow (gen_sub_tv). */
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
702
/* MULScc: one step of the multiply-step instruction.  Conditionally
   zeroes the second operand on Y's LSB, rotates the Y register and the
   first operand, then adds and updates the icc flags. */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_or_tl(cpu_y, cpu_tmp0, r_temp);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
750
/* UMUL: 32x32 -> 64 unsigned multiply.  The high 32 bits of the product
   are written to the Y register; dst receives the full product on
   sparc64, or the truncated low 32 bits otherwise. */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    /* zero-extend both operands to 64 bits before multiplying */
    tcg_gen_extu_i32_i64(r_temp, src2);
    tcg_gen_extu_i32_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* Y = high 32 bits of the product */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free(r_temp2);
}

/* SMUL: as gen_op_umul but sign-extends the operands. */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_ext_i32_i64(r_temp, src2);
    tcg_gen_ext_i32_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* Y = high 32 bits of the product */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free(r_temp2);
}
796
#ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO when the divisor is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
}

/* SDIVX: 64-bit signed division.  Traps on a zero divisor, and
   special-cases INT64_MIN / -1 (whose true quotient is unrepresentable)
   to yield INT64_MIN rather than executing an overflowing host divide. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
829
/* Update the icc flags after a division: N and Z from the result, and V
   when cpu_cc_src2 is nonzero.
   NOTE(review): cpu_cc_src2 appears to carry the overflow indication
   left by the division helper rather than the divisor — confirm against
   the udiv/sdiv helpers. */
static inline void gen_op_div_cc(TCGv dst)
{
    int l1;

    tcg_gen_mov_tl(cpu_cc_dst, dst);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
842
/* Update flags after a logical op: only N and Z are set; C and V stay
   cleared (both icc and, on sparc64, xcc). */
static inline void gen_op_logic_cc(TCGv dst)
{
    tcg_gen_mov_tl(cpu_cc_dst, dst);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
#endif
}
854
/* Integer branch-condition evaluators: each computes its condition (0 or
   1) into `dst` from the flag word `src` (cpu_psr or cpu_xcc), using
   cpu_tmp0 as scratch.  The comment above each gives the boolean in
   terms of the N/Z/V/C flags. */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
973
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* Extract the FCC0 / FCC1 bit (as 0 or 1) of one of the four FP
   condition-code fields from FSR value `src`; `fcc_offset` selects
   which fcc field (0 for fcc0, or the shift to fcc1..fcc3). */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
994
995 // !0: FCC0 | FCC1
996 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
997 unsigned int fcc_offset)
998 {
999 gen_mov_reg_FCC0(dst, src, fcc_offset);
1000 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1001 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1002 }
1003
1004 // 1 or 2: FCC0 ^ FCC1
1005 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
1006 unsigned int fcc_offset)
1007 {
1008 gen_mov_reg_FCC0(dst, src, fcc_offset);
1009 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1010 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1011 }
1012
1013 // 1 or 3: FCC0
1014 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
1015 unsigned int fcc_offset)
1016 {
1017 gen_mov_reg_FCC0(dst, src, fcc_offset);
1018 }
1019
1020 // 1: FCC0 & !FCC1
1021 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
1022 unsigned int fcc_offset)
1023 {
1024 gen_mov_reg_FCC0(dst, src, fcc_offset);
1025 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1026 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
1027 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1028 }
1029
1030 // 2 or 3: FCC1
1031 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
1032 unsigned int fcc_offset)
1033 {
1034 gen_mov_reg_FCC1(dst, src, fcc_offset);
1035 }
1036
1037 // 2: !FCC0 & FCC1
1038 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
1039 unsigned int fcc_offset)
1040 {
1041 gen_mov_reg_FCC0(dst, src, fcc_offset);
1042 tcg_gen_xori_tl(dst, dst, 0x1);
1043 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1044 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1045 }
1046
1047 // 3: FCC0 & FCC1
1048 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1049 unsigned int fcc_offset)
1050 {
1051 gen_mov_reg_FCC0(dst, src, fcc_offset);
1052 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1053 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1054 }
1055
1056 // 0: !(FCC0 | FCC1)
1057 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1058 unsigned int fcc_offset)
1059 {
1060 gen_mov_reg_FCC0(dst, src, fcc_offset);
1061 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1062 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1063 tcg_gen_xori_tl(dst, dst, 0x1);
1064 }
1065
1066 // 0 or 3: !(FCC0 ^ FCC1)
1067 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1068 unsigned int fcc_offset)
1069 {
1070 gen_mov_reg_FCC0(dst, src, fcc_offset);
1071 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1072 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1073 tcg_gen_xori_tl(dst, dst, 0x1);
1074 }
1075
1076 // 0 or 2: !FCC0
1077 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1078 unsigned int fcc_offset)
1079 {
1080 gen_mov_reg_FCC0(dst, src, fcc_offset);
1081 tcg_gen_xori_tl(dst, dst, 0x1);
1082 }
1083
// !1: !(FCC0 & !FCC1)
/* FBUGE (unordered, greater, or equal): complement of FBL.
   Clobbers cpu_tmp0. */
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);    /* !FCC1 */
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);              /* final negation */
}
1094
// 0 or 1: !FCC1
/* FBLE (less or equal): dst = !FCC1. */
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1102
// !2: !(!FCC0 & FCC1)
/* FBULE (unordered, less, or equal): complement of FBG.
   Clobbers cpu_tmp0. */
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);              /* !FCC0 */
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);              /* final negation */
}
1113
// !3: !(FCC0 & FCC1)
/* FBO (branch on ordered): complement of FBU.  Clobbers cpu_tmp0. */
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1123
1124 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1125 target_ulong pc2, TCGv r_cond)
1126 {
1127 int l1;
1128
1129 l1 = gen_new_label();
1130
1131 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1132
1133 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1134
1135 gen_set_label(l1);
1136 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1137 }
1138
1139 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1140 target_ulong pc2, TCGv r_cond)
1141 {
1142 int l1;
1143
1144 l1 = gen_new_label();
1145
1146 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1147
1148 gen_goto_tb(dc, 0, pc2, pc1);
1149
1150 gen_set_label(l1);
1151 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1152 }
1153
1154 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1155 TCGv r_cond)
1156 {
1157 int l1, l2;
1158
1159 l1 = gen_new_label();
1160 l2 = gen_new_label();
1161
1162 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1163
1164 tcg_gen_movi_tl(cpu_npc, npc1);
1165 tcg_gen_br(l2);
1166
1167 gen_set_label(l1);
1168 tcg_gen_movi_tl(cpu_npc, npc2);
1169 gen_set_label(l2);
1170 }
1171
/* call this function before using the condition register as it may
   have been set for a jump */
/* If a conditional branch left npc in the two-valued JUMP_PC state,
   resolve it into cpu_npc now so that cond may be reused; npc then
   becomes DYNAMIC_PC (known only at runtime). */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
1181
/* Make cpu_npc hold the correct next-PC value: resolve a pending
   JUMP_PC via the condition, or store a static npc.  A npc already
   DYNAMIC_PC is up to date and needs nothing. */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1191
/* Synchronize both cpu_pc and cpu_npc with the translator's view;
   required before anything that can raise an exception. */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}
1197
/* Advance pc to npc (as for a delay-slot instruction).  When npc is
   only known at runtime (JUMP_PC or DYNAMIC_PC) the copy happens in
   generated code and dc->pc becomes DYNAMIC_PC; otherwise it is a
   pure translation-time update. */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        /* Resolve the two-valued npc first, then copy it to pc.  */
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
1211
/* Emit pc = npc; npc += 4 — the normal sequential-advance step,
   entirely in generated code. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1217
/* Evaluate an integer condition code (Bicc/BPcc cond field, 0x0-0xf)
   against the selected condition register and leave 0/1 in r_dst.
   cc selects icc (0) vs xcc (non-zero) on SPARC64; ignored elsewhere. */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:                   /* bn: never */
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:                   /* be */
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:                   /* ble */
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:                   /* bl */
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:                   /* bleu */
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:                   /* bcs */
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:                   /* bneg */
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:                   /* bvs */
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:                   /* ba: always */
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:                   /* bne */
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:                   /* bg */
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:                   /* bge */
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:                   /* bgu */
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:                   /* bcc */
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:                   /* bpos */
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:                   /* bvc */
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1281
/* Evaluate an FP condition (FBfcc/FBPfcc cond field, 0x0-0xf) against
   the fcc field selected by cc (0-3) and leave 0/1 in r_dst.  The
   offsets shift the gen_op_eval_fb* helpers from the fcc0 bit position
   to fcc1/2/3; presumably relative to FCC0 at FSR bits 11:10 — TODO
   confirm against gen_mov_reg_FCC0. */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:                   /* fbn: never */
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:                   /* fba: always */
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1353
#ifdef TARGET_SPARC64
// Inverted logic
/* Maps the V9 BPr register-condition encoding to the *inverse* TCG
   condition: the branch below is taken when the condition is false.
   Entries of -1 are reserved encodings; callers must not select them. */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

/* r_dst = (r_src <cond> 0) ? 1 : 0, emitted as a branch over the
   movi-1 using the inverted condition from the table above. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
1378
/* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).  offset is the
   sign-extended, word-aligned displacement; a is the annul bit; cc
   selects icc/xcc.  cond 0 (never) and 8 (always) are resolved at
   translation time; other conditions leave npc in the two-valued
   JUMP_PC state (non-annulled) or end the TB (annulled). */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul: skip the delay slot as well */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annul: branch target replaces the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer the npc choice until the condition is known */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1418
/* XXX: potentially incorrect if dynamic npc */
/* Translate an FP conditional branch (FBfcc/FBPfcc).  Identical in
   structure to do_branch() above except the condition comes from
   gen_fcond() (FSR fcc field cc) — keep the two in sync. */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul: skip the delay slot as well */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annul: branch target replaces the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer the npc choice until the condition is known */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1458
1459 #ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 branch-on-register (BPr): compares r_reg against zero
   using the cond field.  Unlike do_branch() there are no never/always
   encodings to short-circuit here. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        /* defer the npc choice until the condition is known */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
1479
/* V9 FP compare helper tables, indexed by the target fcc field (0-3).
   The "e" variants additionally signal on unordered (quiet NaN)
   operands. */
static GenOpFunc * const gen_fcmpd[4] = {
    helper_fcmpd,
    helper_fcmpd_fcc1,
    helper_fcmpd_fcc2,
    helper_fcmpd_fcc3,
};

static GenOpFunc * const gen_fcmpq[4] = {
    helper_fcmpq,
    helper_fcmpq_fcc1,
    helper_fcmpq_fcc2,
    helper_fcmpq_fcc3,
};

static GenOpFunc * const gen_fcmped[4] = {
    helper_fcmped,
    helper_fcmped_fcc1,
    helper_fcmped_fcc2,
    helper_fcmped_fcc3,
};

static GenOpFunc * const gen_fcmpeq[4] = {
    helper_fcmpeq,
    helper_fcmpeq_fcc1,
    helper_fcmpeq_fcc2,
    helper_fcmpeq_fcc3,
};
1507
/* Emit a single-precision FP compare targeting fcc field fccno (0-3).
   The operands are passed directly; the result lands in the FSR. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    switch (fccno) {
    case 0:
        tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
        break;
    case 1:
        tcg_gen_helper_0_2(helper_fcmps_fcc1, r_rs1, r_rs2);
        break;
    case 2:
        tcg_gen_helper_0_2(helper_fcmps_fcc2, r_rs1, r_rs2);
        break;
    case 3:
        tcg_gen_helper_0_2(helper_fcmps_fcc3, r_rs1, r_rs2);
        break;
    }
}
1525
/* Double-precision FP compare for fcc field fccno; operands come from
   the DT0/DT1 staging registers loaded by the caller. */
static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpd[fccno]);
}

/* Quad-precision FP compare for fcc field fccno; operands come from
   the QT0/QT1 staging registers loaded by the caller. */
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpq[fccno]);
}
1535
/* Signaling single-precision FP compare (FCMPEs) for fcc field fccno. */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    switch (fccno) {
    case 0:
        tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
        break;
    case 1:
        tcg_gen_helper_0_2(helper_fcmpes_fcc1, r_rs1, r_rs2);
        break;
    case 2:
        tcg_gen_helper_0_2(helper_fcmpes_fcc2, r_rs1, r_rs2);
        break;
    case 3:
        tcg_gen_helper_0_2(helper_fcmpes_fcc3, r_rs1, r_rs2);
        break;
    }
}
1553
/* Signaling double-precision FP compare for fcc field fccno. */
static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmped[fccno]);
}

/* Signaling quad-precision FP compare for fcc field fccno. */
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
}
1563
1564 #else
1565
/* 32-bit SPARC variants: only one fcc field exists, so fccno is
   accepted for interface symmetry with the V9 code but ignored. */

static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpd);
}

static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpq);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmped);
}

static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpeq);
}
1595 #endif
1596
/* Raise a TT_FP_EXCP trap with the given FTT bits: clear the old FTT
   field in the FSR, OR in fsr_flags, then call the exception helper
   (which does not return to the emitted code path). */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
}
1607
/* If the FPU is disabled, emit an fp_disabled trap and end the TB;
   returns 1 in that case so the caller can abandon the instruction.
   In user-only builds the FPU is always available and this is a no-op
   returning 0. */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv r_const;

        /* pc/npc must be exact before raising the exception */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        tcg_gen_helper_0_1(raise_exception, r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
1624
/* Clear the current-exception (cexc) and trap-type (ftt) fields of the
   FSR before executing an FP instruction. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

/* Reset softfloat's accumulated exception flags so the next FP op
   reports only its own exceptions. */
static inline void gen_clear_float_exceptions(void)
{
    tcg_gen_helper_0_0(helper_clear_float_exceptions);
}
1634
1635 /* asi moves */
1636 #ifdef TARGET_SPARC64
1637 static inline TCGv gen_get_asi(int insn, TCGv r_addr)
1638 {
1639 int asi;
1640 TCGv r_asi;
1641
1642 if (IS_IMM) {
1643 r_asi = tcg_temp_new(TCG_TYPE_I32);
1644 tcg_gen_mov_i32(r_asi, cpu_asi);
1645 } else {
1646 asi = GET_FIELD(insn, 19, 26);
1647 r_asi = tcg_const_i32(asi);
1648 }
1649 return r_asi;
1650 }
1651
/* Emit an ASI load of `size` bytes into dst, sign-extending when
   `sign` is non-zero; all the work happens in helper_ld_asi. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1665
/* Emit an ASI store of the low `size` bytes of src via helper_st_asi. */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1676
/* Emit an ASI load into FP register rd (`size` bytes); the helper
   writes the FP register file directly, so no TCG destination here. */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, r_size, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

/* Emit an ASI store from FP register rd (`size` bytes); mirror of
   gen_ldf_asi above. */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, r_size, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1702
/* SWAPA: load the old 32-bit word into cpu_tmp64, store dst's value to
   the same address, then move the old value into dst.  The load must
   be emitted before the store; the two are not atomic here.  */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1717
/* LDDA (V9): the helper performs the doubleword load and writes the
   rd/rd+1 register pair itself; `hi` is unused on this path. */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_3(helper_ldda_asi, addr, r_asi, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_asi);
}
1728
/* STDA (V9): pack hi (even register) and register rd+1 into a 64-bit
   value with helper_pack64, then store it as one 8-byte ASI access. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_temp, r_asi, r_size;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd + 1, r_temp);             /* low word from rd+1 */
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi,
                       r_temp);
    tcg_temp_free(r_temp);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1744
/* CASA (32-bit compare-and-swap): compare register rd against memory
   at addr; helper_cas_asi does the conditional exchange with val2 and
   returns the old memory value in dst. */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
    tcg_temp_free(r_asi);
    tcg_temp_free(r_val1);
}

/* CASXA (64-bit compare-and-swap): as gen_cas_asi but full-width, with
   the compare value staged in cpu_tmp64. */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free(r_asi);
}
1768
1769 #elif !defined(CONFIG_USER_ONLY)
1770
/* 32-bit system-mode ASI load: the ASI always comes from the insn
   field; the helper returns 64 bits which are truncated into dst. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1785
/* 32-bit system-mode ASI store: widen src to the 64-bit value the
   helper expects, then store `size` bytes. */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1797
/* 32-bit SWAPA: load old word, store dst's value, return old word in
   dst.  Non-atomic load-then-store sequence; load must come first. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1812
/* 32-bit LDDA: one 8-byte ASI load, then split the result — low half
   into register rd+1, high half into rd (via `hi`). */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);   /* low 32 bits */
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);  /* high 32 bits */
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}
1830
/* 32-bit STDA: pack hi (register rd) and register rd+1 into one 64-bit
   value and store it as a single 8-byte ASI access. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_temp, r_asi, r_size;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd + 1, r_temp);             /* low word from rd+1 */
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi, r_temp);
    tcg_temp_free(r_temp);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1845 #endif
1846
1847 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the old byte into dst, then store 0xff at the same
   address.  Emitted as two separate helper calls, so not atomic.
   NOTE(review): the store path extracts the ASI from the insn field
   directly instead of gen_get_asi(); on TARGET_SPARC64 with the
   immediate (%asi) form the load and store could presumably use
   different ASIs — verify intended. */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_val, r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    tcg_gen_helper_0_4(helper_st_asi, addr, r_val, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_temp_free(r_val);
}
1862 #endif
1863
1864 static inline TCGv get_src1(unsigned int insn, TCGv def)
1865 {
1866 TCGv r_rs1 = def;
1867 unsigned int rs1;
1868
1869 rs1 = GET_FIELD(insn, 13, 17);
1870 if (rs1 == 0)
1871 r_rs1 = tcg_const_tl(0); // XXX how to free?
1872 else if (rs1 < 8)
1873 r_rs1 = cpu_gregs[rs1];
1874 else
1875 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1876 return r_rs1;
1877 }
1878
1879 static inline TCGv get_src2(unsigned int insn, TCGv def)
1880 {
1881 TCGv r_rs2 = def;
1882 unsigned int rs2;
1883
1884 if (IS_IMM) { /* immediate */
1885 rs2 = GET_FIELDs(insn, 19, 31);
1886 r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
1887 } else { /* register */
1888 rs2 = GET_FIELD(insn, 27, 31);
1889 if (rs2 == 0)
1890 r_rs2 = tcg_const_tl(0); // XXX how to free?
1891 else if (rs2 < 8)
1892 r_rs2 = cpu_gregs[rs2];
1893 else
1894 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1895 }
1896 return r_rs2;
1897 }
1898
/* Abort decoding when the current CPU model lacks a feature, jumping
   to disas_sparc_insn()'s error labels (illegal_insn for integer-unit
   features, nfpu_insn for FPU features).  Only usable inside that
   function's switch. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1905
1906 /* before an instruction, dc->pc must be static */
1907 static void disas_sparc_insn(DisasContext * dc)
1908 {
1909 unsigned int insn, opc, rs1, rs2, rd;
1910
1911 if (unlikely(loglevel & CPU_LOG_TB_OP))
1912 tcg_gen_debug_insn_start(dc->pc);
1913 insn = ldl_code(dc->pc);
1914 opc = GET_FIELD(insn, 0, 1);
1915
1916 rd = GET_FIELD(insn, 2, 6);
1917
1918 cpu_src1 = tcg_temp_new(TCG_TYPE_TL); // const
1919 cpu_src2 = tcg_temp_new(TCG_TYPE_TL); // const
1920
1921 switch (opc) {
1922 case 0: /* branches/sethi */
1923 {
1924 unsigned int xop = GET_FIELD(insn, 7, 9);
1925 int32_t target;
1926 switch (xop) {
1927 #ifdef TARGET_SPARC64
1928 case 0x1: /* V9 BPcc */
1929 {
1930 int cc;
1931
1932 target = GET_FIELD_SP(insn, 0, 18);
1933 target = sign_extend(target, 18);
1934 target <<= 2;
1935 cc = GET_FIELD_SP(insn, 20, 21);
1936 if (cc == 0)
1937 do_branch(dc, target, insn, 0, cpu_cond);
1938 else if (cc == 2)
1939 do_branch(dc, target, insn, 1, cpu_cond);
1940 else
1941 goto illegal_insn;
1942 goto jmp_insn;
1943 }
1944 case 0x3: /* V9 BPr */
1945 {
1946 target = GET_FIELD_SP(insn, 0, 13) |
1947 (GET_FIELD_SP(insn, 20, 21) << 14);
1948 target = sign_extend(target, 16);
1949 target <<= 2;
1950 cpu_src1 = get_src1(insn, cpu_src1);
1951 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1952 goto jmp_insn;
1953 }
1954 case 0x5: /* V9 FBPcc */
1955 {
1956 int cc = GET_FIELD_SP(insn, 20, 21);
1957 if (gen_trap_ifnofpu(dc, cpu_cond))
1958 goto jmp_insn;
1959 target = GET_FIELD_SP(insn, 0, 18);
1960 target = sign_extend(target, 19);
1961 target <<= 2;
1962 do_fbranch(dc, target, insn, cc, cpu_cond);
1963 goto jmp_insn;
1964 }
1965 #else
1966 case 0x7: /* CBN+x */
1967 {
1968 goto ncp_insn;
1969 }
1970 #endif
1971 case 0x2: /* BN+x */
1972 {
1973 target = GET_FIELD(insn, 10, 31);
1974 target = sign_extend(target, 22);
1975 target <<= 2;
1976 do_branch(dc, target, insn, 0, cpu_cond);
1977 goto jmp_insn;
1978 }
1979 case 0x6: /* FBN+x */
1980 {
1981 if (gen_trap_ifnofpu(dc, cpu_cond))
1982 goto jmp_insn;
1983 target = GET_FIELD(insn, 10, 31);
1984 target = sign_extend(target, 22);
1985 target <<= 2;
1986 do_fbranch(dc, target, insn, 0, cpu_cond);
1987 goto jmp_insn;
1988 }
1989 case 0x4: /* SETHI */
1990 if (rd) { // nop
1991 uint32_t value = GET_FIELD(insn, 10, 31);
1992 TCGv r_const;
1993
1994 r_const = tcg_const_tl(value << 10);
1995 gen_movl_TN_reg(rd, r_const);
1996 tcg_temp_free(r_const);
1997 }
1998 break;
1999 case 0x0: /* UNIMPL */
2000 default:
2001 goto illegal_insn;
2002 }
2003 break;
2004 }
2005 break;
2006 case 1:
2007 /*CALL*/ {
2008 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2009 TCGv r_const;
2010
2011 r_const = tcg_const_tl(dc->pc);
2012 gen_movl_TN_reg(15, r_const);
2013 tcg_temp_free(r_const);
2014 target += dc->pc;
2015 gen_mov_pc_npc(dc, cpu_cond);
2016 dc->npc = target;
2017 }
2018 goto jmp_insn;
2019 case 2: /* FPU & Logical Operations */
2020 {
2021 unsigned int xop = GET_FIELD(insn, 7, 12);
2022 if (xop == 0x3a) { /* generate trap */
2023 int cond;
2024
2025 cpu_src1 = get_src1(insn, cpu_src1);
2026 if (IS_IMM) {
2027 rs2 = GET_FIELD(insn, 25, 31);
2028 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2029 } else {
2030 rs2 = GET_FIELD(insn, 27, 31);
2031 if (rs2 != 0) {
2032 gen_movl_reg_TN(rs2, cpu_src2);
2033 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2034 } else
2035 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2036 }
2037 cond = GET_FIELD(insn, 3, 6);
2038 if (cond == 0x8) {
2039 save_state(dc, cpu_cond);
2040 tcg_gen_helper_0_1(helper_trap, cpu_dst);
2041 } else if (cond != 0) {
2042 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2043 #ifdef TARGET_SPARC64
2044 /* V9 icc/xcc */
2045 int cc = GET_FIELD_SP(insn, 11, 12);
2046
2047 save_state(dc, cpu_cond);
2048 if (cc == 0)
2049 gen_cond(r_cond, 0, cond);
2050 else if (cc == 2)
2051 gen_cond(r_cond, 1, cond);
2052 else
2053 goto illegal_insn;
2054 #else
2055 save_state(dc, cpu_cond);
2056 gen_cond(r_cond, 0, cond);
2057 #endif
2058 tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
2059 tcg_temp_free(r_cond);
2060 }
2061 gen_op_next_insn();
2062 tcg_gen_exit_tb(0);
2063 dc->is_br = 1;
2064 goto jmp_insn;
2065 } else if (xop == 0x28) {
2066 rs1 = GET_FIELD(insn, 13, 17);
2067 switch(rs1) {
2068 case 0: /* rdy */
2069 #ifndef TARGET_SPARC64
2070 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2071 manual, rdy on the microSPARC
2072 II */
2073 case 0x0f: /* stbar in the SPARCv8 manual,
2074 rdy on the microSPARC II */
2075 case 0x10 ... 0x1f: /* implementation-dependent in the
2076 SPARCv8 manual, rdy on the
2077 microSPARC II */
2078 #endif
2079 gen_movl_TN_reg(rd, cpu_y);
2080 break;
2081 #ifdef TARGET_SPARC64
2082 case 0x2: /* V9 rdccr */
2083 tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
2084 gen_movl_TN_reg(rd, cpu_dst);
2085 break;
2086 case 0x3: /* V9 rdasi */
2087 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2088 gen_movl_TN_reg(rd, cpu_dst);
2089 break;
2090 case 0x4: /* V9 rdtick */
2091 {
2092 TCGv r_tickptr;
2093
2094 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2095 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2096 offsetof(CPUState, tick));
2097 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2098 r_tickptr);
2099 tcg_temp_free(r_tickptr);
2100 gen_movl_TN_reg(rd, cpu_dst);
2101 }
2102 break;
2103 case 0x5: /* V9 rdpc */
2104 {
2105 TCGv r_const;
2106
2107 r_const = tcg_const_tl(dc->pc);
2108 gen_movl_TN_reg(rd, r_const);
2109 tcg_temp_free(r_const);
2110 }
2111 break;
2112 case 0x6: /* V9 rdfprs */
2113 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2114 gen_movl_TN_reg(rd, cpu_dst);
2115 break;
2116 case 0xf: /* V9 membar */
2117 break; /* no effect */
2118 case 0x13: /* Graphics Status */
2119 if (gen_trap_ifnofpu(dc, cpu_cond))
2120 goto jmp_insn;
2121 gen_movl_TN_reg(rd, cpu_gsr);
2122 break;
2123 case 0x17: /* Tick compare */
2124 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2125 break;
2126 case 0x18: /* System tick */
2127 {
2128 TCGv r_tickptr;
2129
2130 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2131 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2132 offsetof(CPUState, stick));
2133 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2134 r_tickptr);
2135 tcg_temp_free(r_tickptr);
2136 gen_movl_TN_reg(rd, cpu_dst);
2137 }
2138 break;
2139 case 0x19: /* System tick compare */
2140 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2141 break;
2142 case 0x10: /* Performance Control */
2143 case 0x11: /* Performance Instrumentation Counter */
2144 case 0x12: /* Dispatch Control */
2145 case 0x14: /* Softint set, WO */
2146 case 0x15: /* Softint clear, WO */
2147 case 0x16: /* Softint write */
2148 #endif
2149 default:
2150 goto illegal_insn;
2151 }
2152 #if !defined(CONFIG_USER_ONLY)
2153 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2154 #ifndef TARGET_SPARC64
2155 if (!supervisor(dc))
2156 goto priv_insn;
2157 tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
2158 #else
2159 CHECK_IU_FEATURE(dc, HYPV);
2160 if (!hypervisor(dc))
2161 goto priv_insn;
2162 rs1 = GET_FIELD(insn, 13, 17);
2163 switch (rs1) {
2164 case 0: // hpstate
2165 // gen_op_rdhpstate();
2166 break;
2167 case 1: // htstate
2168 // gen_op_rdhtstate();
2169 break;
2170 case 3: // hintp
2171 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2172 break;
2173 case 5: // htba
2174 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2175 break;
2176 case 6: // hver
2177 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2178 break;
2179 case 31: // hstick_cmpr
2180 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2181 break;
2182 default:
2183 goto illegal_insn;
2184 }
2185 #endif
2186 gen_movl_TN_reg(rd, cpu_dst);
2187 break;
2188 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2189 if (!supervisor(dc))
2190 goto priv_insn;
2191 #ifdef TARGET_SPARC64
2192 rs1 = GET_FIELD(insn, 13, 17);
2193 switch (rs1) {
2194 case 0: // tpc
2195 {
2196 TCGv r_tsptr;
2197
2198 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2199 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2200 offsetof(CPUState, tsptr));
2201 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2202 offsetof(trap_state, tpc));
2203 tcg_temp_free(r_tsptr);
2204 }
2205 break;
2206 case 1: // tnpc
2207 {
2208 TCGv r_tsptr;
2209
2210 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2211 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2212 offsetof(CPUState, tsptr));
2213 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2214 offsetof(trap_state, tnpc));
2215 tcg_temp_free(r_tsptr);
2216 }
2217 break;
2218 case 2: // tstate
2219 {
2220 TCGv r_tsptr;
2221
2222 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2223 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2224 offsetof(CPUState, tsptr));
2225 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2226 offsetof(trap_state, tstate));
2227 tcg_temp_free(r_tsptr);
2228 }
2229 break;
2230 case 3: // tt
2231 {
2232 TCGv r_tsptr;
2233
2234 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2235 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2236 offsetof(CPUState, tsptr));
2237 tcg_gen_ld_i32(cpu_tmp0, r_tsptr,
2238 offsetof(trap_state, tt));
2239 tcg_temp_free(r_tsptr);
2240 }
2241 break;
2242 case 4: // tick
2243 {
2244 TCGv r_tickptr;
2245
2246 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2247 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2248 offsetof(CPUState, tick));
2249 tcg_gen_helper_1_1(helper_tick_get_count, cpu_tmp0,
2250 r_tickptr);
2251 gen_movl_TN_reg(rd, cpu_tmp0);
2252 tcg_temp_free(r_tickptr);
2253 }
2254 break;
2255 case 5: // tba
2256 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2257 break;
2258 case 6: // pstate
2259 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2260 offsetof(CPUSPARCState, pstate));
2261 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2262 break;
2263 case 7: // tl
2264 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2265 offsetof(CPUSPARCState, tl));
2266 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2267 break;
2268 case 8: // pil
2269 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2270 offsetof(CPUSPARCState, psrpil));
2271 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2272 break;
2273 case 9: // cwp
2274 tcg_gen_helper_1_0(helper_rdcwp, cpu_tmp0);
2275 break;
2276 case 10: // cansave
2277 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2278 offsetof(CPUSPARCState, cansave));
2279 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2280 break;
2281 case 11: // canrestore
2282 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2283 offsetof(CPUSPARCState, canrestore));
2284 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2285 break;
2286 case 12: // cleanwin
2287 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2288 offsetof(CPUSPARCState, cleanwin));
2289 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2290 break;
2291 case 13: // otherwin
2292 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2293 offsetof(CPUSPARCState, otherwin));
2294 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2295 break;
2296 case 14: // wstate
2297 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2298 offsetof(CPUSPARCState, wstate));
2299 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2300 break;
2301 case 16: // UA2005 gl
2302 CHECK_IU_FEATURE(dc, GL);
2303 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2304 offsetof(CPUSPARCState, gl));
2305 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2306 break;
2307 case 26: // UA2005 strand status
2308 CHECK_IU_FEATURE(dc, HYPV);
2309 if (!hypervisor(dc))
2310 goto priv_insn;
2311 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_ssr);
2312 break;
2313 case 31: // ver
2314 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2315 break;
2316 case 15: // fq
2317 default:
2318 goto illegal_insn;
2319 }
2320 #else
2321 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2322 #endif
2323 gen_movl_TN_reg(rd, cpu_tmp0);
2324 break;
2325 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2326 #ifdef TARGET_SPARC64
2327 save_state(dc, cpu_cond);
2328 tcg_gen_helper_0_0(helper_flushw);
2329 #else
2330 if (!supervisor(dc))
2331 goto priv_insn;
2332 gen_movl_TN_reg(rd, cpu_tbr);
2333 #endif
2334 break;
2335 #endif
2336 } else if (xop == 0x34) { /* FPU Operations */
2337 if (gen_trap_ifnofpu(dc, cpu_cond))
2338 goto jmp_insn;
2339 gen_op_clear_ieee_excp_and_FTT();
2340 rs1 = GET_FIELD(insn, 13, 17);
2341 rs2 = GET_FIELD(insn, 27, 31);
2342 xop = GET_FIELD(insn, 18, 26);
2343 switch (xop) {
2344 case 0x1: /* fmovs */
2345 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2346 break;
2347 case 0x5: /* fnegs */
2348 tcg_gen_helper_1_1(helper_fnegs, cpu_fpr[rd],
2349 cpu_fpr[rs2]);
2350 break;
2351 case 0x9: /* fabss */
2352 tcg_gen_helper_1_1(helper_fabss, cpu_fpr[rd],
2353 cpu_fpr[rs2]);
2354 break;
2355 case 0x29: /* fsqrts */
2356 CHECK_FPU_FEATURE(dc, FSQRT);
2357 gen_clear_float_exceptions();
2358 tcg_gen_helper_1_1(helper_fsqrts, cpu_tmp32,
2359 cpu_fpr[rs2]);
2360 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2361 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2362 break;
2363 case 0x2a: /* fsqrtd */
2364 CHECK_FPU_FEATURE(dc, FSQRT);
2365 gen_op_load_fpr_DT1(DFPREG(rs2));
2366 gen_clear_float_exceptions();
2367 tcg_gen_helper_0_0(helper_fsqrtd);
2368 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2369 gen_op_store_DT0_fpr(DFPREG(rd));
2370 break;
2371 case 0x2b: /* fsqrtq */
2372 CHECK_FPU_FEATURE(dc, FLOAT128);
2373 gen_op_load_fpr_QT1(QFPREG(rs2));
2374 gen_clear_float_exceptions();
2375 tcg_gen_helper_0_0(helper_fsqrtq);
2376 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2377 gen_op_store_QT0_fpr(QFPREG(rd));
2378 break;
2379 case 0x41: /* fadds */
2380 gen_clear_float_exceptions();
2381 tcg_gen_helper_1_2(helper_fadds, cpu_tmp32,
2382 cpu_fpr[rs1], cpu_fpr[rs2]);
2383 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2384 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2385 break;
2386 case 0x42:
2387 gen_op_load_fpr_DT0(DFPREG(rs1));
2388 gen_op_load_fpr_DT1(DFPREG(rs2));
2389 gen_clear_float_exceptions();
2390 tcg_gen_helper_0_0(helper_faddd);
2391 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2392 gen_op_store_DT0_fpr(DFPREG(rd));
2393 break;
2394 case 0x43: /* faddq */
2395 CHECK_FPU_FEATURE(dc, FLOAT128);
2396 gen_op_load_fpr_QT0(QFPREG(rs1));
2397 gen_op_load_fpr_QT1(QFPREG(rs2));
2398 gen_clear_float_exceptions();
2399 tcg_gen_helper_0_0(helper_faddq);
2400 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2401 gen_op_store_QT0_fpr(QFPREG(rd));
2402 break;
2403 case 0x45: /* fsubs */
2404 gen_clear_float_exceptions();
2405 tcg_gen_helper_1_2(helper_fsubs, cpu_tmp32,
2406 cpu_fpr[rs1], cpu_fpr[rs2]);
2407 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2408 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2409 break;
2410 case 0x46:
2411 gen_op_load_fpr_DT0(DFPREG(rs1));
2412 gen_op_load_fpr_DT1(DFPREG(rs2));
2413 gen_clear_float_exceptions();
2414 tcg_gen_helper_0_0(helper_fsubd);
2415 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2416 gen_op_store_DT0_fpr(DFPREG(rd));
2417 break;
2418 case 0x47: /* fsubq */
2419 CHECK_FPU_FEATURE(dc, FLOAT128);
2420 gen_op_load_fpr_QT0(QFPREG(rs1));
2421 gen_op_load_fpr_QT1(QFPREG(rs2));
2422 gen_clear_float_exceptions();
2423 tcg_gen_helper_0_0(helper_fsubq);
2424 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2425 gen_op_store_QT0_fpr(QFPREG(rd));
2426 break;
2427 case 0x49: /* fmuls */
2428 CHECK_FPU_FEATURE(dc, FMUL);
2429 gen_clear_float_exceptions();
2430 tcg_gen_helper_1_2(helper_fmuls, cpu_tmp32,
2431 cpu_fpr[rs1], cpu_fpr[rs2]);
2432 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2433 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2434 break;
2435 case 0x4a: /* fmuld */
2436 CHECK_FPU_FEATURE(dc, FMUL);
2437 gen_op_load_fpr_DT0(DFPREG(rs1));
2438 gen_op_load_fpr_DT1(DFPREG(rs2));
2439 gen_clear_float_exceptions();
2440 tcg_gen_helper_0_0(helper_fmuld);
2441 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2442 gen_op_store_DT0_fpr(DFPREG(rd));
2443 break;
2444 case 0x4b: /* fmulq */
2445 CHECK_FPU_FEATURE(dc, FLOAT128);
2446 CHECK_FPU_FEATURE(dc, FMUL);
2447 gen_op_load_fpr_QT0(QFPREG(rs1));
2448 gen_op_load_fpr_QT1(QFPREG(rs2));
2449 gen_clear_float_exceptions();
2450 tcg_gen_helper_0_0(helper_fmulq);
2451 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2452 gen_op_store_QT0_fpr(QFPREG(rd));
2453 break;
2454 case 0x4d: /* fdivs */
2455 gen_clear_float_exceptions();
2456 tcg_gen_helper_1_2(helper_fdivs, cpu_tmp32,
2457 cpu_fpr[rs1], cpu_fpr[rs2]);
2458 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2459 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2460 break;
2461 case 0x4e:
2462 gen_op_load_fpr_DT0(DFPREG(rs1));
2463 gen_op_load_fpr_DT1(DFPREG(rs2));
2464 gen_clear_float_exceptions();
2465 tcg_gen_helper_0_0(helper_fdivd);
2466 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2467 gen_op_store_DT0_fpr(DFPREG(rd));
2468 break;
2469 case 0x4f: /* fdivq */
2470 CHECK_FPU_FEATURE(dc, FLOAT128);
2471 gen_op_load_fpr_QT0(QFPREG(rs1));
2472 gen_op_load_fpr_QT1(QFPREG(rs2));
2473 gen_clear_float_exceptions();
2474 tcg_gen_helper_0_0(helper_fdivq);
2475 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2476 gen_op_store_QT0_fpr(QFPREG(rd));
2477 break;
2478 case 0x69:
2479 CHECK_FPU_FEATURE(dc, FSMULD);
2480 gen_op_load_fpr_FT0(rs1);
2481 gen_op_load_fpr_FT1(rs2);
2482 gen_clear_float_exceptions();
2483 tcg_gen_helper_0_0(helper_fsmuld);
2484 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2485 gen_op_store_DT0_fpr(DFPREG(rd));
2486 break;
2487 case 0x6e: /* fdmulq */
2488 CHECK_FPU_FEATURE(dc, FLOAT128);
2489 gen_op_load_fpr_DT0(DFPREG(rs1));
2490 gen_op_load_fpr_DT1(DFPREG(rs2));
2491 gen_clear_float_exceptions();
2492 tcg_gen_helper_0_0(helper_fdmulq);
2493 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2494 gen_op_store_QT0_fpr(QFPREG(rd));
2495 break;
2496 case 0xc4: /* fitos */
2497 gen_clear_float_exceptions();
2498 tcg_gen_helper_1_1(helper_fitos, cpu_tmp32,
2499 cpu_fpr[rs2]);
2500 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2501 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2502 break;
2503 case 0xc6:
2504 gen_op_load_fpr_DT1(DFPREG(rs2));
2505 gen_clear_float_exceptions();
2506 tcg_gen_helper_0_0(helper_fdtos);
2507 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2508 gen_op_store_FT0_fpr(rd);
2509 break;
2510 case 0xc7: /* fqtos */
2511 CHECK_FPU_FEATURE(dc, FLOAT128);
2512 gen_op_load_fpr_QT1(QFPREG(rs2));
2513 gen_clear_float_exceptions();
2514 tcg_gen_helper_1_0(helper_fqtos, cpu_tmp32);
2515 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2516 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2517 break;
2518 case 0xc8:
2519 gen_op_load_fpr_FT1(rs2);
2520 tcg_gen_helper_0_0(helper_fitod);
2521 gen_op_store_DT0_fpr(DFPREG(rd));
2522 break;
2523 case 0xc9:
2524 gen_op_load_fpr_FT1(rs2);
2525 tcg_gen_helper_0_0(helper_fstod);
2526 gen_op_store_DT0_fpr(DFPREG(rd));
2527 break;
2528 case 0xcb: /* fqtod */
2529 CHECK_FPU_FEATURE(dc, FLOAT128);
2530 gen_op_load_fpr_QT1(QFPREG(rs2));
2531 gen_clear_float_exceptions();
2532 tcg_gen_helper_0_0(helper_fqtod);
2533 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2534 gen_op_store_DT0_fpr(DFPREG(rd));
2535 break;
2536 case 0xcc: /* fitoq */
2537 CHECK_FPU_FEATURE(dc, FLOAT128);
2538 tcg_gen_helper_0_1(helper_fitoq, cpu_fpr[rs2]);
2539 gen_op_store_QT0_fpr(QFPREG(rd));
2540 break;
2541 case 0xcd: /* fstoq */
2542 CHECK_FPU_FEATURE(dc, FLOAT128);
2543 tcg_gen_helper_0_1(helper_fstoq, cpu_fpr[rs2]);
2544 gen_op_store_QT0_fpr(QFPREG(rd));
2545 break;
2546 case 0xce: /* fdtoq */
2547 CHECK_FPU_FEATURE(dc, FLOAT128);
2548 gen_op_load_fpr_DT1(DFPREG(rs2));
2549 tcg_gen_helper_0_0(helper_fdtoq);
2550 gen_op_store_QT0_fpr(QFPREG(rd));
2551 break;
2552 case 0xd1: /* fstoi */
2553 gen_clear_float_exceptions();
2554 tcg_gen_helper_1_1(helper_fstoi, cpu_tmp32,
2555 cpu_fpr[rs2]);
2556 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2557 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2558 break;
2559 case 0xd2:
2560 gen_op_load_fpr_DT1(DFPREG(rs2));
2561 gen_clear_float_exceptions();
2562 tcg_gen_helper_0_0(helper_fdtoi);
2563 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2564 gen_op_store_FT0_fpr(rd);
2565 break;
2566 case 0xd3: /* fqtoi */
2567 CHECK_FPU_FEATURE(dc, FLOAT128);
2568 gen_op_load_fpr_QT1(QFPREG(rs2));
2569 gen_clear_float_exceptions();
2570 tcg_gen_helper_1_0(helper_fqtoi, cpu_tmp32);
2571 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2572 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2573 break;
2574 #ifdef TARGET_SPARC64
2575 case 0x2: /* V9 fmovd */
2576 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
2577 cpu_fpr[DFPREG(rs2)]);
2578 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2579 cpu_fpr[DFPREG(rs2) + 1]);
2580 break;
2581 case 0x3: /* V9 fmovq */
2582 CHECK_FPU_FEATURE(dc, FLOAT128);
2583 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
2584 cpu_fpr[QFPREG(rs2)]);
2585 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2586 cpu_fpr[QFPREG(rs2) + 1]);
2587 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2588 cpu_fpr[QFPREG(rs2) + 2]);
2589 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2590 cpu_fpr[QFPREG(rs2) + 3]);
2591 break;
2592 case 0x6: /* V9 fnegd */
2593 gen_op_load_fpr_DT1(DFPREG(rs2));
2594 tcg_gen_helper_0_0(helper_fnegd);
2595 gen_op_store_DT0_fpr(DFPREG(rd));
2596 break;
2597 case 0x7: /* V9 fnegq */
2598 CHECK_FPU_FEATURE(dc, FLOAT128);
2599 gen_op_load_fpr_QT1(QFPREG(rs2));
2600 tcg_gen_helper_0_0(helper_fnegq);
2601 gen_op_store_QT0_fpr(QFPREG(rd));
2602 break;
2603 case 0xa: /* V9 fabsd */
2604 gen_op_load_fpr_DT1(DFPREG(rs2));
2605 tcg_gen_helper_0_0(helper_fabsd);
2606 gen_op_store_DT0_fpr(DFPREG(rd));
2607 break;
2608 case 0xb: /* V9 fabsq */
2609 CHECK_FPU_FEATURE(dc, FLOAT128);
2610 gen_op_load_fpr_QT1(QFPREG(rs2));
2611 tcg_gen_helper_0_0(helper_fabsq);
2612 gen_op_store_QT0_fpr(QFPREG(rd));
2613 break;
2614 case 0x81: /* V9 fstox */
2615 gen_op_load_fpr_FT1(rs2);
2616 gen_clear_float_exceptions();
2617 tcg_gen_helper_0_0(helper_fstox);
2618 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2619 gen_op_store_DT0_fpr(DFPREG(rd));
2620 break;
2621 case 0x82: /* V9 fdtox */
2622 gen_op_load_fpr_DT1(DFPREG(rs2));
2623 gen_clear_float_exceptions();
2624 tcg_gen_helper_0_0(helper_fdtox);
2625 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2626 gen_op_store_DT0_fpr(DFPREG(rd));
2627 break;
2628 case 0x83: /* V9 fqtox */
2629 CHECK_FPU_FEATURE(dc, FLOAT128);
2630 gen_op_load_fpr_QT1(QFPREG(rs2));
2631 gen_clear_float_exceptions();
2632 tcg_gen_helper_0_0(helper_fqtox);
2633 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2634 gen_op_store_DT0_fpr(DFPREG(rd));
2635 break;
2636 case 0x84: /* V9 fxtos */
2637 gen_op_load_fpr_DT1(DFPREG(rs2));
2638 gen_clear_float_exceptions();
2639 tcg_gen_helper_0_0(helper_fxtos);
2640 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2641 gen_op_store_FT0_fpr(rd);
2642 break;
2643 case 0x88: /* V9 fxtod */
2644 gen_op_load_fpr_DT1(DFPREG(rs2));
2645 gen_clear_float_exceptions();
2646 tcg_gen_helper_0_0(helper_fxtod);
2647 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2648 gen_op_store_DT0_fpr(DFPREG(rd));
2649 break;
2650 case 0x8c: /* V9 fxtoq */
2651 CHECK_FPU_FEATURE(dc, FLOAT128);
2652 gen_op_load_fpr_DT1(DFPREG(rs2));
2653 gen_clear_float_exceptions();
2654 tcg_gen_helper_0_0(helper_fxtoq);
2655 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2656 gen_op_store_QT0_fpr(QFPREG(rd));
2657 break;
2658 #endif
2659 default:
2660 goto illegal_insn;
2661 }
2662 } else if (xop == 0x35) { /* FPU Operations */
2663 #ifdef TARGET_SPARC64
2664 int cond;
2665 #endif
2666 if (gen_trap_ifnofpu(dc, cpu_cond))
2667 goto jmp_insn;
2668 gen_op_clear_ieee_excp_and_FTT();
2669 rs1 = GET_FIELD(insn, 13, 17);
2670 rs2 = GET_FIELD(insn, 27, 31);
2671 xop = GET_FIELD(insn, 18, 26);
2672 #ifdef TARGET_SPARC64
2673 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2674 int l1;
2675
2676 l1 = gen_new_label();
2677 cond = GET_FIELD_SP(insn, 14, 17);
2678 cpu_src1 = get_src1(insn, cpu_src1);
2679 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2680 0, l1);
2681 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2682 gen_set_label(l1);
2683 break;
2684 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2685 int l1;
2686
2687 l1 = gen_new_label();
2688 cond = GET_FIELD_SP(insn, 14, 17);
2689 cpu_src1 = get_src1(insn, cpu_src1);
2690 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2691 0, l1);
2692 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2693 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2694 gen_set_label(l1);
2695 break;
2696 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2697 int l1;
2698
2699 CHECK_FPU_FEATURE(dc, FLOAT128);
2700 l1 = gen_new_label();
2701 cond = GET_FIELD_SP(insn, 14, 17);
2702 cpu_src1 = get_src1(insn, cpu_src1);
2703 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2704 0, l1);
2705 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2706 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2707 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2708 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2709 gen_set_label(l1);
2710 break;
2711 }
2712 #endif
2713 switch (xop) {
2714 #ifdef TARGET_SPARC64
2715 #define FMOVSCC(fcc) \
2716 { \
2717 TCGv r_cond; \
2718 int l1; \
2719 \
2720 l1 = gen_new_label(); \
2721 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2722 cond = GET_FIELD_SP(insn, 14, 17); \
2723 gen_fcond(r_cond, fcc, cond); \
2724 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2725 0, l1); \
2726 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2727 gen_set_label(l1); \
2728 tcg_temp_free(r_cond); \
2729 }
2730 #define FMOVDCC(fcc) \
2731 { \
2732 TCGv r_cond; \
2733 int l1; \
2734 \
2735 l1 = gen_new_label(); \
2736 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2737 cond = GET_FIELD_SP(insn, 14, 17); \
2738 gen_fcond(r_cond, fcc, cond); \
2739 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2740 0, l1); \
2741 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2742 cpu_fpr[DFPREG(rs2)]); \
2743 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2744 cpu_fpr[DFPREG(rs2) + 1]); \
2745 gen_set_label(l1); \
2746 tcg_temp_free(r_cond); \
2747 }
2748 #define FMOVQCC(fcc) \
2749 { \
2750 TCGv r_cond; \
2751 int l1; \
2752 \
2753 l1 = gen_new_label(); \
2754 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2755 cond = GET_FIELD_SP(insn, 14, 17); \
2756 gen_fcond(r_cond, fcc, cond); \
2757 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2758 0, l1); \
2759 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2760 cpu_fpr[QFPREG(rs2)]); \
2761 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2762 cpu_fpr[QFPREG(rs2) + 1]); \
2763 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2764 cpu_fpr[QFPREG(rs2) + 2]); \
2765 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2766 cpu_fpr[QFPREG(rs2) + 3]); \
2767 gen_set_label(l1); \
2768 tcg_temp_free(r_cond); \
2769 }
2770 case 0x001: /* V9 fmovscc %fcc0 */
2771 FMOVSCC(0);
2772 break;
2773 case 0x002: /* V9 fmovdcc %fcc0 */
2774 FMOVDCC(0);
2775 break;
2776 case 0x003: /* V9 fmovqcc %fcc0 */
2777 CHECK_FPU_FEATURE(dc, FLOAT128);
2778 FMOVQCC(0);
2779 break;
2780 case 0x041: /* V9 fmovscc %fcc1 */
2781 FMOVSCC(1);
2782 break;
2783 case 0x042: /* V9 fmovdcc %fcc1 */
2784 FMOVDCC(1);
2785 break;
2786 case 0x043: /* V9 fmovqcc %fcc1 */
2787 CHECK_FPU_FEATURE(dc, FLOAT128);
2788 FMOVQCC(1);
2789 break;
2790 case 0x081: /* V9 fmovscc %fcc2 */
2791 FMOVSCC(2);
2792 break;
2793 case 0x082: /* V9 fmovdcc %fcc2 */
2794 FMOVDCC(2);
2795 break;
2796 case 0x083: /* V9 fmovqcc %fcc2 */
2797 CHECK_FPU_FEATURE(dc, FLOAT128);
2798 FMOVQCC(2);
2799 break;
2800 case 0x0c1: /* V9 fmovscc %fcc3 */
2801 FMOVSCC(3);
2802 break;
2803 case 0x0c2: /* V9 fmovdcc %fcc3 */
2804 FMOVDCC(3);
2805 break;
2806 case 0x0c3: /* V9 fmovqcc %fcc3 */
2807 CHECK_FPU_FEATURE(dc, FLOAT128);
2808 FMOVQCC(3);
2809 break;
2810 #undef FMOVSCC
2811 #undef FMOVDCC
2812 #undef FMOVQCC
2813 #define FMOVCC(size_FDQ, icc) \
2814 { \
2815 TCGv r_cond; \
2816 int l1; \
2817 \
2818 l1 = gen_new_label(); \
2819 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2820 cond = GET_FIELD_SP(insn, 14, 17); \
2821 gen_cond(r_cond, icc, cond); \
2822 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2823 0, l1); \
2824 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2825 (glue(size_FDQ, FPREG(rs2))); \
2826 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2827 (glue(size_FDQ, FPREG(rd))); \
2828 gen_set_label(l1); \
2829 tcg_temp_free(r_cond); \
2830 }
2831 #define FMOVSCC(icc) \
2832 { \
2833 TCGv r_cond; \
2834 int l1; \
2835 \
2836 l1 = gen_new_label(); \
2837 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2838 cond = GET_FIELD_SP(insn, 14, 17); \
2839 gen_cond(r_cond, icc, cond); \
2840 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2841 0, l1); \
2842 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2843 gen_set_label(l1); \
2844 tcg_temp_free(r_cond); \
2845 }
2846 #define FMOVDCC(icc) \
2847 { \
2848 TCGv r_cond; \
2849 int l1; \
2850 \
2851 l1 = gen_new_label(); \
2852 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2853 cond = GET_FIELD_SP(insn, 14, 17); \
2854 gen_cond(r_cond, icc, cond); \
2855 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2856 0, l1); \
2857 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2858 cpu_fpr[DFPREG(rs2)]); \
2859 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2860 cpu_fpr[DFPREG(rs2) + 1]); \
2861 gen_set_label(l1); \
2862 tcg_temp_free(r_cond); \
2863 }
2864 #define FMOVQCC(icc) \
2865 { \
2866 TCGv r_cond; \
2867 int l1; \
2868 \
2869 l1 = gen_new_label(); \
2870 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2871 cond = GET_FIELD_SP(insn, 14, 17); \
2872 gen_cond(r_cond, icc, cond); \
2873 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2874 0, l1); \
2875 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2876 cpu_fpr[QFPREG(rs2)]); \
2877 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2878 cpu_fpr[QFPREG(rs2) + 1]); \
2879 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2880 cpu_fpr[QFPREG(rs2) + 2]); \
2881 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2882 cpu_fpr[QFPREG(rs2) + 3]); \
2883 gen_set_label(l1); \
2884 tcg_temp_free(r_cond); \
2885 }
2886
2887 case 0x101: /* V9 fmovscc %icc */
2888 FMOVSCC(0);
2889 break;
2890 case 0x102: /* V9 fmovdcc %icc */
2891 FMOVDCC(0);
2892 case 0x103: /* V9 fmovqcc %icc */
2893 CHECK_FPU_FEATURE(dc, FLOAT128);
2894 FMOVQCC(0);
2895 break;
2896 case 0x181: /* V9 fmovscc %xcc */
2897 FMOVSCC(1);
2898 break;
2899 case 0x182: /* V9 fmovdcc %xcc */
2900 FMOVDCC(1);
2901 break;
2902 case 0x183: /* V9 fmovqcc %xcc */
2903 CHECK_FPU_FEATURE(dc, FLOAT128);
2904 FMOVQCC(1);
2905 break;
2906 #undef FMOVSCC
2907 #undef FMOVDCC
2908 #undef FMOVQCC
2909 #endif
2910 case 0x51: /* fcmps, V9 %fcc */
2911 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2912 break;
2913 case 0x52: /* fcmpd, V9 %fcc */
2914 gen_op_load_fpr_DT0(DFPREG(rs1));
2915 gen_op_load_fpr_DT1(DFPREG(rs2));
2916 gen_op_fcmpd(rd & 3);
2917 break;
2918 case 0x53: /* fcmpq, V9 %fcc */
2919 CHECK_FPU_FEATURE(dc, FLOAT128);
2920 gen_op_load_fpr_QT0(QFPREG(rs1));
2921 gen_op_load_fpr_QT1(QFPREG(rs2));
2922 gen_op_fcmpq(rd & 3);
2923 break;
2924 case 0x55: /* fcmpes, V9 %fcc */
2925 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2926 break;
2927 case 0x56: /* fcmped, V9 %fcc */
2928 gen_op_load_fpr_DT0(DFPREG(rs1));
2929 gen_op_load_fpr_DT1(DFPREG(rs2));
2930 gen_op_fcmped(rd & 3);
2931 break;
2932 case 0x57: /* fcmpeq, V9 %fcc */
2933 CHECK_FPU_FEATURE(dc, FLOAT128);
2934 gen_op_load_fpr_QT0(QFPREG(rs1));
2935 gen_op_load_fpr_QT1(QFPREG(rs2));
2936 gen_op_fcmpeq(rd & 3);
2937 break;
2938 default:
2939 goto illegal_insn;
2940 }
2941 } else if (xop == 0x2) {
2942 // clr/mov shortcut
2943
2944 rs1 = GET_FIELD(insn, 13, 17);
2945 if (rs1 == 0) {
2946 // or %g0, x, y -> mov T0, x; mov y, T0
2947 if (IS_IMM) { /* immediate */
2948 TCGv r_const;
2949
2950 rs2 = GET_FIELDs(insn, 19, 31);
2951 r_const = tcg_const_tl((int)rs2);
2952 gen_movl_TN_reg(rd, r_const);
2953 tcg_temp_free(r_const);
2954 } else { /* register */
2955 rs2 = GET_FIELD(insn, 27, 31);
2956 gen_movl_reg_TN(rs2, cpu_dst);
2957 gen_movl_TN_reg(rd, cpu_dst);
2958 }
2959 } else {
2960 cpu_src1 = get_src1(insn, cpu_src1);
2961 if (IS_IMM) { /* immediate */
2962 rs2 = GET_FIELDs(insn, 19, 31);
2963 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
2964 gen_movl_TN_reg(rd, cpu_dst);
2965 } else { /* register */
2966 // or x, %g0, y -> mov T1, x; mov y, T1
2967 rs2 = GET_FIELD(insn, 27, 31);
2968 if (rs2 != 0) {
2969 gen_movl_reg_TN(rs2, cpu_src2);
2970 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2971 gen_movl_TN_reg(rd, cpu_dst);
2972 } else
2973 gen_movl_TN_reg(rd, cpu_src1);
2974 }
2975 }
2976 #ifdef TARGET_SPARC64
2977 } else if (xop == 0x25) { /* sll, V9 sllx */
2978 cpu_src1 = get_src1(insn, cpu_src1);
2979 if (IS_IMM) { /* immediate */
2980 rs2 = GET_FIELDs(insn, 20, 31);
2981 if (insn & (1 << 12)) {
2982 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2983 } else {
2984 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
2985 }
2986 } else { /* register */
2987 rs2 = GET_FIELD(insn, 27, 31);
2988 gen_movl_reg_TN(rs2, cpu_src2);
2989 if (insn & (1 << 12)) {
2990 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2991 } else {
2992 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2993 }
2994 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2995 }
2996 gen_movl_TN_reg(rd, cpu_dst);
2997 } else if (xop == 0x26) { /* srl, V9 srlx */
2998 cpu_src1 = get_src1(insn, cpu_src1);
2999 if (IS_IMM) { /* immediate */
3000 rs2 = GET_FIELDs(insn, 20, 31);
3001 if (insn & (1 << 12)) {
3002 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3003 } else {
3004 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3005 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3006 }
3007 } else { /* register */
3008 rs2 = GET_FIELD(insn, 27, 31);
3009 gen_movl_reg_TN(rs2, cpu_src2);
3010 if (insn & (1 << 12)) {
3011 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3012 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3013 } else {
3014 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3015 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3016 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3017 }
3018 }
3019 gen_movl_TN_reg(rd, cpu_dst);
3020 } else if (xop == 0x27) { /* sra, V9 srax */
3021 cpu_src1 = get_src1(insn, cpu_src1);
3022 if (IS_IMM) { /* immediate */
3023 rs2 = GET_FIELDs(insn, 20, 31);
3024 if (insn & (1 << 12)) {
3025 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3026 } else {
3027 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3028 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3029 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3030 }
3031 } else { /* register */
3032 rs2 = GET_FIELD(insn, 27, 31);
3033 gen_movl_reg_TN(rs2, cpu_src2);
3034 if (insn & (1 << 12)) {
3035 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3036 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3037 } else {
3038 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3039 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3040 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3041 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3042 }
3043 }
3044 gen_movl_TN_reg(rd, cpu_dst);
3045 #endif
3046 } else if (xop < 0x36) {
3047 cpu_src1 = get_src1(insn, cpu_src1);
3048 cpu_src2 = get_src2(insn, cpu_src2);
3049 if (xop < 0x20) {
3050 switch (xop & ~0x10) {
3051 case 0x0:
3052 if (xop & 0x10)
3053 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3054 else
3055 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3056 break;
3057 case 0x1:
3058 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3059 if (xop & 0x10)
3060 gen_op_logic_cc(cpu_dst);
3061 break;
3062 case 0x2:
3063 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3064 if (xop & 0x10)
3065 gen_op_logic_cc(cpu_dst);
3066 break;
3067 case 0x3:
3068 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3069 if (xop & 0x10)
3070 gen_op_logic_cc(cpu_dst);
3071 break;
3072 case 0x4:
3073 if (xop & 0x10)
3074 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3075 else
3076 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3077 break;
3078 case 0x5:
3079 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3080 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
3081 if (xop & 0x10)
3082 gen_op_logic_cc(cpu_dst);
3083 break;
3084 case 0x6:
3085 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3086 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
3087 if (xop & 0x10)
3088 gen_op_logic_cc(cpu_dst);
3089 break;
3090 case 0x7:
3091 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3092 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3093 if (xop & 0x10)
3094 gen_op_logic_cc(cpu_dst);
3095 break;
3096 case 0x8:
3097 if (xop & 0x10)
3098 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3099 else {
3100 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3101 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3102 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3103 }
3104 break;
3105 #ifdef TARGET_SPARC64
3106 case 0x9: /* V9 mulx */
3107 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3108 break;
3109 #endif
3110 case 0xa:
3111 CHECK_IU_FEATURE(dc, MUL);
3112 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3113 if (xop & 0x10)
3114 gen_op_logic_cc(cpu_dst);
3115 break;
3116 case 0xb:
3117 CHECK_IU_FEATURE(dc, MUL);
3118 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3119 if (xop & 0x10)
3120 gen_op_logic_cc(cpu_dst);
3121 break;
3122 case 0xc:
3123 if (xop & 0x10)
3124 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3125 else {
3126 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3127 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3128 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3129 }
3130 break;
3131 #ifdef TARGET_SPARC64
3132 case 0xd: /* V9 udivx */
3133 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3134 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3135 gen_trap_ifdivzero_tl(cpu_cc_src2);
3136 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3137 break;
3138 #endif
3139 case 0xe:
3140 CHECK_IU_FEATURE(dc, DIV);
3141 tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
3142 cpu_src2);
3143 if (xop & 0x10)
3144 gen_op_div_cc(cpu_dst);
3145 break;
3146 case 0xf:
3147 CHECK_IU_FEATURE(dc, DIV);
3148 tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
3149 cpu_src2);
3150 if (xop & 0x10)
3151 gen_op_div_cc(cpu_dst);
3152 break;
3153 default:
3154 goto illegal_insn;
3155 }
3156 gen_movl_TN_reg(rd, cpu_dst);
3157 } else {
3158 switch (xop) {
3159 case 0x20: /* taddcc */
3160 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3161 gen_movl_TN_reg(rd, cpu_dst);
3162 break;
3163 case 0x21: /* tsubcc */
3164 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3165 gen_movl_TN_reg(rd, cpu_dst);
3166 break;
3167 case 0x22: /* taddcctv */
3168 save_state(dc, cpu_cond);
3169 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3170 gen_movl_TN_reg(rd, cpu_dst);
3171 break;
3172 case 0x23: /* tsubcctv */
3173 save_state(dc, cpu_cond);
3174 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3175 gen_movl_TN_reg(rd, cpu_dst);
3176 break;
3177 case 0x24: /* mulscc */
3178 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3179 gen_movl_TN_reg(rd, cpu_dst);
3180 break;
3181 #ifndef TARGET_SPARC64
3182 case 0x25: /* sll */
3183 if (IS_IMM) { /* immediate */
3184 rs2 = GET_FIELDs(insn, 20, 31);
3185 tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3186 } else { /* register */
3187 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3188 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3189 }
3190 gen_movl_TN_reg(rd, cpu_dst);
3191 break;
3192 case 0x26: /* srl */
3193 if (IS_IMM) { /* immediate */
3194 rs2 = GET_FIELDs(insn, 20, 31);
3195 tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3196 } else { /* register */
3197 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3198 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3199 }
3200 gen_movl_TN_reg(rd, cpu_dst);
3201 break;
3202 case 0x27: /* sra */
3203 if (IS_IMM) { /* immediate */
3204 rs2 = GET_FIELDs(insn, 20, 31);
3205 tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3206 } else { /* register */
3207 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3208 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3209 }
3210 gen_movl_TN_reg(rd, cpu_dst);
3211 break;
3212 #endif
3213 case 0x30:
3214 {
3215 switch(rd) {
3216 case 0: /* wry */
3217 tcg_gen_xor_tl(cpu_y, cpu_src1, cpu_src2);
3218 break;
3219 #ifndef TARGET_SPARC64
3220 case 0x01 ... 0x0f: /* undefined in the
3221 SPARCv8 manual, nop
3222 on the microSPARC
3223 II */
3224 case 0x10 ... 0x1f: /* implementation-dependent
3225 in the SPARCv8
3226 manual, nop on the
3227 microSPARC II */
3228 break;
3229 #else
3230 case 0x2: /* V9 wrccr */
3231 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3232 tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
3233 break;
3234 case 0x3: /* V9 wrasi */
3235 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3236 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3237 break;
3238 case 0x6: /* V9 wrfprs */
3239 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3240 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3241 save_state(dc, cpu_cond);
3242 gen_op_next_insn();
3243 tcg_gen_exit_tb(0);
3244 dc->is_br = 1;
3245 break;
3246 case 0xf: /* V9 sir, nop if user */
3247 #if !defined(CONFIG_USER_ONLY)
3248 if (supervisor(dc))
3249 ; // XXX
3250 #endif
3251 break;
3252 case 0x13: /* Graphics Status */
3253 if (gen_trap_ifnofpu(dc, cpu_cond))
3254 goto jmp_insn;
3255 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3256 break;
3257 case 0x17: /* Tick compare */
3258 #if !defined(CONFIG_USER_ONLY)
3259 if (!supervisor(dc))
3260 goto illegal_insn;
3261 #endif
3262 {
3263 TCGv r_tickptr;
3264
3265 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3266 cpu_src2);
3267 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3268 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3269 offsetof(CPUState, tick));
3270 tcg_gen_helper_0_2(helper_tick_set_limit,
3271 r_tickptr, cpu_tick_cmpr);
3272 tcg_temp_free(r_tickptr);
3273 }
3274 break;
3275 case 0x18: /* System tick */
3276 #if !defined(CONFIG_USER_ONLY)
3277 if (!supervisor(dc))
3278 goto illegal_insn;
3279 #endif
3280 {
3281 TCGv r_tickptr;
3282
3283 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3284 cpu_src2);
3285 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3286 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3287 offsetof(CPUState, stick));
3288 tcg_gen_helper_0_2(helper_tick_set_count,
3289 r_tickptr, cpu_dst);
3290 tcg_temp_free(r_tickptr);
3291 }
3292 break;
3293 case 0x19: /* System tick compare */
3294 #if !defined(CONFIG_USER_ONLY)
3295 if (!supervisor(dc))
3296 goto illegal_insn;
3297 #endif
3298 {
3299 TCGv r_tickptr;
3300
3301 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3302 cpu_src2);
3303 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3304 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3305 offsetof(CPUState, stick));
3306 tcg_gen_helper_0_2(helper_tick_set_limit,
3307 r_tickptr, cpu_stick_cmpr);
3308 tcg_temp_free(r_tickptr);
3309 }
3310 break;
3311
3312 case 0x10: /* Performance Control */
3313 case 0x11: /* Performance Instrumentation
3314 Counter */
3315 case 0x12: /* Dispatch Control */
3316 case 0x14: /* Softint set */
3317 case 0x15: /* Softint clear */
3318 case 0x16: /* Softint write */
3319 #endif
3320 default:
3321 goto illegal_insn;
3322 }
3323 }
3324 break;
3325 #if !defined(CONFIG_USER_ONLY)
3326 case 0x31: /* wrpsr, V9 saved, restored */
3327 {
3328 if (!supervisor(dc))
3329 goto priv_insn;
3330 #ifdef TARGET_SPARC64
3331 switch (rd) {
3332 case 0:
3333 tcg_gen_helper_0_0(helper_saved);
3334 break;
3335 case 1:
3336 tcg_gen_helper_0_0(helper_restored);
3337 break;
3338 case 2: /* UA2005 allclean */
3339 case 3: /* UA2005 otherw */
3340 case 4: /* UA2005 normalw */
3341 case 5: /* UA2005 invalw */
3342 // XXX
3343 default:
3344 goto illegal_insn;
3345 }
3346 #else
3347 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3348 tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
3349 save_state(dc, cpu_cond);
3350 gen_op_next_insn();
3351 tcg_gen_exit_tb(0);
3352 dc->is_br = 1;
3353 #endif
3354 }
3355 break;
3356 case 0x32: /* wrwim, V9 wrpr */
3357 {
3358 if (!supervisor(dc))
3359 goto priv_insn;
3360 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3361 #ifdef TARGET_SPARC64
3362 switch (rd) {
3363 case 0: // tpc
3364 {
3365 TCGv r_tsptr;
3366
3367 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3368 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3369 offsetof(CPUState, tsptr));
3370 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3371 offsetof(trap_state, tpc));
3372 tcg_temp_free(r_tsptr);
3373 }
3374 break;
3375 case 1: // tnpc
3376 {
3377 TCGv r_tsptr;
3378
3379 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3380 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3381 offsetof(CPUState, tsptr));
3382 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3383 offsetof(trap_state, tnpc));
3384 tcg_temp_free(r_tsptr);
3385 }
3386 break;
3387 case 2: // tstate
3388 {
3389 TCGv r_tsptr;
3390
3391 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3392 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3393 offsetof(CPUState, tsptr));
3394 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3395 offsetof(trap_state,
3396 tstate));
3397 tcg_temp_free(r_tsptr);
3398 }
3399 break;
3400 case 3: // tt
3401 {
3402 TCGv r_tsptr;
3403
3404 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3405 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3406 offsetof(CPUState, tsptr));
3407 tcg_gen_st_i32(cpu_tmp0, r_tsptr,
3408 offsetof(trap_state, tt));
3409 tcg_temp_free(r_tsptr);
3410 }
3411 break;
3412 case 4: // tick
3413 {
3414 TCGv r_tickptr;
3415
3416 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3417 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3418 offsetof(CPUState, tick));
3419 tcg_gen_helper_0_2(helper_tick_set_count,
3420 r_tickptr, cpu_tmp0);
3421 tcg_temp_free(r_tickptr);
3422 }
3423 break;
3424 case 5: // tba
3425 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3426 break;
3427 case 6: // pstate
3428 save_state(dc, cpu_cond);
3429 tcg_gen_helper_0_1(helper_wrpstate, cpu_tmp0);
3430 gen_op_next_insn();
3431 tcg_gen_exit_tb(0);
3432 dc->is_br = 1;
3433 break;
3434 case 7: // tl
3435 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3436 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3437 offsetof(CPUSPARCState, tl));
3438 break;
3439 case 8: // pil
3440 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3441 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3442 offsetof(CPUSPARCState,
3443 psrpil));
3444 break;
3445 case 9: // cwp
3446 tcg_gen_helper_0_1(helper_wrcwp, cpu_tmp0);
3447 break;
3448 case 10: // cansave
3449 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3450 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3451 offsetof(CPUSPARCState,
3452 cansave));
3453 break;
3454 case 11: // canrestore
3455 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3456 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3457 offsetof(CPUSPARCState,
3458 canrestore));
3459 break;
3460 case 12: // cleanwin
3461 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3462 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3463 offsetof(CPUSPARCState,
3464 cleanwin));
3465 break;
3466 case 13: // otherwin
3467 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3468 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3469 offsetof(CPUSPARCState,
3470 otherwin));
3471 break;
3472 case 14: // wstate
3473 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3474 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3475 offsetof(CPUSPARCState,
3476 wstate));
3477 break;
3478 case 16: // UA2005 gl
3479 CHECK_IU_FEATURE(dc, GL);
3480 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3481 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3482 offsetof(CPUSPARCState, gl));
3483 break;
3484 case 26: // UA2005 strand status
3485 CHECK_IU_FEATURE(dc, HYPV);
3486 if (!hypervisor(dc))
3487 goto priv_insn;
3488 tcg_gen_trunc_tl_i32(cpu_ssr, cpu_tmp0);
3489 break;
3490 default:
3491 goto illegal_insn;
3492 }
3493 #else
3494 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3495 if (dc->def->nwindows != 32)
3496 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3497 (1 << dc->def->nwindows) - 1);
3498 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3499 #endif
3500 }
3501 break;
3502 case 0x33: /* wrtbr, UA2005 wrhpr */
3503 {
3504 #ifndef TARGET_SPARC64
3505 if (!supervisor(dc))
3506 goto priv_insn;
3507 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3508 #else
3509 CHECK_IU_FEATURE(dc, HYPV);
3510 if (!hypervisor(dc))
3511 goto priv_insn;
3512 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3513 switch (rd) {
3514 case 0: // hpstate
3515 // XXX gen_op_wrhpstate();
3516 save_state(dc, cpu_cond);
3517 gen_op_next_insn();
3518 tcg_gen_exit_tb(0);
3519 dc->is_br = 1;
3520 break;
3521 case 1: // htstate
3522 // XXX gen_op_wrhtstate();
3523 break;
3524 case 3: // hintp
3525 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3526 break;
3527 case 5: // htba
3528 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3529 break;
3530 case 31: // hstick_cmpr
3531 {
3532 TCGv r_tickptr;
3533
3534 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3535 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3536 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3537 offsetof(CPUState, hstick));
3538 tcg_gen_helper_0_2(helper_tick_set_limit,
3539 r_tickptr, cpu_hstick_cmpr);
3540 tcg_temp_free(r_tickptr);
3541 }
3542 break;
3543 case 6: // hver readonly
3544 default:
3545 goto illegal_insn;
3546 }
3547 #endif
3548 }
3549 break;
3550 #endif
3551 #ifdef TARGET_SPARC64
3552 case 0x2c: /* V9 movcc */
3553 {
3554 int cc = GET_FIELD_SP(insn, 11, 12);
3555 int cond = GET_FIELD_SP(insn, 14, 17);
3556 TCGv r_cond;
3557 int l1;
3558
3559 r_cond = tcg_temp_new(TCG_TYPE_TL);
3560 if (insn & (1 << 18)) {
3561 if (cc == 0)
3562 gen_cond(r_cond, 0, cond);
3563 else if (cc == 2)
3564 gen_cond(r_cond, 1, cond);
3565 else
3566 goto illegal_insn;
3567 } else {
3568 gen_fcond(r_cond, cc, cond);
3569 }
3570
3571 l1 = gen_new_label();
3572
3573 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3574 if (IS_IMM) { /* immediate */
3575 TCGv r_const;
3576
3577 rs2 = GET_FIELD_SPs(insn, 0, 10);
3578 r_const = tcg_const_tl((int)rs2);
3579 gen_movl_TN_reg(rd, r_const);
3580 tcg_temp_free(r_const);
3581 } else {
3582 rs2 = GET_FIELD_SP(insn, 0, 4);
3583 gen_movl_reg_TN(rs2, cpu_tmp0);
3584 gen_movl_TN_reg(rd, cpu_tmp0);
3585 }
3586 gen_set_label(l1);
3587 tcg_temp_free(r_cond);
3588 break;
3589 }
3590 case 0x2d: /* V9 sdivx */
3591 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3592 gen_movl_TN_reg(rd, cpu_dst);
3593 break;
3594 case 0x2e: /* V9 popc */
3595 {
3596 cpu_src2 = get_src2(insn, cpu_src2);
3597 tcg_gen_helper_1_1(helper_popc, cpu_dst,
3598 cpu_src2);
3599 gen_movl_TN_reg(rd, cpu_dst);
3600 }
3601 case 0x2f: /* V9 movr */
3602 {
3603 int cond = GET_FIELD_SP(insn, 10, 12);
3604 int l1;
3605
3606 cpu_src1 = get_src1(insn, cpu_src1);
3607
3608 l1 = gen_new_label();
3609
3610 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3611 cpu_src1, 0, l1);
3612 if (IS_IMM) { /* immediate */
3613 TCGv r_const;
3614
3615 rs2 = GET_FIELD_SPs(insn, 0, 9);
3616 r_const = tcg_const_tl((int)rs2);
3617 gen_movl_TN_reg(rd, r_const);
3618 tcg_temp_free(r_const);
3619 } else {
3620 rs2 = GET_FIELD_SP(insn, 0, 4);
3621 gen_movl_reg_TN(rs2, cpu_tmp0);
3622 gen_movl_TN_reg(rd, cpu_tmp0);
3623 }
3624 gen_set_label(l1);
3625 break;
3626 }
3627 #endif
3628 default:
3629 goto illegal_insn;
3630 }
3631 }
3632 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3633 #ifdef TARGET_SPARC64
3634 int opf = GET_FIELD_SP(insn, 5, 13);
3635 rs1 = GET_FIELD(insn, 13, 17);
3636 rs2 = GET_FIELD(insn, 27, 31);
3637 if (gen_trap_ifnofpu(dc, cpu_cond))
3638 goto jmp_insn;
3639
3640 switch (opf) {
3641 case 0x000: /* VIS I edge8cc */
3642 case 0x001: /* VIS II edge8n */
3643 case 0x002: /* VIS I edge8lcc */
3644 case 0x003: /* VIS II edge8ln */
3645 case 0x004: /* VIS I edge16cc */
3646 case 0x005: /* VIS II edge16n */
3647 case 0x006: /* VIS I edge16lcc */
3648 case 0x007: /* VIS II edge16ln */
3649 case 0x008: /* VIS I edge32cc */
3650 case 0x009: /* VIS II edge32n */
3651 case 0x00a: /* VIS I edge32lcc */
3652 case 0x00b: /* VIS II edge32ln */
3653 // XXX
3654 goto illegal_insn;
3655 case 0x010: /* VIS I array8 */
3656 CHECK_FPU_FEATURE(dc, VIS1);
3657 cpu_src1 = get_src1(insn, cpu_src1);
3658 gen_movl_reg_TN(rs2, cpu_src2);
3659 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3660 cpu_src2);
3661 gen_movl_TN_reg(rd, cpu_dst);
3662 break;
3663 case 0x012: /* VIS I array16 */
3664 CHECK_FPU_FEATURE(dc, VIS1);
3665 cpu_src1 = get_src1(insn, cpu_src1);
3666 gen_movl_reg_TN(rs2, cpu_src2);
3667 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3668 cpu_src2);
3669 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3670 gen_movl_TN_reg(rd, cpu_dst);
3671 break;
3672 case 0x014: /* VIS I array32 */
3673 CHECK_FPU_FEATURE(dc, VIS1);
3674 cpu_src1 = get_src1(insn, cpu_src1);
3675 gen_movl_reg_TN(rs2, cpu_src2);
3676 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3677 cpu_src2);
3678 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3679 gen_movl_TN_reg(rd, cpu_dst);
3680 break;
3681 case 0x018: /* VIS I alignaddr */
3682 CHECK_FPU_FEATURE(dc, VIS1);
3683 cpu_src1 = get_src1(insn, cpu_src1);
3684 gen_movl_reg_TN(rs2, cpu_src2);
3685 tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
3686 cpu_src2);
3687 gen_movl_TN_reg(rd, cpu_dst);
3688 break;
3689 case 0x019: /* VIS II bmask */
3690 case 0x01a: /* VIS I alignaddrl */
3691 // XXX
3692 goto illegal_insn;
3693 case 0x020: /* VIS I fcmple16 */
3694 CHECK_FPU_FEATURE(dc, VIS1);
3695 gen_op_load_fpr_DT0(DFPREG(rs1));
3696 gen_op_load_fpr_DT1(DFPREG(rs2));
3697 tcg_gen_helper_0_0(helper_fcmple16);
3698 gen_op_store_DT0_fpr(DFPREG(rd));
3699 break;
3700 case 0x022: /* VIS I fcmpne16 */
3701 CHECK_FPU_FEATURE(dc, VIS1);
3702 gen_op_load_fpr_DT0(DFPREG(rs1));
3703 gen_op_load_fpr_DT1(DFPREG(rs2));
3704 tcg_gen_helper_0_0(helper_fcmpne16);
3705 gen_op_store_DT0_fpr(DFPREG(rd));
3706 break;
3707 case 0x024: /* VIS I fcmple32 */
3708 CHECK_FPU_FEATURE(dc, VIS1);
3709 gen_op_load_fpr_DT0(DFPREG(rs1));
3710 gen_op_load_fpr_DT1(DFPREG(rs2));
3711 tcg_gen_helper_0_0(helper_fcmple32);
3712 gen_op_store_DT0_fpr(DFPREG(rd));
3713 break;
3714 case 0x026: /* VIS I fcmpne32 */
3715 CHECK_FPU_FEATURE(dc, VIS1);
3716 gen_op_load_fpr_DT0(DFPREG(rs1));
3717 gen_op_load_fpr_DT1(DFPREG(rs2));
3718 tcg_gen_helper_0_0(helper_fcmpne32);
3719 gen_op_store_DT0_fpr(DFPREG(rd));
3720 break;
3721 case 0x028: /* VIS I fcmpgt16 */
3722 CHECK_FPU_FEATURE(dc, VIS1);
3723 gen_op_load_fpr_DT0(DFPREG(rs1));
3724 gen_op_load_fpr_DT1(DFPREG(rs2));
3725 tcg_gen_helper_0_0(helper_fcmpgt16);
3726 gen_op_store_DT0_fpr(DFPREG(rd));
3727 break;
3728 case 0x02a: /* VIS I fcmpeq16 */
3729 CHECK_FPU_FEATURE(dc, VIS1);
3730 gen_op_load_fpr_DT0(DFPREG(rs1));
3731 gen_op_load_fpr_DT1(DFPREG(rs2));
3732 tcg_gen_helper_0_0(helper_fcmpeq16);
3733 gen_op_store_DT0_fpr(DFPREG(rd));
3734 break;
3735 case 0x02c: /* VIS I fcmpgt32 */
3736 CHECK_FPU_FEATURE(dc, VIS1);
3737 gen_op_load_fpr_DT0(DFPREG(rs1));
3738 gen_op_load_fpr_DT1(DFPREG(rs2));
3739 tcg_gen_helper_0_0(helper_fcmpgt32);
3740 gen_op_store_DT0_fpr(DFPREG(rd));
3741 break;
3742 case 0x02e: /* VIS I fcmpeq32 */
3743 CHECK_FPU_FEATURE(dc, VIS1);
3744 gen_op_load_fpr_DT0(DFPREG(rs1));
3745 gen_op_load_fpr_DT1(DFPREG(rs2));
3746 tcg_gen_helper_0_0(helper_fcmpeq32);
3747 gen_op_store_DT0_fpr(DFPREG(rd));
3748 break;
3749 case 0x031: /* VIS I fmul8x16 */
3750 CHECK_FPU_FEATURE(dc, VIS1);
3751 gen_op_load_fpr_DT0(DFPREG(rs1));
3752 gen_op_load_fpr_DT1(DFPREG(rs2));
3753 tcg_gen_helper_0_0(helper_fmul8x16);
3754 gen_op_store_DT0_fpr(DFPREG(rd));
3755 break;
3756 case 0x033: /* VIS I fmul8x16au */
3757 CHECK_FPU_FEATURE(dc, VIS1);
3758 gen_op_load_fpr_DT0(DFPREG(rs1));
3759 gen_op_load_fpr_DT1(DFPREG(rs2));
3760 tcg_gen_helper_0_0(helper_fmul8x16au);
3761 gen_op_store_DT0_fpr(DFPREG(rd));
3762 break;
3763 case 0x035: /* VIS I fmul8x16al */
3764 CHECK_FPU_FEATURE(dc, VIS1);
3765 gen_op_load_fpr_DT0(DFPREG(rs1));
3766 gen_op_load_fpr_DT1(DFPREG(rs2));
3767 tcg_gen_helper_0_0(helper_fmul8x16al);
3768 gen_op_store_DT0_fpr(DFPREG(rd));
3769 break;
3770 case 0x036: /* VIS I fmul8sux16 */
3771 CHECK_FPU_FEATURE(dc, VIS1);
3772 gen_op_load_fpr_DT0(DFPREG(rs1));
3773 gen_op_load_fpr_DT1(DFPREG(rs2));
3774 tcg_gen_helper_0_0(helper_fmul8sux16);
3775 gen_op_store_DT0_fpr(DFPREG(rd));
3776 break;
3777 case 0x037: /* VIS I fmul8ulx16 */
3778 CHECK_FPU_FEATURE(dc, VIS1);
3779 gen_op_load_fpr_DT0(DFPREG(rs1));
3780 gen_op_load_fpr_DT1(DFPREG(rs2));
3781 tcg_gen_helper_0_0(helper_fmul8ulx16);
3782 gen_op_store_DT0_fpr(DFPREG(rd));
3783 break;
3784 case 0x038: /* VIS I fmuld8sux16 */
3785 CHECK_FPU_FEATURE(dc, VIS1);
3786 gen_op_load_fpr_DT0(DFPREG(rs1));
3787 gen_op_load_fpr_DT1(DFPREG(rs2));
3788 tcg_gen_helper_0_0(helper_fmuld8sux16);
3789 gen_op_store_DT0_fpr(DFPREG(rd));
3790 break;
3791 case 0x039: /* VIS I fmuld8ulx16 */
3792 CHECK_FPU_FEATURE(dc, VIS1);
3793 gen_op_load_fpr_DT0(DFPREG(rs1));
3794 gen_op_load_fpr_DT1(DFPREG(rs2));
3795 tcg_gen_helper_0_0(helper_fmuld8ulx16);
3796 gen_op_store_DT0_fpr(DFPREG(rd));
3797 break;
3798 case 0x03a: /* VIS I fpack32 */
3799 case 0x03b: /* VIS I fpack16 */
3800 case 0x03d: /* VIS I fpackfix */
3801 case 0x03e: /* VIS I pdist */
3802 // XXX
3803 goto illegal_insn;
3804 case 0x048: /* VIS I faligndata */
3805 CHECK_FPU_FEATURE(dc, VIS1);
3806 gen_op_load_fpr_DT0(DFPREG(rs1));
3807 gen_op_load_fpr_DT1(DFPREG(rs2));
3808 tcg_gen_helper_0_0(helper_faligndata);
3809 gen_op_store_DT0_fpr(DFPREG(rd));
3810 break;
3811 case 0x04b: /* VIS I fpmerge */
3812 CHECK_FPU_FEATURE(dc, VIS1);
3813 gen_op_load_fpr_DT0(DFPREG(rs1));
3814 gen_op_load_fpr_DT1(DFPREG(rs2));
3815 tcg_gen_helper_0_0(helper_fpmerge);
3816 gen_op_store_DT0_fpr(DFPREG(rd));
3817 break;
3818 case 0x04c: /* VIS II bshuffle */
3819 // XXX
3820 goto illegal_insn;
3821 case 0x04d: /* VIS I fexpand */
3822 CHECK_FPU_FEATURE(dc, VIS1);
3823 gen_op_load_fpr_DT0(DFPREG(rs1));
3824 gen_op_load_fpr_DT1(DFPREG(rs2));
3825 tcg_gen_helper_0_0(helper_fexpand);
3826 gen_op_store_DT0_fpr(DFPREG(rd));
3827 break;
3828 case 0x050: /* VIS I fpadd16 */
3829 CHECK_FPU_FEATURE(dc, VIS1);
3830 gen_op_load_fpr_DT0(DFPREG(rs1));
3831 gen_op_load_fpr_DT1(DFPREG(rs2));
3832 tcg_gen_helper_0_0(helper_fpadd16);
3833 gen_op_store_DT0_fpr(DFPREG(rd));
3834 break;
3835 case 0x051: /* VIS I fpadd16s */
3836 CHECK_FPU_FEATURE(dc, VIS1);
3837 tcg_gen_helper_1_2(helper_fpadd16s, cpu_fpr[rd],
3838 cpu_fpr[rs1], cpu_fpr[rs2]);
3839 break;
3840 case 0x052: /* VIS I fpadd32 */
3841 CHECK_FPU_FEATURE(dc, VIS1);
3842 gen_op_load_fpr_DT0(DFPREG(rs1));
3843 gen_op_load_fpr_DT1(DFPREG(rs2));
3844 tcg_gen_helper_0_0(helper_fpadd32);
3845 gen_op_store_DT0_fpr(DFPREG(rd));
3846 break;
3847 case 0x053: /* VIS I fpadd32s */
3848 CHECK_FPU_FEATURE(dc, VIS1);
3849 tcg_gen_helper_1_2(helper_fpadd32s, cpu_fpr[rd],
3850 cpu_fpr[rs1], cpu_fpr[rs2]);
3851 break;
3852 case 0x054: /* VIS I fpsub16 */
3853 CHECK_FPU_FEATURE(dc, VIS1);
3854 gen_op_load_fpr_DT0(DFPREG(rs1));
3855 gen_op_load_fpr_DT1(DFPREG(rs2));
3856 tcg_gen_helper_0_0(helper_fpsub16);
3857 gen_op_store_DT0_fpr(DFPREG(rd));
3858 break;
3859 case 0x055: /* VIS I fpsub16s */
3860 CHECK_FPU_FEATURE(dc, VIS1);
3861 tcg_gen_helper_1_2(helper_fpsub16s, cpu_fpr[rd],
3862 cpu_fpr[rs1], cpu_fpr[rs2]);
3863 break;
3864 case 0x056: /* VIS I fpsub32 */
3865 CHECK_FPU_FEATURE(dc, VIS1);
3866 gen_op_load_fpr_DT0(DFPREG(rs1));
3867 gen_op_load_fpr_DT1(DFPREG(rs2));
3868 tcg_gen_helper_0_0(helper_fpsub32);
3869 gen_op_store_DT0_fpr(DFPREG(rd));
3870 break;
3871 case 0x057: /* VIS I fpsub32s */
3872 CHECK_FPU_FEATURE(dc, VIS1);
3873 tcg_gen_helper_1_2(helper_fpsub32s, cpu_fpr[rd],
3874 cpu_fpr[rs1], cpu_fpr[rs2]);
3875 break;
3876 case 0x060: /* VIS I fzero */
3877 CHECK_FPU_FEATURE(dc, VIS1);
3878 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3879 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3880 break;
3881 case 0x061: /* VIS I fzeros */
3882 CHECK_FPU_FEATURE(dc, VIS1);
3883 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3884 break;
3885 case 0x062: /* VIS I fnor */
3886 CHECK_FPU_FEATURE(dc, VIS1);
3887 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3888 cpu_fpr[DFPREG(rs2)]);
3889 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3890 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3891 cpu_fpr[DFPREG(rs2) + 1]);
3892 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3893 break;
3894 case 0x063: /* VIS I fnors */
3895 CHECK_FPU_FEATURE(dc, VIS1);
3896 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3897 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3898 break;
3899 case 0x064: /* VIS I fandnot2 */
3900 CHECK_FPU_FEATURE(dc, VIS1);
3901 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
3902 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3903 cpu_fpr[DFPREG(rs2)]);
3904 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
3905 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3906 cpu_fpr[DFPREG(rs2) + 1]);
3907 break;
3908 case 0x065: /* VIS I fandnot2s */
3909 CHECK_FPU_FEATURE(dc, VIS1);
3910 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
3911 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
3912 break;
3913 case 0x066: /* VIS I fnot2 */
3914 CHECK_FPU_FEATURE(dc, VIS1);
3915 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3916 -1);
3917 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3918 cpu_fpr[DFPREG(rs2) + 1], -1);
3919 break;
3920 case 0x067: /* VIS I fnot2s */
3921 CHECK_FPU_FEATURE(dc, VIS1);
3922 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs2], -1);
3923 break;
3924 case 0x068: /* VIS I fandnot1 */
3925 CHECK_FPU_FEATURE(dc, VIS1);
3926 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3927 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3928 cpu_fpr[DFPREG(rs1)]);
3929 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3930 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3931 cpu_fpr[DFPREG(rs1) + 1]);
3932 break;
3933 case 0x069: /* VIS I fandnot1s */
3934 CHECK_FPU_FEATURE(dc, VIS1);
3935 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3936 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3937 break;
3938 case 0x06a: /* VIS I fnot1 */
3939 CHECK_FPU_FEATURE(dc, VIS1);
3940 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3941 -1);
3942 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3943 cpu_fpr[DFPREG(rs1) + 1], -1);
3944 break;
3945 case 0x06b: /* VIS I fnot1s */
3946 CHECK_FPU_FEATURE(dc, VIS1);
3947 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs1], -1);
3948 break;
3949 case 0x06c: /* VIS I fxor */
3950 CHECK_FPU_FEATURE(dc, VIS1);
3951 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3952 cpu_fpr[DFPREG(rs2)]);
3953 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3954 cpu_fpr[DFPREG(rs1) + 1],
3955 cpu_fpr[DFPREG(rs2) + 1]);
3956 break;
3957 case 0x06d: /* VIS I fxors */
3958 CHECK_FPU_FEATURE(dc, VIS1);
3959 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3960 break;
3961 case 0x06e: /* VIS I fnand */
3962 CHECK_FPU_FEATURE(dc, VIS1);
3963 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3964 cpu_fpr[DFPREG(rs2)]);
3965 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3966 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3967 cpu_fpr[DFPREG(rs2) + 1]);
3968 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3969 break;
3970 case 0x06f: /* VIS I fnands */
3971 CHECK_FPU_FEATURE(dc, VIS1);
3972 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3973 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3974 break;
3975 case 0x070: /* VIS I fand */
3976 CHECK_FPU_FEATURE(dc, VIS1);
3977 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3978 cpu_fpr[DFPREG(rs2)]);
3979 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3980 cpu_fpr[DFPREG(rs1) + 1],
3981 cpu_fpr[DFPREG(rs2) + 1]);
3982 break;
3983 case 0x071: /* VIS I fands */
3984 CHECK_FPU_FEATURE(dc, VIS1);
3985 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3986 break;
3987 case 0x072: /* VIS I fxnor */
3988 CHECK_FPU_FEATURE(dc, VIS1);
3989 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3990 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3991 cpu_fpr[DFPREG(rs1)]);
3992 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3993 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3994 cpu_fpr[DFPREG(rs1) + 1]);
3995 break;
3996 case 0x073: /* VIS I fxnors */
3997 CHECK_FPU_FEATURE(dc, VIS1);
3998 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3999 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4000 break;
4001 case 0x074: /* VIS I fsrc1 */
4002 CHECK_FPU_FEATURE(dc, VIS1);
4003 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
4004 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
4005 cpu_fpr[DFPREG(rs1) + 1]);
4006 break;
4007 case 0x075: /* VIS I fsrc1s */
4008 CHECK_FPU_FEATURE(dc, VIS1);
4009 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
4010 break;
4011 case 0x076: /* VIS I fornot2 */
4012 CHECK_FPU_FEATURE(dc, VIS1);
4013 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
4014 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4015 cpu_fpr[DFPREG(rs2)]);
4016 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
4017 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4018 cpu_fpr[DFPREG(rs2) + 1]);
4019 break;
4020 case 0x077: /* VIS I fornot2s */
4021 CHECK_FPU_FEATURE(dc, VIS1);
4022 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
4023 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
4024 break;
4025 case 0x078: /* VIS I fsrc2 */
4026 CHECK_FPU_FEATURE(dc, VIS1);
4027 gen_op_load_fpr_DT0(DFPREG(rs2));
4028 gen_op_store_DT0_fpr(DFPREG(rd));
4029 break;
4030 case 0x079: /* VIS I fsrc2s */
4031 CHECK_FPU_FEATURE(dc, VIS1);
4032 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4033 break;
4034 case 0x07a: /* VIS I fornot1 */
4035 CHECK_FPU_FEATURE(dc, VIS1);
4036 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4037 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4038 cpu_fpr[DFPREG(rs1)]);
4039 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4040 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4041 cpu_fpr[DFPREG(rs1) + 1]);
4042 break;
4043 case 0x07b: /* VIS I fornot1s */
4044 CHECK_FPU_FEATURE(dc, VIS1);
4045 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4046 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4047 break;
4048 case 0x07c: /* VIS I for */
4049 CHECK_FPU_FEATURE(dc, VIS1);
4050 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4051 cpu_fpr[DFPREG(rs2)]);
4052 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4053 cpu_fpr[DFPREG(rs1) + 1],
4054 cpu_fpr[DFPREG(rs2) + 1]);
4055 break;
4056 case 0x07d: /* VIS I fors */
4057 CHECK_FPU_FEATURE(dc, VIS1);
4058 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4059 break;
4060 case 0x07e: /* VIS I fone */
4061 CHECK_FPU_FEATURE(dc, VIS1);
4062 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4063 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4064 break;
4065 case 0x07f: /* VIS I fones */
4066 CHECK_FPU_FEATURE(dc, VIS1);
4067 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4068 break;
4069 case 0x080: /* VIS I shutdown */
4070 case 0x081: /* VIS II siam */
4071 // XXX
4072 goto illegal_insn;
4073 default:
4074 goto illegal_insn;
4075 }
4076 #else
4077 goto ncp_insn;
4078 #endif
4079 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4080 #ifdef TARGET_SPARC64
4081 goto illegal_insn;
4082 #else
4083 goto ncp_insn;
4084 #endif
4085 #ifdef TARGET_SPARC64
4086 } else if (xop == 0x39) { /* V9 return */
4087 TCGv r_const;
4088
4089 save_state(dc, cpu_cond);
4090 cpu_src1 = get_src1(insn, cpu_src1);
4091 if (IS_IMM) { /* immediate */
4092 rs2 = GET_FIELDs(insn, 19, 31);
4093 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4094 } else { /* register */
4095 rs2 = GET_FIELD(insn, 27, 31);
4096 if (rs2) {
4097 gen_movl_reg_TN(rs2, cpu_src2);
4098 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4099 } else
4100 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4101 }
4102 tcg_gen_helper_0_0(helper_restore);
4103 gen_mov_pc_npc(dc, cpu_cond);
4104 r_const = tcg_const_i32(3);
4105 tcg_gen_helper_0_2(helper_check_align, cpu_dst, r_const);
4106 tcg_temp_free(r_const);
4107 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4108 dc->npc = DYNAMIC_PC;
4109 goto jmp_insn;
4110 #endif
4111 } else {
4112 cpu_src1 = get_src1(insn, cpu_src1);
4113 if (IS_IMM) { /* immediate */
4114 rs2 = GET_FIELDs(insn, 19, 31);
4115 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4116 } else { /* register */
4117 rs2 = GET_FIELD(insn, 27, 31);
4118 if (rs2) {
4119 gen_movl_reg_TN(rs2, cpu_src2);
4120 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4121 } else
4122 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4123 }
4124 switch (xop) {
4125 case 0x38: /* jmpl */
4126 {
4127 TCGv r_const;
4128
4129 r_const = tcg_const_tl(dc->pc);
4130 gen_movl_TN_reg(rd, r_const);
4131 tcg_temp_free(r_const);
4132 gen_mov_pc_npc(dc, cpu_cond);
4133 r_const = tcg_const_i32(3);
4134 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4135 r_const);
4136 tcg_temp_free(r_const);
4137 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4138 dc->npc = DYNAMIC_PC;
4139 }
4140 goto jmp_insn;
4141 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4142 case 0x39: /* rett, V9 return */
4143 {
4144 TCGv r_const;
4145
4146 if (!supervisor(dc))
4147 goto priv_insn;
4148 gen_mov_pc_npc(dc, cpu_cond);
4149 r_const = tcg_const_i32(3);
4150 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4151 r_const);
4152 tcg_temp_free(r_const);
4153 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4154 dc->npc = DYNAMIC_PC;
4155 tcg_gen_helper_0_0(helper_rett);
4156 }
4157 goto jmp_insn;
4158 #endif
4159 case 0x3b: /* flush */
4160 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4161 goto unimp_flush;
4162 tcg_gen_helper_0_1(helper_flush, cpu_dst);
4163 break;
4164 case 0x3c: /* save */
4165 save_state(dc, cpu_cond);
4166 tcg_gen_helper_0_0(helper_save);
4167 gen_movl_TN_reg(rd, cpu_dst);
4168 break;
4169 case 0x3d: /* restore */
4170 save_state(dc, cpu_cond);
4171 tcg_gen_helper_0_0(helper_restore);
4172 gen_movl_TN_reg(rd, cpu_dst);
4173 break;
4174 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4175 case 0x3e: /* V9 done/retry */
4176 {
4177 switch (rd) {
4178 case 0:
4179 if (!supervisor(dc))
4180 goto priv_insn;
4181 dc->npc = DYNAMIC_PC;
4182 dc->pc = DYNAMIC_PC;
4183 tcg_gen_helper_0_0(helper_done);
4184 goto jmp_insn;
4185 case 1:
4186 if (!supervisor(dc))
4187 goto priv_insn;
4188 dc->npc = DYNAMIC_PC;
4189 dc->pc = DYNAMIC_PC;
4190 tcg_gen_helper_0_0(helper_retry);
4191 goto jmp_insn;
4192 default:
4193 goto illegal_insn;
4194 }
4195 }
4196 break;
4197 #endif
4198 default:
4199 goto illegal_insn;
4200 }
4201 }
4202 break;
4203 }
4204 break;
4205 case 3: /* load/store instructions */
4206 {
4207 unsigned int xop = GET_FIELD(insn, 7, 12);
4208
4209 cpu_src1 = get_src1(insn, cpu_src1);
4210 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4211 rs2 = GET_FIELD(insn, 27, 31);
4212 gen_movl_reg_TN(rs2, cpu_src2);
4213 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4214 } else if (IS_IMM) { /* immediate */
4215 rs2 = GET_FIELDs(insn, 19, 31);
4216 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
4217 } else { /* register */
4218 rs2 = GET_FIELD(insn, 27, 31);
4219 if (rs2 != 0) {
4220 gen_movl_reg_TN(rs2, cpu_src2);
4221 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4222 } else
4223 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4224 }
4225 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4226 (xop > 0x17 && xop <= 0x1d ) ||
4227 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4228 switch (xop) {
4229 case 0x0: /* load unsigned word */
4230 gen_address_mask(dc, cpu_addr);
4231 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4232 break;
4233 case 0x1: /* load unsigned byte */
4234 gen_address_mask(dc, cpu_addr);
4235 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4236 break;
4237 case 0x2: /* load unsigned halfword */
4238 gen_address_mask(dc, cpu_addr);
4239 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4240 break;
4241 case 0x3: /* load double word */
4242 if (rd & 1)
4243 goto illegal_insn;
4244 else {
4245 TCGv r_const;
4246
4247 save_state(dc, cpu_cond);
4248 r_const = tcg_const_i32(7);
4249 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4250 r_const); // XXX remove
4251 tcg_temp_free(r_const);
4252 gen_address_mask(dc, cpu_addr);
4253 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4254 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4255 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4256 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4257 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4258 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4259 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4260 }
4261 break;
4262 case 0x9: /* load signed byte */
4263 gen_address_mask(dc, cpu_addr);
4264 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4265 break;
4266 case 0xa: /* load signed halfword */
4267 gen_address_mask(dc, cpu_addr);
4268 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4269 break;
4270 case 0xd: /* ldstub -- XXX: should be atomically */
4271 {
4272 TCGv r_const;
4273
4274 gen_address_mask(dc, cpu_addr);
4275 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4276 r_const = tcg_const_tl(0xff);
4277 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4278 tcg_temp_free(r_const);
4279 }
4280 break;
4281 case 0x0f: /* swap register with memory. Also
4282 atomically */
4283 CHECK_IU_FEATURE(dc, SWAP);
4284 gen_movl_reg_TN(rd, cpu_val);
4285 gen_address_mask(dc, cpu_addr);
4286 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4287 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4288 tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
4289 break;
4290 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4291 case 0x10: /* load word alternate */
4292 #ifndef TARGET_SPARC64
4293 if (IS_IMM)
4294 goto illegal_insn;
4295 if (!supervisor(dc))
4296 goto priv_insn;
4297 #endif
4298 save_state(dc, cpu_cond);
4299 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4300 break;
4301 case 0x11: /* load unsigned byte alternate */
4302 #ifndef TARGET_SPARC64
4303 if (IS_IMM)
4304 goto illegal_insn;
4305 if (!supervisor(dc))
4306 goto priv_insn;
4307 #endif
4308 save_state(dc, cpu_cond);
4309 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4310 break;
4311 case 0x12: /* load unsigned halfword alternate */
4312 #ifndef TARGET_SPARC64
4313 if (IS_IMM)
4314 goto illegal_insn;
4315 if (!supervisor(dc))
4316 goto priv_insn;
4317 #endif
4318 save_state(dc, cpu_cond);
4319 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4320 break;
4321 case 0x13: /* load double word alternate */
4322 #ifndef TARGET_SPARC64
4323 if (IS_IMM)
4324 goto illegal_insn;
4325 if (!supervisor(dc))
4326 goto priv_insn;
4327 #endif
4328 if (rd & 1)
4329 goto illegal_insn;
4330 save_state(dc, cpu_cond);
4331 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4332 goto skip_move;
4333 case 0x19: /* load signed byte alternate */
4334 #ifndef TARGET_SPARC64
4335 if (IS_IMM)
4336 goto illegal_insn;
4337 if (!supervisor(dc))
4338 goto priv_insn;
4339 #endif
4340 save_state(dc, cpu_cond);
4341 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4342 break;
4343 case 0x1a: /* load signed halfword alternate */
4344 #ifndef TARGET_SPARC64
4345 if (IS_IMM)
4346 goto illegal_insn;
4347 if (!supervisor(dc))
4348 goto priv_insn;
4349 #endif
4350 save_state(dc, cpu_cond);
4351 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4352 break;
4353 case 0x1d: /* ldstuba -- XXX: should be atomically */
4354 #ifndef TARGET_SPARC64
4355 if (IS_IMM)
4356 goto illegal_insn;
4357 if (!supervisor(dc))
4358 goto priv_insn;
4359 #endif
4360 save_state(dc, cpu_cond);
4361 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4362 break;
4363 case 0x1f: /* swap reg with alt. memory. Also
4364 atomically */
4365 CHECK_IU_FEATURE(dc, SWAP);
4366 #ifndef TARGET_SPARC64
4367 if (IS_IMM)
4368 goto illegal_insn;
4369 if (!supervisor(dc))
4370 goto priv_insn;
4371 #endif
4372 save_state(dc, cpu_cond);
4373 gen_movl_reg_TN(rd, cpu_val);
4374 gen_swap_asi(cpu_val, cpu_addr, insn);
4375 break;
4376
4377 #ifndef TARGET_SPARC64
4378 case 0x30: /* ldc */
4379 case 0x31: /* ldcsr */
4380 case 0x33: /* lddc */
4381 goto ncp_insn;
4382 #endif
4383 #endif
4384 #ifdef TARGET_SPARC64
4385 case 0x08: /* V9 ldsw */
4386 gen_address_mask(dc, cpu_addr);
4387 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4388 break;
4389 case 0x0b: /* V9 ldx */
4390 gen_address_mask(dc, cpu_addr);
4391 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4392 break;
4393 case 0x18: /* V9 ldswa */
4394 save_state(dc, cpu_cond);
4395 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4396 break;
4397 case 0x1b: /* V9 ldxa */
4398 save_state(dc, cpu_cond);
4399 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4400 break;
4401 case 0x2d: /* V9 prefetch, no effect */
4402 goto skip_move;
4403 case 0x30: /* V9 ldfa */
4404 save_state(dc, cpu_cond);
4405 gen_ldf_asi(cpu_addr, insn, 4, rd);
4406 goto skip_move;
4407 case 0x33: /* V9 lddfa */
4408 save_state(dc, cpu_cond);
4409 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4410 goto skip_move;
4411 case 0x3d: /* V9 prefetcha, no effect */
4412 goto skip_move;
4413 case 0x32: /* V9 ldqfa */
4414 CHECK_FPU_FEATURE(dc, FLOAT128);
4415 save_state(dc, cpu_cond);
4416 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4417 goto skip_move;
4418 #endif
4419 default:
4420 goto illegal_insn;
4421 }
4422 gen_movl_TN_reg(rd, cpu_val);
4423 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4424 skip_move: ;
4425 #endif
4426 } else if (xop >= 0x20 && xop < 0x24) {
4427 if (gen_trap_ifnofpu(dc, cpu_cond))
4428 goto jmp_insn;
4429 save_state(dc, cpu_cond);
4430 switch (xop) {
4431 case 0x20: /* load fpreg */
4432 gen_address_mask(dc, cpu_addr);
4433 tcg_gen_qemu_ld32u(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4434 break;
4435 case 0x21: /* ldfsr, V9 ldxfsr */
4436 #ifdef TARGET_SPARC64
4437 gen_address_mask(dc, cpu_addr);
4438 if (rd == 1) {
4439 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4440 tcg_gen_helper_0_1(helper_ldxfsr, cpu_tmp64);
4441 } else
4442 #else
4443 {
4444 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4445 tcg_gen_helper_0_1(helper_ldfsr, cpu_tmp32);
4446 }
4447 #endif
4448 break;
4449 case 0x22: /* load quad fpreg */
4450 {
4451 TCGv r_const;
4452
4453 CHECK_FPU_FEATURE(dc, FLOAT128);
4454 r_const = tcg_const_i32(dc->mem_idx);
4455 tcg_gen_helper_0_2(helper_ldqf, cpu_addr, r_const);
4456 tcg_temp_free(r_const);
4457 gen_op_store_QT0_fpr(QFPREG(rd));
4458 }
4459 break;
4460 case 0x23: /* load double fpreg */
4461 {
4462 TCGv r_const;
4463
4464 r_const = tcg_const_i32(dc->mem_idx);
4465 tcg_gen_helper_0_2(helper_lddf, cpu_addr, r_const);
4466 tcg_temp_free(r_const);
4467 gen_op_store_DT0_fpr(DFPREG(rd));
4468 }
4469 break;
4470 default:
4471 goto illegal_insn;
4472 }
4473 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4474 xop == 0xe || xop == 0x1e) {
4475 gen_movl_reg_TN(rd, cpu_val);
4476 switch (xop) {
4477 case 0x4: /* store word */
4478 gen_address_mask(dc, cpu_addr);
4479 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4480 break;
4481 case 0x5: /* store byte */
4482 gen_address_mask(dc, cpu_addr);
4483 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4484 break;
4485 case 0x6: /* store halfword */
4486 gen_address_mask(dc, cpu_addr);
4487 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4488 break;
4489 case 0x7: /* store double word */
4490 if (rd & 1)
4491 goto illegal_insn;
4492 else {
4493 TCGv r_low, r_const;
4494
4495 save_state(dc, cpu_cond);
4496 gen_address_mask(dc, cpu_addr);
4497 r_const = tcg_const_i32(7);
4498 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4499 r_const); // XXX remove
4500 tcg_temp_free(r_const);
4501 r_low = tcg_temp_new(TCG_TYPE_TL);
4502 gen_movl_reg_TN(rd + 1, r_low);
4503 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_val,
4504 r_low);
4505 tcg_temp_free(r_low);
4506 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4507 }
4508 break;
4509 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4510 case 0x14: /* store word alternate */
4511 #ifndef TARGET_SPARC64
4512 if (IS_IMM)
4513 goto illegal_insn;
4514 if (!supervisor(dc))
4515 goto priv_insn;
4516 #endif
4517 save_state(dc, cpu_cond);
4518 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4519 break;
4520 case 0x15: /* store byte alternate */
4521 #ifndef TARGET_SPARC64
4522 if (IS_IMM)
4523 goto illegal_insn;
4524 if (!supervisor(dc))
4525 goto priv_insn;
4526 #endif
4527 save_state(dc, cpu_cond);
4528 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4529 break;
4530 case 0x16: /* store halfword alternate */
4531 #ifndef TARGET_SPARC64
4532 if (IS_IMM)
4533 goto illegal_insn;
4534 if (!supervisor(dc))
4535 goto priv_insn;
4536 #endif
4537 save_state(dc, cpu_cond);
4538 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4539 break;
4540 case 0x17: /* store double word alternate */
4541 #ifndef TARGET_SPARC64
4542 if (IS_IMM)
4543 goto illegal_insn;
4544 if (!supervisor(dc))
4545 goto priv_insn;
4546 #endif
4547 if (rd & 1)
4548 goto illegal_insn;
4549 else {
4550 save_state(dc, cpu_cond);
4551 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4552 }
4553 break;
4554 #endif
4555 #ifdef TARGET_SPARC64
4556 case 0x0e: /* V9 stx */
4557 gen_address_mask(dc, cpu_addr);
4558 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4559 break;
4560 case 0x1e: /* V9 stxa */
4561 save_state(dc, cpu_cond);
4562 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4563 break;
4564 #endif
4565 default:
4566 goto illegal_insn;
4567 }
4568 } else if (xop > 0x23 && xop < 0x28) {
4569 if (gen_trap_ifnofpu(dc, cpu_cond))
4570 goto jmp_insn;
4571 save_state(dc, cpu_cond);
4572 switch (xop) {
4573 case 0x24: /* store fpreg */
4574 gen_address_mask(dc, cpu_addr);
4575 tcg_gen_qemu_st32(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4576 break;
4577 case 0x25: /* stfsr, V9 stxfsr */
4578 #ifdef TARGET_SPARC64
4579 gen_address_mask(dc, cpu_addr);
4580 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4581 if (rd == 1)
4582 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4583 else {
4584 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp64);
4585 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4586 }
4587 #else
4588 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4589 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4590 #endif
4591 break;
4592 case 0x26:
4593 #ifdef TARGET_SPARC64
4594 /* V9 stqf, store quad fpreg */
4595 {
4596 TCGv r_const;
4597
4598 CHECK_FPU_FEATURE(dc, FLOAT128);
4599 gen_op_load_fpr_QT0(QFPREG(rd));
4600 r_const = tcg_const_i32(dc->mem_idx);
4601 tcg_gen_helper_0_2(helper_stqf, cpu_addr, r_const);
4602 tcg_temp_free(r_const);
4603 }
4604 break;
4605 #else /* !TARGET_SPARC64 */
4606 /* stdfq, store floating point queue */
4607 #if defined(CONFIG_USER_ONLY)
4608 goto illegal_insn;
4609 #else
4610 if (!supervisor(dc))
4611 goto priv_insn;
4612 if (gen_trap_ifnofpu(dc, cpu_cond))
4613 goto jmp_insn;
4614 goto nfq_insn;
4615 #endif
4616 #endif
4617 case 0x27: /* store double fpreg */
4618 {
4619 TCGv r_const;
4620
4621 gen_op_load_fpr_DT0(DFPREG(rd));
4622 r_const = tcg_const_i32(dc->mem_idx);
4623 tcg_gen_helper_0_2(helper_stdf, cpu_addr, r_const);
4624 tcg_temp_free(r_const);
4625 }
4626 break;
4627 default:
4628 goto illegal_insn;
4629 }
4630 } else if (xop > 0x33 && xop < 0x3f) {
4631 save_state(dc, cpu_cond);
4632 switch (xop) {
4633 #ifdef TARGET_SPARC64
4634 case 0x34: /* V9 stfa */
4635 gen_stf_asi(cpu_addr, insn, 4, rd);
4636 break;
4637 case 0x36: /* V9 stqfa */
4638 {
4639 TCGv r_const;
4640
4641 CHECK_FPU_FEATURE(dc, FLOAT128);
4642 r_const = tcg_const_i32(7);
4643 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4644 r_const);
4645 tcg_temp_free(r_const);
4646 gen_op_load_fpr_QT0(QFPREG(rd));
4647 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4648 }
4649 break;
4650 case 0x37: /* V9 stdfa */
4651 gen_op_load_fpr_DT0(DFPREG(rd));
4652 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4653 break;
4654 case 0x3c: /* V9 casa */
4655 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4656 gen_movl_TN_reg(rd, cpu_val);
4657 break;
4658 case 0x3e: /* V9 casxa */
4659 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4660 gen_movl_TN_reg(rd, cpu_val);
4661 break;
4662 #else
4663 case 0x34: /* stc */
4664 case 0x35: /* stcsr */
4665 case 0x36: /* stdcq */
4666 case 0x37: /* stdc */
4667 goto ncp_insn;
4668 #endif
4669 default:
4670 goto illegal_insn;
4671 }
4672 }
4673 else
4674 goto illegal_insn;
4675 }
4676 break;
4677 }
4678 /* default case for non jump instructions */
4679 if (dc->npc == DYNAMIC_PC) {
4680 dc->pc = DYNAMIC_PC;
4681 gen_op_next_insn();
4682 } else if (dc->npc == JUMP_PC) {
4683 /* we can do a static jump */
4684 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4685 dc->is_br = 1;
4686 } else {
4687 dc->pc = dc->npc;
4688 dc->npc = dc->npc + 4;
4689 }
4690 jmp_insn:
4691 return;
4692 illegal_insn:
4693 {
4694 TCGv r_const;
4695
4696 save_state(dc, cpu_cond);
4697 r_const = tcg_const_i32(TT_ILL_INSN);
4698 tcg_gen_helper_0_1(raise_exception, r_const);
4699 tcg_temp_free(r_const);
4700 dc->is_br = 1;
4701 }
4702 return;
4703 unimp_flush:
4704 {
4705 TCGv r_const;
4706
4707 save_state(dc, cpu_cond);
4708 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4709 tcg_gen_helper_0_1(raise_exception, r_const);
4710 tcg_temp_free(r_const);
4711 dc->is_br = 1;
4712 }
4713 return;
4714 #if !defined(CONFIG_USER_ONLY)
4715 priv_insn:
4716 {
4717 TCGv r_const;
4718
4719 save_state(dc, cpu_cond);
4720 r_const = tcg_const_i32(TT_PRIV_INSN);
4721 tcg_gen_helper_0_1(raise_exception, r_const);
4722 tcg_temp_free(r_const);
4723 dc->is_br = 1;
4724 }
4725 return;
4726 #endif
4727 nfpu_insn:
4728 save_state(dc, cpu_cond);
4729 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4730 dc->is_br = 1;
4731 return;
4732 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4733 nfq_insn:
4734 save_state(dc, cpu_cond);
4735 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4736 dc->is_br = 1;
4737 return;
4738 #endif
4739 #ifndef TARGET_SPARC64
4740 ncp_insn:
4741 {
4742 TCGv r_const;
4743
4744 save_state(dc, cpu_cond);
4745 r_const = tcg_const_i32(TT_NCP_INSN);
4746 tcg_gen_helper_0_1(raise_exception, r_const);
4747 tcg_temp_free(r_const);
4748 dc->is_br = 1;
4749 }
4750 return;
4751 #endif
4752 }
4753
/* Translate one block of SPARC guest code starting at tb->pc into TCG ops.
 *
 * tb:  the translation block being filled (tb->pc is the start PC,
 *      tb->cs_base carries the npc of the first instruction — SPARC
 *      keeps the delay-slot pc there).
 * spc: when non-zero we are re-translating only to rebuild the
 *      gen_opc_* side tables that map generated-op index back to a
 *      guest pc/npc pair (consumed by gen_pc_load); tb->size/icount
 *      are not updated in that mode.
 * env: CPU state, used for mmu index, FPU enable, breakpoints and
 *      (on sparc64) the PS_AM address-mask bit.
 */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    /* Set up the disassembly context from the TB and current CPU state.  */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* npc of the first insn travels in cs_base (SPARC delay slots).  */
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    /* PS_AM forces 32-bit address masking on loads/stores (V9).  */
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Scratch temporaries shared by the per-insn translators; freed at
       exit_gen_loop below.  Locals survive across branches, plain temps
       do not.  */
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);

    cpu_dst = tcg_temp_local_new(TCG_TYPE_TL);

    // loads and stores
    cpu_val = tcg_temp_local_new(TCG_TYPE_TL);
    cpu_addr = tcg_temp_local_new(TCG_TYPE_TL);

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* On a breakpoint hit, flush state and raise a debug exception
           instead of translating the insn.  */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    tcg_gen_helper_0_0(helper_debug);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            /* Record op-index -> (pc, npc, icount) so the searched host
               PC can be mapped back to guest state.  */
            if (loglevel > 0)
                fprintf(logfile, "Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    /* Release the shared temporaries allocated above.  */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free(cpu_tmp64);
    tcg_temp_free(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Pad the instr_start table and export the two possible jump
           targets for JUMP_PC resolution in gen_pc_load.  */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        if (loglevel > 0) {
            page_dump(logfile);
        }
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "--------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
}
4897
4898 void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4899 {
4900 gen_intermediate_code_internal(tb, 0, env);
4901 }
4902
4903 void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4904 {
4905 gen_intermediate_code_internal(tb, 1, env);
4906 }
4907
/* One-time initialization of the TCG translator front end: register the
 * fixed env pointer, every CPUState field exposed as a TCG global, the
 * integer and FP register files, and all translation helpers.  Guarded
 * by a static flag so repeated calls (one per created CPU) are no-ops.
 * The 'env' argument is unused here; registration is per-process.
 */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        /* env lives permanently in the reserved host register AREG0.  */
        cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
#ifdef TARGET_SPARC64
        /* sparc64-only architectural state.  */
        cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
        cpu_asi = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, asi),
                                     "asi");
        cpu_fprs = tcg_global_mem_new(TCG_TYPE_I32,
                                      TCG_AREG0, offsetof(CPUState, fprs),
                                      "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
                                           TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
                                            TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
                                             TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                       offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                      offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                      offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
#else
        /* sparc32 window invalid mask.  */
        cpu_wim = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        /* Common state: condition temp, condition-code sources, PSR/FSR,
           pc/npc pair and the Y multiply/divide register.  */
        cpu_cond = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
        cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_TYPE_TL,
                                   TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        /* g0 is hardwired to zero, so only g1..g7 get a backing global.  */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                            offsetof(CPUState, fpr[i]),
                                            fregnames[i]);

        /* register helpers */

        /* Re-expand helper.h so each DEF_HELPER entry becomes a
           tcg_register_helper() call registering the helper by name.  */
#undef DEF_HELPER
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
    }
}
5031
5032 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5033 unsigned long searched_pc, int pc_pos, void *puc)
5034 {
5035 target_ulong npc;
5036 env->pc = gen_opc_pc[pc_pos];
5037 npc = gen_opc_npc[pc_pos];
5038 if (npc == 1) {
5039 /* dynamic NPC: already stored */
5040 } else if (npc == 2) {
5041 target_ulong t2 = (target_ulong)(unsigned long)puc;
5042 /* jump PC: use T2 and the jump targets of the translation */
5043 if (t2)
5044 env->npc = gen_opc_jump_pc[0];
5045 else
5046 env->npc = gen_opc_jump_pc[1];
5047 } else {
5048 env->npc = npc;
5049 }
5050 }