]> git.proxmox.com Git - qemu.git/blob - target-sparc/translate.c
Convert rest of ops using float32 to TCG, remove FT0 and FT1
[qemu.git] / target-sparc / translate.c
1 /*
2 SPARC translation
3
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
6
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
11
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
34 #define DEBUG_DISAS
35
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values
38 according to jump_pc[T2] */
39
40 /* global register indexes */
41 static TCGv cpu_env, cpu_regwptr;
42 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
43 static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
44 static TCGv cpu_y;
45 #ifndef CONFIG_USER_ONLY
46 static TCGv cpu_tbr;
47 #endif
48 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
49 #ifdef TARGET_SPARC64
50 static TCGv cpu_xcc, cpu_asi, cpu_fprs, cpu_gsr;
51 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
52 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
53 #else
54 static TCGv cpu_wim;
55 #endif
56 /* local register indexes (only used inside old micro ops) */
57 static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
58 /* Floating point registers */
59 static TCGv cpu_fpr[TARGET_FPREGS];
60
61 #include "gen-icount.h"
62
63 typedef struct DisasContext {
64 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
65 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
66 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
67 int is_br;
68 int mem_idx;
69 int fpu_enabled;
70 int address_mask_32bit;
71 struct TranslationBlock *tb;
72 sparc_def_t *def;
73 } DisasContext;
74
75 // This function uses non-native bit order
76 #define GET_FIELD(X, FROM, TO) \
77 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
78
79 // This function uses the order in the manuals, i.e. bit 0 is 2^0
80 #define GET_FIELD_SP(X, FROM, TO) \
81 GET_FIELD(X, 31 - (TO), 31 - (FROM))
82
83 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
84 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
85
86 #ifdef TARGET_SPARC64
87 #define FFPREG(r) (r)
88 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
89 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
90 #else
91 #define FFPREG(r) (r)
92 #define DFPREG(r) (r & 0x1e)
93 #define QFPREG(r) (r & 0x1c)
94 #endif
95
/* Sign-extend the low LEN bits of X to a full 32-bit signed value.
 *
 * The previous implementation, (x << (32 - len)) >> (32 - len), left-shifts
 * a signed int into (or past) the sign bit, which is undefined behavior in
 * C99 (6.5.7), and relies on implementation-defined arithmetic right shift.
 * The xor/subtract form below is fully defined and yields identical results
 * for all inputs the decoder produces (fields of 1..32 bits).
 */
static int sign_extend(int x, int len)
{
    uint32_t field, sign;

    /* Keep only the low LEN bits (GET_FIELD already guarantees this,
       but masking makes the function total).  */
    field = (uint32_t)x;
    if (len < 32)
        field &= (1U << len) - 1;
    /* Flip the sign bit, then subtract it back out: a set sign bit
       becomes a borrow that propagates through the upper bits.  */
    sign = 1U << (len - 1);
    return (int)((field ^ sign) - sign);
}
101
102 #define IS_IMM (insn & (1<<13))
103
104 /* floating point registers moves */
/* Copy the single-precision register pair [src, src+1] into the dt0
   double scratch operand in CPUSPARCState (most-significant word first). */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}
112
/* Copy the single-precision register pair [src, src+1] into the dt1
   double scratch operand (second source of two-operand FP helpers).  */
static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}
120
/* Write the dt0 double scratch operand back into the register pair
   [dst, dst+1] (most-significant word first).  */
static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}
128
/* Copy the four-register group [src .. src+3] into the qt0 quad scratch
   operand, most-significant word (upmost) first.  */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
140
/* Copy the four-register group [src .. src+3] into the qt1 quad scratch
   operand (second source of two-operand quad FP helpers).  */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i32(cpu_fpr[src], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_fpr[src + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_fpr[src + 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_fpr[src + 3], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}
152
/* Write the qt0 quad scratch operand back into the four-register group
   [dst .. dst+3], most-significant word first.  */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_fpr[dst], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_fpr[dst + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_fpr[dst + 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_fpr[dst + 3], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
164
165 /* moves */
166 #ifdef CONFIG_USER_ONLY
167 #define supervisor(dc) 0
168 #ifdef TARGET_SPARC64
169 #define hypervisor(dc) 0
170 #endif
171 #else
172 #define supervisor(dc) (dc->mem_idx >= 1)
173 #ifdef TARGET_SPARC64
174 #define hypervisor(dc) (dc->mem_idx == 2)
175 #else
176 #endif
177 #endif
178
179 #ifdef TARGET_SPARC64
180 #ifndef TARGET_ABI32
181 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
182 #else
183 #define AM_CHECK(dc) (1)
184 #endif
185 #endif
186
187 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
188 {
189 #ifdef TARGET_SPARC64
190 if (AM_CHECK(dc))
191 tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
192 #endif
193 }
194
195 static inline void gen_movl_reg_TN(int reg, TCGv tn)
196 {
197 if (reg == 0)
198 tcg_gen_movi_tl(tn, 0);
199 else if (reg < 8)
200 tcg_gen_mov_tl(tn, cpu_gregs[reg]);
201 else {
202 tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
203 }
204 }
205
206 static inline void gen_movl_TN_reg(int reg, TCGv tn)
207 {
208 if (reg == 0)
209 return;
210 else if (reg < 8)
211 tcg_gen_mov_tl(cpu_gregs[reg], tn);
212 else {
213 tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
214 }
215 }
216
217 static inline void gen_goto_tb(DisasContext *s, int tb_num,
218 target_ulong pc, target_ulong npc)
219 {
220 TranslationBlock *tb;
221
222 tb = s->tb;
223 if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
224 (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
225 /* jump to same page: we can use a direct jump */
226 tcg_gen_goto_tb(tb_num);
227 tcg_gen_movi_tl(cpu_pc, pc);
228 tcg_gen_movi_tl(cpu_npc, npc);
229 tcg_gen_exit_tb((long)tb + tb_num);
230 } else {
231 /* jump to another page: currently not optimized */
232 tcg_gen_movi_tl(cpu_pc, pc);
233 tcg_gen_movi_tl(cpu_npc, npc);
234 tcg_gen_exit_tb(0);
235 }
236 }
237
238 // XXX suboptimal
// XXX suboptimal
/* Extract the N (negative) flag from the PSR image in src into bit 0
   of reg.  */
static inline void gen_mov_reg_N(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
245
/* Extract the Z (zero) flag from the PSR image in src into bit 0 of reg. */
static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
252
/* Extract the V (overflow) flag from the PSR image in src into bit 0
   of reg.  */
static inline void gen_mov_reg_V(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
259
/* Extract the C (carry) flag from the PSR image in src into bit 0 of reg. */
static inline void gen_mov_reg_C(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
266
/* Clear all 32-bit condition codes (icc) before recomputing them.  */
static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}
271
#ifdef TARGET_SPARC64
/* Clear all 64-bit condition codes (xcc) before recomputing them.  */
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
278
279 /* old op:
280 if (!T0)
281 env->psr |= PSR_ZERO;
282 if ((int32_t) T0 < 0)
283 env->psr |= PSR_NEG;
284 */
285 static inline void gen_cc_NZ_icc(TCGv dst)
286 {
287 TCGv r_temp;
288 int l1, l2;
289
290 l1 = gen_new_label();
291 l2 = gen_new_label();
292 r_temp = tcg_temp_new(TCG_TYPE_TL);
293 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
294 tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
295 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
296 gen_set_label(l1);
297 tcg_gen_ext_i32_tl(r_temp, dst);
298 tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
299 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
300 gen_set_label(l2);
301 tcg_temp_free(r_temp);
302 }
303
#ifdef TARGET_SPARC64
/* Set the xcc N and Z flags from the full 64-bit value in dst.  */
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int lbl_nonzero, lbl_nonneg;

    lbl_nonzero = gen_new_label();
    lbl_nonneg = gen_new_label();
    /* Z flag */
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, lbl_nonzero);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(lbl_nonzero);
    /* N flag */
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, lbl_nonneg);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(lbl_nonneg);
}
#endif
319
320 /* old op:
321 if (T0 < src1)
322 env->psr |= PSR_CARRY;
323 */
324 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
325 {
326 TCGv r_temp1, r_temp2;
327 int l1;
328
329 l1 = gen_new_label();
330 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
331 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
332 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
333 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
334 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
335 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
336 gen_set_label(l1);
337 tcg_temp_free(r_temp1);
338 tcg_temp_free(r_temp2);
339 }
340
#ifdef TARGET_SPARC64
/* Set the xcc C flag after a 64-bit addition: carry out occurred iff
   the sum is unsigned-less than the first addend.  */
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
352
353 /* old op:
354 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
355 env->psr |= PSR_OVF;
356 */
/* Set the icc V flag after a 32-bit addition: signed overflow occurred
   iff both operands have the same sign and the result's sign differs,
   i.e. ((src1 ^ src2 ^ -1) & (src1 ^ dst)) bit 31.  The bit is shifted
   into PSR_OVF position and OR-ed into cpu_psr.
   NOTE: clobbers the shared scratch globals cpu_tmp0 and cpu_tmp32.  */
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}
372
#ifdef TARGET_SPARC64
/* Set the xcc V flag after a 64-bit addition (same formula as the icc
   variant, but testing bit 63).  Clobbers cpu_tmp0 and cpu_tmp32.  */
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
390
/* Trap variant of the addition overflow check (TADDccTV): raise a
   TT_TOVF exception if dst = src1 + src2 overflowed as a signed 32-bit
   addition; otherwise fall through.  Clobbers cpu_tmp0.  */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
411
/* Tagged add/subtract: set the icc V flag if either operand has any of
   its two tag bits (bits 1:0) set.  Clobbers cpu_tmp0.  */
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
423
/* Trap variant of the tag check (TADDccTV/TSUBccTV): raise TT_TOVF if
   either operand has nonzero tag bits (bits 1:0).  Clobbers cpu_tmp0.  */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
}
438
/* ADDcc: dst = src1 + src2 and recompute N/Z/C/V for icc (and xcc on
   sparc64).  The operands are latched into cpu_cc_src/cpu_cc_src2 and
   the result into cpu_cc_dst so the flag helpers see stable values.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
456
/* ADDXcc: dst = src1 + src2 + C, updating flags.  The add is done in
   two steps (src1 + C, then + src2); the carry flag is checked after
   each partial sum so a carry out of either step is recorded.  Note the
   old C flag is read from cpu_psr *before* the flags are cleared.  */
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    /* carry from the first partial sum (src1 + C) */
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_cc_NZ_icc(cpu_cc_dst);
    /* carry from the second partial sum (+ src2) */
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
480
/* TADDcc: tagged add.  Same as ADDcc, plus V is also set when either
   operand has nonzero tag bits (low two bits).  */
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
499
/* TADDccTV: tagged add, trapping variant.  Raises TT_TOVF on nonzero
   tag bits or signed 32-bit overflow instead of setting V.  */
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
518
519 /* old op:
520 if (src1 < T1)
521 env->psr |= PSR_CARRY;
522 */
523 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
524 {
525 TCGv r_temp1, r_temp2;
526 int l1;
527
528 l1 = gen_new_label();
529 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
530 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
531 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
532 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
533 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
534 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
535 gen_set_label(l1);
536 tcg_temp_free(r_temp1);
537 tcg_temp_free(r_temp2);
538 }
539
#ifdef TARGET_SPARC64
/* Set the xcc C flag after a 64-bit subtraction: borrow occurred iff
   src1 is unsigned-less than src2.  */
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
551
552 /* old op:
553 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
554 env->psr |= PSR_OVF;
555 */
/* Set the icc V flag after a 32-bit subtraction: signed overflow
   occurred iff the operands have different signs and the result's sign
   differs from the minuend's, i.e. ((src1 ^ src2) & (src1 ^ dst))
   bit 31.  Clobbers cpu_tmp0 and cpu_tmp32.  */
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
    tcg_temp_free(r_temp);
}
570
#ifdef TARGET_SPARC64
/* Set the xcc V flag after a 64-bit subtraction (same formula as the
   icc variant but testing bit 63).  Clobbers cpu_tmp0 and cpu_tmp32.  */
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#endif
587
/* Trap variant of the subtraction overflow check (TSUBccTV): raise
   TT_TOVF if dst = src1 - src2 overflowed as a signed 32-bit
   subtraction; otherwise fall through.  Clobbers cpu_tmp0.  */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
607
/* SUBcc: dst = src1 - src2 and recompute N/Z/C/V for icc (and xcc on
   sparc64).  Note C for subtraction is computed from the operands, not
   the result.  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
625
/* SUBXcc: dst = src1 - src2 - C, updating flags.  Performed in two
   steps (src1 - C, then - src2) with the borrow checked after each
   step.  The old C flag is read from cpu_psr before flags are cleared. */
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    /* borrow from the first partial difference (src1 - C) */
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_cc_NZ_icc(cpu_cc_dst);
    /* borrow from the second partial difference (- src2) */
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
649
/* TSUBcc: tagged subtract.  Same as SUBcc, plus V is also set when
   either operand has nonzero tag bits (low two bits).  */
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
668
/* TSUBccTV: tagged subtract, trapping variant.  Raises TT_TOVF on
   nonzero tag bits or signed 32-bit overflow instead of setting V.  */
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
687
/* MULScc: one step of the Booth-style multiply-step instruction.
   Zeroes the second operand unless Y<0> is set, rotates the low bit of
   src1 into Y, shifts (N ^ V) into bit 31 of the shifted src1, adds,
   and recomputes the icc flags.  Clobbers cpu_tmp0.  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_shri_tl(cpu_tmp0, cpu_y, 1);
    tcg_gen_or_tl(cpu_y, cpu_tmp0, r_temp);

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
735
/* UMUL: 32x32 -> 64-bit unsigned multiply.  The high 32 bits of the
   product go to %y; dst receives the full 64-bit product on sparc64,
   or its low 32 bits on 32-bit targets.  Clobbers cpu_tmp0.  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_extu_i32_i64(r_temp, src2);
    tcg_gen_extu_i32_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free(r_temp2);
}
758
/* SMUL: 32x32 -> 64-bit signed multiply.  The high 32 bits of the
   product go to %y; dst receives the full 64-bit product on sparc64,
   or its low 32 bits on 32-bit targets.  Clobbers cpu_tmp0.  */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_ext_i32_i64(r_temp, src2);
    tcg_gen_ext_i32_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_tl(cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free(r_temp2);
}
781
#ifdef TARGET_SPARC64
/* Emit a runtime check that raises TT_DIV_ZERO when the divisor is
   zero; otherwise fall through.  */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    TCGv r_const;
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
    r_const = tcg_const_i32(TT_DIV_ZERO);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
}
795
/* SDIVX: 64-bit signed divide.  Traps on a zero divisor, and special-
   cases INT64_MIN / -1 to yield INT64_MIN, since that quotient would
   overflow the host's signed division.  */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
814
/* Set icc flags after UDIVcc/SDIVcc: N and Z from the quotient; V is
   set when cpu_cc_src2 is nonzero (presumably the overflow flag left
   there by the division helper — confirm against the callers).  */
static inline void gen_op_div_cc(TCGv dst)
{
    int l1;

    tcg_gen_mov_tl(cpu_cc_dst, dst);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
827
/* Set flags for logical operations: clear icc (and xcc), then set only
   N and Z from the result — C and V stay zero.  */
static inline void gen_op_logic_cc(TCGv dst)
{
    tcg_gen_mov_tl(cpu_cc_dst, dst);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
#endif
}
839
// 1
/* "branch always": condition is constant true.  */
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
845
// Z
/* "equal": Z flag of the PSR image in src.  */
static inline void gen_op_eval_be(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
}
851
// Z | (N ^ V)
/* "less or equal" (signed).  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_ble(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
861
// N ^ V
/* "less than" (signed).  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_bl(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}
869
// C | Z
/* "less or equal" (unsigned).  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
877
// C
/* "carry set" (unsigned less than).  */
static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
}
883
// V
/* "overflow set".  */
static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
}
889
// 0
/* "branch never": condition is constant false.  */
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
895
// N
/* "negative".  */
static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
}
901
// !Z
/* "not equal".  */
static inline void gen_op_eval_bne(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
908
// !(Z | (N ^ V))
/* "greater than" (signed).  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_bg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
919
// !(N ^ V)
/* "greater or equal" (signed).  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_bge(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
928
// !(C | Z)
/* "greater than" (unsigned).  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
937
// !C
/* "carry clear" (unsigned greater or equal).  */
static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
944
// !N
/* "positive or zero".  */
static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
951
// !V
/* "overflow clear".  */
static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
958
959 /*
960 FPSR bit field FCC1 | FCC0:
961 0 =
962 1 <
963 2 >
964 3 unordered
965 */
/* Extract FCC0 of the fcc field selected by fcc_offset from the FSR
   image in src into bit 0 of reg.  */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
972
/* Extract FCC1 of the fcc field selected by fcc_offset from the FSR
   image in src into bit 0 of reg.  */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
979
// !0: FCC0 | FCC1
/* fcc "not equal": any relation except '='.  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
988
// 1 or 2: FCC0 ^ FCC1
/* fcc "less or greater" (ordered, not equal).  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}
997
// 1 or 3: FCC0
/* fcc "unordered or less".  */
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
1004
// 1: FCC0 & !FCC1
/* fcc "less than".  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
1014
// 2 or 3: FCC1
/* fcc "unordered or greater".  */
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
1021
// 2: !FCC0 & FCC1
/* fcc "greater than".  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
1031
// 3: FCC0 & FCC1
/* fcc "unordered".  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
1040
// 0: !(FCC0 | FCC1)
/* fcc "equal".  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1050
// 0 or 3: !(FCC0 ^ FCC1)
/* fcc "unordered or equal".  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1060
// 0 or 2: !FCC0
/* fcc "greater or equal".  */
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1068
// !1: !(FCC0 & !FCC1)
/* fcc "unordered or greater or equal".  Clobbers cpu_tmp0.  */
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1079
// 0 or 1: !FCC1
/* fcc "less or equal".  */
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1087
// !2: !(!FCC0 & FCC1)
/* FBULE: set dst to 1 iff the fcc field is anything but 2 ("greater"). */
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);      /* !FCC0 & FCC1 == "greater" */
    tcg_gen_xori_tl(dst, dst, 0x1);          /* then invert */
}
1098
// !3: !(FCC0 & FCC1)
/* FBO: set dst to 1 iff the fcc field is not 3, i.e. operands ordered. */
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1108
1109 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1110 target_ulong pc2, TCGv r_cond)
1111 {
1112 int l1;
1113
1114 l1 = gen_new_label();
1115
1116 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1117
1118 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1119
1120 gen_set_label(l1);
1121 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1122 }
1123
1124 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1125 target_ulong pc2, TCGv r_cond)
1126 {
1127 int l1;
1128
1129 l1 = gen_new_label();
1130
1131 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1132
1133 gen_goto_tb(dc, 0, pc2, pc1);
1134
1135 gen_set_label(l1);
1136 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1137 }
1138
1139 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1140 TCGv r_cond)
1141 {
1142 int l1, l2;
1143
1144 l1 = gen_new_label();
1145 l2 = gen_new_label();
1146
1147 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1148
1149 tcg_gen_movi_tl(cpu_npc, npc1);
1150 tcg_gen_br(l2);
1151
1152 gen_set_label(l1);
1153 tcg_gen_movi_tl(cpu_npc, npc2);
1154 gen_set_label(l2);
1155 }
1156
/* call this function before using the condition register as it may
   have been set for a jump */
/* If a conditional npc is pending (JUMP_PC), resolve it into cpu_npc via
   'cond' so the condition register can be safely reused. */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
1166
/* Materialize the translator's view of npc into cpu_npc: a pending
   JUMP_PC is resolved through 'cond'; a static npc is stored directly;
   an already-dynamic npc needs no code. */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1176
/* Commit both pc and npc to the CPU state, e.g. before calling a helper
   that may raise an exception. */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}
1182
/* Advance pc to npc (used by control-transfer instructions).  When npc is
   conditional or dynamic, the copy must happen at runtime via cpu_npc;
   otherwise pc simply becomes the static npc value. */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
1196
/* Emit code for sequential execution: pc <- npc, npc <- npc + 4. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1202
/* Evaluate an integer condition code (Bicc/BPcc 'cond' encoding) against
   the selected flags register and leave a 0/1 result in r_dst.  On
   SPARC64, cc selects xcc (cc != 0) over icc; 32-bit targets only have
   the PSR flags. */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:                   /* bn: never */
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:                   /* be */
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:                   /* ble */
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:                   /* bl */
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:                   /* bleu */
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:                   /* bcs */
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:                   /* bneg */
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:                   /* bvs */
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:                   /* ba: always */
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:                   /* bne */
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:                   /* bg */
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:                   /* bge */
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:                   /* bgu */
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:                   /* bcc */
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:                   /* bpos */
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:                   /* bvc */
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1266
/* Evaluate a floating-point condition (FBfcc/FBPfcc 'cond' encoding)
   against fcc<cc> in the FSR and leave a 0/1 result in r_dst.  The
   offset rebases the gen_op_eval_fb* accessors (which assume fcc0 at
   FSR bit 10) onto fcc1..fcc3, which live at FSR bits 32/34/36. */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:                   /* fbn: never */
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:                   /* fba: always */
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1338
#ifdef TARGET_SPARC64
// Inverted logic
/* Map the BPr 'rcond' field to the TCG condition that *skips* the taken
   path (the branch below jumps when the condition is NOT met).  Entries
   -1 are reserved/illegal encodings. */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,                /* 1: BRZ   (branch if register == 0) */
    TCG_COND_GT,                /* 2: BRLEZ */
    TCG_COND_GE,                /* 3: BRLZ  */
    -1,
    TCG_COND_EQ,                /* 5: BRNZ  */
    TCG_COND_LE,                /* 6: BRGZ  */
    TCG_COND_LT,                /* 7: BRGEZ */
};

/* Evaluate a register-contents condition (V9 BPr): r_dst becomes 1 when
   r_src satisfies 'cond', 0 otherwise. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    /* jump past the movi below when the condition does not hold */
    tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
1363
/* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).  'a' is the annul
   bit: on an unconditional not-taken branch it skips the delay slot; on
   a conditional branch the delay slot is executed only when taken. */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: skip the delay slot */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: the target replaces the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            /* annulled conditional: resolve at runtime, TB ends here */
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* defer the npc choice until the delay slot is translated */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1403
/* XXX: potentially incorrect if dynamic npc */
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).  Same
   structure as do_branch, but the condition comes from fcc<cc> via
   gen_fcond. */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annulled: skip the delay slot */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annulled: the target replaces the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1443
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 branch-on-register-contents (BPr).  There are no
   unconditional encodings here; the condition is always evaluated from
   r_reg at runtime. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        /* annulled: resolve at runtime, TB ends here */
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        /* defer the npc choice until the delay slot is translated */
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
1464
/* Per-fcc-field helper dispatch tables for the double/quad FP compares
   (fcmp* writes fcc0, fcmp*_fccN writes fccN). */
static GenOpFunc * const gen_fcmpd[4] = {
    helper_fcmpd,
    helper_fcmpd_fcc1,
    helper_fcmpd_fcc2,
    helper_fcmpd_fcc3,
};

static GenOpFunc * const gen_fcmpq[4] = {
    helper_fcmpq,
    helper_fcmpq_fcc1,
    helper_fcmpq_fcc2,
    helper_fcmpq_fcc3,
};

static GenOpFunc * const gen_fcmped[4] = {
    helper_fcmped,
    helper_fcmped_fcc1,
    helper_fcmped_fcc2,
    helper_fcmped_fcc3,
};

static GenOpFunc * const gen_fcmpeq[4] = {
    helper_fcmpeq,
    helper_fcmpeq_fcc1,
    helper_fcmpeq_fcc2,
    helper_fcmpeq_fcc3,
};
1492
/* Single-precision FP compare into fcc<fccno>; the operands are passed
   to the helper directly, so no dispatch table is needed. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    switch (fccno) {
    case 0:
        tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
        break;
    case 1:
        tcg_gen_helper_0_2(helper_fcmps_fcc1, r_rs1, r_rs2);
        break;
    case 2:
        tcg_gen_helper_0_2(helper_fcmps_fcc2, r_rs1, r_rs2);
        break;
    case 3:
        tcg_gen_helper_0_2(helper_fcmps_fcc3, r_rs1, r_rs2);
        break;
    }
}
1510
/* Double-precision FP compare into fcc<fccno>; operands come from the
   DT0/DT1 staging registers loaded by the caller. */
static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpd[fccno]);
}

/* Quad-precision FP compare into fcc<fccno>; operands in QT0/QT1. */
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpq[fccno]);
}
1520
/* Single-precision "compare and exception" (FCMPEs) into fcc<fccno>. */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    switch (fccno) {
    case 0:
        tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
        break;
    case 1:
        tcg_gen_helper_0_2(helper_fcmpes_fcc1, r_rs1, r_rs2);
        break;
    case 2:
        tcg_gen_helper_0_2(helper_fcmpes_fcc2, r_rs1, r_rs2);
        break;
    case 3:
        tcg_gen_helper_0_2(helper_fcmpes_fcc3, r_rs1, r_rs2);
        break;
    }
}
1538
/* Double-precision "compare and exception" into fcc<fccno>. */
static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmped[fccno]);
}

/* Quad-precision "compare and exception" into fcc<fccno>. */
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
}
1548
#else

/* Pre-V9 targets have a single fcc field, so fccno is ignored and each
   compare maps straight onto its fcc0 helper. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    tcg_gen_helper_0_2(helper_fcmps, r_rs1, r_rs2);
}

static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpd);
}

static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpq);
}

static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    tcg_gen_helper_0_2(helper_fcmpes, r_rs1, r_rs2);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmped);
}

static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpeq);
}
#endif
1581
/* Raise a floating-point exception: record fsr_flags in the FSR ftt
   field first (the trap handler reads it), then raise TT_FP_EXCP. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv r_const;

    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);  /* clear old ftt */
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
}
1592
/* In system emulation, raise TT_NFPU_INSN and end the TB when the FPU is
   disabled.  Returns 1 when the caller must stop translating this
   instruction, 0 otherwise (always 0 for user mode). */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv r_const;

        /* commit pc/npc so the trap returns to this instruction */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        tcg_gen_helper_0_1(raise_exception, r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
1609
/* Clear the FSR ftt and current-exception (cexc) fields before a new FP
   operation. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}

/* Reset softfloat's accumulated exception flags via a helper. */
static inline void gen_clear_float_exceptions(void)
{
    tcg_gen_helper_0_0(helper_clear_float_exceptions);
}
1619
/* asi moves */
#ifdef TARGET_SPARC64
/* Return a 32-bit TCG value holding the ASI for this access: the %asi
   register for the immediate addressing form, otherwise the ASI field
   encoded in the instruction.  Caller frees the returned temp. */
static inline TCGv gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new(TCG_TYPE_I32);
        tcg_gen_mov_i32(r_asi, cpu_asi);
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
1636
1637 static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
1638 int sign)
1639 {
1640 TCGv r_asi, r_size, r_sign;
1641
1642 r_asi = gen_get_asi(insn, addr);
1643 r_size = tcg_const_i32(size);
1644 r_sign = tcg_const_i32(sign);
1645 tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi, r_size, r_sign);
1646 tcg_temp_free(r_sign);
1647 tcg_temp_free(r_size);
1648 tcg_temp_free(r_asi);
1649 }
1650
1651 static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
1652 {
1653 TCGv r_asi, r_size;
1654
1655 r_asi = gen_get_asi(insn, addr);
1656 r_size = tcg_const_i32(size);
1657 tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, r_size);
1658 tcg_temp_free(r_size);
1659 tcg_temp_free(r_asi);
1660 }
1661
/* FP load from an alternate address space; the helper writes directly
   into FP register rd, so no destination TCGv is passed. */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, r_size, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1674
/* FP store to an alternate address space; the helper reads FP register
   rd itself. */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, r_size, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1687
/* SWAPA: load the old 32-bit memory word into cpu_tmp64, store dst to
   the same location, then return the old word in dst.  (Load and store
   are separate helper calls, not a single atomic operation.) */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1702
/* LDDA: the helper performs the doubleword load and writes the rd/rd+1
   register pair itself, so only addr, asi and rd are passed. */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_3(helper_ldda_asi, addr, r_asi, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_asi);
}
1713
/* STDA: pack the hi (rd) and lo (rd+1) halves into a 64-bit value via
   helper_pack64, then store 8 bytes through the st_asi helper. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_temp, r_asi, r_size;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi,
                       r_temp);
    tcg_temp_free(r_temp);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1729
/* CASA: 32-bit compare-and-swap; the comparison value comes from
   register rd, the swap value is val2, and the helper returns the old
   memory value in dst. */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
    tcg_temp_free(r_asi);
    tcg_temp_free(r_val1);
}
1742
/* CASXA: 64-bit compare-and-swap; like gen_cas_asi but the comparison
   value is staged in the shared cpu_tmp64 instead of a fresh temp. */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free(r_asi);
}
1753
#elif !defined(CONFIG_USER_ONLY)

/* 32-bit system emulation: the ASI is always the immediate field of the
   instruction, and data passes through cpu_tmp64 because the ld/st
   helpers operate on 64-bit quantities. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi, r_size;

    /* widen the store value to the 64 bits the helper expects */
    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}

/* SWAPA: load the old word, store dst, return the old word in dst
   (two separate helper calls, not atomic). */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

/* LDDA: load 8 bytes and split them across the rd (high word) / rd+1
   (low word) register pair. */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}

/* STDA: pack the rd/rd+1 pair into 64 bits and store 8 bytes. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_temp, r_asi, r_size;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi, r_temp);
    tcg_temp_free(r_temp);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
#endif
1831
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the byte at addr into dst, then store 0xff to the same
   location (load and store are separate helper calls, not atomic). */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_val, r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    tcg_gen_helper_0_4(helper_st_asi, addr, r_val, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_temp_free(r_val);
}
#endif
1848
/* Return a TCGv holding rs1: %g0 reads as constant zero, %g1..%g7 come
   from the global-register TCGvs, and windowed registers are loaded
   through regwptr into the caller-supplied scratch 'def'. */
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        r_rs1 = tcg_const_tl(0); // XXX how to free?
    else if (rs1 < 8)
        r_rs1 = cpu_gregs[rs1];
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}
1863
/* Return a TCGv holding the second operand: the sign-extended immediate
   when the i bit is set, otherwise register rs2 (resolved like rs1 in
   get_src1). */
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;
    unsigned int rs2;

    if (IS_IMM) { /* immediate */
        rs2 = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
    } else { /* register */
        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0); // XXX how to free?
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
1883
/* Abort decoding (via the labels in disas_sparc_insn) when the CPU model
   lacks the required integer-unit or FPU feature. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
1890
1891 /* before an instruction, dc->pc must be static */
1892 static void disas_sparc_insn(DisasContext * dc)
1893 {
1894 unsigned int insn, opc, rs1, rs2, rd;
1895
1896 if (unlikely(loglevel & CPU_LOG_TB_OP))
1897 tcg_gen_debug_insn_start(dc->pc);
1898 insn = ldl_code(dc->pc);
1899 opc = GET_FIELD(insn, 0, 1);
1900
1901 rd = GET_FIELD(insn, 2, 6);
1902
1903 cpu_src1 = tcg_temp_new(TCG_TYPE_TL); // const
1904 cpu_src2 = tcg_temp_new(TCG_TYPE_TL); // const
1905
1906 switch (opc) {
1907 case 0: /* branches/sethi */
1908 {
1909 unsigned int xop = GET_FIELD(insn, 7, 9);
1910 int32_t target;
1911 switch (xop) {
1912 #ifdef TARGET_SPARC64
1913 case 0x1: /* V9 BPcc */
1914 {
1915 int cc;
1916
1917 target = GET_FIELD_SP(insn, 0, 18);
1918 target = sign_extend(target, 18);
1919 target <<= 2;
1920 cc = GET_FIELD_SP(insn, 20, 21);
1921 if (cc == 0)
1922 do_branch(dc, target, insn, 0, cpu_cond);
1923 else if (cc == 2)
1924 do_branch(dc, target, insn, 1, cpu_cond);
1925 else
1926 goto illegal_insn;
1927 goto jmp_insn;
1928 }
1929 case 0x3: /* V9 BPr */
1930 {
1931 target = GET_FIELD_SP(insn, 0, 13) |
1932 (GET_FIELD_SP(insn, 20, 21) << 14);
1933 target = sign_extend(target, 16);
1934 target <<= 2;
1935 cpu_src1 = get_src1(insn, cpu_src1);
1936 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1937 goto jmp_insn;
1938 }
1939 case 0x5: /* V9 FBPcc */
1940 {
1941 int cc = GET_FIELD_SP(insn, 20, 21);
1942 if (gen_trap_ifnofpu(dc, cpu_cond))
1943 goto jmp_insn;
1944 target = GET_FIELD_SP(insn, 0, 18);
1945 target = sign_extend(target, 19);
1946 target <<= 2;
1947 do_fbranch(dc, target, insn, cc, cpu_cond);
1948 goto jmp_insn;
1949 }
1950 #else
1951 case 0x7: /* CBN+x */
1952 {
1953 goto ncp_insn;
1954 }
1955 #endif
1956 case 0x2: /* BN+x */
1957 {
1958 target = GET_FIELD(insn, 10, 31);
1959 target = sign_extend(target, 22);
1960 target <<= 2;
1961 do_branch(dc, target, insn, 0, cpu_cond);
1962 goto jmp_insn;
1963 }
1964 case 0x6: /* FBN+x */
1965 {
1966 if (gen_trap_ifnofpu(dc, cpu_cond))
1967 goto jmp_insn;
1968 target = GET_FIELD(insn, 10, 31);
1969 target = sign_extend(target, 22);
1970 target <<= 2;
1971 do_fbranch(dc, target, insn, 0, cpu_cond);
1972 goto jmp_insn;
1973 }
1974 case 0x4: /* SETHI */
1975 if (rd) { // nop
1976 uint32_t value = GET_FIELD(insn, 10, 31);
1977 TCGv r_const;
1978
1979 r_const = tcg_const_tl(value << 10);
1980 gen_movl_TN_reg(rd, r_const);
1981 tcg_temp_free(r_const);
1982 }
1983 break;
1984 case 0x0: /* UNIMPL */
1985 default:
1986 goto illegal_insn;
1987 }
1988 break;
1989 }
1990 break;
1991 case 1:
1992 /*CALL*/ {
1993 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1994 TCGv r_const;
1995
1996 r_const = tcg_const_tl(dc->pc);
1997 gen_movl_TN_reg(15, r_const);
1998 tcg_temp_free(r_const);
1999 target += dc->pc;
2000 gen_mov_pc_npc(dc, cpu_cond);
2001 dc->npc = target;
2002 }
2003 goto jmp_insn;
2004 case 2: /* FPU & Logical Operations */
2005 {
2006 unsigned int xop = GET_FIELD(insn, 7, 12);
2007 if (xop == 0x3a) { /* generate trap */
2008 int cond;
2009
2010 cpu_src1 = get_src1(insn, cpu_src1);
2011 if (IS_IMM) {
2012 rs2 = GET_FIELD(insn, 25, 31);
2013 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2014 } else {
2015 rs2 = GET_FIELD(insn, 27, 31);
2016 if (rs2 != 0) {
2017 gen_movl_reg_TN(rs2, cpu_src2);
2018 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2019 } else
2020 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2021 }
2022 cond = GET_FIELD(insn, 3, 6);
2023 if (cond == 0x8) {
2024 save_state(dc, cpu_cond);
2025 tcg_gen_helper_0_1(helper_trap, cpu_dst);
2026 } else if (cond != 0) {
2027 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2028 #ifdef TARGET_SPARC64
2029 /* V9 icc/xcc */
2030 int cc = GET_FIELD_SP(insn, 11, 12);
2031
2032 save_state(dc, cpu_cond);
2033 if (cc == 0)
2034 gen_cond(r_cond, 0, cond);
2035 else if (cc == 2)
2036 gen_cond(r_cond, 1, cond);
2037 else
2038 goto illegal_insn;
2039 #else
2040 save_state(dc, cpu_cond);
2041 gen_cond(r_cond, 0, cond);
2042 #endif
2043 tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
2044 tcg_temp_free(r_cond);
2045 }
2046 gen_op_next_insn();
2047 tcg_gen_exit_tb(0);
2048 dc->is_br = 1;
2049 goto jmp_insn;
2050 } else if (xop == 0x28) {
2051 rs1 = GET_FIELD(insn, 13, 17);
2052 switch(rs1) {
2053 case 0: /* rdy */
2054 #ifndef TARGET_SPARC64
2055 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2056 manual, rdy on the microSPARC
2057 II */
2058 case 0x0f: /* stbar in the SPARCv8 manual,
2059 rdy on the microSPARC II */
2060 case 0x10 ... 0x1f: /* implementation-dependent in the
2061 SPARCv8 manual, rdy on the
2062 microSPARC II */
2063 #endif
2064 gen_movl_TN_reg(rd, cpu_y);
2065 break;
2066 #ifdef TARGET_SPARC64
2067 case 0x2: /* V9 rdccr */
2068 tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
2069 gen_movl_TN_reg(rd, cpu_dst);
2070 break;
2071 case 0x3: /* V9 rdasi */
2072 tcg_gen_ext_i32_tl(cpu_dst, cpu_asi);
2073 gen_movl_TN_reg(rd, cpu_dst);
2074 break;
2075 case 0x4: /* V9 rdtick */
2076 {
2077 TCGv r_tickptr;
2078
2079 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2080 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2081 offsetof(CPUState, tick));
2082 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2083 r_tickptr);
2084 tcg_temp_free(r_tickptr);
2085 gen_movl_TN_reg(rd, cpu_dst);
2086 }
2087 break;
2088 case 0x5: /* V9 rdpc */
2089 {
2090 TCGv r_const;
2091
2092 r_const = tcg_const_tl(dc->pc);
2093 gen_movl_TN_reg(rd, r_const);
2094 tcg_temp_free(r_const);
2095 }
2096 break;
2097 case 0x6: /* V9 rdfprs */
2098 tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
2099 gen_movl_TN_reg(rd, cpu_dst);
2100 break;
2101 case 0xf: /* V9 membar */
2102 break; /* no effect */
2103 case 0x13: /* Graphics Status */
2104 if (gen_trap_ifnofpu(dc, cpu_cond))
2105 goto jmp_insn;
2106 gen_movl_TN_reg(rd, cpu_gsr);
2107 break;
2108 case 0x17: /* Tick compare */
2109 gen_movl_TN_reg(rd, cpu_tick_cmpr);
2110 break;
2111 case 0x18: /* System tick */
2112 {
2113 TCGv r_tickptr;
2114
2115 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2116 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2117 offsetof(CPUState, stick));
2118 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2119 r_tickptr);
2120 tcg_temp_free(r_tickptr);
2121 gen_movl_TN_reg(rd, cpu_dst);
2122 }
2123 break;
2124 case 0x19: /* System tick compare */
2125 gen_movl_TN_reg(rd, cpu_stick_cmpr);
2126 break;
2127 case 0x10: /* Performance Control */
2128 case 0x11: /* Performance Instrumentation Counter */
2129 case 0x12: /* Dispatch Control */
2130 case 0x14: /* Softint set, WO */
2131 case 0x15: /* Softint clear, WO */
2132 case 0x16: /* Softint write */
2133 #endif
2134 default:
2135 goto illegal_insn;
2136 }
2137 #if !defined(CONFIG_USER_ONLY)
2138 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2139 #ifndef TARGET_SPARC64
2140 if (!supervisor(dc))
2141 goto priv_insn;
2142 tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
2143 #else
2144 CHECK_IU_FEATURE(dc, HYPV);
2145 if (!hypervisor(dc))
2146 goto priv_insn;
2147 rs1 = GET_FIELD(insn, 13, 17);
2148 switch (rs1) {
2149 case 0: // hpstate
2150 // gen_op_rdhpstate();
2151 break;
2152 case 1: // htstate
2153 // gen_op_rdhtstate();
2154 break;
2155 case 3: // hintp
2156 tcg_gen_mov_tl(cpu_dst, cpu_hintp);
2157 break;
2158 case 5: // htba
2159 tcg_gen_mov_tl(cpu_dst, cpu_htba);
2160 break;
2161 case 6: // hver
2162 tcg_gen_mov_tl(cpu_dst, cpu_hver);
2163 break;
2164 case 31: // hstick_cmpr
2165 tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
2166 break;
2167 default:
2168 goto illegal_insn;
2169 }
2170 #endif
2171 gen_movl_TN_reg(rd, cpu_dst);
2172 break;
2173 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2174 if (!supervisor(dc))
2175 goto priv_insn;
2176 #ifdef TARGET_SPARC64
2177 rs1 = GET_FIELD(insn, 13, 17);
2178 switch (rs1) {
2179 case 0: // tpc
2180 {
2181 TCGv r_tsptr;
2182
2183 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2184 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2185 offsetof(CPUState, tsptr));
2186 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2187 offsetof(trap_state, tpc));
2188 tcg_temp_free(r_tsptr);
2189 }
2190 break;
2191 case 1: // tnpc
2192 {
2193 TCGv r_tsptr;
2194
2195 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2196 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2197 offsetof(CPUState, tsptr));
2198 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2199 offsetof(trap_state, tnpc));
2200 tcg_temp_free(r_tsptr);
2201 }
2202 break;
2203 case 2: // tstate
2204 {
2205 TCGv r_tsptr;
2206
2207 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2208 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2209 offsetof(CPUState, tsptr));
2210 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2211 offsetof(trap_state, tstate));
2212 tcg_temp_free(r_tsptr);
2213 }
2214 break;
2215 case 3: // tt
2216 {
2217 TCGv r_tsptr;
2218
2219 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2220 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2221 offsetof(CPUState, tsptr));
2222 tcg_gen_ld_i32(cpu_tmp0, r_tsptr,
2223 offsetof(trap_state, tt));
2224 tcg_temp_free(r_tsptr);
2225 }
2226 break;
2227 case 4: // tick
2228 {
2229 TCGv r_tickptr;
2230
2231 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2232 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2233 offsetof(CPUState, tick));
2234 tcg_gen_helper_1_1(helper_tick_get_count, cpu_tmp0,
2235 r_tickptr);
2236 gen_movl_TN_reg(rd, cpu_tmp0);
2237 tcg_temp_free(r_tickptr);
2238 }
2239 break;
2240 case 5: // tba
2241 tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
2242 break;
2243 case 6: // pstate
2244 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2245 offsetof(CPUSPARCState, pstate));
2246 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2247 break;
2248 case 7: // tl
2249 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2250 offsetof(CPUSPARCState, tl));
2251 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2252 break;
2253 case 8: // pil
2254 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2255 offsetof(CPUSPARCState, psrpil));
2256 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2257 break;
2258 case 9: // cwp
2259 tcg_gen_helper_1_0(helper_rdcwp, cpu_tmp0);
2260 break;
2261 case 10: // cansave
2262 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2263 offsetof(CPUSPARCState, cansave));
2264 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2265 break;
2266 case 11: // canrestore
2267 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2268 offsetof(CPUSPARCState, canrestore));
2269 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2270 break;
2271 case 12: // cleanwin
2272 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2273 offsetof(CPUSPARCState, cleanwin));
2274 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2275 break;
2276 case 13: // otherwin
2277 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2278 offsetof(CPUSPARCState, otherwin));
2279 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2280 break;
2281 case 14: // wstate
2282 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2283 offsetof(CPUSPARCState, wstate));
2284 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2285 break;
2286 case 16: // UA2005 gl
2287 CHECK_IU_FEATURE(dc, GL);
2288 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2289 offsetof(CPUSPARCState, gl));
2290 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2291 break;
2292 case 26: // UA2005 strand status
2293 CHECK_IU_FEATURE(dc, HYPV);
2294 if (!hypervisor(dc))
2295 goto priv_insn;
2296 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_ssr);
2297 break;
2298 case 31: // ver
2299 tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
2300 break;
2301 case 15: // fq
2302 default:
2303 goto illegal_insn;
2304 }
2305 #else
2306 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
2307 #endif
2308 gen_movl_TN_reg(rd, cpu_tmp0);
2309 break;
2310 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2311 #ifdef TARGET_SPARC64
2312 save_state(dc, cpu_cond);
2313 tcg_gen_helper_0_0(helper_flushw);
2314 #else
2315 if (!supervisor(dc))
2316 goto priv_insn;
2317 gen_movl_TN_reg(rd, cpu_tbr);
2318 #endif
2319 break;
2320 #endif
2321 } else if (xop == 0x34) { /* FPU Operations */
2322 if (gen_trap_ifnofpu(dc, cpu_cond))
2323 goto jmp_insn;
2324 gen_op_clear_ieee_excp_and_FTT();
2325 rs1 = GET_FIELD(insn, 13, 17);
2326 rs2 = GET_FIELD(insn, 27, 31);
2327 xop = GET_FIELD(insn, 18, 26);
2328 switch (xop) {
2329 case 0x1: /* fmovs */
2330 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2331 break;
2332 case 0x5: /* fnegs */
2333 tcg_gen_helper_1_1(helper_fnegs, cpu_fpr[rd],
2334 cpu_fpr[rs2]);
2335 break;
2336 case 0x9: /* fabss */
2337 tcg_gen_helper_1_1(helper_fabss, cpu_fpr[rd],
2338 cpu_fpr[rs2]);
2339 break;
2340 case 0x29: /* fsqrts */
2341 CHECK_FPU_FEATURE(dc, FSQRT);
2342 gen_clear_float_exceptions();
2343 tcg_gen_helper_1_1(helper_fsqrts, cpu_tmp32,
2344 cpu_fpr[rs2]);
2345 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2346 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2347 break;
2348 case 0x2a: /* fsqrtd */
2349 CHECK_FPU_FEATURE(dc, FSQRT);
2350 gen_op_load_fpr_DT1(DFPREG(rs2));
2351 gen_clear_float_exceptions();
2352 tcg_gen_helper_0_0(helper_fsqrtd);
2353 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2354 gen_op_store_DT0_fpr(DFPREG(rd));
2355 break;
2356 case 0x2b: /* fsqrtq */
2357 CHECK_FPU_FEATURE(dc, FLOAT128);
2358 gen_op_load_fpr_QT1(QFPREG(rs2));
2359 gen_clear_float_exceptions();
2360 tcg_gen_helper_0_0(helper_fsqrtq);
2361 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2362 gen_op_store_QT0_fpr(QFPREG(rd));
2363 break;
2364 case 0x41: /* fadds */
2365 gen_clear_float_exceptions();
2366 tcg_gen_helper_1_2(helper_fadds, cpu_tmp32,
2367 cpu_fpr[rs1], cpu_fpr[rs2]);
2368 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2369 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2370 break;
                case 0x42: /* faddd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_faddd);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
2379 case 0x43: /* faddq */
2380 CHECK_FPU_FEATURE(dc, FLOAT128);
2381 gen_op_load_fpr_QT0(QFPREG(rs1));
2382 gen_op_load_fpr_QT1(QFPREG(rs2));
2383 gen_clear_float_exceptions();
2384 tcg_gen_helper_0_0(helper_faddq);
2385 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2386 gen_op_store_QT0_fpr(QFPREG(rd));
2387 break;
2388 case 0x45: /* fsubs */
2389 gen_clear_float_exceptions();
2390 tcg_gen_helper_1_2(helper_fsubs, cpu_tmp32,
2391 cpu_fpr[rs1], cpu_fpr[rs2]);
2392 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2393 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2394 break;
                case 0x46: /* fsubd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fsubd);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
2403 case 0x47: /* fsubq */
2404 CHECK_FPU_FEATURE(dc, FLOAT128);
2405 gen_op_load_fpr_QT0(QFPREG(rs1));
2406 gen_op_load_fpr_QT1(QFPREG(rs2));
2407 gen_clear_float_exceptions();
2408 tcg_gen_helper_0_0(helper_fsubq);
2409 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2410 gen_op_store_QT0_fpr(QFPREG(rd));
2411 break;
2412 case 0x49: /* fmuls */
2413 CHECK_FPU_FEATURE(dc, FMUL);
2414 gen_clear_float_exceptions();
2415 tcg_gen_helper_1_2(helper_fmuls, cpu_tmp32,
2416 cpu_fpr[rs1], cpu_fpr[rs2]);
2417 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2418 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2419 break;
2420 case 0x4a: /* fmuld */
2421 CHECK_FPU_FEATURE(dc, FMUL);
2422 gen_op_load_fpr_DT0(DFPREG(rs1));
2423 gen_op_load_fpr_DT1(DFPREG(rs2));
2424 gen_clear_float_exceptions();
2425 tcg_gen_helper_0_0(helper_fmuld);
2426 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2427 gen_op_store_DT0_fpr(DFPREG(rd));
2428 break;
2429 case 0x4b: /* fmulq */
2430 CHECK_FPU_FEATURE(dc, FLOAT128);
2431 CHECK_FPU_FEATURE(dc, FMUL);
2432 gen_op_load_fpr_QT0(QFPREG(rs1));
2433 gen_op_load_fpr_QT1(QFPREG(rs2));
2434 gen_clear_float_exceptions();
2435 tcg_gen_helper_0_0(helper_fmulq);
2436 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2437 gen_op_store_QT0_fpr(QFPREG(rd));
2438 break;
2439 case 0x4d: /* fdivs */
2440 gen_clear_float_exceptions();
2441 tcg_gen_helper_1_2(helper_fdivs, cpu_tmp32,
2442 cpu_fpr[rs1], cpu_fpr[rs2]);
2443 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2444 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2445 break;
                case 0x4e: /* fdivd */
                    gen_op_load_fpr_DT0(DFPREG(rs1));
                    gen_op_load_fpr_DT1(DFPREG(rs2));
                    gen_clear_float_exceptions();
                    tcg_gen_helper_0_0(helper_fdivd);
                    tcg_gen_helper_0_0(helper_check_ieee_exceptions);
                    gen_op_store_DT0_fpr(DFPREG(rd));
                    break;
2454 case 0x4f: /* fdivq */
2455 CHECK_FPU_FEATURE(dc, FLOAT128);
2456 gen_op_load_fpr_QT0(QFPREG(rs1));
2457 gen_op_load_fpr_QT1(QFPREG(rs2));
2458 gen_clear_float_exceptions();
2459 tcg_gen_helper_0_0(helper_fdivq);
2460 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2461 gen_op_store_QT0_fpr(QFPREG(rd));
2462 break;
2463 case 0x69: /* fsmuld */
2464 CHECK_FPU_FEATURE(dc, FSMULD);
2465 gen_clear_float_exceptions();
2466 tcg_gen_helper_0_2(helper_fsmuld, cpu_fpr[rs1],
2467 cpu_fpr[rs2]);
2468 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2469 gen_op_store_DT0_fpr(DFPREG(rd));
2470 break;
2471 case 0x6e: /* fdmulq */
2472 CHECK_FPU_FEATURE(dc, FLOAT128);
2473 gen_op_load_fpr_DT0(DFPREG(rs1));
2474 gen_op_load_fpr_DT1(DFPREG(rs2));
2475 gen_clear_float_exceptions();
2476 tcg_gen_helper_0_0(helper_fdmulq);
2477 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2478 gen_op_store_QT0_fpr(QFPREG(rd));
2479 break;
2480 case 0xc4: /* fitos */
2481 gen_clear_float_exceptions();
2482 tcg_gen_helper_1_1(helper_fitos, cpu_tmp32,
2483 cpu_fpr[rs2]);
2484 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2485 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2486 break;
2487 case 0xc6: /* fdtos */
2488 gen_op_load_fpr_DT1(DFPREG(rs2));
2489 gen_clear_float_exceptions();
2490 tcg_gen_helper_1_0(helper_fdtos, cpu_tmp32);
2491 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2492 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2493 break;
2494 case 0xc7: /* fqtos */
2495 CHECK_FPU_FEATURE(dc, FLOAT128);
2496 gen_op_load_fpr_QT1(QFPREG(rs2));
2497 gen_clear_float_exceptions();
2498 tcg_gen_helper_1_0(helper_fqtos, cpu_tmp32);
2499 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2500 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2501 break;
2502 case 0xc8: /* fitod */
2503 tcg_gen_helper_0_1(helper_fitod, cpu_fpr[rs2]);
2504 gen_op_store_DT0_fpr(DFPREG(rd));
2505 break;
2506 case 0xc9: /* fstod */
2507 tcg_gen_helper_0_1(helper_fstod, cpu_fpr[rs2]);
2508 gen_op_store_DT0_fpr(DFPREG(rd));
2509 break;
2510 case 0xcb: /* fqtod */
2511 CHECK_FPU_FEATURE(dc, FLOAT128);
2512 gen_op_load_fpr_QT1(QFPREG(rs2));
2513 gen_clear_float_exceptions();
2514 tcg_gen_helper_0_0(helper_fqtod);
2515 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2516 gen_op_store_DT0_fpr(DFPREG(rd));
2517 break;
2518 case 0xcc: /* fitoq */
2519 CHECK_FPU_FEATURE(dc, FLOAT128);
2520 tcg_gen_helper_0_1(helper_fitoq, cpu_fpr[rs2]);
2521 gen_op_store_QT0_fpr(QFPREG(rd));
2522 break;
2523 case 0xcd: /* fstoq */
2524 CHECK_FPU_FEATURE(dc, FLOAT128);
2525 tcg_gen_helper_0_1(helper_fstoq, cpu_fpr[rs2]);
2526 gen_op_store_QT0_fpr(QFPREG(rd));
2527 break;
2528 case 0xce: /* fdtoq */
2529 CHECK_FPU_FEATURE(dc, FLOAT128);
2530 gen_op_load_fpr_DT1(DFPREG(rs2));
2531 tcg_gen_helper_0_0(helper_fdtoq);
2532 gen_op_store_QT0_fpr(QFPREG(rd));
2533 break;
2534 case 0xd1: /* fstoi */
2535 gen_clear_float_exceptions();
2536 tcg_gen_helper_1_1(helper_fstoi, cpu_tmp32,
2537 cpu_fpr[rs2]);
2538 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2539 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2540 break;
2541 case 0xd2: /* fdtoi */
2542 gen_op_load_fpr_DT1(DFPREG(rs2));
2543 gen_clear_float_exceptions();
2544 tcg_gen_helper_1_0(helper_fdtoi, cpu_tmp32);
2545 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2546 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2547 break;
2548 case 0xd3: /* fqtoi */
2549 CHECK_FPU_FEATURE(dc, FLOAT128);
2550 gen_op_load_fpr_QT1(QFPREG(rs2));
2551 gen_clear_float_exceptions();
2552 tcg_gen_helper_1_0(helper_fqtoi, cpu_tmp32);
2553 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2554 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2555 break;
2556 #ifdef TARGET_SPARC64
2557 case 0x2: /* V9 fmovd */
2558 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)],
2559 cpu_fpr[DFPREG(rs2)]);
2560 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
2561 cpu_fpr[DFPREG(rs2) + 1]);
2562 break;
2563 case 0x3: /* V9 fmovq */
2564 CHECK_FPU_FEATURE(dc, FLOAT128);
2565 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)],
2566 cpu_fpr[QFPREG(rs2)]);
2567 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1],
2568 cpu_fpr[QFPREG(rs2) + 1]);
2569 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2],
2570 cpu_fpr[QFPREG(rs2) + 2]);
2571 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3],
2572 cpu_fpr[QFPREG(rs2) + 3]);
2573 break;
2574 case 0x6: /* V9 fnegd */
2575 gen_op_load_fpr_DT1(DFPREG(rs2));
2576 tcg_gen_helper_0_0(helper_fnegd);
2577 gen_op_store_DT0_fpr(DFPREG(rd));
2578 break;
2579 case 0x7: /* V9 fnegq */
2580 CHECK_FPU_FEATURE(dc, FLOAT128);
2581 gen_op_load_fpr_QT1(QFPREG(rs2));
2582 tcg_gen_helper_0_0(helper_fnegq);
2583 gen_op_store_QT0_fpr(QFPREG(rd));
2584 break;
2585 case 0xa: /* V9 fabsd */
2586 gen_op_load_fpr_DT1(DFPREG(rs2));
2587 tcg_gen_helper_0_0(helper_fabsd);
2588 gen_op_store_DT0_fpr(DFPREG(rd));
2589 break;
2590 case 0xb: /* V9 fabsq */
2591 CHECK_FPU_FEATURE(dc, FLOAT128);
2592 gen_op_load_fpr_QT1(QFPREG(rs2));
2593 tcg_gen_helper_0_0(helper_fabsq);
2594 gen_op_store_QT0_fpr(QFPREG(rd));
2595 break;
2596 case 0x81: /* V9 fstox */
2597 gen_clear_float_exceptions();
2598 tcg_gen_helper_0_1(helper_fstox, cpu_fpr[rs2]);
2599 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2600 gen_op_store_DT0_fpr(DFPREG(rd));
2601 break;
2602 case 0x82: /* V9 fdtox */
2603 gen_op_load_fpr_DT1(DFPREG(rs2));
2604 gen_clear_float_exceptions();
2605 tcg_gen_helper_0_0(helper_fdtox);
2606 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2607 gen_op_store_DT0_fpr(DFPREG(rd));
2608 break;
2609 case 0x83: /* V9 fqtox */
2610 CHECK_FPU_FEATURE(dc, FLOAT128);
2611 gen_op_load_fpr_QT1(QFPREG(rs2));
2612 gen_clear_float_exceptions();
2613 tcg_gen_helper_0_0(helper_fqtox);
2614 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2615 gen_op_store_DT0_fpr(DFPREG(rd));
2616 break;
2617 case 0x84: /* V9 fxtos */
2618 gen_op_load_fpr_DT1(DFPREG(rs2));
2619 gen_clear_float_exceptions();
2620 tcg_gen_helper_1_0(helper_fxtos, cpu_tmp32);
2621 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2622 tcg_gen_mov_i32(cpu_fpr[rd], cpu_tmp32);
2623 break;
2624 case 0x88: /* V9 fxtod */
2625 gen_op_load_fpr_DT1(DFPREG(rs2));
2626 gen_clear_float_exceptions();
2627 tcg_gen_helper_0_0(helper_fxtod);
2628 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2629 gen_op_store_DT0_fpr(DFPREG(rd));
2630 break;
2631 case 0x8c: /* V9 fxtoq */
2632 CHECK_FPU_FEATURE(dc, FLOAT128);
2633 gen_op_load_fpr_DT1(DFPREG(rs2));
2634 gen_clear_float_exceptions();
2635 tcg_gen_helper_0_0(helper_fxtoq);
2636 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2637 gen_op_store_QT0_fpr(QFPREG(rd));
2638 break;
2639 #endif
2640 default:
2641 goto illegal_insn;
2642 }
2643 } else if (xop == 0x35) { /* FPU Operations */
2644 #ifdef TARGET_SPARC64
2645 int cond;
2646 #endif
2647 if (gen_trap_ifnofpu(dc, cpu_cond))
2648 goto jmp_insn;
2649 gen_op_clear_ieee_excp_and_FTT();
2650 rs1 = GET_FIELD(insn, 13, 17);
2651 rs2 = GET_FIELD(insn, 27, 31);
2652 xop = GET_FIELD(insn, 18, 26);
2653 #ifdef TARGET_SPARC64
2654 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2655 int l1;
2656
2657 l1 = gen_new_label();
2658 cond = GET_FIELD_SP(insn, 14, 17);
2659 cpu_src1 = get_src1(insn, cpu_src1);
2660 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2661 0, l1);
2662 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
2663 gen_set_label(l1);
2664 break;
2665 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2666 int l1;
2667
2668 l1 = gen_new_label();
2669 cond = GET_FIELD_SP(insn, 14, 17);
2670 cpu_src1 = get_src1(insn, cpu_src1);
2671 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2672 0, l1);
2673 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)]);
2674 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], cpu_fpr[DFPREG(rs2) + 1]);
2675 gen_set_label(l1);
2676 break;
2677 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2678 int l1;
2679
2680 CHECK_FPU_FEATURE(dc, FLOAT128);
2681 l1 = gen_new_label();
2682 cond = GET_FIELD_SP(insn, 14, 17);
2683 cpu_src1 = get_src1(insn, cpu_src1);
2684 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2685 0, l1);
2686 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], cpu_fpr[QFPREG(rs2)]);
2687 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], cpu_fpr[QFPREG(rs2) + 1]);
2688 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], cpu_fpr[QFPREG(rs2) + 2]);
2689 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], cpu_fpr[QFPREG(rs2) + 3]);
2690 gen_set_label(l1);
2691 break;
2692 }
2693 #endif
2694 switch (xop) {
2695 #ifdef TARGET_SPARC64
2696 #define FMOVSCC(fcc) \
2697 { \
2698 TCGv r_cond; \
2699 int l1; \
2700 \
2701 l1 = gen_new_label(); \
2702 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2703 cond = GET_FIELD_SP(insn, 14, 17); \
2704 gen_fcond(r_cond, fcc, cond); \
2705 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2706 0, l1); \
2707 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2708 gen_set_label(l1); \
2709 tcg_temp_free(r_cond); \
2710 }
2711 #define FMOVDCC(fcc) \
2712 { \
2713 TCGv r_cond; \
2714 int l1; \
2715 \
2716 l1 = gen_new_label(); \
2717 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2718 cond = GET_FIELD_SP(insn, 14, 17); \
2719 gen_fcond(r_cond, fcc, cond); \
2720 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2721 0, l1); \
2722 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2723 cpu_fpr[DFPREG(rs2)]); \
2724 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2725 cpu_fpr[DFPREG(rs2) + 1]); \
2726 gen_set_label(l1); \
2727 tcg_temp_free(r_cond); \
2728 }
2729 #define FMOVQCC(fcc) \
2730 { \
2731 TCGv r_cond; \
2732 int l1; \
2733 \
2734 l1 = gen_new_label(); \
2735 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2736 cond = GET_FIELD_SP(insn, 14, 17); \
2737 gen_fcond(r_cond, fcc, cond); \
2738 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2739 0, l1); \
2740 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2741 cpu_fpr[QFPREG(rs2)]); \
2742 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2743 cpu_fpr[QFPREG(rs2) + 1]); \
2744 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2745 cpu_fpr[QFPREG(rs2) + 2]); \
2746 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2747 cpu_fpr[QFPREG(rs2) + 3]); \
2748 gen_set_label(l1); \
2749 tcg_temp_free(r_cond); \
2750 }
2751 case 0x001: /* V9 fmovscc %fcc0 */
2752 FMOVSCC(0);
2753 break;
2754 case 0x002: /* V9 fmovdcc %fcc0 */
2755 FMOVDCC(0);
2756 break;
2757 case 0x003: /* V9 fmovqcc %fcc0 */
2758 CHECK_FPU_FEATURE(dc, FLOAT128);
2759 FMOVQCC(0);
2760 break;
2761 case 0x041: /* V9 fmovscc %fcc1 */
2762 FMOVSCC(1);
2763 break;
2764 case 0x042: /* V9 fmovdcc %fcc1 */
2765 FMOVDCC(1);
2766 break;
2767 case 0x043: /* V9 fmovqcc %fcc1 */
2768 CHECK_FPU_FEATURE(dc, FLOAT128);
2769 FMOVQCC(1);
2770 break;
2771 case 0x081: /* V9 fmovscc %fcc2 */
2772 FMOVSCC(2);
2773 break;
2774 case 0x082: /* V9 fmovdcc %fcc2 */
2775 FMOVDCC(2);
2776 break;
2777 case 0x083: /* V9 fmovqcc %fcc2 */
2778 CHECK_FPU_FEATURE(dc, FLOAT128);
2779 FMOVQCC(2);
2780 break;
2781 case 0x0c1: /* V9 fmovscc %fcc3 */
2782 FMOVSCC(3);
2783 break;
2784 case 0x0c2: /* V9 fmovdcc %fcc3 */
2785 FMOVDCC(3);
2786 break;
2787 case 0x0c3: /* V9 fmovqcc %fcc3 */
2788 CHECK_FPU_FEATURE(dc, FLOAT128);
2789 FMOVQCC(3);
2790 break;
2791 #undef FMOVSCC
2792 #undef FMOVDCC
2793 #undef FMOVQCC
2794 #define FMOVCC(size_FDQ, icc) \
2795 { \
2796 TCGv r_cond; \
2797 int l1; \
2798 \
2799 l1 = gen_new_label(); \
2800 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2801 cond = GET_FIELD_SP(insn, 14, 17); \
2802 gen_cond(r_cond, icc, cond); \
2803 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2804 0, l1); \
2805 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2806 (glue(size_FDQ, FPREG(rs2))); \
2807 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2808 (glue(size_FDQ, FPREG(rd))); \
2809 gen_set_label(l1); \
2810 tcg_temp_free(r_cond); \
2811 }
2812 #define FMOVSCC(icc) \
2813 { \
2814 TCGv r_cond; \
2815 int l1; \
2816 \
2817 l1 = gen_new_label(); \
2818 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2819 cond = GET_FIELD_SP(insn, 14, 17); \
2820 gen_cond(r_cond, icc, cond); \
2821 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2822 0, l1); \
2823 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]); \
2824 gen_set_label(l1); \
2825 tcg_temp_free(r_cond); \
2826 }
2827 #define FMOVDCC(icc) \
2828 { \
2829 TCGv r_cond; \
2830 int l1; \
2831 \
2832 l1 = gen_new_label(); \
2833 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2834 cond = GET_FIELD_SP(insn, 14, 17); \
2835 gen_cond(r_cond, icc, cond); \
2836 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2837 0, l1); \
2838 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], \
2839 cpu_fpr[DFPREG(rs2)]); \
2840 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1], \
2841 cpu_fpr[DFPREG(rs2) + 1]); \
2842 gen_set_label(l1); \
2843 tcg_temp_free(r_cond); \
2844 }
2845 #define FMOVQCC(icc) \
2846 { \
2847 TCGv r_cond; \
2848 int l1; \
2849 \
2850 l1 = gen_new_label(); \
2851 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2852 cond = GET_FIELD_SP(insn, 14, 17); \
2853 gen_cond(r_cond, icc, cond); \
2854 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2855 0, l1); \
2856 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd)], \
2857 cpu_fpr[QFPREG(rs2)]); \
2858 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 1], \
2859 cpu_fpr[QFPREG(rs2) + 1]); \
2860 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 2], \
2861 cpu_fpr[QFPREG(rs2) + 2]); \
2862 tcg_gen_mov_i32(cpu_fpr[QFPREG(rd) + 3], \
2863 cpu_fpr[QFPREG(rs2) + 3]); \
2864 gen_set_label(l1); \
2865 tcg_temp_free(r_cond); \
2866 }
2867
2868 case 0x101: /* V9 fmovscc %icc */
2869 FMOVSCC(0);
2870 break;
2871 case 0x102: /* V9 fmovdcc %icc */
2872 FMOVDCC(0);
2873 case 0x103: /* V9 fmovqcc %icc */
2874 CHECK_FPU_FEATURE(dc, FLOAT128);
2875 FMOVQCC(0);
2876 break;
2877 case 0x181: /* V9 fmovscc %xcc */
2878 FMOVSCC(1);
2879 break;
2880 case 0x182: /* V9 fmovdcc %xcc */
2881 FMOVDCC(1);
2882 break;
2883 case 0x183: /* V9 fmovqcc %xcc */
2884 CHECK_FPU_FEATURE(dc, FLOAT128);
2885 FMOVQCC(1);
2886 break;
2887 #undef FMOVSCC
2888 #undef FMOVDCC
2889 #undef FMOVQCC
2890 #endif
2891 case 0x51: /* fcmps, V9 %fcc */
2892 gen_op_fcmps(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2893 break;
2894 case 0x52: /* fcmpd, V9 %fcc */
2895 gen_op_load_fpr_DT0(DFPREG(rs1));
2896 gen_op_load_fpr_DT1(DFPREG(rs2));
2897 gen_op_fcmpd(rd & 3);
2898 break;
2899 case 0x53: /* fcmpq, V9 %fcc */
2900 CHECK_FPU_FEATURE(dc, FLOAT128);
2901 gen_op_load_fpr_QT0(QFPREG(rs1));
2902 gen_op_load_fpr_QT1(QFPREG(rs2));
2903 gen_op_fcmpq(rd & 3);
2904 break;
2905 case 0x55: /* fcmpes, V9 %fcc */
2906 gen_op_fcmpes(rd & 3, cpu_fpr[rs1], cpu_fpr[rs2]);
2907 break;
2908 case 0x56: /* fcmped, V9 %fcc */
2909 gen_op_load_fpr_DT0(DFPREG(rs1));
2910 gen_op_load_fpr_DT1(DFPREG(rs2));
2911 gen_op_fcmped(rd & 3);
2912 break;
2913 case 0x57: /* fcmpeq, V9 %fcc */
2914 CHECK_FPU_FEATURE(dc, FLOAT128);
2915 gen_op_load_fpr_QT0(QFPREG(rs1));
2916 gen_op_load_fpr_QT1(QFPREG(rs2));
2917 gen_op_fcmpeq(rd & 3);
2918 break;
2919 default:
2920 goto illegal_insn;
2921 }
2922 } else if (xop == 0x2) {
2923 // clr/mov shortcut
2924
2925 rs1 = GET_FIELD(insn, 13, 17);
2926 if (rs1 == 0) {
2927 // or %g0, x, y -> mov T0, x; mov y, T0
2928 if (IS_IMM) { /* immediate */
2929 TCGv r_const;
2930
2931 rs2 = GET_FIELDs(insn, 19, 31);
2932 r_const = tcg_const_tl((int)rs2);
2933 gen_movl_TN_reg(rd, r_const);
2934 tcg_temp_free(r_const);
2935 } else { /* register */
2936 rs2 = GET_FIELD(insn, 27, 31);
2937 gen_movl_reg_TN(rs2, cpu_dst);
2938 gen_movl_TN_reg(rd, cpu_dst);
2939 }
2940 } else {
2941 cpu_src1 = get_src1(insn, cpu_src1);
2942 if (IS_IMM) { /* immediate */
2943 rs2 = GET_FIELDs(insn, 19, 31);
2944 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
2945 gen_movl_TN_reg(rd, cpu_dst);
2946 } else { /* register */
2947 // or x, %g0, y -> mov T1, x; mov y, T1
2948 rs2 = GET_FIELD(insn, 27, 31);
2949 if (rs2 != 0) {
2950 gen_movl_reg_TN(rs2, cpu_src2);
2951 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2952 gen_movl_TN_reg(rd, cpu_dst);
2953 } else
2954 gen_movl_TN_reg(rd, cpu_src1);
2955 }
2956 }
2957 #ifdef TARGET_SPARC64
2958 } else if (xop == 0x25) { /* sll, V9 sllx */
2959 cpu_src1 = get_src1(insn, cpu_src1);
2960 if (IS_IMM) { /* immediate */
2961 rs2 = GET_FIELDs(insn, 20, 31);
2962 if (insn & (1 << 12)) {
2963 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2964 } else {
2965 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
2966 }
2967 } else { /* register */
2968 rs2 = GET_FIELD(insn, 27, 31);
2969 gen_movl_reg_TN(rs2, cpu_src2);
2970 if (insn & (1 << 12)) {
2971 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2972 } else {
2973 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2974 }
2975 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2976 }
2977 gen_movl_TN_reg(rd, cpu_dst);
2978 } else if (xop == 0x26) { /* srl, V9 srlx */
2979 cpu_src1 = get_src1(insn, cpu_src1);
2980 if (IS_IMM) { /* immediate */
2981 rs2 = GET_FIELDs(insn, 20, 31);
2982 if (insn & (1 << 12)) {
2983 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2984 } else {
2985 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2986 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2987 }
2988 } else { /* register */
2989 rs2 = GET_FIELD(insn, 27, 31);
2990 gen_movl_reg_TN(rs2, cpu_src2);
2991 if (insn & (1 << 12)) {
2992 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2993 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2994 } else {
2995 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2996 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2997 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2998 }
2999 }
3000 gen_movl_TN_reg(rd, cpu_dst);
3001 } else if (xop == 0x27) { /* sra, V9 srax */
3002 cpu_src1 = get_src1(insn, cpu_src1);
3003 if (IS_IMM) { /* immediate */
3004 rs2 = GET_FIELDs(insn, 20, 31);
3005 if (insn & (1 << 12)) {
3006 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
3007 } else {
3008 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3009 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3010 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
3011 }
3012 } else { /* register */
3013 rs2 = GET_FIELD(insn, 27, 31);
3014 gen_movl_reg_TN(rs2, cpu_src2);
3015 if (insn & (1 << 12)) {
3016 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3017 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3018 } else {
3019 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3020 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3021 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
3022 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3023 }
3024 }
3025 gen_movl_TN_reg(rd, cpu_dst);
3026 #endif
3027 } else if (xop < 0x36) {
3028 cpu_src1 = get_src1(insn, cpu_src1);
3029 cpu_src2 = get_src2(insn, cpu_src2);
3030 if (xop < 0x20) {
3031 switch (xop & ~0x10) {
3032 case 0x0:
3033 if (xop & 0x10)
3034 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3035 else
3036 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3037 break;
3038 case 0x1:
3039 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3040 if (xop & 0x10)
3041 gen_op_logic_cc(cpu_dst);
3042 break;
3043 case 0x2:
3044 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3045 if (xop & 0x10)
3046 gen_op_logic_cc(cpu_dst);
3047 break;
3048 case 0x3:
3049 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3050 if (xop & 0x10)
3051 gen_op_logic_cc(cpu_dst);
3052 break;
3053 case 0x4:
3054 if (xop & 0x10)
3055 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3056 else
3057 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3058 break;
3059 case 0x5:
3060 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3061 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
3062 if (xop & 0x10)
3063 gen_op_logic_cc(cpu_dst);
3064 break;
3065 case 0x6:
3066 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3067 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
3068 if (xop & 0x10)
3069 gen_op_logic_cc(cpu_dst);
3070 break;
3071 case 0x7:
3072 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3073 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3074 if (xop & 0x10)
3075 gen_op_logic_cc(cpu_dst);
3076 break;
3077 case 0x8:
3078 if (xop & 0x10)
3079 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3080 else {
3081 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3082 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3083 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3084 }
3085 break;
3086 #ifdef TARGET_SPARC64
3087 case 0x9: /* V9 mulx */
3088 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3089 break;
3090 #endif
3091 case 0xa:
3092 CHECK_IU_FEATURE(dc, MUL);
3093 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3094 if (xop & 0x10)
3095 gen_op_logic_cc(cpu_dst);
3096 break;
3097 case 0xb:
3098 CHECK_IU_FEATURE(dc, MUL);
3099 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3100 if (xop & 0x10)
3101 gen_op_logic_cc(cpu_dst);
3102 break;
3103 case 0xc:
3104 if (xop & 0x10)
3105 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3106 else {
3107 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3108 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3109 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3110 }
3111 break;
3112 #ifdef TARGET_SPARC64
3113 case 0xd: /* V9 udivx */
3114 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3115 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3116 gen_trap_ifdivzero_tl(cpu_cc_src2);
3117 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3118 break;
3119 #endif
3120 case 0xe:
3121 CHECK_IU_FEATURE(dc, DIV);
3122 tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
3123 cpu_src2);
3124 if (xop & 0x10)
3125 gen_op_div_cc(cpu_dst);
3126 break;
3127 case 0xf:
3128 CHECK_IU_FEATURE(dc, DIV);
3129 tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
3130 cpu_src2);
3131 if (xop & 0x10)
3132 gen_op_div_cc(cpu_dst);
3133 break;
3134 default:
3135 goto illegal_insn;
3136 }
3137 gen_movl_TN_reg(rd, cpu_dst);
3138 } else {
3139 switch (xop) {
3140 case 0x20: /* taddcc */
3141 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3142 gen_movl_TN_reg(rd, cpu_dst);
3143 break;
3144 case 0x21: /* tsubcc */
3145 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3146 gen_movl_TN_reg(rd, cpu_dst);
3147 break;
3148 case 0x22: /* taddcctv */
3149 save_state(dc, cpu_cond);
3150 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3151 gen_movl_TN_reg(rd, cpu_dst);
3152 break;
3153 case 0x23: /* tsubcctv */
3154 save_state(dc, cpu_cond);
3155 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3156 gen_movl_TN_reg(rd, cpu_dst);
3157 break;
3158 case 0x24: /* mulscc */
3159 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3160 gen_movl_TN_reg(rd, cpu_dst);
3161 break;
3162 #ifndef TARGET_SPARC64
3163 case 0x25: /* sll */
3164 if (IS_IMM) { /* immediate */
3165 rs2 = GET_FIELDs(insn, 20, 31);
3166 tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3167 } else { /* register */
3168 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3169 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3170 }
3171 gen_movl_TN_reg(rd, cpu_dst);
3172 break;
3173 case 0x26: /* srl */
3174 if (IS_IMM) { /* immediate */
3175 rs2 = GET_FIELDs(insn, 20, 31);
3176 tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3177 } else { /* register */
3178 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3179 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3180 }
3181 gen_movl_TN_reg(rd, cpu_dst);
3182 break;
3183 case 0x27: /* sra */
3184 if (IS_IMM) { /* immediate */
3185 rs2 = GET_FIELDs(insn, 20, 31);
3186 tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3187 } else { /* register */
3188 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3189 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3190 }
3191 gen_movl_TN_reg(rd, cpu_dst);
3192 break;
3193 #endif
3194 case 0x30:
3195 {
3196 switch(rd) {
3197 case 0: /* wry */
3198 tcg_gen_xor_tl(cpu_y, cpu_src1, cpu_src2);
3199 break;
3200 #ifndef TARGET_SPARC64
3201 case 0x01 ... 0x0f: /* undefined in the
3202 SPARCv8 manual, nop
3203 on the microSPARC
3204 II */
3205 case 0x10 ... 0x1f: /* implementation-dependent
3206 in the SPARCv8
3207 manual, nop on the
3208 microSPARC II */
3209 break;
3210 #else
3211 case 0x2: /* V9 wrccr */
3212 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3213 tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
3214 break;
3215 case 0x3: /* V9 wrasi */
3216 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3217 tcg_gen_trunc_tl_i32(cpu_asi, cpu_dst);
3218 break;
3219 case 0x6: /* V9 wrfprs */
3220 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3221 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_dst);
3222 save_state(dc, cpu_cond);
3223 gen_op_next_insn();
3224 tcg_gen_exit_tb(0);
3225 dc->is_br = 1;
3226 break;
3227 case 0xf: /* V9 sir, nop if user */
3228 #if !defined(CONFIG_USER_ONLY)
3229 if (supervisor(dc))
3230 ; // XXX
3231 #endif
3232 break;
3233 case 0x13: /* Graphics Status */
3234 if (gen_trap_ifnofpu(dc, cpu_cond))
3235 goto jmp_insn;
3236 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
3237 break;
3238 case 0x17: /* Tick compare */
3239 #if !defined(CONFIG_USER_ONLY)
3240 if (!supervisor(dc))
3241 goto illegal_insn;
3242 #endif
3243 {
3244 TCGv r_tickptr;
3245
3246 tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
3247 cpu_src2);
3248 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3249 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3250 offsetof(CPUState, tick));
3251 tcg_gen_helper_0_2(helper_tick_set_limit,
3252 r_tickptr, cpu_tick_cmpr);
3253 tcg_temp_free(r_tickptr);
3254 }
3255 break;
3256 case 0x18: /* System tick */
3257 #if !defined(CONFIG_USER_ONLY)
3258 if (!supervisor(dc))
3259 goto illegal_insn;
3260 #endif
3261 {
3262 TCGv r_tickptr;
3263
3264 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3265 cpu_src2);
3266 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3267 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3268 offsetof(CPUState, stick));
3269 tcg_gen_helper_0_2(helper_tick_set_count,
3270 r_tickptr, cpu_dst);
3271 tcg_temp_free(r_tickptr);
3272 }
3273 break;
3274 case 0x19: /* System tick compare */
3275 #if !defined(CONFIG_USER_ONLY)
3276 if (!supervisor(dc))
3277 goto illegal_insn;
3278 #endif
3279 {
3280 TCGv r_tickptr;
3281
3282 tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
3283 cpu_src2);
3284 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3285 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3286 offsetof(CPUState, stick));
3287 tcg_gen_helper_0_2(helper_tick_set_limit,
3288 r_tickptr, cpu_stick_cmpr);
3289 tcg_temp_free(r_tickptr);
3290 }
3291 break;
3292
3293 case 0x10: /* Performance Control */
3294 case 0x11: /* Performance Instrumentation
3295 Counter */
3296 case 0x12: /* Dispatch Control */
3297 case 0x14: /* Softint set */
3298 case 0x15: /* Softint clear */
3299 case 0x16: /* Softint write */
3300 #endif
3301 default:
3302 goto illegal_insn;
3303 }
3304 }
3305 break;
3306 #if !defined(CONFIG_USER_ONLY)
3307 case 0x31: /* wrpsr, V9 saved, restored */
3308 {
3309 if (!supervisor(dc))
3310 goto priv_insn;
3311 #ifdef TARGET_SPARC64
3312 switch (rd) {
3313 case 0:
3314 tcg_gen_helper_0_0(helper_saved);
3315 break;
3316 case 1:
3317 tcg_gen_helper_0_0(helper_restored);
3318 break;
3319 case 2: /* UA2005 allclean */
3320 case 3: /* UA2005 otherw */
3321 case 4: /* UA2005 normalw */
3322 case 5: /* UA2005 invalw */
3323 // XXX
3324 default:
3325 goto illegal_insn;
3326 }
3327 #else
3328 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3329 tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
3330 save_state(dc, cpu_cond);
3331 gen_op_next_insn();
3332 tcg_gen_exit_tb(0);
3333 dc->is_br = 1;
3334 #endif
3335 }
3336 break;
3337 case 0x32: /* wrwim, V9 wrpr */
3338 {
3339 if (!supervisor(dc))
3340 goto priv_insn;
3341 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3342 #ifdef TARGET_SPARC64
3343 switch (rd) {
3344 case 0: // tpc
3345 {
3346 TCGv r_tsptr;
3347
3348 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3349 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3350 offsetof(CPUState, tsptr));
3351 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3352 offsetof(trap_state, tpc));
3353 tcg_temp_free(r_tsptr);
3354 }
3355 break;
3356 case 1: // tnpc
3357 {
3358 TCGv r_tsptr;
3359
3360 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3361 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3362 offsetof(CPUState, tsptr));
3363 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3364 offsetof(trap_state, tnpc));
3365 tcg_temp_free(r_tsptr);
3366 }
3367 break;
3368 case 2: // tstate
3369 {
3370 TCGv r_tsptr;
3371
3372 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3373 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3374 offsetof(CPUState, tsptr));
3375 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3376 offsetof(trap_state,
3377 tstate));
3378 tcg_temp_free(r_tsptr);
3379 }
3380 break;
3381 case 3: // tt
3382 {
3383 TCGv r_tsptr;
3384
3385 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3386 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3387 offsetof(CPUState, tsptr));
3388 tcg_gen_st_i32(cpu_tmp0, r_tsptr,
3389 offsetof(trap_state, tt));
3390 tcg_temp_free(r_tsptr);
3391 }
3392 break;
3393 case 4: // tick
3394 {
3395 TCGv r_tickptr;
3396
3397 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3398 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3399 offsetof(CPUState, tick));
3400 tcg_gen_helper_0_2(helper_tick_set_count,
3401 r_tickptr, cpu_tmp0);
3402 tcg_temp_free(r_tickptr);
3403 }
3404 break;
3405 case 5: // tba
3406 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
3407 break;
3408 case 6: // pstate
3409 save_state(dc, cpu_cond);
3410 tcg_gen_helper_0_1(helper_wrpstate, cpu_tmp0);
3411 gen_op_next_insn();
3412 tcg_gen_exit_tb(0);
3413 dc->is_br = 1;
3414 break;
3415 case 7: // tl
3416 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3417 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3418 offsetof(CPUSPARCState, tl));
3419 break;
3420 case 8: // pil
3421 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3422 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3423 offsetof(CPUSPARCState,
3424 psrpil));
3425 break;
3426 case 9: // cwp
3427 tcg_gen_helper_0_1(helper_wrcwp, cpu_tmp0);
3428 break;
3429 case 10: // cansave
3430 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3431 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3432 offsetof(CPUSPARCState,
3433 cansave));
3434 break;
3435 case 11: // canrestore
3436 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3437 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3438 offsetof(CPUSPARCState,
3439 canrestore));
3440 break;
3441 case 12: // cleanwin
3442 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3443 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3444 offsetof(CPUSPARCState,
3445 cleanwin));
3446 break;
3447 case 13: // otherwin
3448 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3449 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3450 offsetof(CPUSPARCState,
3451 otherwin));
3452 break;
3453 case 14: // wstate
3454 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3455 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3456 offsetof(CPUSPARCState,
3457 wstate));
3458 break;
3459 case 16: // UA2005 gl
3460 CHECK_IU_FEATURE(dc, GL);
3461 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3462 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3463 offsetof(CPUSPARCState, gl));
3464 break;
3465 case 26: // UA2005 strand status
3466 CHECK_IU_FEATURE(dc, HYPV);
3467 if (!hypervisor(dc))
3468 goto priv_insn;
3469 tcg_gen_trunc_tl_i32(cpu_ssr, cpu_tmp0);
3470 break;
3471 default:
3472 goto illegal_insn;
3473 }
3474 #else
3475 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3476 if (dc->def->nwindows != 32)
3477 tcg_gen_andi_tl(cpu_tmp32, cpu_tmp32,
3478 (1 << dc->def->nwindows) - 1);
3479 tcg_gen_mov_i32(cpu_wim, cpu_tmp32);
3480 #endif
3481 }
3482 break;
3483 case 0x33: /* wrtbr, UA2005 wrhpr */
3484 {
3485 #ifndef TARGET_SPARC64
3486 if (!supervisor(dc))
3487 goto priv_insn;
3488 tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
3489 #else
3490 CHECK_IU_FEATURE(dc, HYPV);
3491 if (!hypervisor(dc))
3492 goto priv_insn;
3493 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3494 switch (rd) {
3495 case 0: // hpstate
3496 // XXX gen_op_wrhpstate();
3497 save_state(dc, cpu_cond);
3498 gen_op_next_insn();
3499 tcg_gen_exit_tb(0);
3500 dc->is_br = 1;
3501 break;
3502 case 1: // htstate
3503 // XXX gen_op_wrhtstate();
3504 break;
3505 case 3: // hintp
3506 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
3507 break;
3508 case 5: // htba
3509 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
3510 break;
3511 case 31: // hstick_cmpr
3512 {
3513 TCGv r_tickptr;
3514
3515 tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
3516 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3517 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3518 offsetof(CPUState, hstick));
3519 tcg_gen_helper_0_2(helper_tick_set_limit,
3520 r_tickptr, cpu_hstick_cmpr);
3521 tcg_temp_free(r_tickptr);
3522 }
3523 break;
3524 case 6: // hver readonly
3525 default:
3526 goto illegal_insn;
3527 }
3528 #endif
3529 }
3530 break;
3531 #endif
3532 #ifdef TARGET_SPARC64
3533 case 0x2c: /* V9 movcc */
3534 {
3535 int cc = GET_FIELD_SP(insn, 11, 12);
3536 int cond = GET_FIELD_SP(insn, 14, 17);
3537 TCGv r_cond;
3538 int l1;
3539
3540 r_cond = tcg_temp_new(TCG_TYPE_TL);
3541 if (insn & (1 << 18)) {
3542 if (cc == 0)
3543 gen_cond(r_cond, 0, cond);
3544 else if (cc == 2)
3545 gen_cond(r_cond, 1, cond);
3546 else
3547 goto illegal_insn;
3548 } else {
3549 gen_fcond(r_cond, cc, cond);
3550 }
3551
3552 l1 = gen_new_label();
3553
3554 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3555 if (IS_IMM) { /* immediate */
3556 TCGv r_const;
3557
3558 rs2 = GET_FIELD_SPs(insn, 0, 10);
3559 r_const = tcg_const_tl((int)rs2);
3560 gen_movl_TN_reg(rd, r_const);
3561 tcg_temp_free(r_const);
3562 } else {
3563 rs2 = GET_FIELD_SP(insn, 0, 4);
3564 gen_movl_reg_TN(rs2, cpu_tmp0);
3565 gen_movl_TN_reg(rd, cpu_tmp0);
3566 }
3567 gen_set_label(l1);
3568 tcg_temp_free(r_cond);
3569 break;
3570 }
3571 case 0x2d: /* V9 sdivx */
3572 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3573 gen_movl_TN_reg(rd, cpu_dst);
3574 break;
3575 case 0x2e: /* V9 popc */
3576 {
3577 cpu_src2 = get_src2(insn, cpu_src2);
3578 tcg_gen_helper_1_1(helper_popc, cpu_dst,
3579 cpu_src2);
3580 gen_movl_TN_reg(rd, cpu_dst);
3581 }
3582 case 0x2f: /* V9 movr */
3583 {
3584 int cond = GET_FIELD_SP(insn, 10, 12);
3585 int l1;
3586
3587 cpu_src1 = get_src1(insn, cpu_src1);
3588
3589 l1 = gen_new_label();
3590
3591 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3592 cpu_src1, 0, l1);
3593 if (IS_IMM) { /* immediate */
3594 TCGv r_const;
3595
3596 rs2 = GET_FIELD_SPs(insn, 0, 9);
3597 r_const = tcg_const_tl((int)rs2);
3598 gen_movl_TN_reg(rd, r_const);
3599 tcg_temp_free(r_const);
3600 } else {
3601 rs2 = GET_FIELD_SP(insn, 0, 4);
3602 gen_movl_reg_TN(rs2, cpu_tmp0);
3603 gen_movl_TN_reg(rd, cpu_tmp0);
3604 }
3605 gen_set_label(l1);
3606 break;
3607 }
3608 #endif
3609 default:
3610 goto illegal_insn;
3611 }
3612 }
3613 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3614 #ifdef TARGET_SPARC64
3615 int opf = GET_FIELD_SP(insn, 5, 13);
3616 rs1 = GET_FIELD(insn, 13, 17);
3617 rs2 = GET_FIELD(insn, 27, 31);
3618 if (gen_trap_ifnofpu(dc, cpu_cond))
3619 goto jmp_insn;
3620
3621 switch (opf) {
3622 case 0x000: /* VIS I edge8cc */
3623 case 0x001: /* VIS II edge8n */
3624 case 0x002: /* VIS I edge8lcc */
3625 case 0x003: /* VIS II edge8ln */
3626 case 0x004: /* VIS I edge16cc */
3627 case 0x005: /* VIS II edge16n */
3628 case 0x006: /* VIS I edge16lcc */
3629 case 0x007: /* VIS II edge16ln */
3630 case 0x008: /* VIS I edge32cc */
3631 case 0x009: /* VIS II edge32n */
3632 case 0x00a: /* VIS I edge32lcc */
3633 case 0x00b: /* VIS II edge32ln */
3634 // XXX
3635 goto illegal_insn;
3636 case 0x010: /* VIS I array8 */
3637 CHECK_FPU_FEATURE(dc, VIS1);
3638 cpu_src1 = get_src1(insn, cpu_src1);
3639 gen_movl_reg_TN(rs2, cpu_src2);
3640 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3641 cpu_src2);
3642 gen_movl_TN_reg(rd, cpu_dst);
3643 break;
3644 case 0x012: /* VIS I array16 */
3645 CHECK_FPU_FEATURE(dc, VIS1);
3646 cpu_src1 = get_src1(insn, cpu_src1);
3647 gen_movl_reg_TN(rs2, cpu_src2);
3648 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3649 cpu_src2);
3650 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3651 gen_movl_TN_reg(rd, cpu_dst);
3652 break;
3653 case 0x014: /* VIS I array32 */
3654 CHECK_FPU_FEATURE(dc, VIS1);
3655 cpu_src1 = get_src1(insn, cpu_src1);
3656 gen_movl_reg_TN(rs2, cpu_src2);
3657 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3658 cpu_src2);
3659 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3660 gen_movl_TN_reg(rd, cpu_dst);
3661 break;
3662 case 0x018: /* VIS I alignaddr */
3663 CHECK_FPU_FEATURE(dc, VIS1);
3664 cpu_src1 = get_src1(insn, cpu_src1);
3665 gen_movl_reg_TN(rs2, cpu_src2);
3666 tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
3667 cpu_src2);
3668 gen_movl_TN_reg(rd, cpu_dst);
3669 break;
3670 case 0x019: /* VIS II bmask */
3671 case 0x01a: /* VIS I alignaddrl */
3672 // XXX
3673 goto illegal_insn;
3674 case 0x020: /* VIS I fcmple16 */
3675 CHECK_FPU_FEATURE(dc, VIS1);
3676 gen_op_load_fpr_DT0(DFPREG(rs1));
3677 gen_op_load_fpr_DT1(DFPREG(rs2));
3678 tcg_gen_helper_0_0(helper_fcmple16);
3679 gen_op_store_DT0_fpr(DFPREG(rd));
3680 break;
3681 case 0x022: /* VIS I fcmpne16 */
3682 CHECK_FPU_FEATURE(dc, VIS1);
3683 gen_op_load_fpr_DT0(DFPREG(rs1));
3684 gen_op_load_fpr_DT1(DFPREG(rs2));
3685 tcg_gen_helper_0_0(helper_fcmpne16);
3686 gen_op_store_DT0_fpr(DFPREG(rd));
3687 break;
3688 case 0x024: /* VIS I fcmple32 */
3689 CHECK_FPU_FEATURE(dc, VIS1);
3690 gen_op_load_fpr_DT0(DFPREG(rs1));
3691 gen_op_load_fpr_DT1(DFPREG(rs2));
3692 tcg_gen_helper_0_0(helper_fcmple32);
3693 gen_op_store_DT0_fpr(DFPREG(rd));
3694 break;
3695 case 0x026: /* VIS I fcmpne32 */
3696 CHECK_FPU_FEATURE(dc, VIS1);
3697 gen_op_load_fpr_DT0(DFPREG(rs1));
3698 gen_op_load_fpr_DT1(DFPREG(rs2));
3699 tcg_gen_helper_0_0(helper_fcmpne32);
3700 gen_op_store_DT0_fpr(DFPREG(rd));
3701 break;
3702 case 0x028: /* VIS I fcmpgt16 */
3703 CHECK_FPU_FEATURE(dc, VIS1);
3704 gen_op_load_fpr_DT0(DFPREG(rs1));
3705 gen_op_load_fpr_DT1(DFPREG(rs2));
3706 tcg_gen_helper_0_0(helper_fcmpgt16);
3707 gen_op_store_DT0_fpr(DFPREG(rd));
3708 break;
3709 case 0x02a: /* VIS I fcmpeq16 */
3710 CHECK_FPU_FEATURE(dc, VIS1);
3711 gen_op_load_fpr_DT0(DFPREG(rs1));
3712 gen_op_load_fpr_DT1(DFPREG(rs2));
3713 tcg_gen_helper_0_0(helper_fcmpeq16);
3714 gen_op_store_DT0_fpr(DFPREG(rd));
3715 break;
3716 case 0x02c: /* VIS I fcmpgt32 */
3717 CHECK_FPU_FEATURE(dc, VIS1);
3718 gen_op_load_fpr_DT0(DFPREG(rs1));
3719 gen_op_load_fpr_DT1(DFPREG(rs2));
3720 tcg_gen_helper_0_0(helper_fcmpgt32);
3721 gen_op_store_DT0_fpr(DFPREG(rd));
3722 break;
3723 case 0x02e: /* VIS I fcmpeq32 */
3724 CHECK_FPU_FEATURE(dc, VIS1);
3725 gen_op_load_fpr_DT0(DFPREG(rs1));
3726 gen_op_load_fpr_DT1(DFPREG(rs2));
3727 tcg_gen_helper_0_0(helper_fcmpeq32);
3728 gen_op_store_DT0_fpr(DFPREG(rd));
3729 break;
3730 case 0x031: /* VIS I fmul8x16 */
3731 CHECK_FPU_FEATURE(dc, VIS1);
3732 gen_op_load_fpr_DT0(DFPREG(rs1));
3733 gen_op_load_fpr_DT1(DFPREG(rs2));
3734 tcg_gen_helper_0_0(helper_fmul8x16);
3735 gen_op_store_DT0_fpr(DFPREG(rd));
3736 break;
3737 case 0x033: /* VIS I fmul8x16au */
3738 CHECK_FPU_FEATURE(dc, VIS1);
3739 gen_op_load_fpr_DT0(DFPREG(rs1));
3740 gen_op_load_fpr_DT1(DFPREG(rs2));
3741 tcg_gen_helper_0_0(helper_fmul8x16au);
3742 gen_op_store_DT0_fpr(DFPREG(rd));
3743 break;
3744 case 0x035: /* VIS I fmul8x16al */
3745 CHECK_FPU_FEATURE(dc, VIS1);
3746 gen_op_load_fpr_DT0(DFPREG(rs1));
3747 gen_op_load_fpr_DT1(DFPREG(rs2));
3748 tcg_gen_helper_0_0(helper_fmul8x16al);
3749 gen_op_store_DT0_fpr(DFPREG(rd));
3750 break;
3751 case 0x036: /* VIS I fmul8sux16 */
3752 CHECK_FPU_FEATURE(dc, VIS1);
3753 gen_op_load_fpr_DT0(DFPREG(rs1));
3754 gen_op_load_fpr_DT1(DFPREG(rs2));
3755 tcg_gen_helper_0_0(helper_fmul8sux16);
3756 gen_op_store_DT0_fpr(DFPREG(rd));
3757 break;
3758 case 0x037: /* VIS I fmul8ulx16 */
3759 CHECK_FPU_FEATURE(dc, VIS1);
3760 gen_op_load_fpr_DT0(DFPREG(rs1));
3761 gen_op_load_fpr_DT1(DFPREG(rs2));
3762 tcg_gen_helper_0_0(helper_fmul8ulx16);
3763 gen_op_store_DT0_fpr(DFPREG(rd));
3764 break;
3765 case 0x038: /* VIS I fmuld8sux16 */
3766 CHECK_FPU_FEATURE(dc, VIS1);
3767 gen_op_load_fpr_DT0(DFPREG(rs1));
3768 gen_op_load_fpr_DT1(DFPREG(rs2));
3769 tcg_gen_helper_0_0(helper_fmuld8sux16);
3770 gen_op_store_DT0_fpr(DFPREG(rd));
3771 break;
3772 case 0x039: /* VIS I fmuld8ulx16 */
3773 CHECK_FPU_FEATURE(dc, VIS1);
3774 gen_op_load_fpr_DT0(DFPREG(rs1));
3775 gen_op_load_fpr_DT1(DFPREG(rs2));
3776 tcg_gen_helper_0_0(helper_fmuld8ulx16);
3777 gen_op_store_DT0_fpr(DFPREG(rd));
3778 break;
3779 case 0x03a: /* VIS I fpack32 */
3780 case 0x03b: /* VIS I fpack16 */
3781 case 0x03d: /* VIS I fpackfix */
3782 case 0x03e: /* VIS I pdist */
3783 // XXX
3784 goto illegal_insn;
3785 case 0x048: /* VIS I faligndata */
3786 CHECK_FPU_FEATURE(dc, VIS1);
3787 gen_op_load_fpr_DT0(DFPREG(rs1));
3788 gen_op_load_fpr_DT1(DFPREG(rs2));
3789 tcg_gen_helper_0_0(helper_faligndata);
3790 gen_op_store_DT0_fpr(DFPREG(rd));
3791 break;
3792 case 0x04b: /* VIS I fpmerge */
3793 CHECK_FPU_FEATURE(dc, VIS1);
3794 gen_op_load_fpr_DT0(DFPREG(rs1));
3795 gen_op_load_fpr_DT1(DFPREG(rs2));
3796 tcg_gen_helper_0_0(helper_fpmerge);
3797 gen_op_store_DT0_fpr(DFPREG(rd));
3798 break;
3799 case 0x04c: /* VIS II bshuffle */
3800 // XXX
3801 goto illegal_insn;
3802 case 0x04d: /* VIS I fexpand */
3803 CHECK_FPU_FEATURE(dc, VIS1);
3804 gen_op_load_fpr_DT0(DFPREG(rs1));
3805 gen_op_load_fpr_DT1(DFPREG(rs2));
3806 tcg_gen_helper_0_0(helper_fexpand);
3807 gen_op_store_DT0_fpr(DFPREG(rd));
3808 break;
3809 case 0x050: /* VIS I fpadd16 */
3810 CHECK_FPU_FEATURE(dc, VIS1);
3811 gen_op_load_fpr_DT0(DFPREG(rs1));
3812 gen_op_load_fpr_DT1(DFPREG(rs2));
3813 tcg_gen_helper_0_0(helper_fpadd16);
3814 gen_op_store_DT0_fpr(DFPREG(rd));
3815 break;
3816 case 0x051: /* VIS I fpadd16s */
3817 CHECK_FPU_FEATURE(dc, VIS1);
3818 tcg_gen_helper_1_2(helper_fpadd16s, cpu_fpr[rd],
3819 cpu_fpr[rs1], cpu_fpr[rs2]);
3820 break;
3821 case 0x052: /* VIS I fpadd32 */
3822 CHECK_FPU_FEATURE(dc, VIS1);
3823 gen_op_load_fpr_DT0(DFPREG(rs1));
3824 gen_op_load_fpr_DT1(DFPREG(rs2));
3825 tcg_gen_helper_0_0(helper_fpadd32);
3826 gen_op_store_DT0_fpr(DFPREG(rd));
3827 break;
3828 case 0x053: /* VIS I fpadd32s */
3829 CHECK_FPU_FEATURE(dc, VIS1);
3830 tcg_gen_helper_1_2(helper_fpadd32s, cpu_fpr[rd],
3831 cpu_fpr[rs1], cpu_fpr[rs2]);
3832 break;
3833 case 0x054: /* VIS I fpsub16 */
3834 CHECK_FPU_FEATURE(dc, VIS1);
3835 gen_op_load_fpr_DT0(DFPREG(rs1));
3836 gen_op_load_fpr_DT1(DFPREG(rs2));
3837 tcg_gen_helper_0_0(helper_fpsub16);
3838 gen_op_store_DT0_fpr(DFPREG(rd));
3839 break;
3840 case 0x055: /* VIS I fpsub16s */
3841 CHECK_FPU_FEATURE(dc, VIS1);
3842 tcg_gen_helper_1_2(helper_fpsub16s, cpu_fpr[rd],
3843 cpu_fpr[rs1], cpu_fpr[rs2]);
3844 break;
3845 case 0x056: /* VIS I fpsub32 */
3846 CHECK_FPU_FEATURE(dc, VIS1);
3847 gen_op_load_fpr_DT0(DFPREG(rs1));
3848 gen_op_load_fpr_DT1(DFPREG(rs2));
3849 tcg_gen_helper_0_0(helper_fpsub32);
3850 gen_op_store_DT0_fpr(DFPREG(rd));
3851 break;
3852 case 0x057: /* VIS I fpsub32s */
3853 CHECK_FPU_FEATURE(dc, VIS1);
3854 tcg_gen_helper_1_2(helper_fpsub32s, cpu_fpr[rd],
3855 cpu_fpr[rs1], cpu_fpr[rs2]);
3856 break;
3857 case 0x060: /* VIS I fzero */
3858 CHECK_FPU_FEATURE(dc, VIS1);
3859 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], 0);
3860 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], 0);
3861 break;
3862 case 0x061: /* VIS I fzeros */
3863 CHECK_FPU_FEATURE(dc, VIS1);
3864 tcg_gen_movi_i32(cpu_fpr[rd], 0);
3865 break;
3866 case 0x062: /* VIS I fnor */
3867 CHECK_FPU_FEATURE(dc, VIS1);
3868 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3869 cpu_fpr[DFPREG(rs2)]);
3870 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3871 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3872 cpu_fpr[DFPREG(rs2) + 1]);
3873 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3874 break;
3875 case 0x063: /* VIS I fnors */
3876 CHECK_FPU_FEATURE(dc, VIS1);
3877 tcg_gen_or_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3878 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3879 break;
3880 case 0x064: /* VIS I fandnot2 */
3881 CHECK_FPU_FEATURE(dc, VIS1);
3882 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
3883 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3884 cpu_fpr[DFPREG(rs2)]);
3885 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
3886 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3887 cpu_fpr[DFPREG(rs2) + 1]);
3888 break;
3889 case 0x065: /* VIS I fandnot2s */
3890 CHECK_FPU_FEATURE(dc, VIS1);
3891 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
3892 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
3893 break;
3894 case 0x066: /* VIS I fnot2 */
3895 CHECK_FPU_FEATURE(dc, VIS1);
3896 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs2)],
3897 -1);
3898 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3899 cpu_fpr[DFPREG(rs2) + 1], -1);
3900 break;
3901 case 0x067: /* VIS I fnot2s */
3902 CHECK_FPU_FEATURE(dc, VIS1);
3903 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs2], -1);
3904 break;
3905 case 0x068: /* VIS I fandnot1 */
3906 CHECK_FPU_FEATURE(dc, VIS1);
3907 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3908 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3909 cpu_fpr[DFPREG(rs1)]);
3910 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3911 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3912 cpu_fpr[DFPREG(rs1) + 1]);
3913 break;
3914 case 0x069: /* VIS I fandnot1s */
3915 CHECK_FPU_FEATURE(dc, VIS1);
3916 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3917 tcg_gen_and_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3918 break;
3919 case 0x06a: /* VIS I fnot1 */
3920 CHECK_FPU_FEATURE(dc, VIS1);
3921 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3922 -1);
3923 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1],
3924 cpu_fpr[DFPREG(rs1) + 1], -1);
3925 break;
3926 case 0x06b: /* VIS I fnot1s */
3927 CHECK_FPU_FEATURE(dc, VIS1);
3928 tcg_gen_xori_i32(cpu_fpr[rd], cpu_fpr[rs1], -1);
3929 break;
3930 case 0x06c: /* VIS I fxor */
3931 CHECK_FPU_FEATURE(dc, VIS1);
3932 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3933 cpu_fpr[DFPREG(rs2)]);
3934 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1],
3935 cpu_fpr[DFPREG(rs1) + 1],
3936 cpu_fpr[DFPREG(rs2) + 1]);
3937 break;
3938 case 0x06d: /* VIS I fxors */
3939 CHECK_FPU_FEATURE(dc, VIS1);
3940 tcg_gen_xor_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3941 break;
3942 case 0x06e: /* VIS I fnand */
3943 CHECK_FPU_FEATURE(dc, VIS1);
3944 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)],
3945 cpu_fpr[DFPREG(rs2)]);
3946 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32, -1);
3947 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1],
3948 cpu_fpr[DFPREG(rs2) + 1]);
3949 tcg_gen_xori_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32, -1);
3950 break;
3951 case 0x06f: /* VIS I fnands */
3952 CHECK_FPU_FEATURE(dc, VIS1);
3953 tcg_gen_and_i32(cpu_tmp32, cpu_fpr[rs1], cpu_fpr[rs2]);
3954 tcg_gen_xori_i32(cpu_fpr[rd], cpu_tmp32, -1);
3955 break;
3956 case 0x070: /* VIS I fand */
3957 CHECK_FPU_FEATURE(dc, VIS1);
3958 tcg_gen_and_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
3959 cpu_fpr[DFPREG(rs2)]);
3960 tcg_gen_and_i32(cpu_fpr[DFPREG(rd) + 1],
3961 cpu_fpr[DFPREG(rs1) + 1],
3962 cpu_fpr[DFPREG(rs2) + 1]);
3963 break;
3964 case 0x071: /* VIS I fands */
3965 CHECK_FPU_FEATURE(dc, VIS1);
3966 tcg_gen_and_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
3967 break;
3968 case 0x072: /* VIS I fxnor */
3969 CHECK_FPU_FEATURE(dc, VIS1);
3970 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
3971 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3972 cpu_fpr[DFPREG(rs1)]);
3973 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
3974 tcg_gen_xor_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3975 cpu_fpr[DFPREG(rs1) + 1]);
3976 break;
3977 case 0x073: /* VIS I fxnors */
3978 CHECK_FPU_FEATURE(dc, VIS1);
3979 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
3980 tcg_gen_xor_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
3981 break;
3982 case 0x074: /* VIS I fsrc1 */
3983 CHECK_FPU_FEATURE(dc, VIS1);
3984 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)]);
3985 tcg_gen_mov_i32(cpu_fpr[DFPREG(rd) + 1],
3986 cpu_fpr[DFPREG(rs1) + 1]);
3987 break;
3988 case 0x075: /* VIS I fsrc1s */
3989 CHECK_FPU_FEATURE(dc, VIS1);
3990 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs1]);
3991 break;
3992 case 0x076: /* VIS I fornot2 */
3993 CHECK_FPU_FEATURE(dc, VIS1);
3994 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1)], -1);
3995 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
3996 cpu_fpr[DFPREG(rs2)]);
3997 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs1) + 1], -1);
3998 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
3999 cpu_fpr[DFPREG(rs2) + 1]);
4000 break;
4001 case 0x077: /* VIS I fornot2s */
4002 CHECK_FPU_FEATURE(dc, VIS1);
4003 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs1], -1);
4004 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs2]);
4005 break;
4006 case 0x078: /* VIS I fsrc2 */
4007 CHECK_FPU_FEATURE(dc, VIS1);
4008 gen_op_load_fpr_DT0(DFPREG(rs2));
4009 gen_op_store_DT0_fpr(DFPREG(rd));
4010 break;
4011 case 0x079: /* VIS I fsrc2s */
4012 CHECK_FPU_FEATURE(dc, VIS1);
4013 tcg_gen_mov_i32(cpu_fpr[rd], cpu_fpr[rs2]);
4014 break;
4015 case 0x07a: /* VIS I fornot1 */
4016 CHECK_FPU_FEATURE(dc, VIS1);
4017 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2)], -1);
4018 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_tmp32,
4019 cpu_fpr[DFPREG(rs1)]);
4020 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[DFPREG(rs2) + 1], -1);
4021 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1], cpu_tmp32,
4022 cpu_fpr[DFPREG(rs1) + 1]);
4023 break;
4024 case 0x07b: /* VIS I fornot1s */
4025 CHECK_FPU_FEATURE(dc, VIS1);
4026 tcg_gen_xori_i32(cpu_tmp32, cpu_fpr[rs2], -1);
4027 tcg_gen_or_i32(cpu_fpr[rd], cpu_tmp32, cpu_fpr[rs1]);
4028 break;
4029 case 0x07c: /* VIS I for */
4030 CHECK_FPU_FEATURE(dc, VIS1);
4031 tcg_gen_or_i32(cpu_fpr[DFPREG(rd)], cpu_fpr[DFPREG(rs1)],
4032 cpu_fpr[DFPREG(rs2)]);
4033 tcg_gen_or_i32(cpu_fpr[DFPREG(rd) + 1],
4034 cpu_fpr[DFPREG(rs1) + 1],
4035 cpu_fpr[DFPREG(rs2) + 1]);
4036 break;
4037 case 0x07d: /* VIS I fors */
4038 CHECK_FPU_FEATURE(dc, VIS1);
4039 tcg_gen_or_i32(cpu_fpr[rd], cpu_fpr[rs1], cpu_fpr[rs2]);
4040 break;
4041 case 0x07e: /* VIS I fone */
4042 CHECK_FPU_FEATURE(dc, VIS1);
4043 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd)], -1);
4044 tcg_gen_movi_i32(cpu_fpr[DFPREG(rd) + 1], -1);
4045 break;
4046 case 0x07f: /* VIS I fones */
4047 CHECK_FPU_FEATURE(dc, VIS1);
4048 tcg_gen_movi_i32(cpu_fpr[rd], -1);
4049 break;
4050 case 0x080: /* VIS I shutdown */
4051 case 0x081: /* VIS II siam */
4052 // XXX
4053 goto illegal_insn;
4054 default:
4055 goto illegal_insn;
4056 }
4057 #else
4058 goto ncp_insn;
4059 #endif
4060 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4061 #ifdef TARGET_SPARC64
4062 goto illegal_insn;
4063 #else
4064 goto ncp_insn;
4065 #endif
4066 #ifdef TARGET_SPARC64
4067 } else if (xop == 0x39) { /* V9 return */
4068 TCGv r_const;
4069
4070 save_state(dc, cpu_cond);
4071 cpu_src1 = get_src1(insn, cpu_src1);
4072 if (IS_IMM) { /* immediate */
4073 rs2 = GET_FIELDs(insn, 19, 31);
4074 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4075 } else { /* register */
4076 rs2 = GET_FIELD(insn, 27, 31);
4077 if (rs2) {
4078 gen_movl_reg_TN(rs2, cpu_src2);
4079 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4080 } else
4081 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4082 }
4083 tcg_gen_helper_0_0(helper_restore);
4084 gen_mov_pc_npc(dc, cpu_cond);
4085 r_const = tcg_const_i32(3);
4086 tcg_gen_helper_0_2(helper_check_align, cpu_dst, r_const);
4087 tcg_temp_free(r_const);
4088 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4089 dc->npc = DYNAMIC_PC;
4090 goto jmp_insn;
4091 #endif
4092 } else {
4093 cpu_src1 = get_src1(insn, cpu_src1);
4094 if (IS_IMM) { /* immediate */
4095 rs2 = GET_FIELDs(insn, 19, 31);
4096 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4097 } else { /* register */
4098 rs2 = GET_FIELD(insn, 27, 31);
4099 if (rs2) {
4100 gen_movl_reg_TN(rs2, cpu_src2);
4101 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4102 } else
4103 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4104 }
4105 switch (xop) {
4106 case 0x38: /* jmpl */
4107 {
4108 TCGv r_const;
4109
4110 r_const = tcg_const_tl(dc->pc);
4111 gen_movl_TN_reg(rd, r_const);
4112 tcg_temp_free(r_const);
4113 gen_mov_pc_npc(dc, cpu_cond);
4114 r_const = tcg_const_i32(3);
4115 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4116 r_const);
4117 tcg_temp_free(r_const);
4118 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4119 dc->npc = DYNAMIC_PC;
4120 }
4121 goto jmp_insn;
4122 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4123 case 0x39: /* rett, V9 return */
4124 {
4125 TCGv r_const;
4126
4127 if (!supervisor(dc))
4128 goto priv_insn;
4129 gen_mov_pc_npc(dc, cpu_cond);
4130 r_const = tcg_const_i32(3);
4131 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4132 r_const);
4133 tcg_temp_free(r_const);
4134 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4135 dc->npc = DYNAMIC_PC;
4136 tcg_gen_helper_0_0(helper_rett);
4137 }
4138 goto jmp_insn;
4139 #endif
4140 case 0x3b: /* flush */
4141 if (!((dc)->def->features & CPU_FEATURE_FLUSH))
4142 goto unimp_flush;
4143 tcg_gen_helper_0_1(helper_flush, cpu_dst);
4144 break;
4145 case 0x3c: /* save */
4146 save_state(dc, cpu_cond);
4147 tcg_gen_helper_0_0(helper_save);
4148 gen_movl_TN_reg(rd, cpu_dst);
4149 break;
4150 case 0x3d: /* restore */
4151 save_state(dc, cpu_cond);
4152 tcg_gen_helper_0_0(helper_restore);
4153 gen_movl_TN_reg(rd, cpu_dst);
4154 break;
4155 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4156 case 0x3e: /* V9 done/retry */
4157 {
4158 switch (rd) {
4159 case 0:
4160 if (!supervisor(dc))
4161 goto priv_insn;
4162 dc->npc = DYNAMIC_PC;
4163 dc->pc = DYNAMIC_PC;
4164 tcg_gen_helper_0_0(helper_done);
4165 goto jmp_insn;
4166 case 1:
4167 if (!supervisor(dc))
4168 goto priv_insn;
4169 dc->npc = DYNAMIC_PC;
4170 dc->pc = DYNAMIC_PC;
4171 tcg_gen_helper_0_0(helper_retry);
4172 goto jmp_insn;
4173 default:
4174 goto illegal_insn;
4175 }
4176 }
4177 break;
4178 #endif
4179 default:
4180 goto illegal_insn;
4181 }
4182 }
4183 break;
4184 }
4185 break;
4186 case 3: /* load/store instructions */
4187 {
4188 unsigned int xop = GET_FIELD(insn, 7, 12);
4189
4190 cpu_src1 = get_src1(insn, cpu_src1);
4191 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4192 rs2 = GET_FIELD(insn, 27, 31);
4193 gen_movl_reg_TN(rs2, cpu_src2);
4194 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4195 } else if (IS_IMM) { /* immediate */
4196 rs2 = GET_FIELDs(insn, 19, 31);
4197 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
4198 } else { /* register */
4199 rs2 = GET_FIELD(insn, 27, 31);
4200 if (rs2 != 0) {
4201 gen_movl_reg_TN(rs2, cpu_src2);
4202 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4203 } else
4204 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4205 }
4206 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4207 (xop > 0x17 && xop <= 0x1d ) ||
4208 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4209 switch (xop) {
4210 case 0x0: /* load unsigned word */
4211 gen_address_mask(dc, cpu_addr);
4212 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4213 break;
4214 case 0x1: /* load unsigned byte */
4215 gen_address_mask(dc, cpu_addr);
4216 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4217 break;
4218 case 0x2: /* load unsigned halfword */
4219 gen_address_mask(dc, cpu_addr);
4220 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4221 break;
4222 case 0x3: /* load double word */
4223 if (rd & 1)
4224 goto illegal_insn;
4225 else {
4226 TCGv r_const;
4227
4228 save_state(dc, cpu_cond);
4229 r_const = tcg_const_i32(7);
4230 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4231 r_const); // XXX remove
4232 tcg_temp_free(r_const);
4233 gen_address_mask(dc, cpu_addr);
4234 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4235 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4236 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4237 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4238 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4239 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4240 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4241 }
4242 break;
4243 case 0x9: /* load signed byte */
4244 gen_address_mask(dc, cpu_addr);
4245 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4246 break;
4247 case 0xa: /* load signed halfword */
4248 gen_address_mask(dc, cpu_addr);
4249 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4250 break;
4251 case 0xd: /* ldstub -- XXX: should be atomically */
4252 {
4253 TCGv r_const;
4254
4255 gen_address_mask(dc, cpu_addr);
4256 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4257 r_const = tcg_const_tl(0xff);
4258 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4259 tcg_temp_free(r_const);
4260 }
4261 break;
4262 case 0x0f: /* swap register with memory. Also
4263 atomically */
4264 CHECK_IU_FEATURE(dc, SWAP);
4265 gen_movl_reg_TN(rd, cpu_val);
4266 gen_address_mask(dc, cpu_addr);
4267 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4268 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4269 tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
4270 break;
4271 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4272 case 0x10: /* load word alternate */
4273 #ifndef TARGET_SPARC64
4274 if (IS_IMM)
4275 goto illegal_insn;
4276 if (!supervisor(dc))
4277 goto priv_insn;
4278 #endif
4279 save_state(dc, cpu_cond);
4280 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4281 break;
4282 case 0x11: /* load unsigned byte alternate */
4283 #ifndef TARGET_SPARC64
4284 if (IS_IMM)
4285 goto illegal_insn;
4286 if (!supervisor(dc))
4287 goto priv_insn;
4288 #endif
4289 save_state(dc, cpu_cond);
4290 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4291 break;
4292 case 0x12: /* load unsigned halfword alternate */
4293 #ifndef TARGET_SPARC64
4294 if (IS_IMM)
4295 goto illegal_insn;
4296 if (!supervisor(dc))
4297 goto priv_insn;
4298 #endif
4299 save_state(dc, cpu_cond);
4300 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4301 break;
4302 case 0x13: /* load double word alternate */
4303 #ifndef TARGET_SPARC64
4304 if (IS_IMM)
4305 goto illegal_insn;
4306 if (!supervisor(dc))
4307 goto priv_insn;
4308 #endif
4309 if (rd & 1)
4310 goto illegal_insn;
4311 save_state(dc, cpu_cond);
4312 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4313 goto skip_move;
4314 case 0x19: /* load signed byte alternate */
4315 #ifndef TARGET_SPARC64
4316 if (IS_IMM)
4317 goto illegal_insn;
4318 if (!supervisor(dc))
4319 goto priv_insn;
4320 #endif
4321 save_state(dc, cpu_cond);
4322 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4323 break;
4324 case 0x1a: /* load signed halfword alternate */
4325 #ifndef TARGET_SPARC64
4326 if (IS_IMM)
4327 goto illegal_insn;
4328 if (!supervisor(dc))
4329 goto priv_insn;
4330 #endif
4331 save_state(dc, cpu_cond);
4332 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4333 break;
4334 case 0x1d: /* ldstuba -- XXX: should be atomically */
4335 #ifndef TARGET_SPARC64
4336 if (IS_IMM)
4337 goto illegal_insn;
4338 if (!supervisor(dc))
4339 goto priv_insn;
4340 #endif
4341 save_state(dc, cpu_cond);
4342 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4343 break;
4344 case 0x1f: /* swap reg with alt. memory. Also
4345 atomically */
4346 CHECK_IU_FEATURE(dc, SWAP);
4347 #ifndef TARGET_SPARC64
4348 if (IS_IMM)
4349 goto illegal_insn;
4350 if (!supervisor(dc))
4351 goto priv_insn;
4352 #endif
4353 save_state(dc, cpu_cond);
4354 gen_movl_reg_TN(rd, cpu_val);
4355 gen_swap_asi(cpu_val, cpu_addr, insn);
4356 break;
4357
4358 #ifndef TARGET_SPARC64
4359 case 0x30: /* ldc */
4360 case 0x31: /* ldcsr */
4361 case 0x33: /* lddc */
4362 goto ncp_insn;
4363 #endif
4364 #endif
4365 #ifdef TARGET_SPARC64
4366 case 0x08: /* V9 ldsw */
4367 gen_address_mask(dc, cpu_addr);
4368 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4369 break;
4370 case 0x0b: /* V9 ldx */
4371 gen_address_mask(dc, cpu_addr);
4372 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4373 break;
4374 case 0x18: /* V9 ldswa */
4375 save_state(dc, cpu_cond);
4376 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4377 break;
4378 case 0x1b: /* V9 ldxa */
4379 save_state(dc, cpu_cond);
4380 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4381 break;
4382 case 0x2d: /* V9 prefetch, no effect */
4383 goto skip_move;
4384 case 0x30: /* V9 ldfa */
4385 save_state(dc, cpu_cond);
4386 gen_ldf_asi(cpu_addr, insn, 4, rd);
4387 goto skip_move;
4388 case 0x33: /* V9 lddfa */
4389 save_state(dc, cpu_cond);
4390 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4391 goto skip_move;
4392 case 0x3d: /* V9 prefetcha, no effect */
4393 goto skip_move;
4394 case 0x32: /* V9 ldqfa */
4395 CHECK_FPU_FEATURE(dc, FLOAT128);
4396 save_state(dc, cpu_cond);
4397 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4398 goto skip_move;
4399 #endif
4400 default:
4401 goto illegal_insn;
4402 }
4403 gen_movl_TN_reg(rd, cpu_val);
4404 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4405 skip_move: ;
4406 #endif
4407 } else if (xop >= 0x20 && xop < 0x24) {
4408 if (gen_trap_ifnofpu(dc, cpu_cond))
4409 goto jmp_insn;
4410 save_state(dc, cpu_cond);
4411 switch (xop) {
4412 case 0x20: /* load fpreg */
4413 gen_address_mask(dc, cpu_addr);
4414 tcg_gen_qemu_ld32u(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4415 break;
4416 case 0x21: /* ldfsr, V9 ldxfsr */
4417 #ifdef TARGET_SPARC64
4418 gen_address_mask(dc, cpu_addr);
4419 if (rd == 1) {
4420 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4421 tcg_gen_helper_0_1(helper_ldxfsr, cpu_tmp64);
4422 } else
4423 #else
4424 {
4425 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4426 tcg_gen_helper_0_1(helper_ldfsr, cpu_tmp32);
4427 }
4428 #endif
4429 break;
4430 case 0x22: /* load quad fpreg */
4431 {
4432 TCGv r_const;
4433
4434 CHECK_FPU_FEATURE(dc, FLOAT128);
4435 r_const = tcg_const_i32(dc->mem_idx);
4436 tcg_gen_helper_0_2(helper_ldqf, cpu_addr, r_const);
4437 tcg_temp_free(r_const);
4438 gen_op_store_QT0_fpr(QFPREG(rd));
4439 }
4440 break;
4441 case 0x23: /* load double fpreg */
4442 {
4443 TCGv r_const;
4444
4445 r_const = tcg_const_i32(dc->mem_idx);
4446 tcg_gen_helper_0_2(helper_lddf, cpu_addr, r_const);
4447 tcg_temp_free(r_const);
4448 gen_op_store_DT0_fpr(DFPREG(rd));
4449 }
4450 break;
4451 default:
4452 goto illegal_insn;
4453 }
4454 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4455 xop == 0xe || xop == 0x1e) {
4456 gen_movl_reg_TN(rd, cpu_val);
4457 switch (xop) {
4458 case 0x4: /* store word */
4459 gen_address_mask(dc, cpu_addr);
4460 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4461 break;
4462 case 0x5: /* store byte */
4463 gen_address_mask(dc, cpu_addr);
4464 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4465 break;
4466 case 0x6: /* store halfword */
4467 gen_address_mask(dc, cpu_addr);
4468 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4469 break;
4470 case 0x7: /* store double word */
4471 if (rd & 1)
4472 goto illegal_insn;
4473 else {
4474 TCGv r_low, r_const;
4475
4476 save_state(dc, cpu_cond);
4477 gen_address_mask(dc, cpu_addr);
4478 r_const = tcg_const_i32(7);
4479 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4480 r_const); // XXX remove
4481 tcg_temp_free(r_const);
4482 r_low = tcg_temp_new(TCG_TYPE_TL);
4483 gen_movl_reg_TN(rd + 1, r_low);
4484 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_val,
4485 r_low);
4486 tcg_temp_free(r_low);
4487 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4488 }
4489 break;
4490 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4491 case 0x14: /* store word alternate */
4492 #ifndef TARGET_SPARC64
4493 if (IS_IMM)
4494 goto illegal_insn;
4495 if (!supervisor(dc))
4496 goto priv_insn;
4497 #endif
4498 save_state(dc, cpu_cond);
4499 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4500 break;
4501 case 0x15: /* store byte alternate */
4502 #ifndef TARGET_SPARC64
4503 if (IS_IMM)
4504 goto illegal_insn;
4505 if (!supervisor(dc))
4506 goto priv_insn;
4507 #endif
4508 save_state(dc, cpu_cond);
4509 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4510 break;
4511 case 0x16: /* store halfword alternate */
4512 #ifndef TARGET_SPARC64
4513 if (IS_IMM)
4514 goto illegal_insn;
4515 if (!supervisor(dc))
4516 goto priv_insn;
4517 #endif
4518 save_state(dc, cpu_cond);
4519 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4520 break;
4521 case 0x17: /* store double word alternate */
4522 #ifndef TARGET_SPARC64
4523 if (IS_IMM)
4524 goto illegal_insn;
4525 if (!supervisor(dc))
4526 goto priv_insn;
4527 #endif
4528 if (rd & 1)
4529 goto illegal_insn;
4530 else {
4531 save_state(dc, cpu_cond);
4532 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4533 }
4534 break;
4535 #endif
4536 #ifdef TARGET_SPARC64
4537 case 0x0e: /* V9 stx */
4538 gen_address_mask(dc, cpu_addr);
4539 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4540 break;
4541 case 0x1e: /* V9 stxa */
4542 save_state(dc, cpu_cond);
4543 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4544 break;
4545 #endif
4546 default:
4547 goto illegal_insn;
4548 }
4549 } else if (xop > 0x23 && xop < 0x28) {
4550 if (gen_trap_ifnofpu(dc, cpu_cond))
4551 goto jmp_insn;
4552 save_state(dc, cpu_cond);
4553 switch (xop) {
4554 case 0x24: /* store fpreg */
4555 gen_address_mask(dc, cpu_addr);
4556 tcg_gen_qemu_st32(cpu_fpr[rd], cpu_addr, dc->mem_idx);
4557 break;
4558 case 0x25: /* stfsr, V9 stxfsr */
4559 #ifdef TARGET_SPARC64
4560 gen_address_mask(dc, cpu_addr);
4561 tcg_gen_ld_i64(cpu_tmp64, cpu_env, offsetof(CPUState, fsr));
4562 if (rd == 1)
4563 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4564 else {
4565 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp64);
4566 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4567 }
4568 #else
4569 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUState, fsr));
4570 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4571 #endif
4572 break;
4573 case 0x26:
4574 #ifdef TARGET_SPARC64
4575 /* V9 stqf, store quad fpreg */
4576 {
4577 TCGv r_const;
4578
4579 CHECK_FPU_FEATURE(dc, FLOAT128);
4580 gen_op_load_fpr_QT0(QFPREG(rd));
4581 r_const = tcg_const_i32(dc->mem_idx);
4582 tcg_gen_helper_0_2(helper_stqf, cpu_addr, r_const);
4583 tcg_temp_free(r_const);
4584 }
4585 break;
4586 #else /* !TARGET_SPARC64 */
4587 /* stdfq, store floating point queue */
4588 #if defined(CONFIG_USER_ONLY)
4589 goto illegal_insn;
4590 #else
4591 if (!supervisor(dc))
4592 goto priv_insn;
4593 if (gen_trap_ifnofpu(dc, cpu_cond))
4594 goto jmp_insn;
4595 goto nfq_insn;
4596 #endif
4597 #endif
4598 case 0x27: /* store double fpreg */
4599 {
4600 TCGv r_const;
4601
4602 gen_op_load_fpr_DT0(DFPREG(rd));
4603 r_const = tcg_const_i32(dc->mem_idx);
4604 tcg_gen_helper_0_2(helper_stdf, cpu_addr, r_const);
4605 tcg_temp_free(r_const);
4606 }
4607 break;
4608 default:
4609 goto illegal_insn;
4610 }
4611 } else if (xop > 0x33 && xop < 0x3f) {
4612 save_state(dc, cpu_cond);
4613 switch (xop) {
4614 #ifdef TARGET_SPARC64
4615 case 0x34: /* V9 stfa */
4616 gen_stf_asi(cpu_addr, insn, 4, rd);
4617 break;
4618 case 0x36: /* V9 stqfa */
4619 {
4620 TCGv r_const;
4621
4622 CHECK_FPU_FEATURE(dc, FLOAT128);
4623 r_const = tcg_const_i32(7);
4624 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4625 r_const);
4626 tcg_temp_free(r_const);
4627 gen_op_load_fpr_QT0(QFPREG(rd));
4628 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4629 }
4630 break;
4631 case 0x37: /* V9 stdfa */
4632 gen_op_load_fpr_DT0(DFPREG(rd));
4633 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4634 break;
4635 case 0x3c: /* V9 casa */
4636 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4637 gen_movl_TN_reg(rd, cpu_val);
4638 break;
4639 case 0x3e: /* V9 casxa */
4640 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4641 gen_movl_TN_reg(rd, cpu_val);
4642 break;
4643 #else
4644 case 0x34: /* stc */
4645 case 0x35: /* stcsr */
4646 case 0x36: /* stdcq */
4647 case 0x37: /* stdc */
4648 goto ncp_insn;
4649 #endif
4650 default:
4651 goto illegal_insn;
4652 }
4653 }
4654 else
4655 goto illegal_insn;
4656 }
4657 break;
4658 }
4659 /* default case for non jump instructions */
4660 if (dc->npc == DYNAMIC_PC) {
4661 dc->pc = DYNAMIC_PC;
4662 gen_op_next_insn();
4663 } else if (dc->npc == JUMP_PC) {
4664 /* we can do a static jump */
4665 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4666 dc->is_br = 1;
4667 } else {
4668 dc->pc = dc->npc;
4669 dc->npc = dc->npc + 4;
4670 }
4671 jmp_insn:
4672 return;
4673 illegal_insn:
4674 {
4675 TCGv r_const;
4676
4677 save_state(dc, cpu_cond);
4678 r_const = tcg_const_i32(TT_ILL_INSN);
4679 tcg_gen_helper_0_1(raise_exception, r_const);
4680 tcg_temp_free(r_const);
4681 dc->is_br = 1;
4682 }
4683 return;
4684 unimp_flush:
4685 {
4686 TCGv r_const;
4687
4688 save_state(dc, cpu_cond);
4689 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4690 tcg_gen_helper_0_1(raise_exception, r_const);
4691 tcg_temp_free(r_const);
4692 dc->is_br = 1;
4693 }
4694 return;
4695 #if !defined(CONFIG_USER_ONLY)
4696 priv_insn:
4697 {
4698 TCGv r_const;
4699
4700 save_state(dc, cpu_cond);
4701 r_const = tcg_const_i32(TT_PRIV_INSN);
4702 tcg_gen_helper_0_1(raise_exception, r_const);
4703 tcg_temp_free(r_const);
4704 dc->is_br = 1;
4705 }
4706 return;
4707 #endif
4708 nfpu_insn:
4709 save_state(dc, cpu_cond);
4710 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4711 dc->is_br = 1;
4712 return;
4713 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4714 nfq_insn:
4715 save_state(dc, cpu_cond);
4716 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4717 dc->is_br = 1;
4718 return;
4719 #endif
4720 #ifndef TARGET_SPARC64
4721 ncp_insn:
4722 {
4723 TCGv r_const;
4724
4725 save_state(dc, cpu_cond);
4726 r_const = tcg_const_i32(TT_NCP_INSN);
4727 tcg_gen_helper_0_1(raise_exception, r_const);
4728 tcg_temp_free(r_const);
4729 dc->is_br = 1;
4730 }
4731 return;
4732 #endif
4733 }
4734
/* Translate one translation block of SPARC guest code into TCG ops.
 *
 * tb:  the TranslationBlock to fill (tb->pc is the guest start PC,
 *      tb->cs_base carries the guest NPC on entry).
 * spc: non-zero selects "search PC" mode, where per-op PC/NPC/icount
 *      bookkeeping is recorded in the gen_opc_* arrays so a host PC can
 *      later be mapped back to a guest PC (see gen_pc_load).
 * env: the CPU state being translated for.
 *
 * Translation stops at the first branch, on a non-sequential PC, at a
 * guest page boundary (so a TT_TFAULT PC is always in the right page),
 * in single-step mode, or when the op buffer / insn budget runs out.
 */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* NPC of the first insn is smuggled in through cs_base */
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->def = env->def;
    if ((dc->def->features & CPU_FEATURE_FLOAT))
        dc->fpu_enabled = cpu_fpu_enabled(env);
    else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    /* PSTATE.AM forces 32-bit address masking on loads/stores */
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* scratch temporaries shared by the per-insn decoder */
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);

    /* locals survive across branches inside one insn's generated code */
    cpu_dst = tcg_temp_local_new(TCG_TYPE_TL);

    // loads and stores
    cpu_val = tcg_temp_local_new(TCG_TYPE_TL);
    cpu_addr = tcg_temp_local_new(TCG_TYPE_TL);

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* emit a debug trap if a breakpoint is set on this PC */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    tcg_gen_helper_0_0(helper_debug);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            /* record the PC/NPC/icount for every generated op slot so a
               faulting host PC can be mapped back to the guest insn */
            if (loglevel > 0)
                fprintf(logfile, "Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    /* release temporaries in reverse order of allocation */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free(cpu_tmp64);
    tcg_temp_free(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* pad the remaining op slots and publish the two possible jump
           targets for gen_pc_load to pick from */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        if (loglevel > 0) {
            page_dump(logfile);
        }
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "--------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
}
4878
/* Public entry point: translate one TB in normal mode (no PC search). */
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}
4883
/* Public entry point: re-translate one TB in "search PC" mode, filling
   the gen_opc_* side tables used to recover the guest PC after a fault. */
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
4888
/* One-time translator initialization: registers env and all CPU state
 * fields accessed by generated code as named TCG globals, and registers
 * the helper functions.  Idempotent — guarded by the static 'inited'
 * flag, so repeated calls (one per created CPU) do the work only once.
 */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    /* debugger-visible names for the global registers (g0 is hardwired
       to zero and never registered as a TCG global) */
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };
    /* debugger-visible names for the FP registers */
    static const char * const fregnames[64] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
        "f32", "f33", "f34", "f35", "f36", "f37", "f38", "f39",
        "f40", "f41", "f42", "f43", "f44", "f45", "f46", "f47",
        "f48", "f49", "f50", "f51", "f52", "f53", "f54", "f55",
        "f56", "f57", "f58", "f59", "f60", "f61", "f62", "f63",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        /* env lives in a fixed host register (TCG_AREG0); everything
           else is a memory-backed global at a CPUState offset */
        cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
        cpu_asi = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, asi),
                                     "asi");
        cpu_fprs = tcg_global_mem_new(TCG_TYPE_I32,
                                      TCG_AREG0, offsetof(CPUState, fprs),
                                      "fprs");
        cpu_gsr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, gsr),
                                     "gsr");
        cpu_tick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
                                           TCG_AREG0,
                                           offsetof(CPUState, tick_cmpr),
                                           "tick_cmpr");
        cpu_stick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
                                            TCG_AREG0,
                                            offsetof(CPUState, stick_cmpr),
                                            "stick_cmpr");
        cpu_hstick_cmpr = tcg_global_mem_new(TCG_TYPE_TL,
                                             TCG_AREG0,
                                             offsetof(CPUState, hstick_cmpr),
                                             "hstick_cmpr");
        cpu_hintp = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                       offsetof(CPUState, hintp),
                                       "hintp");
        cpu_htba = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                      offsetof(CPUState, htba),
                                      "htba");
        cpu_hver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                      offsetof(CPUState, hver),
                                      "hver");
        cpu_ssr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                     offsetof(CPUState, ssr), "ssr");
        cpu_ver = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                     offsetof(CPUState, version), "ver");
#else
        cpu_wim = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, wim),
                                     "wim");
#endif
        cpu_cond = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
        cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        cpu_y = tcg_global_mem_new(TCG_TYPE_TL,
                                   TCG_AREG0, offsetof(CPUState, y), "y");
#ifndef CONFIG_USER_ONLY
        cpu_tbr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, tbr),
                                     "tbr");
#endif
        /* i starts at 1: g0 is always zero and has no backing global */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        for (i = 0; i < TARGET_FPREGS; i++)
            cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
                                            offsetof(CPUState, fpr[i]),
                                            fregnames[i]);

        /* register helpers */

        /* X-macro trick: redefine DEF_HELPER so re-including helper.h
           expands each declaration into a tcg_register_helper() call */
#undef DEF_HELPER
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
    }
}
5012
5013 void gen_pc_load(CPUState *env, TranslationBlock *tb,
5014 unsigned long searched_pc, int pc_pos, void *puc)
5015 {
5016 target_ulong npc;
5017 env->pc = gen_opc_pc[pc_pos];
5018 npc = gen_opc_npc[pc_pos];
5019 if (npc == 1) {
5020 /* dynamic NPC: already stored */
5021 } else if (npc == 2) {
5022 target_ulong t2 = (target_ulong)(unsigned long)puc;
5023 /* jump PC: use T2 and the jump targets of the translation */
5024 if (t2)
5025 env->npc = gen_opc_jump_pc[0];
5026 else
5027 env->npc = gen_opc_jump_pc[1];
5028 } else {
5029 env->npc = npc;
5030 }
5031 }