]> git.proxmox.com Git - mirror_qemu.git/blob - target-sparc/translate.c
Complete the TCG conversion
[mirror_qemu.git] / target-sparc / translate.c
1 /*
2 SPARC translation
3
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
6
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
11
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 */
21
22 /*
23 TODO-list:
24
25 Rest of V9 instructions, VIS instructions
26 NPC/PC static optimisations (use JUMP_TB when possible)
27 Optimize synthetic instructions
28 */
29
30 #include <stdarg.h>
31 #include <stdlib.h>
32 #include <stdio.h>
33 #include <string.h>
34 #include <inttypes.h>
35
36 #include "cpu.h"
37 #include "exec-all.h"
38 #include "disas.h"
39 #include "helper.h"
40 #include "tcg-op.h"
41
42 #define DEBUG_DISAS
43
44 #define DYNAMIC_PC 1 /* dynamic pc value */
45 #define JUMP_PC 2 /* dynamic pc value which takes only two values
46 according to jump_pc[T2] */
47
48 /* global register indexes */
49 static TCGv cpu_env, cpu_T[3], cpu_regwptr, cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
50 static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
51 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
52 #ifdef TARGET_SPARC64
53 static TCGv cpu_xcc;
54 #endif
55 /* local register indexes (only used inside old micro ops) */
56 static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
57
58 typedef struct DisasContext {
59 target_ulong pc; /* current Program Counter: integer or DYNAMIC_PC */
60 target_ulong npc; /* next PC: integer or DYNAMIC_PC or JUMP_PC */
61 target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
62 int is_br;
63 int mem_idx;
64 int fpu_enabled;
65 struct TranslationBlock *tb;
66 } DisasContext;
67
68 extern FILE *logfile;
69 extern int loglevel;
70
71 // This function uses non-native bit order
72 #define GET_FIELD(X, FROM, TO) \
73 ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
74
75 // This function uses the order in the manuals, i.e. bit 0 is 2^0
76 #define GET_FIELD_SP(X, FROM, TO) \
77 GET_FIELD(X, 31 - (TO), 31 - (FROM))
78
79 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
80 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
81
82 #ifdef TARGET_SPARC64
83 #define FFPREG(r) (r)
84 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
85 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
86 #else
87 #define FFPREG(r) (r)
88 #define DFPREG(r) (r & 0x1e)
89 #define QFPREG(r) (r & 0x1c)
90 #endif
91
/* Sign-extend the low 'len' bits of 'x' to a full int (1 <= len <= 32).
 * Used by GET_FIELDs/GET_FIELD_SPs to widen immediate instruction fields.
 * Implemented with unsigned mask-and-subtract arithmetic: the previous
 * "(x << n) >> n" form left-shifted a (possibly negative) signed value,
 * which is undefined behaviour in C.
 */
static int sign_extend(int x, int len)
{
    const unsigned int sign_bit = 1U << (len - 1);
    unsigned int v;

    /* Keep only the field bits; (sign_bit << 1) - 1 yields all-ones when
       len == 32, so the full-width case works too. */
    v = (unsigned int)x & ((sign_bit << 1) - 1);
    /* XOR flips the sign bit to the top of the field; the subtraction then
       propagates it through the high bits. */
    return (int)((v ^ sign_bit) - sign_bit);
}
97
98 #define IS_IMM (insn & (1<<13))
99
100 /* floating point registers moves */
/* Copy 32-bit FP register 'src' into the ft0 scratch slot, via cpu_tmp32. */
static void gen_op_load_fpr_FT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft0));
}

/* Copy 32-bit FP register 'src' into the ft1 scratch slot. */
static void gen_op_load_fpr_FT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft1));
}

/* Write the ft0 scratch slot back to FP register 'dst'. */
static void gen_op_store_FT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft0));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
}

/* Copy the even/odd FP register pair starting at 'src' into the dt0
   double scratch: fpr[src] -> high word, fpr[src + 1] -> low word.
   Callers are expected to pass an even 'src' (see DFPREG). */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) + offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) + offsetof(CPU_DoubleU, l.lower));
}

/* Same as above but targeting the dt1 double scratch. */
static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt1) + offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt1) + offsetof(CPU_DoubleU, l.lower));
}

/* Write the dt0 double scratch back to the register pair at 'dst'. */
static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) + offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) + offsetof(CPU_DoubleU, l.lower));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 1]));
}
142
143 #ifdef CONFIG_USER_ONLY
/* Copy the four FP registers starting at 'src' into the qt0 quad scratch,
   most-significant word first.  Only built for CONFIG_USER_ONLY (see the
   surrounding #ifdef) — presumably the softmmu build handles quads
   elsewhere; confirm before relying on this in system mode. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 2]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 3]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.lowest));
}

/* Same as above but targeting the qt1 quad scratch. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) + offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) + offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 2]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) + offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 3]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) + offsetof(CPU_QuadU, l.lowest));
}

/* Write the qt0 quad scratch back to the four registers at 'dst'. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 1]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 2]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) + offsetof(CPU_QuadU, l.lowest));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 3]));
}
179 #endif
180
181 /* moves */
182 #ifdef CONFIG_USER_ONLY
183 #define supervisor(dc) 0
184 #ifdef TARGET_SPARC64
185 #define hypervisor(dc) 0
186 #endif
187 #else
188 #define supervisor(dc) (dc->mem_idx >= 1)
189 #ifdef TARGET_SPARC64
190 #define hypervisor(dc) (dc->mem_idx == 2)
191 #else
192 #endif
193 #endif
194
195 #ifdef TARGET_ABI32
196 #define ABI32_MASK(addr) tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
197 #else
198 #define ABI32_MASK(addr)
199 #endif
200
/* Load architectural integer register 'reg' into TCG value 'tn'.
   %g0 always reads as zero; %g1..%g7 live in the fixed TCG globals
   cpu_gregs[]; windowed registers (reg >= 8) are loaded indirectly
   through cpu_regwptr, which points at the current window. */
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}

/* Store TCG value 'tn' into architectural register 'reg'.
   Writes to %g0 are silently discarded, per the SPARC architecture. */
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
222
/* Emit the end-of-TB jump to (pc, npc).  When both targets lie in the
   same guest page as the start of the current TB, chain directly with
   goto_tb slot 'tb_num'; otherwise just set pc/npc and return to the
   main loop (exit_tb(0)), since cross-page chaining is unsafe. */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        /* low bits of the returned pointer encode which jump slot to patch */
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
243
244 // XXX suboptimal
/* Extract PSR.N from the 32-bit condition-code value 'src' into 'reg'
   as 0 or 1 (widen to target_long, shift, mask). */
static inline void gen_mov_reg_N(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract PSR.Z into 'reg' as 0 or 1. */
static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract PSR.V (overflow) into 'reg' as 0 or 1. */
static inline void gen_mov_reg_V(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract PSR.C (carry) into 'reg' as 0 or 1. */
static inline void gen_mov_reg_C(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Zero the icc flags before recomputing them.  NOTE(review): this zeroes
   the whole cpu_psr global — presumably cpu_psr carries only the
   condition-code bits here; confirm against the CPU state mapping. */
static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}

#ifdef TARGET_SPARC64
/* Zero the xcc (64-bit) flags before recomputing them. */
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
284
/* old op:
   if (!T0)
       env->psr |= PSR_ZERO;
   if ((int32_t) T0 < 0)
       env->psr |= PSR_NEG;
*/
/* Set icc N and Z from 'dst'.  The value is masked to 32 bits before the
   zero test and sign-extended before the sign test, so only the low word
   of a 64-bit result is considered.  Must run after gen_cc_clear_icc(). */
static inline void gen_cc_NZ_icc(TCGv dst)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    /* skip setting Z when the 32-bit result is non-zero */
    tcg_gen_brcond_tl(TCG_COND_NE, r_temp, tcg_const_tl(0), l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_ext_i32_tl(r_temp, dst);
    /* skip setting N when the sign-extended result is >= 0 */
    tcg_gen_brcond_tl(TCG_COND_GE, r_temp, tcg_const_tl(0), l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
}

#ifdef TARGET_SPARC64
/* Set xcc N and Z from the full-width 'dst'. */
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_NE, dst, tcg_const_tl(0), l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcond_tl(TCG_COND_GE, dst, tcg_const_tl(0), l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif
324
325 /* old op:
326 if (T0 < src1)
327 env->psr |= PSR_CARRY;
328 */
329 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
330 {
331 TCGv r_temp;
332 int l1;
333
334 l1 = gen_new_label();
335 r_temp = tcg_temp_new(TCG_TYPE_TL);
336 tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
337 tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
338 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
339 gen_set_label(l1);
340 }
341
#ifdef TARGET_SPARC64
/* Set xcc C after an addition: carry out when (unsigned) dst < src1,
   compared at full 64-bit width. */
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
353
354 /* old op:
355 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
356 env->psr |= PSR_OVF;
357 */
/* Set icc V after an addition: overflow when both operands have the same
   sign and the result's sign differs, i.e. ((src1 ^ src2 ^ -1) &
   (src1 ^ dst)) has bit 31 set.  The bit is shifted into PSR_OVF
   position and OR-ed into cpu_psr. */
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}

#ifdef TARGET_SPARC64
/* Same computation for xcc, using bit 63 of the full-width operands. */
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
389
/* Trap-on-overflow check for tagged add (TADDccTV): raise TT_TOVF when
   the 32-bit signed addition src1 + src2 -> dst overflowed (same overflow
   predicate as gen_cc_V_add_icc). */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
    /* no overflow -> skip the exception call */
    tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l1);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_TOVF));
    gen_set_label(l1);
}

/* Tagged-arithmetic overflow flag: set icc V when either operand has a
   non-zero tag (low two bits). */
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

/* Tagged-arithmetic trap: raise TT_TOVF when either operand has a
   non-zero tag (used by the ...ccTV forms instead of setting V). */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_TOVF));
    gen_set_label(l1);
}
431
/* ADDcc: dst = src1 + src2, updating icc (and xcc on sparc64).  The
   operands are first copied to cpu_cc_src/cpu_cc_src2 so the flag
   helpers still see them even when 'dst' aliases 'src1' or 'src2'. */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(dst, src1, src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_C_add_icc(dst, cpu_cc_src);
    gen_cc_V_add_icc(dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
    gen_cc_C_add_xcc(dst, cpu_cc_src);
    gen_cc_V_add_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}

/* ADDXcc: dst = src1 + src2 + C, updating flags.  The add is done in two
   steps (src1 + C, then + src2); carry is accumulated after each step
   since either partial sum can produce it. */
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    /* read the incoming carry bit before the flags are cleared */
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(dst, src1, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_add_icc(dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(dst, cpu_cc_src);
#endif
    tcg_gen_add_tl(dst, dst, cpu_cc_src2);
    gen_cc_NZ_icc(dst);
    gen_cc_C_add_icc(dst, cpu_cc_src);
    gen_cc_V_add_icc(dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(dst);
    gen_cc_C_add_xcc(dst, cpu_cc_src);
    gen_cc_V_add_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}

/* TADDcc: tagged add; like ADDcc but additionally sets V when either
   operand carries a non-zero tag. */
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(dst, src1, src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_C_add_icc(dst, cpu_cc_src);
    gen_cc_V_add_icc(dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
    gen_cc_C_add_xcc(dst, cpu_cc_src);
    gen_cc_V_add_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}

/* TADDccTV: tagged add that traps (TT_TOVF) on a set tag or on signed
   overflow instead of setting V; remaining flags updated as usual. */
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(dst, src1, src2);
    gen_add_tv(dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_C_add_icc(dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
    gen_cc_C_add_xcc(dst, cpu_cc_src);
    gen_cc_V_add_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}
507
508 /* old op:
509 if (src1 < T1)
510 env->psr |= PSR_CARRY;
511 */
/* Set icc C after a subtraction: borrow when (unsigned) src1 < src2.
   Both operands are truncated to 32 bits first so the compare reflects
   the icc (32-bit) view even on a 64-bit target. */
static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
{
    TCGv r_temp1, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp1 = tcg_temp_new(TCG_TYPE_TL);
    r_temp2 = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
    tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
    tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
    gen_set_label(l1);
}

#ifdef TARGET_SPARC64
/* Set xcc C after a subtraction, compared at full 64-bit width. */
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
538
539 /* old op:
540 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
541 env->psr |= PSR_OVF;
542 */
/* Set icc V after a subtraction: overflow when the operands' signs
   differ and the result's sign differs from src1, i.e.
   ((src1 ^ src2) & (src1 ^ dst)) has bit 31 set. */
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}

#ifdef TARGET_SPARC64
/* Same computation for xcc, using bit 63. */
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
572
/* Trap-on-overflow check for tagged subtract (TSUBccTV): raise TT_TOVF
   when the 32-bit signed subtraction src1 - src2 -> dst overflowed
   (same predicate as gen_cc_V_sub_icc). */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1 << 31));
    tcg_gen_brcond_tl(TCG_COND_EQ, r_temp, tcg_const_tl(0), l1);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_TOVF));
    gen_set_label(l1);
}
589
/* SUBcc: dst = src1 - src2, updating icc (and xcc on sparc64).  Operands
   are saved in cpu_cc_src/cpu_cc_src2 first in case 'dst' aliases them. */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(dst, src1, src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}

/* SUBXcc: dst = src1 - src2 - C, in two steps with carry accumulated
   after each partial subtraction (mirrors gen_op_addx_cc).
   NOTE(review): the intermediate gen_cc_C_sub_icc(dst, cpu_cc_src) calls
   pass (result, src1), which flags borrow when (src1 - C) < src1 — i.e.
   whenever C was set and src1 != 0.  Verify this against the SPARC
   borrow definition (src1 < C); the argument order looks suspect. */
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    /* read the incoming carry bit before the flags are cleared */
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(dst, src1, cpu_tmp0);
    gen_cc_clear_icc();
    gen_cc_C_sub_icc(dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(dst, cpu_cc_src);
#endif
    tcg_gen_sub_tl(dst, dst, cpu_cc_src2);
    gen_cc_NZ_icc(dst);
    gen_cc_C_sub_icc(dst, cpu_cc_src);
    gen_cc_V_sub_icc(dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(dst);
    gen_cc_C_sub_xcc(dst, cpu_cc_src);
    gen_cc_V_sub_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}

/* TSUBcc: tagged subtract; like SUBcc but also sets V on a set tag. */
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(dst, src1, src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}

/* TSUBccTV: tagged subtract that traps (TT_TOVF) on a set tag or on
   signed overflow instead of setting V. */
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(dst, src1, src2);
    gen_sub_tv(dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(dst, cpu_cc_src, cpu_cc_src2);
#endif
}
665
/* MULScc: one step of the SPARC V8 multiply-step instruction.
   Conditionally zeroes the second operand on y&1, shifts y right with
   the operand's low bit inserted at the top, shifts src1 right with
   (N ^ V) inserted at the top, adds, and updates icc.  The statement
   order is significant throughout — do not reorder. */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    r_temp2 = tcg_temp_new(TCG_TYPE_I32);

    /* old op:
       if (!(env->y & 1))
           T1 = 0;
    */
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_ld32u_tl(r_temp, cpu_env, offsetof(CPUSPARCState, y));
    tcg_gen_trunc_tl_i32(r_temp2, r_temp);
    tcg_gen_andi_i32(r_temp2, r_temp2, 0x1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    /* y bit 0 set -> keep src2; clear -> use 0 */
    tcg_gen_brcond_i32(TCG_COND_NE, r_temp2, tcg_const_i32(0), l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_trunc_tl_i32(r_temp2, cpu_cc_src);
    tcg_gen_andi_i32(r_temp2, r_temp2, 0x1);
    tcg_gen_shli_i32(r_temp2, r_temp2, 31);
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, y));
    tcg_gen_shri_i32(cpu_tmp32, cpu_tmp32, 1);
    tcg_gen_or_i32(cpu_tmp32, cpu_tmp32, r_temp2);
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, y));

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    gen_cc_V_add_icc(dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(dst, cpu_cc_src);
}
717
/* UMUL: dst = (uint64_t)src1 * (uint64_t)src2; the high 32 bits are also
   stored in the Y register.  The temps are created as I64 values even
   though declared TCGv — this predates typed TCGv_i64 handles. */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* Y = high half of the 64-bit product */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_i32(r_temp, r_temp);
    tcg_gen_st_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
}

/* SMUL: same as gen_op_umul but with sign-extended operands. */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_i32(r_temp, r_temp);
    tcg_gen_st_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
}
759
#ifdef TARGET_SPARC64
/* Raise TT_DIV_ZERO when 'divisor' is zero. */
static inline void gen_trap_ifdivzero_tl(TCGv divisor)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_NE, divisor, tcg_const_tl(0), l1);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_DIV_ZERO));
    gen_set_label(l1);
}

/* SDIVX: 64-bit signed division with the two special cases handled
   explicitly: divide-by-zero traps, and INT64_MIN / -1 (which would
   overflow the host division) yields INT64_MIN. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(src2);
    /* not the INT64_MIN / -1 case -> do the real division at l1 */
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_cc_src, tcg_const_tl(INT64_MIN), l1);
    tcg_gen_brcond_tl(TCG_COND_NE, cpu_cc_src2, tcg_const_tl(-1), l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
#endif
789
/* Flag update after UDIV/SDIV with cc: sets N/Z from the result and V
   from cc_src2, which the division helper leaves non-zero on overflow
   (read back from env here). */
static inline void gen_op_div_cc(TCGv dst)
{
    int l1;

    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
    l1 = gen_new_label();
    tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, cc_src2));
    tcg_gen_brcond_tl(TCG_COND_EQ, cpu_tmp0, tcg_const_tl(0), l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}

/* Flag update for logical ops: only N and Z are set; C and V stay clear. */
static inline void gen_op_logic_cc(TCGv dst)
{
    gen_cc_clear_icc();
    gen_cc_NZ_icc(dst);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(dst);
#endif
}
812
/* Integer condition evaluators: each writes 0 or 1 to 'dst' from the
   PSR/xcc-format flags value in 'src', per the Bicc condition table.
   The comment above each gives the boolean formula it computes. */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
931
932 /*
933 FPSR bit field FCC1 | FCC0:
934 0 =
935 1 <
936 2 >
937 3 unordered
938 */
939 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
940 unsigned int fcc_offset)
941 {
942 tcg_gen_extu_i32_tl(reg, src);
943 tcg_gen_shri_tl(reg, reg, FSR_FCC0_SHIFT + fcc_offset);
944 tcg_gen_andi_tl(reg, reg, 0x1);
945 }
946
947 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
948 unsigned int fcc_offset)
949 {
950 tcg_gen_extu_i32_tl(reg, src);
951 tcg_gen_shri_tl(reg, reg, FSR_FCC1_SHIFT + fcc_offset);
952 tcg_gen_andi_tl(reg, reg, 0x1);
953 }
954
955 // !0: FCC0 | FCC1
956 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
957 unsigned int fcc_offset)
958 {
959 gen_mov_reg_FCC0(dst, src, fcc_offset);
960 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
961 tcg_gen_or_tl(dst, dst, cpu_tmp0);
962 }
963
964 // 1 or 2: FCC0 ^ FCC1
965 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
966 unsigned int fcc_offset)
967 {
968 gen_mov_reg_FCC0(dst, src, fcc_offset);
969 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
970 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
971 }
972
973 // 1 or 3: FCC0
974 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
975 unsigned int fcc_offset)
976 {
977 gen_mov_reg_FCC0(dst, src, fcc_offset);
978 }
979
980 // 1: FCC0 & !FCC1
981 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
982 unsigned int fcc_offset)
983 {
984 gen_mov_reg_FCC0(dst, src, fcc_offset);
985 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
986 tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
987 tcg_gen_and_tl(dst, dst, cpu_tmp0);
988 }
989
990 // 2 or 3: FCC1
991 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
992 unsigned int fcc_offset)
993 {
994 gen_mov_reg_FCC1(dst, src, fcc_offset);
995 }
996
997 // 2: !FCC0 & FCC1
998 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
999 unsigned int fcc_offset)
1000 {
1001 gen_mov_reg_FCC0(dst, src, fcc_offset);
1002 tcg_gen_xori_tl(dst, dst, 0x1);
1003 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1004 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1005 }
1006
1007 // 3: FCC0 & FCC1
1008 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
1009 unsigned int fcc_offset)
1010 {
1011 gen_mov_reg_FCC0(dst, src, fcc_offset);
1012 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1013 tcg_gen_and_tl(dst, dst, cpu_tmp0);
1014 }
1015
1016 // 0: !(FCC0 | FCC1)
1017 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
1018 unsigned int fcc_offset)
1019 {
1020 gen_mov_reg_FCC0(dst, src, fcc_offset);
1021 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1022 tcg_gen_or_tl(dst, dst, cpu_tmp0);
1023 tcg_gen_xori_tl(dst, dst, 0x1);
1024 }
1025
1026 // 0 or 3: !(FCC0 ^ FCC1)
1027 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
1028 unsigned int fcc_offset)
1029 {
1030 gen_mov_reg_FCC0(dst, src, fcc_offset);
1031 gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
1032 tcg_gen_xor_tl(dst, dst, cpu_tmp0);
1033 tcg_gen_xori_tl(dst, dst, 0x1);
1034 }
1035
1036 // 0 or 2: !FCC0
1037 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
1038 unsigned int fcc_offset)
1039 {
1040 gen_mov_reg_FCC0(dst, src, fcc_offset);
1041 tcg_gen_xori_tl(dst, dst, 0x1);
1042 }
1043
// !1: !(FCC0 & !FCC1)
/* FBUGE (unordered or greater or equal): true for every fcc value
   except 1; computed as the complement of (FCC0 & !FCC1). */
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);   /* !FCC1 */
    tcg_gen_and_tl(dst, dst, cpu_tmp0);         /* FCC0 & !FCC1 */
    tcg_gen_xori_tl(dst, dst, 0x1);             /* final negation */
}
1054
// 0 or 1: !FCC1
/* FBLE (less or equal): true for fcc values 0 and 1, i.e. FCC1 clear. */
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1062
// !2: !(!FCC0 & FCC1)
/* FBULE (unordered or less or equal): true for every fcc value except 2;
   computed as the complement of (!FCC0 & FCC1). */
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);             /* !FCC0 */
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);         /* !FCC0 & FCC1 */
    tcg_gen_xori_tl(dst, dst, 0x1);             /* final negation */
}
1073
// !3: !(FCC0 & FCC1)
/* FBO (ordered): true for every fcc value except 3 (unordered). */
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1083
/* Emit a two-way TB exit on r_cond: non-zero falls through to pc1
   (condition taken), zero branches to label l1 and exits to pc2.
   Both exits are emitted via gen_goto_tb so they can be direct-linked. */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    /* skip the taken exit when the condition evaluated to 0 */
    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
1098
/* Emit the exits for a conditional branch with the annul bit set.
   Taken (r_cond != 0): execute the delay slot at pc2, then continue at
   the branch target pc1.  Not taken: the delay slot is annulled, so
   resume at pc2 + 4. */
static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
                                target_ulong pc2, TCGv r_cond)
{
    int l1;

    l1 = gen_new_label();

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1);

    /* taken: pc = delay slot (pc2), npc = target (pc1) */
    gen_goto_tb(dc, 0, pc2, pc1);

    gen_set_label(l1);
    /* not taken + annul: skip the delay slot entirely */
    gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
}
1113
/* Materialize a pending two-valued npc: set cpu_npc to npc1 when r_cond
   is non-zero, npc2 otherwise.  Used to resolve the JUMP_PC state when
   the condition can no longer be kept symbolic. */
static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
                                      TCGv r_cond)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();

    tcg_gen_brcond_tl(TCG_COND_EQ, r_cond, tcg_const_tl(0), l1);

    tcg_gen_movi_tl(cpu_npc, npc1);     /* condition true */
    tcg_gen_br(l2);

    gen_set_label(l1);
    tcg_gen_movi_tl(cpu_npc, npc2);     /* condition false */
    gen_set_label(l2);
}
1131
/* call this function before using the condition register as it may
   have been set for a jump */
/* If npc is in the symbolic JUMP_PC state, emit code that picks one of
   the two recorded jump targets based on the current condition value,
   then downgrade npc to DYNAMIC_PC so the condition register is free
   for reuse. */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
1141
/* Make cpu_npc hold the correct next-pc value: resolve a pending
   JUMP_PC via the condition, or store a known static npc.  If npc is
   already DYNAMIC_PC, cpu_npc is up to date and nothing is emitted. */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1151
/* Flush the translator's static pc/npc into the CPU state, e.g. before
   calling a helper that may raise an exception. */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}
1157
/* Advance pc to the current npc (delay-slot semantics).  If npc is only
   known at run time (JUMP_PC or DYNAMIC_PC), resolve it and copy
   cpu_npc into cpu_pc, leaving dc->pc dynamic; otherwise just update
   the static value. */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
1171
/* Emit the sequential instruction step: pc = npc, npc += 4. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1177
/* Evaluate an integer branch condition into r_dst (0 or 1).
   cond is the 4-bit Bicc/BPcc condition field; cc selects the condition
   register: on sparc64, non-zero cc means xcc, otherwise (and always on
   sparc32) icc/psr.  Each case dispatches to the matching eval helper
   named after the branch mnemonic (be, bne, bl, bge, ...). */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:                   /* bn: never */
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:                   /* ba: always */
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1241
/* Evaluate a floating-point branch condition into r_dst (0 or 1).
   cc selects which fcc field of the FSR to test; the offsets below are
   bit positions relative to fcc0 (the "- 10" matches fcc0's base
   position used by the gen_mov_reg_FCC* helpers).  cond is the 4-bit
   FBfcc condition field, dispatched to the eval helper named after the
   branch mnemonic. */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:                   /* fcc0 */
        offset = 0;
        break;
    case 0x1:                   /* fcc1 */
        offset = 32 - 10;
        break;
    case 0x2:                   /* fcc2 */
        offset = 34 - 10;
        break;
    case 0x3:                   /* fcc3 */
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:                   /* fbn: never */
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:                   /* fba: always */
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1313
#ifdef TARGET_SPARC64
// Inverted logic
/* Map the 3-bit BPr register-condition field to a TCG comparison.
   The entries are the *negation* of the architectural condition: the
   emitter below branches over the "set 1" when the inverted condition
   holds.  Entries -1 are reserved/illegal encodings. */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};

/* Evaluate a register-based branch condition (BPr) into r_dst (0/1):
   default r_dst to 0, skip the final movi when the inverted comparison
   of r_src against 0 succeeds. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_movi_tl(r_dst, 0);
    tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], r_src, tcg_const_tl(0), l1);
    tcg_gen_movi_tl(r_dst, 1);
    gen_set_label(l1);
}
#endif
1338
/* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc).
   cond 0x0 ("never"): with the annul bit set the delay slot is skipped,
   otherwise execution simply falls through.  cond 0x8 ("always") is an
   unconditional branch, with/without delay slot per the annul bit.
   All other conditions evaluate into r_cond; with annul the TB ends
   here (gen_branch_a), without annul the decision is deferred by
   recording both candidate npcs and entering the JUMP_PC state. */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul: skip the delay slot */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annul: branch without executing the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;        /* npc if taken */
            dc->jump_pc[1] = dc->npc + 4;   /* npc if not taken */
            dc->npc = JUMP_PC;
        }
    }
}
1378
/* XXX: potentially incorrect if dynamic npc */
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).
   Identical control structure to do_branch, but the condition comes
   from gen_fcond (FSR fcc field selected by cc) instead of gen_cond. */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul: skip the delay slot */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annul: branch without executing the delay slot */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;        /* npc if taken */
            dc->jump_pc[1] = dc->npc + 4;   /* npc if not taken */
            dc->npc = JUMP_PC;
        }
    }
}
1418
#ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 branch-on-register (BPr): condition is a comparison of
   r_reg against zero (gen_cond_reg).  Unlike do_branch there is no
   never/always special case — BPr has no such encodings here. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;        /* npc if taken */
        dc->jump_pc[1] = dc->npc + 4;   /* npc if not taken */
        dc->npc = JUMP_PC;
    }
}
1439
/* FP compare dispatch.  On sparc64 each FCMP variant exists once per
   fcc destination field (fcc0..fcc3), selected by index; on sparc32
   there is only fcc0, so the fccno argument is ignored.  The quad
   (128-bit) variants are only built for user mode in this revision. */
static GenOpFunc * const gen_fcmps[4] = {
    helper_fcmps,
    helper_fcmps_fcc1,
    helper_fcmps_fcc2,
    helper_fcmps_fcc3,
};

static GenOpFunc * const gen_fcmpd[4] = {
    helper_fcmpd,
    helper_fcmpd_fcc1,
    helper_fcmpd_fcc2,
    helper_fcmpd_fcc3,
};

#if defined(CONFIG_USER_ONLY)
static GenOpFunc * const gen_fcmpq[4] = {
    helper_fcmpq,
    helper_fcmpq_fcc1,
    helper_fcmpq_fcc2,
    helper_fcmpq_fcc3,
};
#endif

/* FCMPE* variants: signal an exception on unordered operands. */
static GenOpFunc * const gen_fcmpes[4] = {
    helper_fcmpes,
    helper_fcmpes_fcc1,
    helper_fcmpes_fcc2,
    helper_fcmpes_fcc3,
};

static GenOpFunc * const gen_fcmped[4] = {
    helper_fcmped,
    helper_fcmped_fcc1,
    helper_fcmped_fcc2,
    helper_fcmped_fcc3,
};

#if defined(CONFIG_USER_ONLY)
static GenOpFunc * const gen_fcmpeq[4] = {
    helper_fcmpeq,
    helper_fcmpeq_fcc1,
    helper_fcmpeq_fcc2,
    helper_fcmpeq_fcc3,
};
#endif

/* Emit a single-precision compare into fcc field fccno. */
static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmps[fccno]);
}

/* Emit a double-precision compare into fcc field fccno. */
static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpd[fccno]);
}

#if defined(CONFIG_USER_ONLY)
/* Emit a quad-precision compare into fcc field fccno. */
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpq[fccno]);
}
#endif

/* Signalling variants of the above. */
static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpes[fccno]);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmped[fccno]);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
}
#endif

#else

/* sparc32: only one fcc field exists, so fccno is unused. */
static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmps);
}

static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpd);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpq);
}
#endif

static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpes);
}

static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmped);
}

#if defined(CONFIG_USER_ONLY)
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpeq);
}
#endif

#endif
1557
/* Raise an FP exception: replace the FSR ftt field with fsr_flags and
   emit a call that raises TT_FP_EXCP. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~FSR_FTT_MASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_FP_EXCP));
}
1564
/* If the FPU is disabled (system emulation only), emit an fp-disabled
   trap and end the TB.  Returns 1 when the trap was emitted so the
   caller must skip translating the instruction, 0 otherwise.  In user
   mode the FPU is always available. */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        save_state(dc, r_cond);
        tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_NFPU_INSN));
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
1577
/* Clear the FSR trap type (ftt) and current-exception (cexc) fields
   before executing an FP operation. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~(FSR_FTT_MASK | FSR_CEXC_MASK));
}
1582
/* Reset the softfloat accumulated exception flags via a helper call. */
static inline void gen_clear_float_exceptions(void)
{
    tcg_gen_helper_0_0(helper_clear_float_exceptions);
}
1587
/* asi moves */
#ifdef TARGET_SPARC64
/* Obtain the ASI for an alternate-space access as an i32 TCGv.
   Immediate form (i=1): the ASI comes from the %asi register in env and
   the instruction's offset field is folded into r_addr in place.
   NOTE(review): the offset uses GET_FIELD(insn, 25, 31) — a 7-bit
   unsigned field, whereas the i=1 address form would normally carry a
   sign-extended simm13 (GET_FIELDs(insn, 19, 31)); verify against the
   decoder's field numbering.
   Register form (i=0): the ASI is the immediate asi field of insn. */
static inline TCGv gen_get_asi(int insn, TCGv r_addr)
{
    int asi, offset;
    TCGv r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new(TCG_TYPE_I32);
        offset = GET_FIELD(insn, 25, 31);
        tcg_gen_addi_tl(r_addr, r_addr, offset);
        tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}

/* Load `size` bytes from (addr, asi) into dst; sign selects
   sign/zero extension.  May adjust addr (immediate-ASI form). */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size, int sign)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi,
                       tcg_const_i32(size), tcg_const_i32(sign));
}

/* Store `size` bytes of src to (addr, asi). */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, tcg_const_i32(size));
}

/* FP load from alternate space into fp register rd. */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, tcg_const_i32(size),
                       tcg_const_i32(rd));
}

/* FP store to alternate space from fp register rd. */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, tcg_const_i32(size),
                       tcg_const_i32(rd));
}

/* SWAPA: atomically (at the helper level) exchange the 32-bit word at
   (addr, asi) with dst.  The old memory value ends up in dst. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_temp, r_asi;

    r_temp = tcg_temp_new(TCG_TYPE_I32);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_ld_asi, r_temp, addr, r_asi,
                       tcg_const_i32(4), tcg_const_i32(0));
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi,
                       tcg_const_i32(4));
    tcg_gen_extu_i32_tl(dst, r_temp);
}

/* LDDA: 64-bit alternate-space load split into hi/lo 32-bit halves
   (hi gets bits 63:32, lo bits 31:0). */
static inline void gen_ldda_asi(TCGv lo, TCGv hi, TCGv addr, int insn)
{
    TCGv r_asi;

    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi,
                       tcg_const_i32(8), tcg_const_i32(0));
    tcg_gen_andi_i64(lo, cpu_tmp64, 0xffffffffULL);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_andi_i64(hi, cpu_tmp64, 0xffffffffULL);
}

/* STDA: pack hi (register rd) and register rd+1 into a 64-bit value via
   helper_pack64 and store it to the alternate space. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_temp, r_asi;

    r_temp = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi,
                       r_temp);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi,
                       tcg_const_i32(8));
}

/* CASA: 32-bit compare-and-swap — compare register rd against memory,
   conditionally store val2; dst receives the old memory value. */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn, int rd)
{
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
}

/* CASXA: 64-bit compare-and-swap, same contract as gen_cas_asi. */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn, int rd)
{
    TCGv r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
}
#elif !defined(CONFIG_USER_ONLY)

/* sparc32 system emulation: the ASI is always the immediate asi field;
   the 64-bit helper result goes through cpu_tmp64 and is truncated to
   the target word size. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size, int sign)
{
    int asi;

    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, tcg_const_i32(asi),
                       tcg_const_i32(size), tcg_const_i32(sign));
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}

/* Store `size` bytes of src to (addr, asi); src is widened to 64 bits
   for the helper. */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    int asi;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, tcg_const_i32(asi),
                       tcg_const_i32(size));
}

/* SWAPA: exchange the 32-bit word at (addr, asi) with dst; the old
   memory value ends up in dst. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    int asi;
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_I32);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_1_4(helper_ld_asi, r_temp, addr, tcg_const_i32(asi),
                       tcg_const_i32(4), tcg_const_i32(0));
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, tcg_const_i32(asi),
                       tcg_const_i32(4));
    tcg_gen_extu_i32_tl(dst, r_temp);
}

/* LDDA: 64-bit alternate-space load split into hi (bits 63:32) and
   lo (bits 31:0). */
static inline void gen_ldda_asi(TCGv lo, TCGv hi, TCGv addr, int insn)
{
    int asi;

    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, tcg_const_i32(asi),
                       tcg_const_i32(8), tcg_const_i32(0));
    tcg_gen_trunc_i64_tl(lo, cpu_tmp64);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
}

/* STDA: pack hi (register rd) and register rd+1 via helper_pack64 and
   store the 64-bit result to the alternate space. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    int asi;
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_I32);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi, r_temp);
    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, tcg_const_i32(asi),
                       tcg_const_i32(8));
}
#endif
1760
#if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the byte at (addr, asi) into dst, then store 0xff back.
   NOTE(review): the store path re-extracts the ASI directly from the
   insn field, while the load (gen_ld_asi) on sparc64 may instead use
   %asi and adjust addr for the immediate form — the two paths can
   disagree for i=1 encodings; verify. */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    int asi;

    gen_ld_asi(dst, addr, insn, 1, 0);

    asi = GET_FIELD(insn, 19, 26);
    tcg_gen_helper_0_4(helper_st_asi, addr, tcg_const_i64(0xffULL),
                       tcg_const_i32(asi), tcg_const_i32(1));
}
#endif
1773
/* Fetch rs1 into def and return it.  %g0 reads as constant zero,
   %g1-%g7 come from the global-register TCGvs, and windowed registers
   are loaded through the window pointer.  The commented-out lines are a
   disabled optimization that would have returned the source TCGv
   directly instead of copying; as written the value is always copied
   into def and def is returned. */
static inline TCGv get_src1(unsigned int insn, TCGv def)
{
    TCGv r_rs1 = def;
    unsigned int rs1;

    rs1 = GET_FIELD(insn, 13, 17);
    if (rs1 == 0)
        //r_rs1 = tcg_const_tl(0);
        tcg_gen_movi_tl(def, 0);
    else if (rs1 < 8)
        //r_rs1 = cpu_gregs[rs1];
        tcg_gen_mov_tl(def, cpu_gregs[rs1]);
    else
        tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
    return r_rs1;
}
1790
/* Fetch the second operand (rs2 or a sign-extended simm13 immediate).
   Unlike get_src1 this may return a TCGv other than def: a constant
   temp for immediates and %g0, or the shared global-register TCGv for
   %g1-%g7 — callers must not write through the returned value.  Only
   windowed registers are loaded into def. */
static inline TCGv get_src2(unsigned int insn, TCGv def)
{
    TCGv r_rs2 = def;
    unsigned int rs2;

    if (IS_IMM) { /* immediate */
        rs2 = GET_FIELDs(insn, 19, 31);
        r_rs2 = tcg_const_tl((int)rs2);
    } else { /* register */
        rs2 = GET_FIELD(insn, 27, 31);
        if (rs2 == 0)
            r_rs2 = tcg_const_tl(0);
        else if (rs2 < 8)
            r_rs2 = cpu_gregs[rs2];
        else
            tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
    }
    return r_rs2;
}
1810
1811 /* before an instruction, dc->pc must be static */
1812 static void disas_sparc_insn(DisasContext * dc)
1813 {
1814 unsigned int insn, opc, rs1, rs2, rd;
1815
1816 insn = ldl_code(dc->pc);
1817 opc = GET_FIELD(insn, 0, 1);
1818
1819 rd = GET_FIELD(insn, 2, 6);
1820
1821 cpu_dst = cpu_T[0];
1822 cpu_src1 = cpu_T[0]; // const
1823 cpu_src2 = cpu_T[1]; // const
1824
1825 // loads and stores
1826 cpu_addr = cpu_T[0];
1827 cpu_val = cpu_T[1];
1828
1829 switch (opc) {
1830 case 0: /* branches/sethi */
1831 {
1832 unsigned int xop = GET_FIELD(insn, 7, 9);
1833 int32_t target;
1834 switch (xop) {
1835 #ifdef TARGET_SPARC64
1836 case 0x1: /* V9 BPcc */
1837 {
1838 int cc;
1839
1840 target = GET_FIELD_SP(insn, 0, 18);
1841 target = sign_extend(target, 18);
1842 target <<= 2;
1843 cc = GET_FIELD_SP(insn, 20, 21);
1844 if (cc == 0)
1845 do_branch(dc, target, insn, 0, cpu_cond);
1846 else if (cc == 2)
1847 do_branch(dc, target, insn, 1, cpu_cond);
1848 else
1849 goto illegal_insn;
1850 goto jmp_insn;
1851 }
1852 case 0x3: /* V9 BPr */
1853 {
1854 target = GET_FIELD_SP(insn, 0, 13) |
1855 (GET_FIELD_SP(insn, 20, 21) << 14);
1856 target = sign_extend(target, 16);
1857 target <<= 2;
1858 cpu_src1 = get_src1(insn, cpu_src1);
1859 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1860 goto jmp_insn;
1861 }
1862 case 0x5: /* V9 FBPcc */
1863 {
1864 int cc = GET_FIELD_SP(insn, 20, 21);
1865 if (gen_trap_ifnofpu(dc, cpu_cond))
1866 goto jmp_insn;
1867 target = GET_FIELD_SP(insn, 0, 18);
1868 target = sign_extend(target, 19);
1869 target <<= 2;
1870 do_fbranch(dc, target, insn, cc, cpu_cond);
1871 goto jmp_insn;
1872 }
1873 #else
1874 case 0x7: /* CBN+x */
1875 {
1876 goto ncp_insn;
1877 }
1878 #endif
1879 case 0x2: /* BN+x */
1880 {
1881 target = GET_FIELD(insn, 10, 31);
1882 target = sign_extend(target, 22);
1883 target <<= 2;
1884 do_branch(dc, target, insn, 0, cpu_cond);
1885 goto jmp_insn;
1886 }
1887 case 0x6: /* FBN+x */
1888 {
1889 if (gen_trap_ifnofpu(dc, cpu_cond))
1890 goto jmp_insn;
1891 target = GET_FIELD(insn, 10, 31);
1892 target = sign_extend(target, 22);
1893 target <<= 2;
1894 do_fbranch(dc, target, insn, 0, cpu_cond);
1895 goto jmp_insn;
1896 }
1897 case 0x4: /* SETHI */
1898 if (rd) { // nop
1899 uint32_t value = GET_FIELD(insn, 10, 31);
1900 tcg_gen_movi_tl(cpu_dst, value << 10);
1901 gen_movl_TN_reg(rd, cpu_dst);
1902 }
1903 break;
1904 case 0x0: /* UNIMPL */
1905 default:
1906 goto illegal_insn;
1907 }
1908 break;
1909 }
1910 break;
1911 case 1:
1912 /*CALL*/ {
1913 target_long target = GET_FIELDs(insn, 2, 31) << 2;
1914
1915 gen_movl_TN_reg(15, tcg_const_tl(dc->pc));
1916 target += dc->pc;
1917 gen_mov_pc_npc(dc, cpu_cond);
1918 dc->npc = target;
1919 }
1920 goto jmp_insn;
1921 case 2: /* FPU & Logical Operations */
1922 {
1923 unsigned int xop = GET_FIELD(insn, 7, 12);
1924 if (xop == 0x3a) { /* generate trap */
1925 int cond;
1926
1927 cpu_src1 = get_src1(insn, cpu_src1);
1928 if (IS_IMM) {
1929 rs2 = GET_FIELD(insn, 25, 31);
1930 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
1931 } else {
1932 rs2 = GET_FIELD(insn, 27, 31);
1933 if (rs2 != 0) {
1934 gen_movl_reg_TN(rs2, cpu_src2);
1935 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
1936 } else
1937 tcg_gen_mov_tl(cpu_dst, cpu_src1);
1938 }
1939 cond = GET_FIELD(insn, 3, 6);
1940 if (cond == 0x8) {
1941 save_state(dc, cpu_cond);
1942 tcg_gen_helper_0_1(helper_trap, cpu_dst);
1943 } else if (cond != 0) {
1944 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
1945 #ifdef TARGET_SPARC64
1946 /* V9 icc/xcc */
1947 int cc = GET_FIELD_SP(insn, 11, 12);
1948
1949 save_state(dc, cpu_cond);
1950 if (cc == 0)
1951 gen_cond(r_cond, 0, cond);
1952 else if (cc == 2)
1953 gen_cond(r_cond, 1, cond);
1954 else
1955 goto illegal_insn;
1956 #else
1957 save_state(dc, cpu_cond);
1958 gen_cond(r_cond, 0, cond);
1959 #endif
1960 tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
1961 }
1962 gen_op_next_insn();
1963 tcg_gen_exit_tb(0);
1964 dc->is_br = 1;
1965 goto jmp_insn;
1966 } else if (xop == 0x28) {
1967 rs1 = GET_FIELD(insn, 13, 17);
1968 switch(rs1) {
1969 case 0: /* rdy */
1970 #ifndef TARGET_SPARC64
1971 case 0x01 ... 0x0e: /* undefined in the SPARCv8
1972 manual, rdy on the microSPARC
1973 II */
1974 case 0x0f: /* stbar in the SPARCv8 manual,
1975 rdy on the microSPARC II */
1976 case 0x10 ... 0x1f: /* implementation-dependent in the
1977 SPARCv8 manual, rdy on the
1978 microSPARC II */
1979 #endif
1980 tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, y));
1981 gen_movl_TN_reg(rd, cpu_dst);
1982 break;
1983 #ifdef TARGET_SPARC64
1984 case 0x2: /* V9 rdccr */
1985 tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
1986 gen_movl_TN_reg(rd, cpu_dst);
1987 break;
1988 case 0x3: /* V9 rdasi */
1989 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, asi));
1990 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
1991 gen_movl_TN_reg(rd, cpu_dst);
1992 break;
1993 case 0x4: /* V9 rdtick */
1994 {
1995 TCGv r_tickptr;
1996
1997 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
1998 tcg_gen_ld_ptr(r_tickptr, cpu_env,
1999 offsetof(CPUState, tick));
2000 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2001 r_tickptr);
2002 gen_movl_TN_reg(rd, cpu_dst);
2003 }
2004 break;
2005 case 0x5: /* V9 rdpc */
2006 tcg_gen_movi_tl(cpu_dst, dc->pc);
2007 gen_movl_TN_reg(rd, cpu_dst);
2008 break;
2009 case 0x6: /* V9 rdfprs */
2010 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fprs));
2011 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2012 gen_movl_TN_reg(rd, cpu_dst);
2013 break;
2014 case 0xf: /* V9 membar */
2015 break; /* no effect */
2016 case 0x13: /* Graphics Status */
2017 if (gen_trap_ifnofpu(dc, cpu_cond))
2018 goto jmp_insn;
2019 tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, gsr));
2020 gen_movl_TN_reg(rd, cpu_dst);
2021 break;
2022 case 0x17: /* Tick compare */
2023 tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tick_cmpr));
2024 gen_movl_TN_reg(rd, cpu_dst);
2025 break;
2026 case 0x18: /* System tick */
2027 {
2028 TCGv r_tickptr;
2029
2030 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2031 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2032 offsetof(CPUState, stick));
2033 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2034 r_tickptr);
2035 gen_movl_TN_reg(rd, cpu_dst);
2036 }
2037 break;
2038 case 0x19: /* System tick compare */
2039 tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, stick_cmpr));
2040 gen_movl_TN_reg(rd, cpu_dst);
2041 break;
2042 case 0x10: /* Performance Control */
2043 case 0x11: /* Performance Instrumentation Counter */
2044 case 0x12: /* Dispatch Control */
2045 case 0x14: /* Softint set, WO */
2046 case 0x15: /* Softint clear, WO */
2047 case 0x16: /* Softint write */
2048 #endif
2049 default:
2050 goto illegal_insn;
2051 }
2052 #if !defined(CONFIG_USER_ONLY)
2053 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2054 #ifndef TARGET_SPARC64
2055 if (!supervisor(dc))
2056 goto priv_insn;
2057 tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
2058 #else
2059 if (!hypervisor(dc))
2060 goto priv_insn;
2061 rs1 = GET_FIELD(insn, 13, 17);
2062 switch (rs1) {
2063 case 0: // hpstate
2064 // gen_op_rdhpstate();
2065 break;
2066 case 1: // htstate
2067 // gen_op_rdhtstate();
2068 break;
2069 case 3: // hintp
2070 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hintp));
2071 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2072 break;
2073 case 5: // htba
2074 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, htba));
2075 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2076 break;
2077 case 6: // hver
2078 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hver));
2079 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2080 break;
2081 case 31: // hstick_cmpr
2082 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
2083 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hstick_cmpr));
2084 break;
2085 default:
2086 goto illegal_insn;
2087 }
2088 #endif
2089 gen_movl_TN_reg(rd, cpu_dst);
2090 break;
2091 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2092 if (!supervisor(dc))
2093 goto priv_insn;
2094 #ifdef TARGET_SPARC64
2095 rs1 = GET_FIELD(insn, 13, 17);
2096 switch (rs1) {
2097 case 0: // tpc
2098 {
2099 TCGv r_tsptr;
2100
2101 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2102 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2103 offsetof(CPUState, tsptr));
2104 tcg_gen_ld_tl(cpu_dst, r_tsptr,
2105 offsetof(trap_state, tpc));
2106 }
2107 break;
2108 case 1: // tnpc
2109 {
2110 TCGv r_tsptr;
2111
2112 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2113 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2114 offsetof(CPUState, tsptr));
2115 tcg_gen_ld_tl(cpu_dst, r_tsptr,
2116 offsetof(trap_state, tnpc));
2117 }
2118 break;
2119 case 2: // tstate
2120 {
2121 TCGv r_tsptr;
2122
2123 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2124 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2125 offsetof(CPUState, tsptr));
2126 tcg_gen_ld_tl(cpu_dst, r_tsptr,
2127 offsetof(trap_state, tstate));
2128 }
2129 break;
2130 case 3: // tt
2131 {
2132 TCGv r_tsptr;
2133
2134 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2135 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2136 offsetof(CPUState, tsptr));
2137 tcg_gen_ld_i32(cpu_dst, r_tsptr,
2138 offsetof(trap_state, tt));
2139 }
2140 break;
2141 case 4: // tick
2142 {
2143 TCGv r_tickptr;
2144
2145 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2146 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2147 offsetof(CPUState, tick));
2148 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2149 r_tickptr);
2150 gen_movl_TN_reg(rd, cpu_dst);
2151 }
2152 break;
2153 case 5: // tba
2154 tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tbr));
2155 break;
2156 case 6: // pstate
2157 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, pstate));
2158 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2159 break;
2160 case 7: // tl
2161 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, tl));
2162 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2163 break;
2164 case 8: // pil
2165 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, psrpil));
2166 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2167 break;
2168 case 9: // cwp
2169 tcg_gen_helper_1_0(helper_rdcwp, cpu_dst);
2170 break;
2171 case 10: // cansave
2172 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cansave));
2173 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2174 break;
2175 case 11: // canrestore
2176 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, canrestore));
2177 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2178 break;
2179 case 12: // cleanwin
2180 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cleanwin));
2181 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2182 break;
2183 case 13: // otherwin
2184 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, otherwin));
2185 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2186 break;
2187 case 14: // wstate
2188 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wstate));
2189 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2190 break;
2191 case 16: // UA2005 gl
2192 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, gl));
2193 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2194 break;
2195 case 26: // UA2005 strand status
2196 if (!hypervisor(dc))
2197 goto priv_insn;
2198 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ssr));
2199 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2200 break;
2201 case 31: // ver
2202 tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, version));
2203 break;
2204 case 15: // fq
2205 default:
2206 goto illegal_insn;
2207 }
2208 #else
2209 tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wim));
2210 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2211 #endif
2212 gen_movl_TN_reg(rd, cpu_dst);
2213 break;
2214 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2215 #ifdef TARGET_SPARC64
2216 tcg_gen_helper_0_0(helper_flushw);
2217 #else
2218 if (!supervisor(dc))
2219 goto priv_insn;
2220 tcg_gen_ld_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tbr));
2221 gen_movl_TN_reg(rd, cpu_dst);
2222 #endif
2223 break;
2224 #endif
2225 } else if (xop == 0x34) { /* FPU Operations */
2226 if (gen_trap_ifnofpu(dc, cpu_cond))
2227 goto jmp_insn;
2228 gen_op_clear_ieee_excp_and_FTT();
2229 rs1 = GET_FIELD(insn, 13, 17);
2230 rs2 = GET_FIELD(insn, 27, 31);
2231 xop = GET_FIELD(insn, 18, 26);
2232 switch (xop) {
2233 case 0x1: /* fmovs */
2234 gen_op_load_fpr_FT0(rs2);
2235 gen_op_store_FT0_fpr(rd);
2236 break;
2237 case 0x5: /* fnegs */
2238 gen_op_load_fpr_FT1(rs2);
2239 tcg_gen_helper_0_0(helper_fnegs);
2240 gen_op_store_FT0_fpr(rd);
2241 break;
2242 case 0x9: /* fabss */
2243 gen_op_load_fpr_FT1(rs2);
2244 tcg_gen_helper_0_0(helper_fabss);
2245 gen_op_store_FT0_fpr(rd);
2246 break;
2247 case 0x29: /* fsqrts */
2248 gen_op_load_fpr_FT1(rs2);
2249 gen_clear_float_exceptions();
2250 tcg_gen_helper_0_0(helper_fsqrts);
2251 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2252 gen_op_store_FT0_fpr(rd);
2253 break;
2254 case 0x2a: /* fsqrtd */
2255 gen_op_load_fpr_DT1(DFPREG(rs2));
2256 gen_clear_float_exceptions();
2257 tcg_gen_helper_0_0(helper_fsqrtd);
2258 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2259 gen_op_store_DT0_fpr(DFPREG(rd));
2260 break;
2261 case 0x2b: /* fsqrtq */
2262 #if defined(CONFIG_USER_ONLY)
2263 gen_op_load_fpr_QT1(QFPREG(rs2));
2264 gen_clear_float_exceptions();
2265 tcg_gen_helper_0_0(helper_fsqrtq);
2266 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2267 gen_op_store_QT0_fpr(QFPREG(rd));
2268 break;
2269 #else
2270 goto nfpu_insn;
2271 #endif
2272 case 0x41:
2273 gen_op_load_fpr_FT0(rs1);
2274 gen_op_load_fpr_FT1(rs2);
2275 gen_clear_float_exceptions();
2276 tcg_gen_helper_0_0(helper_fadds);
2277 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2278 gen_op_store_FT0_fpr(rd);
2279 break;
2280 case 0x42:
2281 gen_op_load_fpr_DT0(DFPREG(rs1));
2282 gen_op_load_fpr_DT1(DFPREG(rs2));
2283 gen_clear_float_exceptions();
2284 tcg_gen_helper_0_0(helper_faddd);
2285 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2286 gen_op_store_DT0_fpr(DFPREG(rd));
2287 break;
2288 case 0x43: /* faddq */
2289 #if defined(CONFIG_USER_ONLY)
2290 gen_op_load_fpr_QT0(QFPREG(rs1));
2291 gen_op_load_fpr_QT1(QFPREG(rs2));
2292 gen_clear_float_exceptions();
2293 tcg_gen_helper_0_0(helper_faddq);
2294 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2295 gen_op_store_QT0_fpr(QFPREG(rd));
2296 break;
2297 #else
2298 goto nfpu_insn;
2299 #endif
2300 case 0x45:
2301 gen_op_load_fpr_FT0(rs1);
2302 gen_op_load_fpr_FT1(rs2);
2303 gen_clear_float_exceptions();
2304 tcg_gen_helper_0_0(helper_fsubs);
2305 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2306 gen_op_store_FT0_fpr(rd);
2307 break;
2308 case 0x46:
2309 gen_op_load_fpr_DT0(DFPREG(rs1));
2310 gen_op_load_fpr_DT1(DFPREG(rs2));
2311 gen_clear_float_exceptions();
2312 tcg_gen_helper_0_0(helper_fsubd);
2313 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2314 gen_op_store_DT0_fpr(DFPREG(rd));
2315 break;
2316 case 0x47: /* fsubq */
2317 #if defined(CONFIG_USER_ONLY)
2318 gen_op_load_fpr_QT0(QFPREG(rs1));
2319 gen_op_load_fpr_QT1(QFPREG(rs2));
2320 gen_clear_float_exceptions();
2321 tcg_gen_helper_0_0(helper_fsubq);
2322 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2323 gen_op_store_QT0_fpr(QFPREG(rd));
2324 break;
2325 #else
2326 goto nfpu_insn;
2327 #endif
2328 case 0x49:
2329 gen_op_load_fpr_FT0(rs1);
2330 gen_op_load_fpr_FT1(rs2);
2331 gen_clear_float_exceptions();
2332 tcg_gen_helper_0_0(helper_fmuls);
2333 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2334 gen_op_store_FT0_fpr(rd);
2335 break;
2336 case 0x4a:
2337 gen_op_load_fpr_DT0(DFPREG(rs1));
2338 gen_op_load_fpr_DT1(DFPREG(rs2));
2339 gen_clear_float_exceptions();
2340 tcg_gen_helper_0_0(helper_fmuld);
2341 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2342 gen_op_store_DT0_fpr(DFPREG(rd));
2343 break;
2344 case 0x4b: /* fmulq */
2345 #if defined(CONFIG_USER_ONLY)
2346 gen_op_load_fpr_QT0(QFPREG(rs1));
2347 gen_op_load_fpr_QT1(QFPREG(rs2));
2348 gen_clear_float_exceptions();
2349 tcg_gen_helper_0_0(helper_fmulq);
2350 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2351 gen_op_store_QT0_fpr(QFPREG(rd));
2352 break;
2353 #else
2354 goto nfpu_insn;
2355 #endif
2356 case 0x4d:
2357 gen_op_load_fpr_FT0(rs1);
2358 gen_op_load_fpr_FT1(rs2);
2359 gen_clear_float_exceptions();
2360 tcg_gen_helper_0_0(helper_fdivs);
2361 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2362 gen_op_store_FT0_fpr(rd);
2363 break;
2364 case 0x4e:
2365 gen_op_load_fpr_DT0(DFPREG(rs1));
2366 gen_op_load_fpr_DT1(DFPREG(rs2));
2367 gen_clear_float_exceptions();
2368 tcg_gen_helper_0_0(helper_fdivd);
2369 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2370 gen_op_store_DT0_fpr(DFPREG(rd));
2371 break;
2372 case 0x4f: /* fdivq */
2373 #if defined(CONFIG_USER_ONLY)
2374 gen_op_load_fpr_QT0(QFPREG(rs1));
2375 gen_op_load_fpr_QT1(QFPREG(rs2));
2376 gen_clear_float_exceptions();
2377 tcg_gen_helper_0_0(helper_fdivq);
2378 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2379 gen_op_store_QT0_fpr(QFPREG(rd));
2380 break;
2381 #else
2382 goto nfpu_insn;
2383 #endif
2384 case 0x69:
2385 gen_op_load_fpr_FT0(rs1);
2386 gen_op_load_fpr_FT1(rs2);
2387 gen_clear_float_exceptions();
2388 tcg_gen_helper_0_0(helper_fsmuld);
2389 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2390 gen_op_store_DT0_fpr(DFPREG(rd));
2391 break;
2392 case 0x6e: /* fdmulq */
2393 #if defined(CONFIG_USER_ONLY)
2394 gen_op_load_fpr_DT0(DFPREG(rs1));
2395 gen_op_load_fpr_DT1(DFPREG(rs2));
2396 gen_clear_float_exceptions();
2397 tcg_gen_helper_0_0(helper_fdmulq);
2398 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2399 gen_op_store_QT0_fpr(QFPREG(rd));
2400 break;
2401 #else
2402 goto nfpu_insn;
2403 #endif
2404 case 0xc4:
2405 gen_op_load_fpr_FT1(rs2);
2406 gen_clear_float_exceptions();
2407 tcg_gen_helper_0_0(helper_fitos);
2408 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2409 gen_op_store_FT0_fpr(rd);
2410 break;
2411 case 0xc6:
2412 gen_op_load_fpr_DT1(DFPREG(rs2));
2413 gen_clear_float_exceptions();
2414 tcg_gen_helper_0_0(helper_fdtos);
2415 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2416 gen_op_store_FT0_fpr(rd);
2417 break;
2418 case 0xc7: /* fqtos */
2419 #if defined(CONFIG_USER_ONLY)
2420 gen_op_load_fpr_QT1(QFPREG(rs2));
2421 gen_clear_float_exceptions();
2422 tcg_gen_helper_0_0(helper_fqtos);
2423 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2424 gen_op_store_FT0_fpr(rd);
2425 break;
2426 #else
2427 goto nfpu_insn;
2428 #endif
2429 case 0xc8:
2430 gen_op_load_fpr_FT1(rs2);
2431 tcg_gen_helper_0_0(helper_fitod);
2432 gen_op_store_DT0_fpr(DFPREG(rd));
2433 break;
2434 case 0xc9:
2435 gen_op_load_fpr_FT1(rs2);
2436 tcg_gen_helper_0_0(helper_fstod);
2437 gen_op_store_DT0_fpr(DFPREG(rd));
2438 break;
2439 case 0xcb: /* fqtod */
2440 #if defined(CONFIG_USER_ONLY)
2441 gen_op_load_fpr_QT1(QFPREG(rs2));
2442 gen_clear_float_exceptions();
2443 tcg_gen_helper_0_0(helper_fqtod);
2444 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2445 gen_op_store_DT0_fpr(DFPREG(rd));
2446 break;
2447 #else
2448 goto nfpu_insn;
2449 #endif
2450 case 0xcc: /* fitoq */
2451 #if defined(CONFIG_USER_ONLY)
2452 gen_op_load_fpr_FT1(rs2);
2453 tcg_gen_helper_0_0(helper_fitoq);
2454 gen_op_store_QT0_fpr(QFPREG(rd));
2455 break;
2456 #else
2457 goto nfpu_insn;
2458 #endif
2459 case 0xcd: /* fstoq */
2460 #if defined(CONFIG_USER_ONLY)
2461 gen_op_load_fpr_FT1(rs2);
2462 tcg_gen_helper_0_0(helper_fstoq);
2463 gen_op_store_QT0_fpr(QFPREG(rd));
2464 break;
2465 #else
2466 goto nfpu_insn;
2467 #endif
2468 case 0xce: /* fdtoq */
2469 #if defined(CONFIG_USER_ONLY)
2470 gen_op_load_fpr_DT1(DFPREG(rs2));
2471 tcg_gen_helper_0_0(helper_fdtoq);
2472 gen_op_store_QT0_fpr(QFPREG(rd));
2473 break;
2474 #else
2475 goto nfpu_insn;
2476 #endif
2477 case 0xd1:
2478 gen_op_load_fpr_FT1(rs2);
2479 gen_clear_float_exceptions();
2480 tcg_gen_helper_0_0(helper_fstoi);
2481 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2482 gen_op_store_FT0_fpr(rd);
2483 break;
2484 case 0xd2:
2485 gen_op_load_fpr_DT1(DFPREG(rs2));
2486 gen_clear_float_exceptions();
2487 tcg_gen_helper_0_0(helper_fdtoi);
2488 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2489 gen_op_store_FT0_fpr(rd);
2490 break;
2491 case 0xd3: /* fqtoi */
2492 #if defined(CONFIG_USER_ONLY)
2493 gen_op_load_fpr_QT1(QFPREG(rs2));
2494 gen_clear_float_exceptions();
2495 tcg_gen_helper_0_0(helper_fqtoi);
2496 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2497 gen_op_store_FT0_fpr(rd);
2498 break;
2499 #else
2500 goto nfpu_insn;
2501 #endif
2502 #ifdef TARGET_SPARC64
2503 case 0x2: /* V9 fmovd */
2504 gen_op_load_fpr_DT0(DFPREG(rs2));
2505 gen_op_store_DT0_fpr(DFPREG(rd));
2506 break;
2507 case 0x3: /* V9 fmovq */
2508 #if defined(CONFIG_USER_ONLY)
2509 gen_op_load_fpr_QT0(QFPREG(rs2));
2510 gen_op_store_QT0_fpr(QFPREG(rd));
2511 break;
2512 #else
2513 goto nfpu_insn;
2514 #endif
2515 case 0x6: /* V9 fnegd */
2516 gen_op_load_fpr_DT1(DFPREG(rs2));
2517 tcg_gen_helper_0_0(helper_fnegd);
2518 gen_op_store_DT0_fpr(DFPREG(rd));
2519 break;
2520 case 0x7: /* V9 fnegq */
2521 #if defined(CONFIG_USER_ONLY)
2522 gen_op_load_fpr_QT1(QFPREG(rs2));
2523 tcg_gen_helper_0_0(helper_fnegq);
2524 gen_op_store_QT0_fpr(QFPREG(rd));
2525 break;
2526 #else
2527 goto nfpu_insn;
2528 #endif
2529 case 0xa: /* V9 fabsd */
2530 gen_op_load_fpr_DT1(DFPREG(rs2));
2531 tcg_gen_helper_0_0(helper_fabsd);
2532 gen_op_store_DT0_fpr(DFPREG(rd));
2533 break;
2534 case 0xb: /* V9 fabsq */
2535 #if defined(CONFIG_USER_ONLY)
2536 gen_op_load_fpr_QT1(QFPREG(rs2));
2537 tcg_gen_helper_0_0(helper_fabsq);
2538 gen_op_store_QT0_fpr(QFPREG(rd));
2539 break;
2540 #else
2541 goto nfpu_insn;
2542 #endif
2543 case 0x81: /* V9 fstox */
2544 gen_op_load_fpr_FT1(rs2);
2545 gen_clear_float_exceptions();
2546 tcg_gen_helper_0_0(helper_fstox);
2547 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2548 gen_op_store_DT0_fpr(DFPREG(rd));
2549 break;
2550 case 0x82: /* V9 fdtox */
2551 gen_op_load_fpr_DT1(DFPREG(rs2));
2552 gen_clear_float_exceptions();
2553 tcg_gen_helper_0_0(helper_fdtox);
2554 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2555 gen_op_store_DT0_fpr(DFPREG(rd));
2556 break;
2557 case 0x83: /* V9 fqtox */
2558 #if defined(CONFIG_USER_ONLY)
2559 gen_op_load_fpr_QT1(QFPREG(rs2));
2560 gen_clear_float_exceptions();
2561 tcg_gen_helper_0_0(helper_fqtox);
2562 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2563 gen_op_store_DT0_fpr(DFPREG(rd));
2564 break;
2565 #else
2566 goto nfpu_insn;
2567 #endif
2568 case 0x84: /* V9 fxtos */
2569 gen_op_load_fpr_DT1(DFPREG(rs2));
2570 gen_clear_float_exceptions();
2571 tcg_gen_helper_0_0(helper_fxtos);
2572 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2573 gen_op_store_FT0_fpr(rd);
2574 break;
2575 case 0x88: /* V9 fxtod */
2576 gen_op_load_fpr_DT1(DFPREG(rs2));
2577 gen_clear_float_exceptions();
2578 tcg_gen_helper_0_0(helper_fxtod);
2579 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2580 gen_op_store_DT0_fpr(DFPREG(rd));
2581 break;
2582 case 0x8c: /* V9 fxtoq */
2583 #if defined(CONFIG_USER_ONLY)
2584 gen_op_load_fpr_DT1(DFPREG(rs2));
2585 gen_clear_float_exceptions();
2586 tcg_gen_helper_0_0(helper_fxtoq);
2587 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2588 gen_op_store_QT0_fpr(QFPREG(rd));
2589 break;
2590 #else
2591 goto nfpu_insn;
2592 #endif
2593 #endif
2594 default:
2595 goto illegal_insn;
2596 }
2597 } else if (xop == 0x35) { /* FPU Operations */
2598 #ifdef TARGET_SPARC64
2599 int cond;
2600 #endif
2601 if (gen_trap_ifnofpu(dc, cpu_cond))
2602 goto jmp_insn;
2603 gen_op_clear_ieee_excp_and_FTT();
2604 rs1 = GET_FIELD(insn, 13, 17);
2605 rs2 = GET_FIELD(insn, 27, 31);
2606 xop = GET_FIELD(insn, 18, 26);
2607 #ifdef TARGET_SPARC64
2608 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2609 int l1;
2610
2611 l1 = gen_new_label();
2612 cond = GET_FIELD_SP(insn, 14, 17);
2613 cpu_src1 = get_src1(insn, cpu_src1);
2614 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_src1,
2615 tcg_const_tl(0), l1);
2616 gen_op_load_fpr_FT0(rs2);
2617 gen_op_store_FT0_fpr(rd);
2618 gen_set_label(l1);
2619 break;
2620 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2621 int l1;
2622
2623 l1 = gen_new_label();
2624 cond = GET_FIELD_SP(insn, 14, 17);
2625 cpu_src1 = get_src1(insn, cpu_src1);
2626 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_src1,
2627 tcg_const_tl(0), l1);
2628 gen_op_load_fpr_DT0(DFPREG(rs2));
2629 gen_op_store_DT0_fpr(DFPREG(rd));
2630 gen_set_label(l1);
2631 break;
2632 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2633 #if defined(CONFIG_USER_ONLY)
2634 int l1;
2635
2636 l1 = gen_new_label();
2637 cond = GET_FIELD_SP(insn, 14, 17);
2638 cpu_src1 = get_src1(insn, cpu_src1);
2639 tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_src1,
2640 tcg_const_tl(0), l1);
2641 gen_op_load_fpr_QT0(QFPREG(rs2));
2642 gen_op_store_QT0_fpr(QFPREG(rd));
2643 gen_set_label(l1);
2644 break;
2645 #else
2646 goto nfpu_insn;
2647 #endif
2648 }
2649 #endif
2650 switch (xop) {
2651 #ifdef TARGET_SPARC64
/* Conditional FP move on a floating-point condition code (%fccN):
   evaluate FP condition `cond` (from insn bits 14-17) on %fcc`fcc`;
   if it is false (r_cond == 0) branch over the move, otherwise copy
   FP register rs2 to rd through the FT0/DT0/QT0 temporary selected
   by size_FDQ (F = single, D = double, Q = quad). */
#define FMOVCC(size_FDQ, fcc)                                           \
    {                                                                   \
        TCGv r_cond;                                                    \
        int l1;                                                         \
                                                                        \
        l1 = gen_new_label();                                           \
        r_cond = tcg_temp_new(TCG_TYPE_TL);                             \
        cond = GET_FIELD_SP(insn, 14, 17);                              \
        gen_fcond(r_cond, fcc, cond);                                   \
        tcg_gen_brcond_tl(TCG_COND_EQ, r_cond,                          \
                          tcg_const_tl(0), l1);                         \
        glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
        glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
        gen_set_label(l1);                                              \
    }
2667 case 0x001: /* V9 fmovscc %fcc0 */
2668 FMOVCC(F, 0);
2669 break;
2670 case 0x002: /* V9 fmovdcc %fcc0 */
2671 FMOVCC(D, 0);
2672 break;
2673 case 0x003: /* V9 fmovqcc %fcc0 */
2674 #if defined(CONFIG_USER_ONLY)
2675 FMOVCC(Q, 0);
2676 break;
2677 #else
2678 goto nfpu_insn;
2679 #endif
2680 case 0x041: /* V9 fmovscc %fcc1 */
2681 FMOVCC(F, 1);
2682 break;
2683 case 0x042: /* V9 fmovdcc %fcc1 */
2684 FMOVCC(D, 1);
2685 break;
2686 case 0x043: /* V9 fmovqcc %fcc1 */
2687 #if defined(CONFIG_USER_ONLY)
2688 FMOVCC(Q, 1);
2689 break;
2690 #else
2691 goto nfpu_insn;
2692 #endif
2693 case 0x081: /* V9 fmovscc %fcc2 */
2694 FMOVCC(F, 2);
2695 break;
2696 case 0x082: /* V9 fmovdcc %fcc2 */
2697 FMOVCC(D, 2);
2698 break;
2699 case 0x083: /* V9 fmovqcc %fcc2 */
2700 #if defined(CONFIG_USER_ONLY)
2701 FMOVCC(Q, 2);
2702 break;
2703 #else
2704 goto nfpu_insn;
2705 #endif
2706 case 0x0c1: /* V9 fmovscc %fcc3 */
2707 FMOVCC(F, 3);
2708 break;
2709 case 0x0c2: /* V9 fmovdcc %fcc3 */
2710 FMOVCC(D, 3);
2711 break;
2712 case 0x0c3: /* V9 fmovqcc %fcc3 */
2713 #if defined(CONFIG_USER_ONLY)
2714 FMOVCC(Q, 3);
2715 break;
2716 #else
2717 goto nfpu_insn;
2718 #endif
2719 #undef FMOVCC
/* Conditional FP move on an integer condition code: same shape as the
   %fcc variant above, but the condition comes from gen_cond() on
   icc (0 = %icc, 1 = %xcc) instead of gen_fcond() on an %fcc field. */
#define FMOVCC(size_FDQ, icc)                                           \
    {                                                                   \
        TCGv r_cond;                                                    \
        int l1;                                                         \
                                                                        \
        l1 = gen_new_label();                                           \
        r_cond = tcg_temp_new(TCG_TYPE_TL);                             \
        cond = GET_FIELD_SP(insn, 14, 17);                              \
        gen_cond(r_cond, icc, cond);                                    \
        tcg_gen_brcond_tl(TCG_COND_EQ, r_cond,                          \
                          tcg_const_tl(0), l1);                         \
        glue(glue(gen_op_load_fpr_, size_FDQ), T0)(glue(size_FDQ, FPREG(rs2))); \
        glue(glue(gen_op_store_, size_FDQ), T0_fpr)(glue(size_FDQ, FPREG(rd))); \
        gen_set_label(l1);                                              \
    }
2735
2736 case 0x101: /* V9 fmovscc %icc */
2737 FMOVCC(F, 0);
2738 break;
2739 case 0x102: /* V9 fmovdcc %icc */
2740 FMOVCC(D, 0);
2741 case 0x103: /* V9 fmovqcc %icc */
2742 #if defined(CONFIG_USER_ONLY)
2743 FMOVCC(D, 0);
2744 break;
2745 #else
2746 goto nfpu_insn;
2747 #endif
2748 case 0x181: /* V9 fmovscc %xcc */
2749 FMOVCC(F, 1);
2750 break;
2751 case 0x182: /* V9 fmovdcc %xcc */
2752 FMOVCC(D, 1);
2753 break;
2754 case 0x183: /* V9 fmovqcc %xcc */
2755 #if defined(CONFIG_USER_ONLY)
2756 FMOVCC(Q, 1);
2757 break;
2758 #else
2759 goto nfpu_insn;
2760 #endif
2761 #undef FMOVCC
2762 #endif
2763 case 0x51: /* fcmps, V9 %fcc */
2764 gen_op_load_fpr_FT0(rs1);
2765 gen_op_load_fpr_FT1(rs2);
2766 gen_op_fcmps(rd & 3);
2767 break;
2768 case 0x52: /* fcmpd, V9 %fcc */
2769 gen_op_load_fpr_DT0(DFPREG(rs1));
2770 gen_op_load_fpr_DT1(DFPREG(rs2));
2771 gen_op_fcmpd(rd & 3);
2772 break;
2773 case 0x53: /* fcmpq, V9 %fcc */
2774 #if defined(CONFIG_USER_ONLY)
2775 gen_op_load_fpr_QT0(QFPREG(rs1));
2776 gen_op_load_fpr_QT1(QFPREG(rs2));
2777 gen_op_fcmpq(rd & 3);
2778 break;
2779 #else /* !defined(CONFIG_USER_ONLY) */
2780 goto nfpu_insn;
2781 #endif
2782 case 0x55: /* fcmpes, V9 %fcc */
2783 gen_op_load_fpr_FT0(rs1);
2784 gen_op_load_fpr_FT1(rs2);
2785 gen_op_fcmpes(rd & 3);
2786 break;
2787 case 0x56: /* fcmped, V9 %fcc */
2788 gen_op_load_fpr_DT0(DFPREG(rs1));
2789 gen_op_load_fpr_DT1(DFPREG(rs2));
2790 gen_op_fcmped(rd & 3);
2791 break;
2792 case 0x57: /* fcmpeq, V9 %fcc */
2793 #if defined(CONFIG_USER_ONLY)
2794 gen_op_load_fpr_QT0(QFPREG(rs1));
2795 gen_op_load_fpr_QT1(QFPREG(rs2));
2796 gen_op_fcmpeq(rd & 3);
2797 break;
2798 #else/* !defined(CONFIG_USER_ONLY) */
2799 goto nfpu_insn;
2800 #endif
2801 default:
2802 goto illegal_insn;
2803 }
2804 } else if (xop == 0x2) {
2805 // clr/mov shortcut
2806
2807 rs1 = GET_FIELD(insn, 13, 17);
2808 if (rs1 == 0) {
2809 // or %g0, x, y -> mov T0, x; mov y, T0
2810 if (IS_IMM) { /* immediate */
2811 rs2 = GET_FIELDs(insn, 19, 31);
2812 tcg_gen_movi_tl(cpu_dst, (int)rs2);
2813 } else { /* register */
2814 rs2 = GET_FIELD(insn, 27, 31);
2815 gen_movl_reg_TN(rs2, cpu_dst);
2816 }
2817 } else {
2818 cpu_src1 = get_src1(insn, cpu_src1);
2819 if (IS_IMM) { /* immediate */
2820 rs2 = GET_FIELDs(insn, 19, 31);
2821 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
2822 } else { /* register */
2823 // or x, %g0, y -> mov T1, x; mov y, T1
2824 rs2 = GET_FIELD(insn, 27, 31);
2825 if (rs2 != 0) {
2826 gen_movl_reg_TN(rs2, cpu_src2);
2827 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2828 } else
2829 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2830 }
2831 }
2832 gen_movl_TN_reg(rd, cpu_dst);
2833 #ifdef TARGET_SPARC64
2834 } else if (xop == 0x25) { /* sll, V9 sllx */
2835 cpu_src1 = get_src1(insn, cpu_src1);
2836 if (IS_IMM) { /* immediate */
2837 rs2 = GET_FIELDs(insn, 20, 31);
2838 if (insn & (1 << 12)) {
2839 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2840 } else {
2841 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2842 tcg_gen_shli_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2843 }
2844 } else { /* register */
2845 rs2 = GET_FIELD(insn, 27, 31);
2846 gen_movl_reg_TN(rs2, cpu_src2);
2847 if (insn & (1 << 12)) {
2848 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2849 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2850 } else {
2851 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2852 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2853 tcg_gen_shl_i64(cpu_dst, cpu_dst, cpu_tmp0);
2854 }
2855 }
2856 gen_movl_TN_reg(rd, cpu_dst);
2857 } else if (xop == 0x26) { /* srl, V9 srlx */
2858 cpu_src1 = get_src1(insn, cpu_src1);
2859 if (IS_IMM) { /* immediate */
2860 rs2 = GET_FIELDs(insn, 20, 31);
2861 if (insn & (1 << 12)) {
2862 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2863 } else {
2864 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2865 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2866 }
2867 } else { /* register */
2868 rs2 = GET_FIELD(insn, 27, 31);
2869 gen_movl_reg_TN(rs2, cpu_src2);
2870 if (insn & (1 << 12)) {
2871 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2872 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2873 } else {
2874 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2875 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2876 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2877 }
2878 }
2879 gen_movl_TN_reg(rd, cpu_dst);
2880 } else if (xop == 0x27) { /* sra, V9 srax */
2881 cpu_src1 = get_src1(insn, cpu_src1);
2882 if (IS_IMM) { /* immediate */
2883 rs2 = GET_FIELDs(insn, 20, 31);
2884 if (insn & (1 << 12)) {
2885 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2886 } else {
2887 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2888 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
2889 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2890 }
2891 } else { /* register */
2892 rs2 = GET_FIELD(insn, 27, 31);
2893 gen_movl_reg_TN(rs2, cpu_src2);
2894 if (insn & (1 << 12)) {
2895 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2896 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2897 } else {
2898 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2899 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2900 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2901 }
2902 }
2903 gen_movl_TN_reg(rd, cpu_dst);
2904 #endif
2905 } else if (xop < 0x36) {
2906 cpu_src1 = get_src1(insn, cpu_src1);
2907 cpu_src2 = get_src2(insn, cpu_src2);
2908 if (xop < 0x20) {
2909 switch (xop & ~0x10) {
2910 case 0x0:
2911 if (xop & 0x10)
2912 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2913 else
2914 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2915 break;
2916 case 0x1:
2917 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
2918 if (xop & 0x10)
2919 gen_op_logic_cc(cpu_dst);
2920 break;
2921 case 0x2:
2922 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2923 if (xop & 0x10)
2924 gen_op_logic_cc(cpu_dst);
2925 break;
2926 case 0x3:
2927 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
2928 if (xop & 0x10)
2929 gen_op_logic_cc(cpu_dst);
2930 break;
2931 case 0x4:
2932 if (xop & 0x10)
2933 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
2934 else
2935 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
2936 break;
2937 case 0x5:
2938 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
2939 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
2940 if (xop & 0x10)
2941 gen_op_logic_cc(cpu_dst);
2942 break;
2943 case 0x6:
2944 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
2945 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
2946 if (xop & 0x10)
2947 gen_op_logic_cc(cpu_dst);
2948 break;
2949 case 0x7:
2950 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
2951 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
2952 if (xop & 0x10)
2953 gen_op_logic_cc(cpu_dst);
2954 break;
2955 case 0x8:
2956 if (xop & 0x10)
2957 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
2958 else {
2959 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2960 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2961 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
2962 }
2963 break;
2964 #ifdef TARGET_SPARC64
2965 case 0x9: /* V9 mulx */
2966 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
2967 break;
2968 #endif
2969 case 0xa:
2970 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
2971 if (xop & 0x10)
2972 gen_op_logic_cc(cpu_dst);
2973 break;
2974 case 0xb:
2975 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
2976 if (xop & 0x10)
2977 gen_op_logic_cc(cpu_dst);
2978 break;
2979 case 0xc:
2980 if (xop & 0x10)
2981 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
2982 else {
2983 gen_mov_reg_C(cpu_tmp0, cpu_psr);
2984 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
2985 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
2986 }
2987 break;
2988 #ifdef TARGET_SPARC64
2989 case 0xd: /* V9 udivx */
2990 gen_trap_ifdivzero_tl(cpu_src2);
2991 tcg_gen_divu_i64(cpu_dst, cpu_src1, cpu_src2);
2992 break;
2993 #endif
2994 case 0xe:
2995 tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1, cpu_src2);
2996 if (xop & 0x10)
2997 gen_op_div_cc(cpu_dst);
2998 break;
2999 case 0xf:
3000 tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1, cpu_src2);
3001 if (xop & 0x10)
3002 gen_op_div_cc(cpu_dst);
3003 break;
3004 default:
3005 goto illegal_insn;
3006 }
3007 gen_movl_TN_reg(rd, cpu_dst);
3008 } else {
3009 switch (xop) {
3010 case 0x20: /* taddcc */
3011 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3012 gen_movl_TN_reg(rd, cpu_dst);
3013 break;
3014 case 0x21: /* tsubcc */
3015 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3016 gen_movl_TN_reg(rd, cpu_dst);
3017 break;
3018 case 0x22: /* taddcctv */
3019 save_state(dc, cpu_cond);
3020 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3021 gen_movl_TN_reg(rd, cpu_dst);
3022 break;
3023 case 0x23: /* tsubcctv */
3024 save_state(dc, cpu_cond);
3025 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3026 gen_movl_TN_reg(rd, cpu_dst);
3027 break;
3028 case 0x24: /* mulscc */
3029 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3030 gen_movl_TN_reg(rd, cpu_dst);
3031 break;
3032 #ifndef TARGET_SPARC64
3033 case 0x25: /* sll */
3034 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3035 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3036 gen_movl_TN_reg(rd, cpu_dst);
3037 break;
3038 case 0x26: /* srl */
3039 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3040 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3041 gen_movl_TN_reg(rd, cpu_dst);
3042 break;
3043 case 0x27: /* sra */
3044 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3045 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3046 gen_movl_TN_reg(rd, cpu_dst);
3047 break;
3048 #endif
3049 case 0x30:
3050 {
3051 switch(rd) {
3052 case 0: /* wry */
3053 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3054 tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, y));
3055 break;
3056 #ifndef TARGET_SPARC64
3057 case 0x01 ... 0x0f: /* undefined in the
3058 SPARCv8 manual, nop
3059 on the microSPARC
3060 II */
3061 case 0x10 ... 0x1f: /* implementation-dependent
3062 in the SPARCv8
3063 manual, nop on the
3064 microSPARC II */
3065 break;
3066 #else
3067 case 0x2: /* V9 wrccr */
3068 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3069 tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
3070 break;
3071 case 0x3: /* V9 wrasi */
3072 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3073 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3074 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, asi));
3075 break;
3076 case 0x6: /* V9 wrfprs */
3077 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3078 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3079 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fprs));
3080 save_state(dc, cpu_cond);
3081 gen_op_next_insn();
3082 tcg_gen_exit_tb(0);
3083 dc->is_br = 1;
3084 break;
3085 case 0xf: /* V9 sir, nop if user */
3086 #if !defined(CONFIG_USER_ONLY)
3087 if (supervisor(dc))
3088 ; // XXX
3089 #endif
3090 break;
3091 case 0x13: /* Graphics Status */
3092 if (gen_trap_ifnofpu(dc, cpu_cond))
3093 goto jmp_insn;
3094 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3095 tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, gsr));
3096 break;
3097 case 0x17: /* Tick compare */
3098 #if !defined(CONFIG_USER_ONLY)
3099 if (!supervisor(dc))
3100 goto illegal_insn;
3101 #endif
3102 {
3103 TCGv r_tickptr;
3104
3105 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3106 cpu_src2);
3107 tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState,
3108 tick_cmpr));
3109 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3110 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3111 offsetof(CPUState, tick));
3112 tcg_gen_helper_0_2(helper_tick_set_limit,
3113 r_tickptr, cpu_dst);
3114 }
3115 break;
3116 case 0x18: /* System tick */
3117 #if !defined(CONFIG_USER_ONLY)
3118 if (!supervisor(dc))
3119 goto illegal_insn;
3120 #endif
3121 {
3122 TCGv r_tickptr;
3123
3124 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3125 cpu_src2);
3126 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3127 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3128 offsetof(CPUState, stick));
3129 tcg_gen_helper_0_2(helper_tick_set_count,
3130 r_tickptr, cpu_dst);
3131 }
3132 break;
3133 case 0x19: /* System tick compare */
3134 #if !defined(CONFIG_USER_ONLY)
3135 if (!supervisor(dc))
3136 goto illegal_insn;
3137 #endif
3138 {
3139 TCGv r_tickptr;
3140
3141 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3142 cpu_src2);
3143 tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState,
3144 stick_cmpr));
3145 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3146 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3147 offsetof(CPUState, stick));
3148 tcg_gen_helper_0_2(helper_tick_set_limit,
3149 r_tickptr, cpu_dst);
3150 }
3151 break;
3152
3153 case 0x10: /* Performance Control */
3154 case 0x11: /* Performance Instrumentation Counter */
3155 case 0x12: /* Dispatch Control */
3156 case 0x14: /* Softint set */
3157 case 0x15: /* Softint clear */
3158 case 0x16: /* Softint write */
3159 #endif
3160 default:
3161 goto illegal_insn;
3162 }
3163 }
3164 break;
3165 #if !defined(CONFIG_USER_ONLY)
3166 case 0x31: /* wrpsr, V9 saved, restored */
3167 {
3168 if (!supervisor(dc))
3169 goto priv_insn;
3170 #ifdef TARGET_SPARC64
3171 switch (rd) {
3172 case 0:
3173 tcg_gen_helper_0_0(helper_saved);
3174 break;
3175 case 1:
3176 tcg_gen_helper_0_0(helper_restored);
3177 break;
3178 case 2: /* UA2005 allclean */
3179 case 3: /* UA2005 otherw */
3180 case 4: /* UA2005 normalw */
3181 case 5: /* UA2005 invalw */
3182 // XXX
3183 default:
3184 goto illegal_insn;
3185 }
3186 #else
3187 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3188 tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
3189 save_state(dc, cpu_cond);
3190 gen_op_next_insn();
3191 tcg_gen_exit_tb(0);
3192 dc->is_br = 1;
3193 #endif
3194 }
3195 break;
3196 case 0x32: /* wrwim, V9 wrpr */
3197 {
3198 if (!supervisor(dc))
3199 goto priv_insn;
3200 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3201 #ifdef TARGET_SPARC64
3202 switch (rd) {
3203 case 0: // tpc
3204 {
3205 TCGv r_tsptr;
3206
3207 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3208 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3209 offsetof(CPUState, tsptr));
3210 tcg_gen_st_tl(cpu_dst, r_tsptr,
3211 offsetof(trap_state, tpc));
3212 }
3213 break;
3214 case 1: // tnpc
3215 {
3216 TCGv r_tsptr;
3217
3218 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3219 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3220 offsetof(CPUState, tsptr));
3221 tcg_gen_st_tl(cpu_dst, r_tsptr,
3222 offsetof(trap_state, tnpc));
3223 }
3224 break;
3225 case 2: // tstate
3226 {
3227 TCGv r_tsptr;
3228
3229 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3230 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3231 offsetof(CPUState, tsptr));
3232 tcg_gen_st_tl(cpu_dst, r_tsptr,
3233 offsetof(trap_state, tstate));
3234 }
3235 break;
3236 case 3: // tt
3237 {
3238 TCGv r_tsptr;
3239
3240 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3241 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3242 offsetof(CPUState, tsptr));
3243 tcg_gen_st_i32(cpu_dst, r_tsptr,
3244 offsetof(trap_state, tt));
3245 }
3246 break;
3247 case 4: // tick
3248 {
3249 TCGv r_tickptr;
3250
3251 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3252 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3253 offsetof(CPUState, tick));
3254 tcg_gen_helper_0_2(helper_tick_set_count,
3255 r_tickptr, cpu_dst);
3256 }
3257 break;
3258 case 5: // tba
3259 tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tbr));
3260 break;
3261 case 6: // pstate
3262 save_state(dc, cpu_cond);
3263 tcg_gen_helper_0_1(helper_wrpstate, cpu_dst);
3264 gen_op_next_insn();
3265 tcg_gen_exit_tb(0);
3266 dc->is_br = 1;
3267 break;
3268 case 7: // tl
3269 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3270 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, tl));
3271 break;
3272 case 8: // pil
3273 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3274 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, psrpil));
3275 break;
3276 case 9: // cwp
3277 tcg_gen_helper_0_1(helper_wrcwp, cpu_dst);
3278 break;
3279 case 10: // cansave
3280 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3281 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cansave));
3282 break;
3283 case 11: // canrestore
3284 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3285 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, canrestore));
3286 break;
3287 case 12: // cleanwin
3288 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3289 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, cleanwin));
3290 break;
3291 case 13: // otherwin
3292 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3293 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, otherwin));
3294 break;
3295 case 14: // wstate
3296 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3297 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wstate));
3298 break;
3299 case 16: // UA2005 gl
3300 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3301 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, gl));
3302 break;
3303 case 26: // UA2005 strand status
3304 if (!hypervisor(dc))
3305 goto priv_insn;
3306 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3307 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ssr));
3308 break;
3309 default:
3310 goto illegal_insn;
3311 }
3312 #else
3313 tcg_gen_andi_tl(cpu_dst, cpu_dst, ((1 << NWINDOWS) - 1));
3314 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3315 tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, wim));
3316 #endif
3317 }
3318 break;
            case 0x33: /* wrtbr, UA2005 wrhpr */
                {
#ifndef TARGET_SPARC64
                    /* V8: write trap base register (privileged).
                       wr semantics: the stored value is rs1 ^ rs2/imm.  */
                    if (!supervisor(dc))
                        goto priv_insn;
                    tcg_gen_xor_tl(cpu_dst, cpu_dst, cpu_src2);
                    tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState, tbr));
#else
                    /* UA2005: write hyperprivileged register selected by rd.  */
                    if (!hypervisor(dc))
                        goto priv_insn;
                    tcg_gen_xor_tl(cpu_dst, cpu_dst, cpu_src2);
                    switch (rd) {
                    case 0: // hpstate
                        // XXX gen_op_wrhpstate();
                        /* hpstate changes execution state: end this TB
                           and resume at the following instruction.  */
                        save_state(dc, cpu_cond);
                        gen_op_next_insn();
                        tcg_gen_exit_tb(0);
                        dc->is_br = 1;
                        break;
                    case 1: // htstate
                        // XXX gen_op_wrhtstate();
                        break;
                    case 3: // hintp
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                        tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, hintp));
                        break;
                    case 5: // htba
                        tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
                        tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, htba));
                        break;
                    case 31: // hstick_cmpr
                        {
                            TCGv r_tickptr;

                            /* Store the new compare value, then let the
                               helper reprogram the hstick timer.  */
                            tcg_gen_st_tl(cpu_dst, cpu_env, offsetof(CPUSPARCState,
                                                                     hstick_cmpr));
                            r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
                            tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                           offsetof(CPUState, hstick));
                            tcg_gen_helper_0_2(helper_tick_set_limit,
                                               r_tickptr, cpu_dst);
                        }
                        break;
                    case 6: // hver readonly
                    default:
                        goto illegal_insn;
                    }
#endif
                }
                break;
3369 #endif
3370 #ifdef TARGET_SPARC64
                case 0x2c: /* V9 movcc */
                    {
                        /* Conditional move on condition codes:
                           rd = rs2/imm when the condition holds,
                           otherwise rd is left unchanged.  */
                        int cc = GET_FIELD_SP(insn, 11, 12);
                        int cond = GET_FIELD_SP(insn, 14, 17);
                        TCGv r_cond;
                        int l1;

                        r_cond = tcg_temp_new(TCG_TYPE_TL);
                        if (insn & (1 << 18)) {
                            /* Integer condition codes: cc 0 = icc,
                               cc 2 = xcc; other encodings are reserved.  */
                            if (cc == 0)
                                gen_cond(r_cond, 0, cond);
                            else if (cc == 2)
                                gen_cond(r_cond, 1, cond);
                            else
                                goto illegal_insn;
                        } else {
                            /* FP condition codes fcc0..fcc3.  */
                            gen_fcond(r_cond, cc, cond);
                        }

                        l1 = gen_new_label();

                        /* r_cond == 0 means condition false: branch
                           over the move.  */
                        tcg_gen_brcond_tl(TCG_COND_EQ, r_cond,
                                          tcg_const_tl(0), l1);
                        if (IS_IMM) { /* immediate */
                            rs2 = GET_FIELD_SPs(insn, 0, 10);
                            tcg_gen_movi_tl(cpu_dst, (int)rs2);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_dst);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        gen_set_label(l1);
                        break;
                    }
                case 0x2d: /* V9 sdivx */
                    /* 64-bit signed divide; no condition-code update.  */
                    gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
                    gen_movl_TN_reg(rd, cpu_dst);
                    break;
3409 case 0x2e: /* V9 popc */
3410 {
3411 cpu_src2 = get_src2(insn, cpu_src2);
3412 tcg_gen_helper_1_1(helper_popc, cpu_dst,
3413 cpu_src2);
3414 gen_movl_TN_reg(rd, cpu_dst);
3415 }
                case 0x2f: /* V9 movr */
                    {
                        /* Move on register condition: rd = rs2/imm when
                           rs1 satisfies the zero/sign test selected by
                           cond, otherwise rd is left unchanged.  */
                        int cond = GET_FIELD_SP(insn, 10, 12);
                        int l1;

                        cpu_src1 = get_src1(insn, cpu_src1);

                        l1 = gen_new_label();

                        /* Branch over the move when the test fails.
                           NOTE(review): this relies on gen_tcg_cond_reg[]
                           holding the *negated* TCG condition for each
                           movr encoding — confirm against its table.  */
                        tcg_gen_brcond_tl(gen_tcg_cond_reg[cond], cpu_src1,
                                          tcg_const_tl(0), l1);
                        if (IS_IMM) { /* immediate */
                            rs2 = GET_FIELD_SPs(insn, 0, 9);
                            tcg_gen_movi_tl(cpu_dst, (int)rs2);
                        } else {
                            rs2 = GET_FIELD_SP(insn, 0, 4);
                            gen_movl_reg_TN(rs2, cpu_dst);
                        }
                        gen_movl_TN_reg(rd, cpu_dst);
                        gen_set_label(l1);
                        break;
                    }
3438 #endif
3439 default:
3440 goto illegal_insn;
3441 }
3442 }
3443 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3444 #ifdef TARGET_SPARC64
3445 int opf = GET_FIELD_SP(insn, 5, 13);
3446 rs1 = GET_FIELD(insn, 13, 17);
3447 rs2 = GET_FIELD(insn, 27, 31);
3448 if (gen_trap_ifnofpu(dc, cpu_cond))
3449 goto jmp_insn;
3450
3451 switch (opf) {
3452 case 0x000: /* VIS I edge8cc */
3453 case 0x001: /* VIS II edge8n */
3454 case 0x002: /* VIS I edge8lcc */
3455 case 0x003: /* VIS II edge8ln */
3456 case 0x004: /* VIS I edge16cc */
3457 case 0x005: /* VIS II edge16n */
3458 case 0x006: /* VIS I edge16lcc */
3459 case 0x007: /* VIS II edge16ln */
3460 case 0x008: /* VIS I edge32cc */
3461 case 0x009: /* VIS II edge32n */
3462 case 0x00a: /* VIS I edge32lcc */
3463 case 0x00b: /* VIS II edge32ln */
3464 // XXX
3465 goto illegal_insn;
3466 case 0x010: /* VIS I array8 */
3467 cpu_src1 = get_src1(insn, cpu_src1);
3468 gen_movl_reg_TN(rs2, cpu_src2);
3469 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3470 cpu_src2);
3471 gen_movl_TN_reg(rd, cpu_dst);
3472 break;
3473 case 0x012: /* VIS I array16 */
3474 cpu_src1 = get_src1(insn, cpu_src1);
3475 gen_movl_reg_TN(rs2, cpu_src2);
3476 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3477 cpu_src2);
3478 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3479 gen_movl_TN_reg(rd, cpu_dst);
3480 break;
3481 case 0x014: /* VIS I array32 */
3482 cpu_src1 = get_src1(insn, cpu_src1);
3483 gen_movl_reg_TN(rs2, cpu_src2);
3484 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3485 cpu_src2);
3486 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3487 gen_movl_TN_reg(rd, cpu_dst);
3488 break;
3489 case 0x018: /* VIS I alignaddr */
3490 cpu_src1 = get_src1(insn, cpu_src1);
3491 gen_movl_reg_TN(rs2, cpu_src2);
3492 tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
3493 cpu_src2);
3494 gen_movl_TN_reg(rd, cpu_dst);
3495 break;
3496 case 0x019: /* VIS II bmask */
3497 case 0x01a: /* VIS I alignaddrl */
3498 // XXX
3499 goto illegal_insn;
3500 case 0x020: /* VIS I fcmple16 */
3501 gen_op_load_fpr_DT0(DFPREG(rs1));
3502 gen_op_load_fpr_DT1(DFPREG(rs2));
3503 tcg_gen_helper_0_0(helper_fcmple16);
3504 gen_op_store_DT0_fpr(DFPREG(rd));
3505 break;
3506 case 0x022: /* VIS I fcmpne16 */
3507 gen_op_load_fpr_DT0(DFPREG(rs1));
3508 gen_op_load_fpr_DT1(DFPREG(rs2));
3509 tcg_gen_helper_0_0(helper_fcmpne16);
3510 gen_op_store_DT0_fpr(DFPREG(rd));
3511 break;
3512 case 0x024: /* VIS I fcmple32 */
3513 gen_op_load_fpr_DT0(DFPREG(rs1));
3514 gen_op_load_fpr_DT1(DFPREG(rs2));
3515 tcg_gen_helper_0_0(helper_fcmple32);
3516 gen_op_store_DT0_fpr(DFPREG(rd));
3517 break;
3518 case 0x026: /* VIS I fcmpne32 */
3519 gen_op_load_fpr_DT0(DFPREG(rs1));
3520 gen_op_load_fpr_DT1(DFPREG(rs2));
3521 tcg_gen_helper_0_0(helper_fcmpne32);
3522 gen_op_store_DT0_fpr(DFPREG(rd));
3523 break;
3524 case 0x028: /* VIS I fcmpgt16 */
3525 gen_op_load_fpr_DT0(DFPREG(rs1));
3526 gen_op_load_fpr_DT1(DFPREG(rs2));
3527 tcg_gen_helper_0_0(helper_fcmpgt16);
3528 gen_op_store_DT0_fpr(DFPREG(rd));
3529 break;
3530 case 0x02a: /* VIS I fcmpeq16 */
3531 gen_op_load_fpr_DT0(DFPREG(rs1));
3532 gen_op_load_fpr_DT1(DFPREG(rs2));
3533 tcg_gen_helper_0_0(helper_fcmpeq16);
3534 gen_op_store_DT0_fpr(DFPREG(rd));
3535 break;
3536 case 0x02c: /* VIS I fcmpgt32 */
3537 gen_op_load_fpr_DT0(DFPREG(rs1));
3538 gen_op_load_fpr_DT1(DFPREG(rs2));
3539 tcg_gen_helper_0_0(helper_fcmpgt32);
3540 gen_op_store_DT0_fpr(DFPREG(rd));
3541 break;
3542 case 0x02e: /* VIS I fcmpeq32 */
3543 gen_op_load_fpr_DT0(DFPREG(rs1));
3544 gen_op_load_fpr_DT1(DFPREG(rs2));
3545 tcg_gen_helper_0_0(helper_fcmpeq32);
3546 gen_op_store_DT0_fpr(DFPREG(rd));
3547 break;
3548 case 0x031: /* VIS I fmul8x16 */
3549 gen_op_load_fpr_DT0(DFPREG(rs1));
3550 gen_op_load_fpr_DT1(DFPREG(rs2));
3551 tcg_gen_helper_0_0(helper_fmul8x16);
3552 gen_op_store_DT0_fpr(DFPREG(rd));
3553 break;
3554 case 0x033: /* VIS I fmul8x16au */
3555 gen_op_load_fpr_DT0(DFPREG(rs1));
3556 gen_op_load_fpr_DT1(DFPREG(rs2));
3557 tcg_gen_helper_0_0(helper_fmul8x16au);
3558 gen_op_store_DT0_fpr(DFPREG(rd));
3559 break;
3560 case 0x035: /* VIS I fmul8x16al */
3561 gen_op_load_fpr_DT0(DFPREG(rs1));
3562 gen_op_load_fpr_DT1(DFPREG(rs2));
3563 tcg_gen_helper_0_0(helper_fmul8x16al);
3564 gen_op_store_DT0_fpr(DFPREG(rd));
3565 break;
3566 case 0x036: /* VIS I fmul8sux16 */
3567 gen_op_load_fpr_DT0(DFPREG(rs1));
3568 gen_op_load_fpr_DT1(DFPREG(rs2));
3569 tcg_gen_helper_0_0(helper_fmul8sux16);
3570 gen_op_store_DT0_fpr(DFPREG(rd));
3571 break;
3572 case 0x037: /* VIS I fmul8ulx16 */
3573 gen_op_load_fpr_DT0(DFPREG(rs1));
3574 gen_op_load_fpr_DT1(DFPREG(rs2));
3575 tcg_gen_helper_0_0(helper_fmul8ulx16);
3576 gen_op_store_DT0_fpr(DFPREG(rd));
3577 break;
3578 case 0x038: /* VIS I fmuld8sux16 */
3579 gen_op_load_fpr_DT0(DFPREG(rs1));
3580 gen_op_load_fpr_DT1(DFPREG(rs2));
3581 tcg_gen_helper_0_0(helper_fmuld8sux16);
3582 gen_op_store_DT0_fpr(DFPREG(rd));
3583 break;
3584 case 0x039: /* VIS I fmuld8ulx16 */
3585 gen_op_load_fpr_DT0(DFPREG(rs1));
3586 gen_op_load_fpr_DT1(DFPREG(rs2));
3587 tcg_gen_helper_0_0(helper_fmuld8ulx16);
3588 gen_op_store_DT0_fpr(DFPREG(rd));
3589 break;
3590 case 0x03a: /* VIS I fpack32 */
3591 case 0x03b: /* VIS I fpack16 */
3592 case 0x03d: /* VIS I fpackfix */
3593 case 0x03e: /* VIS I pdist */
3594 // XXX
3595 goto illegal_insn;
3596 case 0x048: /* VIS I faligndata */
3597 gen_op_load_fpr_DT0(DFPREG(rs1));
3598 gen_op_load_fpr_DT1(DFPREG(rs2));
3599 tcg_gen_helper_0_0(helper_faligndata);
3600 gen_op_store_DT0_fpr(DFPREG(rd));
3601 break;
3602 case 0x04b: /* VIS I fpmerge */
3603 gen_op_load_fpr_DT0(DFPREG(rs1));
3604 gen_op_load_fpr_DT1(DFPREG(rs2));
3605 tcg_gen_helper_0_0(helper_fpmerge);
3606 gen_op_store_DT0_fpr(DFPREG(rd));
3607 break;
3608 case 0x04c: /* VIS II bshuffle */
3609 // XXX
3610 goto illegal_insn;
3611 case 0x04d: /* VIS I fexpand */
3612 gen_op_load_fpr_DT0(DFPREG(rs1));
3613 gen_op_load_fpr_DT1(DFPREG(rs2));
3614 tcg_gen_helper_0_0(helper_fexpand);
3615 gen_op_store_DT0_fpr(DFPREG(rd));
3616 break;
3617 case 0x050: /* VIS I fpadd16 */
3618 gen_op_load_fpr_DT0(DFPREG(rs1));
3619 gen_op_load_fpr_DT1(DFPREG(rs2));
3620 tcg_gen_helper_0_0(helper_fpadd16);
3621 gen_op_store_DT0_fpr(DFPREG(rd));
3622 break;
3623 case 0x051: /* VIS I fpadd16s */
3624 gen_op_load_fpr_FT0(rs1);
3625 gen_op_load_fpr_FT1(rs2);
3626 tcg_gen_helper_0_0(helper_fpadd16s);
3627 gen_op_store_FT0_fpr(rd);
3628 break;
3629 case 0x052: /* VIS I fpadd32 */
3630 gen_op_load_fpr_DT0(DFPREG(rs1));
3631 gen_op_load_fpr_DT1(DFPREG(rs2));
3632 tcg_gen_helper_0_0(helper_fpadd32);
3633 gen_op_store_DT0_fpr(DFPREG(rd));
3634 break;
3635 case 0x053: /* VIS I fpadd32s */
3636 gen_op_load_fpr_FT0(rs1);
3637 gen_op_load_fpr_FT1(rs2);
3638 tcg_gen_helper_0_0(helper_fpadd32s);
3639 gen_op_store_FT0_fpr(rd);
3640 break;
3641 case 0x054: /* VIS I fpsub16 */
3642 gen_op_load_fpr_DT0(DFPREG(rs1));
3643 gen_op_load_fpr_DT1(DFPREG(rs2));
3644 tcg_gen_helper_0_0(helper_fpsub16);
3645 gen_op_store_DT0_fpr(DFPREG(rd));
3646 break;
3647 case 0x055: /* VIS I fpsub16s */
3648 gen_op_load_fpr_FT0(rs1);
3649 gen_op_load_fpr_FT1(rs2);
3650 tcg_gen_helper_0_0(helper_fpsub16s);
3651 gen_op_store_FT0_fpr(rd);
3652 break;
3653 case 0x056: /* VIS I fpsub32 */
3654 gen_op_load_fpr_DT0(DFPREG(rs1));
3655 gen_op_load_fpr_DT1(DFPREG(rs2));
3656 tcg_gen_helper_0_0(helper_fpadd32);
3657 gen_op_store_DT0_fpr(DFPREG(rd));
3658 break;
3659 case 0x057: /* VIS I fpsub32s */
3660 gen_op_load_fpr_FT0(rs1);
3661 gen_op_load_fpr_FT1(rs2);
3662 tcg_gen_helper_0_0(helper_fpsub32s);
3663 gen_op_store_FT0_fpr(rd);
3664 break;
3665 case 0x060: /* VIS I fzero */
3666 tcg_gen_helper_0_0(helper_movl_DT0_0);
3667 gen_op_store_DT0_fpr(DFPREG(rd));
3668 break;
3669 case 0x061: /* VIS I fzeros */
3670 tcg_gen_helper_0_0(helper_movl_FT0_0);
3671 gen_op_store_FT0_fpr(rd);
3672 break;
3673 case 0x062: /* VIS I fnor */
3674 gen_op_load_fpr_DT0(DFPREG(rs1));
3675 gen_op_load_fpr_DT1(DFPREG(rs2));
3676 tcg_gen_helper_0_0(helper_fnor);
3677 gen_op_store_DT0_fpr(DFPREG(rd));
3678 break;
3679 case 0x063: /* VIS I fnors */
3680 gen_op_load_fpr_FT0(rs1);
3681 gen_op_load_fpr_FT1(rs2);
3682 tcg_gen_helper_0_0(helper_fnors);
3683 gen_op_store_FT0_fpr(rd);
3684 break;
3685 case 0x064: /* VIS I fandnot2 */
3686 gen_op_load_fpr_DT1(DFPREG(rs1));
3687 gen_op_load_fpr_DT0(DFPREG(rs2));
3688 tcg_gen_helper_0_0(helper_fandnot);
3689 gen_op_store_DT0_fpr(DFPREG(rd));
3690 break;
3691 case 0x065: /* VIS I fandnot2s */
3692 gen_op_load_fpr_FT1(rs1);
3693 gen_op_load_fpr_FT0(rs2);
3694 tcg_gen_helper_0_0(helper_fandnots);
3695 gen_op_store_FT0_fpr(rd);
3696 break;
3697 case 0x066: /* VIS I fnot2 */
3698 gen_op_load_fpr_DT1(DFPREG(rs2));
3699 tcg_gen_helper_0_0(helper_fnot);
3700 gen_op_store_DT0_fpr(DFPREG(rd));
3701 break;
3702 case 0x067: /* VIS I fnot2s */
3703 gen_op_load_fpr_FT1(rs2);
3704 tcg_gen_helper_0_0(helper_fnot);
3705 gen_op_store_FT0_fpr(rd);
3706 break;
3707 case 0x068: /* VIS I fandnot1 */
3708 gen_op_load_fpr_DT0(DFPREG(rs1));
3709 gen_op_load_fpr_DT1(DFPREG(rs2));
3710 tcg_gen_helper_0_0(helper_fandnot);
3711 gen_op_store_DT0_fpr(DFPREG(rd));
3712 break;
3713 case 0x069: /* VIS I fandnot1s */
3714 gen_op_load_fpr_FT0(rs1);
3715 gen_op_load_fpr_FT1(rs2);
3716 tcg_gen_helper_0_0(helper_fandnots);
3717 gen_op_store_FT0_fpr(rd);
3718 break;
3719 case 0x06a: /* VIS I fnot1 */
3720 gen_op_load_fpr_DT1(DFPREG(rs1));
3721 tcg_gen_helper_0_0(helper_fnot);
3722 gen_op_store_DT0_fpr(DFPREG(rd));
3723 break;
3724 case 0x06b: /* VIS I fnot1s */
3725 gen_op_load_fpr_FT1(rs1);
3726 tcg_gen_helper_0_0(helper_fnot);
3727 gen_op_store_FT0_fpr(rd);
3728 break;
3729 case 0x06c: /* VIS I fxor */
3730 gen_op_load_fpr_DT0(DFPREG(rs1));
3731 gen_op_load_fpr_DT1(DFPREG(rs2));
3732 tcg_gen_helper_0_0(helper_fxor);
3733 gen_op_store_DT0_fpr(DFPREG(rd));
3734 break;
3735 case 0x06d: /* VIS I fxors */
3736 gen_op_load_fpr_FT0(rs1);
3737 gen_op_load_fpr_FT1(rs2);
3738 tcg_gen_helper_0_0(helper_fxors);
3739 gen_op_store_FT0_fpr(rd);
3740 break;
3741 case 0x06e: /* VIS I fnand */
3742 gen_op_load_fpr_DT0(DFPREG(rs1));
3743 gen_op_load_fpr_DT1(DFPREG(rs2));
3744 tcg_gen_helper_0_0(helper_fnand);
3745 gen_op_store_DT0_fpr(DFPREG(rd));
3746 break;
3747 case 0x06f: /* VIS I fnands */
3748 gen_op_load_fpr_FT0(rs1);
3749 gen_op_load_fpr_FT1(rs2);
3750 tcg_gen_helper_0_0(helper_fnands);
3751 gen_op_store_FT0_fpr(rd);
3752 break;
3753 case 0x070: /* VIS I fand */
3754 gen_op_load_fpr_DT0(DFPREG(rs1));
3755 gen_op_load_fpr_DT1(DFPREG(rs2));
3756 tcg_gen_helper_0_0(helper_fand);
3757 gen_op_store_DT0_fpr(DFPREG(rd));
3758 break;
3759 case 0x071: /* VIS I fands */
3760 gen_op_load_fpr_FT0(rs1);
3761 gen_op_load_fpr_FT1(rs2);
3762 tcg_gen_helper_0_0(helper_fands);
3763 gen_op_store_FT0_fpr(rd);
3764 break;
3765 case 0x072: /* VIS I fxnor */
3766 gen_op_load_fpr_DT0(DFPREG(rs1));
3767 gen_op_load_fpr_DT1(DFPREG(rs2));
3768 tcg_gen_helper_0_0(helper_fxnor);
3769 gen_op_store_DT0_fpr(DFPREG(rd));
3770 break;
3771 case 0x073: /* VIS I fxnors */
3772 gen_op_load_fpr_FT0(rs1);
3773 gen_op_load_fpr_FT1(rs2);
3774 tcg_gen_helper_0_0(helper_fxnors);
3775 gen_op_store_FT0_fpr(rd);
3776 break;
3777 case 0x074: /* VIS I fsrc1 */
3778 gen_op_load_fpr_DT0(DFPREG(rs1));
3779 gen_op_store_DT0_fpr(DFPREG(rd));
3780 break;
3781 case 0x075: /* VIS I fsrc1s */
3782 gen_op_load_fpr_FT0(rs1);
3783 gen_op_store_FT0_fpr(rd);
3784 break;
3785 case 0x076: /* VIS I fornot2 */
3786 gen_op_load_fpr_DT1(DFPREG(rs1));
3787 gen_op_load_fpr_DT0(DFPREG(rs2));
3788 tcg_gen_helper_0_0(helper_fornot);
3789 gen_op_store_DT0_fpr(DFPREG(rd));
3790 break;
3791 case 0x077: /* VIS I fornot2s */
3792 gen_op_load_fpr_FT1(rs1);
3793 gen_op_load_fpr_FT0(rs2);
3794 tcg_gen_helper_0_0(helper_fornots);
3795 gen_op_store_FT0_fpr(rd);
3796 break;
3797 case 0x078: /* VIS I fsrc2 */
3798 gen_op_load_fpr_DT0(DFPREG(rs2));
3799 gen_op_store_DT0_fpr(DFPREG(rd));
3800 break;
3801 case 0x079: /* VIS I fsrc2s */
3802 gen_op_load_fpr_FT0(rs2);
3803 gen_op_store_FT0_fpr(rd);
3804 break;
3805 case 0x07a: /* VIS I fornot1 */
3806 gen_op_load_fpr_DT0(DFPREG(rs1));
3807 gen_op_load_fpr_DT1(DFPREG(rs2));
3808 tcg_gen_helper_0_0(helper_fornot);
3809 gen_op_store_DT0_fpr(DFPREG(rd));
3810 break;
3811 case 0x07b: /* VIS I fornot1s */
3812 gen_op_load_fpr_FT0(rs1);
3813 gen_op_load_fpr_FT1(rs2);
3814 tcg_gen_helper_0_0(helper_fornots);
3815 gen_op_store_FT0_fpr(rd);
3816 break;
3817 case 0x07c: /* VIS I for */
3818 gen_op_load_fpr_DT0(DFPREG(rs1));
3819 gen_op_load_fpr_DT1(DFPREG(rs2));
3820 tcg_gen_helper_0_0(helper_for);
3821 gen_op_store_DT0_fpr(DFPREG(rd));
3822 break;
3823 case 0x07d: /* VIS I fors */
3824 gen_op_load_fpr_FT0(rs1);
3825 gen_op_load_fpr_FT1(rs2);
3826 tcg_gen_helper_0_0(helper_fors);
3827 gen_op_store_FT0_fpr(rd);
3828 break;
3829 case 0x07e: /* VIS I fone */
3830 tcg_gen_helper_0_0(helper_movl_DT0_1);
3831 gen_op_store_DT0_fpr(DFPREG(rd));
3832 break;
3833 case 0x07f: /* VIS I fones */
3834 tcg_gen_helper_0_0(helper_movl_FT0_1);
3835 gen_op_store_FT0_fpr(rd);
3836 break;
3837 case 0x080: /* VIS I shutdown */
3838 case 0x081: /* VIS II siam */
3839 // XXX
3840 goto illegal_insn;
3841 default:
3842 goto illegal_insn;
3843 }
3844 #else
3845 goto ncp_insn;
3846 #endif
3847 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
3848 #ifdef TARGET_SPARC64
3849 goto illegal_insn;
3850 #else
3851 goto ncp_insn;
3852 #endif
3853 #ifdef TARGET_SPARC64
            } else if (xop == 0x39) { /* V9 return */
                /* return: restore the register window, then jump to
                   rs1 + rs2/imm; the target must be 4-byte aligned.  */
                save_state(dc, cpu_cond);
                cpu_src1 = get_src1(insn, cpu_src1);
                if (IS_IMM) { /* immediate */
                    rs2 = GET_FIELDs(insn, 19, 31);
                    tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
                } else { /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    if (rs2) {
                        gen_movl_reg_TN(rs2, cpu_src2);
                        tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    } else
                        tcg_gen_mov_tl(cpu_dst, cpu_src1);
                }
                tcg_gen_helper_0_0(helper_restore);
                /* Take the delay-slot PC, check alignment of the
                   computed target and make npc dynamic.  */
                gen_mov_pc_npc(dc, cpu_cond);
                tcg_gen_helper_0_2(helper_check_align, cpu_dst, tcg_const_i32(3));
                tcg_gen_mov_tl(cpu_npc, cpu_dst);
                dc->npc = DYNAMIC_PC;
                goto jmp_insn;
3874 #endif
3875 } else {
3876 cpu_src1 = get_src1(insn, cpu_src1);
3877 if (IS_IMM) { /* immediate */
3878 rs2 = GET_FIELDs(insn, 19, 31);
3879 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
3880 } else { /* register */
3881 rs2 = GET_FIELD(insn, 27, 31);
3882 if (rs2) {
3883 gen_movl_reg_TN(rs2, cpu_src2);
3884 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3885 } else
3886 tcg_gen_mov_tl(cpu_dst, cpu_src1);
3887 }
3888 switch (xop) {
3889 case 0x38: /* jmpl */
3890 {
3891 if (rd != 0) {
3892 tcg_gen_movi_tl(cpu_tmp0, dc->pc);
3893 gen_movl_TN_reg(rd, cpu_tmp0);
3894 }
3895 gen_mov_pc_npc(dc, cpu_cond);
3896 tcg_gen_helper_0_2(helper_check_align, cpu_dst, tcg_const_i32(3));
3897 tcg_gen_mov_tl(cpu_npc, cpu_dst);
3898 dc->npc = DYNAMIC_PC;
3899 }
3900 goto jmp_insn;
3901 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
3902 case 0x39: /* rett, V9 return */
3903 {
3904 if (!supervisor(dc))
3905 goto priv_insn;
3906 gen_mov_pc_npc(dc, cpu_cond);
3907 tcg_gen_helper_0_2(helper_check_align, cpu_dst, tcg_const_i32(3));
3908 tcg_gen_mov_tl(cpu_npc, cpu_dst);
3909 dc->npc = DYNAMIC_PC;
3910 tcg_gen_helper_0_0(helper_rett);
3911 }
3912 goto jmp_insn;
3913 #endif
3914 case 0x3b: /* flush */
3915 tcg_gen_helper_0_1(helper_flush, cpu_dst);
3916 break;
3917 case 0x3c: /* save */
3918 save_state(dc, cpu_cond);
3919 tcg_gen_helper_0_0(helper_save);
3920 gen_movl_TN_reg(rd, cpu_dst);
3921 break;
3922 case 0x3d: /* restore */
3923 save_state(dc, cpu_cond);
3924 tcg_gen_helper_0_0(helper_restore);
3925 gen_movl_TN_reg(rd, cpu_dst);
3926 break;
3927 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
3928 case 0x3e: /* V9 done/retry */
3929 {
3930 switch (rd) {
3931 case 0:
3932 if (!supervisor(dc))
3933 goto priv_insn;
3934 dc->npc = DYNAMIC_PC;
3935 dc->pc = DYNAMIC_PC;
3936 tcg_gen_helper_0_0(helper_done);
3937 goto jmp_insn;
3938 case 1:
3939 if (!supervisor(dc))
3940 goto priv_insn;
3941 dc->npc = DYNAMIC_PC;
3942 dc->pc = DYNAMIC_PC;
3943 tcg_gen_helper_0_0(helper_retry);
3944 goto jmp_insn;
3945 default:
3946 goto illegal_insn;
3947 }
3948 }
3949 break;
3950 #endif
3951 default:
3952 goto illegal_insn;
3953 }
3954 }
3955 break;
3956 }
3957 break;
3958 case 3: /* load/store instructions */
3959 {
3960 unsigned int xop = GET_FIELD(insn, 7, 12);
3961
3962 save_state(dc, cpu_cond);
3963 cpu_src1 = get_src1(insn, cpu_src1);
3964 if (xop == 0x3c || xop == 0x3e)
3965 {
3966 rs2 = GET_FIELD(insn, 27, 31);
3967 gen_movl_reg_TN(rs2, cpu_src2);
3968 }
3969 else if (IS_IMM) { /* immediate */
3970 rs2 = GET_FIELDs(insn, 19, 31);
3971 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
3972 } else { /* register */
3973 rs2 = GET_FIELD(insn, 27, 31);
3974 if (rs2 != 0) {
3975 gen_movl_reg_TN(rs2, cpu_src2);
3976 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
3977 } else
3978 tcg_gen_mov_tl(cpu_addr, cpu_src1);
3979 }
3980 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
3981 (xop > 0x17 && xop <= 0x1d ) ||
3982 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
3983 switch (xop) {
3984 case 0x0: /* load unsigned word */
3985 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
3986 ABI32_MASK(cpu_addr);
3987 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
3988 break;
3989 case 0x1: /* load unsigned byte */
3990 ABI32_MASK(cpu_addr);
3991 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
3992 break;
3993 case 0x2: /* load unsigned halfword */
3994 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(1));
3995 ABI32_MASK(cpu_addr);
3996 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
3997 break;
3998 case 0x3: /* load double word */
3999 if (rd & 1)
4000 goto illegal_insn;
4001 else {
4002 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7));
4003 ABI32_MASK(cpu_addr);
4004 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4005 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4006 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4007 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4008 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4009 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4010 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4011 }
4012 break;
4013 case 0x9: /* load signed byte */
4014 ABI32_MASK(cpu_addr);
4015 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4016 break;
4017 case 0xa: /* load signed halfword */
4018 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(1));
4019 ABI32_MASK(cpu_addr);
4020 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4021 break;
4022 case 0xd: /* ldstub -- XXX: should be atomically */
4023 ABI32_MASK(cpu_addr);
4024 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4025 tcg_gen_qemu_st8(tcg_const_tl(0xff), cpu_addr, dc->mem_idx);
4026 break;
4027 case 0x0f: /* swap register with memory. Also atomically */
4028 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4029 gen_movl_reg_TN(rd, cpu_val);
4030 ABI32_MASK(cpu_addr);
4031 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4032 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4033 tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
4034 break;
4035 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4036 case 0x10: /* load word alternate */
4037 #ifndef TARGET_SPARC64
4038 if (IS_IMM)
4039 goto illegal_insn;
4040 if (!supervisor(dc))
4041 goto priv_insn;
4042 #endif
4043 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4044 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4045 break;
4046 case 0x11: /* load unsigned byte alternate */
4047 #ifndef TARGET_SPARC64
4048 if (IS_IMM)
4049 goto illegal_insn;
4050 if (!supervisor(dc))
4051 goto priv_insn;
4052 #endif
4053 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4054 break;
4055 case 0x12: /* load unsigned halfword alternate */
4056 #ifndef TARGET_SPARC64
4057 if (IS_IMM)
4058 goto illegal_insn;
4059 if (!supervisor(dc))
4060 goto priv_insn;
4061 #endif
4062 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(1));
4063 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4064 break;
4065 case 0x13: /* load double word alternate */
4066 #ifndef TARGET_SPARC64
4067 if (IS_IMM)
4068 goto illegal_insn;
4069 if (!supervisor(dc))
4070 goto priv_insn;
4071 #endif
4072 if (rd & 1)
4073 goto illegal_insn;
4074 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7));
4075 gen_ldda_asi(cpu_tmp0, cpu_val, cpu_addr, insn);
4076 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4077 break;
4078 case 0x19: /* load signed byte alternate */
4079 #ifndef TARGET_SPARC64
4080 if (IS_IMM)
4081 goto illegal_insn;
4082 if (!supervisor(dc))
4083 goto priv_insn;
4084 #endif
4085 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4086 break;
4087 case 0x1a: /* load signed halfword alternate */
4088 #ifndef TARGET_SPARC64
4089 if (IS_IMM)
4090 goto illegal_insn;
4091 if (!supervisor(dc))
4092 goto priv_insn;
4093 #endif
4094 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(1));
4095 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4096 break;
4097 case 0x1d: /* ldstuba -- XXX: should be atomically */
4098 #ifndef TARGET_SPARC64
4099 if (IS_IMM)
4100 goto illegal_insn;
4101 if (!supervisor(dc))
4102 goto priv_insn;
4103 #endif
4104 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4105 break;
4106 case 0x1f: /* swap reg with alt. memory. Also atomically */
4107 #ifndef TARGET_SPARC64
4108 if (IS_IMM)
4109 goto illegal_insn;
4110 if (!supervisor(dc))
4111 goto priv_insn;
4112 #endif
4113 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4114 gen_movl_reg_TN(rd, cpu_val);
4115 gen_swap_asi(cpu_val, cpu_addr, insn);
4116 break;
4117
4118 #ifndef TARGET_SPARC64
4119 case 0x30: /* ldc */
4120 case 0x31: /* ldcsr */
4121 case 0x33: /* lddc */
4122 goto ncp_insn;
4123 #endif
4124 #endif
4125 #ifdef TARGET_SPARC64
4126 case 0x08: /* V9 ldsw */
4127 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4128 ABI32_MASK(cpu_addr);
4129 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4130 break;
4131 case 0x0b: /* V9 ldx */
4132 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7));
4133 ABI32_MASK(cpu_addr);
4134 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4135 break;
4136 case 0x18: /* V9 ldswa */
4137 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4138 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4139 break;
4140 case 0x1b: /* V9 ldxa */
4141 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7));
4142 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4143 break;
4144 case 0x2d: /* V9 prefetch, no effect */
4145 goto skip_move;
4146 case 0x30: /* V9 ldfa */
4147 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4148 gen_ldf_asi(cpu_addr, insn, 4, rd);
4149 goto skip_move;
4150 case 0x33: /* V9 lddfa */
4151 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4152 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4153 goto skip_move;
4154 case 0x3d: /* V9 prefetcha, no effect */
4155 goto skip_move;
4156 case 0x32: /* V9 ldqfa */
4157 #if defined(CONFIG_USER_ONLY)
4158 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4159 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4160 goto skip_move;
4161 #else
4162 goto nfpu_insn;
4163 #endif
4164 #endif
4165 default:
4166 goto illegal_insn;
4167 }
4168 gen_movl_TN_reg(rd, cpu_val);
4169 #ifdef TARGET_SPARC64
4170 skip_move: ;
4171 #endif
4172 } else if (xop >= 0x20 && xop < 0x24) {
4173 if (gen_trap_ifnofpu(dc, cpu_cond))
4174 goto jmp_insn;
4175 switch (xop) {
4176 case 0x20: /* load fpreg */
4177 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4178 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4179 tcg_gen_st_i32(cpu_tmp32, cpu_env,
4180 offsetof(CPUState, fpr[rd]));
4181 break;
4182 case 0x21: /* load fsr */
4183 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4184 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4185 tcg_gen_st_i32(cpu_tmp32, cpu_env,
4186 offsetof(CPUState, ft0));
4187 tcg_gen_helper_0_0(helper_ldfsr);
4188 break;
4189 case 0x22: /* load quad fpreg */
4190 #if defined(CONFIG_USER_ONLY)
4191 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4192 tcg_const_i32(7));
4193 tcg_gen_helper_0_1(helper_ldqf, cpu_addr);
4194 gen_op_store_QT0_fpr(QFPREG(rd));
4195 break;
4196 #else
4197 goto nfpu_insn;
4198 #endif
4199 case 0x23: /* load double fpreg */
4200 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4201 tcg_const_i32(7));
4202 tcg_gen_helper_0_2(helper_lddf, cpu_addr,
4203 tcg_const_i32(dc->mem_idx));
4204 gen_op_store_DT0_fpr(DFPREG(rd));
4205 break;
4206 default:
4207 goto illegal_insn;
4208 }
4209 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4210 xop == 0xe || xop == 0x1e) {
4211 gen_movl_reg_TN(rd, cpu_val);
4212 switch (xop) {
4213 case 0x4: /* store word */
4214 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4215 ABI32_MASK(cpu_addr);
4216 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4217 break;
4218 case 0x5: /* store byte */
4219 ABI32_MASK(cpu_addr);
4220 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4221 break;
4222 case 0x6: /* store halfword */
4223 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(1));
4224 ABI32_MASK(cpu_addr);
4225 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4226 break;
4227 case 0x7: /* store double word */
4228 if (rd & 1)
4229 goto illegal_insn;
4230 else {
4231 TCGv r_low;
4232
4233 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7));
4234 r_low = tcg_temp_new(TCG_TYPE_I32);
4235 gen_movl_reg_TN(rd + 1, r_low);
4236 #ifndef __i386__
4237 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_val,
4238 r_low);
4239 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4240 #else /* __i386__ */
4241 tcg_gen_st_tl(cpu_val, cpu_env, offsetof(CPUState, t1));
4242 tcg_gen_st_tl(r_low, cpu_env, offsetof(CPUState, t2));
4243 tcg_gen_helper_0_2(helper_std_i386, cpu_addr,
4244 tcg_const_i32(dc->mem_idx));
4245 #endif /* __i386__ */
4246 }
4247 break;
4248 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4249 case 0x14: /* store word alternate */
4250 #ifndef TARGET_SPARC64
4251 if (IS_IMM)
4252 goto illegal_insn;
4253 if (!supervisor(dc))
4254 goto priv_insn;
4255 #endif
4256 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4257 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4258 break;
4259 case 0x15: /* store byte alternate */
4260 #ifndef TARGET_SPARC64
4261 if (IS_IMM)
4262 goto illegal_insn;
4263 if (!supervisor(dc))
4264 goto priv_insn;
4265 #endif
4266 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4267 break;
4268 case 0x16: /* store halfword alternate */
4269 #ifndef TARGET_SPARC64
4270 if (IS_IMM)
4271 goto illegal_insn;
4272 if (!supervisor(dc))
4273 goto priv_insn;
4274 #endif
4275 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(1));
4276 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4277 break;
4278 case 0x17: /* store double word alternate */
4279 #ifndef TARGET_SPARC64
4280 if (IS_IMM)
4281 goto illegal_insn;
4282 if (!supervisor(dc))
4283 goto priv_insn;
4284 #endif
4285 if (rd & 1)
4286 goto illegal_insn;
4287 else {
4288 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7));
4289 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4290 }
4291 break;
4292 #endif
4293 #ifdef TARGET_SPARC64
4294 case 0x0e: /* V9 stx */
4295 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7));
4296 ABI32_MASK(cpu_addr);
4297 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4298 break;
4299 case 0x1e: /* V9 stxa */
4300 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7));
4301 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4302 break;
4303 #endif
4304 default:
4305 goto illegal_insn;
4306 }
4307 } else if (xop > 0x23 && xop < 0x28) {
4308 if (gen_trap_ifnofpu(dc, cpu_cond))
4309 goto jmp_insn;
4310 switch (xop) {
4311 case 0x24: /* store fpreg */
4312 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4313 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
4314 offsetof(CPUState, fpr[rd]));
4315 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4316 break;
4317 case 0x25: /* stfsr, V9 stxfsr */
4318 #ifdef CONFIG_USER_ONLY
4319 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4320 #endif
4321 tcg_gen_helper_0_0(helper_stfsr);
4322 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
4323 offsetof(CPUState, ft0));
4324 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4325 break;
4326 case 0x26:
4327 #ifdef TARGET_SPARC64
4328 #if defined(CONFIG_USER_ONLY)
4329 /* V9 stqf, store quad fpreg */
4330 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4331 tcg_const_i32(7));
4332 gen_op_load_fpr_QT0(QFPREG(rd));
4333 tcg_gen_helper_0_1(helper_stqf, cpu_addr);
4334 break;
4335 #else
4336 goto nfpu_insn;
4337 #endif
4338 #else /* !TARGET_SPARC64 */
4339 /* stdfq, store floating point queue */
4340 #if defined(CONFIG_USER_ONLY)
4341 goto illegal_insn;
4342 #else
4343 if (!supervisor(dc))
4344 goto priv_insn;
4345 if (gen_trap_ifnofpu(dc, cpu_cond))
4346 goto jmp_insn;
4347 goto nfq_insn;
4348 #endif
4349 #endif
4350 case 0x27: /* store double fpreg */
4351 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4352 tcg_const_i32(7));
4353 gen_op_load_fpr_DT0(DFPREG(rd));
4354 tcg_gen_helper_0_2(helper_stdf, cpu_addr,
4355 tcg_const_i32(dc->mem_idx));
4356 break;
4357 default:
4358 goto illegal_insn;
4359 }
4360 } else if (xop > 0x33 && xop < 0x3f) {
4361 switch (xop) {
4362 #ifdef TARGET_SPARC64
4363 case 0x34: /* V9 stfa */
4364 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4365 gen_op_load_fpr_FT0(rd);
4366 gen_stf_asi(cpu_addr, insn, 4, rd);
4367 break;
4368 case 0x36: /* V9 stqfa */
4369 #if defined(CONFIG_USER_ONLY)
4370 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7));
4371 gen_op_load_fpr_QT0(QFPREG(rd));
4372 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4373 break;
4374 #else
4375 goto nfpu_insn;
4376 #endif
4377 case 0x37: /* V9 stdfa */
4378 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4379 gen_op_load_fpr_DT0(DFPREG(rd));
4380 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4381 break;
4382 case 0x3c: /* V9 casa */
4383 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(3));
4384 gen_cas_asi(cpu_val, cpu_addr, cpu_val, insn, rd);
4385 gen_movl_TN_reg(rd, cpu_val);
4386 break;
4387 case 0x3e: /* V9 casxa */
4388 tcg_gen_helper_0_2(helper_check_align, cpu_addr, tcg_const_i32(7));
4389 gen_casx_asi(cpu_val, cpu_addr, cpu_val, insn, rd);
4390 gen_movl_TN_reg(rd, cpu_val);
4391 break;
4392 #else
4393 case 0x34: /* stc */
4394 case 0x35: /* stcsr */
4395 case 0x36: /* stdcq */
4396 case 0x37: /* stdc */
4397 goto ncp_insn;
4398 #endif
4399 default:
4400 goto illegal_insn;
4401 }
4402 }
4403 else
4404 goto illegal_insn;
4405 }
4406 break;
4407 }
4408 /* default case for non jump instructions */
4409 if (dc->npc == DYNAMIC_PC) {
4410 dc->pc = DYNAMIC_PC;
4411 gen_op_next_insn();
4412 } else if (dc->npc == JUMP_PC) {
4413 /* we can do a static jump */
4414 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4415 dc->is_br = 1;
4416 } else {
4417 dc->pc = dc->npc;
4418 dc->npc = dc->npc + 4;
4419 }
4420 jmp_insn:
4421 return;
4422 illegal_insn:
4423 save_state(dc, cpu_cond);
4424 tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_ILL_INSN));
4425 dc->is_br = 1;
4426 return;
4427 #if !defined(CONFIG_USER_ONLY)
4428 priv_insn:
4429 save_state(dc, cpu_cond);
4430 tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_PRIV_INSN));
4431 dc->is_br = 1;
4432 return;
4433 nfpu_insn:
4434 save_state(dc, cpu_cond);
4435 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4436 dc->is_br = 1;
4437 return;
4438 #ifndef TARGET_SPARC64
4439 nfq_insn:
4440 save_state(dc, cpu_cond);
4441 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4442 dc->is_br = 1;
4443 return;
4444 #endif
4445 #endif
4446 #ifndef TARGET_SPARC64
4447 ncp_insn:
4448 save_state(dc, cpu_cond);
4449 tcg_gen_helper_0_1(raise_exception, tcg_const_i32(TT_NCP_INSN));
4450 dc->is_br = 1;
4451 return;
4452 #endif
4453 }
4454
/* Callback installed via tcg_set_macro_func() below; this target emits
   no macro ops, so the expansion hook is intentionally a no-op.  */
static void tcg_macro_func(TCGContext *s, int macro_id, const int *dead_args)
{
}
4458
/* Translate a block of SPARC instructions starting at tb->pc into TCG ops.
   If 'spc' is non-zero, run in "search PC" mode: record per-op guest PC/NPC
   in the gen_opc_* side tables so a position in the generated code can be
   mapped back to a guest instruction (see gen_pc_load).  The NPC of the
   first instruction is carried in tb->cs_base.  Always returns 0.  */
static inline int gen_intermediate_code_internal(TranslationBlock * tb,
                                                 int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    int j, lj = -1;

    /* Set up the per-TB disassembly context.  */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->fpu_enabled = cpu_fpu_enabled(env);
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Temporaries shared by the translation of every instruction.  */
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);

    cpu_cond = cpu_T[2];

    do {
        /* On a breakpoint, flush state and raise a debug exception.  */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    tcg_gen_helper_0_0(helper_debug);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            /* Search-PC mode: record the guest PC/NPC at the current
               opcode index, zero-filling any skipped slots.  */
            if (loglevel > 0)
                fprintf(logfile, "Search PC...\n");
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
            }
        }
        last_pc = dc->pc;
        disas_sparc_insn(dc);

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32));

 exit_gen_loop:
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            /* PC or NPC only known at run time: store what is static
               and exit to the main loop.  */
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* Zero-fill the remaining side-table slots and export the two
           candidate jump targets for later JUMP_PC resolution.  */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        if (loglevel > 0) {
            page_dump(logfile);
        }
#endif
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
    }
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "--------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
    return 0;
}
4570
4571 int gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
4572 {
4573 return gen_intermediate_code_internal(tb, 0, env);
4574 }
4575
4576 int gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
4577 {
4578 return gen_intermediate_code_internal(tb, 1, env);
4579 }
4580
/* One-time initialization of the translator: register the TCG globals
   (env pointer, T0-T2, condition codes, PSR/FSR, PC/NPC and the global
   registers g1-g7) that the code generator references by name.  Safe to
   call more than once; only the first call does the registration.  */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        tcg_set_macro_func(&tcg_ctx, tcg_macro_func);
        cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
        //#if TARGET_LONG_BITS > HOST_LONG_BITS
#ifdef TARGET_SPARC64
        /* 64-bit target: T0-T2 live in CPUState memory slots; xcc holds
           the extended (64-bit) condition codes.  */
        cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t0), "T0");
        cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t1), "T1");
        cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, t2), "T2");
        cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
#else
        /* 32-bit target: T0-T2 can be pinned to host registers.  */
        cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
        cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
        cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
#endif
        cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
        cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        /* g0 is hardwired to zero, so only g1-g7 become TCG globals.  */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
    }
}
4648
4649 void gen_pc_load(CPUState *env, TranslationBlock *tb,
4650 unsigned long searched_pc, int pc_pos, void *puc)
4651 {
4652 target_ulong npc;
4653 env->pc = gen_opc_pc[pc_pos];
4654 npc = gen_opc_npc[pc_pos];
4655 if (npc == 1) {
4656 /* dynamic NPC: already stored */
4657 } else if (npc == 2) {
4658 target_ulong t2 = (target_ulong)(unsigned long)puc;
4659 /* jump PC: use T2 and the jump targets of the translation */
4660 if (t2)
4661 env->npc = gen_opc_jump_pc[0];
4662 else
4663 env->npc = gen_opc_jump_pc[1];
4664 } else {
4665 env->npc = npc;
4666 }
4667 }