]> git.proxmox.com Git - qemu.git/blob - target-sparc/translate.c
63152183758e5d88e3f2d837fcae6a1c0f623ca4
[qemu.git] / target-sparc / translate.c
1 /*
2 SPARC translation
3
4 Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5 Copyright (C) 2003-2005 Fabrice Bellard
6
7 This library is free software; you can redistribute it and/or
8 modify it under the terms of the GNU Lesser General Public
9 License as published by the Free Software Foundation; either
10 version 2 of the License, or (at your option) any later version.
11
12 This library is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 Lesser General Public License for more details.
16
17 You should have received a copy of the GNU Lesser General Public
18 License along with this library; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20 */
21
22 #include <stdarg.h>
23 #include <stdlib.h>
24 #include <stdio.h>
25 #include <string.h>
26 #include <inttypes.h>
27
28 #include "cpu.h"
29 #include "exec-all.h"
30 #include "disas.h"
31 #include "helper.h"
32 #include "tcg-op.h"
33
34 #define DEBUG_DISAS
35
36 #define DYNAMIC_PC 1 /* dynamic pc value */
37 #define JUMP_PC 2 /* dynamic pc value which takes only two values
38 according to jump_pc[T2] */
39
40 /* global register indexes */
41 static TCGv cpu_env, cpu_regwptr;
42 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
43 static TCGv cpu_psr, cpu_fsr, cpu_pc, cpu_npc, cpu_gregs[8];
44 static TCGv cpu_cond, cpu_src1, cpu_src2, cpu_dst, cpu_addr, cpu_val;
45 #ifdef TARGET_SPARC64
46 static TCGv cpu_xcc;
47 #endif
48 /* local register indexes (only used inside old micro ops) */
49 static TCGv cpu_tmp0, cpu_tmp32, cpu_tmp64;
50
51 #include "gen-icount.h"
52
/* Per-translation-block decoder state. */
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;          /* NOTE(review): presumably set when the insn ends the
                           TB (branch/trap) — set outside this chunk */
    int mem_idx;        /* MMU index for memory ops; also encodes privilege
                           level (see supervisor()/hypervisor() macros) */
    int fpu_enabled;    /* non-zero when FPU access is permitted */
    int address_mask_32bit; /* sparc64: truncate addresses to 32 bits
                               (consumed by AM_CHECK/gen_address_mask) */
    struct TranslationBlock *tb; /* TB being translated (for direct chaining) */
    uint32_t features;  /* CPU feature bits gating optional instructions */
} DisasContext;
64
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO) \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO) \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the two extractors above. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Map an FP register number to its fpr[] index; on sparc64 bit 0 of a
   double/quad register number selects the upper bank (folded into bit 5). */
#define FFPREG(r) (r)
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
/* sparc32: doubles/quads simply use aligned even register numbers. */
#define FFPREG(r) (r)
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif
85
/* Sign-extend the low LEN bits of X to a full int.
   The previous implementation did `(x << (32 - len)) >> (32 - len)`:
   left-shifting into the sign bit of a signed int is undefined behavior,
   and right-shifting a negative value is implementation-defined.  Use the
   portable unsigned two's-complement idiom instead; behavior for all
   callers (field widths 1..32) is unchanged. */
static int sign_extend(int x, int len)
{
    unsigned int u, sign;

    if (len <= 0 || len >= 32)
        return x;               /* full width: nothing to extend */
    u = (unsigned int)x & ((1U << len) - 1);
    sign = 1U << (len - 1);
    return (int)((u ^ sign) - sign);
}
91
92 #define IS_IMM (insn & (1<<13))
93
94 /* floating point registers moves */
/* Copy 32-bit FP register fpr[src] into the helper input slot ft0
   (staged through the shared cpu_tmp32 temporary). */
static void gen_op_load_fpr_FT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft0));
}
100
/* Copy 32-bit FP register fpr[src] into the helper input slot ft1. */
static void gen_op_load_fpr_FT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft1));
}
106
/* Write the helper result slot ft0 back to 32-bit FP register fpr[dst]. */
static void gen_op_store_FT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, ft0));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
}
112
/* Load the double-precision register pair fpr[src]/fpr[src+1] into the
   64-bit helper slot dt0 (upper 32-bit word first). */
static void gen_op_load_fpr_DT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
}
122
/* Load the double-precision register pair fpr[src]/fpr[src+1] into the
   64-bit helper slot dt1. */
static void gen_op_load_fpr_DT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt1) +
                   offsetof(CPU_DoubleU, l.lower));
}
132
/* Write the 64-bit helper slot dt0 back to the register pair
   fpr[dst]/fpr[dst+1]. */
static void gen_op_store_DT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.upper));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, dt0) +
                   offsetof(CPU_DoubleU, l.lower));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 1]));
}
142
/* Load the quad-precision group fpr[src..src+3] into the 128-bit helper
   slot qt0, most-significant word first. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 2]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 3]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
}
158
/* Load the quad-precision group fpr[src..src+3] into the 128-bit helper
   slot qt1, most-significant word first. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 1]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 2]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[src + 3]));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, l.lowest));
}
174
/* Write the 128-bit helper slot qt0 back to the register group
   fpr[dst..dst+3], most-significant word first. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upmost));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.upper));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 1]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lower));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 2]));
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, l.lowest));
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, fpr[dst + 3]));
}
190
191 /* moves */
/* Privilege predicates: user-only emulation never runs privileged;
   otherwise the privilege level is encoded in dc->mem_idx. */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#define supervisor(dc) (dc->mem_idx >= 1)
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->mem_idx == 2)
#else
#endif
#endif

/* sparc64: whether effective addresses must be masked to 32 bits;
   always true under a 32-bit ABI. */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
212
/* On sparc64, truncate ADDR to 32 bits when address masking is in
   effect (AM_CHECK); a no-op on sparc32 builds. */
static inline void gen_address_mask(DisasContext *dc, TCGv addr)
{
#ifdef TARGET_SPARC64
    if (AM_CHECK(dc))
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
#endif
}
220
/* Read architectural register REG into TN: %g0 always reads as zero,
   %g1..%g7 live in TCG globals, windowed registers (>= 8) are loaded
   through the register-window pointer cpu_regwptr. */
static inline void gen_movl_reg_TN(int reg, TCGv tn)
{
    if (reg == 0)
        tcg_gen_movi_tl(tn, 0);
    else if (reg < 8)
        tcg_gen_mov_tl(tn, cpu_gregs[reg]);
    else {
        tcg_gen_ld_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
231
/* Write TN to architectural register REG; writes to %g0 are discarded,
   windowed registers go through cpu_regwptr. */
static inline void gen_movl_TN_reg(int reg, TCGv tn)
{
    if (reg == 0)
        return;
    else if (reg < 8)
        tcg_gen_mov_tl(cpu_gregs[reg], tn);
    else {
        tcg_gen_st_tl(tn, cpu_regwptr, (reg - 8) * sizeof(target_ulong));
    }
}
242
/* End the TB with a jump to (pc, npc).  When both targets lie on the
   same guest page as this TB we can chain TBs directly; otherwise fall
   back to exiting to the main loop for a lookup. */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    TranslationBlock *tb;

    tb = s->tb;
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) &&
        (npc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        /* encode the chaining slot in the low bits of the TB pointer */
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
263
264 // XXX suboptimal
// XXX suboptimal
/* Extract the PSR negative (N) flag from SRC into REG as 0 or 1. */
static inline void gen_mov_reg_N(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_NEG_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
271
/* Extract the PSR zero (Z) flag from SRC into REG as 0 or 1. */
static inline void gen_mov_reg_Z(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_ZERO_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
278
/* Extract the PSR overflow (V) flag from SRC into REG as 0 or 1. */
static inline void gen_mov_reg_V(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_OVF_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
285
/* Extract the PSR carry (C) flag from SRC into REG as 0 or 1. */
static inline void gen_mov_reg_C(TCGv reg, TCGv src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, PSR_CARRY_SHIFT);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
292
/* Clear all icc flags; the gen_cc_* helpers below OR bits back in. */
static inline void gen_cc_clear_icc(void)
{
    tcg_gen_movi_i32(cpu_psr, 0);
}
297
#ifdef TARGET_SPARC64
/* Clear all 64-bit (xcc) flags. */
static inline void gen_cc_clear_xcc(void)
{
    tcg_gen_movi_i32(cpu_xcc, 0);
}
#endif
304
/* old op:
   if (!T0)
      env->psr |= PSR_ZERO;
   if ((int32_t) T0 < 0)
      env->psr |= PSR_NEG;
*/
/* Set icc N and Z from the low 32 bits of DST.  Assumes the flags were
   cleared beforehand (gen_cc_clear_icc). */
static inline void gen_cc_NZ_icc(TCGv dst)
{
    TCGv r_temp;
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    /* Z: test the zero-extended low 32 bits */
    tcg_gen_andi_tl(r_temp, dst, 0xffffffffULL);
    tcg_gen_brcondi_tl(TCG_COND_NE, r_temp, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_ZERO);
    gen_set_label(l1);
    /* N: test the sign of the sign-extended low 32 bits */
    tcg_gen_ext_i32_tl(r_temp, dst);
    tcg_gen_brcondi_tl(TCG_COND_GE, r_temp, 0, l2);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_NEG);
    gen_set_label(l2);
    tcg_temp_free(r_temp);
}
329
#ifdef TARGET_SPARC64
/* Set xcc N and Z from the full 64-bit DST (flags cleared beforehand). */
static inline void gen_cc_NZ_xcc(TCGv dst)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_NE, dst, 0, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_ZERO);
    gen_set_label(l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, dst, 0, l2);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_NEG);
    gen_set_label(l2);
}
#endif
345
346 /* old op:
347 if (T0 < src1)
348 env->psr |= PSR_CARRY;
349 */
350 static inline void gen_cc_C_add_icc(TCGv dst, TCGv src1)
351 {
352 TCGv r_temp1, r_temp2;
353 int l1;
354
355 l1 = gen_new_label();
356 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
357 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
358 tcg_gen_andi_tl(r_temp1, dst, 0xffffffffULL);
359 tcg_gen_andi_tl(r_temp2, src1, 0xffffffffULL);
360 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
361 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
362 gen_set_label(l1);
363 tcg_temp_free(r_temp1);
364 tcg_temp_free(r_temp2);
365 }
366
#ifdef TARGET_SPARC64
/* xcc carry after a 64-bit add: carry out iff dst < src1 (unsigned). */
static inline void gen_cc_C_add_xcc(TCGv dst, TCGv src1)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, dst, src1, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
378
379 /* old op:
380 if (((src1 ^ T1 ^ -1) & (src1 ^ T0)) & (1 << 31))
381 env->psr |= PSR_OVF;
382 */
/* icc overflow after an add: V = bit 31 of
   ((src1 ^ src2 ^ -1) & (src1 ^ dst)), shifted into the PSR_OVF bit. */
static inline void gen_cc_V_add_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    /* move the isolated bit down to the PSR overflow position */
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
}
398
#ifdef TARGET_SPARC64
/* xcc overflow after a 64-bit add: same formula as the icc variant but
   testing bit 63. */
static inline void gen_cc_V_add_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_temp_free(r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
}
#endif
416
/* Raise a TT_TOVF trap when the 32-bit signed add that produced DST
   from SRC1/SRC2 overflowed (used by the trapping tagged-add path). */
static inline void gen_add_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    /* overflow iff bit31 of ((src1 ^ src2 ^ -1) & (src1 ^ dst)) */
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xori_tl(r_temp, r_temp, -1);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
437
/* Tagged arithmetic: set PSR_OVF when either operand has nonzero tag
   bits (the low 2 bits). */
static inline void gen_cc_V_tag(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
449
/* Tagged arithmetic, trapping variant: raise TT_TOVF when either
   operand has nonzero tag bits (the low 2 bits). */
static inline void gen_tag_tv(TCGv src1, TCGv src2)
{
    int l1;
    TCGv r_const;

    l1 = gen_new_label();
    tcg_gen_or_tl(cpu_tmp0, src1, src2);
    tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0x3);
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_tmp0, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
}
464
/* dst = src1 + src2, updating icc (and xcc on sparc64).  Operands are
   latched into cpu_cc_src/cpu_cc_src2 first so the flag helpers can
   re-read them even if dst aliases a source. */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
482
/* dst = src1 + src2 + C (ADDX with flag update).  The addition is done
   in two steps (src1 + C, then + src2) and the carry flag is
   accumulated after each step. */
static inline void gen_op_addx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    /* carry from the first partial sum */
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_cc_NZ_icc(cpu_cc_dst);
    /* carry/overflow from the final sum */
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
506
/* Tagged add with flag update (TADDcc): like gen_op_add_cc, but also
   sets PSR_OVF when either operand has nonzero tag bits. */
static inline void gen_op_tadd_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
525
/* Tagged add, trap-on-overflow variant (TADDccTV): traps on tag bits or
   signed overflow before committing flags/result. */
static inline void gen_op_tadd_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_add_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_add_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_add_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
544
545 /* old op:
546 if (src1 < T1)
547 env->psr |= PSR_CARRY;
548 */
549 static inline void gen_cc_C_sub_icc(TCGv src1, TCGv src2)
550 {
551 TCGv r_temp1, r_temp2;
552 int l1;
553
554 l1 = gen_new_label();
555 r_temp1 = tcg_temp_new(TCG_TYPE_TL);
556 r_temp2 = tcg_temp_new(TCG_TYPE_TL);
557 tcg_gen_andi_tl(r_temp1, src1, 0xffffffffULL);
558 tcg_gen_andi_tl(r_temp2, src2, 0xffffffffULL);
559 tcg_gen_brcond_tl(TCG_COND_GEU, r_temp1, r_temp2, l1);
560 tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_CARRY);
561 gen_set_label(l1);
562 tcg_temp_free(r_temp1);
563 tcg_temp_free(r_temp2);
564 }
565
#ifdef TARGET_SPARC64
/* xcc borrow after a 64-bit subtract: set iff src1 < src2 (unsigned). */
static inline void gen_cc_C_sub_xcc(TCGv src1, TCGv src2)
{
    int l1;

    l1 = gen_new_label();
    tcg_gen_brcond_tl(TCG_COND_GEU, src1, src2, l1);
    tcg_gen_ori_i32(cpu_xcc, cpu_xcc, PSR_CARRY);
    gen_set_label(l1);
}
#endif
577
578 /* old op:
579 if (((src1 ^ T1) & (src1 ^ T0)) & (1 << 31))
580 env->psr |= PSR_OVF;
581 */
/* icc overflow after a subtract: V = bit 31 of
   ((src1 ^ src2) & (src1 ^ dst)), shifted into the PSR_OVF bit. */
static inline void gen_cc_V_sub_icc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_shri_tl(r_temp, r_temp, 31 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_psr, cpu_psr, cpu_tmp32);
    tcg_temp_free(r_temp);
}
596
#ifdef TARGET_SPARC64
/* xcc overflow after a 64-bit subtract: same formula as the icc variant
   but testing bit 63. */
static inline void gen_cc_V_sub_xcc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 63));
    tcg_gen_shri_tl(r_temp, r_temp, 63 - PSR_OVF_SHIFT);
    tcg_gen_trunc_tl_i32(cpu_tmp32, r_temp);
    tcg_gen_or_i32(cpu_xcc, cpu_xcc, cpu_tmp32);
    tcg_temp_free(r_temp);
}
#endif
613
/* Raise a TT_TOVF trap when the 32-bit signed subtract that produced
   DST from SRC1/SRC2 overflowed (used by the trapping tagged-sub path). */
static inline void gen_sub_tv(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_const;
    int l1;

    l1 = gen_new_label();

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    /* overflow iff bit31 of ((src1 ^ src2) & (src1 ^ dst)) */
    tcg_gen_xor_tl(r_temp, src1, src2);
    tcg_gen_xor_tl(cpu_tmp0, src1, dst);
    tcg_gen_and_tl(r_temp, r_temp, cpu_tmp0);
    tcg_gen_andi_tl(r_temp, r_temp, (1ULL << 31));
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_temp, 0, l1);
    r_const = tcg_const_i32(TT_TOVF);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
    gen_set_label(l1);
    tcg_temp_free(r_temp);
}
633
/* dst = src1 - src2, updating icc (and xcc on sparc64).  Operands are
   latched into cpu_cc_src/cpu_cc_src2 before the subtraction. */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
651
/* dst = src1 - src2 - C (SUBX with flag update).  Done in two steps
   (src1 - C, then - src2), accumulating the borrow after each step,
   mirroring gen_op_addx_cc. */
static inline void gen_op_subx_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_mov_reg_C(cpu_tmp0, cpu_psr);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_tmp0);
    gen_cc_clear_icc();
    /* borrow from the first partial difference */
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
#endif
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_dst, cpu_cc_src2);
    gen_cc_NZ_icc(cpu_cc_dst);
    /* borrow/overflow from the final difference */
    gen_cc_C_sub_icc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_dst, cpu_cc_src);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
675
/* Tagged subtract with flag update (TSUBcc): like gen_op_sub_cc, but
   also sets PSR_OVF when either operand has nonzero tag bits. */
static inline void gen_op_tsub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_V_tag(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
694
/* Tagged subtract, trap-on-overflow variant (TSUBccTV): traps on tag
   bits or signed overflow before committing flags/result. */
static inline void gen_op_tsub_ccTV(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_tag_tv(cpu_cc_src, cpu_cc_src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_sub_tv(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_C_sub_icc(cpu_cc_src, cpu_cc_src2);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
    gen_cc_C_sub_xcc(cpu_cc_src, cpu_cc_src2);
    gen_cc_V_sub_xcc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
#endif
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
713
/* One step of the MULScc multiply-step instruction: conditionally zero
   src2 on Y's lsb, rotate src1's lsb into Y, shift N^V into src1's top
   bit, then add and set the icc flags.  The emission order is
   significant (Y must be read before it is updated). */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;
    int l1;

    l1 = gen_new_label();
    r_temp = tcg_temp_new(TCG_TYPE_TL);
    r_temp2 = tcg_temp_new(TCG_TYPE_I32);

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_ld32u_tl(r_temp, cpu_env, offsetof(CPUSPARCState, y));
    tcg_gen_trunc_tl_i32(r_temp2, r_temp);
    tcg_gen_andi_i32(r_temp2, r_temp2, 0x1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_brcondi_i32(TCG_COND_NE, r_temp2, 0, l1);
    tcg_gen_movi_tl(cpu_cc_src2, 0);
    gen_set_label(l1);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_trunc_tl_i32(r_temp2, cpu_cc_src);
    tcg_gen_andi_i32(r_temp2, r_temp2, 0x1);
    tcg_gen_shli_i32(r_temp2, r_temp2, 31);
    tcg_gen_ld_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, y));
    tcg_gen_shri_i32(cpu_tmp32, cpu_tmp32, 1);
    tcg_gen_or_i32(cpu_tmp32, cpu_tmp32, r_temp2);
    tcg_temp_free(r_temp2);
    tcg_gen_st_i32(cpu_tmp32, cpu_env, offsetof(CPUSPARCState, y));

    // b1 = N ^ V;
    gen_mov_reg_N(cpu_tmp0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(cpu_tmp0, cpu_tmp0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(cpu_tmp0, cpu_tmp0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, cpu_tmp0);

    /* do addition and update flags */
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    gen_cc_V_add_icc(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    gen_cc_C_add_icc(cpu_cc_dst, cpu_cc_src);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
768
/* UMUL: 32x32 -> 64 unsigned multiply.  The high 32 bits of the product
   go to the Y register; dst receives the full 64-bit product on sparc64
   and the truncated low word on sparc32. */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_extu_tl_i64(r_temp, src2);
    tcg_gen_extu_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* store the high 32 bits of the product into Y */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_i32(r_temp, r_temp);
    tcg_gen_st_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
    tcg_temp_free(r_temp);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free(r_temp2);
}
791
/* SMUL: 32x32 -> 64 signed multiply.  High 32 bits of the product go
   to Y; dst receives the full product (sparc64) or low word (sparc32). */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, r_temp2;

    r_temp = tcg_temp_new(TCG_TYPE_I64);
    r_temp2 = tcg_temp_new(TCG_TYPE_I64);

    tcg_gen_ext_tl_i64(r_temp, src2);
    tcg_gen_ext_tl_i64(r_temp2, src1);
    tcg_gen_mul_i64(r_temp2, r_temp, r_temp2);

    /* store the high 32 bits of the product into Y */
    tcg_gen_shri_i64(r_temp, r_temp2, 32);
    tcg_gen_trunc_i64_i32(r_temp, r_temp);
    tcg_gen_st_i32(r_temp, cpu_env, offsetof(CPUSPARCState, y));
    tcg_temp_free(r_temp);
#ifdef TARGET_SPARC64
    tcg_gen_mov_i64(dst, r_temp2);
#else
    tcg_gen_trunc_i64_tl(dst, r_temp2);
#endif
    tcg_temp_free(r_temp2);
}
814
815 #ifdef TARGET_SPARC64
816 static inline void gen_trap_ifdivzero_tl(TCGv divisor)
817 {
818 TCGv r_const;
819 int l1;
820
821 l1 = gen_new_label();
822 tcg_gen_brcondi_tl(TCG_COND_NE, divisor, 0, l1);
823 r_const = tcg_const_i32(TT_DIV_ZERO);
824 tcg_gen_helper_0_1(raise_exception, r_const);
825 tcg_temp_free(r_const);
826 gen_set_label(l1);
827 }
828
/* SDIVX: 64-bit signed division.  Traps on a zero divisor, and
   special-cases INT64_MIN / -1 (which would overflow the host div) to
   yield INT64_MIN as the architecture requires. */
static inline void gen_op_sdivx(TCGv dst, TCGv src1, TCGv src2)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    gen_trap_ifdivzero_tl(cpu_cc_src2);
    /* take the generic path unless dividend == INT64_MIN && divisor == -1 */
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src, INT64_MIN, l1);
    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_cc_src2, -1, l1);
    tcg_gen_movi_i64(dst, INT64_MIN);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_div_i64(dst, cpu_cc_src, cpu_cc_src2);
    gen_set_label(l2);
}
846 #endif
847
/* Set icc after a division: N/Z from the result; PSR_OVF when
   cpu_cc_src2 is nonzero.  NOTE(review): cpu_cc_src2 here presumably
   holds an overflow indicator left by the division helper — confirm
   against the DIV emitters elsewhere in this file. */
static inline void gen_op_div_cc(TCGv dst)
{
    int l1;

    tcg_gen_mov_tl(cpu_cc_dst, dst);
    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
    l1 = gen_new_label();
    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cc_src2, 0, l1);
    tcg_gen_ori_i32(cpu_psr, cpu_psr, PSR_OVF);
    gen_set_label(l1);
}
860
/* Flag update for logical ops: N and Z from the result; C and V end up
   cleared (gen_cc_clear then only NZ is set). */
static inline void gen_op_logic_cc(TCGv dst)
{
    tcg_gen_mov_tl(cpu_cc_dst, dst);

    gen_cc_clear_icc();
    gen_cc_NZ_icc(cpu_cc_dst);
#ifdef TARGET_SPARC64
    gen_cc_clear_xcc();
    gen_cc_NZ_xcc(cpu_cc_dst);
#endif
}
872
// ba (branch always): condition is constant 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
878
// be (branch on equal): Z
static inline void gen_op_eval_be(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
}
884
// ble (branch on less or equal): Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
894
// bl (branch on less): N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}
902
// bleu (branch on less or equal, unsigned): C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
910
// bcs (branch on carry set): C
static inline void gen_op_eval_bcs(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
}
916
// bvs (branch on overflow set): V
static inline void gen_op_eval_bvs(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
}
922
// bn (branch never): condition is constant 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
928
// bneg (branch on negative): N
static inline void gen_op_eval_bneg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
}
934
// bne (branch on not equal): !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
941
// bg (branch on greater): !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv src)
{
    gen_mov_reg_N(cpu_tmp0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    gen_mov_reg_Z(cpu_tmp0, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
952
// bge (branch on greater or equal): !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv src)
{
    gen_mov_reg_V(cpu_tmp0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
961
// bgu (branch on greater, unsigned): !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv src)
{
    gen_mov_reg_Z(cpu_tmp0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
970
// bcc (branch on carry clear): !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
977
// bpos (branch on positive): !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
984
// bvc (branch on overflow clear): !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
991
992 /*
993 FPSR bit field FCC1 | FCC0:
994 0 =
995 1 <
996 2 >
997 3 unordered
998 */
/* Extract bit FCC0 of the fcc field at FCC_OFFSET from the FSR value in
   SRC into REG (0 or 1). */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
1006
/* Extract bit FCC1 of the fcc field at FCC_OFFSET from the FSR value in
   SRC into REG (0 or 1). */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_shri_tl(reg, reg, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
1014
// fbne (not equal), fcc != 0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
}
1023
// fblg (less or greater), fcc == 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
}
1032
// fbul (unordered or less), fcc == 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
1039
// fbl (less), fcc == 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
1049
// fbug (unordered or greater), fcc == 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
1056
// 2: !FCC0 & FCC1
/* fbg: true only for fcc == 2 ('>'). Clobbers cpu_tmp0. */
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
1066
// 3: FCC0 & FCC1
/* fbu: true only for fcc == 3 (unordered). Clobbers cpu_tmp0. */
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
}
1075
// 0: !(FCC0 | FCC1)
/* fbe: true only for fcc == 0 ('='). Clobbers cpu_tmp0. */
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1085
// 0 or 3: !(FCC0 ^ FCC1)
/* fbue: true for fcc == 0 ('=') or 3 (unordered). Clobbers cpu_tmp0. */
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1095
// 0 or 2: !FCC0
/* fbge: true for fcc == 0 ('=') or 2 ('>'); that is !FCC0. */
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1103
// !1: !(FCC0 & !FCC1)
/* fbuge: true for any fcc except 1 ('<'). Clobbers cpu_tmp0. */
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_xori_tl(cpu_tmp0, cpu_tmp0, 0x1);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1114
// 0 or 1: !FCC1
/* fble: true for fcc == 0 ('=') or 1 ('<'); that is !FCC1. */
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1122
// !2: !(!FCC0 & FCC1)
/* fbule: true for any fcc except 2 ('>'). Clobbers cpu_tmp0. */
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                     unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1133
// !3: !(FCC0 & FCC1)
/* fbo: true for any fcc except 3, i.e. the operands were ordered.
   Clobbers cpu_tmp0. */
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                   unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(cpu_tmp0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, cpu_tmp0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
1143
1144 static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
1145 target_ulong pc2, TCGv r_cond)
1146 {
1147 int l1;
1148
1149 l1 = gen_new_label();
1150
1151 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1152
1153 gen_goto_tb(dc, 0, pc1, pc1 + 4);
1154
1155 gen_set_label(l1);
1156 gen_goto_tb(dc, 1, pc2, pc2 + 4);
1157 }
1158
1159 static inline void gen_branch_a(DisasContext *dc, target_ulong pc1,
1160 target_ulong pc2, TCGv r_cond)
1161 {
1162 int l1;
1163
1164 l1 = gen_new_label();
1165
1166 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1167
1168 gen_goto_tb(dc, 0, pc2, pc1);
1169
1170 gen_set_label(l1);
1171 gen_goto_tb(dc, 1, pc2 + 4, pc2 + 8);
1172 }
1173
1174 static inline void gen_generic_branch(target_ulong npc1, target_ulong npc2,
1175 TCGv r_cond)
1176 {
1177 int l1, l2;
1178
1179 l1 = gen_new_label();
1180 l2 = gen_new_label();
1181
1182 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
1183
1184 tcg_gen_movi_tl(cpu_npc, npc1);
1185 tcg_gen_br(l2);
1186
1187 gen_set_label(l1);
1188 tcg_gen_movi_tl(cpu_npc, npc2);
1189 gen_set_label(l2);
1190 }
1191
/* call this function before using the condition register as it may
   have been set for a jump */
/* If a conditional branch is pending (npc == JUMP_PC), resolve cpu_npc
   from 'cond' now so 'cond' can be safely reused. */
static inline void flush_cond(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    }
}
1201
/* Make cpu_npc match dc->npc: resolve a pending conditional branch via
   'cond', or store the static npc. Nothing to do if npc is already
   dynamic (cpu_npc is then up to date). */
static inline void save_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1211
/* Flush the translation-time pc/npc into cpu_pc/cpu_npc, e.g. before a
   helper call that may raise an exception. */
static inline void save_state(DisasContext *dc, TCGv cond)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc, cond);
}
1217
/* Advance pc to npc (the delay-slot step), handling all three npc
   states: pending conditional branch, dynamic npc, or static npc. */
static inline void gen_mov_pc_npc(DisasContext *dc, TCGv cond)
{
    if (dc->npc == JUMP_PC) {
        /* Resolve the pending branch first, then copy the result. */
        gen_generic_branch(dc->jump_pc[0], dc->jump_pc[1], cond);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        /* Both values static: no code needs to be emitted. */
        dc->pc = dc->npc;
    }
}
1231
/* Emit pc = npc; npc += 4 — sequential advance for a dynamic npc. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1237
/* Evaluate integer branch condition 'cond' (the 4-bit Bicc/BPcc cond
   field) against the selected condition codes and leave 0/1 in r_dst.
   On sparc64, cc selects between icc (cpu_psr) and xcc (cpu_xcc);
   sparc32 only has the psr flags. */
static inline void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    TCGv r_src;

#ifdef TARGET_SPARC64
    if (cc)
        r_src = cpu_xcc;
    else
        r_src = cpu_psr;
#else
    r_src = cpu_psr;
#endif
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_be(r_dst, r_src);
        break;
    case 0x2:
        gen_op_eval_ble(r_dst, r_src);
        break;
    case 0x3:
        gen_op_eval_bl(r_dst, r_src);
        break;
    case 0x4:
        gen_op_eval_bleu(r_dst, r_src);
        break;
    case 0x5:
        gen_op_eval_bcs(r_dst, r_src);
        break;
    case 0x6:
        gen_op_eval_bneg(r_dst, r_src);
        break;
    case 0x7:
        gen_op_eval_bvs(r_dst, r_src);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_bne(r_dst, r_src);
        break;
    case 0xa:
        gen_op_eval_bg(r_dst, r_src);
        break;
    case 0xb:
        gen_op_eval_bge(r_dst, r_src);
        break;
    case 0xc:
        gen_op_eval_bgu(r_dst, r_src);
        break;
    case 0xd:
        gen_op_eval_bcc(r_dst, r_src);
        break;
    case 0xe:
        gen_op_eval_bpos(r_dst, r_src);
        break;
    case 0xf:
        gen_op_eval_bvc(r_dst, r_src);
        break;
    }
}
1301
/* Evaluate floating-point branch condition 'cond' (the 4-bit FBfcc/
   FBPfcc cond field) against FSR condition field fcc'cc' and leave 0/1
   in r_dst. The offset is added to FSR_FCC0/1_SHIFT inside the
   gen_mov_reg_FCC* helpers to address fcc1..fcc3 instead of fcc0. */
static inline void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    unsigned int offset;

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1373
1374 #ifdef TARGET_SPARC64
1375 // Inverted logic
1376 static const int gen_tcg_cond_reg[8] = {
1377 -1,
1378 TCG_COND_NE,
1379 TCG_COND_GT,
1380 TCG_COND_GE,
1381 -1,
1382 TCG_COND_EQ,
1383 TCG_COND_LE,
1384 TCG_COND_LT,
1385 };
1386
1387 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1388 {
1389 int l1;
1390
1391 l1 = gen_new_label();
1392 tcg_gen_movi_tl(r_dst, 0);
1393 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], r_src, 0, l1);
1394 tcg_gen_movi_tl(r_dst, 1);
1395 gen_set_label(l1);
1396 }
1397 #endif
1398
/* XXX: potentially incorrect if dynamic npc */
/* Translate an integer conditional branch (Bicc/BPcc). 'offset' is the
   sign-extended, shifted displacement; 'cc' selects icc/xcc on
   sparc64. Handles the "never"/"always" encodings statically; other
   conditions leave npc in the two-valued JUMP_PC state (or end the TB
   when the annul bit 'a' is set). */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                      TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* annul bit set: the delay slot is skipped */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* annul bit set: the delay slot is skipped */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_cond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            /* npc becomes either the branch target or the fallthrough,
               resolved later from r_cond (JUMP_PC state). */
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1438
/* XXX: potentially incorrect if dynamic npc */
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).
   Identical structure to do_branch, but the condition is evaluated
   from the FSR fcc'cc' field via gen_fcond. */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc,
                       TCGv r_cond)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
        }
    } else {
        flush_cond(dc, r_cond);
        gen_fcond(r_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target, dc->npc, r_cond);
            dc->is_br = 1;
        } else {
            dc->pc = dc->npc;
            dc->jump_pc[0] = target;
            dc->jump_pc[1] = dc->npc + 4;
            dc->npc = JUMP_PC;
        }
    }
}
1478
1479 #ifdef TARGET_SPARC64
/* XXX: potentially incorrect if dynamic npc */
/* Translate a V9 branch-on-register-condition (BPr). Unlike do_branch
   there are no "never"/"always" encodings, so the condition is always
   evaluated against r_reg. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_cond, TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    flush_cond(dc, r_cond);
    gen_cond_reg(r_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target, dc->npc, r_cond);
        dc->is_br = 1;
    } else {
        dc->pc = dc->npc;
        dc->jump_pc[0] = target;
        dc->jump_pc[1] = dc->npc + 4;
        dc->npc = JUMP_PC;
    }
}
1499
1500 static GenOpFunc * const gen_fcmps[4] = {
1501 helper_fcmps,
1502 helper_fcmps_fcc1,
1503 helper_fcmps_fcc2,
1504 helper_fcmps_fcc3,
1505 };
1506
1507 static GenOpFunc * const gen_fcmpd[4] = {
1508 helper_fcmpd,
1509 helper_fcmpd_fcc1,
1510 helper_fcmpd_fcc2,
1511 helper_fcmpd_fcc3,
1512 };
1513
1514 static GenOpFunc * const gen_fcmpq[4] = {
1515 helper_fcmpq,
1516 helper_fcmpq_fcc1,
1517 helper_fcmpq_fcc2,
1518 helper_fcmpq_fcc3,
1519 };
1520
1521 static GenOpFunc * const gen_fcmpes[4] = {
1522 helper_fcmpes,
1523 helper_fcmpes_fcc1,
1524 helper_fcmpes_fcc2,
1525 helper_fcmpes_fcc3,
1526 };
1527
1528 static GenOpFunc * const gen_fcmped[4] = {
1529 helper_fcmped,
1530 helper_fcmped_fcc1,
1531 helper_fcmped_fcc2,
1532 helper_fcmped_fcc3,
1533 };
1534
1535 static GenOpFunc * const gen_fcmpeq[4] = {
1536 helper_fcmpeq,
1537 helper_fcmpeq_fcc1,
1538 helper_fcmpeq_fcc2,
1539 helper_fcmpeq_fcc3,
1540 };
1541
/* Emit a call to the fcmps helper targeting fcc field 'fccno'. */
static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmps[fccno]);
}
1546
/* Emit a call to the fcmpd helper targeting fcc field 'fccno'. */
static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpd[fccno]);
}
1551
/* Emit a call to the fcmpq helper targeting fcc field 'fccno'. */
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpq[fccno]);
}
1556
/* Emit a call to the fcmpes helper targeting fcc field 'fccno'. */
static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpes[fccno]);
}
1561
/* Emit a call to the fcmped helper targeting fcc field 'fccno'. */
static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmped[fccno]);
}
1566
/* Emit a call to the fcmpeq helper targeting fcc field 'fccno'. */
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(gen_fcmpeq[fccno]);
}
1571
1572 #else
1573
/* sparc32 has only fcc0, so 'fccno' is ignored. */
static inline void gen_op_fcmps(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmps);
}
1578
/* sparc32 has only fcc0, so 'fccno' is ignored. */
static inline void gen_op_fcmpd(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpd);
}
1583
/* sparc32 has only fcc0, so 'fccno' is ignored. */
static inline void gen_op_fcmpq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpq);
}
1588
/* sparc32 has only fcc0, so 'fccno' is ignored. */
static inline void gen_op_fcmpes(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpes);
}
1593
/* sparc32 has only fcc0, so 'fccno' is ignored. */
static inline void gen_op_fcmped(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmped);
}
1598
/* sparc32 has only fcc0, so 'fccno' is ignored. */
static inline void gen_op_fcmpeq(int fccno)
{
    tcg_gen_helper_0_0(helper_fcmpeq);
}
1603 #endif
1604
/* Record the FP trap type in FSR.ftt and raise a TT_FP_EXCP trap.
   Does not return control to the generated code path. */
static inline void gen_op_fpexception_im(int fsr_flags)
{
    TCGv r_const;

    /* replace any previous ftt with the new one */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~FSR_FTT_MASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    r_const = tcg_const_i32(TT_FP_EXCP);
    tcg_gen_helper_0_1(raise_exception, r_const);
    tcg_temp_free(r_const);
}
1615
/* If the FPU is disabled (system emulation only), emit a TT_NFPU_INSN
   trap and end the TB; returns 1 in that case so the caller can skip
   the rest of the instruction, 0 otherwise. Always 0 for user mode. */
static int gen_trap_ifnofpu(DisasContext *dc, TCGv r_cond)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        TCGv r_const;

        /* the trap needs an exact pc/npc */
        save_state(dc, r_cond);
        r_const = tcg_const_i32(TT_NFPU_INSN);
        tcg_gen_helper_0_1(raise_exception, r_const);
        tcg_temp_free(r_const);
        dc->is_br = 1;
        return 1;
    }
#endif
    return 0;
}
1632
/* Clear FSR.ftt and the current IEEE exception bits (cexc) before an
   FP operation records fresh status. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, ~(FSR_FTT_MASK | FSR_CEXC_MASK));
}
1637
/* Emit a call that resets the softfloat accrued-exception state. */
static inline void gen_clear_float_exceptions(void)
{
    tcg_gen_helper_0_0(helper_clear_float_exceptions);
}
1642
1643 /* asi moves */
1644 #ifdef TARGET_SPARC64
/* Return a TCG value holding the ASI for this memory instruction:
   the %asi register when the i bit is set (immediate form), otherwise
   the 8-bit ASI field from the instruction. Caller frees the temp. */
static inline TCGv gen_get_asi(int insn, TCGv r_addr)
{
    int asi;
    TCGv r_asi;

    if (IS_IMM) {
        r_asi = tcg_temp_new(TCG_TYPE_I32);
        tcg_gen_ld_i32(r_asi, cpu_env, offsetof(CPUSPARCState, asi));
    } else {
        asi = GET_FIELD(insn, 19, 26);
        r_asi = tcg_const_i32(asi);
    }
    return r_asi;
}
1659
/* Load 'size' bytes (sign-extended if 'sign') from alternate space at
   'addr' into dst, routing through the ld_asi helper. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    tcg_gen_helper_1_4(helper_ld_asi, dst, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1673
/* Store 'size' bytes of src to alternate space at 'addr' via the
   st_asi helper. */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi, r_size;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    tcg_gen_helper_0_4(helper_st_asi, addr, src, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1684
/* FP load from alternate space: the helper writes directly into FP
   register 'rd', so no destination TCGv is needed here. */
static inline void gen_ldf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_4(helper_ldf_asi, addr, r_asi, r_size, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1697
/* FP store to alternate space: the helper reads FP register 'rd'
   itself. */
static inline void gen_stf_asi(TCGv addr, int insn, int size, int rd)
{
    TCGv r_asi, r_size, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(size);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_4(helper_stf_asi, addr, r_asi, r_size, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1710
/* SWAPA: exchange dst with the 32-bit word at 'addr' in alternate
   space. NOTE(review): emulated as a separate load then store, so the
   exchange is not atomic with respect to other CPUs. Clobbers
   cpu_tmp64. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1725
/* LDDA: the helper loads the doubleword and writes both halves of the
   register pair itself. NOTE(review): the 'hi' parameter is unused in
   this (sparc64) variant; it exists to match the sparc32 signature. */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_rd;

    r_asi = gen_get_asi(insn, addr);
    r_rd = tcg_const_i32(rd);
    tcg_gen_helper_0_3(helper_ldda_asi, addr, r_asi, r_rd);
    tcg_temp_free(r_rd);
    tcg_temp_free(r_asi);
}
1736
/* STDA: pack the register pair (hi = even reg, rd+1 = odd reg) into a
   64-bit value and store it to alternate space. Clobbers cpu_tmp64. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_temp, r_asi, r_size;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi,
                       r_temp);
    tcg_temp_free(r_temp);
    r_asi = gen_get_asi(insn, addr);
    r_size = tcg_const_i32(8);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1752
/* CASA: 32-bit compare-and-swap in alternate space. The comparison
   value is the current contents of register rd; the helper returns the
   previous memory value in dst. */
static inline void gen_cas_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                               int rd)
{
    TCGv r_val1, r_asi;

    r_val1 = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd, r_val1);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_cas_asi, dst, addr, r_val1, val2, r_asi);
    tcg_temp_free(r_asi);
    tcg_temp_free(r_val1);
}
1765
/* CASXA: 64-bit compare-and-swap in alternate space. Uses cpu_tmp64 to
   hold the comparison value read from register rd (clobbers it). */
static inline void gen_casx_asi(TCGv dst, TCGv addr, TCGv val2, int insn,
                                int rd)
{
    TCGv r_asi;

    gen_movl_reg_TN(rd, cpu_tmp64);
    r_asi = gen_get_asi(insn, addr);
    tcg_gen_helper_1_4(helper_casx_asi, dst, addr, cpu_tmp64, val2, r_asi);
    tcg_temp_free(r_asi);
}
1776
1777 #elif !defined(CONFIG_USER_ONLY)
1778
/* sparc32 system emulation: load 'size' bytes (sign-extended if
   'sign') from alternate space. The ASI always comes from the
   instruction field here (no %asi register on v8). Clobbers
   cpu_tmp64. */
static inline void gen_ld_asi(TCGv dst, TCGv addr, int insn, int size,
                              int sign)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    r_sign = tcg_const_i32(sign);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1793
/* sparc32 system emulation: store 'size' bytes of src to alternate
   space. Clobbers cpu_tmp64 (helper takes a 64-bit value). */
static inline void gen_st_asi(TCGv src, TCGv addr, int insn, int size)
{
    TCGv r_asi, r_size;

    tcg_gen_extu_tl_i64(cpu_tmp64, src);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(size);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1805
/* sparc32 SWAPA: exchange dst with the word at 'addr'. NOTE(review):
   emulated as separate load and store, so not atomic. Clobbers
   cpu_tmp64. */
static inline void gen_swap_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(4);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_gen_helper_0_4(helper_st_asi, addr, dst, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(dst, cpu_tmp64);
}
1820
/* sparc32 LDDA: load a 64-bit doubleword and split it across the
   register pair — high word to rd (also left in 'hi'), low word to
   rd+1. Clobbers cpu_tmp0 and cpu_tmp64. */
static inline void gen_ldda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_asi, r_size, r_sign;

    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    r_sign = tcg_const_i32(0);
    tcg_gen_helper_1_4(helper_ld_asi, cpu_tmp64, addr, r_asi, r_size, r_sign);
    tcg_temp_free(r_sign);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
    gen_movl_TN_reg(rd + 1, cpu_tmp0);
    tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
    tcg_gen_trunc_i64_tl(hi, cpu_tmp64);
    gen_movl_TN_reg(rd, hi);
}
1838
/* sparc32 STDA: pack the register pair (hi, rd+1) into 64 bits and
   store it to alternate space. Clobbers cpu_tmp64. */
static inline void gen_stda_asi(TCGv hi, TCGv addr, int insn, int rd)
{
    TCGv r_temp, r_asi, r_size;

    r_temp = tcg_temp_new(TCG_TYPE_TL);
    gen_movl_reg_TN(rd + 1, r_temp);
    tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, hi, r_temp);
    tcg_temp_free(r_temp);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(8);
    tcg_gen_helper_0_4(helper_st_asi, addr, cpu_tmp64, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
}
1853 #endif
1854
1855 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* LDSTUBA: load the byte at 'addr' into dst, then store 0xff to it.
   NOTE(review): not atomic (separate load and store). Also, on
   TARGET_SPARC64 the load resolves the ASI via gen_get_asi (honoring
   the %asi-register form when IS_IMM) but the store always re-reads
   the ASI from the instruction field — the two may disagree for the
   immediate form; confirm against the ldstuba semantics. */
static inline void gen_ldstub_asi(TCGv dst, TCGv addr, int insn)
{
    TCGv r_val, r_asi, r_size;

    gen_ld_asi(dst, addr, insn, 1, 0);

    r_val = tcg_const_i64(0xffULL);
    r_asi = tcg_const_i32(GET_FIELD(insn, 19, 26));
    r_size = tcg_const_i32(1);
    tcg_gen_helper_0_4(helper_st_asi, addr, r_val, r_asi, r_size);
    tcg_temp_free(r_size);
    tcg_temp_free(r_asi);
    tcg_temp_free(r_val);
}
1870 #endif
1871
1872 static inline TCGv get_src1(unsigned int insn, TCGv def)
1873 {
1874 TCGv r_rs1 = def;
1875 unsigned int rs1;
1876
1877 rs1 = GET_FIELD(insn, 13, 17);
1878 if (rs1 == 0)
1879 r_rs1 = tcg_const_tl(0); // XXX how to free?
1880 else if (rs1 < 8)
1881 r_rs1 = cpu_gregs[rs1];
1882 else
1883 tcg_gen_ld_tl(def, cpu_regwptr, (rs1 - 8) * sizeof(target_ulong));
1884 return r_rs1;
1885 }
1886
1887 static inline TCGv get_src2(unsigned int insn, TCGv def)
1888 {
1889 TCGv r_rs2 = def;
1890 unsigned int rs2;
1891
1892 if (IS_IMM) { /* immediate */
1893 rs2 = GET_FIELDs(insn, 19, 31);
1894 r_rs2 = tcg_const_tl((int)rs2); // XXX how to free?
1895 } else { /* register */
1896 rs2 = GET_FIELD(insn, 27, 31);
1897 if (rs2 == 0)
1898 r_rs2 = tcg_const_tl(0); // XXX how to free?
1899 else if (rs2 < 8)
1900 r_rs2 = cpu_gregs[rs2];
1901 else
1902 tcg_gen_ld_tl(def, cpu_regwptr, (rs2 - 8) * sizeof(target_ulong));
1903 }
1904 return r_rs2;
1905 }
1906
/* Jump to the illegal-instruction label unless the CPU model
   advertises the given integer-unit feature. Usable only inside
   disas_sparc_insn, which defines the label. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->features & CPU_FEATURE_ ## FEATURE))       \
        goto illegal_insn;
/* Likewise for FPU features; jumps to the no-FPU trap label instead. */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->features & CPU_FEATURE_ ## FEATURE))       \
        goto nfpu_insn;
1913
1914 /* before an instruction, dc->pc must be static */
1915 static void disas_sparc_insn(DisasContext * dc)
1916 {
1917 unsigned int insn, opc, rs1, rs2, rd;
1918
1919 if (unlikely(loglevel & CPU_LOG_TB_OP))
1920 tcg_gen_debug_insn_start(dc->pc);
1921 insn = ldl_code(dc->pc);
1922 opc = GET_FIELD(insn, 0, 1);
1923
1924 rd = GET_FIELD(insn, 2, 6);
1925
1926 cpu_src1 = tcg_temp_new(TCG_TYPE_TL); // const
1927 cpu_src2 = tcg_temp_new(TCG_TYPE_TL); // const
1928
1929 switch (opc) {
1930 case 0: /* branches/sethi */
1931 {
1932 unsigned int xop = GET_FIELD(insn, 7, 9);
1933 int32_t target;
1934 switch (xop) {
1935 #ifdef TARGET_SPARC64
1936 case 0x1: /* V9 BPcc */
1937 {
1938 int cc;
1939
1940 target = GET_FIELD_SP(insn, 0, 18);
1941 target = sign_extend(target, 18);
1942 target <<= 2;
1943 cc = GET_FIELD_SP(insn, 20, 21);
1944 if (cc == 0)
1945 do_branch(dc, target, insn, 0, cpu_cond);
1946 else if (cc == 2)
1947 do_branch(dc, target, insn, 1, cpu_cond);
1948 else
1949 goto illegal_insn;
1950 goto jmp_insn;
1951 }
1952 case 0x3: /* V9 BPr */
1953 {
1954 target = GET_FIELD_SP(insn, 0, 13) |
1955 (GET_FIELD_SP(insn, 20, 21) << 14);
1956 target = sign_extend(target, 16);
1957 target <<= 2;
1958 cpu_src1 = get_src1(insn, cpu_src1);
1959 do_branch_reg(dc, target, insn, cpu_cond, cpu_src1);
1960 goto jmp_insn;
1961 }
1962 case 0x5: /* V9 FBPcc */
1963 {
1964 int cc = GET_FIELD_SP(insn, 20, 21);
1965 if (gen_trap_ifnofpu(dc, cpu_cond))
1966 goto jmp_insn;
1967 target = GET_FIELD_SP(insn, 0, 18);
1968 target = sign_extend(target, 19);
1969 target <<= 2;
1970 do_fbranch(dc, target, insn, cc, cpu_cond);
1971 goto jmp_insn;
1972 }
1973 #else
1974 case 0x7: /* CBN+x */
1975 {
1976 goto ncp_insn;
1977 }
1978 #endif
1979 case 0x2: /* BN+x */
1980 {
1981 target = GET_FIELD(insn, 10, 31);
1982 target = sign_extend(target, 22);
1983 target <<= 2;
1984 do_branch(dc, target, insn, 0, cpu_cond);
1985 goto jmp_insn;
1986 }
1987 case 0x6: /* FBN+x */
1988 {
1989 if (gen_trap_ifnofpu(dc, cpu_cond))
1990 goto jmp_insn;
1991 target = GET_FIELD(insn, 10, 31);
1992 target = sign_extend(target, 22);
1993 target <<= 2;
1994 do_fbranch(dc, target, insn, 0, cpu_cond);
1995 goto jmp_insn;
1996 }
1997 case 0x4: /* SETHI */
1998 if (rd) { // nop
1999 uint32_t value = GET_FIELD(insn, 10, 31);
2000 TCGv r_const;
2001
2002 r_const = tcg_const_tl(value << 10);
2003 gen_movl_TN_reg(rd, r_const);
2004 tcg_temp_free(r_const);
2005 }
2006 break;
2007 case 0x0: /* UNIMPL */
2008 default:
2009 goto illegal_insn;
2010 }
2011 break;
2012 }
2013 break;
2014 case 1:
2015 /*CALL*/ {
2016 target_long target = GET_FIELDs(insn, 2, 31) << 2;
2017 TCGv r_const;
2018
2019 r_const = tcg_const_tl(dc->pc);
2020 gen_movl_TN_reg(15, r_const);
2021 tcg_temp_free(r_const);
2022 target += dc->pc;
2023 gen_mov_pc_npc(dc, cpu_cond);
2024 dc->npc = target;
2025 }
2026 goto jmp_insn;
2027 case 2: /* FPU & Logical Operations */
2028 {
2029 unsigned int xop = GET_FIELD(insn, 7, 12);
2030 if (xop == 0x3a) { /* generate trap */
2031 int cond;
2032
2033 cpu_src1 = get_src1(insn, cpu_src1);
2034 if (IS_IMM) {
2035 rs2 = GET_FIELD(insn, 25, 31);
2036 tcg_gen_addi_tl(cpu_dst, cpu_src1, rs2);
2037 } else {
2038 rs2 = GET_FIELD(insn, 27, 31);
2039 if (rs2 != 0) {
2040 gen_movl_reg_TN(rs2, cpu_src2);
2041 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2042 } else
2043 tcg_gen_mov_tl(cpu_dst, cpu_src1);
2044 }
2045 cond = GET_FIELD(insn, 3, 6);
2046 if (cond == 0x8) {
2047 save_state(dc, cpu_cond);
2048 tcg_gen_helper_0_1(helper_trap, cpu_dst);
2049 } else if (cond != 0) {
2050 TCGv r_cond = tcg_temp_new(TCG_TYPE_TL);
2051 #ifdef TARGET_SPARC64
2052 /* V9 icc/xcc */
2053 int cc = GET_FIELD_SP(insn, 11, 12);
2054
2055 save_state(dc, cpu_cond);
2056 if (cc == 0)
2057 gen_cond(r_cond, 0, cond);
2058 else if (cc == 2)
2059 gen_cond(r_cond, 1, cond);
2060 else
2061 goto illegal_insn;
2062 #else
2063 save_state(dc, cpu_cond);
2064 gen_cond(r_cond, 0, cond);
2065 #endif
2066 tcg_gen_helper_0_2(helper_trapcc, cpu_dst, r_cond);
2067 tcg_temp_free(r_cond);
2068 }
2069 gen_op_next_insn();
2070 tcg_gen_exit_tb(0);
2071 dc->is_br = 1;
2072 goto jmp_insn;
2073 } else if (xop == 0x28) {
2074 rs1 = GET_FIELD(insn, 13, 17);
2075 switch(rs1) {
2076 case 0: /* rdy */
2077 #ifndef TARGET_SPARC64
2078 case 0x01 ... 0x0e: /* undefined in the SPARCv8
2079 manual, rdy on the microSPARC
2080 II */
2081 case 0x0f: /* stbar in the SPARCv8 manual,
2082 rdy on the microSPARC II */
2083 case 0x10 ... 0x1f: /* implementation-dependent in the
2084 SPARCv8 manual, rdy on the
2085 microSPARC II */
2086 #endif
2087 tcg_gen_ld_tl(cpu_tmp0, cpu_env,
2088 offsetof(CPUSPARCState, y));
2089 gen_movl_TN_reg(rd, cpu_tmp0);
2090 break;
2091 #ifdef TARGET_SPARC64
2092 case 0x2: /* V9 rdccr */
2093 tcg_gen_helper_1_0(helper_rdccr, cpu_dst);
2094 gen_movl_TN_reg(rd, cpu_dst);
2095 break;
2096 case 0x3: /* V9 rdasi */
2097 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2098 offsetof(CPUSPARCState, asi));
2099 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2100 gen_movl_TN_reg(rd, cpu_dst);
2101 break;
2102 case 0x4: /* V9 rdtick */
2103 {
2104 TCGv r_tickptr;
2105
2106 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2107 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2108 offsetof(CPUState, tick));
2109 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2110 r_tickptr);
2111 tcg_temp_free(r_tickptr);
2112 gen_movl_TN_reg(rd, cpu_dst);
2113 }
2114 break;
2115 case 0x5: /* V9 rdpc */
2116 {
2117 TCGv r_const;
2118
2119 r_const = tcg_const_tl(dc->pc);
2120 gen_movl_TN_reg(rd, r_const);
2121 tcg_temp_free(r_const);
2122 }
2123 break;
2124 case 0x6: /* V9 rdfprs */
2125 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2126 offsetof(CPUSPARCState, fprs));
2127 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2128 gen_movl_TN_reg(rd, cpu_dst);
2129 break;
2130 case 0xf: /* V9 membar */
2131 break; /* no effect */
2132 case 0x13: /* Graphics Status */
2133 if (gen_trap_ifnofpu(dc, cpu_cond))
2134 goto jmp_insn;
2135 tcg_gen_ld_tl(cpu_tmp0, cpu_env,
2136 offsetof(CPUSPARCState, gsr));
2137 gen_movl_TN_reg(rd, cpu_tmp0);
2138 break;
2139 case 0x17: /* Tick compare */
2140 tcg_gen_ld_tl(cpu_tmp0, cpu_env,
2141 offsetof(CPUSPARCState, tick_cmpr));
2142 gen_movl_TN_reg(rd, cpu_tmp0);
2143 break;
2144 case 0x18: /* System tick */
2145 {
2146 TCGv r_tickptr;
2147
2148 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2149 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2150 offsetof(CPUState, stick));
2151 tcg_gen_helper_1_1(helper_tick_get_count, cpu_dst,
2152 r_tickptr);
2153 tcg_temp_free(r_tickptr);
2154 gen_movl_TN_reg(rd, cpu_dst);
2155 }
2156 break;
2157 case 0x19: /* System tick compare */
2158 tcg_gen_ld_tl(cpu_tmp0, cpu_env,
2159 offsetof(CPUSPARCState, stick_cmpr));
2160 gen_movl_TN_reg(rd, cpu_tmp0);
2161 break;
2162 case 0x10: /* Performance Control */
2163 case 0x11: /* Performance Instrumentation Counter */
2164 case 0x12: /* Dispatch Control */
2165 case 0x14: /* Softint set, WO */
2166 case 0x15: /* Softint clear, WO */
2167 case 0x16: /* Softint write */
2168 #endif
2169 default:
2170 goto illegal_insn;
2171 }
2172 #if !defined(CONFIG_USER_ONLY)
2173 } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
2174 #ifndef TARGET_SPARC64
2175 if (!supervisor(dc))
2176 goto priv_insn;
2177 tcg_gen_helper_1_0(helper_rdpsr, cpu_dst);
2178 #else
2179 CHECK_IU_FEATURE(dc, HYPV);
2180 if (!hypervisor(dc))
2181 goto priv_insn;
2182 rs1 = GET_FIELD(insn, 13, 17);
2183 switch (rs1) {
2184 case 0: // hpstate
2185 // gen_op_rdhpstate();
2186 break;
2187 case 1: // htstate
2188 // gen_op_rdhtstate();
2189 break;
2190 case 3: // hintp
2191 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2192 offsetof(CPUSPARCState, hintp));
2193 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2194 break;
2195 case 5: // htba
2196 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2197 offsetof(CPUSPARCState, htba));
2198 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2199 break;
2200 case 6: // hver
2201 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2202 offsetof(CPUSPARCState, hver));
2203 tcg_gen_ext_i32_tl(cpu_dst, cpu_tmp32);
2204 break;
2205 case 31: // hstick_cmpr
2206 tcg_gen_ld_tl(cpu_dst, cpu_env,
2207 offsetof(CPUSPARCState, hstick_cmpr));
2208 break;
2209 default:
2210 goto illegal_insn;
2211 }
2212 #endif
2213 gen_movl_TN_reg(rd, cpu_dst);
2214 break;
2215 } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
2216 if (!supervisor(dc))
2217 goto priv_insn;
2218 #ifdef TARGET_SPARC64
2219 rs1 = GET_FIELD(insn, 13, 17);
2220 switch (rs1) {
2221 case 0: // tpc
2222 {
2223 TCGv r_tsptr;
2224
2225 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2226 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2227 offsetof(CPUState, tsptr));
2228 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2229 offsetof(trap_state, tpc));
2230 tcg_temp_free(r_tsptr);
2231 }
2232 break;
2233 case 1: // tnpc
2234 {
2235 TCGv r_tsptr;
2236
2237 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2238 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2239 offsetof(CPUState, tsptr));
2240 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2241 offsetof(trap_state, tnpc));
2242 tcg_temp_free(r_tsptr);
2243 }
2244 break;
2245 case 2: // tstate
2246 {
2247 TCGv r_tsptr;
2248
2249 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2250 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2251 offsetof(CPUState, tsptr));
2252 tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
2253 offsetof(trap_state, tstate));
2254 tcg_temp_free(r_tsptr);
2255 }
2256 break;
2257 case 3: // tt
2258 {
2259 TCGv r_tsptr;
2260
2261 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
2262 tcg_gen_ld_ptr(r_tsptr, cpu_env,
2263 offsetof(CPUState, tsptr));
2264 tcg_gen_ld_i32(cpu_tmp0, r_tsptr,
2265 offsetof(trap_state, tt));
2266 tcg_temp_free(r_tsptr);
2267 }
2268 break;
2269 case 4: // tick
2270 {
2271 TCGv r_tickptr;
2272
2273 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
2274 tcg_gen_ld_ptr(r_tickptr, cpu_env,
2275 offsetof(CPUState, tick));
2276 tcg_gen_helper_1_1(helper_tick_get_count, cpu_tmp0,
2277 r_tickptr);
2278 gen_movl_TN_reg(rd, cpu_tmp0);
2279 tcg_temp_free(r_tickptr);
2280 }
2281 break;
2282 case 5: // tba
2283 tcg_gen_ld_tl(cpu_tmp0, cpu_env,
2284 offsetof(CPUSPARCState, tbr));
2285 break;
2286 case 6: // pstate
2287 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2288 offsetof(CPUSPARCState, pstate));
2289 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2290 break;
2291 case 7: // tl
2292 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2293 offsetof(CPUSPARCState, tl));
2294 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2295 break;
2296 case 8: // pil
2297 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2298 offsetof(CPUSPARCState, psrpil));
2299 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2300 break;
2301 case 9: // cwp
2302 tcg_gen_helper_1_0(helper_rdcwp, cpu_tmp0);
2303 break;
2304 case 10: // cansave
2305 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2306 offsetof(CPUSPARCState, cansave));
2307 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2308 break;
2309 case 11: // canrestore
2310 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2311 offsetof(CPUSPARCState, canrestore));
2312 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2313 break;
2314 case 12: // cleanwin
2315 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2316 offsetof(CPUSPARCState, cleanwin));
2317 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2318 break;
2319 case 13: // otherwin
2320 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2321 offsetof(CPUSPARCState, otherwin));
2322 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2323 break;
2324 case 14: // wstate
2325 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2326 offsetof(CPUSPARCState, wstate));
2327 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2328 break;
2329 case 16: // UA2005 gl
2330 CHECK_IU_FEATURE(dc, GL);
2331 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2332 offsetof(CPUSPARCState, gl));
2333 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2334 break;
2335 case 26: // UA2005 strand status
2336 CHECK_IU_FEATURE(dc, HYPV);
2337 if (!hypervisor(dc))
2338 goto priv_insn;
2339 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2340 offsetof(CPUSPARCState, ssr));
2341 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2342 break;
2343 case 31: // ver
2344 tcg_gen_ld_tl(cpu_tmp0, cpu_env,
2345 offsetof(CPUSPARCState, version));
2346 break;
2347 case 15: // fq
2348 default:
2349 goto illegal_insn;
2350 }
2351 #else
2352 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
2353 offsetof(CPUSPARCState, wim));
2354 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_tmp32);
2355 #endif
2356 gen_movl_TN_reg(rd, cpu_tmp0);
2357 break;
2358 } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
2359 #ifdef TARGET_SPARC64
2360 save_state(dc, cpu_cond);
2361 tcg_gen_helper_0_0(helper_flushw);
2362 #else
2363 if (!supervisor(dc))
2364 goto priv_insn;
2365 tcg_gen_ld_tl(cpu_tmp0, cpu_env, offsetof(CPUSPARCState, tbr));
2366 gen_movl_TN_reg(rd, cpu_tmp0);
2367 #endif
2368 break;
2369 #endif
2370 } else if (xop == 0x34) { /* FPU Operations */
2371 if (gen_trap_ifnofpu(dc, cpu_cond))
2372 goto jmp_insn;
2373 gen_op_clear_ieee_excp_and_FTT();
2374 rs1 = GET_FIELD(insn, 13, 17);
2375 rs2 = GET_FIELD(insn, 27, 31);
2376 xop = GET_FIELD(insn, 18, 26);
2377 switch (xop) {
2378 case 0x1: /* fmovs */
2379 gen_op_load_fpr_FT0(rs2);
2380 gen_op_store_FT0_fpr(rd);
2381 break;
2382 case 0x5: /* fnegs */
2383 gen_op_load_fpr_FT1(rs2);
2384 tcg_gen_helper_0_0(helper_fnegs);
2385 gen_op_store_FT0_fpr(rd);
2386 break;
2387 case 0x9: /* fabss */
2388 gen_op_load_fpr_FT1(rs2);
2389 tcg_gen_helper_0_0(helper_fabss);
2390 gen_op_store_FT0_fpr(rd);
2391 break;
2392 case 0x29: /* fsqrts */
2393 CHECK_FPU_FEATURE(dc, FSQRT);
2394 gen_op_load_fpr_FT1(rs2);
2395 gen_clear_float_exceptions();
2396 tcg_gen_helper_0_0(helper_fsqrts);
2397 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2398 gen_op_store_FT0_fpr(rd);
2399 break;
2400 case 0x2a: /* fsqrtd */
2401 CHECK_FPU_FEATURE(dc, FSQRT);
2402 gen_op_load_fpr_DT1(DFPREG(rs2));
2403 gen_clear_float_exceptions();
2404 tcg_gen_helper_0_0(helper_fsqrtd);
2405 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2406 gen_op_store_DT0_fpr(DFPREG(rd));
2407 break;
2408 case 0x2b: /* fsqrtq */
2409 CHECK_FPU_FEATURE(dc, FLOAT128);
2410 gen_op_load_fpr_QT1(QFPREG(rs2));
2411 gen_clear_float_exceptions();
2412 tcg_gen_helper_0_0(helper_fsqrtq);
2413 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2414 gen_op_store_QT0_fpr(QFPREG(rd));
2415 break;
2416 case 0x41:
2417 gen_op_load_fpr_FT0(rs1);
2418 gen_op_load_fpr_FT1(rs2);
2419 gen_clear_float_exceptions();
2420 tcg_gen_helper_0_0(helper_fadds);
2421 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2422 gen_op_store_FT0_fpr(rd);
2423 break;
2424 case 0x42:
2425 gen_op_load_fpr_DT0(DFPREG(rs1));
2426 gen_op_load_fpr_DT1(DFPREG(rs2));
2427 gen_clear_float_exceptions();
2428 tcg_gen_helper_0_0(helper_faddd);
2429 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2430 gen_op_store_DT0_fpr(DFPREG(rd));
2431 break;
2432 case 0x43: /* faddq */
2433 CHECK_FPU_FEATURE(dc, FLOAT128);
2434 gen_op_load_fpr_QT0(QFPREG(rs1));
2435 gen_op_load_fpr_QT1(QFPREG(rs2));
2436 gen_clear_float_exceptions();
2437 tcg_gen_helper_0_0(helper_faddq);
2438 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2439 gen_op_store_QT0_fpr(QFPREG(rd));
2440 break;
2441 case 0x45:
2442 gen_op_load_fpr_FT0(rs1);
2443 gen_op_load_fpr_FT1(rs2);
2444 gen_clear_float_exceptions();
2445 tcg_gen_helper_0_0(helper_fsubs);
2446 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2447 gen_op_store_FT0_fpr(rd);
2448 break;
2449 case 0x46:
2450 gen_op_load_fpr_DT0(DFPREG(rs1));
2451 gen_op_load_fpr_DT1(DFPREG(rs2));
2452 gen_clear_float_exceptions();
2453 tcg_gen_helper_0_0(helper_fsubd);
2454 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2455 gen_op_store_DT0_fpr(DFPREG(rd));
2456 break;
2457 case 0x47: /* fsubq */
2458 CHECK_FPU_FEATURE(dc, FLOAT128);
2459 gen_op_load_fpr_QT0(QFPREG(rs1));
2460 gen_op_load_fpr_QT1(QFPREG(rs2));
2461 gen_clear_float_exceptions();
2462 tcg_gen_helper_0_0(helper_fsubq);
2463 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2464 gen_op_store_QT0_fpr(QFPREG(rd));
2465 break;
2466 case 0x49: /* fmuls */
2467 CHECK_FPU_FEATURE(dc, FMUL);
2468 gen_op_load_fpr_FT0(rs1);
2469 gen_op_load_fpr_FT1(rs2);
2470 gen_clear_float_exceptions();
2471 tcg_gen_helper_0_0(helper_fmuls);
2472 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2473 gen_op_store_FT0_fpr(rd);
2474 break;
2475 case 0x4a: /* fmuld */
2476 CHECK_FPU_FEATURE(dc, FMUL);
2477 gen_op_load_fpr_DT0(DFPREG(rs1));
2478 gen_op_load_fpr_DT1(DFPREG(rs2));
2479 gen_clear_float_exceptions();
2480 tcg_gen_helper_0_0(helper_fmuld);
2481 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2482 gen_op_store_DT0_fpr(DFPREG(rd));
2483 break;
2484 case 0x4b: /* fmulq */
2485 CHECK_FPU_FEATURE(dc, FLOAT128);
2486 CHECK_FPU_FEATURE(dc, FMUL);
2487 gen_op_load_fpr_QT0(QFPREG(rs1));
2488 gen_op_load_fpr_QT1(QFPREG(rs2));
2489 gen_clear_float_exceptions();
2490 tcg_gen_helper_0_0(helper_fmulq);
2491 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2492 gen_op_store_QT0_fpr(QFPREG(rd));
2493 break;
2494 case 0x4d:
2495 gen_op_load_fpr_FT0(rs1);
2496 gen_op_load_fpr_FT1(rs2);
2497 gen_clear_float_exceptions();
2498 tcg_gen_helper_0_0(helper_fdivs);
2499 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2500 gen_op_store_FT0_fpr(rd);
2501 break;
2502 case 0x4e:
2503 gen_op_load_fpr_DT0(DFPREG(rs1));
2504 gen_op_load_fpr_DT1(DFPREG(rs2));
2505 gen_clear_float_exceptions();
2506 tcg_gen_helper_0_0(helper_fdivd);
2507 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2508 gen_op_store_DT0_fpr(DFPREG(rd));
2509 break;
2510 case 0x4f: /* fdivq */
2511 CHECK_FPU_FEATURE(dc, FLOAT128);
2512 gen_op_load_fpr_QT0(QFPREG(rs1));
2513 gen_op_load_fpr_QT1(QFPREG(rs2));
2514 gen_clear_float_exceptions();
2515 tcg_gen_helper_0_0(helper_fdivq);
2516 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2517 gen_op_store_QT0_fpr(QFPREG(rd));
2518 break;
2519 case 0x69:
2520 CHECK_FPU_FEATURE(dc, FSMULD);
2521 gen_op_load_fpr_FT0(rs1);
2522 gen_op_load_fpr_FT1(rs2);
2523 gen_clear_float_exceptions();
2524 tcg_gen_helper_0_0(helper_fsmuld);
2525 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2526 gen_op_store_DT0_fpr(DFPREG(rd));
2527 break;
2528 case 0x6e: /* fdmulq */
2529 CHECK_FPU_FEATURE(dc, FLOAT128);
2530 gen_op_load_fpr_DT0(DFPREG(rs1));
2531 gen_op_load_fpr_DT1(DFPREG(rs2));
2532 gen_clear_float_exceptions();
2533 tcg_gen_helper_0_0(helper_fdmulq);
2534 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2535 gen_op_store_QT0_fpr(QFPREG(rd));
2536 break;
2537 case 0xc4:
2538 gen_op_load_fpr_FT1(rs2);
2539 gen_clear_float_exceptions();
2540 tcg_gen_helper_0_0(helper_fitos);
2541 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2542 gen_op_store_FT0_fpr(rd);
2543 break;
2544 case 0xc6:
2545 gen_op_load_fpr_DT1(DFPREG(rs2));
2546 gen_clear_float_exceptions();
2547 tcg_gen_helper_0_0(helper_fdtos);
2548 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2549 gen_op_store_FT0_fpr(rd);
2550 break;
2551 case 0xc7: /* fqtos */
2552 CHECK_FPU_FEATURE(dc, FLOAT128);
2553 gen_op_load_fpr_QT1(QFPREG(rs2));
2554 gen_clear_float_exceptions();
2555 tcg_gen_helper_0_0(helper_fqtos);
2556 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2557 gen_op_store_FT0_fpr(rd);
2558 break;
2559 case 0xc8:
2560 gen_op_load_fpr_FT1(rs2);
2561 tcg_gen_helper_0_0(helper_fitod);
2562 gen_op_store_DT0_fpr(DFPREG(rd));
2563 break;
2564 case 0xc9:
2565 gen_op_load_fpr_FT1(rs2);
2566 tcg_gen_helper_0_0(helper_fstod);
2567 gen_op_store_DT0_fpr(DFPREG(rd));
2568 break;
2569 case 0xcb: /* fqtod */
2570 CHECK_FPU_FEATURE(dc, FLOAT128);
2571 gen_op_load_fpr_QT1(QFPREG(rs2));
2572 gen_clear_float_exceptions();
2573 tcg_gen_helper_0_0(helper_fqtod);
2574 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2575 gen_op_store_DT0_fpr(DFPREG(rd));
2576 break;
2577 case 0xcc: /* fitoq */
2578 CHECK_FPU_FEATURE(dc, FLOAT128);
2579 gen_op_load_fpr_FT1(rs2);
2580 tcg_gen_helper_0_0(helper_fitoq);
2581 gen_op_store_QT0_fpr(QFPREG(rd));
2582 break;
2583 case 0xcd: /* fstoq */
2584 CHECK_FPU_FEATURE(dc, FLOAT128);
2585 gen_op_load_fpr_FT1(rs2);
2586 tcg_gen_helper_0_0(helper_fstoq);
2587 gen_op_store_QT0_fpr(QFPREG(rd));
2588 break;
2589 case 0xce: /* fdtoq */
2590 CHECK_FPU_FEATURE(dc, FLOAT128);
2591 gen_op_load_fpr_DT1(DFPREG(rs2));
2592 tcg_gen_helper_0_0(helper_fdtoq);
2593 gen_op_store_QT0_fpr(QFPREG(rd));
2594 break;
2595 case 0xd1:
2596 gen_op_load_fpr_FT1(rs2);
2597 gen_clear_float_exceptions();
2598 tcg_gen_helper_0_0(helper_fstoi);
2599 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2600 gen_op_store_FT0_fpr(rd);
2601 break;
2602 case 0xd2:
2603 gen_op_load_fpr_DT1(DFPREG(rs2));
2604 gen_clear_float_exceptions();
2605 tcg_gen_helper_0_0(helper_fdtoi);
2606 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2607 gen_op_store_FT0_fpr(rd);
2608 break;
2609 case 0xd3: /* fqtoi */
2610 CHECK_FPU_FEATURE(dc, FLOAT128);
2611 gen_op_load_fpr_QT1(QFPREG(rs2));
2612 gen_clear_float_exceptions();
2613 tcg_gen_helper_0_0(helper_fqtoi);
2614 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2615 gen_op_store_FT0_fpr(rd);
2616 break;
2617 #ifdef TARGET_SPARC64
2618 case 0x2: /* V9 fmovd */
2619 gen_op_load_fpr_DT0(DFPREG(rs2));
2620 gen_op_store_DT0_fpr(DFPREG(rd));
2621 break;
2622 case 0x3: /* V9 fmovq */
2623 CHECK_FPU_FEATURE(dc, FLOAT128);
2624 gen_op_load_fpr_QT0(QFPREG(rs2));
2625 gen_op_store_QT0_fpr(QFPREG(rd));
2626 break;
2627 case 0x6: /* V9 fnegd */
2628 gen_op_load_fpr_DT1(DFPREG(rs2));
2629 tcg_gen_helper_0_0(helper_fnegd);
2630 gen_op_store_DT0_fpr(DFPREG(rd));
2631 break;
2632 case 0x7: /* V9 fnegq */
2633 CHECK_FPU_FEATURE(dc, FLOAT128);
2634 gen_op_load_fpr_QT1(QFPREG(rs2));
2635 tcg_gen_helper_0_0(helper_fnegq);
2636 gen_op_store_QT0_fpr(QFPREG(rd));
2637 break;
2638 case 0xa: /* V9 fabsd */
2639 gen_op_load_fpr_DT1(DFPREG(rs2));
2640 tcg_gen_helper_0_0(helper_fabsd);
2641 gen_op_store_DT0_fpr(DFPREG(rd));
2642 break;
2643 case 0xb: /* V9 fabsq */
2644 CHECK_FPU_FEATURE(dc, FLOAT128);
2645 gen_op_load_fpr_QT1(QFPREG(rs2));
2646 tcg_gen_helper_0_0(helper_fabsq);
2647 gen_op_store_QT0_fpr(QFPREG(rd));
2648 break;
2649 case 0x81: /* V9 fstox */
2650 gen_op_load_fpr_FT1(rs2);
2651 gen_clear_float_exceptions();
2652 tcg_gen_helper_0_0(helper_fstox);
2653 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2654 gen_op_store_DT0_fpr(DFPREG(rd));
2655 break;
2656 case 0x82: /* V9 fdtox */
2657 gen_op_load_fpr_DT1(DFPREG(rs2));
2658 gen_clear_float_exceptions();
2659 tcg_gen_helper_0_0(helper_fdtox);
2660 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2661 gen_op_store_DT0_fpr(DFPREG(rd));
2662 break;
2663 case 0x83: /* V9 fqtox */
2664 CHECK_FPU_FEATURE(dc, FLOAT128);
2665 gen_op_load_fpr_QT1(QFPREG(rs2));
2666 gen_clear_float_exceptions();
2667 tcg_gen_helper_0_0(helper_fqtox);
2668 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2669 gen_op_store_DT0_fpr(DFPREG(rd));
2670 break;
2671 case 0x84: /* V9 fxtos */
2672 gen_op_load_fpr_DT1(DFPREG(rs2));
2673 gen_clear_float_exceptions();
2674 tcg_gen_helper_0_0(helper_fxtos);
2675 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2676 gen_op_store_FT0_fpr(rd);
2677 break;
2678 case 0x88: /* V9 fxtod */
2679 gen_op_load_fpr_DT1(DFPREG(rs2));
2680 gen_clear_float_exceptions();
2681 tcg_gen_helper_0_0(helper_fxtod);
2682 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2683 gen_op_store_DT0_fpr(DFPREG(rd));
2684 break;
2685 case 0x8c: /* V9 fxtoq */
2686 CHECK_FPU_FEATURE(dc, FLOAT128);
2687 gen_op_load_fpr_DT1(DFPREG(rs2));
2688 gen_clear_float_exceptions();
2689 tcg_gen_helper_0_0(helper_fxtoq);
2690 tcg_gen_helper_0_0(helper_check_ieee_exceptions);
2691 gen_op_store_QT0_fpr(QFPREG(rd));
2692 break;
2693 #endif
2694 default:
2695 goto illegal_insn;
2696 }
2697 } else if (xop == 0x35) { /* FPU Operations */
2698 #ifdef TARGET_SPARC64
2699 int cond;
2700 #endif
2701 if (gen_trap_ifnofpu(dc, cpu_cond))
2702 goto jmp_insn;
2703 gen_op_clear_ieee_excp_and_FTT();
2704 rs1 = GET_FIELD(insn, 13, 17);
2705 rs2 = GET_FIELD(insn, 27, 31);
2706 xop = GET_FIELD(insn, 18, 26);
2707 #ifdef TARGET_SPARC64
2708 if ((xop & 0x11f) == 0x005) { // V9 fmovsr
2709 int l1;
2710
2711 l1 = gen_new_label();
2712 cond = GET_FIELD_SP(insn, 14, 17);
2713 cpu_src1 = get_src1(insn, cpu_src1);
2714 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2715 0, l1);
2716 gen_op_load_fpr_FT0(rs2);
2717 gen_op_store_FT0_fpr(rd);
2718 gen_set_label(l1);
2719 break;
2720 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
2721 int l1;
2722
2723 l1 = gen_new_label();
2724 cond = GET_FIELD_SP(insn, 14, 17);
2725 cpu_src1 = get_src1(insn, cpu_src1);
2726 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2727 0, l1);
2728 gen_op_load_fpr_DT0(DFPREG(rs2));
2729 gen_op_store_DT0_fpr(DFPREG(rd));
2730 gen_set_label(l1);
2731 break;
2732 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
2733 int l1;
2734
2735 CHECK_FPU_FEATURE(dc, FLOAT128);
2736 l1 = gen_new_label();
2737 cond = GET_FIELD_SP(insn, 14, 17);
2738 cpu_src1 = get_src1(insn, cpu_src1);
2739 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond], cpu_src1,
2740 0, l1);
2741 gen_op_load_fpr_QT0(QFPREG(rs2));
2742 gen_op_store_QT0_fpr(QFPREG(rd));
2743 gen_set_label(l1);
2744 break;
2745 }
2746 #endif
2747 switch (xop) {
2748 #ifdef TARGET_SPARC64
2749 #define FMOVCC(size_FDQ, fcc) \
2750 { \
2751 TCGv r_cond; \
2752 int l1; \
2753 \
2754 l1 = gen_new_label(); \
2755 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2756 cond = GET_FIELD_SP(insn, 14, 17); \
2757 gen_fcond(r_cond, fcc, cond); \
2758 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2759 0, l1); \
2760 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2761 (glue(size_FDQ, FPREG(rs2))); \
2762 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2763 (glue(size_FDQ, FPREG(rd))); \
2764 gen_set_label(l1); \
2765 tcg_temp_free(r_cond); \
2766 }
2767 case 0x001: /* V9 fmovscc %fcc0 */
2768 FMOVCC(F, 0);
2769 break;
2770 case 0x002: /* V9 fmovdcc %fcc0 */
2771 FMOVCC(D, 0);
2772 break;
2773 case 0x003: /* V9 fmovqcc %fcc0 */
2774 CHECK_FPU_FEATURE(dc, FLOAT128);
2775 FMOVCC(Q, 0);
2776 break;
2777 case 0x041: /* V9 fmovscc %fcc1 */
2778 FMOVCC(F, 1);
2779 break;
2780 case 0x042: /* V9 fmovdcc %fcc1 */
2781 FMOVCC(D, 1);
2782 break;
2783 case 0x043: /* V9 fmovqcc %fcc1 */
2784 CHECK_FPU_FEATURE(dc, FLOAT128);
2785 FMOVCC(Q, 1);
2786 break;
2787 case 0x081: /* V9 fmovscc %fcc2 */
2788 FMOVCC(F, 2);
2789 break;
2790 case 0x082: /* V9 fmovdcc %fcc2 */
2791 FMOVCC(D, 2);
2792 break;
2793 case 0x083: /* V9 fmovqcc %fcc2 */
2794 CHECK_FPU_FEATURE(dc, FLOAT128);
2795 FMOVCC(Q, 2);
2796 break;
2797 case 0x0c1: /* V9 fmovscc %fcc3 */
2798 FMOVCC(F, 3);
2799 break;
2800 case 0x0c2: /* V9 fmovdcc %fcc3 */
2801 FMOVCC(D, 3);
2802 break;
2803 case 0x0c3: /* V9 fmovqcc %fcc3 */
2804 CHECK_FPU_FEATURE(dc, FLOAT128);
2805 FMOVCC(Q, 3);
2806 break;
2807 #undef FMOVCC
2808 #define FMOVCC(size_FDQ, icc) \
2809 { \
2810 TCGv r_cond; \
2811 int l1; \
2812 \
2813 l1 = gen_new_label(); \
2814 r_cond = tcg_temp_new(TCG_TYPE_TL); \
2815 cond = GET_FIELD_SP(insn, 14, 17); \
2816 gen_cond(r_cond, icc, cond); \
2817 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, \
2818 0, l1); \
2819 glue(glue(gen_op_load_fpr_, size_FDQ), T0) \
2820 (glue(size_FDQ, FPREG(rs2))); \
2821 glue(glue(gen_op_store_, size_FDQ), T0_fpr) \
2822 (glue(size_FDQ, FPREG(rd))); \
2823 gen_set_label(l1); \
2824 tcg_temp_free(r_cond); \
2825 }
2826
2827 case 0x101: /* V9 fmovscc %icc */
2828 FMOVCC(F, 0);
2829 break;
2830 case 0x102: /* V9 fmovdcc %icc */
2831 FMOVCC(D, 0);
2832 case 0x103: /* V9 fmovqcc %icc */
2833 CHECK_FPU_FEATURE(dc, FLOAT128);
2834 FMOVCC(Q, 0);
2835 break;
2836 case 0x181: /* V9 fmovscc %xcc */
2837 FMOVCC(F, 1);
2838 break;
2839 case 0x182: /* V9 fmovdcc %xcc */
2840 FMOVCC(D, 1);
2841 break;
2842 case 0x183: /* V9 fmovqcc %xcc */
2843 CHECK_FPU_FEATURE(dc, FLOAT128);
2844 FMOVCC(Q, 1);
2845 break;
2846 #undef FMOVCC
2847 #endif
2848 case 0x51: /* fcmps, V9 %fcc */
2849 gen_op_load_fpr_FT0(rs1);
2850 gen_op_load_fpr_FT1(rs2);
2851 gen_op_fcmps(rd & 3);
2852 break;
2853 case 0x52: /* fcmpd, V9 %fcc */
2854 gen_op_load_fpr_DT0(DFPREG(rs1));
2855 gen_op_load_fpr_DT1(DFPREG(rs2));
2856 gen_op_fcmpd(rd & 3);
2857 break;
2858 case 0x53: /* fcmpq, V9 %fcc */
2859 CHECK_FPU_FEATURE(dc, FLOAT128);
2860 gen_op_load_fpr_QT0(QFPREG(rs1));
2861 gen_op_load_fpr_QT1(QFPREG(rs2));
2862 gen_op_fcmpq(rd & 3);
2863 break;
2864 case 0x55: /* fcmpes, V9 %fcc */
2865 gen_op_load_fpr_FT0(rs1);
2866 gen_op_load_fpr_FT1(rs2);
2867 gen_op_fcmpes(rd & 3);
2868 break;
2869 case 0x56: /* fcmped, V9 %fcc */
2870 gen_op_load_fpr_DT0(DFPREG(rs1));
2871 gen_op_load_fpr_DT1(DFPREG(rs2));
2872 gen_op_fcmped(rd & 3);
2873 break;
2874 case 0x57: /* fcmpeq, V9 %fcc */
2875 CHECK_FPU_FEATURE(dc, FLOAT128);
2876 gen_op_load_fpr_QT0(QFPREG(rs1));
2877 gen_op_load_fpr_QT1(QFPREG(rs2));
2878 gen_op_fcmpeq(rd & 3);
2879 break;
2880 default:
2881 goto illegal_insn;
2882 }
2883 } else if (xop == 0x2) {
2884 // clr/mov shortcut
2885
2886 rs1 = GET_FIELD(insn, 13, 17);
2887 if (rs1 == 0) {
2888 // or %g0, x, y -> mov T0, x; mov y, T0
2889 if (IS_IMM) { /* immediate */
2890 TCGv r_const;
2891
2892 rs2 = GET_FIELDs(insn, 19, 31);
2893 r_const = tcg_const_tl((int)rs2);
2894 gen_movl_TN_reg(rd, r_const);
2895 tcg_temp_free(r_const);
2896 } else { /* register */
2897 rs2 = GET_FIELD(insn, 27, 31);
2898 gen_movl_reg_TN(rs2, cpu_dst);
2899 gen_movl_TN_reg(rd, cpu_dst);
2900 }
2901 } else {
2902 cpu_src1 = get_src1(insn, cpu_src1);
2903 if (IS_IMM) { /* immediate */
2904 rs2 = GET_FIELDs(insn, 19, 31);
2905 tcg_gen_ori_tl(cpu_dst, cpu_src1, (int)rs2);
2906 gen_movl_TN_reg(rd, cpu_dst);
2907 } else { /* register */
2908 // or x, %g0, y -> mov T1, x; mov y, T1
2909 rs2 = GET_FIELD(insn, 27, 31);
2910 if (rs2 != 0) {
2911 gen_movl_reg_TN(rs2, cpu_src2);
2912 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
2913 gen_movl_TN_reg(rd, cpu_dst);
2914 } else
2915 gen_movl_TN_reg(rd, cpu_src1);
2916 }
2917 }
2918 #ifdef TARGET_SPARC64
2919 } else if (xop == 0x25) { /* sll, V9 sllx */
2920 cpu_src1 = get_src1(insn, cpu_src1);
2921 if (IS_IMM) { /* immediate */
2922 rs2 = GET_FIELDs(insn, 20, 31);
2923 if (insn & (1 << 12)) {
2924 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2925 } else {
2926 tcg_gen_shli_i64(cpu_dst, cpu_src1, rs2 & 0x1f);
2927 }
2928 } else { /* register */
2929 rs2 = GET_FIELD(insn, 27, 31);
2930 gen_movl_reg_TN(rs2, cpu_src2);
2931 if (insn & (1 << 12)) {
2932 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2933 } else {
2934 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2935 }
2936 tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
2937 }
2938 gen_movl_TN_reg(rd, cpu_dst);
2939 } else if (xop == 0x26) { /* srl, V9 srlx */
2940 cpu_src1 = get_src1(insn, cpu_src1);
2941 if (IS_IMM) { /* immediate */
2942 rs2 = GET_FIELDs(insn, 20, 31);
2943 if (insn & (1 << 12)) {
2944 tcg_gen_shri_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2945 } else {
2946 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2947 tcg_gen_shri_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2948 }
2949 } else { /* register */
2950 rs2 = GET_FIELD(insn, 27, 31);
2951 gen_movl_reg_TN(rs2, cpu_src2);
2952 if (insn & (1 << 12)) {
2953 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2954 tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
2955 } else {
2956 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2957 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2958 tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
2959 }
2960 }
2961 gen_movl_TN_reg(rd, cpu_dst);
2962 } else if (xop == 0x27) { /* sra, V9 srax */
2963 cpu_src1 = get_src1(insn, cpu_src1);
2964 if (IS_IMM) { /* immediate */
2965 rs2 = GET_FIELDs(insn, 20, 31);
2966 if (insn & (1 << 12)) {
2967 tcg_gen_sari_i64(cpu_dst, cpu_src1, rs2 & 0x3f);
2968 } else {
2969 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2970 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
2971 tcg_gen_sari_i64(cpu_dst, cpu_dst, rs2 & 0x1f);
2972 }
2973 } else { /* register */
2974 rs2 = GET_FIELD(insn, 27, 31);
2975 gen_movl_reg_TN(rs2, cpu_src2);
2976 if (insn & (1 << 12)) {
2977 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
2978 tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
2979 } else {
2980 tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
2981 tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
2982 tcg_gen_ext_i32_i64(cpu_dst, cpu_dst);
2983 tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
2984 }
2985 }
2986 gen_movl_TN_reg(rd, cpu_dst);
2987 #endif
2988 } else if (xop < 0x36) {
2989 cpu_src1 = get_src1(insn, cpu_src1);
2990 cpu_src2 = get_src2(insn, cpu_src2);
2991 if (xop < 0x20) {
2992 switch (xop & ~0x10) {
2993 case 0x0:
2994 if (xop & 0x10)
2995 gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
2996 else
2997 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
2998 break;
2999 case 0x1:
3000 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3001 if (xop & 0x10)
3002 gen_op_logic_cc(cpu_dst);
3003 break;
3004 case 0x2:
3005 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3006 if (xop & 0x10)
3007 gen_op_logic_cc(cpu_dst);
3008 break;
3009 case 0x3:
3010 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3011 if (xop & 0x10)
3012 gen_op_logic_cc(cpu_dst);
3013 break;
3014 case 0x4:
3015 if (xop & 0x10)
3016 gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3017 else
3018 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3019 break;
3020 case 0x5:
3021 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3022 tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_tmp0);
3023 if (xop & 0x10)
3024 gen_op_logic_cc(cpu_dst);
3025 break;
3026 case 0x6:
3027 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3028 tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_tmp0);
3029 if (xop & 0x10)
3030 gen_op_logic_cc(cpu_dst);
3031 break;
3032 case 0x7:
3033 tcg_gen_xori_tl(cpu_tmp0, cpu_src2, -1);
3034 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_tmp0);
3035 if (xop & 0x10)
3036 gen_op_logic_cc(cpu_dst);
3037 break;
3038 case 0x8:
3039 if (xop & 0x10)
3040 gen_op_addx_cc(cpu_dst, cpu_src1, cpu_src2);
3041 else {
3042 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3043 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3044 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_tmp0);
3045 }
3046 break;
3047 #ifdef TARGET_SPARC64
3048 case 0x9: /* V9 mulx */
3049 tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3050 break;
3051 #endif
3052 case 0xa:
3053 CHECK_IU_FEATURE(dc, MUL);
3054 gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3055 if (xop & 0x10)
3056 gen_op_logic_cc(cpu_dst);
3057 break;
3058 case 0xb:
3059 CHECK_IU_FEATURE(dc, MUL);
3060 gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3061 if (xop & 0x10)
3062 gen_op_logic_cc(cpu_dst);
3063 break;
3064 case 0xc:
3065 if (xop & 0x10)
3066 gen_op_subx_cc(cpu_dst, cpu_src1, cpu_src2);
3067 else {
3068 gen_mov_reg_C(cpu_tmp0, cpu_psr);
3069 tcg_gen_add_tl(cpu_tmp0, cpu_src2, cpu_tmp0);
3070 tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_tmp0);
3071 }
3072 break;
3073 #ifdef TARGET_SPARC64
3074 case 0xd: /* V9 udivx */
3075 tcg_gen_mov_tl(cpu_cc_src, cpu_src1);
3076 tcg_gen_mov_tl(cpu_cc_src2, cpu_src2);
3077 gen_trap_ifdivzero_tl(cpu_cc_src2);
3078 tcg_gen_divu_i64(cpu_dst, cpu_cc_src, cpu_cc_src2);
3079 break;
3080 #endif
3081 case 0xe:
3082 CHECK_IU_FEATURE(dc, DIV);
3083 tcg_gen_helper_1_2(helper_udiv, cpu_dst, cpu_src1,
3084 cpu_src2);
3085 if (xop & 0x10)
3086 gen_op_div_cc(cpu_dst);
3087 break;
3088 case 0xf:
3089 CHECK_IU_FEATURE(dc, DIV);
3090 tcg_gen_helper_1_2(helper_sdiv, cpu_dst, cpu_src1,
3091 cpu_src2);
3092 if (xop & 0x10)
3093 gen_op_div_cc(cpu_dst);
3094 break;
3095 default:
3096 goto illegal_insn;
3097 }
3098 gen_movl_TN_reg(rd, cpu_dst);
3099 } else {
3100 switch (xop) {
3101 case 0x20: /* taddcc */
3102 gen_op_tadd_cc(cpu_dst, cpu_src1, cpu_src2);
3103 gen_movl_TN_reg(rd, cpu_dst);
3104 break;
3105 case 0x21: /* tsubcc */
3106 gen_op_tsub_cc(cpu_dst, cpu_src1, cpu_src2);
3107 gen_movl_TN_reg(rd, cpu_dst);
3108 break;
3109 case 0x22: /* taddcctv */
3110 save_state(dc, cpu_cond);
3111 gen_op_tadd_ccTV(cpu_dst, cpu_src1, cpu_src2);
3112 gen_movl_TN_reg(rd, cpu_dst);
3113 break;
3114 case 0x23: /* tsubcctv */
3115 save_state(dc, cpu_cond);
3116 gen_op_tsub_ccTV(cpu_dst, cpu_src1, cpu_src2);
3117 gen_movl_TN_reg(rd, cpu_dst);
3118 break;
3119 case 0x24: /* mulscc */
3120 gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
3121 gen_movl_TN_reg(rd, cpu_dst);
3122 break;
3123 #ifndef TARGET_SPARC64
3124 case 0x25: /* sll */
3125 if (IS_IMM) { /* immediate */
3126 rs2 = GET_FIELDs(insn, 20, 31);
3127 tcg_gen_shli_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3128 } else { /* register */
3129 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3130 tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
3131 }
3132 gen_movl_TN_reg(rd, cpu_dst);
3133 break;
3134 case 0x26: /* srl */
3135 if (IS_IMM) { /* immediate */
3136 rs2 = GET_FIELDs(insn, 20, 31);
3137 tcg_gen_shri_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3138 } else { /* register */
3139 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3140 tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
3141 }
3142 gen_movl_TN_reg(rd, cpu_dst);
3143 break;
3144 case 0x27: /* sra */
3145 if (IS_IMM) { /* immediate */
3146 rs2 = GET_FIELDs(insn, 20, 31);
3147 tcg_gen_sari_tl(cpu_dst, cpu_src1, rs2 & 0x1f);
3148 } else { /* register */
3149 tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
3150 tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
3151 }
3152 gen_movl_TN_reg(rd, cpu_dst);
3153 break;
3154 #endif
3155 case 0x30:
3156 {
3157 switch(rd) {
3158 case 0: /* wry */
3159 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3160 tcg_gen_st_tl(cpu_tmp0, cpu_env,
3161 offsetof(CPUSPARCState, y));
3162 break;
3163 #ifndef TARGET_SPARC64
3164 case 0x01 ... 0x0f: /* undefined in the
3165 SPARCv8 manual, nop
3166 on the microSPARC
3167 II */
3168 case 0x10 ... 0x1f: /* implementation-dependent
3169 in the SPARCv8
3170 manual, nop on the
3171 microSPARC II */
3172 break;
3173 #else
3174 case 0x2: /* V9 wrccr */
3175 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3176 tcg_gen_helper_0_1(helper_wrccr, cpu_dst);
3177 break;
3178 case 0x3: /* V9 wrasi */
3179 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3180 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3181 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3182 offsetof(CPUSPARCState, asi));
3183 break;
3184 case 0x6: /* V9 wrfprs */
3185 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3186 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_dst);
3187 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3188 offsetof(CPUSPARCState, fprs));
3189 save_state(dc, cpu_cond);
3190 gen_op_next_insn();
3191 tcg_gen_exit_tb(0);
3192 dc->is_br = 1;
3193 break;
3194 case 0xf: /* V9 sir, nop if user */
3195 #if !defined(CONFIG_USER_ONLY)
3196 if (supervisor(dc))
3197 ; // XXX
3198 #endif
3199 break;
3200 case 0x13: /* Graphics Status */
3201 if (gen_trap_ifnofpu(dc, cpu_cond))
3202 goto jmp_insn;
3203 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3204 tcg_gen_st_tl(cpu_tmp0, cpu_env,
3205 offsetof(CPUSPARCState, gsr));
3206 break;
3207 case 0x17: /* Tick compare */
3208 #if !defined(CONFIG_USER_ONLY)
3209 if (!supervisor(dc))
3210 goto illegal_insn;
3211 #endif
3212 {
3213 TCGv r_tickptr;
3214
3215 tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
3216 cpu_src2);
3217 tcg_gen_st_tl(cpu_tmp0, cpu_env,
3218 offsetof(CPUSPARCState,
3219 tick_cmpr));
3220 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3221 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3222 offsetof(CPUState, tick));
3223 tcg_gen_helper_0_2(helper_tick_set_limit,
3224 r_tickptr, cpu_tmp0);
3225 tcg_temp_free(r_tickptr);
3226 }
3227 break;
3228 case 0x18: /* System tick */
3229 #if !defined(CONFIG_USER_ONLY)
3230 if (!supervisor(dc))
3231 goto illegal_insn;
3232 #endif
3233 {
3234 TCGv r_tickptr;
3235
3236 tcg_gen_xor_tl(cpu_dst, cpu_src1,
3237 cpu_src2);
3238 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3239 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3240 offsetof(CPUState, stick));
3241 tcg_gen_helper_0_2(helper_tick_set_count,
3242 r_tickptr, cpu_dst);
3243 tcg_temp_free(r_tickptr);
3244 }
3245 break;
3246 case 0x19: /* System tick compare */
3247 #if !defined(CONFIG_USER_ONLY)
3248 if (!supervisor(dc))
3249 goto illegal_insn;
3250 #endif
3251 {
3252 TCGv r_tickptr;
3253
3254 tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
3255 cpu_src2);
3256 tcg_gen_st_tl(cpu_tmp0, cpu_env,
3257 offsetof(CPUSPARCState,
3258 stick_cmpr));
3259 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3260 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3261 offsetof(CPUState, stick));
3262 tcg_gen_helper_0_2(helper_tick_set_limit,
3263 r_tickptr, cpu_tmp0);
3264 tcg_temp_free(r_tickptr);
3265 }
3266 break;
3267
3268 case 0x10: /* Performance Control */
3269 case 0x11: /* Performance Instrumentation
3270 Counter */
3271 case 0x12: /* Dispatch Control */
3272 case 0x14: /* Softint set */
3273 case 0x15: /* Softint clear */
3274 case 0x16: /* Softint write */
3275 #endif
3276 default:
3277 goto illegal_insn;
3278 }
3279 }
3280 break;
3281 #if !defined(CONFIG_USER_ONLY)
3282 case 0x31: /* wrpsr, V9 saved, restored */
3283 {
3284 if (!supervisor(dc))
3285 goto priv_insn;
3286 #ifdef TARGET_SPARC64
3287 switch (rd) {
3288 case 0:
3289 tcg_gen_helper_0_0(helper_saved);
3290 break;
3291 case 1:
3292 tcg_gen_helper_0_0(helper_restored);
3293 break;
3294 case 2: /* UA2005 allclean */
3295 case 3: /* UA2005 otherw */
3296 case 4: /* UA2005 normalw */
3297 case 5: /* UA2005 invalw */
3298 // XXX
3299 default:
3300 goto illegal_insn;
3301 }
3302 #else
3303 tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3304 tcg_gen_helper_0_1(helper_wrpsr, cpu_dst);
3305 save_state(dc, cpu_cond);
3306 gen_op_next_insn();
3307 tcg_gen_exit_tb(0);
3308 dc->is_br = 1;
3309 #endif
3310 }
3311 break;
3312 case 0x32: /* wrwim, V9 wrpr */
3313 {
3314 if (!supervisor(dc))
3315 goto priv_insn;
3316 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3317 #ifdef TARGET_SPARC64
3318 switch (rd) {
3319 case 0: // tpc
3320 {
3321 TCGv r_tsptr;
3322
3323 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3324 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3325 offsetof(CPUState, tsptr));
3326 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3327 offsetof(trap_state, tpc));
3328 tcg_temp_free(r_tsptr);
3329 }
3330 break;
3331 case 1: // tnpc
3332 {
3333 TCGv r_tsptr;
3334
3335 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3336 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3337 offsetof(CPUState, tsptr));
3338 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3339 offsetof(trap_state, tnpc));
3340 tcg_temp_free(r_tsptr);
3341 }
3342 break;
3343 case 2: // tstate
3344 {
3345 TCGv r_tsptr;
3346
3347 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3348 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3349 offsetof(CPUState, tsptr));
3350 tcg_gen_st_tl(cpu_tmp0, r_tsptr,
3351 offsetof(trap_state,
3352 tstate));
3353 tcg_temp_free(r_tsptr);
3354 }
3355 break;
3356 case 3: // tt
3357 {
3358 TCGv r_tsptr;
3359
3360 r_tsptr = tcg_temp_new(TCG_TYPE_PTR);
3361 tcg_gen_ld_ptr(r_tsptr, cpu_env,
3362 offsetof(CPUState, tsptr));
3363 tcg_gen_st_i32(cpu_tmp0, r_tsptr,
3364 offsetof(trap_state, tt));
3365 tcg_temp_free(r_tsptr);
3366 }
3367 break;
3368 case 4: // tick
3369 {
3370 TCGv r_tickptr;
3371
3372 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3373 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3374 offsetof(CPUState, tick));
3375 tcg_gen_helper_0_2(helper_tick_set_count,
3376 r_tickptr, cpu_tmp0);
3377 tcg_temp_free(r_tickptr);
3378 }
3379 break;
3380 case 5: // tba
3381 tcg_gen_st_tl(cpu_tmp0, cpu_env,
3382 offsetof(CPUSPARCState, tbr));
3383 break;
3384 case 6: // pstate
3385 save_state(dc, cpu_cond);
3386 tcg_gen_helper_0_1(helper_wrpstate, cpu_tmp0);
3387 gen_op_next_insn();
3388 tcg_gen_exit_tb(0);
3389 dc->is_br = 1;
3390 break;
3391 case 7: // tl
3392 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3393 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3394 offsetof(CPUSPARCState, tl));
3395 break;
3396 case 8: // pil
3397 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3398 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3399 offsetof(CPUSPARCState,
3400 psrpil));
3401 break;
3402 case 9: // cwp
3403 tcg_gen_helper_0_1(helper_wrcwp, cpu_tmp0);
3404 break;
3405 case 10: // cansave
3406 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3407 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3408 offsetof(CPUSPARCState,
3409 cansave));
3410 break;
3411 case 11: // canrestore
3412 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3413 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3414 offsetof(CPUSPARCState,
3415 canrestore));
3416 break;
3417 case 12: // cleanwin
3418 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3419 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3420 offsetof(CPUSPARCState,
3421 cleanwin));
3422 break;
3423 case 13: // otherwin
3424 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3425 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3426 offsetof(CPUSPARCState,
3427 otherwin));
3428 break;
3429 case 14: // wstate
3430 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3431 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3432 offsetof(CPUSPARCState,
3433 wstate));
3434 break;
3435 case 16: // UA2005 gl
3436 CHECK_IU_FEATURE(dc, GL);
3437 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3438 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3439 offsetof(CPUSPARCState, gl));
3440 break;
3441 case 26: // UA2005 strand status
3442 CHECK_IU_FEATURE(dc, HYPV);
3443 if (!hypervisor(dc))
3444 goto priv_insn;
3445 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3446 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3447 offsetof(CPUSPARCState, ssr));
3448 break;
3449 default:
3450 goto illegal_insn;
3451 }
3452 #else
3453 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3454 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3455 offsetof(CPUSPARCState, wim));
3456 #endif
3457 }
3458 break;
3459 case 0x33: /* wrtbr, UA2005 wrhpr */
3460 {
3461 #ifndef TARGET_SPARC64
3462 if (!supervisor(dc))
3463 goto priv_insn;
3464 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3465 tcg_gen_st_tl(cpu_tmp0, cpu_env,
3466 offsetof(CPUSPARCState, tbr));
3467 #else
3468 CHECK_IU_FEATURE(dc, HYPV);
3469 if (!hypervisor(dc))
3470 goto priv_insn;
3471 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
3472 switch (rd) {
3473 case 0: // hpstate
3474 // XXX gen_op_wrhpstate();
3475 save_state(dc, cpu_cond);
3476 gen_op_next_insn();
3477 tcg_gen_exit_tb(0);
3478 dc->is_br = 1;
3479 break;
3480 case 1: // htstate
3481 // XXX gen_op_wrhtstate();
3482 break;
3483 case 3: // hintp
3484 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3485 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3486 offsetof(CPUSPARCState, hintp));
3487 break;
3488 case 5: // htba
3489 tcg_gen_trunc_tl_i32(cpu_tmp32, cpu_tmp0);
3490 tcg_gen_st_i32(cpu_tmp32, cpu_env,
3491 offsetof(CPUSPARCState, htba));
3492 break;
3493 case 31: // hstick_cmpr
3494 {
3495 TCGv r_tickptr;
3496
3497 tcg_gen_st_tl(cpu_tmp0, cpu_env,
3498 offsetof(CPUSPARCState,
3499 hstick_cmpr));
3500 r_tickptr = tcg_temp_new(TCG_TYPE_PTR);
3501 tcg_gen_ld_ptr(r_tickptr, cpu_env,
3502 offsetof(CPUState, hstick));
3503 tcg_gen_helper_0_2(helper_tick_set_limit,
3504 r_tickptr, cpu_tmp0);
3505 tcg_temp_free(r_tickptr);
3506 }
3507 break;
3508 case 6: // hver readonly
3509 default:
3510 goto illegal_insn;
3511 }
3512 #endif
3513 }
3514 break;
3515 #endif
3516 #ifdef TARGET_SPARC64
3517 case 0x2c: /* V9 movcc */
3518 {
3519 int cc = GET_FIELD_SP(insn, 11, 12);
3520 int cond = GET_FIELD_SP(insn, 14, 17);
3521 TCGv r_cond;
3522 int l1;
3523
3524 r_cond = tcg_temp_new(TCG_TYPE_TL);
3525 if (insn & (1 << 18)) {
3526 if (cc == 0)
3527 gen_cond(r_cond, 0, cond);
3528 else if (cc == 2)
3529 gen_cond(r_cond, 1, cond);
3530 else
3531 goto illegal_insn;
3532 } else {
3533 gen_fcond(r_cond, cc, cond);
3534 }
3535
3536 l1 = gen_new_label();
3537
3538 tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);
3539 if (IS_IMM) { /* immediate */
3540 TCGv r_const;
3541
3542 rs2 = GET_FIELD_SPs(insn, 0, 10);
3543 r_const = tcg_const_tl((int)rs2);
3544 gen_movl_TN_reg(rd, r_const);
3545 tcg_temp_free(r_const);
3546 } else {
3547 rs2 = GET_FIELD_SP(insn, 0, 4);
3548 gen_movl_reg_TN(rs2, cpu_tmp0);
3549 gen_movl_TN_reg(rd, cpu_tmp0);
3550 }
3551 gen_set_label(l1);
3552 tcg_temp_free(r_cond);
3553 break;
3554 }
3555 case 0x2d: /* V9 sdivx */
3556 gen_op_sdivx(cpu_dst, cpu_src1, cpu_src2);
3557 gen_movl_TN_reg(rd, cpu_dst);
3558 break;
3559 case 0x2e: /* V9 popc */
3560 {
3561 cpu_src2 = get_src2(insn, cpu_src2);
3562 tcg_gen_helper_1_1(helper_popc, cpu_dst,
3563 cpu_src2);
3564 gen_movl_TN_reg(rd, cpu_dst);
3565 }
3566 case 0x2f: /* V9 movr */
3567 {
3568 int cond = GET_FIELD_SP(insn, 10, 12);
3569 int l1;
3570
3571 cpu_src1 = get_src1(insn, cpu_src1);
3572
3573 l1 = gen_new_label();
3574
3575 tcg_gen_brcondi_tl(gen_tcg_cond_reg[cond],
3576 cpu_src1, 0, l1);
3577 if (IS_IMM) { /* immediate */
3578 TCGv r_const;
3579
3580 rs2 = GET_FIELD_SPs(insn, 0, 9);
3581 r_const = tcg_const_tl((int)rs2);
3582 gen_movl_TN_reg(rd, r_const);
3583 tcg_temp_free(r_const);
3584 } else {
3585 rs2 = GET_FIELD_SP(insn, 0, 4);
3586 gen_movl_reg_TN(rs2, cpu_tmp0);
3587 gen_movl_TN_reg(rd, cpu_tmp0);
3588 }
3589 gen_set_label(l1);
3590 break;
3591 }
3592 #endif
3593 default:
3594 goto illegal_insn;
3595 }
3596 }
3597 } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
3598 #ifdef TARGET_SPARC64
3599 int opf = GET_FIELD_SP(insn, 5, 13);
3600 rs1 = GET_FIELD(insn, 13, 17);
3601 rs2 = GET_FIELD(insn, 27, 31);
3602 if (gen_trap_ifnofpu(dc, cpu_cond))
3603 goto jmp_insn;
3604
3605 switch (opf) {
3606 case 0x000: /* VIS I edge8cc */
3607 case 0x001: /* VIS II edge8n */
3608 case 0x002: /* VIS I edge8lcc */
3609 case 0x003: /* VIS II edge8ln */
3610 case 0x004: /* VIS I edge16cc */
3611 case 0x005: /* VIS II edge16n */
3612 case 0x006: /* VIS I edge16lcc */
3613 case 0x007: /* VIS II edge16ln */
3614 case 0x008: /* VIS I edge32cc */
3615 case 0x009: /* VIS II edge32n */
3616 case 0x00a: /* VIS I edge32lcc */
3617 case 0x00b: /* VIS II edge32ln */
3618 // XXX
3619 goto illegal_insn;
3620 case 0x010: /* VIS I array8 */
3621 CHECK_FPU_FEATURE(dc, VIS1);
3622 cpu_src1 = get_src1(insn, cpu_src1);
3623 gen_movl_reg_TN(rs2, cpu_src2);
3624 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3625 cpu_src2);
3626 gen_movl_TN_reg(rd, cpu_dst);
3627 break;
3628 case 0x012: /* VIS I array16 */
3629 CHECK_FPU_FEATURE(dc, VIS1);
3630 cpu_src1 = get_src1(insn, cpu_src1);
3631 gen_movl_reg_TN(rs2, cpu_src2);
3632 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3633 cpu_src2);
3634 tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
3635 gen_movl_TN_reg(rd, cpu_dst);
3636 break;
3637 case 0x014: /* VIS I array32 */
3638 CHECK_FPU_FEATURE(dc, VIS1);
3639 cpu_src1 = get_src1(insn, cpu_src1);
3640 gen_movl_reg_TN(rs2, cpu_src2);
3641 tcg_gen_helper_1_2(helper_array8, cpu_dst, cpu_src1,
3642 cpu_src2);
3643 tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
3644 gen_movl_TN_reg(rd, cpu_dst);
3645 break;
3646 case 0x018: /* VIS I alignaddr */
3647 CHECK_FPU_FEATURE(dc, VIS1);
3648 cpu_src1 = get_src1(insn, cpu_src1);
3649 gen_movl_reg_TN(rs2, cpu_src2);
3650 tcg_gen_helper_1_2(helper_alignaddr, cpu_dst, cpu_src1,
3651 cpu_src2);
3652 gen_movl_TN_reg(rd, cpu_dst);
3653 break;
3654 case 0x019: /* VIS II bmask */
3655 case 0x01a: /* VIS I alignaddrl */
3656 // XXX
3657 goto illegal_insn;
3658 case 0x020: /* VIS I fcmple16 */
3659 CHECK_FPU_FEATURE(dc, VIS1);
3660 gen_op_load_fpr_DT0(DFPREG(rs1));
3661 gen_op_load_fpr_DT1(DFPREG(rs2));
3662 tcg_gen_helper_0_0(helper_fcmple16);
3663 gen_op_store_DT0_fpr(DFPREG(rd));
3664 break;
3665 case 0x022: /* VIS I fcmpne16 */
3666 CHECK_FPU_FEATURE(dc, VIS1);
3667 gen_op_load_fpr_DT0(DFPREG(rs1));
3668 gen_op_load_fpr_DT1(DFPREG(rs2));
3669 tcg_gen_helper_0_0(helper_fcmpne16);
3670 gen_op_store_DT0_fpr(DFPREG(rd));
3671 break;
3672 case 0x024: /* VIS I fcmple32 */
3673 CHECK_FPU_FEATURE(dc, VIS1);
3674 gen_op_load_fpr_DT0(DFPREG(rs1));
3675 gen_op_load_fpr_DT1(DFPREG(rs2));
3676 tcg_gen_helper_0_0(helper_fcmple32);
3677 gen_op_store_DT0_fpr(DFPREG(rd));
3678 break;
3679 case 0x026: /* VIS I fcmpne32 */
3680 CHECK_FPU_FEATURE(dc, VIS1);
3681 gen_op_load_fpr_DT0(DFPREG(rs1));
3682 gen_op_load_fpr_DT1(DFPREG(rs2));
3683 tcg_gen_helper_0_0(helper_fcmpne32);
3684 gen_op_store_DT0_fpr(DFPREG(rd));
3685 break;
3686 case 0x028: /* VIS I fcmpgt16 */
3687 CHECK_FPU_FEATURE(dc, VIS1);
3688 gen_op_load_fpr_DT0(DFPREG(rs1));
3689 gen_op_load_fpr_DT1(DFPREG(rs2));
3690 tcg_gen_helper_0_0(helper_fcmpgt16);
3691 gen_op_store_DT0_fpr(DFPREG(rd));
3692 break;
3693 case 0x02a: /* VIS I fcmpeq16 */
3694 CHECK_FPU_FEATURE(dc, VIS1);
3695 gen_op_load_fpr_DT0(DFPREG(rs1));
3696 gen_op_load_fpr_DT1(DFPREG(rs2));
3697 tcg_gen_helper_0_0(helper_fcmpeq16);
3698 gen_op_store_DT0_fpr(DFPREG(rd));
3699 break;
3700 case 0x02c: /* VIS I fcmpgt32 */
3701 CHECK_FPU_FEATURE(dc, VIS1);
3702 gen_op_load_fpr_DT0(DFPREG(rs1));
3703 gen_op_load_fpr_DT1(DFPREG(rs2));
3704 tcg_gen_helper_0_0(helper_fcmpgt32);
3705 gen_op_store_DT0_fpr(DFPREG(rd));
3706 break;
3707 case 0x02e: /* VIS I fcmpeq32 */
3708 CHECK_FPU_FEATURE(dc, VIS1);
3709 gen_op_load_fpr_DT0(DFPREG(rs1));
3710 gen_op_load_fpr_DT1(DFPREG(rs2));
3711 tcg_gen_helper_0_0(helper_fcmpeq32);
3712 gen_op_store_DT0_fpr(DFPREG(rd));
3713 break;
3714 case 0x031: /* VIS I fmul8x16 */
3715 CHECK_FPU_FEATURE(dc, VIS1);
3716 gen_op_load_fpr_DT0(DFPREG(rs1));
3717 gen_op_load_fpr_DT1(DFPREG(rs2));
3718 tcg_gen_helper_0_0(helper_fmul8x16);
3719 gen_op_store_DT0_fpr(DFPREG(rd));
3720 break;
3721 case 0x033: /* VIS I fmul8x16au */
3722 CHECK_FPU_FEATURE(dc, VIS1);
3723 gen_op_load_fpr_DT0(DFPREG(rs1));
3724 gen_op_load_fpr_DT1(DFPREG(rs2));
3725 tcg_gen_helper_0_0(helper_fmul8x16au);
3726 gen_op_store_DT0_fpr(DFPREG(rd));
3727 break;
3728 case 0x035: /* VIS I fmul8x16al */
3729 CHECK_FPU_FEATURE(dc, VIS1);
3730 gen_op_load_fpr_DT0(DFPREG(rs1));
3731 gen_op_load_fpr_DT1(DFPREG(rs2));
3732 tcg_gen_helper_0_0(helper_fmul8x16al);
3733 gen_op_store_DT0_fpr(DFPREG(rd));
3734 break;
3735 case 0x036: /* VIS I fmul8sux16 */
3736 CHECK_FPU_FEATURE(dc, VIS1);
3737 gen_op_load_fpr_DT0(DFPREG(rs1));
3738 gen_op_load_fpr_DT1(DFPREG(rs2));
3739 tcg_gen_helper_0_0(helper_fmul8sux16);
3740 gen_op_store_DT0_fpr(DFPREG(rd));
3741 break;
3742 case 0x037: /* VIS I fmul8ulx16 */
3743 CHECK_FPU_FEATURE(dc, VIS1);
3744 gen_op_load_fpr_DT0(DFPREG(rs1));
3745 gen_op_load_fpr_DT1(DFPREG(rs2));
3746 tcg_gen_helper_0_0(helper_fmul8ulx16);
3747 gen_op_store_DT0_fpr(DFPREG(rd));
3748 break;
3749 case 0x038: /* VIS I fmuld8sux16 */
3750 CHECK_FPU_FEATURE(dc, VIS1);
3751 gen_op_load_fpr_DT0(DFPREG(rs1));
3752 gen_op_load_fpr_DT1(DFPREG(rs2));
3753 tcg_gen_helper_0_0(helper_fmuld8sux16);
3754 gen_op_store_DT0_fpr(DFPREG(rd));
3755 break;
3756 case 0x039: /* VIS I fmuld8ulx16 */
3757 CHECK_FPU_FEATURE(dc, VIS1);
3758 gen_op_load_fpr_DT0(DFPREG(rs1));
3759 gen_op_load_fpr_DT1(DFPREG(rs2));
3760 tcg_gen_helper_0_0(helper_fmuld8ulx16);
3761 gen_op_store_DT0_fpr(DFPREG(rd));
3762 break;
3763 case 0x03a: /* VIS I fpack32 */
3764 case 0x03b: /* VIS I fpack16 */
3765 case 0x03d: /* VIS I fpackfix */
3766 case 0x03e: /* VIS I pdist */
3767 // XXX
3768 goto illegal_insn;
3769 case 0x048: /* VIS I faligndata */
3770 CHECK_FPU_FEATURE(dc, VIS1);
3771 gen_op_load_fpr_DT0(DFPREG(rs1));
3772 gen_op_load_fpr_DT1(DFPREG(rs2));
3773 tcg_gen_helper_0_0(helper_faligndata);
3774 gen_op_store_DT0_fpr(DFPREG(rd));
3775 break;
3776 case 0x04b: /* VIS I fpmerge */
3777 CHECK_FPU_FEATURE(dc, VIS1);
3778 gen_op_load_fpr_DT0(DFPREG(rs1));
3779 gen_op_load_fpr_DT1(DFPREG(rs2));
3780 tcg_gen_helper_0_0(helper_fpmerge);
3781 gen_op_store_DT0_fpr(DFPREG(rd));
3782 break;
3783 case 0x04c: /* VIS II bshuffle */
3784 // XXX
3785 goto illegal_insn;
3786 case 0x04d: /* VIS I fexpand */
3787 CHECK_FPU_FEATURE(dc, VIS1);
3788 gen_op_load_fpr_DT0(DFPREG(rs1));
3789 gen_op_load_fpr_DT1(DFPREG(rs2));
3790 tcg_gen_helper_0_0(helper_fexpand);
3791 gen_op_store_DT0_fpr(DFPREG(rd));
3792 break;
3793 case 0x050: /* VIS I fpadd16 */
3794 CHECK_FPU_FEATURE(dc, VIS1);
3795 gen_op_load_fpr_DT0(DFPREG(rs1));
3796 gen_op_load_fpr_DT1(DFPREG(rs2));
3797 tcg_gen_helper_0_0(helper_fpadd16);
3798 gen_op_store_DT0_fpr(DFPREG(rd));
3799 break;
3800 case 0x051: /* VIS I fpadd16s */
3801 CHECK_FPU_FEATURE(dc, VIS1);
3802 gen_op_load_fpr_FT0(rs1);
3803 gen_op_load_fpr_FT1(rs2);
3804 tcg_gen_helper_0_0(helper_fpadd16s);
3805 gen_op_store_FT0_fpr(rd);
3806 break;
3807 case 0x052: /* VIS I fpadd32 */
3808 CHECK_FPU_FEATURE(dc, VIS1);
3809 gen_op_load_fpr_DT0(DFPREG(rs1));
3810 gen_op_load_fpr_DT1(DFPREG(rs2));
3811 tcg_gen_helper_0_0(helper_fpadd32);
3812 gen_op_store_DT0_fpr(DFPREG(rd));
3813 break;
3814 case 0x053: /* VIS I fpadd32s */
3815 CHECK_FPU_FEATURE(dc, VIS1);
3816 gen_op_load_fpr_FT0(rs1);
3817 gen_op_load_fpr_FT1(rs2);
3818 tcg_gen_helper_0_0(helper_fpadd32s);
3819 gen_op_store_FT0_fpr(rd);
3820 break;
3821 case 0x054: /* VIS I fpsub16 */
3822 CHECK_FPU_FEATURE(dc, VIS1);
3823 gen_op_load_fpr_DT0(DFPREG(rs1));
3824 gen_op_load_fpr_DT1(DFPREG(rs2));
3825 tcg_gen_helper_0_0(helper_fpsub16);
3826 gen_op_store_DT0_fpr(DFPREG(rd));
3827 break;
3828 case 0x055: /* VIS I fpsub16s */
3829 CHECK_FPU_FEATURE(dc, VIS1);
3830 gen_op_load_fpr_FT0(rs1);
3831 gen_op_load_fpr_FT1(rs2);
3832 tcg_gen_helper_0_0(helper_fpsub16s);
3833 gen_op_store_FT0_fpr(rd);
3834 break;
3835 case 0x056: /* VIS I fpsub32 */
3836 CHECK_FPU_FEATURE(dc, VIS1);
3837 gen_op_load_fpr_DT0(DFPREG(rs1));
3838 gen_op_load_fpr_DT1(DFPREG(rs2));
3839 tcg_gen_helper_0_0(helper_fpadd32);
3840 gen_op_store_DT0_fpr(DFPREG(rd));
3841 break;
3842 case 0x057: /* VIS I fpsub32s */
3843 CHECK_FPU_FEATURE(dc, VIS1);
3844 gen_op_load_fpr_FT0(rs1);
3845 gen_op_load_fpr_FT1(rs2);
3846 tcg_gen_helper_0_0(helper_fpsub32s);
3847 gen_op_store_FT0_fpr(rd);
3848 break;
3849 case 0x060: /* VIS I fzero */
3850 CHECK_FPU_FEATURE(dc, VIS1);
3851 tcg_gen_helper_0_0(helper_movl_DT0_0);
3852 gen_op_store_DT0_fpr(DFPREG(rd));
3853 break;
3854 case 0x061: /* VIS I fzeros */
3855 CHECK_FPU_FEATURE(dc, VIS1);
3856 tcg_gen_helper_0_0(helper_movl_FT0_0);
3857 gen_op_store_FT0_fpr(rd);
3858 break;
3859 case 0x062: /* VIS I fnor */
3860 CHECK_FPU_FEATURE(dc, VIS1);
3861 gen_op_load_fpr_DT0(DFPREG(rs1));
3862 gen_op_load_fpr_DT1(DFPREG(rs2));
3863 tcg_gen_helper_0_0(helper_fnor);
3864 gen_op_store_DT0_fpr(DFPREG(rd));
3865 break;
3866 case 0x063: /* VIS I fnors */
3867 CHECK_FPU_FEATURE(dc, VIS1);
3868 gen_op_load_fpr_FT0(rs1);
3869 gen_op_load_fpr_FT1(rs2);
3870 tcg_gen_helper_0_0(helper_fnors);
3871 gen_op_store_FT0_fpr(rd);
3872 break;
3873 case 0x064: /* VIS I fandnot2 */
3874 CHECK_FPU_FEATURE(dc, VIS1);
3875 gen_op_load_fpr_DT1(DFPREG(rs1));
3876 gen_op_load_fpr_DT0(DFPREG(rs2));
3877 tcg_gen_helper_0_0(helper_fandnot);
3878 gen_op_store_DT0_fpr(DFPREG(rd));
3879 break;
3880 case 0x065: /* VIS I fandnot2s */
3881 CHECK_FPU_FEATURE(dc, VIS1);
3882 gen_op_load_fpr_FT1(rs1);
3883 gen_op_load_fpr_FT0(rs2);
3884 tcg_gen_helper_0_0(helper_fandnots);
3885 gen_op_store_FT0_fpr(rd);
3886 break;
3887 case 0x066: /* VIS I fnot2 */
3888 CHECK_FPU_FEATURE(dc, VIS1);
3889 gen_op_load_fpr_DT1(DFPREG(rs2));
3890 tcg_gen_helper_0_0(helper_fnot);
3891 gen_op_store_DT0_fpr(DFPREG(rd));
3892 break;
3893 case 0x067: /* VIS I fnot2s */
3894 CHECK_FPU_FEATURE(dc, VIS1);
3895 gen_op_load_fpr_FT1(rs2);
3896 tcg_gen_helper_0_0(helper_fnot);
3897 gen_op_store_FT0_fpr(rd);
3898 break;
3899 case 0x068: /* VIS I fandnot1 */
3900 CHECK_FPU_FEATURE(dc, VIS1);
3901 gen_op_load_fpr_DT0(DFPREG(rs1));
3902 gen_op_load_fpr_DT1(DFPREG(rs2));
3903 tcg_gen_helper_0_0(helper_fandnot);
3904 gen_op_store_DT0_fpr(DFPREG(rd));
3905 break;
3906 case 0x069: /* VIS I fandnot1s */
3907 CHECK_FPU_FEATURE(dc, VIS1);
3908 gen_op_load_fpr_FT0(rs1);
3909 gen_op_load_fpr_FT1(rs2);
3910 tcg_gen_helper_0_0(helper_fandnots);
3911 gen_op_store_FT0_fpr(rd);
3912 break;
3913 case 0x06a: /* VIS I fnot1 */
3914 CHECK_FPU_FEATURE(dc, VIS1);
3915 gen_op_load_fpr_DT1(DFPREG(rs1));
3916 tcg_gen_helper_0_0(helper_fnot);
3917 gen_op_store_DT0_fpr(DFPREG(rd));
3918 break;
3919 case 0x06b: /* VIS I fnot1s */
3920 CHECK_FPU_FEATURE(dc, VIS1);
3921 gen_op_load_fpr_FT1(rs1);
3922 tcg_gen_helper_0_0(helper_fnot);
3923 gen_op_store_FT0_fpr(rd);
3924 break;
3925 case 0x06c: /* VIS I fxor */
3926 CHECK_FPU_FEATURE(dc, VIS1);
3927 gen_op_load_fpr_DT0(DFPREG(rs1));
3928 gen_op_load_fpr_DT1(DFPREG(rs2));
3929 tcg_gen_helper_0_0(helper_fxor);
3930 gen_op_store_DT0_fpr(DFPREG(rd));
3931 break;
3932 case 0x06d: /* VIS I fxors */
3933 CHECK_FPU_FEATURE(dc, VIS1);
3934 gen_op_load_fpr_FT0(rs1);
3935 gen_op_load_fpr_FT1(rs2);
3936 tcg_gen_helper_0_0(helper_fxors);
3937 gen_op_store_FT0_fpr(rd);
3938 break;
3939 case 0x06e: /* VIS I fnand */
3940 CHECK_FPU_FEATURE(dc, VIS1);
3941 gen_op_load_fpr_DT0(DFPREG(rs1));
3942 gen_op_load_fpr_DT1(DFPREG(rs2));
3943 tcg_gen_helper_0_0(helper_fnand);
3944 gen_op_store_DT0_fpr(DFPREG(rd));
3945 break;
3946 case 0x06f: /* VIS I fnands */
3947 CHECK_FPU_FEATURE(dc, VIS1);
3948 gen_op_load_fpr_FT0(rs1);
3949 gen_op_load_fpr_FT1(rs2);
3950 tcg_gen_helper_0_0(helper_fnands);
3951 gen_op_store_FT0_fpr(rd);
3952 break;
3953 case 0x070: /* VIS I fand */
3954 CHECK_FPU_FEATURE(dc, VIS1);
3955 gen_op_load_fpr_DT0(DFPREG(rs1));
3956 gen_op_load_fpr_DT1(DFPREG(rs2));
3957 tcg_gen_helper_0_0(helper_fand);
3958 gen_op_store_DT0_fpr(DFPREG(rd));
3959 break;
3960 case 0x071: /* VIS I fands */
3961 CHECK_FPU_FEATURE(dc, VIS1);
3962 gen_op_load_fpr_FT0(rs1);
3963 gen_op_load_fpr_FT1(rs2);
3964 tcg_gen_helper_0_0(helper_fands);
3965 gen_op_store_FT0_fpr(rd);
3966 break;
3967 case 0x072: /* VIS I fxnor */
3968 CHECK_FPU_FEATURE(dc, VIS1);
3969 gen_op_load_fpr_DT0(DFPREG(rs1));
3970 gen_op_load_fpr_DT1(DFPREG(rs2));
3971 tcg_gen_helper_0_0(helper_fxnor);
3972 gen_op_store_DT0_fpr(DFPREG(rd));
3973 break;
3974 case 0x073: /* VIS I fxnors */
3975 CHECK_FPU_FEATURE(dc, VIS1);
3976 gen_op_load_fpr_FT0(rs1);
3977 gen_op_load_fpr_FT1(rs2);
3978 tcg_gen_helper_0_0(helper_fxnors);
3979 gen_op_store_FT0_fpr(rd);
3980 break;
3981 case 0x074: /* VIS I fsrc1 */
3982 CHECK_FPU_FEATURE(dc, VIS1);
3983 gen_op_load_fpr_DT0(DFPREG(rs1));
3984 gen_op_store_DT0_fpr(DFPREG(rd));
3985 break;
3986 case 0x075: /* VIS I fsrc1s */
3987 CHECK_FPU_FEATURE(dc, VIS1);
3988 gen_op_load_fpr_FT0(rs1);
3989 gen_op_store_FT0_fpr(rd);
3990 break;
3991 case 0x076: /* VIS I fornot2 */
3992 CHECK_FPU_FEATURE(dc, VIS1);
3993 gen_op_load_fpr_DT1(DFPREG(rs1));
3994 gen_op_load_fpr_DT0(DFPREG(rs2));
3995 tcg_gen_helper_0_0(helper_fornot);
3996 gen_op_store_DT0_fpr(DFPREG(rd));
3997 break;
3998 case 0x077: /* VIS I fornot2s */
3999 CHECK_FPU_FEATURE(dc, VIS1);
4000 gen_op_load_fpr_FT1(rs1);
4001 gen_op_load_fpr_FT0(rs2);
4002 tcg_gen_helper_0_0(helper_fornots);
4003 gen_op_store_FT0_fpr(rd);
4004 break;
4005 case 0x078: /* VIS I fsrc2 */
4006 CHECK_FPU_FEATURE(dc, VIS1);
4007 gen_op_load_fpr_DT0(DFPREG(rs2));
4008 gen_op_store_DT0_fpr(DFPREG(rd));
4009 break;
4010 case 0x079: /* VIS I fsrc2s */
4011 CHECK_FPU_FEATURE(dc, VIS1);
4012 gen_op_load_fpr_FT0(rs2);
4013 gen_op_store_FT0_fpr(rd);
4014 break;
4015 case 0x07a: /* VIS I fornot1 */
4016 CHECK_FPU_FEATURE(dc, VIS1);
4017 gen_op_load_fpr_DT0(DFPREG(rs1));
4018 gen_op_load_fpr_DT1(DFPREG(rs2));
4019 tcg_gen_helper_0_0(helper_fornot);
4020 gen_op_store_DT0_fpr(DFPREG(rd));
4021 break;
4022 case 0x07b: /* VIS I fornot1s */
4023 CHECK_FPU_FEATURE(dc, VIS1);
4024 gen_op_load_fpr_FT0(rs1);
4025 gen_op_load_fpr_FT1(rs2);
4026 tcg_gen_helper_0_0(helper_fornots);
4027 gen_op_store_FT0_fpr(rd);
4028 break;
4029 case 0x07c: /* VIS I for */
4030 CHECK_FPU_FEATURE(dc, VIS1);
4031 gen_op_load_fpr_DT0(DFPREG(rs1));
4032 gen_op_load_fpr_DT1(DFPREG(rs2));
4033 tcg_gen_helper_0_0(helper_for);
4034 gen_op_store_DT0_fpr(DFPREG(rd));
4035 break;
4036 case 0x07d: /* VIS I fors */
4037 CHECK_FPU_FEATURE(dc, VIS1);
4038 gen_op_load_fpr_FT0(rs1);
4039 gen_op_load_fpr_FT1(rs2);
4040 tcg_gen_helper_0_0(helper_fors);
4041 gen_op_store_FT0_fpr(rd);
4042 break;
4043 case 0x07e: /* VIS I fone */
4044 CHECK_FPU_FEATURE(dc, VIS1);
4045 tcg_gen_helper_0_0(helper_movl_DT0_1);
4046 gen_op_store_DT0_fpr(DFPREG(rd));
4047 break;
4048 case 0x07f: /* VIS I fones */
4049 CHECK_FPU_FEATURE(dc, VIS1);
4050 tcg_gen_helper_0_0(helper_movl_FT0_1);
4051 gen_op_store_FT0_fpr(rd);
4052 break;
4053 case 0x080: /* VIS I shutdown */
4054 case 0x081: /* VIS II siam */
4055 // XXX
4056 goto illegal_insn;
4057 default:
4058 goto illegal_insn;
4059 }
4060 #else
4061 goto ncp_insn;
4062 #endif
4063 } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4064 #ifdef TARGET_SPARC64
4065 goto illegal_insn;
4066 #else
4067 goto ncp_insn;
4068 #endif
4069 #ifdef TARGET_SPARC64
4070 } else if (xop == 0x39) { /* V9 return */
4071 TCGv r_const;
4072
4073 save_state(dc, cpu_cond);
4074 cpu_src1 = get_src1(insn, cpu_src1);
4075 if (IS_IMM) { /* immediate */
4076 rs2 = GET_FIELDs(insn, 19, 31);
4077 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4078 } else { /* register */
4079 rs2 = GET_FIELD(insn, 27, 31);
4080 if (rs2) {
4081 gen_movl_reg_TN(rs2, cpu_src2);
4082 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4083 } else
4084 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4085 }
4086 tcg_gen_helper_0_0(helper_restore);
4087 gen_mov_pc_npc(dc, cpu_cond);
4088 r_const = tcg_const_i32(3);
4089 tcg_gen_helper_0_2(helper_check_align, cpu_dst, r_const);
4090 tcg_temp_free(r_const);
4091 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4092 dc->npc = DYNAMIC_PC;
4093 goto jmp_insn;
4094 #endif
4095 } else {
4096 cpu_src1 = get_src1(insn, cpu_src1);
4097 if (IS_IMM) { /* immediate */
4098 rs2 = GET_FIELDs(insn, 19, 31);
4099 tcg_gen_addi_tl(cpu_dst, cpu_src1, (int)rs2);
4100 } else { /* register */
4101 rs2 = GET_FIELD(insn, 27, 31);
4102 if (rs2) {
4103 gen_movl_reg_TN(rs2, cpu_src2);
4104 tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4105 } else
4106 tcg_gen_mov_tl(cpu_dst, cpu_src1);
4107 }
4108 switch (xop) {
4109 case 0x38: /* jmpl */
4110 {
4111 TCGv r_const;
4112
4113 r_const = tcg_const_tl(dc->pc);
4114 gen_movl_TN_reg(rd, r_const);
4115 tcg_temp_free(r_const);
4116 gen_mov_pc_npc(dc, cpu_cond);
4117 r_const = tcg_const_i32(3);
4118 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4119 r_const);
4120 tcg_temp_free(r_const);
4121 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4122 dc->npc = DYNAMIC_PC;
4123 }
4124 goto jmp_insn;
4125 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4126 case 0x39: /* rett, V9 return */
4127 {
4128 TCGv r_const;
4129
4130 if (!supervisor(dc))
4131 goto priv_insn;
4132 gen_mov_pc_npc(dc, cpu_cond);
4133 r_const = tcg_const_i32(3);
4134 tcg_gen_helper_0_2(helper_check_align, cpu_dst,
4135 r_const);
4136 tcg_temp_free(r_const);
4137 tcg_gen_mov_tl(cpu_npc, cpu_dst);
4138 dc->npc = DYNAMIC_PC;
4139 tcg_gen_helper_0_0(helper_rett);
4140 }
4141 goto jmp_insn;
4142 #endif
4143 case 0x3b: /* flush */
4144 if (!((dc)->features & CPU_FEATURE_FLUSH))
4145 goto unimp_flush;
4146 tcg_gen_helper_0_1(helper_flush, cpu_dst);
4147 break;
4148 case 0x3c: /* save */
4149 save_state(dc, cpu_cond);
4150 tcg_gen_helper_0_0(helper_save);
4151 gen_movl_TN_reg(rd, cpu_dst);
4152 break;
4153 case 0x3d: /* restore */
4154 save_state(dc, cpu_cond);
4155 tcg_gen_helper_0_0(helper_restore);
4156 gen_movl_TN_reg(rd, cpu_dst);
4157 break;
4158 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
4159 case 0x3e: /* V9 done/retry */
4160 {
4161 switch (rd) {
4162 case 0:
4163 if (!supervisor(dc))
4164 goto priv_insn;
4165 dc->npc = DYNAMIC_PC;
4166 dc->pc = DYNAMIC_PC;
4167 tcg_gen_helper_0_0(helper_done);
4168 goto jmp_insn;
4169 case 1:
4170 if (!supervisor(dc))
4171 goto priv_insn;
4172 dc->npc = DYNAMIC_PC;
4173 dc->pc = DYNAMIC_PC;
4174 tcg_gen_helper_0_0(helper_retry);
4175 goto jmp_insn;
4176 default:
4177 goto illegal_insn;
4178 }
4179 }
4180 break;
4181 #endif
4182 default:
4183 goto illegal_insn;
4184 }
4185 }
4186 break;
4187 }
4188 break;
4189 case 3: /* load/store instructions */
4190 {
4191 unsigned int xop = GET_FIELD(insn, 7, 12);
4192
4193 cpu_src1 = get_src1(insn, cpu_src1);
4194 if (xop == 0x3c || xop == 0x3e) { // V9 casa/casxa
4195 rs2 = GET_FIELD(insn, 27, 31);
4196 gen_movl_reg_TN(rs2, cpu_src2);
4197 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4198 } else if (IS_IMM) { /* immediate */
4199 rs2 = GET_FIELDs(insn, 19, 31);
4200 tcg_gen_addi_tl(cpu_addr, cpu_src1, (int)rs2);
4201 } else { /* register */
4202 rs2 = GET_FIELD(insn, 27, 31);
4203 if (rs2 != 0) {
4204 gen_movl_reg_TN(rs2, cpu_src2);
4205 tcg_gen_add_tl(cpu_addr, cpu_src1, cpu_src2);
4206 } else
4207 tcg_gen_mov_tl(cpu_addr, cpu_src1);
4208 }
4209 if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
4210 (xop > 0x17 && xop <= 0x1d ) ||
4211 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
4212 switch (xop) {
4213 case 0x0: /* load unsigned word */
4214 gen_address_mask(dc, cpu_addr);
4215 tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
4216 break;
4217 case 0x1: /* load unsigned byte */
4218 gen_address_mask(dc, cpu_addr);
4219 tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
4220 break;
4221 case 0x2: /* load unsigned halfword */
4222 gen_address_mask(dc, cpu_addr);
4223 tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
4224 break;
4225 case 0x3: /* load double word */
4226 if (rd & 1)
4227 goto illegal_insn;
4228 else {
4229 TCGv r_const;
4230
4231 save_state(dc, cpu_cond);
4232 r_const = tcg_const_i32(7);
4233 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4234 r_const); // XXX remove
4235 tcg_temp_free(r_const);
4236 gen_address_mask(dc, cpu_addr);
4237 tcg_gen_qemu_ld64(cpu_tmp64, cpu_addr, dc->mem_idx);
4238 tcg_gen_trunc_i64_tl(cpu_tmp0, cpu_tmp64);
4239 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xffffffffULL);
4240 gen_movl_TN_reg(rd + 1, cpu_tmp0);
4241 tcg_gen_shri_i64(cpu_tmp64, cpu_tmp64, 32);
4242 tcg_gen_trunc_i64_tl(cpu_val, cpu_tmp64);
4243 tcg_gen_andi_tl(cpu_val, cpu_val, 0xffffffffULL);
4244 }
4245 break;
4246 case 0x9: /* load signed byte */
4247 gen_address_mask(dc, cpu_addr);
4248 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4249 break;
4250 case 0xa: /* load signed halfword */
4251 gen_address_mask(dc, cpu_addr);
4252 tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
4253 break;
4254 case 0xd: /* ldstub -- XXX: should be atomically */
4255 {
4256 TCGv r_const;
4257
4258 gen_address_mask(dc, cpu_addr);
4259 tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
4260 r_const = tcg_const_tl(0xff);
4261 tcg_gen_qemu_st8(r_const, cpu_addr, dc->mem_idx);
4262 tcg_temp_free(r_const);
4263 }
4264 break;
4265 case 0x0f: /* swap register with memory. Also
4266 atomically */
4267 CHECK_IU_FEATURE(dc, SWAP);
4268 gen_movl_reg_TN(rd, cpu_val);
4269 gen_address_mask(dc, cpu_addr);
4270 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4271 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4272 tcg_gen_extu_i32_tl(cpu_val, cpu_tmp32);
4273 break;
4274 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4275 case 0x10: /* load word alternate */
4276 #ifndef TARGET_SPARC64
4277 if (IS_IMM)
4278 goto illegal_insn;
4279 if (!supervisor(dc))
4280 goto priv_insn;
4281 #endif
4282 save_state(dc, cpu_cond);
4283 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 0);
4284 break;
4285 case 0x11: /* load unsigned byte alternate */
4286 #ifndef TARGET_SPARC64
4287 if (IS_IMM)
4288 goto illegal_insn;
4289 if (!supervisor(dc))
4290 goto priv_insn;
4291 #endif
4292 save_state(dc, cpu_cond);
4293 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 0);
4294 break;
4295 case 0x12: /* load unsigned halfword alternate */
4296 #ifndef TARGET_SPARC64
4297 if (IS_IMM)
4298 goto illegal_insn;
4299 if (!supervisor(dc))
4300 goto priv_insn;
4301 #endif
4302 save_state(dc, cpu_cond);
4303 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 0);
4304 break;
4305 case 0x13: /* load double word alternate */
4306 #ifndef TARGET_SPARC64
4307 if (IS_IMM)
4308 goto illegal_insn;
4309 if (!supervisor(dc))
4310 goto priv_insn;
4311 #endif
4312 if (rd & 1)
4313 goto illegal_insn;
4314 save_state(dc, cpu_cond);
4315 gen_ldda_asi(cpu_val, cpu_addr, insn, rd);
4316 goto skip_move;
4317 case 0x19: /* load signed byte alternate */
4318 #ifndef TARGET_SPARC64
4319 if (IS_IMM)
4320 goto illegal_insn;
4321 if (!supervisor(dc))
4322 goto priv_insn;
4323 #endif
4324 save_state(dc, cpu_cond);
4325 gen_ld_asi(cpu_val, cpu_addr, insn, 1, 1);
4326 break;
4327 case 0x1a: /* load signed halfword alternate */
4328 #ifndef TARGET_SPARC64
4329 if (IS_IMM)
4330 goto illegal_insn;
4331 if (!supervisor(dc))
4332 goto priv_insn;
4333 #endif
4334 save_state(dc, cpu_cond);
4335 gen_ld_asi(cpu_val, cpu_addr, insn, 2, 1);
4336 break;
4337 case 0x1d: /* ldstuba -- XXX: should be atomically */
4338 #ifndef TARGET_SPARC64
4339 if (IS_IMM)
4340 goto illegal_insn;
4341 if (!supervisor(dc))
4342 goto priv_insn;
4343 #endif
4344 save_state(dc, cpu_cond);
4345 gen_ldstub_asi(cpu_val, cpu_addr, insn);
4346 break;
4347 case 0x1f: /* swap reg with alt. memory. Also
4348 atomically */
4349 CHECK_IU_FEATURE(dc, SWAP);
4350 #ifndef TARGET_SPARC64
4351 if (IS_IMM)
4352 goto illegal_insn;
4353 if (!supervisor(dc))
4354 goto priv_insn;
4355 #endif
4356 save_state(dc, cpu_cond);
4357 gen_movl_reg_TN(rd, cpu_val);
4358 gen_swap_asi(cpu_val, cpu_addr, insn);
4359 break;
4360
4361 #ifndef TARGET_SPARC64
4362 case 0x30: /* ldc */
4363 case 0x31: /* ldcsr */
4364 case 0x33: /* lddc */
4365 goto ncp_insn;
4366 #endif
4367 #endif
4368 #ifdef TARGET_SPARC64
4369 case 0x08: /* V9 ldsw */
4370 gen_address_mask(dc, cpu_addr);
4371 tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
4372 break;
4373 case 0x0b: /* V9 ldx */
4374 gen_address_mask(dc, cpu_addr);
4375 tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
4376 break;
4377 case 0x18: /* V9 ldswa */
4378 save_state(dc, cpu_cond);
4379 gen_ld_asi(cpu_val, cpu_addr, insn, 4, 1);
4380 break;
4381 case 0x1b: /* V9 ldxa */
4382 save_state(dc, cpu_cond);
4383 gen_ld_asi(cpu_val, cpu_addr, insn, 8, 0);
4384 break;
4385 case 0x2d: /* V9 prefetch, no effect */
4386 goto skip_move;
4387 case 0x30: /* V9 ldfa */
4388 save_state(dc, cpu_cond);
4389 gen_ldf_asi(cpu_addr, insn, 4, rd);
4390 goto skip_move;
4391 case 0x33: /* V9 lddfa */
4392 save_state(dc, cpu_cond);
4393 gen_ldf_asi(cpu_addr, insn, 8, DFPREG(rd));
4394 goto skip_move;
4395 case 0x3d: /* V9 prefetcha, no effect */
4396 goto skip_move;
4397 case 0x32: /* V9 ldqfa */
4398 CHECK_FPU_FEATURE(dc, FLOAT128);
4399 save_state(dc, cpu_cond);
4400 gen_ldf_asi(cpu_addr, insn, 16, QFPREG(rd));
4401 goto skip_move;
4402 #endif
4403 default:
4404 goto illegal_insn;
4405 }
4406 gen_movl_TN_reg(rd, cpu_val);
4407 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4408 skip_move: ;
4409 #endif
4410 } else if (xop >= 0x20 && xop < 0x24) {
4411 if (gen_trap_ifnofpu(dc, cpu_cond))
4412 goto jmp_insn;
4413 save_state(dc, cpu_cond);
4414 switch (xop) {
4415 case 0x20: /* load fpreg */
4416 gen_address_mask(dc, cpu_addr);
4417 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4418 tcg_gen_st_i32(cpu_tmp32, cpu_env,
4419 offsetof(CPUState, fpr[rd]));
4420 break;
4421 case 0x21: /* load fsr */
4422 gen_address_mask(dc, cpu_addr);
4423 tcg_gen_qemu_ld32u(cpu_tmp32, cpu_addr, dc->mem_idx);
4424 tcg_gen_st_i32(cpu_tmp32, cpu_env,
4425 offsetof(CPUState, ft0));
4426 tcg_gen_helper_0_0(helper_ldfsr);
4427 break;
4428 case 0x22: /* load quad fpreg */
4429 {
4430 TCGv r_const;
4431
4432 CHECK_FPU_FEATURE(dc, FLOAT128);
4433 r_const = tcg_const_i32(dc->mem_idx);
4434 tcg_gen_helper_0_2(helper_ldqf, cpu_addr, r_const);
4435 tcg_temp_free(r_const);
4436 gen_op_store_QT0_fpr(QFPREG(rd));
4437 }
4438 break;
4439 case 0x23: /* load double fpreg */
4440 {
4441 TCGv r_const;
4442
4443 r_const = tcg_const_i32(dc->mem_idx);
4444 tcg_gen_helper_0_2(helper_lddf, cpu_addr, r_const);
4445 tcg_temp_free(r_const);
4446 gen_op_store_DT0_fpr(DFPREG(rd));
4447 }
4448 break;
4449 default:
4450 goto illegal_insn;
4451 }
4452 } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) || \
4453 xop == 0xe || xop == 0x1e) {
4454 gen_movl_reg_TN(rd, cpu_val);
4455 switch (xop) {
4456 case 0x4: /* store word */
4457 gen_address_mask(dc, cpu_addr);
4458 tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
4459 break;
4460 case 0x5: /* store byte */
4461 gen_address_mask(dc, cpu_addr);
4462 tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
4463 break;
4464 case 0x6: /* store halfword */
4465 gen_address_mask(dc, cpu_addr);
4466 tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
4467 break;
4468 case 0x7: /* store double word */
4469 if (rd & 1)
4470 goto illegal_insn;
4471 else {
4472 TCGv r_low, r_const;
4473
4474 save_state(dc, cpu_cond);
4475 gen_address_mask(dc, cpu_addr);
4476 r_const = tcg_const_i32(7);
4477 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4478 r_const); // XXX remove
4479 tcg_temp_free(r_const);
4480 r_low = tcg_temp_new(TCG_TYPE_TL);
4481 gen_movl_reg_TN(rd + 1, r_low);
4482 tcg_gen_helper_1_2(helper_pack64, cpu_tmp64, cpu_val,
4483 r_low);
4484 tcg_temp_free(r_low);
4485 tcg_gen_qemu_st64(cpu_tmp64, cpu_addr, dc->mem_idx);
4486 }
4487 break;
4488 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
4489 case 0x14: /* store word alternate */
4490 #ifndef TARGET_SPARC64
4491 if (IS_IMM)
4492 goto illegal_insn;
4493 if (!supervisor(dc))
4494 goto priv_insn;
4495 #endif
4496 save_state(dc, cpu_cond);
4497 gen_st_asi(cpu_val, cpu_addr, insn, 4);
4498 break;
4499 case 0x15: /* store byte alternate */
4500 #ifndef TARGET_SPARC64
4501 if (IS_IMM)
4502 goto illegal_insn;
4503 if (!supervisor(dc))
4504 goto priv_insn;
4505 #endif
4506 save_state(dc, cpu_cond);
4507 gen_st_asi(cpu_val, cpu_addr, insn, 1);
4508 break;
4509 case 0x16: /* store halfword alternate */
4510 #ifndef TARGET_SPARC64
4511 if (IS_IMM)
4512 goto illegal_insn;
4513 if (!supervisor(dc))
4514 goto priv_insn;
4515 #endif
4516 save_state(dc, cpu_cond);
4517 gen_st_asi(cpu_val, cpu_addr, insn, 2);
4518 break;
4519 case 0x17: /* store double word alternate */
4520 #ifndef TARGET_SPARC64
4521 if (IS_IMM)
4522 goto illegal_insn;
4523 if (!supervisor(dc))
4524 goto priv_insn;
4525 #endif
4526 if (rd & 1)
4527 goto illegal_insn;
4528 else {
4529 save_state(dc, cpu_cond);
4530 gen_stda_asi(cpu_val, cpu_addr, insn, rd);
4531 }
4532 break;
4533 #endif
4534 #ifdef TARGET_SPARC64
4535 case 0x0e: /* V9 stx */
4536 gen_address_mask(dc, cpu_addr);
4537 tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
4538 break;
4539 case 0x1e: /* V9 stxa */
4540 save_state(dc, cpu_cond);
4541 gen_st_asi(cpu_val, cpu_addr, insn, 8);
4542 break;
4543 #endif
4544 default:
4545 goto illegal_insn;
4546 }
4547 } else if (xop > 0x23 && xop < 0x28) {
4548 if (gen_trap_ifnofpu(dc, cpu_cond))
4549 goto jmp_insn;
4550 save_state(dc, cpu_cond);
4551 switch (xop) {
4552 case 0x24: /* store fpreg */
4553 gen_address_mask(dc, cpu_addr);
4554 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
4555 offsetof(CPUState, fpr[rd]));
4556 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4557 break;
4558 case 0x25: /* stfsr, V9 stxfsr */
4559 gen_address_mask(dc, cpu_addr);
4560 tcg_gen_helper_0_0(helper_stfsr);
4561 tcg_gen_ld_i32(cpu_tmp32, cpu_env,
4562 offsetof(CPUState, ft0));
4563 tcg_gen_qemu_st32(cpu_tmp32, cpu_addr, dc->mem_idx);
4564 break;
4565 case 0x26:
4566 #ifdef TARGET_SPARC64
4567 /* V9 stqf, store quad fpreg */
4568 {
4569 TCGv r_const;
4570
4571 CHECK_FPU_FEATURE(dc, FLOAT128);
4572 gen_op_load_fpr_QT0(QFPREG(rd));
4573 r_const = tcg_const_i32(dc->mem_idx);
4574 tcg_gen_helper_0_2(helper_stqf, cpu_addr, r_const);
4575 tcg_temp_free(r_const);
4576 }
4577 break;
4578 #else /* !TARGET_SPARC64 */
4579 /* stdfq, store floating point queue */
4580 #if defined(CONFIG_USER_ONLY)
4581 goto illegal_insn;
4582 #else
4583 if (!supervisor(dc))
4584 goto priv_insn;
4585 if (gen_trap_ifnofpu(dc, cpu_cond))
4586 goto jmp_insn;
4587 goto nfq_insn;
4588 #endif
4589 #endif
4590 case 0x27: /* store double fpreg */
4591 {
4592 TCGv r_const;
4593
4594 gen_op_load_fpr_DT0(DFPREG(rd));
4595 r_const = tcg_const_i32(dc->mem_idx);
4596 tcg_gen_helper_0_2(helper_stdf, cpu_addr, r_const);
4597 tcg_temp_free(r_const);
4598 }
4599 break;
4600 default:
4601 goto illegal_insn;
4602 }
4603 } else if (xop > 0x33 && xop < 0x3f) {
4604 save_state(dc, cpu_cond);
4605 switch (xop) {
4606 #ifdef TARGET_SPARC64
4607 case 0x34: /* V9 stfa */
4608 gen_op_load_fpr_FT0(rd);
4609 gen_stf_asi(cpu_addr, insn, 4, rd);
4610 break;
4611 case 0x36: /* V9 stqfa */
4612 {
4613 TCGv r_const;
4614
4615 CHECK_FPU_FEATURE(dc, FLOAT128);
4616 r_const = tcg_const_i32(7);
4617 tcg_gen_helper_0_2(helper_check_align, cpu_addr,
4618 r_const);
4619 tcg_temp_free(r_const);
4620 gen_op_load_fpr_QT0(QFPREG(rd));
4621 gen_stf_asi(cpu_addr, insn, 16, QFPREG(rd));
4622 }
4623 break;
4624 case 0x37: /* V9 stdfa */
4625 gen_op_load_fpr_DT0(DFPREG(rd));
4626 gen_stf_asi(cpu_addr, insn, 8, DFPREG(rd));
4627 break;
4628 case 0x3c: /* V9 casa */
4629 gen_cas_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4630 gen_movl_TN_reg(rd, cpu_val);
4631 break;
4632 case 0x3e: /* V9 casxa */
4633 gen_casx_asi(cpu_val, cpu_addr, cpu_src2, insn, rd);
4634 gen_movl_TN_reg(rd, cpu_val);
4635 break;
4636 #else
4637 case 0x34: /* stc */
4638 case 0x35: /* stcsr */
4639 case 0x36: /* stdcq */
4640 case 0x37: /* stdc */
4641 goto ncp_insn;
4642 #endif
4643 default:
4644 goto illegal_insn;
4645 }
4646 }
4647 else
4648 goto illegal_insn;
4649 }
4650 break;
4651 }
4652 /* default case for non jump instructions */
4653 if (dc->npc == DYNAMIC_PC) {
4654 dc->pc = DYNAMIC_PC;
4655 gen_op_next_insn();
4656 } else if (dc->npc == JUMP_PC) {
4657 /* we can do a static jump */
4658 gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
4659 dc->is_br = 1;
4660 } else {
4661 dc->pc = dc->npc;
4662 dc->npc = dc->npc + 4;
4663 }
4664 jmp_insn:
4665 return;
4666 illegal_insn:
4667 {
4668 TCGv r_const;
4669
4670 save_state(dc, cpu_cond);
4671 r_const = tcg_const_i32(TT_ILL_INSN);
4672 tcg_gen_helper_0_1(raise_exception, r_const);
4673 tcg_temp_free(r_const);
4674 dc->is_br = 1;
4675 }
4676 return;
4677 unimp_flush:
4678 {
4679 TCGv r_const;
4680
4681 save_state(dc, cpu_cond);
4682 r_const = tcg_const_i32(TT_UNIMP_FLUSH);
4683 tcg_gen_helper_0_1(raise_exception, r_const);
4684 tcg_temp_free(r_const);
4685 dc->is_br = 1;
4686 }
4687 return;
4688 #if !defined(CONFIG_USER_ONLY)
4689 priv_insn:
4690 {
4691 TCGv r_const;
4692
4693 save_state(dc, cpu_cond);
4694 r_const = tcg_const_i32(TT_PRIV_INSN);
4695 tcg_gen_helper_0_1(raise_exception, r_const);
4696 tcg_temp_free(r_const);
4697 dc->is_br = 1;
4698 }
4699 return;
4700 #endif
4701 nfpu_insn:
4702 save_state(dc, cpu_cond);
4703 gen_op_fpexception_im(FSR_FTT_UNIMPFPOP);
4704 dc->is_br = 1;
4705 return;
4706 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
4707 nfq_insn:
4708 save_state(dc, cpu_cond);
4709 gen_op_fpexception_im(FSR_FTT_SEQ_ERROR);
4710 dc->is_br = 1;
4711 return;
4712 #endif
4713 #ifndef TARGET_SPARC64
4714 ncp_insn:
4715 {
4716 TCGv r_const;
4717
4718 save_state(dc, cpu_cond);
4719 r_const = tcg_const_i32(TT_NCP_INSN);
4720 tcg_gen_helper_0_1(raise_exception, r_const);
4721 tcg_temp_free(r_const);
4722 dc->is_br = 1;
4723 }
4724 return;
4725 #endif
4726 }
4727
/* Translate one guest translation block starting at tb->pc into TCG ops.
   spc == 0: normal translation (fills tb->size / tb->icount on exit).
   spc != 0: "search PC" mode — additionally records, per generated op,
   the guest pc/npc and instruction count into the gen_opc_* arrays so a
   faulting host pc can later be mapped back to a guest pc (gen_pc_load). */
static inline void gen_intermediate_code_internal(TranslationBlock * tb,
                                                  int spc, CPUSPARCState *env)
{
    target_ulong pc_start, last_pc;
    uint16_t *gen_opc_end;
    DisasContext dc1, *dc = &dc1;
    int j, lj = -1;
    int num_insns;
    int max_insns;

    /* Seed the disassembly context from the TB: pc comes from tb->pc,
       npc is smuggled through tb->cs_base (SPARC has architectural
       delay slots, so both must be tracked). */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    dc->npc = (target_ulong) tb->cs_base;
    dc->mem_idx = cpu_mmu_index(env);
    dc->features = env->features;
    if ((dc->features & CPU_FEATURE_FLOAT)) {
        dc->fpu_enabled = cpu_fpu_enabled(env);
#if defined(CONFIG_USER_ONLY)
        /* user mode: quad-float insns are always emulated, so advertise
           FLOAT128 unconditionally */
        dc->features |= CPU_FEATURE_FLOAT128;
#endif
    } else
        dc->fpu_enabled = 0;
#ifdef TARGET_SPARC64
    dc->address_mask_32bit = env->pstate & PS_AM;
#endif
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Per-TB scratch temporaries shared by disas_sparc_insn; the
       _local variants survive across basic-block boundaries. */
    cpu_tmp0 = tcg_temp_new(TCG_TYPE_TL);
    cpu_tmp32 = tcg_temp_new(TCG_TYPE_I32);
    cpu_tmp64 = tcg_temp_new(TCG_TYPE_I64);

    cpu_dst = tcg_temp_local_new(TCG_TYPE_TL);

    // loads and stores
    cpu_val = tcg_temp_local_new(TCG_TYPE_TL);
    cpu_addr = tcg_temp_local_new(TCG_TYPE_TL);

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    do {
        /* Stop at a debugger breakpoint: flush state and raise the
           debug exception instead of translating further. */
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == dc->pc) {
                    if (dc->pc != pc_start)
                        save_state(dc, cpu_cond);
                    tcg_gen_helper_0_0(helper_debug);
                    tcg_gen_exit_tb(0);
                    dc->is_br = 1;
                    goto exit_gen_loop;
                }
            }
        }
        if (spc) {
            if (loglevel > 0)
                fprintf(logfile, "Search PC...\n");
            /* record guest pc/npc for each emitted opcode position,
               zero-filling any gap since the previous instruction */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = dc->pc;
                gen_opc_npc[lj] = dc->npc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        /* icount: the last insn of an I/O-bounded TB must run with
           io access bracketing */
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        last_pc = dc->pc;
        disas_sparc_insn(dc);
        num_insns++;

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (env->singlestep_enabled) {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
            tcg_gen_exit_tb(0);
            break;
        }
    } while ((gen_opc_ptr < gen_opc_end) &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    /* release temporaries in reverse allocation order */
    tcg_temp_free(cpu_addr);
    tcg_temp_free(cpu_val);
    tcg_temp_free(cpu_dst);
    tcg_temp_free(cpu_tmp64);
    tcg_temp_free(cpu_tmp32);
    tcg_temp_free(cpu_tmp0);
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            /* dynamic successor: store pc/npc and return to the
               main loop */
            if (dc->pc != DYNAMIC_PC)
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            save_npc(dc, cpu_cond);
            tcg_gen_exit_tb(0);
        }
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (spc) {
        /* pad the tail of the search arrays */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
#if 0
        if (loglevel > 0) {
            page_dump(logfile);
        }
#endif
        /* export the static branch targets for gen_pc_load's JUMP_PC
           resolution */
        gen_opc_jump_pc[0] = dc->jump_pc[0];
        gen_opc_jump_pc[1] = dc->jump_pc[1];
    } else {
        tb->size = last_pc + 4 - pc_start;
        tb->icount = num_insns;
    }
#ifdef DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "--------------\n");
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, last_pc + 4 - pc_start, 0);
        fprintf(logfile, "\n");
    }
#endif
}
4874
/* Normal translation entry point: generate code for tb without
   search-PC bookkeeping (spc == 0). */
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 0, env);
}
4879
/* Search-PC translation entry point: re-translate tb with per-opcode
   guest pc recording enabled (spc == 1), used to recover the guest pc
   after a fault inside generated code. */
void gen_intermediate_code_pc(CPUSPARCState * env, TranslationBlock * tb)
{
    gen_intermediate_code_internal(tb, 1, env);
}
4884
/* One-time initialization of the translator: registers the fixed TCG
   globals (env pointer, condition codes, pc/npc, global registers g1-g7)
   backed by CPUState fields, and registers all helper functions.
   Subsequent calls are no-ops (guarded by the static 'inited' flag).
   NOTE(review): not thread-safe on first call — assumed to be invoked
   once during CPU creation before any translation starts. */
void gen_intermediate_code_init(CPUSPARCState *env)
{
    unsigned int i;
    static int inited;
    static const char * const gregnames[8] = {
        NULL, // g0 not used
        "g1",
        "g2",
        "g3",
        "g4",
        "g5",
        "g6",
        "g7",
    };

    /* init various static tables */
    if (!inited) {
        inited = 1;

        /* env lives in the reserved host register AREG0; everything
           else is a memory-backed global at a fixed CPUState offset */
        cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
        cpu_regwptr = tcg_global_mem_new(TCG_TYPE_PTR, TCG_AREG0,
                                         offsetof(CPUState, regwptr),
                                         "regwptr");
#ifdef TARGET_SPARC64
        cpu_xcc = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, xcc),
                                     "xcc");
#endif
        cpu_cond = tcg_global_mem_new(TCG_TYPE_TL,
                                      TCG_AREG0, offsetof(CPUState, cond),
                                      "cond");
        cpu_cc_src = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_src),
                                        "cc_src");
        cpu_cc_src2 = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                         offsetof(CPUState, cc_src2),
                                         "cc_src2");
        cpu_cc_dst = tcg_global_mem_new(TCG_TYPE_TL,
                                        TCG_AREG0, offsetof(CPUState, cc_dst),
                                        "cc_dst");
        cpu_psr = tcg_global_mem_new(TCG_TYPE_I32,
                                     TCG_AREG0, offsetof(CPUState, psr),
                                     "psr");
        cpu_fsr = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, fsr),
                                     "fsr");
        cpu_pc = tcg_global_mem_new(TCG_TYPE_TL,
                                    TCG_AREG0, offsetof(CPUState, pc),
                                    "pc");
        cpu_npc = tcg_global_mem_new(TCG_TYPE_TL,
                                     TCG_AREG0, offsetof(CPUState, npc),
                                     "npc");
        /* g0 is hardwired to zero on SPARC, so only g1..g7 become
           TCG globals (index 0 stays unregistered) */
        for (i = 1; i < 8; i++)
            cpu_gregs[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
                                              offsetof(CPUState, gregs[i]),
                                              gregnames[i]);
        /* register helpers */
        /* X-macro trick: re-expand helper.h so each DEF_HELPER entry
           becomes a tcg_register_helper() call */
#undef DEF_HELPER
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"
    }
}
4948
4949 void gen_pc_load(CPUState *env, TranslationBlock *tb,
4950 unsigned long searched_pc, int pc_pos, void *puc)
4951 {
4952 target_ulong npc;
4953 env->pc = gen_opc_pc[pc_pos];
4954 npc = gen_opc_npc[pc_pos];
4955 if (npc == 1) {
4956 /* dynamic NPC: already stored */
4957 } else if (npc == 2) {
4958 target_ulong t2 = (target_ulong)(unsigned long)puc;
4959 /* jump PC: use T2 and the jump targets of the translation */
4960 if (t2)
4961 env->npc = gen_opc_jump_pc[0];
4962 else
4963 env->npc = gen_opc_jump_pc[1];
4964 } else {
4965 env->npc = npc;
4966 }
4967 }