]> git.proxmox.com Git - qemu.git/blob - target-alpha/translate.c
target-alpha: convert remaining arith3 functions to TCG
[qemu.git] / target-alpha / translate.c
1 /*
2 * Alpha emulation cpu translation for qemu.
3 *
4 * Copyright (c) 2007 Jocelyn Mayer
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20
21 #include <stdint.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24
25 #include "cpu.h"
26 #include "exec-all.h"
27 #include "disas.h"
28 #include "host-utils.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31 #include "qemu-common.h"
32
33 #define DO_SINGLE_STEP
34 #define GENERATE_NOP
35 #define ALPHA_DEBUG_DISAS
36 #define DO_TB_FLUSH
37
typedef struct DisasContext DisasContext;
/* Per-instruction translation state threaded through the gen_* helpers. */
struct DisasContext {
    uint64_t pc;      /* virtual PC of the instruction being translated */
    int mem_idx;      /* MMU index selecting the ld/st op tables below */
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;     /* presumably set while translating PALcode — not used
                         in this chunk; confirm against the callers */
#endif
    uint32_t amask;   /* CPU feature mask (AMASK_BWX, AMASK_FIX, ...) gating
                         optional instruction subsets */
};
47
/* global register indexes */
static TCGv cpu_env;      /* pointer to the CPUState */
static TCGv cpu_ir[31];   /* integer registers ir0..ir30; r31 reads as zero
                             and has no TCGv — callers special-case it */
static TCGv cpu_pc;

/* dyngen register indexes */
static TCGv cpu_T[2];     /* scratch values shared with the legacy dyngen ops
                             (address in T0, data in T1 for memory moves) */

/* register names */
/* Backing storage for the "irN" name strings handed to the TCG core:
   10 three-char names ("ir0".."ir9") + 21 four-char names, each with NUL. */
static char cpu_reg_names[10*4+21*5];

#include "gen-icount.h"
60
/* One-time creation of the global TCG values (env pointer, integer
   registers, PC, dyngen temporaries) and registration of the helper
   functions.  Safe to call more than once. */
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");

#if TARGET_LONG_BITS > HOST_LONG_BITS
    /* Not enough host registers for a 64-bit target: back T0/T1 with
       CPUState fields instead of fixed host registers. */
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                  offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                  offsetof(CPUState, t1), "T1");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG2, "T1");
#endif

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                       offsetof(CPUState, ir[i]), p);
        /* "irN" plus NUL is 4 bytes for N < 10, 5 bytes for N >= 10 —
           must match the sizing of cpu_reg_names above. */
        p += (i < 10) ? 4 : 5;
    }

    cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                offsetof(CPUState, pc), "pc");

    /* register helpers */
#undef DEF_HELPER
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"

    done_init = 1;
}
100
/* Emit an explicit no-op; compiles to nothing unless GENERATE_NOP is set. */
static always_inline void gen_op_nop (void)
{
#if defined(GENERATE_NOP)
    gen_op_no_op();
#endif
}
107
/* Build a 32-entry table of the per-register generated ops NAME0..NAME31
   and a dispatcher 'func' that indexes it by register number. */
#define GEN32(func, NAME) \
static GenOpFunc *NAME ## _table [32] = { \
NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
}; \
static always_inline void func (int n) \
{ \
NAME ## _table[n](); \
}
123
/* FIR moves */
/* Special hacks for fir31 */
/* fir31 reads as zero and discards writes, so slot 31 of each table maps
   loads onto a reset of the FP temporary and stores/cmov onto a no-op. */
#define gen_op_load_FT0_fir31 gen_op_reset_FT0
#define gen_op_load_FT1_fir31 gen_op_reset_FT1
#define gen_op_load_FT2_fir31 gen_op_reset_FT2
#define gen_op_store_FT0_fir31 gen_op_nop
#define gen_op_store_FT1_fir31 gen_op_nop
#define gen_op_store_FT2_fir31 gen_op_nop
#define gen_op_cmov_fir31 gen_op_nop
GEN32(gen_op_load_FT0_fir, gen_op_load_FT0_fir);
GEN32(gen_op_load_FT1_fir, gen_op_load_FT1_fir);
GEN32(gen_op_load_FT2_fir, gen_op_load_FT2_fir);
GEN32(gen_op_store_FT0_fir, gen_op_store_FT0_fir);
GEN32(gen_op_store_FT1_fir, gen_op_store_FT1_fir);
GEN32(gen_op_store_FT2_fir, gen_op_store_FT2_fir);
GEN32(gen_op_cmov_fir, gen_op_cmov_fir);
140
141 static always_inline void gen_load_fir (DisasContext *ctx, int firn, int Tn)
142 {
143 switch (Tn) {
144 case 0:
145 gen_op_load_FT0_fir(firn);
146 break;
147 case 1:
148 gen_op_load_FT1_fir(firn);
149 break;
150 case 2:
151 gen_op_load_FT2_fir(firn);
152 break;
153 }
154 }
155
156 static always_inline void gen_store_fir (DisasContext *ctx, int firn, int Tn)
157 {
158 switch (Tn) {
159 case 0:
160 gen_op_store_FT0_fir(firn);
161 break;
162 case 1:
163 gen_op_store_FT1_fir(firn);
164 break;
165 case 2:
166 gen_op_store_FT2_fir(firn);
167 break;
168 }
169 }
170
/* Memory moves */
/* Access-op dispatch tables, indexed by ctx->mem_idx. */
#if defined(CONFIG_USER_ONLY)
/* User mode: only the _raw variant exists (mem_idx is always 0). */
#define OP_LD_TABLE(width) \
static GenOpFunc *gen_op_ld##width[] = { \
&gen_op_ld##width##_raw, \
}
#define OP_ST_TABLE(width) \
static GenOpFunc *gen_op_st##width[] = { \
&gen_op_st##width##_raw, \
}
#else
/* System mode: one variant per Alpha privilege level. */
#define OP_LD_TABLE(width) \
static GenOpFunc *gen_op_ld##width[] = { \
&gen_op_ld##width##_kernel, \
&gen_op_ld##width##_executive, \
&gen_op_ld##width##_supervisor, \
&gen_op_ld##width##_user, \
}
#define OP_ST_TABLE(width) \
static GenOpFunc *gen_op_st##width[] = { \
&gen_op_st##width##_kernel, \
&gen_op_st##width##_executive, \
&gen_op_st##width##_supervisor, \
&gen_op_st##width##_user, \
}
#endif
197
/* Define gen_ld<width>/gen_st<width>: instantiate the dispatch table and a
   wrapper that emits the access op selected by ctx->mem_idx. */
#define GEN_LD(width) \
OP_LD_TABLE(width); \
static always_inline void gen_ld##width (DisasContext *ctx) \
{ \
(*gen_op_ld##width[ctx->mem_idx])(); \
}

#define GEN_ST(width) \
OP_ST_TABLE(width); \
static always_inline void gen_st##width (DisasContext *ctx) \
{ \
(*gen_op_st##width[ctx->mem_idx])(); \
}
211
/* Integer accesses: byte/word (unsigned loads), longword, quadword, the
   unaligned quadword (q_u) and load-locked/store-conditional variants. */
GEN_LD(bu);
GEN_ST(b);
GEN_LD(wu);
GEN_ST(w);
GEN_LD(l);
GEN_ST(l);
GEN_LD(q);
GEN_ST(q);
GEN_LD(q_u);
GEN_ST(q_u);
GEN_LD(l_l);
GEN_ST(l_c);
GEN_LD(q_l);
GEN_ST(q_c);

#if 0 /* currently unused */
GEN_LD(f);
GEN_ST(f);
GEN_LD(g);
GEN_ST(g);
#endif /* 0 */
/* Floating-point accesses: IEEE single (s) and double (t). */
GEN_LD(s);
GEN_ST(s);
GEN_LD(t);
GEN_ST(t);
237
/* Emit the legacy conditional-branch op.  The 64-bit fall-through PC must
   be split into two 32-bit halves because the dyngen op cannot take a
   64-bit immediate directly. */
static always_inline void _gen_op_bcond (DisasContext *ctx)
{
#if 0 // Qemu does not know how to do this...
    gen_op_bcond(ctx->pc);
#else
    gen_op_bcond(ctx->pc >> 32, ctx->pc);
#endif
}
246
247 static always_inline void gen_excp (DisasContext *ctx,
248 int exception, int error_code)
249 {
250 TCGv tmp1, tmp2;
251
252 tcg_gen_movi_i64(cpu_pc, ctx->pc);
253 tmp1 = tcg_const_i32(exception);
254 tmp2 = tcg_const_i32(error_code);
255 tcg_gen_helper_0_2(helper_excp, tmp1, tmp2);
256 tcg_temp_free(tmp2);
257 tcg_temp_free(tmp1);
258 }
259
/* Raise an illegal-opcode (OPCDEC) exception for the current insn. */
static always_inline void gen_invalid (DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
264
265 static always_inline void gen_load_mem (DisasContext *ctx,
266 void (*gen_load_op)(DisasContext *ctx),
267 int ra, int rb, int32_t disp16,
268 int clear)
269 {
270 if (ra == 31 && disp16 == 0) {
271 /* UNOP */
272 gen_op_nop();
273 } else {
274 if (rb != 31)
275 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
276 else
277 tcg_gen_movi_i64(cpu_T[0], disp16);
278 if (clear)
279 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
280 (*gen_load_op)(ctx);
281 if (ra != 31)
282 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
283 }
284 }
285
286 static always_inline void gen_store_mem (DisasContext *ctx,
287 void (*gen_store_op)(DisasContext *ctx),
288 int ra, int rb, int32_t disp16,
289 int clear)
290 {
291 if (rb != 31)
292 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
293 else
294 tcg_gen_movi_i64(cpu_T[0], disp16);
295 if (clear)
296 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
297 if (ra != 31)
298 tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
299 else
300 tcg_gen_movi_i64(cpu_T[1], 0);
301 (*gen_store_op)(ctx);
302 }
303
304 static always_inline void gen_load_fmem (DisasContext *ctx,
305 void (*gen_load_fop)(DisasContext *ctx),
306 int ra, int rb, int32_t disp16)
307 {
308 if (rb != 31)
309 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
310 else
311 tcg_gen_movi_i64(cpu_T[0], disp16);
312 (*gen_load_fop)(ctx);
313 gen_store_fir(ctx, ra, 1);
314 }
315
316 static always_inline void gen_store_fmem (DisasContext *ctx,
317 void (*gen_store_fop)(DisasContext *ctx),
318 int ra, int rb, int32_t disp16)
319 {
320 if (rb != 31)
321 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
322 else
323 tcg_gen_movi_i64(cpu_T[0], disp16);
324 gen_load_fir(ctx, ra, 1);
325 (*gen_store_fop)(ctx);
326 }
327
328 static always_inline void gen_bcond (DisasContext *ctx,
329 TCGCond cond,
330 int ra, int32_t disp16, int mask)
331 {
332 int l1, l2;
333
334 l1 = gen_new_label();
335 l2 = gen_new_label();
336 if (likely(ra != 31)) {
337 if (mask) {
338 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
339 tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
340 tcg_gen_brcondi_i64(cond, tmp, 0, l1);
341 tcg_temp_free(tmp);
342 } else
343 tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
344 } else {
345 /* Very uncommon case - Do not bother to optimize. */
346 TCGv tmp = tcg_const_i64(0);
347 tcg_gen_brcondi_i64(cond, tmp, 0, l1);
348 tcg_temp_free(tmp);
349 }
350 tcg_gen_movi_i64(cpu_pc, ctx->pc);
351 tcg_gen_br(l2);
352 gen_set_label(l1);
353 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
354 gen_set_label(l2);
355 }
356
357 static always_inline void gen_fbcond (DisasContext *ctx,
358 void (*gen_test_op)(void),
359 int ra, int32_t disp16)
360 {
361 tcg_gen_movi_i64(cpu_T[1], ctx->pc + (int64_t)(disp16 << 2));
362 gen_load_fir(ctx, ra, 0);
363 (*gen_test_op)();
364 _gen_op_bcond(ctx);
365 }
366
367 static always_inline void gen_cmov (DisasContext *ctx,
368 TCGCond inv_cond,
369 int ra, int rb, int rc,
370 int islit, uint8_t lit, int mask)
371 {
372 int l1;
373
374 if (unlikely(rc == 31))
375 return;
376
377 l1 = gen_new_label();
378
379 if (ra != 31) {
380 if (mask) {
381 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
382 tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
383 tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
384 tcg_temp_free(tmp);
385 } else
386 tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
387 } else {
388 /* Very uncommon case - Do not bother to optimize. */
389 TCGv tmp = tcg_const_i64(0);
390 tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
391 tcg_temp_free(tmp);
392 }
393
394 if (islit)
395 tcg_gen_movi_i64(cpu_ir[rc], lit);
396 else
397 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
398 gen_set_label(l1);
399 }
400
/* Unary FP arithmetic: frc = fop(frb), through the FT0 temporary. */
static always_inline void gen_farith2 (DisasContext *ctx,
                                       void (*gen_arith_fop)(void),
                                       int rb, int rc)
{
    gen_load_fir(ctx, rb, 0);
    (*gen_arith_fop)();
    gen_store_fir(ctx, rc, 0);
}
409
/* Binary FP arithmetic: frc = fop(fra, frb); operands go through FT0/FT1
   and the op leaves its result in FT0. */
static always_inline void gen_farith3 (DisasContext *ctx,
                                       void (*gen_arith_fop)(void),
                                       int ra, int rb, int rc)
{
    gen_load_fir(ctx, ra, 0);
    gen_load_fir(ctx, rb, 1);
    (*gen_arith_fop)();
    gen_store_fir(ctx, rc, 0);
}
419
/* FP conditional move: load fra/frb into FT0/FT1, run the test op, then
   conditionally move into frc (gen_op_cmov_fir presumably consumes the
   condition set by the test op — confirm in the op definitions). */
static always_inline void gen_fcmov (DisasContext *ctx,
                                     void (*gen_test_fop)(void),
                                     int ra, int rb, int rc)
{
    gen_load_fir(ctx, ra, 0);
    gen_load_fir(ctx, rb, 1);
    (*gen_test_fop)();
    gen_op_cmov_fir(rc);
}
429
/* FP-to-integer register move: ira = move_fop(frc).
   NOTE(review): the FP source is 'rc' and the integer destination 'ra' —
   confirm against the FTOIx decode in translate_one. */
static always_inline void gen_fti (DisasContext *ctx,
                                   void (*gen_move_fop)(void),
                                   int ra, int rc)
{
    gen_load_fir(ctx, rc, 0);
    (*gen_move_fop)();
    if (ra != 31)
        tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
}
439
440 static always_inline void gen_itf (DisasContext *ctx,
441 void (*gen_move_fop)(void),
442 int ra, int rc)
443 {
444 if (ra != 31)
445 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
446 else
447 tcg_gen_movi_i64(cpu_T[0], 0);
448 (*gen_move_fop)();
449 gen_store_fir(ctx, rc, 0);
450 }
451
452 /* EXTWH, EXTWH, EXTLH, EXTQH */
453 static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
454 int ra, int rb, int rc,
455 int islit, uint8_t lit)
456 {
457 if (unlikely(rc == 31))
458 return;
459
460 if (ra != 31) {
461 if (islit) {
462 if (lit != 0)
463 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
464 else
465 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
466 } else {
467 TCGv tmp1, tmp2;
468 tmp1 = tcg_temp_new(TCG_TYPE_I64);
469 tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
470 tcg_gen_shli_i64(tmp1, tmp1, 3);
471 tmp2 = tcg_const_i64(64);
472 tcg_gen_sub_i64(tmp1, tmp2, tmp1);
473 tcg_temp_free(tmp2);
474 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
475 tcg_temp_free(tmp1);
476 }
477 if (tcg_gen_ext_i64)
478 tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
479 } else
480 tcg_gen_movi_i64(cpu_ir[rc], 0);
481 }
482
483 /* EXTBL, EXTWL, EXTWL, EXTLL, EXTQL */
484 static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
485 int ra, int rb, int rc,
486 int islit, uint8_t lit)
487 {
488 if (unlikely(rc == 31))
489 return;
490
491 if (ra != 31) {
492 if (islit) {
493 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
494 } else {
495 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
496 tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
497 tcg_gen_shli_i64(tmp, tmp, 3);
498 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
499 tcg_temp_free(tmp);
500 }
501 if (tcg_gen_ext_i64)
502 tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
503 } else
504 tcg_gen_movi_i64(cpu_ir[rc], 0);
505 }
506
507 /* Code to call arith3 helpers */
508 static always_inline void gen_arith3_helper(void *helper,
509 int ra, int rb, int rc,
510 int islit, uint8_t lit)
511 {
512 if (unlikely(rc == 31))
513 return;
514
515 if (ra != 31) {
516 if (islit) {
517 TCGv tmp = tcg_const_i64(lit);
518 tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], tmp);
519 tcg_temp_free(tmp);
520 } else
521 tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
522 } else {
523 TCGv tmp1 = tcg_const_i64(0);
524 if (islit) {
525 TCGv tmp2 = tcg_const_i64(lit);
526 tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, tmp2);
527 tcg_temp_free(tmp2);
528 } else
529 tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, cpu_ir[rb]);
530 tcg_temp_free(tmp1);
531 }
532 }
533
534 static always_inline void gen_cmp(TCGCond cond,
535 int ra, int rb, int rc,
536 int islit, uint8_t lit)
537 {
538 int l1, l2;
539 TCGv tmp;
540
541 if (unlikely(rc == 31))
542 return;
543
544 l1 = gen_new_label();
545 l2 = gen_new_label();
546
547 if (ra != 31) {
548 tmp = tcg_temp_new(TCG_TYPE_I64);
549 tcg_gen_mov_i64(tmp, cpu_ir[ra]);
550 } else
551 tmp = tcg_const_i64(0);
552 if (islit)
553 tcg_gen_brcondi_i64(cond, tmp, lit, l1);
554 else
555 tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);
556
557 tcg_gen_movi_i64(cpu_ir[rc], 0);
558 tcg_gen_br(l2);
559 gen_set_label(l1);
560 tcg_gen_movi_i64(cpu_ir[rc], 1);
561 gen_set_label(l2);
562 }
563
564 static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
565 {
566 uint32_t palcode;
567 int32_t disp21, disp16, disp12;
568 uint16_t fn11, fn16;
569 uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
570 uint8_t lit;
571 int ret;
572
573 /* Decode all instruction fields */
574 opc = insn >> 26;
575 ra = (insn >> 21) & 0x1F;
576 rb = (insn >> 16) & 0x1F;
577 rc = insn & 0x1F;
578 sbz = (insn >> 13) & 0x07;
579 islit = (insn >> 12) & 1;
580 if (rb == 31 && !islit) {
581 islit = 1;
582 lit = 0;
583 } else
584 lit = (insn >> 13) & 0xFF;
585 palcode = insn & 0x03FFFFFF;
586 disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
587 disp16 = (int16_t)(insn & 0x0000FFFF);
588 disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
589 fn16 = insn & 0x0000FFFF;
590 fn11 = (insn >> 5) & 0x000007FF;
591 fpfn = fn11 & 0x3F;
592 fn7 = (insn >> 5) & 0x0000007F;
593 fn2 = (insn >> 5) & 0x00000003;
594 ret = 0;
595 #if defined ALPHA_DEBUG_DISAS
596 if (logfile != NULL) {
597 fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
598 opc, ra, rb, rc, disp16);
599 }
600 #endif
601 switch (opc) {
602 case 0x00:
603 /* CALL_PAL */
604 if (palcode >= 0x80 && palcode < 0xC0) {
605 /* Unprivileged PAL call */
606 gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
607 #if !defined (CONFIG_USER_ONLY)
608 } else if (palcode < 0x40) {
609 /* Privileged PAL code */
610 if (ctx->mem_idx & 1)
611 goto invalid_opc;
612 else
613 gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
614 #endif
615 } else {
616 /* Invalid PAL call */
617 goto invalid_opc;
618 }
619 ret = 3;
620 break;
621 case 0x01:
622 /* OPC01 */
623 goto invalid_opc;
624 case 0x02:
625 /* OPC02 */
626 goto invalid_opc;
627 case 0x03:
628 /* OPC03 */
629 goto invalid_opc;
630 case 0x04:
631 /* OPC04 */
632 goto invalid_opc;
633 case 0x05:
634 /* OPC05 */
635 goto invalid_opc;
636 case 0x06:
637 /* OPC06 */
638 goto invalid_opc;
639 case 0x07:
640 /* OPC07 */
641 goto invalid_opc;
642 case 0x08:
643 /* LDA */
644 if (likely(ra != 31)) {
645 if (rb != 31)
646 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
647 else
648 tcg_gen_movi_i64(cpu_ir[ra], disp16);
649 }
650 break;
651 case 0x09:
652 /* LDAH */
653 if (likely(ra != 31)) {
654 if (rb != 31)
655 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
656 else
657 tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
658 }
659 break;
660 case 0x0A:
661 /* LDBU */
662 if (!(ctx->amask & AMASK_BWX))
663 goto invalid_opc;
664 gen_load_mem(ctx, &gen_ldbu, ra, rb, disp16, 0);
665 break;
666 case 0x0B:
667 /* LDQ_U */
668 gen_load_mem(ctx, &gen_ldq_u, ra, rb, disp16, 1);
669 break;
670 case 0x0C:
671 /* LDWU */
672 if (!(ctx->amask & AMASK_BWX))
673 goto invalid_opc;
674 gen_load_mem(ctx, &gen_ldwu, ra, rb, disp16, 0);
675 break;
676 case 0x0D:
677 /* STW */
678 if (!(ctx->amask & AMASK_BWX))
679 goto invalid_opc;
680 gen_store_mem(ctx, &gen_stw, ra, rb, disp16, 0);
681 break;
682 case 0x0E:
683 /* STB */
684 if (!(ctx->amask & AMASK_BWX))
685 goto invalid_opc;
686 gen_store_mem(ctx, &gen_stb, ra, rb, disp16, 0);
687 break;
688 case 0x0F:
689 /* STQ_U */
690 gen_store_mem(ctx, &gen_stq_u, ra, rb, disp16, 1);
691 break;
692 case 0x10:
693 switch (fn7) {
694 case 0x00:
695 /* ADDL */
696 if (likely(rc != 31)) {
697 if (ra != 31) {
698 if (islit) {
699 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
700 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
701 } else {
702 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
703 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
704 }
705 } else {
706 if (islit)
707 tcg_gen_movi_i64(cpu_ir[rc], lit);
708 else
709 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
710 }
711 }
712 break;
713 case 0x02:
714 /* S4ADDL */
715 if (likely(rc != 31)) {
716 if (ra != 31) {
717 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
718 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
719 if (islit)
720 tcg_gen_addi_i64(tmp, tmp, lit);
721 else
722 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
723 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
724 tcg_temp_free(tmp);
725 } else {
726 if (islit)
727 tcg_gen_movi_i64(cpu_ir[rc], lit);
728 else
729 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
730 }
731 }
732 break;
733 case 0x09:
734 /* SUBL */
735 if (likely(rc != 31)) {
736 if (ra != 31) {
737 if (islit)
738 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
739 else
740 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
741 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
742 } else {
743 if (islit)
744 tcg_gen_movi_i64(cpu_ir[rc], -lit);
745 else {
746 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
747 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
748 }
749 }
750 break;
751 case 0x0B:
752 /* S4SUBL */
753 if (likely(rc != 31)) {
754 if (ra != 31) {
755 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
756 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
757 if (islit)
758 tcg_gen_subi_i64(tmp, tmp, lit);
759 else
760 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
761 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
762 tcg_temp_free(tmp);
763 } else {
764 if (islit)
765 tcg_gen_movi_i64(cpu_ir[rc], -lit);
766 else {
767 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
768 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
769 }
770 }
771 }
772 break;
773 case 0x0F:
774 /* CMPBGE */
775 gen_arith3_helper(helper_cmpbge, ra, rb, rc, islit, lit);
776 break;
777 case 0x12:
778 /* S8ADDL */
779 if (likely(rc != 31)) {
780 if (ra != 31) {
781 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
782 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
783 if (islit)
784 tcg_gen_addi_i64(tmp, tmp, lit);
785 else
786 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
787 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
788 tcg_temp_free(tmp);
789 } else {
790 if (islit)
791 tcg_gen_movi_i64(cpu_ir[rc], lit);
792 else
793 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
794 }
795 }
796 break;
797 case 0x1B:
798 /* S8SUBL */
799 if (likely(rc != 31)) {
800 if (ra != 31) {
801 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
802 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
803 if (islit)
804 tcg_gen_subi_i64(tmp, tmp, lit);
805 else
806 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
807 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
808 tcg_temp_free(tmp);
809 } else {
810 if (islit)
811 tcg_gen_movi_i64(cpu_ir[rc], -lit);
812 else
813 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
814 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
815 }
816 }
817 }
818 break;
819 case 0x1D:
820 /* CMPULT */
821 gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
822 break;
823 case 0x20:
824 /* ADDQ */
825 if (likely(rc != 31)) {
826 if (ra != 31) {
827 if (islit)
828 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
829 else
830 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
831 } else {
832 if (islit)
833 tcg_gen_movi_i64(cpu_ir[rc], lit);
834 else
835 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
836 }
837 }
838 break;
839 case 0x22:
840 /* S4ADDQ */
841 if (likely(rc != 31)) {
842 if (ra != 31) {
843 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
844 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
845 if (islit)
846 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
847 else
848 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
849 tcg_temp_free(tmp);
850 } else {
851 if (islit)
852 tcg_gen_movi_i64(cpu_ir[rc], lit);
853 else
854 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
855 }
856 }
857 break;
858 case 0x29:
859 /* SUBQ */
860 if (likely(rc != 31)) {
861 if (ra != 31) {
862 if (islit)
863 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
864 else
865 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
866 } else {
867 if (islit)
868 tcg_gen_movi_i64(cpu_ir[rc], -lit);
869 else
870 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
871 }
872 }
873 break;
874 case 0x2B:
875 /* S4SUBQ */
876 if (likely(rc != 31)) {
877 if (ra != 31) {
878 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
879 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
880 if (islit)
881 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
882 else
883 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
884 tcg_temp_free(tmp);
885 } else {
886 if (islit)
887 tcg_gen_movi_i64(cpu_ir[rc], -lit);
888 else
889 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
890 }
891 }
892 break;
893 case 0x2D:
894 /* CMPEQ */
895 gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
896 break;
897 case 0x32:
898 /* S8ADDQ */
899 if (likely(rc != 31)) {
900 if (ra != 31) {
901 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
902 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
903 if (islit)
904 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
905 else
906 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
907 tcg_temp_free(tmp);
908 } else {
909 if (islit)
910 tcg_gen_movi_i64(cpu_ir[rc], lit);
911 else
912 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
913 }
914 }
915 break;
916 case 0x3B:
917 /* S8SUBQ */
918 if (likely(rc != 31)) {
919 if (ra != 31) {
920 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
921 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
922 if (islit)
923 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
924 else
925 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
926 tcg_temp_free(tmp);
927 } else {
928 if (islit)
929 tcg_gen_movi_i64(cpu_ir[rc], -lit);
930 else
931 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
932 }
933 }
934 break;
935 case 0x3D:
936 /* CMPULE */
937 gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
938 break;
939 case 0x40:
940 /* ADDL/V */
941 gen_arith3_helper(helper_addlv, ra, rb, rc, islit, lit);
942 break;
943 case 0x49:
944 /* SUBL/V */
945 gen_arith3_helper(helper_sublv, ra, rb, rc, islit, lit);
946 break;
947 case 0x4D:
948 /* CMPLT */
949 gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
950 break;
951 case 0x60:
952 /* ADDQ/V */
953 gen_arith3_helper(helper_addqv, ra, rb, rc, islit, lit);
954 break;
955 case 0x69:
956 /* SUBQ/V */
957 gen_arith3_helper(helper_subqv, ra, rb, rc, islit, lit);
958 break;
959 case 0x6D:
960 /* CMPLE */
961 gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
962 break;
963 default:
964 goto invalid_opc;
965 }
966 break;
967 case 0x11:
968 switch (fn7) {
969 case 0x00:
970 /* AND */
971 if (likely(rc != 31)) {
972 if (ra == 31)
973 tcg_gen_movi_i64(cpu_ir[rc], 0);
974 else if (islit)
975 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
976 else
977 tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
978 }
979 break;
980 case 0x08:
981 /* BIC */
982 if (likely(rc != 31)) {
983 if (ra != 31) {
984 if (islit)
985 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
986 else {
987 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
988 tcg_gen_not_i64(tmp, cpu_ir[rb]);
989 tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], tmp);
990 tcg_temp_free(tmp);
991 }
992 } else
993 tcg_gen_movi_i64(cpu_ir[rc], 0);
994 }
995 break;
996 case 0x14:
997 /* CMOVLBS */
998 gen_cmov(ctx, TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
999 break;
1000 case 0x16:
1001 /* CMOVLBC */
1002 gen_cmov(ctx, TCG_COND_NE, ra, rb, rc, islit, lit, 1);
1003 break;
1004 case 0x20:
1005 /* BIS */
1006 if (likely(rc != 31)) {
1007 if (ra != 31) {
1008 if (islit)
1009 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1010 else
1011 tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1012 } else {
1013 if (islit)
1014 tcg_gen_movi_i64(cpu_ir[rc], lit);
1015 else
1016 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1017 }
1018 }
1019 break;
1020 case 0x24:
1021 /* CMOVEQ */
1022 gen_cmov(ctx, TCG_COND_NE, ra, rb, rc, islit, lit, 0);
1023 break;
1024 case 0x26:
1025 /* CMOVNE */
1026 gen_cmov(ctx, TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
1027 break;
1028 case 0x28:
1029 /* ORNOT */
1030 if (likely(rc != 31)) {
1031 if (ra != 31) {
1032 if (islit)
1033 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1034 else {
1035 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
1036 tcg_gen_not_i64(tmp, cpu_ir[rb]);
1037 tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1038 tcg_temp_free(tmp);
1039 }
1040 } else {
1041 if (islit)
1042 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1043 else
1044 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1045 }
1046 }
1047 break;
1048 case 0x40:
1049 /* XOR */
1050 if (likely(rc != 31)) {
1051 if (ra != 31) {
1052 if (islit)
1053 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1054 else
1055 tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1056 } else {
1057 if (islit)
1058 tcg_gen_movi_i64(cpu_ir[rc], lit);
1059 else
1060 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1061 }
1062 }
1063 break;
1064 case 0x44:
1065 /* CMOVLT */
1066 gen_cmov(ctx, TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1067 break;
1068 case 0x46:
1069 /* CMOVGE */
1070 gen_cmov(ctx, TCG_COND_LT, ra, rb, rc, islit, lit, 0);
1071 break;
1072 case 0x48:
1073 /* EQV */
1074 if (likely(rc != 31)) {
1075 if (ra != 31) {
1076 if (islit)
1077 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1078 else {
1079 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
1080 tcg_gen_not_i64(tmp, cpu_ir[rb]);
1081 tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1082 tcg_temp_free(tmp);
1083 }
1084 } else {
1085 if (islit)
1086 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1087 else
1088 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1089 }
1090 }
1091 break;
1092 case 0x61:
1093 /* AMASK */
1094 if (likely(rc != 31)) {
1095 if (islit)
1096 tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
1097 else
1098 tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
1099 }
1100 break;
1101 case 0x64:
1102 /* CMOVLE */
1103 gen_cmov(ctx, TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1104 break;
1105 case 0x66:
1106 /* CMOVGT */
1107 gen_cmov(ctx, TCG_COND_LE, ra, rb, rc, islit, lit, 0);
1108 break;
1109 case 0x6C:
1110 /* IMPLVER */
1111 if (rc != 31)
1112 tcg_gen_helper_1_0(helper_load_implver, cpu_ir[rc]);
1113 break;
1114 default:
1115 goto invalid_opc;
1116 }
1117 break;
1118 case 0x12:
1119 switch (fn7) {
1120 case 0x02:
1121 /* MSKBL */
1122 gen_arith3_helper(helper_mskbl, ra, rb, rc, islit, lit);
1123 break;
1124 case 0x06:
1125 /* EXTBL */
1126 gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
1127 break;
1128 case 0x0B:
1129 /* INSBL */
1130 gen_arith3_helper(helper_insbl, ra, rb, rc, islit, lit);
1131 break;
1132 case 0x12:
1133 /* MSKWL */
1134 gen_arith3_helper(helper_mskwl, ra, rb, rc, islit, lit);
1135 break;
1136 case 0x16:
1137 /* EXTWL */
1138 gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1139 break;
1140 case 0x1B:
1141 /* INSWL */
1142 gen_arith3_helper(helper_inswl, ra, rb, rc, islit, lit);
1143 break;
1144 case 0x22:
1145 /* MSKLL */
1146 gen_arith3_helper(helper_mskll, ra, rb, rc, islit, lit);
1147 break;
1148 case 0x26:
1149 /* EXTLL */
1150 gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1151 break;
1152 case 0x2B:
1153 /* INSLL */
1154 gen_arith3_helper(helper_insll, ra, rb, rc, islit, lit);
1155 break;
1156 case 0x30:
1157 /* ZAP */
1158 gen_arith3_helper(helper_zap, ra, rb, rc, islit, lit);
1159 break;
1160 case 0x31:
1161 /* ZAPNOT */
1162 gen_arith3_helper(helper_zapnot, ra, rb, rc, islit, lit);
1163 break;
1164 case 0x32:
1165 /* MSKQL */
1166 gen_arith3_helper(helper_mskql, ra, rb, rc, islit, lit);
1167 break;
1168 case 0x34:
1169 /* SRL */
1170 if (likely(rc != 31)) {
1171 if (ra != 31) {
1172 if (islit)
1173 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1174 else {
1175 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1176 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1177 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1178 tcg_temp_free(shift);
1179 }
1180 } else
1181 tcg_gen_movi_i64(cpu_ir[rc], 0);
1182 }
1183 break;
1184 case 0x36:
1185 /* EXTQL */
1186 gen_ext_l(NULL, ra, rb, rc, islit, lit);
1187 break;
1188 case 0x39:
1189 /* SLL */
1190 if (likely(rc != 31)) {
1191 if (ra != 31) {
1192 if (islit)
1193 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1194 else {
1195 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1196 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1197 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1198 tcg_temp_free(shift);
1199 }
1200 } else
1201 tcg_gen_movi_i64(cpu_ir[rc], 0);
1202 }
1203 break;
1204 case 0x3B:
1205 /* INSQL */
1206 gen_arith3_helper(helper_insql, ra, rb, rc, islit, lit);
1207 break;
1208 case 0x3C:
1209 /* SRA */
1210 if (likely(rc != 31)) {
1211 if (ra != 31) {
1212 if (islit)
1213 tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1214 else {
1215 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1216 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1217 tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1218 tcg_temp_free(shift);
1219 }
1220 } else
1221 tcg_gen_movi_i64(cpu_ir[rc], 0);
1222 }
1223 break;
1224 case 0x52:
1225 /* MSKWH */
1226 gen_arith3_helper(helper_mskwh, ra, rb, rc, islit, lit);
1227 break;
1228 case 0x57:
1229 /* INSWH */
1230 gen_arith3_helper(helper_inswh, ra, rb, rc, islit, lit);
1231 break;
1232 case 0x5A:
1233 /* EXTWH */
1234 gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1235 break;
1236 case 0x62:
1237 /* MSKLH */
1238 gen_arith3_helper(helper_msklh, ra, rb, rc, islit, lit);
1239 break;
1240 case 0x67:
1241 /* INSLH */
1242 gen_arith3_helper(helper_inslh, ra, rb, rc, islit, lit);
1243 break;
1244 case 0x6A:
1245 /* EXTLH */
1246 gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1247 break;
1248 case 0x72:
1249 /* MSKQH */
1250 gen_arith3_helper(helper_mskqh, ra, rb, rc, islit, lit);
1251 break;
1252 case 0x77:
1253 /* INSQH */
1254 gen_arith3_helper(helper_insqh, ra, rb, rc, islit, lit);
1255 break;
1256 case 0x7A:
1257 /* EXTQH */
1258 gen_ext_h(NULL, ra, rb, rc, islit, lit);
1259 break;
1260 default:
1261 goto invalid_opc;
1262 }
1263 break;
1264 case 0x13:
1265 switch (fn7) {
1266 case 0x00:
1267 /* MULL */
1268 if (likely(rc != 31)) {
1269 if (ra == 31)
1270 tcg_gen_movi_i64(cpu_ir[rc], 0);
1271 else {
1272 if (islit)
1273 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1274 else
1275 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1276 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1277 }
1278 }
1279 break;
1280 case 0x20:
1281 /* MULQ */
1282 if (likely(rc != 31)) {
1283 if (ra == 31)
1284 tcg_gen_movi_i64(cpu_ir[rc], 0);
1285 else if (islit)
1286 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1287 else
1288 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1289 }
1290 break;
1291 case 0x30:
1292 /* UMULH */
1293 gen_arith3_helper(helper_umulh, ra, rb, rc, islit, lit);
1294 break;
1295 case 0x40:
1296 /* MULL/V */
1297 gen_arith3_helper(helper_mullv, ra, rb, rc, islit, lit);
1298 break;
1299 case 0x60:
1300 /* MULQ/V */
1301 gen_arith3_helper(helper_mulqv, ra, rb, rc, islit, lit);
1302 break;
1303 default:
1304 goto invalid_opc;
1305 }
1306 break;
1307 case 0x14:
1308 switch (fpfn) { /* f11 & 0x3F */
1309 case 0x04:
1310 /* ITOFS */
1311 if (!(ctx->amask & AMASK_FIX))
1312 goto invalid_opc;
1313 gen_itf(ctx, &gen_op_itofs, ra, rc);
1314 break;
1315 case 0x0A:
1316 /* SQRTF */
1317 if (!(ctx->amask & AMASK_FIX))
1318 goto invalid_opc;
1319 gen_farith2(ctx, &gen_op_sqrtf, rb, rc);
1320 break;
1321 case 0x0B:
1322 /* SQRTS */
1323 if (!(ctx->amask & AMASK_FIX))
1324 goto invalid_opc;
1325 gen_farith2(ctx, &gen_op_sqrts, rb, rc);
1326 break;
1327 case 0x14:
1328 /* ITOFF */
1329 if (!(ctx->amask & AMASK_FIX))
1330 goto invalid_opc;
1331 #if 0 // TODO
1332 gen_itf(ctx, &gen_op_itoff, ra, rc);
1333 #else
1334 goto invalid_opc;
1335 #endif
1336 break;
1337 case 0x24:
1338 /* ITOFT */
1339 if (!(ctx->amask & AMASK_FIX))
1340 goto invalid_opc;
1341 gen_itf(ctx, &gen_op_itoft, ra, rc);
1342 break;
1343 case 0x2A:
1344 /* SQRTG */
1345 if (!(ctx->amask & AMASK_FIX))
1346 goto invalid_opc;
1347 gen_farith2(ctx, &gen_op_sqrtg, rb, rc);
1348 break;
1349 case 0x02B:
1350 /* SQRTT */
1351 if (!(ctx->amask & AMASK_FIX))
1352 goto invalid_opc;
1353 gen_farith2(ctx, &gen_op_sqrtt, rb, rc);
1354 break;
1355 default:
1356 goto invalid_opc;
1357 }
1358 break;
1359 case 0x15:
1360 /* VAX floating point */
1361 /* XXX: rounding mode and trap are ignored (!) */
1362 switch (fpfn) { /* f11 & 0x3F */
1363 case 0x00:
1364 /* ADDF */
1365 gen_farith3(ctx, &gen_op_addf, ra, rb, rc);
1366 break;
1367 case 0x01:
1368 /* SUBF */
1369 gen_farith3(ctx, &gen_op_subf, ra, rb, rc);
1370 break;
1371 case 0x02:
1372 /* MULF */
1373 gen_farith3(ctx, &gen_op_mulf, ra, rb, rc);
1374 break;
1375 case 0x03:
1376 /* DIVF */
1377 gen_farith3(ctx, &gen_op_divf, ra, rb, rc);
1378 break;
1379 case 0x1E:
1380 /* CVTDG */
1381 #if 0 // TODO
1382 gen_farith2(ctx, &gen_op_cvtdg, rb, rc);
1383 #else
1384 goto invalid_opc;
1385 #endif
1386 break;
1387 case 0x20:
1388 /* ADDG */
1389 gen_farith3(ctx, &gen_op_addg, ra, rb, rc);
1390 break;
1391 case 0x21:
1392 /* SUBG */
1393 gen_farith3(ctx, &gen_op_subg, ra, rb, rc);
1394 break;
1395 case 0x22:
1396 /* MULG */
1397 gen_farith3(ctx, &gen_op_mulg, ra, rb, rc);
1398 break;
1399 case 0x23:
1400 /* DIVG */
1401 gen_farith3(ctx, &gen_op_divg, ra, rb, rc);
1402 break;
1403 case 0x25:
1404 /* CMPGEQ */
1405 gen_farith3(ctx, &gen_op_cmpgeq, ra, rb, rc);
1406 break;
1407 case 0x26:
1408 /* CMPGLT */
1409 gen_farith3(ctx, &gen_op_cmpglt, ra, rb, rc);
1410 break;
1411 case 0x27:
1412 /* CMPGLE */
1413 gen_farith3(ctx, &gen_op_cmpgle, ra, rb, rc);
1414 break;
1415 case 0x2C:
1416 /* CVTGF */
1417 gen_farith2(ctx, &gen_op_cvtgf, rb, rc);
1418 break;
1419 case 0x2D:
1420 /* CVTGD */
1421 #if 0 // TODO
1422 gen_farith2(ctx, &gen_op_cvtgd, rb, rc);
1423 #else
1424 goto invalid_opc;
1425 #endif
1426 break;
1427 case 0x2F:
1428 /* CVTGQ */
1429 gen_farith2(ctx, &gen_op_cvtgq, rb, rc);
1430 break;
1431 case 0x3C:
1432 /* CVTQF */
1433 gen_farith2(ctx, &gen_op_cvtqf, rb, rc);
1434 break;
1435 case 0x3E:
1436 /* CVTQG */
1437 gen_farith2(ctx, &gen_op_cvtqg, rb, rc);
1438 break;
1439 default:
1440 goto invalid_opc;
1441 }
1442 break;
1443 case 0x16:
1444 /* IEEE floating-point */
1445 /* XXX: rounding mode and traps are ignored (!) */
1446 switch (fpfn) { /* f11 & 0x3F */
1447 case 0x00:
1448 /* ADDS */
1449 gen_farith3(ctx, &gen_op_adds, ra, rb, rc);
1450 break;
1451 case 0x01:
1452 /* SUBS */
1453 gen_farith3(ctx, &gen_op_subs, ra, rb, rc);
1454 break;
1455 case 0x02:
1456 /* MULS */
1457 gen_farith3(ctx, &gen_op_muls, ra, rb, rc);
1458 break;
1459 case 0x03:
1460 /* DIVS */
1461 gen_farith3(ctx, &gen_op_divs, ra, rb, rc);
1462 break;
1463 case 0x20:
1464 /* ADDT */
1465 gen_farith3(ctx, &gen_op_addt, ra, rb, rc);
1466 break;
1467 case 0x21:
1468 /* SUBT */
1469 gen_farith3(ctx, &gen_op_subt, ra, rb, rc);
1470 break;
1471 case 0x22:
1472 /* MULT */
1473 gen_farith3(ctx, &gen_op_mult, ra, rb, rc);
1474 break;
1475 case 0x23:
1476 /* DIVT */
1477 gen_farith3(ctx, &gen_op_divt, ra, rb, rc);
1478 break;
1479 case 0x24:
1480 /* CMPTUN */
1481 gen_farith3(ctx, &gen_op_cmptun, ra, rb, rc);
1482 break;
1483 case 0x25:
1484 /* CMPTEQ */
1485 gen_farith3(ctx, &gen_op_cmpteq, ra, rb, rc);
1486 break;
1487 case 0x26:
1488 /* CMPTLT */
1489 gen_farith3(ctx, &gen_op_cmptlt, ra, rb, rc);
1490 break;
1491 case 0x27:
1492 /* CMPTLE */
1493 gen_farith3(ctx, &gen_op_cmptle, ra, rb, rc);
1494 break;
1495 case 0x2C:
1496 /* XXX: incorrect */
1497 if (fn11 == 0x2AC) {
1498 /* CVTST */
1499 gen_farith2(ctx, &gen_op_cvtst, rb, rc);
1500 } else {
1501 /* CVTTS */
1502 gen_farith2(ctx, &gen_op_cvtts, rb, rc);
1503 }
1504 break;
1505 case 0x2F:
1506 /* CVTTQ */
1507 gen_farith2(ctx, &gen_op_cvttq, rb, rc);
1508 break;
1509 case 0x3C:
1510 /* CVTQS */
1511 gen_farith2(ctx, &gen_op_cvtqs, rb, rc);
1512 break;
1513 case 0x3E:
1514 /* CVTQT */
1515 gen_farith2(ctx, &gen_op_cvtqt, rb, rc);
1516 break;
1517 default:
1518 goto invalid_opc;
1519 }
1520 break;
1521 case 0x17:
1522 switch (fn11) {
1523 case 0x010:
1524 /* CVTLQ */
1525 gen_farith2(ctx, &gen_op_cvtlq, rb, rc);
1526 break;
1527 case 0x020:
1528 /* CPYS */
1529 if (ra == rb) {
1530 if (ra == 31 && rc == 31) {
1531 /* FNOP */
1532 gen_op_nop();
1533 } else {
1534 /* FMOV */
1535 gen_load_fir(ctx, rb, 0);
1536 gen_store_fir(ctx, rc, 0);
1537 }
1538 } else {
1539 gen_farith3(ctx, &gen_op_cpys, ra, rb, rc);
1540 }
1541 break;
1542 case 0x021:
1543 /* CPYSN */
1544 gen_farith2(ctx, &gen_op_cpysn, rb, rc);
1545 break;
1546 case 0x022:
1547 /* CPYSE */
1548 gen_farith2(ctx, &gen_op_cpyse, rb, rc);
1549 break;
1550 case 0x024:
1551 /* MT_FPCR */
1552 gen_load_fir(ctx, ra, 0);
1553 gen_op_store_fpcr();
1554 break;
1555 case 0x025:
1556 /* MF_FPCR */
1557 gen_op_load_fpcr();
1558 gen_store_fir(ctx, ra, 0);
1559 break;
1560 case 0x02A:
1561 /* FCMOVEQ */
1562 gen_fcmov(ctx, &gen_op_cmpfeq, ra, rb, rc);
1563 break;
1564 case 0x02B:
1565 /* FCMOVNE */
1566 gen_fcmov(ctx, &gen_op_cmpfne, ra, rb, rc);
1567 break;
1568 case 0x02C:
1569 /* FCMOVLT */
1570 gen_fcmov(ctx, &gen_op_cmpflt, ra, rb, rc);
1571 break;
1572 case 0x02D:
1573 /* FCMOVGE */
1574 gen_fcmov(ctx, &gen_op_cmpfge, ra, rb, rc);
1575 break;
1576 case 0x02E:
1577 /* FCMOVLE */
1578 gen_fcmov(ctx, &gen_op_cmpfle, ra, rb, rc);
1579 break;
1580 case 0x02F:
1581 /* FCMOVGT */
1582 gen_fcmov(ctx, &gen_op_cmpfgt, ra, rb, rc);
1583 break;
1584 case 0x030:
1585 /* CVTQL */
1586 gen_farith2(ctx, &gen_op_cvtql, rb, rc);
1587 break;
1588 case 0x130:
1589 /* CVTQL/V */
1590 gen_farith2(ctx, &gen_op_cvtqlv, rb, rc);
1591 break;
1592 case 0x530:
1593 /* CVTQL/SV */
1594 gen_farith2(ctx, &gen_op_cvtqlsv, rb, rc);
1595 break;
1596 default:
1597 goto invalid_opc;
1598 }
1599 break;
1600 case 0x18:
1601 switch ((uint16_t)disp16) {
1602 case 0x0000:
1603 /* TRAPB */
1604 /* No-op. Just exit from the current tb */
1605 ret = 2;
1606 break;
1607 case 0x0400:
1608 /* EXCB */
1609 /* No-op. Just exit from the current tb */
1610 ret = 2;
1611 break;
1612 case 0x4000:
1613 /* MB */
1614 /* No-op */
1615 break;
1616 case 0x4400:
1617 /* WMB */
1618 /* No-op */
1619 break;
1620 case 0x8000:
1621 /* FETCH */
1622 /* No-op */
1623 break;
1624 case 0xA000:
1625 /* FETCH_M */
1626 /* No-op */
1627 break;
1628 case 0xC000:
1629 /* RPCC */
1630 if (ra != 31)
1631 tcg_gen_helper_1_0(helper_load_pcc, cpu_ir[ra]);
1632 break;
1633 case 0xE000:
1634 /* RC */
1635 if (ra != 31)
1636 tcg_gen_helper_1_0(helper_rc, cpu_ir[ra]);
1637 break;
1638 case 0xE800:
1639 /* ECB */
1640 /* XXX: TODO: evict tb cache at address rb */
1641 #if 0
1642 ret = 2;
1643 #else
1644 goto invalid_opc;
1645 #endif
1646 break;
1647 case 0xF000:
1648 /* RS */
1649 if (ra != 31)
1650 tcg_gen_helper_1_0(helper_rs, cpu_ir[ra]);
1651 break;
1652 case 0xF800:
1653 /* WH64 */
1654 /* No-op */
1655 break;
1656 default:
1657 goto invalid_opc;
1658 }
1659 break;
1660 case 0x19:
1661 /* HW_MFPR (PALcode) */
1662 #if defined (CONFIG_USER_ONLY)
1663 goto invalid_opc;
1664 #else
1665 if (!ctx->pal_mode)
1666 goto invalid_opc;
1667 gen_op_mfpr(insn & 0xFF);
1668 if (ra != 31)
1669 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1670 break;
1671 #endif
1672 case 0x1A:
1673 if (ra != 31)
1674 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1675 if (rb != 31)
1676 tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1677 else
1678 tcg_gen_movi_i64(cpu_pc, 0);
1679 /* Those four jumps only differ by the branch prediction hint */
1680 switch (fn2) {
1681 case 0x0:
1682 /* JMP */
1683 break;
1684 case 0x1:
1685 /* JSR */
1686 break;
1687 case 0x2:
1688 /* RET */
1689 break;
1690 case 0x3:
1691 /* JSR_COROUTINE */
1692 break;
1693 }
1694 ret = 1;
1695 break;
1696 case 0x1B:
1697 /* HW_LD (PALcode) */
1698 #if defined (CONFIG_USER_ONLY)
1699 goto invalid_opc;
1700 #else
1701 if (!ctx->pal_mode)
1702 goto invalid_opc;
1703 if (rb != 31)
1704 tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
1705 else
1706 tcg_gen_movi_i64(cpu_T[0], 0);
1707 tcg_gen_movi_i64(cpu_T[1], disp12);
1708 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1709 switch ((insn >> 12) & 0xF) {
1710 case 0x0:
1711 /* Longword physical access */
1712 gen_op_ldl_raw();
1713 break;
1714 case 0x1:
1715 /* Quadword physical access */
1716 gen_op_ldq_raw();
1717 break;
1718 case 0x2:
1719 /* Longword physical access with lock */
1720 gen_op_ldl_l_raw();
1721 break;
1722 case 0x3:
1723 /* Quadword physical access with lock */
1724 gen_op_ldq_l_raw();
1725 break;
1726 case 0x4:
1727 /* Longword virtual PTE fetch */
1728 gen_op_ldl_kernel();
1729 break;
1730 case 0x5:
1731 /* Quadword virtual PTE fetch */
1732 gen_op_ldq_kernel();
1733 break;
1734 case 0x6:
1735 /* Invalid */
1736 goto invalid_opc;
1737 case 0x7:
1738 /* Invalid */
1739 goto invalid_opc;
1740 case 0x8:
1741 /* Longword virtual access */
1742 gen_op_ld_phys_to_virt();
1743 gen_op_ldl_raw();
1744 break;
1745 case 0x9:
1746 /* Quadword virtual access */
1747 gen_op_ld_phys_to_virt();
1748 gen_op_ldq_raw();
1749 break;
1750 case 0xA:
1751 /* Longword virtual access with protection check */
1752 gen_ldl(ctx);
1753 break;
1754 case 0xB:
1755 /* Quadword virtual access with protection check */
1756 gen_ldq(ctx);
1757 break;
1758 case 0xC:
1759 /* Longword virtual access with altenate access mode */
1760 gen_op_set_alt_mode();
1761 gen_op_ld_phys_to_virt();
1762 gen_op_ldl_raw();
1763 gen_op_restore_mode();
1764 break;
1765 case 0xD:
1766 /* Quadword virtual access with altenate access mode */
1767 gen_op_set_alt_mode();
1768 gen_op_ld_phys_to_virt();
1769 gen_op_ldq_raw();
1770 gen_op_restore_mode();
1771 break;
1772 case 0xE:
1773 /* Longword virtual access with alternate access mode and
1774 * protection checks
1775 */
1776 gen_op_set_alt_mode();
1777 gen_op_ldl_data();
1778 gen_op_restore_mode();
1779 break;
1780 case 0xF:
1781 /* Quadword virtual access with alternate access mode and
1782 * protection checks
1783 */
1784 gen_op_set_alt_mode();
1785 gen_op_ldq_data();
1786 gen_op_restore_mode();
1787 break;
1788 }
1789 if (ra != 31)
1790 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
1791 break;
1792 #endif
1793 case 0x1C:
1794 switch (fn7) {
1795 case 0x00:
1796 /* SEXTB */
1797 if (!(ctx->amask & AMASK_BWX))
1798 goto invalid_opc;
1799 if (likely(rc != 31)) {
1800 if (islit)
1801 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
1802 else
1803 tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
1804 }
1805 break;
1806 case 0x01:
1807 /* SEXTW */
1808 if (!(ctx->amask & AMASK_BWX))
1809 goto invalid_opc;
1810 if (likely(rc != 31)) {
1811 if (islit)
1812 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
1813 else
1814 tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
1815 }
1816 break;
1817 case 0x30:
1818 /* CTPOP */
1819 if (!(ctx->amask & AMASK_CIX))
1820 goto invalid_opc;
1821 if (likely(rc != 31)) {
1822 if (islit)
1823 tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
1824 else
1825 tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
1826 }
1827 break;
1828 case 0x31:
1829 /* PERR */
1830 if (!(ctx->amask & AMASK_MVI))
1831 goto invalid_opc;
1832 /* XXX: TODO */
1833 goto invalid_opc;
1834 break;
1835 case 0x32:
1836 /* CTLZ */
1837 if (!(ctx->amask & AMASK_CIX))
1838 goto invalid_opc;
1839 if (likely(rc != 31)) {
1840 if (islit)
1841 tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
1842 else
1843 tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
1844 }
1845 break;
1846 case 0x33:
1847 /* CTTZ */
1848 if (!(ctx->amask & AMASK_CIX))
1849 goto invalid_opc;
1850 if (likely(rc != 31)) {
1851 if (islit)
1852 tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
1853 else
1854 tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
1855 }
1856 break;
1857 case 0x34:
1858 /* UNPKBW */
1859 if (!(ctx->amask & AMASK_MVI))
1860 goto invalid_opc;
1861 /* XXX: TODO */
1862 goto invalid_opc;
1863 break;
1864 case 0x35:
1865 /* UNPKWL */
1866 if (!(ctx->amask & AMASK_MVI))
1867 goto invalid_opc;
1868 /* XXX: TODO */
1869 goto invalid_opc;
1870 break;
1871 case 0x36:
1872 /* PKWB */
1873 if (!(ctx->amask & AMASK_MVI))
1874 goto invalid_opc;
1875 /* XXX: TODO */
1876 goto invalid_opc;
1877 break;
1878 case 0x37:
1879 /* PKLB */
1880 if (!(ctx->amask & AMASK_MVI))
1881 goto invalid_opc;
1882 /* XXX: TODO */
1883 goto invalid_opc;
1884 break;
1885 case 0x38:
1886 /* MINSB8 */
1887 if (!(ctx->amask & AMASK_MVI))
1888 goto invalid_opc;
1889 /* XXX: TODO */
1890 goto invalid_opc;
1891 break;
1892 case 0x39:
1893 /* MINSW4 */
1894 if (!(ctx->amask & AMASK_MVI))
1895 goto invalid_opc;
1896 /* XXX: TODO */
1897 goto invalid_opc;
1898 break;
1899 case 0x3A:
1900 /* MINUB8 */
1901 if (!(ctx->amask & AMASK_MVI))
1902 goto invalid_opc;
1903 /* XXX: TODO */
1904 goto invalid_opc;
1905 break;
1906 case 0x3B:
1907 /* MINUW4 */
1908 if (!(ctx->amask & AMASK_MVI))
1909 goto invalid_opc;
1910 /* XXX: TODO */
1911 goto invalid_opc;
1912 break;
1913 case 0x3C:
1914 /* MAXUB8 */
1915 if (!(ctx->amask & AMASK_MVI))
1916 goto invalid_opc;
1917 /* XXX: TODO */
1918 goto invalid_opc;
1919 break;
1920 case 0x3D:
1921 /* MAXUW4 */
1922 if (!(ctx->amask & AMASK_MVI))
1923 goto invalid_opc;
1924 /* XXX: TODO */
1925 goto invalid_opc;
1926 break;
1927 case 0x3E:
1928 /* MAXSB8 */
1929 if (!(ctx->amask & AMASK_MVI))
1930 goto invalid_opc;
1931 /* XXX: TODO */
1932 goto invalid_opc;
1933 break;
1934 case 0x3F:
1935 /* MAXSW4 */
1936 if (!(ctx->amask & AMASK_MVI))
1937 goto invalid_opc;
1938 /* XXX: TODO */
1939 goto invalid_opc;
1940 break;
1941 case 0x70:
1942 /* FTOIT */
1943 if (!(ctx->amask & AMASK_FIX))
1944 goto invalid_opc;
1945 gen_fti(ctx, &gen_op_ftoit, ra, rb);
1946 break;
1947 case 0x78:
1948 /* FTOIS */
1949 if (!(ctx->amask & AMASK_FIX))
1950 goto invalid_opc;
1951 gen_fti(ctx, &gen_op_ftois, ra, rb);
1952 break;
1953 default:
1954 goto invalid_opc;
1955 }
1956 break;
1957 case 0x1D:
1958 /* HW_MTPR (PALcode) */
1959 #if defined (CONFIG_USER_ONLY)
1960 goto invalid_opc;
1961 #else
1962 if (!ctx->pal_mode)
1963 goto invalid_opc;
1964 if (ra != 31)
1965 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
1966 else
1967 tcg_gen_movi_i64(cpu_T[0], 0);
1968 gen_op_mtpr(insn & 0xFF);
1969 ret = 2;
1970 break;
1971 #endif
1972 case 0x1E:
1973 /* HW_REI (PALcode) */
1974 #if defined (CONFIG_USER_ONLY)
1975 goto invalid_opc;
1976 #else
1977 if (!ctx->pal_mode)
1978 goto invalid_opc;
1979 if (rb == 31) {
1980 /* "Old" alpha */
1981 gen_op_hw_rei();
1982 } else {
1983 if (ra != 31)
1984 tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
1985 else
1986 tcg_gen_movi_i64(cpu_T[0], 0);
1987 tcg_gen_movi_i64(cpu_T[1], (((int64_t)insn << 51) >> 51));
1988 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1989 gen_op_hw_ret();
1990 }
1991 ret = 2;
1992 break;
1993 #endif
1994 case 0x1F:
1995 /* HW_ST (PALcode) */
1996 #if defined (CONFIG_USER_ONLY)
1997 goto invalid_opc;
1998 #else
1999 if (!ctx->pal_mode)
2000 goto invalid_opc;
2001 if (ra != 31)
2002 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp12);
2003 else
2004 tcg_gen_movi_i64(cpu_T[0], disp12);
2005 if (ra != 31)
2006 tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
2007 else
2008 tcg_gen_movi_i64(cpu_T[1], 0);
2009 switch ((insn >> 12) & 0xF) {
2010 case 0x0:
2011 /* Longword physical access */
2012 gen_op_stl_raw();
2013 break;
2014 case 0x1:
2015 /* Quadword physical access */
2016 gen_op_stq_raw();
2017 break;
2018 case 0x2:
2019 /* Longword physical access with lock */
2020 gen_op_stl_c_raw();
2021 break;
2022 case 0x3:
2023 /* Quadword physical access with lock */
2024 gen_op_stq_c_raw();
2025 break;
2026 case 0x4:
2027 /* Longword virtual access */
2028 gen_op_st_phys_to_virt();
2029 gen_op_stl_raw();
2030 break;
2031 case 0x5:
2032 /* Quadword virtual access */
2033 gen_op_st_phys_to_virt();
2034 gen_op_stq_raw();
2035 break;
2036 case 0x6:
2037 /* Invalid */
2038 goto invalid_opc;
2039 case 0x7:
2040 /* Invalid */
2041 goto invalid_opc;
2042 case 0x8:
2043 /* Invalid */
2044 goto invalid_opc;
2045 case 0x9:
2046 /* Invalid */
2047 goto invalid_opc;
2048 case 0xA:
2049 /* Invalid */
2050 goto invalid_opc;
2051 case 0xB:
2052 /* Invalid */
2053 goto invalid_opc;
2054 case 0xC:
2055 /* Longword virtual access with alternate access mode */
2056 gen_op_set_alt_mode();
2057 gen_op_st_phys_to_virt();
2058 gen_op_ldl_raw();
2059 gen_op_restore_mode();
2060 break;
2061 case 0xD:
2062 /* Quadword virtual access with alternate access mode */
2063 gen_op_set_alt_mode();
2064 gen_op_st_phys_to_virt();
2065 gen_op_ldq_raw();
2066 gen_op_restore_mode();
2067 break;
2068 case 0xE:
2069 /* Invalid */
2070 goto invalid_opc;
2071 case 0xF:
2072 /* Invalid */
2073 goto invalid_opc;
2074 }
2075 ret = 2;
2076 break;
2077 #endif
2078 case 0x20:
2079 /* LDF */
2080 #if 0 // TODO
2081 gen_load_fmem(ctx, &gen_ldf, ra, rb, disp16);
2082 #else
2083 goto invalid_opc;
2084 #endif
2085 break;
2086 case 0x21:
2087 /* LDG */
2088 #if 0 // TODO
2089 gen_load_fmem(ctx, &gen_ldg, ra, rb, disp16);
2090 #else
2091 goto invalid_opc;
2092 #endif
2093 break;
2094 case 0x22:
2095 /* LDS */
2096 gen_load_fmem(ctx, &gen_lds, ra, rb, disp16);
2097 break;
2098 case 0x23:
2099 /* LDT */
2100 gen_load_fmem(ctx, &gen_ldt, ra, rb, disp16);
2101 break;
2102 case 0x24:
2103 /* STF */
2104 #if 0 // TODO
2105 gen_store_fmem(ctx, &gen_stf, ra, rb, disp16);
2106 #else
2107 goto invalid_opc;
2108 #endif
2109 break;
2110 case 0x25:
2111 /* STG */
2112 #if 0 // TODO
2113 gen_store_fmem(ctx, &gen_stg, ra, rb, disp16);
2114 #else
2115 goto invalid_opc;
2116 #endif
2117 break;
2118 case 0x26:
2119 /* STS */
2120 gen_store_fmem(ctx, &gen_sts, ra, rb, disp16);
2121 break;
2122 case 0x27:
2123 /* STT */
2124 gen_store_fmem(ctx, &gen_stt, ra, rb, disp16);
2125 break;
2126 case 0x28:
2127 /* LDL */
2128 gen_load_mem(ctx, &gen_ldl, ra, rb, disp16, 0);
2129 break;
2130 case 0x29:
2131 /* LDQ */
2132 gen_load_mem(ctx, &gen_ldq, ra, rb, disp16, 0);
2133 break;
2134 case 0x2A:
2135 /* LDL_L */
2136 gen_load_mem(ctx, &gen_ldl_l, ra, rb, disp16, 0);
2137 break;
2138 case 0x2B:
2139 /* LDQ_L */
2140 gen_load_mem(ctx, &gen_ldq_l, ra, rb, disp16, 0);
2141 break;
2142 case 0x2C:
2143 /* STL */
2144 gen_store_mem(ctx, &gen_stl, ra, rb, disp16, 0);
2145 break;
2146 case 0x2D:
2147 /* STQ */
2148 gen_store_mem(ctx, &gen_stq, ra, rb, disp16, 0);
2149 break;
2150 case 0x2E:
2151 /* STL_C */
2152 gen_store_mem(ctx, &gen_stl_c, ra, rb, disp16, 0);
2153 break;
2154 case 0x2F:
2155 /* STQ_C */
2156 gen_store_mem(ctx, &gen_stq_c, ra, rb, disp16, 0);
2157 break;
2158 case 0x30:
2159 /* BR */
2160 if (ra != 31)
2161 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2162 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2163 ret = 1;
2164 break;
2165 case 0x31:
2166 /* FBEQ */
2167 gen_fbcond(ctx, &gen_op_cmpfeq, ra, disp16);
2168 ret = 1;
2169 break;
2170 case 0x32:
2171 /* FBLT */
2172 gen_fbcond(ctx, &gen_op_cmpflt, ra, disp16);
2173 ret = 1;
2174 break;
2175 case 0x33:
2176 /* FBLE */
2177 gen_fbcond(ctx, &gen_op_cmpfle, ra, disp16);
2178 ret = 1;
2179 break;
2180 case 0x34:
2181 /* BSR */
2182 if (ra != 31)
2183 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2184 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2185 ret = 1;
2186 break;
2187 case 0x35:
2188 /* FBNE */
2189 gen_fbcond(ctx, &gen_op_cmpfne, ra, disp16);
2190 ret = 1;
2191 break;
2192 case 0x36:
2193 /* FBGE */
2194 gen_fbcond(ctx, &gen_op_cmpfge, ra, disp16);
2195 ret = 1;
2196 break;
2197 case 0x37:
2198 /* FBGT */
2199 gen_fbcond(ctx, &gen_op_cmpfgt, ra, disp16);
2200 ret = 1;
2201 break;
2202 case 0x38:
2203 /* BLBC */
2204 gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 1);
2205 ret = 1;
2206 break;
2207 case 0x39:
2208 /* BEQ */
2209 gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 0);
2210 ret = 1;
2211 break;
2212 case 0x3A:
2213 /* BLT */
2214 gen_bcond(ctx, TCG_COND_LT, ra, disp16, 0);
2215 ret = 1;
2216 break;
2217 case 0x3B:
2218 /* BLE */
2219 gen_bcond(ctx, TCG_COND_LE, ra, disp16, 0);
2220 ret = 1;
2221 break;
2222 case 0x3C:
2223 /* BLBS */
2224 gen_bcond(ctx, TCG_COND_NE, ra, disp16, 1);
2225 ret = 1;
2226 break;
2227 case 0x3D:
2228 /* BNE */
2229 gen_bcond(ctx, TCG_COND_NE, ra, disp16, 0);
2230 ret = 1;
2231 break;
2232 case 0x3E:
2233 /* BGE */
2234 gen_bcond(ctx, TCG_COND_GE, ra, disp16, 0);
2235 ret = 1;
2236 break;
2237 case 0x3F:
2238 /* BGT */
2239 gen_bcond(ctx, TCG_COND_GT, ra, disp16, 0);
2240 ret = 1;
2241 break;
2242 invalid_opc:
2243 gen_invalid(ctx);
2244 ret = 3;
2245 break;
2246 }
2247
2248 return ret;
2249 }
2250
2251 static always_inline void gen_intermediate_code_internal (CPUState *env,
2252 TranslationBlock *tb,
2253 int search_pc)
2254 {
2255 #if defined ALPHA_DEBUG_DISAS
2256 static int insn_count;
2257 #endif
2258 DisasContext ctx, *ctxp = &ctx;
2259 target_ulong pc_start;
2260 uint32_t insn;
2261 uint16_t *gen_opc_end;
2262 int j, lj = -1;
2263 int ret;
2264 int num_insns;
2265 int max_insns;
2266
2267 pc_start = tb->pc;
2268 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
2269 ctx.pc = pc_start;
2270 ctx.amask = env->amask;
2271 #if defined (CONFIG_USER_ONLY)
2272 ctx.mem_idx = 0;
2273 #else
2274 ctx.mem_idx = ((env->ps >> 3) & 3);
2275 ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
2276 #endif
2277 num_insns = 0;
2278 max_insns = tb->cflags & CF_COUNT_MASK;
2279 if (max_insns == 0)
2280 max_insns = CF_COUNT_MASK;
2281
2282 gen_icount_start();
2283 for (ret = 0; ret == 0;) {
2284 if (env->nb_breakpoints > 0) {
2285 for(j = 0; j < env->nb_breakpoints; j++) {
2286 if (env->breakpoints[j] == ctx.pc) {
2287 gen_excp(&ctx, EXCP_DEBUG, 0);
2288 break;
2289 }
2290 }
2291 }
2292 if (search_pc) {
2293 j = gen_opc_ptr - gen_opc_buf;
2294 if (lj < j) {
2295 lj++;
2296 while (lj < j)
2297 gen_opc_instr_start[lj++] = 0;
2298 gen_opc_pc[lj] = ctx.pc;
2299 gen_opc_instr_start[lj] = 1;
2300 gen_opc_icount[lj] = num_insns;
2301 }
2302 }
2303 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
2304 gen_io_start();
2305 #if defined ALPHA_DEBUG_DISAS
2306 insn_count++;
2307 if (logfile != NULL) {
2308 fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
2309 ctx.pc, ctx.mem_idx);
2310 }
2311 #endif
2312 insn = ldl_code(ctx.pc);
2313 #if defined ALPHA_DEBUG_DISAS
2314 insn_count++;
2315 if (logfile != NULL) {
2316 fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
2317 }
2318 #endif
2319 num_insns++;
2320 ctx.pc += 4;
2321 ret = translate_one(ctxp, insn);
2322 if (ret != 0)
2323 break;
2324 /* if we reach a page boundary or are single stepping, stop
2325 * generation
2326 */
2327 if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
2328 (env->singlestep_enabled) ||
2329 num_insns >= max_insns) {
2330 break;
2331 }
2332 #if defined (DO_SINGLE_STEP)
2333 break;
2334 #endif
2335 }
2336 if (ret != 1 && ret != 3) {
2337 tcg_gen_movi_i64(cpu_pc, ctx.pc);
2338 }
2339 #if defined (DO_TB_FLUSH)
2340 tcg_gen_helper_0_0(helper_tb_flush);
2341 #endif
2342 if (tb->cflags & CF_LAST_IO)
2343 gen_io_end();
2344 /* Generate the return instruction */
2345 tcg_gen_exit_tb(0);
2346 gen_icount_end(tb, num_insns);
2347 *gen_opc_ptr = INDEX_op_end;
2348 if (search_pc) {
2349 j = gen_opc_ptr - gen_opc_buf;
2350 lj++;
2351 while (lj <= j)
2352 gen_opc_instr_start[lj++] = 0;
2353 } else {
2354 tb->size = ctx.pc - pc_start;
2355 tb->icount = num_insns;
2356 }
2357 #if defined ALPHA_DEBUG_DISAS
2358 if (loglevel & CPU_LOG_TB_CPU) {
2359 cpu_dump_state(env, logfile, fprintf, 0);
2360 }
2361 if (loglevel & CPU_LOG_TB_IN_ASM) {
2362 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
2363 target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
2364 fprintf(logfile, "\n");
2365 }
2366 #endif
2367 }
2368
2369 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
2370 {
2371 gen_intermediate_code_internal(env, tb, 0);
2372 }
2373
2374 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
2375 {
2376 gen_intermediate_code_internal(env, tb, 1);
2377 }
2378
2379 CPUAlphaState * cpu_alpha_init (const char *cpu_model)
2380 {
2381 CPUAlphaState *env;
2382 uint64_t hwpcb;
2383
2384 env = qemu_mallocz(sizeof(CPUAlphaState));
2385 if (!env)
2386 return NULL;
2387 cpu_exec_init(env);
2388 alpha_translate_init();
2389 tlb_flush(env, 1);
2390 /* XXX: should not be hardcoded */
2391 env->implver = IMPLVER_2106x;
2392 env->ps = 0x1F00;
2393 #if defined (CONFIG_USER_ONLY)
2394 env->ps |= 1 << 3;
2395 #endif
2396 pal_init(env);
2397 /* Initialize IPR */
2398 hwpcb = env->ipr[IPR_PCBB];
2399 env->ipr[IPR_ASN] = 0;
2400 env->ipr[IPR_ASTEN] = 0;
2401 env->ipr[IPR_ASTSR] = 0;
2402 env->ipr[IPR_DATFX] = 0;
2403 /* XXX: fix this */
2404 // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2405 // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2406 // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2407 // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2408 env->ipr[IPR_FEN] = 0;
2409 env->ipr[IPR_IPL] = 31;
2410 env->ipr[IPR_MCES] = 0;
2411 env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2412 // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2413 env->ipr[IPR_SISR] = 0;
2414 env->ipr[IPR_VIRBND] = -1ULL;
2415
2416 return env;
2417 }
2418
2419 void gen_pc_load(CPUState *env, TranslationBlock *tb,
2420 unsigned long searched_pc, int pc_pos, void *puc)
2421 {
2422 env->pc = gen_opc_pc[pc_pos];
2423 }