]> git.proxmox.com Git - qemu.git/blob - target-alpha/translate.c
target-alpha: factorize load/store code
[qemu.git] / target-alpha / translate.c
1 /*
2 * Alpha emulation cpu translation for qemu.
3 *
4 * Copyright (c) 2007 Jocelyn Mayer
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
 18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 */
20
21 #include <stdint.h>
22 #include <stdlib.h>
23 #include <stdio.h>
24
25 #include "cpu.h"
26 #include "exec-all.h"
27 #include "disas.h"
28 #include "host-utils.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31 #include "qemu-common.h"
32
33 #define DO_SINGLE_STEP
34 #define GENERATE_NOP
35 #define ALPHA_DEBUG_DISAS
36 #define DO_TB_FLUSH
37
typedef struct DisasContext DisasContext;
/* Per-instruction translation state, threaded through all gen_* helpers. */
struct DisasContext {
    uint64_t pc;        /* virtual PC of the instruction being translated */
    int mem_idx;        /* index into the per-mode load/store op tables */
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;       /* NOTE(review): presumably non-zero while in PAL mode -- usage not visible in this chunk */
#endif
    uint32_t amask;     /* CPU feature bits, tested against AMASK_* (e.g. AMASK_BWX) */
};
47
/* global register indexes */
static TCGv cpu_env;                     /* pointer to the CPUState */
static TCGv cpu_ir[31];                  /* ir0..ir30; ir31 always reads as zero and is special-cased by the callers */
static TCGv cpu_pc;                      /* architectural program counter */

/* dyngen register indexes */
static TCGv cpu_T[2];                    /* scratch values shared with the remaining dyngen ops */

/* register names */
static char cpu_reg_names[10*4+21*5];    /* "ir0".."ir9" are 4 bytes each, "ir10".."ir30" are 5 */
59 #include "gen-icount.h"
60
61 static void alpha_translate_init(void)
62 {
63 int i;
64 char *p;
65 static int done_init = 0;
66
67 if (done_init)
68 return;
69
70 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
71
72 #if TARGET_LONG_BITS > HOST_LONG_BITS
73 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
74 offsetof(CPUState, t0), "T0");
75 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
76 offsetof(CPUState, t1), "T1");
77 #else
78 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG1, "T0");
79 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG2, "T1");
80 #endif
81
82 p = cpu_reg_names;
83 for (i = 0; i < 31; i++) {
84 sprintf(p, "ir%d", i);
85 cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
86 offsetof(CPUState, ir[i]), p);
87 p += (i < 10) ? 4 : 5;
88 }
89
90 cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
91 offsetof(CPUState, pc), "pc");
92
93 /* register helpers */
94 #undef DEF_HELPER
95 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
96 #include "helper.h"
97
98 done_init = 1;
99 }
100
101 static always_inline void gen_op_nop (void)
102 {
103 #if defined(GENERATE_NOP)
104 gen_op_no_op();
105 #endif
106 }
107
/* Expand to a 32-entry dispatch table NAME0..NAME31 plus a function
   `func' that invokes the n-th entry -- one dyngen op per architectural
   register number. */
#define GEN32(func, NAME)                                             \
static GenOpFunc *NAME ## _table [32] = {                             \
NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3,                           \
NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7,                           \
NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11,                         \
NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15,                       \
NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19,                       \
NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23,                       \
NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27,                       \
NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31,                       \
};                                                                    \
static always_inline void func (int n)                                \
{                                                                     \
    NAME ## _table[n]();                                              \
}

/* FIR moves */
/* Special hacks for fir31: slot 31 of each table maps loads to "reset
   the FT temporary" and stores/cmov to a no-op, matching the zero
   register semantics. */
#define gen_op_load_FT0_fir31 gen_op_reset_FT0
#define gen_op_load_FT1_fir31 gen_op_reset_FT1
#define gen_op_load_FT2_fir31 gen_op_reset_FT2
#define gen_op_store_FT0_fir31 gen_op_nop
#define gen_op_store_FT1_fir31 gen_op_nop
#define gen_op_store_FT2_fir31 gen_op_nop
#define gen_op_cmov_fir31 gen_op_nop
GEN32(gen_op_load_FT0_fir, gen_op_load_FT0_fir);
GEN32(gen_op_load_FT1_fir, gen_op_load_FT1_fir);
GEN32(gen_op_load_FT2_fir, gen_op_load_FT2_fir);
GEN32(gen_op_store_FT0_fir, gen_op_store_FT0_fir);
GEN32(gen_op_store_FT1_fir, gen_op_store_FT1_fir);
GEN32(gen_op_store_FT2_fir, gen_op_store_FT2_fir);
GEN32(gen_op_cmov_fir, gen_op_cmov_fir);
140
141 static always_inline void gen_load_fir (DisasContext *ctx, int firn, int Tn)
142 {
143 switch (Tn) {
144 case 0:
145 gen_op_load_FT0_fir(firn);
146 break;
147 case 1:
148 gen_op_load_FT1_fir(firn);
149 break;
150 case 2:
151 gen_op_load_FT2_fir(firn);
152 break;
153 }
154 }
155
156 static always_inline void gen_store_fir (DisasContext *ctx, int firn, int Tn)
157 {
158 switch (Tn) {
159 case 0:
160 gen_op_store_FT0_fir(firn);
161 break;
162 case 1:
163 gen_op_store_FT1_fir(firn);
164 break;
165 case 2:
166 gen_op_store_FT2_fir(firn);
167 break;
168 }
169 }
170
/* Memory moves */
/* Per-width load/store dispatch tables, indexed by ctx->mem_idx.
   User-only builds have a single "raw" access mode; system builds have
   one entry per processor mode (kernel/executive/supervisor/user). */
#if defined(CONFIG_USER_ONLY)
#define OP_LD_TABLE(width)                                            \
static GenOpFunc *gen_op_ld##width[] = {                              \
    &gen_op_ld##width##_raw,                                          \
}
#define OP_ST_TABLE(width)                                            \
static GenOpFunc *gen_op_st##width[] = {                              \
    &gen_op_st##width##_raw,                                          \
}
#else
#define OP_LD_TABLE(width)                                            \
static GenOpFunc *gen_op_ld##width[] = {                              \
    &gen_op_ld##width##_kernel,                                       \
    &gen_op_ld##width##_executive,                                    \
    &gen_op_ld##width##_supervisor,                                   \
    &gen_op_ld##width##_user,                                         \
}
#define OP_ST_TABLE(width)                                            \
static GenOpFunc *gen_op_st##width[] = {                              \
    &gen_op_st##width##_kernel,                                       \
    &gen_op_st##width##_executive,                                    \
    &gen_op_st##width##_supervisor,                                   \
    &gen_op_st##width##_user,                                         \
}
#endif

/* Define gen_ld<width>/gen_st<width>, which dispatch through the table
   above using the current memory access index. */
#define GEN_LD(width)                                                 \
OP_LD_TABLE(width);                                                   \
static always_inline void gen_ld##width (DisasContext *ctx)           \
{                                                                     \
    (*gen_op_ld##width[ctx->mem_idx])();                              \
}

#define GEN_ST(width)                                                 \
OP_ST_TABLE(width);                                                   \
static always_inline void gen_st##width (DisasContext *ctx)           \
{                                                                     \
    (*gen_op_st##width[ctx->mem_idx])();                              \
}

GEN_LD(l);      /* longword */
GEN_ST(l);
GEN_LD(q);      /* quadword */
GEN_ST(q);
GEN_LD(l_l);    /* longword locked */
GEN_ST(l_c);    /* longword conditional */
GEN_LD(q_l);    /* quadword locked */
GEN_ST(q_c);    /* quadword conditional */

#if 0 /* currently unused */
GEN_LD(f);
GEN_ST(f);
GEN_LD(g);
GEN_ST(g);
#endif /* 0 */
GEN_LD(s);      /* S-floating */
GEN_ST(s);
GEN_LD(t);      /* T-floating */
GEN_ST(t);
231
/* Emit the dyngen conditional-branch op.  The 64-bit target PC is
   passed as two 32-bit halves because dyngen cannot take a 64-bit
   immediate operand directly. */
static always_inline void _gen_op_bcond (DisasContext *ctx)
{
#if 0 // Qemu does not know how to do this...
    gen_op_bcond(ctx->pc);
#else
    gen_op_bcond(ctx->pc >> 32, ctx->pc);
#endif
}
240
241 static always_inline void gen_excp (DisasContext *ctx,
242 int exception, int error_code)
243 {
244 TCGv tmp1, tmp2;
245
246 tcg_gen_movi_i64(cpu_pc, ctx->pc);
247 tmp1 = tcg_const_i32(exception);
248 tmp2 = tcg_const_i32(error_code);
249 tcg_gen_helper_0_2(helper_excp, tmp1, tmp2);
250 tcg_temp_free(tmp2);
251 tcg_temp_free(tmp1);
252 }
253
/* Raise an OPCDEC (illegal/reserved opcode) exception. */
static always_inline void gen_invalid (DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
258
259 static always_inline void gen_load_mem_dyngen (DisasContext *ctx,
260 void (*gen_load_op)(DisasContext *ctx),
261 int ra, int rb, int32_t disp16,
262 int clear)
263 {
264 if (ra == 31 && disp16 == 0) {
265 /* UNOP */
266 gen_op_nop();
267 } else {
268 if (rb != 31)
269 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
270 else
271 tcg_gen_movi_i64(cpu_T[0], disp16);
272 if (clear)
273 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
274 (*gen_load_op)(ctx);
275 if (ra != 31)
276 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
277 }
278 }
279
280 static always_inline void gen_load_mem (DisasContext *ctx,
281 void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
282 int ra, int rb, int32_t disp16,
283 int clear)
284 {
285 TCGv addr;
286
287 if (unlikely(ra == 31))
288 return;
289
290 addr = tcg_temp_new(TCG_TYPE_I64);
291 if (rb != 31) {
292 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
293 if (clear)
294 tcg_gen_andi_i64(addr, addr, ~0x7);
295 } else {
296 if (clear)
297 disp16 &= ~0x7;
298 tcg_gen_movi_i64(addr, disp16);
299 }
300 tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
301 tcg_temp_free(addr);
302 }
303
304 static always_inline void gen_store_mem_dyngen (DisasContext *ctx,
305 void (*gen_store_op)(DisasContext *ctx),
306 int ra, int rb, int32_t disp16,
307 int clear)
308 {
309 if (rb != 31)
310 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
311 else
312 tcg_gen_movi_i64(cpu_T[0], disp16);
313 if (clear)
314 tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
315 if (ra != 31)
316 tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
317 else
318 tcg_gen_movi_i64(cpu_T[1], 0);
319 (*gen_store_op)(ctx);
320 }
321
322 static always_inline void gen_store_mem (DisasContext *ctx,
323 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
324 int ra, int rb, int32_t disp16,
325 int clear)
326 {
327 TCGv addr = tcg_temp_new(TCG_TYPE_I64);
328 if (rb != 31) {
329 tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
330 if (clear)
331 tcg_gen_andi_i64(addr, addr, ~0x7);
332 } else {
333 if (clear)
334 disp16 &= ~0x7;
335 tcg_gen_movi_i64(addr, disp16);
336 }
337 if (ra != 31)
338 tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
339 else {
340 TCGv zero = tcg_const_i64(0);
341 tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
342 tcg_temp_free(zero);
343 }
344 tcg_temp_free(addr);
345 }
346
347 static always_inline void gen_load_fmem (DisasContext *ctx,
348 void (*gen_load_fop)(DisasContext *ctx),
349 int ra, int rb, int32_t disp16)
350 {
351 if (rb != 31)
352 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
353 else
354 tcg_gen_movi_i64(cpu_T[0], disp16);
355 (*gen_load_fop)(ctx);
356 gen_store_fir(ctx, ra, 1);
357 }
358
359 static always_inline void gen_store_fmem (DisasContext *ctx,
360 void (*gen_store_fop)(DisasContext *ctx),
361 int ra, int rb, int32_t disp16)
362 {
363 if (rb != 31)
364 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
365 else
366 tcg_gen_movi_i64(cpu_T[0], disp16);
367 gen_load_fir(ctx, ra, 1);
368 (*gen_store_fop)(ctx);
369 }
370
371 static always_inline void gen_bcond (DisasContext *ctx,
372 TCGCond cond,
373 int ra, int32_t disp16, int mask)
374 {
375 int l1, l2;
376
377 l1 = gen_new_label();
378 l2 = gen_new_label();
379 if (likely(ra != 31)) {
380 if (mask) {
381 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
382 tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
383 tcg_gen_brcondi_i64(cond, tmp, 0, l1);
384 tcg_temp_free(tmp);
385 } else
386 tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
387 } else {
388 /* Very uncommon case - Do not bother to optimize. */
389 TCGv tmp = tcg_const_i64(0);
390 tcg_gen_brcondi_i64(cond, tmp, 0, l1);
391 tcg_temp_free(tmp);
392 }
393 tcg_gen_movi_i64(cpu_pc, ctx->pc);
394 tcg_gen_br(l2);
395 gen_set_label(l1);
396 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
397 gen_set_label(l2);
398 }
399
400 static always_inline void gen_fbcond (DisasContext *ctx,
401 void (*gen_test_op)(void),
402 int ra, int32_t disp16)
403 {
404 tcg_gen_movi_i64(cpu_T[1], ctx->pc + (int64_t)(disp16 << 2));
405 gen_load_fir(ctx, ra, 0);
406 (*gen_test_op)();
407 _gen_op_bcond(ctx);
408 }
409
410 static always_inline void gen_cmov (DisasContext *ctx,
411 TCGCond inv_cond,
412 int ra, int rb, int rc,
413 int islit, uint8_t lit, int mask)
414 {
415 int l1;
416
417 if (unlikely(rc == 31))
418 return;
419
420 l1 = gen_new_label();
421
422 if (ra != 31) {
423 if (mask) {
424 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
425 tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
426 tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
427 tcg_temp_free(tmp);
428 } else
429 tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
430 } else {
431 /* Very uncommon case - Do not bother to optimize. */
432 TCGv tmp = tcg_const_i64(0);
433 tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
434 tcg_temp_free(tmp);
435 }
436
437 if (islit)
438 tcg_gen_movi_i64(cpu_ir[rc], lit);
439 else
440 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
441 gen_set_label(l1);
442 }
443
444 static always_inline void gen_farith2 (DisasContext *ctx,
445 void (*gen_arith_fop)(void),
446 int rb, int rc)
447 {
448 gen_load_fir(ctx, rb, 0);
449 (*gen_arith_fop)();
450 gen_store_fir(ctx, rc, 0);
451 }
452
453 static always_inline void gen_farith3 (DisasContext *ctx,
454 void (*gen_arith_fop)(void),
455 int ra, int rb, int rc)
456 {
457 gen_load_fir(ctx, ra, 0);
458 gen_load_fir(ctx, rb, 1);
459 (*gen_arith_fop)();
460 gen_store_fir(ctx, rc, 0);
461 }
462
463 static always_inline void gen_fcmov (DisasContext *ctx,
464 void (*gen_test_fop)(void),
465 int ra, int rb, int rc)
466 {
467 gen_load_fir(ctx, ra, 0);
468 gen_load_fir(ctx, rb, 1);
469 (*gen_test_fop)();
470 gen_op_cmov_fir(rc);
471 }
472
473 static always_inline void gen_fti (DisasContext *ctx,
474 void (*gen_move_fop)(void),
475 int ra, int rc)
476 {
477 gen_load_fir(ctx, rc, 0);
478 (*gen_move_fop)();
479 if (ra != 31)
480 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
481 }
482
483 static always_inline void gen_itf (DisasContext *ctx,
484 void (*gen_move_fop)(void),
485 int ra, int rc)
486 {
487 if (ra != 31)
488 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
489 else
490 tcg_gen_movi_i64(cpu_T[0], 0);
491 (*gen_move_fop)();
492 gen_store_fir(ctx, rc, 0);
493 }
494
495 /* EXTWH, EXTWH, EXTLH, EXTQH */
496 static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
497 int ra, int rb, int rc,
498 int islit, uint8_t lit)
499 {
500 if (unlikely(rc == 31))
501 return;
502
503 if (ra != 31) {
504 if (islit) {
505 if (lit != 0)
506 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
507 else
508 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
509 } else {
510 TCGv tmp1, tmp2;
511 tmp1 = tcg_temp_new(TCG_TYPE_I64);
512 tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
513 tcg_gen_shli_i64(tmp1, tmp1, 3);
514 tmp2 = tcg_const_i64(64);
515 tcg_gen_sub_i64(tmp1, tmp2, tmp1);
516 tcg_temp_free(tmp2);
517 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
518 tcg_temp_free(tmp1);
519 }
520 if (tcg_gen_ext_i64)
521 tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
522 } else
523 tcg_gen_movi_i64(cpu_ir[rc], 0);
524 }
525
526 /* EXTBL, EXTWL, EXTWL, EXTLL, EXTQL */
527 static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
528 int ra, int rb, int rc,
529 int islit, uint8_t lit)
530 {
531 if (unlikely(rc == 31))
532 return;
533
534 if (ra != 31) {
535 if (islit) {
536 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
537 } else {
538 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
539 tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
540 tcg_gen_shli_i64(tmp, tmp, 3);
541 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
542 tcg_temp_free(tmp);
543 }
544 if (tcg_gen_ext_i64)
545 tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
546 } else
547 tcg_gen_movi_i64(cpu_ir[rc], 0);
548 }
549
550 /* Code to call arith3 helpers */
551 static always_inline void gen_arith3_helper(void *helper,
552 int ra, int rb, int rc,
553 int islit, uint8_t lit)
554 {
555 if (unlikely(rc == 31))
556 return;
557
558 if (ra != 31) {
559 if (islit) {
560 TCGv tmp = tcg_const_i64(lit);
561 tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], tmp);
562 tcg_temp_free(tmp);
563 } else
564 tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
565 } else {
566 TCGv tmp1 = tcg_const_i64(0);
567 if (islit) {
568 TCGv tmp2 = tcg_const_i64(lit);
569 tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, tmp2);
570 tcg_temp_free(tmp2);
571 } else
572 tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, cpu_ir[rb]);
573 tcg_temp_free(tmp1);
574 }
575 }
576
577 static always_inline void gen_cmp(TCGCond cond,
578 int ra, int rb, int rc,
579 int islit, uint8_t lit)
580 {
581 int l1, l2;
582 TCGv tmp;
583
584 if (unlikely(rc == 31))
585 return;
586
587 l1 = gen_new_label();
588 l2 = gen_new_label();
589
590 if (ra != 31) {
591 tmp = tcg_temp_new(TCG_TYPE_I64);
592 tcg_gen_mov_i64(tmp, cpu_ir[ra]);
593 } else
594 tmp = tcg_const_i64(0);
595 if (islit)
596 tcg_gen_brcondi_i64(cond, tmp, lit, l1);
597 else
598 tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);
599
600 tcg_gen_movi_i64(cpu_ir[rc], 0);
601 tcg_gen_br(l2);
602 gen_set_label(l1);
603 tcg_gen_movi_i64(cpu_ir[rc], 1);
604 gen_set_label(l2);
605 }
606
607 static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
608 {
609 uint32_t palcode;
610 int32_t disp21, disp16, disp12;
611 uint16_t fn11, fn16;
612 uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
613 uint8_t lit;
614 int ret;
615
616 /* Decode all instruction fields */
617 opc = insn >> 26;
618 ra = (insn >> 21) & 0x1F;
619 rb = (insn >> 16) & 0x1F;
620 rc = insn & 0x1F;
621 sbz = (insn >> 13) & 0x07;
622 islit = (insn >> 12) & 1;
623 if (rb == 31 && !islit) {
624 islit = 1;
625 lit = 0;
626 } else
627 lit = (insn >> 13) & 0xFF;
628 palcode = insn & 0x03FFFFFF;
629 disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
630 disp16 = (int16_t)(insn & 0x0000FFFF);
631 disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
632 fn16 = insn & 0x0000FFFF;
633 fn11 = (insn >> 5) & 0x000007FF;
634 fpfn = fn11 & 0x3F;
635 fn7 = (insn >> 5) & 0x0000007F;
636 fn2 = (insn >> 5) & 0x00000003;
637 ret = 0;
638 #if defined ALPHA_DEBUG_DISAS
639 if (logfile != NULL) {
640 fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
641 opc, ra, rb, rc, disp16);
642 }
643 #endif
644 switch (opc) {
645 case 0x00:
646 /* CALL_PAL */
647 if (palcode >= 0x80 && palcode < 0xC0) {
648 /* Unprivileged PAL call */
649 gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
650 #if !defined (CONFIG_USER_ONLY)
651 } else if (palcode < 0x40) {
652 /* Privileged PAL code */
653 if (ctx->mem_idx & 1)
654 goto invalid_opc;
655 else
656 gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
657 #endif
658 } else {
659 /* Invalid PAL call */
660 goto invalid_opc;
661 }
662 ret = 3;
663 break;
664 case 0x01:
665 /* OPC01 */
666 goto invalid_opc;
667 case 0x02:
668 /* OPC02 */
669 goto invalid_opc;
670 case 0x03:
671 /* OPC03 */
672 goto invalid_opc;
673 case 0x04:
674 /* OPC04 */
675 goto invalid_opc;
676 case 0x05:
677 /* OPC05 */
678 goto invalid_opc;
679 case 0x06:
680 /* OPC06 */
681 goto invalid_opc;
682 case 0x07:
683 /* OPC07 */
684 goto invalid_opc;
685 case 0x08:
686 /* LDA */
687 if (likely(ra != 31)) {
688 if (rb != 31)
689 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
690 else
691 tcg_gen_movi_i64(cpu_ir[ra], disp16);
692 }
693 break;
694 case 0x09:
695 /* LDAH */
696 if (likely(ra != 31)) {
697 if (rb != 31)
698 tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
699 else
700 tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
701 }
702 break;
703 case 0x0A:
704 /* LDBU */
705 if (!(ctx->amask & AMASK_BWX))
706 goto invalid_opc;
707 gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0);
708 break;
709 case 0x0B:
710 /* LDQ_U */
711 gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1);
712 break;
713 case 0x0C:
714 /* LDWU */
715 if (!(ctx->amask & AMASK_BWX))
716 goto invalid_opc;
717 gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 1);
718 break;
719 case 0x0D:
720 /* STW */
721 gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0);
722 break;
723 case 0x0E:
724 /* STB */
725 gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0);
726 break;
727 case 0x0F:
728 /* STQ_U */
729 gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1);
730 break;
731 case 0x10:
732 switch (fn7) {
733 case 0x00:
734 /* ADDL */
735 if (likely(rc != 31)) {
736 if (ra != 31) {
737 if (islit) {
738 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
739 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
740 } else {
741 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
742 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
743 }
744 } else {
745 if (islit)
746 tcg_gen_movi_i64(cpu_ir[rc], lit);
747 else
748 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
749 }
750 }
751 break;
752 case 0x02:
753 /* S4ADDL */
754 if (likely(rc != 31)) {
755 if (ra != 31) {
756 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
757 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
758 if (islit)
759 tcg_gen_addi_i64(tmp, tmp, lit);
760 else
761 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
762 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
763 tcg_temp_free(tmp);
764 } else {
765 if (islit)
766 tcg_gen_movi_i64(cpu_ir[rc], lit);
767 else
768 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
769 }
770 }
771 break;
772 case 0x09:
773 /* SUBL */
774 if (likely(rc != 31)) {
775 if (ra != 31) {
776 if (islit)
777 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
778 else
779 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
780 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
781 } else {
782 if (islit)
783 tcg_gen_movi_i64(cpu_ir[rc], -lit);
784 else {
785 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
786 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
787 }
788 }
789 break;
790 case 0x0B:
791 /* S4SUBL */
792 if (likely(rc != 31)) {
793 if (ra != 31) {
794 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
795 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
796 if (islit)
797 tcg_gen_subi_i64(tmp, tmp, lit);
798 else
799 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
800 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
801 tcg_temp_free(tmp);
802 } else {
803 if (islit)
804 tcg_gen_movi_i64(cpu_ir[rc], -lit);
805 else {
806 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
807 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
808 }
809 }
810 }
811 break;
812 case 0x0F:
813 /* CMPBGE */
814 gen_arith3_helper(helper_cmpbge, ra, rb, rc, islit, lit);
815 break;
816 case 0x12:
817 /* S8ADDL */
818 if (likely(rc != 31)) {
819 if (ra != 31) {
820 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
821 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
822 if (islit)
823 tcg_gen_addi_i64(tmp, tmp, lit);
824 else
825 tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
826 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
827 tcg_temp_free(tmp);
828 } else {
829 if (islit)
830 tcg_gen_movi_i64(cpu_ir[rc], lit);
831 else
832 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
833 }
834 }
835 break;
836 case 0x1B:
837 /* S8SUBL */
838 if (likely(rc != 31)) {
839 if (ra != 31) {
840 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
841 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
842 if (islit)
843 tcg_gen_subi_i64(tmp, tmp, lit);
844 else
845 tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
846 tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
847 tcg_temp_free(tmp);
848 } else {
849 if (islit)
850 tcg_gen_movi_i64(cpu_ir[rc], -lit);
851 else
852 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
853 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
854 }
855 }
856 }
857 break;
858 case 0x1D:
859 /* CMPULT */
860 gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
861 break;
862 case 0x20:
863 /* ADDQ */
864 if (likely(rc != 31)) {
865 if (ra != 31) {
866 if (islit)
867 tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
868 else
869 tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
870 } else {
871 if (islit)
872 tcg_gen_movi_i64(cpu_ir[rc], lit);
873 else
874 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
875 }
876 }
877 break;
878 case 0x22:
879 /* S4ADDQ */
880 if (likely(rc != 31)) {
881 if (ra != 31) {
882 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
883 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
884 if (islit)
885 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
886 else
887 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
888 tcg_temp_free(tmp);
889 } else {
890 if (islit)
891 tcg_gen_movi_i64(cpu_ir[rc], lit);
892 else
893 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
894 }
895 }
896 break;
897 case 0x29:
898 /* SUBQ */
899 if (likely(rc != 31)) {
900 if (ra != 31) {
901 if (islit)
902 tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
903 else
904 tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
905 } else {
906 if (islit)
907 tcg_gen_movi_i64(cpu_ir[rc], -lit);
908 else
909 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
910 }
911 }
912 break;
913 case 0x2B:
914 /* S4SUBQ */
915 if (likely(rc != 31)) {
916 if (ra != 31) {
917 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
918 tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
919 if (islit)
920 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
921 else
922 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
923 tcg_temp_free(tmp);
924 } else {
925 if (islit)
926 tcg_gen_movi_i64(cpu_ir[rc], -lit);
927 else
928 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
929 }
930 }
931 break;
932 case 0x2D:
933 /* CMPEQ */
934 gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
935 break;
936 case 0x32:
937 /* S8ADDQ */
938 if (likely(rc != 31)) {
939 if (ra != 31) {
940 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
941 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
942 if (islit)
943 tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
944 else
945 tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
946 tcg_temp_free(tmp);
947 } else {
948 if (islit)
949 tcg_gen_movi_i64(cpu_ir[rc], lit);
950 else
951 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
952 }
953 }
954 break;
955 case 0x3B:
956 /* S8SUBQ */
957 if (likely(rc != 31)) {
958 if (ra != 31) {
959 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
960 tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
961 if (islit)
962 tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
963 else
964 tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
965 tcg_temp_free(tmp);
966 } else {
967 if (islit)
968 tcg_gen_movi_i64(cpu_ir[rc], -lit);
969 else
970 tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
971 }
972 }
973 break;
974 case 0x3D:
975 /* CMPULE */
976 gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
977 break;
978 case 0x40:
979 /* ADDL/V */
980 gen_arith3_helper(helper_addlv, ra, rb, rc, islit, lit);
981 break;
982 case 0x49:
983 /* SUBL/V */
984 gen_arith3_helper(helper_sublv, ra, rb, rc, islit, lit);
985 break;
986 case 0x4D:
987 /* CMPLT */
988 gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
989 break;
990 case 0x60:
991 /* ADDQ/V */
992 gen_arith3_helper(helper_addqv, ra, rb, rc, islit, lit);
993 break;
994 case 0x69:
995 /* SUBQ/V */
996 gen_arith3_helper(helper_subqv, ra, rb, rc, islit, lit);
997 break;
998 case 0x6D:
999 /* CMPLE */
1000 gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
1001 break;
1002 default:
1003 goto invalid_opc;
1004 }
1005 break;
1006 case 0x11:
1007 switch (fn7) {
1008 case 0x00:
1009 /* AND */
1010 if (likely(rc != 31)) {
1011 if (ra == 31)
1012 tcg_gen_movi_i64(cpu_ir[rc], 0);
1013 else if (islit)
1014 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
1015 else
1016 tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1017 }
1018 break;
1019 case 0x08:
1020 /* BIC */
1021 if (likely(rc != 31)) {
1022 if (ra != 31) {
1023 if (islit)
1024 tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1025 else {
1026 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
1027 tcg_gen_not_i64(tmp, cpu_ir[rb]);
1028 tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1029 tcg_temp_free(tmp);
1030 }
1031 } else
1032 tcg_gen_movi_i64(cpu_ir[rc], 0);
1033 }
1034 break;
1035 case 0x14:
1036 /* CMOVLBS */
1037 gen_cmov(ctx, TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
1038 break;
1039 case 0x16:
1040 /* CMOVLBC */
1041 gen_cmov(ctx, TCG_COND_NE, ra, rb, rc, islit, lit, 1);
1042 break;
1043 case 0x20:
1044 /* BIS */
1045 if (likely(rc != 31)) {
1046 if (ra != 31) {
1047 if (islit)
1048 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1049 else
1050 tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1051 } else {
1052 if (islit)
1053 tcg_gen_movi_i64(cpu_ir[rc], lit);
1054 else
1055 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1056 }
1057 }
1058 break;
1059 case 0x24:
1060 /* CMOVEQ */
1061 gen_cmov(ctx, TCG_COND_NE, ra, rb, rc, islit, lit, 0);
1062 break;
1063 case 0x26:
1064 /* CMOVNE */
1065 gen_cmov(ctx, TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
1066 break;
1067 case 0x28:
1068 /* ORNOT */
1069 if (likely(rc != 31)) {
1070 if (ra != 31) {
1071 if (islit)
1072 tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1073 else {
1074 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
1075 tcg_gen_not_i64(tmp, cpu_ir[rb]);
1076 tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1077 tcg_temp_free(tmp);
1078 }
1079 } else {
1080 if (islit)
1081 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1082 else
1083 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1084 }
1085 }
1086 break;
1087 case 0x40:
1088 /* XOR */
1089 if (likely(rc != 31)) {
1090 if (ra != 31) {
1091 if (islit)
1092 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
1093 else
1094 tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1095 } else {
1096 if (islit)
1097 tcg_gen_movi_i64(cpu_ir[rc], lit);
1098 else
1099 tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
1100 }
1101 }
1102 break;
1103 case 0x44:
1104 /* CMOVLT */
1105 gen_cmov(ctx, TCG_COND_GE, ra, rb, rc, islit, lit, 0);
1106 break;
1107 case 0x46:
1108 /* CMOVGE */
1109 gen_cmov(ctx, TCG_COND_LT, ra, rb, rc, islit, lit, 0);
1110 break;
1111 case 0x48:
1112 /* EQV */
1113 if (likely(rc != 31)) {
1114 if (ra != 31) {
1115 if (islit)
1116 tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
1117 else {
1118 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
1119 tcg_gen_not_i64(tmp, cpu_ir[rb]);
1120 tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], tmp);
1121 tcg_temp_free(tmp);
1122 }
1123 } else {
1124 if (islit)
1125 tcg_gen_movi_i64(cpu_ir[rc], ~lit);
1126 else
1127 tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
1128 }
1129 }
1130 break;
1131 case 0x61:
1132 /* AMASK */
1133 if (likely(rc != 31)) {
1134 if (islit)
1135 tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
1136 else
1137 tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
1138 }
1139 break;
1140 case 0x64:
1141 /* CMOVLE */
1142 gen_cmov(ctx, TCG_COND_GT, ra, rb, rc, islit, lit, 0);
1143 break;
1144 case 0x66:
1145 /* CMOVGT */
1146 gen_cmov(ctx, TCG_COND_LE, ra, rb, rc, islit, lit, 0);
1147 break;
1148 case 0x6C:
1149 /* IMPLVER */
1150 if (rc != 31)
1151 tcg_gen_helper_1_0(helper_load_implver, cpu_ir[rc]);
1152 break;
1153 default:
1154 goto invalid_opc;
1155 }
1156 break;
1157 case 0x12:
1158 switch (fn7) {
1159 case 0x02:
1160 /* MSKBL */
1161 gen_arith3_helper(helper_mskbl, ra, rb, rc, islit, lit);
1162 break;
1163 case 0x06:
1164 /* EXTBL */
1165 gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
1166 break;
1167 case 0x0B:
1168 /* INSBL */
1169 gen_arith3_helper(helper_insbl, ra, rb, rc, islit, lit);
1170 break;
1171 case 0x12:
1172 /* MSKWL */
1173 gen_arith3_helper(helper_mskwl, ra, rb, rc, islit, lit);
1174 break;
1175 case 0x16:
1176 /* EXTWL */
1177 gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1178 break;
1179 case 0x1B:
1180 /* INSWL */
1181 gen_arith3_helper(helper_inswl, ra, rb, rc, islit, lit);
1182 break;
1183 case 0x22:
1184 /* MSKLL */
1185 gen_arith3_helper(helper_mskll, ra, rb, rc, islit, lit);
1186 break;
1187 case 0x26:
1188 /* EXTLL */
1189 gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
1190 break;
1191 case 0x2B:
1192 /* INSLL */
1193 gen_arith3_helper(helper_insll, ra, rb, rc, islit, lit);
1194 break;
1195 case 0x30:
1196 /* ZAP */
1197 gen_arith3_helper(helper_zap, ra, rb, rc, islit, lit);
1198 break;
1199 case 0x31:
1200 /* ZAPNOT */
1201 gen_arith3_helper(helper_zapnot, ra, rb, rc, islit, lit);
1202 break;
1203 case 0x32:
1204 /* MSKQL */
1205 gen_arith3_helper(helper_mskql, ra, rb, rc, islit, lit);
1206 break;
1207 case 0x34:
1208 /* SRL */
1209 if (likely(rc != 31)) {
1210 if (ra != 31) {
1211 if (islit)
1212 tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1213 else {
1214 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1215 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1216 tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
1217 tcg_temp_free(shift);
1218 }
1219 } else
1220 tcg_gen_movi_i64(cpu_ir[rc], 0);
1221 }
1222 break;
1223 case 0x36:
1224 /* EXTQL */
1225 gen_ext_l(NULL, ra, rb, rc, islit, lit);
1226 break;
1227 case 0x39:
1228 /* SLL */
1229 if (likely(rc != 31)) {
1230 if (ra != 31) {
1231 if (islit)
1232 tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1233 else {
1234 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1235 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1236 tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
1237 tcg_temp_free(shift);
1238 }
1239 } else
1240 tcg_gen_movi_i64(cpu_ir[rc], 0);
1241 }
1242 break;
1243 case 0x3B:
1244 /* INSQL */
1245 gen_arith3_helper(helper_insql, ra, rb, rc, islit, lit);
1246 break;
1247 case 0x3C:
1248 /* SRA */
1249 if (likely(rc != 31)) {
1250 if (ra != 31) {
1251 if (islit)
1252 tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
1253 else {
1254 TCGv shift = tcg_temp_new(TCG_TYPE_I64);
1255 tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
1256 tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
1257 tcg_temp_free(shift);
1258 }
1259 } else
1260 tcg_gen_movi_i64(cpu_ir[rc], 0);
1261 }
1262 break;
1263 case 0x52:
1264 /* MSKWH */
1265 gen_arith3_helper(helper_mskwh, ra, rb, rc, islit, lit);
1266 break;
1267 case 0x57:
1268 /* INSWH */
1269 gen_arith3_helper(helper_inswh, ra, rb, rc, islit, lit);
1270 break;
1271 case 0x5A:
1272 /* EXTWH */
1273 gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1274 break;
1275 case 0x62:
1276 /* MSKLH */
1277 gen_arith3_helper(helper_msklh, ra, rb, rc, islit, lit);
1278 break;
1279 case 0x67:
1280 /* INSLH */
1281 gen_arith3_helper(helper_inslh, ra, rb, rc, islit, lit);
1282 break;
1283 case 0x6A:
1284 /* EXTLH */
1285 gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
1286 break;
1287 case 0x72:
1288 /* MSKQH */
1289 gen_arith3_helper(helper_mskqh, ra, rb, rc, islit, lit);
1290 break;
1291 case 0x77:
1292 /* INSQH */
1293 gen_arith3_helper(helper_insqh, ra, rb, rc, islit, lit);
1294 break;
1295 case 0x7A:
1296 /* EXTQH */
1297 gen_ext_h(NULL, ra, rb, rc, islit, lit);
1298 break;
1299 default:
1300 goto invalid_opc;
1301 }
1302 break;
1303 case 0x13:
1304 switch (fn7) {
1305 case 0x00:
1306 /* MULL */
1307 if (likely(rc != 31)) {
1308 if (ra == 31)
1309 tcg_gen_movi_i64(cpu_ir[rc], 0);
1310 else {
1311 if (islit)
1312 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1313 else
1314 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1315 tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
1316 }
1317 }
1318 break;
1319 case 0x20:
1320 /* MULQ */
1321 if (likely(rc != 31)) {
1322 if (ra == 31)
1323 tcg_gen_movi_i64(cpu_ir[rc], 0);
1324 else if (islit)
1325 tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
1326 else
1327 tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
1328 }
1329 break;
1330 case 0x30:
1331 /* UMULH */
1332 gen_arith3_helper(helper_umulh, ra, rb, rc, islit, lit);
1333 break;
1334 case 0x40:
1335 /* MULL/V */
1336 gen_arith3_helper(helper_mullv, ra, rb, rc, islit, lit);
1337 break;
1338 case 0x60:
1339 /* MULQ/V */
1340 gen_arith3_helper(helper_mulqv, ra, rb, rc, islit, lit);
1341 break;
1342 default:
1343 goto invalid_opc;
1344 }
1345 break;
1346 case 0x14:
1347 switch (fpfn) { /* f11 & 0x3F */
1348 case 0x04:
1349 /* ITOFS */
1350 if (!(ctx->amask & AMASK_FIX))
1351 goto invalid_opc;
1352 gen_itf(ctx, &gen_op_itofs, ra, rc);
1353 break;
1354 case 0x0A:
1355 /* SQRTF */
1356 if (!(ctx->amask & AMASK_FIX))
1357 goto invalid_opc;
1358 gen_farith2(ctx, &gen_op_sqrtf, rb, rc);
1359 break;
1360 case 0x0B:
1361 /* SQRTS */
1362 if (!(ctx->amask & AMASK_FIX))
1363 goto invalid_opc;
1364 gen_farith2(ctx, &gen_op_sqrts, rb, rc);
1365 break;
1366 case 0x14:
1367 /* ITOFF */
1368 if (!(ctx->amask & AMASK_FIX))
1369 goto invalid_opc;
1370 #if 0 // TODO
1371 gen_itf(ctx, &gen_op_itoff, ra, rc);
1372 #else
1373 goto invalid_opc;
1374 #endif
1375 break;
1376 case 0x24:
1377 /* ITOFT */
1378 if (!(ctx->amask & AMASK_FIX))
1379 goto invalid_opc;
1380 gen_itf(ctx, &gen_op_itoft, ra, rc);
1381 break;
1382 case 0x2A:
1383 /* SQRTG */
1384 if (!(ctx->amask & AMASK_FIX))
1385 goto invalid_opc;
1386 gen_farith2(ctx, &gen_op_sqrtg, rb, rc);
1387 break;
1388 case 0x02B:
1389 /* SQRTT */
1390 if (!(ctx->amask & AMASK_FIX))
1391 goto invalid_opc;
1392 gen_farith2(ctx, &gen_op_sqrtt, rb, rc);
1393 break;
1394 default:
1395 goto invalid_opc;
1396 }
1397 break;
1398 case 0x15:
1399 /* VAX floating point */
1400 /* XXX: rounding mode and trap are ignored (!) */
1401 switch (fpfn) { /* f11 & 0x3F */
1402 case 0x00:
1403 /* ADDF */
1404 gen_farith3(ctx, &gen_op_addf, ra, rb, rc);
1405 break;
1406 case 0x01:
1407 /* SUBF */
1408 gen_farith3(ctx, &gen_op_subf, ra, rb, rc);
1409 break;
1410 case 0x02:
1411 /* MULF */
1412 gen_farith3(ctx, &gen_op_mulf, ra, rb, rc);
1413 break;
1414 case 0x03:
1415 /* DIVF */
1416 gen_farith3(ctx, &gen_op_divf, ra, rb, rc);
1417 break;
1418 case 0x1E:
1419 /* CVTDG */
1420 #if 0 // TODO
1421 gen_farith2(ctx, &gen_op_cvtdg, rb, rc);
1422 #else
1423 goto invalid_opc;
1424 #endif
1425 break;
1426 case 0x20:
1427 /* ADDG */
1428 gen_farith3(ctx, &gen_op_addg, ra, rb, rc);
1429 break;
1430 case 0x21:
1431 /* SUBG */
1432 gen_farith3(ctx, &gen_op_subg, ra, rb, rc);
1433 break;
1434 case 0x22:
1435 /* MULG */
1436 gen_farith3(ctx, &gen_op_mulg, ra, rb, rc);
1437 break;
1438 case 0x23:
1439 /* DIVG */
1440 gen_farith3(ctx, &gen_op_divg, ra, rb, rc);
1441 break;
1442 case 0x25:
1443 /* CMPGEQ */
1444 gen_farith3(ctx, &gen_op_cmpgeq, ra, rb, rc);
1445 break;
1446 case 0x26:
1447 /* CMPGLT */
1448 gen_farith3(ctx, &gen_op_cmpglt, ra, rb, rc);
1449 break;
1450 case 0x27:
1451 /* CMPGLE */
1452 gen_farith3(ctx, &gen_op_cmpgle, ra, rb, rc);
1453 break;
1454 case 0x2C:
1455 /* CVTGF */
1456 gen_farith2(ctx, &gen_op_cvtgf, rb, rc);
1457 break;
1458 case 0x2D:
1459 /* CVTGD */
1460 #if 0 // TODO
1461 gen_farith2(ctx, &gen_op_cvtgd, rb, rc);
1462 #else
1463 goto invalid_opc;
1464 #endif
1465 break;
1466 case 0x2F:
1467 /* CVTGQ */
1468 gen_farith2(ctx, &gen_op_cvtgq, rb, rc);
1469 break;
1470 case 0x3C:
1471 /* CVTQF */
1472 gen_farith2(ctx, &gen_op_cvtqf, rb, rc);
1473 break;
1474 case 0x3E:
1475 /* CVTQG */
1476 gen_farith2(ctx, &gen_op_cvtqg, rb, rc);
1477 break;
1478 default:
1479 goto invalid_opc;
1480 }
1481 break;
1482 case 0x16:
1483 /* IEEE floating-point */
1484 /* XXX: rounding mode and traps are ignored (!) */
1485 switch (fpfn) { /* f11 & 0x3F */
1486 case 0x00:
1487 /* ADDS */
1488 gen_farith3(ctx, &gen_op_adds, ra, rb, rc);
1489 break;
1490 case 0x01:
1491 /* SUBS */
1492 gen_farith3(ctx, &gen_op_subs, ra, rb, rc);
1493 break;
1494 case 0x02:
1495 /* MULS */
1496 gen_farith3(ctx, &gen_op_muls, ra, rb, rc);
1497 break;
1498 case 0x03:
1499 /* DIVS */
1500 gen_farith3(ctx, &gen_op_divs, ra, rb, rc);
1501 break;
1502 case 0x20:
1503 /* ADDT */
1504 gen_farith3(ctx, &gen_op_addt, ra, rb, rc);
1505 break;
1506 case 0x21:
1507 /* SUBT */
1508 gen_farith3(ctx, &gen_op_subt, ra, rb, rc);
1509 break;
1510 case 0x22:
1511 /* MULT */
1512 gen_farith3(ctx, &gen_op_mult, ra, rb, rc);
1513 break;
1514 case 0x23:
1515 /* DIVT */
1516 gen_farith3(ctx, &gen_op_divt, ra, rb, rc);
1517 break;
1518 case 0x24:
1519 /* CMPTUN */
1520 gen_farith3(ctx, &gen_op_cmptun, ra, rb, rc);
1521 break;
1522 case 0x25:
1523 /* CMPTEQ */
1524 gen_farith3(ctx, &gen_op_cmpteq, ra, rb, rc);
1525 break;
1526 case 0x26:
1527 /* CMPTLT */
1528 gen_farith3(ctx, &gen_op_cmptlt, ra, rb, rc);
1529 break;
1530 case 0x27:
1531 /* CMPTLE */
1532 gen_farith3(ctx, &gen_op_cmptle, ra, rb, rc);
1533 break;
1534 case 0x2C:
1535 /* XXX: incorrect */
1536 if (fn11 == 0x2AC) {
1537 /* CVTST */
1538 gen_farith2(ctx, &gen_op_cvtst, rb, rc);
1539 } else {
1540 /* CVTTS */
1541 gen_farith2(ctx, &gen_op_cvtts, rb, rc);
1542 }
1543 break;
1544 case 0x2F:
1545 /* CVTTQ */
1546 gen_farith2(ctx, &gen_op_cvttq, rb, rc);
1547 break;
1548 case 0x3C:
1549 /* CVTQS */
1550 gen_farith2(ctx, &gen_op_cvtqs, rb, rc);
1551 break;
1552 case 0x3E:
1553 /* CVTQT */
1554 gen_farith2(ctx, &gen_op_cvtqt, rb, rc);
1555 break;
1556 default:
1557 goto invalid_opc;
1558 }
1559 break;
1560 case 0x17:
1561 switch (fn11) {
1562 case 0x010:
1563 /* CVTLQ */
1564 gen_farith2(ctx, &gen_op_cvtlq, rb, rc);
1565 break;
1566 case 0x020:
1567 /* CPYS */
1568 if (ra == rb) {
1569 if (ra == 31 && rc == 31) {
1570 /* FNOP */
1571 gen_op_nop();
1572 } else {
1573 /* FMOV */
1574 gen_load_fir(ctx, rb, 0);
1575 gen_store_fir(ctx, rc, 0);
1576 }
1577 } else {
1578 gen_farith3(ctx, &gen_op_cpys, ra, rb, rc);
1579 }
1580 break;
1581 case 0x021:
1582 /* CPYSN */
1583 gen_farith2(ctx, &gen_op_cpysn, rb, rc);
1584 break;
1585 case 0x022:
1586 /* CPYSE */
1587 gen_farith2(ctx, &gen_op_cpyse, rb, rc);
1588 break;
1589 case 0x024:
1590 /* MT_FPCR */
1591 gen_load_fir(ctx, ra, 0);
1592 gen_op_store_fpcr();
1593 break;
1594 case 0x025:
1595 /* MF_FPCR */
1596 gen_op_load_fpcr();
1597 gen_store_fir(ctx, ra, 0);
1598 break;
1599 case 0x02A:
1600 /* FCMOVEQ */
1601 gen_fcmov(ctx, &gen_op_cmpfeq, ra, rb, rc);
1602 break;
1603 case 0x02B:
1604 /* FCMOVNE */
1605 gen_fcmov(ctx, &gen_op_cmpfne, ra, rb, rc);
1606 break;
1607 case 0x02C:
1608 /* FCMOVLT */
1609 gen_fcmov(ctx, &gen_op_cmpflt, ra, rb, rc);
1610 break;
1611 case 0x02D:
1612 /* FCMOVGE */
1613 gen_fcmov(ctx, &gen_op_cmpfge, ra, rb, rc);
1614 break;
1615 case 0x02E:
1616 /* FCMOVLE */
1617 gen_fcmov(ctx, &gen_op_cmpfle, ra, rb, rc);
1618 break;
1619 case 0x02F:
1620 /* FCMOVGT */
1621 gen_fcmov(ctx, &gen_op_cmpfgt, ra, rb, rc);
1622 break;
1623 case 0x030:
1624 /* CVTQL */
1625 gen_farith2(ctx, &gen_op_cvtql, rb, rc);
1626 break;
1627 case 0x130:
1628 /* CVTQL/V */
1629 gen_farith2(ctx, &gen_op_cvtqlv, rb, rc);
1630 break;
1631 case 0x530:
1632 /* CVTQL/SV */
1633 gen_farith2(ctx, &gen_op_cvtqlsv, rb, rc);
1634 break;
1635 default:
1636 goto invalid_opc;
1637 }
1638 break;
1639 case 0x18:
1640 switch ((uint16_t)disp16) {
1641 case 0x0000:
1642 /* TRAPB */
1643 /* No-op. Just exit from the current tb */
1644 ret = 2;
1645 break;
1646 case 0x0400:
1647 /* EXCB */
1648 /* No-op. Just exit from the current tb */
1649 ret = 2;
1650 break;
1651 case 0x4000:
1652 /* MB */
1653 /* No-op */
1654 break;
1655 case 0x4400:
1656 /* WMB */
1657 /* No-op */
1658 break;
1659 case 0x8000:
1660 /* FETCH */
1661 /* No-op */
1662 break;
1663 case 0xA000:
1664 /* FETCH_M */
1665 /* No-op */
1666 break;
1667 case 0xC000:
1668 /* RPCC */
1669 if (ra != 31)
1670 tcg_gen_helper_1_0(helper_load_pcc, cpu_ir[ra]);
1671 break;
1672 case 0xE000:
1673 /* RC */
1674 if (ra != 31)
1675 tcg_gen_helper_1_0(helper_rc, cpu_ir[ra]);
1676 break;
1677 case 0xE800:
1678 /* ECB */
1679 /* XXX: TODO: evict tb cache at address rb */
1680 #if 0
1681 ret = 2;
1682 #else
1683 goto invalid_opc;
1684 #endif
1685 break;
1686 case 0xF000:
1687 /* RS */
1688 if (ra != 31)
1689 tcg_gen_helper_1_0(helper_rs, cpu_ir[ra]);
1690 break;
1691 case 0xF800:
1692 /* WH64 */
1693 /* No-op */
1694 break;
1695 default:
1696 goto invalid_opc;
1697 }
1698 break;
1699 case 0x19:
1700 /* HW_MFPR (PALcode) */
1701 #if defined (CONFIG_USER_ONLY)
1702 goto invalid_opc;
1703 #else
1704 if (!ctx->pal_mode)
1705 goto invalid_opc;
1706 gen_op_mfpr(insn & 0xFF);
1707 if (ra != 31)
1708 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
1709 break;
1710 #endif
1711 case 0x1A:
1712 if (ra != 31)
1713 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
1714 if (rb != 31)
1715 tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
1716 else
1717 tcg_gen_movi_i64(cpu_pc, 0);
1718 /* Those four jumps only differ by the branch prediction hint */
1719 switch (fn2) {
1720 case 0x0:
1721 /* JMP */
1722 break;
1723 case 0x1:
1724 /* JSR */
1725 break;
1726 case 0x2:
1727 /* RET */
1728 break;
1729 case 0x3:
1730 /* JSR_COROUTINE */
1731 break;
1732 }
1733 ret = 1;
1734 break;
1735 case 0x1B:
1736 /* HW_LD (PALcode) */
1737 #if defined (CONFIG_USER_ONLY)
1738 goto invalid_opc;
1739 #else
1740 if (!ctx->pal_mode)
1741 goto invalid_opc;
1742 if (rb != 31)
1743 tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
1744 else
1745 tcg_gen_movi_i64(cpu_T[0], 0);
1746 tcg_gen_movi_i64(cpu_T[1], disp12);
1747 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
1748 switch ((insn >> 12) & 0xF) {
1749 case 0x0:
1750 /* Longword physical access */
1751 gen_op_ldl_raw();
1752 break;
1753 case 0x1:
1754 /* Quadword physical access */
1755 gen_op_ldq_raw();
1756 break;
1757 case 0x2:
1758 /* Longword physical access with lock */
1759 gen_op_ldl_l_raw();
1760 break;
1761 case 0x3:
1762 /* Quadword physical access with lock */
1763 gen_op_ldq_l_raw();
1764 break;
1765 case 0x4:
1766 /* Longword virtual PTE fetch */
1767 gen_op_ldl_kernel();
1768 break;
1769 case 0x5:
1770 /* Quadword virtual PTE fetch */
1771 gen_op_ldq_kernel();
1772 break;
1773 case 0x6:
1774 /* Invalid */
1775 goto invalid_opc;
1776 case 0x7:
1777 /* Invalid */
1778 goto invalid_opc;
1779 case 0x8:
1780 /* Longword virtual access */
1781 gen_op_ld_phys_to_virt();
1782 gen_op_ldl_raw();
1783 break;
1784 case 0x9:
1785 /* Quadword virtual access */
1786 gen_op_ld_phys_to_virt();
1787 gen_op_ldq_raw();
1788 break;
1789 case 0xA:
1790 /* Longword virtual access with protection check */
1791 gen_ldl(ctx);
1792 break;
1793 case 0xB:
1794 /* Quadword virtual access with protection check */
1795 gen_ldq(ctx);
1796 break;
1797 case 0xC:
1798 /* Longword virtual access with altenate access mode */
1799 gen_op_set_alt_mode();
1800 gen_op_ld_phys_to_virt();
1801 gen_op_ldl_raw();
1802 gen_op_restore_mode();
1803 break;
1804 case 0xD:
1805 /* Quadword virtual access with altenate access mode */
1806 gen_op_set_alt_mode();
1807 gen_op_ld_phys_to_virt();
1808 gen_op_ldq_raw();
1809 gen_op_restore_mode();
1810 break;
1811 case 0xE:
1812 /* Longword virtual access with alternate access mode and
1813 * protection checks
1814 */
1815 gen_op_set_alt_mode();
1816 gen_op_ldl_data();
1817 gen_op_restore_mode();
1818 break;
1819 case 0xF:
1820 /* Quadword virtual access with alternate access mode and
1821 * protection checks
1822 */
1823 gen_op_set_alt_mode();
1824 gen_op_ldq_data();
1825 gen_op_restore_mode();
1826 break;
1827 }
1828 if (ra != 31)
1829 tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
1830 break;
1831 #endif
1832 case 0x1C:
1833 switch (fn7) {
1834 case 0x00:
1835 /* SEXTB */
1836 if (!(ctx->amask & AMASK_BWX))
1837 goto invalid_opc;
1838 if (likely(rc != 31)) {
1839 if (islit)
1840 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
1841 else
1842 tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
1843 }
1844 break;
1845 case 0x01:
1846 /* SEXTW */
1847 if (!(ctx->amask & AMASK_BWX))
1848 goto invalid_opc;
1849 if (likely(rc != 31)) {
1850 if (islit)
1851 tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
1852 else
1853 tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
1854 }
1855 break;
1856 case 0x30:
1857 /* CTPOP */
1858 if (!(ctx->amask & AMASK_CIX))
1859 goto invalid_opc;
1860 if (likely(rc != 31)) {
1861 if (islit)
1862 tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
1863 else
1864 tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
1865 }
1866 break;
1867 case 0x31:
1868 /* PERR */
1869 if (!(ctx->amask & AMASK_MVI))
1870 goto invalid_opc;
1871 /* XXX: TODO */
1872 goto invalid_opc;
1873 break;
1874 case 0x32:
1875 /* CTLZ */
1876 if (!(ctx->amask & AMASK_CIX))
1877 goto invalid_opc;
1878 if (likely(rc != 31)) {
1879 if (islit)
1880 tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
1881 else
1882 tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
1883 }
1884 break;
1885 case 0x33:
1886 /* CTTZ */
1887 if (!(ctx->amask & AMASK_CIX))
1888 goto invalid_opc;
1889 if (likely(rc != 31)) {
1890 if (islit)
1891 tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
1892 else
1893 tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
1894 }
1895 break;
1896 case 0x34:
1897 /* UNPKBW */
1898 if (!(ctx->amask & AMASK_MVI))
1899 goto invalid_opc;
1900 /* XXX: TODO */
1901 goto invalid_opc;
1902 break;
1903 case 0x35:
1904 /* UNPKWL */
1905 if (!(ctx->amask & AMASK_MVI))
1906 goto invalid_opc;
1907 /* XXX: TODO */
1908 goto invalid_opc;
1909 break;
1910 case 0x36:
1911 /* PKWB */
1912 if (!(ctx->amask & AMASK_MVI))
1913 goto invalid_opc;
1914 /* XXX: TODO */
1915 goto invalid_opc;
1916 break;
1917 case 0x37:
1918 /* PKLB */
1919 if (!(ctx->amask & AMASK_MVI))
1920 goto invalid_opc;
1921 /* XXX: TODO */
1922 goto invalid_opc;
1923 break;
1924 case 0x38:
1925 /* MINSB8 */
1926 if (!(ctx->amask & AMASK_MVI))
1927 goto invalid_opc;
1928 /* XXX: TODO */
1929 goto invalid_opc;
1930 break;
1931 case 0x39:
1932 /* MINSW4 */
1933 if (!(ctx->amask & AMASK_MVI))
1934 goto invalid_opc;
1935 /* XXX: TODO */
1936 goto invalid_opc;
1937 break;
1938 case 0x3A:
1939 /* MINUB8 */
1940 if (!(ctx->amask & AMASK_MVI))
1941 goto invalid_opc;
1942 /* XXX: TODO */
1943 goto invalid_opc;
1944 break;
1945 case 0x3B:
1946 /* MINUW4 */
1947 if (!(ctx->amask & AMASK_MVI))
1948 goto invalid_opc;
1949 /* XXX: TODO */
1950 goto invalid_opc;
1951 break;
1952 case 0x3C:
1953 /* MAXUB8 */
1954 if (!(ctx->amask & AMASK_MVI))
1955 goto invalid_opc;
1956 /* XXX: TODO */
1957 goto invalid_opc;
1958 break;
1959 case 0x3D:
1960 /* MAXUW4 */
1961 if (!(ctx->amask & AMASK_MVI))
1962 goto invalid_opc;
1963 /* XXX: TODO */
1964 goto invalid_opc;
1965 break;
1966 case 0x3E:
1967 /* MAXSB8 */
1968 if (!(ctx->amask & AMASK_MVI))
1969 goto invalid_opc;
1970 /* XXX: TODO */
1971 goto invalid_opc;
1972 break;
1973 case 0x3F:
1974 /* MAXSW4 */
1975 if (!(ctx->amask & AMASK_MVI))
1976 goto invalid_opc;
1977 /* XXX: TODO */
1978 goto invalid_opc;
1979 break;
1980 case 0x70:
1981 /* FTOIT */
1982 if (!(ctx->amask & AMASK_FIX))
1983 goto invalid_opc;
1984 gen_fti(ctx, &gen_op_ftoit, ra, rb);
1985 break;
1986 case 0x78:
1987 /* FTOIS */
1988 if (!(ctx->amask & AMASK_FIX))
1989 goto invalid_opc;
1990 gen_fti(ctx, &gen_op_ftois, ra, rb);
1991 break;
1992 default:
1993 goto invalid_opc;
1994 }
1995 break;
1996 case 0x1D:
1997 /* HW_MTPR (PALcode) */
1998 #if defined (CONFIG_USER_ONLY)
1999 goto invalid_opc;
2000 #else
2001 if (!ctx->pal_mode)
2002 goto invalid_opc;
2003 if (ra != 31)
2004 tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
2005 else
2006 tcg_gen_movi_i64(cpu_T[0], 0);
2007 gen_op_mtpr(insn & 0xFF);
2008 ret = 2;
2009 break;
2010 #endif
2011 case 0x1E:
2012 /* HW_REI (PALcode) */
2013 #if defined (CONFIG_USER_ONLY)
2014 goto invalid_opc;
2015 #else
2016 if (!ctx->pal_mode)
2017 goto invalid_opc;
2018 if (rb == 31) {
2019 /* "Old" alpha */
2020 gen_op_hw_rei();
2021 } else {
2022 if (ra != 31)
2023 tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
2024 else
2025 tcg_gen_movi_i64(cpu_T[0], 0);
2026 tcg_gen_movi_i64(cpu_T[1], (((int64_t)insn << 51) >> 51));
2027 tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
2028 gen_op_hw_ret();
2029 }
2030 ret = 2;
2031 break;
2032 #endif
2033 case 0x1F:
2034 /* HW_ST (PALcode) */
2035 #if defined (CONFIG_USER_ONLY)
2036 goto invalid_opc;
2037 #else
2038 if (!ctx->pal_mode)
2039 goto invalid_opc;
2040 if (ra != 31)
2041 tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp12);
2042 else
2043 tcg_gen_movi_i64(cpu_T[0], disp12);
2044 if (ra != 31)
2045 tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
2046 else
2047 tcg_gen_movi_i64(cpu_T[1], 0);
2048 switch ((insn >> 12) & 0xF) {
2049 case 0x0:
2050 /* Longword physical access */
2051 gen_op_stl_raw();
2052 break;
2053 case 0x1:
2054 /* Quadword physical access */
2055 gen_op_stq_raw();
2056 break;
2057 case 0x2:
2058 /* Longword physical access with lock */
2059 gen_op_stl_c_raw();
2060 break;
2061 case 0x3:
2062 /* Quadword physical access with lock */
2063 gen_op_stq_c_raw();
2064 break;
2065 case 0x4:
2066 /* Longword virtual access */
2067 gen_op_st_phys_to_virt();
2068 gen_op_stl_raw();
2069 break;
2070 case 0x5:
2071 /* Quadword virtual access */
2072 gen_op_st_phys_to_virt();
2073 gen_op_stq_raw();
2074 break;
2075 case 0x6:
2076 /* Invalid */
2077 goto invalid_opc;
2078 case 0x7:
2079 /* Invalid */
2080 goto invalid_opc;
2081 case 0x8:
2082 /* Invalid */
2083 goto invalid_opc;
2084 case 0x9:
2085 /* Invalid */
2086 goto invalid_opc;
2087 case 0xA:
2088 /* Invalid */
2089 goto invalid_opc;
2090 case 0xB:
2091 /* Invalid */
2092 goto invalid_opc;
2093 case 0xC:
2094 /* Longword virtual access with alternate access mode */
2095 gen_op_set_alt_mode();
2096 gen_op_st_phys_to_virt();
2097 gen_op_ldl_raw();
2098 gen_op_restore_mode();
2099 break;
2100 case 0xD:
2101 /* Quadword virtual access with alternate access mode */
2102 gen_op_set_alt_mode();
2103 gen_op_st_phys_to_virt();
2104 gen_op_ldq_raw();
2105 gen_op_restore_mode();
2106 break;
2107 case 0xE:
2108 /* Invalid */
2109 goto invalid_opc;
2110 case 0xF:
2111 /* Invalid */
2112 goto invalid_opc;
2113 }
2114 ret = 2;
2115 break;
2116 #endif
2117 case 0x20:
2118 /* LDF */
2119 #if 0 // TODO
2120 gen_load_fmem(ctx, &gen_ldf, ra, rb, disp16);
2121 #else
2122 goto invalid_opc;
2123 #endif
2124 break;
2125 case 0x21:
2126 /* LDG */
2127 #if 0 // TODO
2128 gen_load_fmem(ctx, &gen_ldg, ra, rb, disp16);
2129 #else
2130 goto invalid_opc;
2131 #endif
2132 break;
2133 case 0x22:
2134 /* LDS */
2135 gen_load_fmem(ctx, &gen_lds, ra, rb, disp16);
2136 break;
2137 case 0x23:
2138 /* LDT */
2139 gen_load_fmem(ctx, &gen_ldt, ra, rb, disp16);
2140 break;
2141 case 0x24:
2142 /* STF */
2143 #if 0 // TODO
2144 gen_store_fmem(ctx, &gen_stf, ra, rb, disp16);
2145 #else
2146 goto invalid_opc;
2147 #endif
2148 break;
2149 case 0x25:
2150 /* STG */
2151 #if 0 // TODO
2152 gen_store_fmem(ctx, &gen_stg, ra, rb, disp16);
2153 #else
2154 goto invalid_opc;
2155 #endif
2156 break;
2157 case 0x26:
2158 /* STS */
2159 gen_store_fmem(ctx, &gen_sts, ra, rb, disp16);
2160 break;
2161 case 0x27:
2162 /* STT */
2163 gen_store_fmem(ctx, &gen_stt, ra, rb, disp16);
2164 break;
2165 case 0x28:
2166 /* LDL */
2167 gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0);
2168 break;
2169 case 0x29:
2170 /* LDQ */
2171 gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0);
2172 break;
2173 case 0x2A:
2174 /* LDL_L */
2175 gen_load_mem_dyngen(ctx, &gen_ldl_l, ra, rb, disp16, 0);
2176 break;
2177 case 0x2B:
2178 /* LDQ_L */
2179 gen_load_mem_dyngen(ctx, &gen_ldq_l, ra, rb, disp16, 0);
2180 break;
2181 case 0x2C:
2182 /* STL */
2183 gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0);
2184 break;
2185 case 0x2D:
2186 /* STQ */
2187 gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0);
2188 break;
2189 case 0x2E:
2190 /* STL_C */
2191 gen_store_mem_dyngen(ctx, &gen_stl_c, ra, rb, disp16, 0);
2192 break;
2193 case 0x2F:
2194 /* STQ_C */
2195 gen_store_mem_dyngen(ctx, &gen_stq_c, ra, rb, disp16, 0);
2196 break;
2197 case 0x30:
2198 /* BR */
2199 if (ra != 31)
2200 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2201 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2202 ret = 1;
2203 break;
2204 case 0x31:
2205 /* FBEQ */
2206 gen_fbcond(ctx, &gen_op_cmpfeq, ra, disp16);
2207 ret = 1;
2208 break;
2209 case 0x32:
2210 /* FBLT */
2211 gen_fbcond(ctx, &gen_op_cmpflt, ra, disp16);
2212 ret = 1;
2213 break;
2214 case 0x33:
2215 /* FBLE */
2216 gen_fbcond(ctx, &gen_op_cmpfle, ra, disp16);
2217 ret = 1;
2218 break;
2219 case 0x34:
2220 /* BSR */
2221 if (ra != 31)
2222 tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
2223 tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
2224 ret = 1;
2225 break;
2226 case 0x35:
2227 /* FBNE */
2228 gen_fbcond(ctx, &gen_op_cmpfne, ra, disp16);
2229 ret = 1;
2230 break;
2231 case 0x36:
2232 /* FBGE */
2233 gen_fbcond(ctx, &gen_op_cmpfge, ra, disp16);
2234 ret = 1;
2235 break;
2236 case 0x37:
2237 /* FBGT */
2238 gen_fbcond(ctx, &gen_op_cmpfgt, ra, disp16);
2239 ret = 1;
2240 break;
2241 case 0x38:
2242 /* BLBC */
2243 gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 1);
2244 ret = 1;
2245 break;
2246 case 0x39:
2247 /* BEQ */
2248 gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 0);
2249 ret = 1;
2250 break;
2251 case 0x3A:
2252 /* BLT */
2253 gen_bcond(ctx, TCG_COND_LT, ra, disp16, 0);
2254 ret = 1;
2255 break;
2256 case 0x3B:
2257 /* BLE */
2258 gen_bcond(ctx, TCG_COND_LE, ra, disp16, 0);
2259 ret = 1;
2260 break;
2261 case 0x3C:
2262 /* BLBS */
2263 gen_bcond(ctx, TCG_COND_NE, ra, disp16, 1);
2264 ret = 1;
2265 break;
2266 case 0x3D:
2267 /* BNE */
2268 gen_bcond(ctx, TCG_COND_NE, ra, disp16, 0);
2269 ret = 1;
2270 break;
2271 case 0x3E:
2272 /* BGE */
2273 gen_bcond(ctx, TCG_COND_GE, ra, disp16, 0);
2274 ret = 1;
2275 break;
2276 case 0x3F:
2277 /* BGT */
2278 gen_bcond(ctx, TCG_COND_GT, ra, disp16, 0);
2279 ret = 1;
2280 break;
2281 invalid_opc:
2282 gen_invalid(ctx);
2283 ret = 3;
2284 break;
2285 }
2286
2287 return ret;
2288 }
2289
2290 static always_inline void gen_intermediate_code_internal (CPUState *env,
2291 TranslationBlock *tb,
2292 int search_pc)
2293 {
2294 #if defined ALPHA_DEBUG_DISAS
2295 static int insn_count;
2296 #endif
2297 DisasContext ctx, *ctxp = &ctx;
2298 target_ulong pc_start;
2299 uint32_t insn;
2300 uint16_t *gen_opc_end;
2301 int j, lj = -1;
2302 int ret;
2303 int num_insns;
2304 int max_insns;
2305
2306 pc_start = tb->pc;
2307 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
2308 ctx.pc = pc_start;
2309 ctx.amask = env->amask;
2310 #if defined (CONFIG_USER_ONLY)
2311 ctx.mem_idx = 0;
2312 #else
2313 ctx.mem_idx = ((env->ps >> 3) & 3);
2314 ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
2315 #endif
2316 num_insns = 0;
2317 max_insns = tb->cflags & CF_COUNT_MASK;
2318 if (max_insns == 0)
2319 max_insns = CF_COUNT_MASK;
2320
2321 gen_icount_start();
2322 for (ret = 0; ret == 0;) {
2323 if (env->nb_breakpoints > 0) {
2324 for(j = 0; j < env->nb_breakpoints; j++) {
2325 if (env->breakpoints[j] == ctx.pc) {
2326 gen_excp(&ctx, EXCP_DEBUG, 0);
2327 break;
2328 }
2329 }
2330 }
2331 if (search_pc) {
2332 j = gen_opc_ptr - gen_opc_buf;
2333 if (lj < j) {
2334 lj++;
2335 while (lj < j)
2336 gen_opc_instr_start[lj++] = 0;
2337 gen_opc_pc[lj] = ctx.pc;
2338 gen_opc_instr_start[lj] = 1;
2339 gen_opc_icount[lj] = num_insns;
2340 }
2341 }
2342 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
2343 gen_io_start();
2344 #if defined ALPHA_DEBUG_DISAS
2345 insn_count++;
2346 if (logfile != NULL) {
2347 fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
2348 ctx.pc, ctx.mem_idx);
2349 }
2350 #endif
2351 insn = ldl_code(ctx.pc);
2352 #if defined ALPHA_DEBUG_DISAS
2353 insn_count++;
2354 if (logfile != NULL) {
2355 fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
2356 }
2357 #endif
2358 num_insns++;
2359 ctx.pc += 4;
2360 ret = translate_one(ctxp, insn);
2361 if (ret != 0)
2362 break;
2363 /* if we reach a page boundary or are single stepping, stop
2364 * generation
2365 */
2366 if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
2367 (env->singlestep_enabled) ||
2368 num_insns >= max_insns) {
2369 break;
2370 }
2371 #if defined (DO_SINGLE_STEP)
2372 break;
2373 #endif
2374 }
2375 if (ret != 1 && ret != 3) {
2376 tcg_gen_movi_i64(cpu_pc, ctx.pc);
2377 }
2378 #if defined (DO_TB_FLUSH)
2379 tcg_gen_helper_0_0(helper_tb_flush);
2380 #endif
2381 if (tb->cflags & CF_LAST_IO)
2382 gen_io_end();
2383 /* Generate the return instruction */
2384 tcg_gen_exit_tb(0);
2385 gen_icount_end(tb, num_insns);
2386 *gen_opc_ptr = INDEX_op_end;
2387 if (search_pc) {
2388 j = gen_opc_ptr - gen_opc_buf;
2389 lj++;
2390 while (lj <= j)
2391 gen_opc_instr_start[lj++] = 0;
2392 } else {
2393 tb->size = ctx.pc - pc_start;
2394 tb->icount = num_insns;
2395 }
2396 #if defined ALPHA_DEBUG_DISAS
2397 if (loglevel & CPU_LOG_TB_CPU) {
2398 cpu_dump_state(env, logfile, fprintf, 0);
2399 }
2400 if (loglevel & CPU_LOG_TB_IN_ASM) {
2401 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
2402 target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
2403 fprintf(logfile, "\n");
2404 }
2405 #endif
2406 }
2407
2408 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
2409 {
2410 gen_intermediate_code_internal(env, tb, 0);
2411 }
2412
2413 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
2414 {
2415 gen_intermediate_code_internal(env, tb, 1);
2416 }
2417
2418 CPUAlphaState * cpu_alpha_init (const char *cpu_model)
2419 {
2420 CPUAlphaState *env;
2421 uint64_t hwpcb;
2422
2423 env = qemu_mallocz(sizeof(CPUAlphaState));
2424 if (!env)
2425 return NULL;
2426 cpu_exec_init(env);
2427 alpha_translate_init();
2428 tlb_flush(env, 1);
2429 /* XXX: should not be hardcoded */
2430 env->implver = IMPLVER_2106x;
2431 env->ps = 0x1F00;
2432 #if defined (CONFIG_USER_ONLY)
2433 env->ps |= 1 << 3;
2434 #endif
2435 pal_init(env);
2436 /* Initialize IPR */
2437 hwpcb = env->ipr[IPR_PCBB];
2438 env->ipr[IPR_ASN] = 0;
2439 env->ipr[IPR_ASTEN] = 0;
2440 env->ipr[IPR_ASTSR] = 0;
2441 env->ipr[IPR_DATFX] = 0;
2442 /* XXX: fix this */
2443 // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2444 // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2445 // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2446 // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2447 env->ipr[IPR_FEN] = 0;
2448 env->ipr[IPR_IPL] = 31;
2449 env->ipr[IPR_MCES] = 0;
2450 env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
2451 // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2452 env->ipr[IPR_SISR] = 0;
2453 env->ipr[IPR_VIRBND] = -1ULL;
2454
2455 return env;
2456 }
2457
2458 void gen_pc_load(CPUState *env, TranslationBlock *tb,
2459 unsigned long searched_pc, int pc_pos, void *puc)
2460 {
2461 env->pc = gen_opc_pc[pc_pos];
2462 }