2 * Alpha emulation cpu translation for qemu.
4 * Copyright (c) 2007 Jocelyn Mayer
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
28 #include "host-utils.h"
31 #include "qemu-common.h"
33 #define DO_SINGLE_STEP
35 #define ALPHA_DEBUG_DISAS
38 typedef struct DisasContext DisasContext
;
42 #if !defined (CONFIG_USER_ONLY)
48 /* global register indexes */
50 static TCGv cpu_ir
[31];
53 /* dyngen register indexes */
57 static char cpu_reg_names
[10*4+21*5];
59 #include "gen-icount.h"
61 static void alpha_translate_init(void)
65 static int done_init
= 0;
70 cpu_env
= tcg_global_reg_new(TCG_TYPE_PTR
, TCG_AREG0
, "env");
72 #if TARGET_LONG_BITS > HOST_LONG_BITS
73 cpu_T
[0] = tcg_global_mem_new(TCG_TYPE_I64
, TCG_AREG0
,
74 offsetof(CPUState
, t0
), "T0");
75 cpu_T
[1] = tcg_global_mem_new(TCG_TYPE_I64
, TCG_AREG0
,
76 offsetof(CPUState
, t1
), "T1");
77 cpu_T
[2] = tcg_global_mem_new(TCG_TYPE_I64
, TCG_AREG0
,
78 offsetof(CPUState
, t2
), "T2");
80 cpu_T
[0] = tcg_global_reg_new(TCG_TYPE_I64
, TCG_AREG1
, "T0");
81 cpu_T
[1] = tcg_global_reg_new(TCG_TYPE_I64
, TCG_AREG2
, "T1");
82 cpu_T
[2] = tcg_global_reg_new(TCG_TYPE_I64
, TCG_AREG3
, "T2");
86 for (i
= 0; i
< 31; i
++) {
87 sprintf(p
, "ir%d", i
);
88 cpu_ir
[i
] = tcg_global_mem_new(TCG_TYPE_I64
, TCG_AREG0
,
89 offsetof(CPUState
, ir
[i
]), p
);
90 p
+= (i
< 10) ? 4 : 5;
93 cpu_pc
= tcg_global_mem_new(TCG_TYPE_I64
, TCG_AREG0
,
94 offsetof(CPUState
, pc
), "pc");
96 /* register helpers */
98 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
104 static always_inline
void gen_op_nop (void)
106 #if defined(GENERATE_NOP)
111 #define GEN32(func, NAME) \
112 static GenOpFunc *NAME ## _table [32] = { \
113 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
114 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
115 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
116 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
117 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
118 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
119 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
120 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
122 static always_inline void func (int n) \
124 NAME ## _table[n](); \
128 /* Special hacks for fir31 */
/* Floating-point register 31 is the architectural zero register: the
   reset/nop mapping below makes reads of fir31 produce a cleared FT
   temporary and makes writes/cmov to fir31 do nothing.  These macros
   stand in for the "31" slot generated by the GEN32 tables. */
129 #define gen_op_load_FT0_fir31 gen_op_reset_FT0
130 #define gen_op_load_FT1_fir31 gen_op_reset_FT1
131 #define gen_op_load_FT2_fir31 gen_op_reset_FT2
132 #define gen_op_store_FT0_fir31 gen_op_nop
133 #define gen_op_store_FT1_fir31 gen_op_nop
134 #define gen_op_store_FT2_fir31 gen_op_nop
135 #define gen_op_cmov_fir31 gen_op_nop
136 GEN32(gen_op_load_FT0_fir
, gen_op_load_FT0_fir
);
137 GEN32(gen_op_load_FT1_fir
, gen_op_load_FT1_fir
);
138 GEN32(gen_op_load_FT2_fir
, gen_op_load_FT2_fir
);
139 GEN32(gen_op_store_FT0_fir
, gen_op_store_FT0_fir
);
140 GEN32(gen_op_store_FT1_fir
, gen_op_store_FT1_fir
);
141 GEN32(gen_op_store_FT2_fir
, gen_op_store_FT2_fir
);
142 GEN32(gen_op_cmov_fir
, gen_op_cmov_fir
);
144 static always_inline
void gen_load_fir (DisasContext
*ctx
, int firn
, int Tn
)
148 gen_op_load_FT0_fir(firn
);
151 gen_op_load_FT1_fir(firn
);
154 gen_op_load_FT2_fir(firn
);
159 static always_inline
void gen_store_fir (DisasContext
*ctx
, int firn
, int Tn
)
163 gen_op_store_FT0_fir(firn
);
166 gen_op_store_FT1_fir(firn
);
169 gen_op_store_FT2_fir(firn
);
175 #if defined(CONFIG_USER_ONLY)
176 #define OP_LD_TABLE(width) \
177 static GenOpFunc *gen_op_ld##width[] = { \
178 &gen_op_ld##width##_raw, \
180 #define OP_ST_TABLE(width) \
181 static GenOpFunc *gen_op_st##width[] = { \
182 &gen_op_st##width##_raw, \
185 #define OP_LD_TABLE(width) \
186 static GenOpFunc *gen_op_ld##width[] = { \
187 &gen_op_ld##width##_kernel, \
188 &gen_op_ld##width##_executive, \
189 &gen_op_ld##width##_supervisor, \
190 &gen_op_ld##width##_user, \
192 #define OP_ST_TABLE(width) \
193 static GenOpFunc *gen_op_st##width[] = { \
194 &gen_op_st##width##_kernel, \
195 &gen_op_st##width##_executive, \
196 &gen_op_st##width##_supervisor, \
197 &gen_op_st##width##_user, \
201 #define GEN_LD(width) \
202 OP_LD_TABLE(width); \
203 static always_inline void gen_ld##width (DisasContext *ctx) \
205 (*gen_op_ld##width[ctx->mem_idx])(); \
208 #define GEN_ST(width) \
209 OP_ST_TABLE(width); \
210 static always_inline void gen_st##width (DisasContext *ctx) \
212 (*gen_op_st##width[ctx->mem_idx])(); \
230 #if 0 /* currently unused */
241 static always_inline
void _gen_op_bcond (DisasContext
*ctx
)
243 #if 0 // Qemu does not know how to do this...
244 gen_op_bcond(ctx
->pc
);
246 gen_op_bcond(ctx
->pc
>> 32, ctx
->pc
);
250 static always_inline
void gen_excp (DisasContext
*ctx
,
251 int exception
, int error_code
)
255 tcg_gen_movi_i64(cpu_pc
, ctx
->pc
);
256 tmp1
= tcg_const_i32(exception
);
257 tmp2
= tcg_const_i32(error_code
);
258 tcg_gen_helper_0_2(helper_excp
, tmp1
, tmp2
);
263 static always_inline
void gen_invalid (DisasContext
*ctx
)
265 gen_excp(ctx
, EXCP_OPCDEC
, 0);
268 static always_inline
void gen_load_mem (DisasContext
*ctx
,
269 void (*gen_load_op
)(DisasContext
*ctx
),
270 int ra
, int rb
, int32_t disp16
,
273 if (ra
== 31 && disp16
== 0) {
278 tcg_gen_addi_i64(cpu_T
[0], cpu_ir
[rb
], disp16
);
280 tcg_gen_movi_i64(cpu_T
[0], disp16
);
282 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], ~0x7);
285 tcg_gen_mov_i64(cpu_ir
[ra
], cpu_T
[1]);
289 static always_inline
void gen_store_mem (DisasContext
*ctx
,
290 void (*gen_store_op
)(DisasContext
*ctx
),
291 int ra
, int rb
, int32_t disp16
,
295 tcg_gen_addi_i64(cpu_T
[0], cpu_ir
[rb
], disp16
);
297 tcg_gen_movi_i64(cpu_T
[0], disp16
);
299 tcg_gen_andi_i64(cpu_T
[0], cpu_T
[0], ~0x7);
301 tcg_gen_mov_i64(cpu_T
[1], cpu_ir
[ra
]);
303 tcg_gen_movi_i64(cpu_T
[1], 0);
304 (*gen_store_op
)(ctx
);
307 static always_inline
void gen_load_fmem (DisasContext
*ctx
,
308 void (*gen_load_fop
)(DisasContext
*ctx
),
309 int ra
, int rb
, int32_t disp16
)
312 tcg_gen_addi_i64(cpu_T
[0], cpu_ir
[rb
], disp16
);
314 tcg_gen_movi_i64(cpu_T
[0], disp16
);
315 (*gen_load_fop
)(ctx
);
316 gen_store_fir(ctx
, ra
, 1);
319 static always_inline
void gen_store_fmem (DisasContext
*ctx
,
320 void (*gen_store_fop
)(DisasContext
*ctx
),
321 int ra
, int rb
, int32_t disp16
)
324 tcg_gen_addi_i64(cpu_T
[0], cpu_ir
[rb
], disp16
);
326 tcg_gen_movi_i64(cpu_T
[0], disp16
);
327 gen_load_fir(ctx
, ra
, 1);
328 (*gen_store_fop
)(ctx
);
331 static always_inline
void gen_bcond (DisasContext
*ctx
,
333 int ra
, int32_t disp16
, int mask
)
337 l1
= gen_new_label();
338 l2
= gen_new_label();
339 if (likely(ra
!= 31)) {
341 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
342 tcg_gen_andi_i64(tmp
, cpu_ir
[ra
], 1);
343 tcg_gen_brcondi_i64(cond
, tmp
, 0, l1
);
346 tcg_gen_brcondi_i64(cond
, cpu_ir
[ra
], 0, l1
);
348 /* Very uncommon case - Do not bother to optimize. */
349 TCGv tmp
= tcg_const_i64(0);
350 tcg_gen_brcondi_i64(cond
, tmp
, 0, l1
);
353 tcg_gen_movi_i64(cpu_pc
, ctx
->pc
);
356 tcg_gen_movi_i64(cpu_pc
, ctx
->pc
+ (int64_t)(disp16
<< 2));
360 static always_inline
void gen_fbcond (DisasContext
*ctx
,
361 void (*gen_test_op
)(void),
362 int ra
, int32_t disp16
)
364 tcg_gen_movi_i64(cpu_T
[1], ctx
->pc
+ (int64_t)(disp16
<< 2));
365 gen_load_fir(ctx
, ra
, 0);
370 static always_inline
void gen_arith3 (DisasContext
*ctx
,
371 void (*gen_arith_op
)(void),
372 int ra
, int rb
, int rc
,
373 int islit
, uint8_t lit
)
376 tcg_gen_mov_i64(cpu_T
[0], cpu_ir
[ra
]);
378 tcg_gen_movi_i64(cpu_T
[0], 0);
380 tcg_gen_movi_i64(cpu_T
[1], lit
);
382 tcg_gen_mov_i64(cpu_T
[1], cpu_ir
[rb
]);
384 tcg_gen_movi_i64(cpu_T
[1], 0);
387 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_T
[0]);
390 static always_inline
void gen_cmov (DisasContext
*ctx
,
392 int ra
, int rb
, int rc
,
393 int islit
, int8_t lit
, int mask
)
397 if (unlikely(rc
== 31))
400 l1
= gen_new_label();
404 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
405 tcg_gen_andi_i64(tmp
, cpu_ir
[ra
], 1);
406 tcg_gen_brcondi_i64(inv_cond
, tmp
, 0, l1
);
409 tcg_gen_brcondi_i64(inv_cond
, cpu_ir
[ra
], 0, l1
);
411 /* Very uncommon case - Do not bother to optimize. */
412 TCGv tmp
= tcg_const_i64(0);
413 tcg_gen_brcondi_i64(inv_cond
, tmp
, 0, l1
);
418 tcg_gen_movi_i64(cpu_ir
[rc
], lit
);
420 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
422 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
426 static always_inline
void gen_farith2 (DisasContext
*ctx
,
427 void (*gen_arith_fop
)(void),
430 gen_load_fir(ctx
, rb
, 0);
432 gen_store_fir(ctx
, rc
, 0);
435 static always_inline
void gen_farith3 (DisasContext
*ctx
,
436 void (*gen_arith_fop
)(void),
437 int ra
, int rb
, int rc
)
439 gen_load_fir(ctx
, ra
, 0);
440 gen_load_fir(ctx
, rb
, 1);
442 gen_store_fir(ctx
, rc
, 0);
445 static always_inline
void gen_fcmov (DisasContext
*ctx
,
446 void (*gen_test_fop
)(void),
447 int ra
, int rb
, int rc
)
449 gen_load_fir(ctx
, ra
, 0);
450 gen_load_fir(ctx
, rb
, 1);
455 static always_inline
void gen_fti (DisasContext
*ctx
,
456 void (*gen_move_fop
)(void),
459 gen_load_fir(ctx
, rc
, 0);
462 tcg_gen_mov_i64(cpu_ir
[ra
], cpu_T
[0]);
465 static always_inline
void gen_itf (DisasContext
*ctx
,
466 void (*gen_move_fop
)(void),
470 tcg_gen_mov_i64(cpu_T
[0], cpu_ir
[ra
]);
472 tcg_gen_movi_i64(cpu_T
[0], 0);
474 gen_store_fir(ctx
, rc
, 0);
477 /* EXTWH, EXTLH, EXTQH */
478 static always_inline
void gen_ext_h(void (*tcg_gen_ext_i64
)(TCGv t0
, TCGv t1
),
479 int ra
, int rb
, int rc
,
480 int islit
, int8_t lit
)
482 if (unlikely(rc
== 31))
487 tcg_gen_shli_i64(cpu_ir
[rc
], cpu_ir
[ra
], 64 - ((lit
& 7) * 8));
490 tmp1
= tcg_temp_new(TCG_TYPE_I64
);
491 tcg_gen_andi_i64(tmp1
, cpu_ir
[rb
], 7);
492 tcg_gen_shli_i64(tmp1
, tmp1
, 3);
493 tmp2
= tcg_const_i64(64);
494 tcg_gen_sub_i64(tmp1
, tmp2
, tmp1
);
496 if (tcg_gen_ext_i64
) {
497 tcg_gen_shl_i64(tmp1
, cpu_ir
[ra
], tmp1
);
498 tcg_gen_ext_i64(cpu_ir
[rc
], tmp1
);
500 tcg_gen_shl_i64(cpu_ir
[rc
], cpu_ir
[ra
], tmp1
);
503 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
505 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
508 /* EXTBL, EXTWL, EXTLL, EXTQL */
509 static always_inline
void gen_ext_l(void (*tcg_gen_ext_i64
)(TCGv t0
, TCGv t1
),
510 int ra
, int rb
, int rc
,
511 int islit
, int8_t lit
)
513 if (unlikely(rc
== 31))
518 tcg_gen_shri_i64(cpu_ir
[rc
], cpu_ir
[ra
], (lit
& 7) * 8);
520 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
521 tcg_gen_andi_i64(tmp
, cpu_ir
[rb
], 7);
522 tcg_gen_shli_i64(tmp
, tmp
, 3);
523 if (tcg_gen_ext_i64
) {
524 tcg_gen_shr_i64(tmp
, cpu_ir
[ra
], tmp
);
525 tcg_gen_ext_i64(cpu_ir
[rc
], tmp
);
527 tcg_gen_shr_i64(cpu_ir
[rc
], cpu_ir
[ra
], tmp
);
530 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
532 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
535 /* Code to call byte manipulation helpers, used by:
536 INSWH, INSLH, INSQH, INSBL, INSWL, INSLL, INSQL,
537 MSKWH, MSKLH, MSKQH, MSKBL, MSKWL, MSKLL, MSKQL,
540 WARNING: it assumes that when ra == 31 (the zero register) is used, the result is 0.
542 static always_inline
void gen_byte_manipulation(void *helper
,
543 int ra
, int rb
, int rc
,
544 int islit
, uint8_t lit
)
546 if (unlikely(rc
== 31))
550 if (islit
|| rb
== 31) {
551 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
553 tcg_gen_movi_i64(tmp
, lit
);
555 tcg_gen_movi_i64(tmp
, 0);
556 tcg_gen_helper_1_2(helper
, cpu_ir
[rc
], cpu_ir
[ra
], tmp
);
559 tcg_gen_helper_1_2(helper
, cpu_ir
[rc
], cpu_ir
[ra
], cpu_ir
[rb
]);
561 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
564 static always_inline
int translate_one (DisasContext
*ctx
, uint32_t insn
)
567 int32_t disp21
, disp16
, disp12
;
569 uint8_t opc
, ra
, rb
, rc
, sbz
, fpfn
, fn7
, fn2
, islit
;
573 /* Decode all instruction fields */
575 ra
= (insn
>> 21) & 0x1F;
576 rb
= (insn
>> 16) & 0x1F;
578 sbz
= (insn
>> 13) & 0x07;
579 islit
= (insn
>> 12) & 1;
580 lit
= (insn
>> 13) & 0xFF;
581 palcode
= insn
& 0x03FFFFFF;
582 disp21
= ((int32_t)((insn
& 0x001FFFFF) << 11)) >> 11;
583 disp16
= (int16_t)(insn
& 0x0000FFFF);
584 disp12
= (int32_t)((insn
& 0x00000FFF) << 20) >> 20;
585 fn16
= insn
& 0x0000FFFF;
586 fn11
= (insn
>> 5) & 0x000007FF;
588 fn7
= (insn
>> 5) & 0x0000007F;
589 fn2
= (insn
>> 5) & 0x00000003;
591 #if defined ALPHA_DEBUG_DISAS
592 if (logfile
!= NULL
) {
593 fprintf(logfile
, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
594 opc
, ra
, rb
, rc
, disp16
);
600 if (palcode
>= 0x80 && palcode
< 0xC0) {
601 /* Unprivileged PAL call */
602 gen_excp(ctx
, EXCP_CALL_PAL
+ ((palcode
& 0x1F) << 6), 0);
603 #if !defined (CONFIG_USER_ONLY)
604 } else if (palcode
< 0x40) {
605 /* Privileged PAL code */
606 if (ctx
->mem_idx
& 1)
609 gen_excp(ctx
, EXCP_CALL_PALP
+ ((palcode
& 0x1F) << 6), 0);
612 /* Invalid PAL call */
640 if (likely(ra
!= 31)) {
642 tcg_gen_addi_i64(cpu_ir
[ra
], cpu_ir
[rb
], disp16
);
644 tcg_gen_movi_i64(cpu_ir
[ra
], disp16
);
649 if (likely(ra
!= 31)) {
651 tcg_gen_addi_i64(cpu_ir
[ra
], cpu_ir
[rb
], disp16
<< 16);
653 tcg_gen_movi_i64(cpu_ir
[ra
], disp16
<< 16);
658 if (!(ctx
->amask
& AMASK_BWX
))
660 gen_load_mem(ctx
, &gen_ldbu
, ra
, rb
, disp16
, 0);
664 gen_load_mem(ctx
, &gen_ldq_u
, ra
, rb
, disp16
, 1);
668 if (!(ctx
->amask
& AMASK_BWX
))
670 gen_load_mem(ctx
, &gen_ldwu
, ra
, rb
, disp16
, 0);
674 if (!(ctx
->amask
& AMASK_BWX
))
676 gen_store_mem(ctx
, &gen_stw
, ra
, rb
, disp16
, 0);
680 if (!(ctx
->amask
& AMASK_BWX
))
682 gen_store_mem(ctx
, &gen_stb
, ra
, rb
, disp16
, 0);
686 gen_store_mem(ctx
, &gen_stq_u
, ra
, rb
, disp16
, 1);
692 if (likely(rc
!= 31)) {
695 tcg_gen_addi_i64(cpu_ir
[rc
], cpu_ir
[ra
], lit
);
696 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rc
]);
697 } else if (rb
!= 31) {
698 tcg_gen_add_i64(cpu_ir
[rc
], cpu_ir
[ra
], cpu_ir
[rb
]);
699 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rc
]);
701 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
704 tcg_gen_movi_i64(cpu_ir
[rc
], (int32_t)lit
);
706 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
708 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
714 if (likely(rc
!= 31)) {
716 if (islit
|| rb
!= 31) {
717 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
718 tcg_gen_shli_i64(tmp
, cpu_ir
[ra
], 2);
720 tcg_gen_addi_i64(tmp
, tmp
, lit
);
722 tcg_gen_add_i64(tmp
, tmp
, cpu_ir
[rb
]);
723 tcg_gen_ext32s_i64(cpu_ir
[rc
], tmp
);
726 tcg_gen_shli_i64(cpu_ir
[rc
], cpu_ir
[ra
], 2);
727 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rc
]);
731 tcg_gen_movi_i64(cpu_ir
[rc
], lit
);
733 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
735 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
741 if (likely(rc
!= 31)) {
744 tcg_gen_subi_i64(cpu_ir
[rc
], cpu_ir
[ra
], lit
);
745 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rc
]);
746 } else if (rb
!= 31) {
747 tcg_gen_sub_i64(cpu_ir
[rc
], cpu_ir
[ra
], cpu_ir
[rb
]);
748 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rc
]);
750 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
753 tcg_gen_movi_i64(cpu_ir
[rc
], -lit
);
755 tcg_gen_neg_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
756 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rc
]);
758 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
764 if (likely(rc
!= 31)) {
766 if (islit
|| rb
!= 31) {
767 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
768 tcg_gen_shli_i64(tmp
, cpu_ir
[ra
], 2);
770 tcg_gen_subi_i64(tmp
, tmp
, lit
);
772 tcg_gen_sub_i64(tmp
, tmp
, cpu_ir
[rb
]);
773 tcg_gen_ext32s_i64(cpu_ir
[rc
], tmp
);
776 tcg_gen_shli_i64(cpu_ir
[rc
], cpu_ir
[ra
], 2);
777 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rc
]);
781 tcg_gen_movi_i64(cpu_ir
[rc
], -lit
);
783 tcg_gen_neg_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
784 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rc
]);
786 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
792 gen_arith3(ctx
, &gen_op_cmpbge
, ra
, rb
, rc
, islit
, lit
);
796 if (likely(rc
!= 31)) {
798 if (islit
|| rb
!= 31) {
799 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
800 tcg_gen_shli_i64(tmp
, cpu_ir
[ra
], 3);
802 tcg_gen_addi_i64(tmp
, tmp
, lit
);
804 tcg_gen_add_i64(tmp
, tmp
, cpu_ir
[rb
]);
805 tcg_gen_ext32s_i64(cpu_ir
[rc
], tmp
);
808 tcg_gen_shli_i64(cpu_ir
[rc
], cpu_ir
[ra
], 3);
809 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rc
]);
813 tcg_gen_movi_i64(cpu_ir
[rc
], lit
);
815 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
817 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
823 if (likely(rc
!= 31)) {
825 if (islit
|| rb
!= 31) {
826 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
827 tcg_gen_shli_i64(tmp
, cpu_ir
[ra
], 3);
829 tcg_gen_subi_i64(tmp
, tmp
, lit
);
831 tcg_gen_sub_i64(tmp
, tmp
, cpu_ir
[rb
]);
832 tcg_gen_ext32s_i64(cpu_ir
[rc
], tmp
);
835 tcg_gen_shli_i64(cpu_ir
[rc
], cpu_ir
[ra
], 3);
836 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rc
]);
840 tcg_gen_movi_i64(cpu_ir
[rc
], -lit
);
842 tcg_gen_neg_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
843 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rc
]);
845 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
851 gen_arith3(ctx
, &gen_op_cmpult
, ra
, rb
, rc
, islit
, lit
);
855 if (likely(rc
!= 31)) {
858 tcg_gen_addi_i64(cpu_ir
[rc
], cpu_ir
[ra
], lit
);
860 tcg_gen_add_i64(cpu_ir
[rc
], cpu_ir
[ra
], cpu_ir
[rb
]);
862 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
865 tcg_gen_movi_i64(cpu_ir
[rc
], lit
);
867 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
869 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
875 if (likely(rc
!= 31)) {
877 if (islit
|| rb
!= 31) {
878 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
879 tcg_gen_shli_i64(tmp
, cpu_ir
[ra
], 2);
881 tcg_gen_addi_i64(cpu_ir
[rc
], tmp
, lit
);
883 tcg_gen_add_i64(cpu_ir
[rc
], tmp
, cpu_ir
[rb
]);
886 tcg_gen_shli_i64(cpu_ir
[rc
], cpu_ir
[ra
], 2);
889 tcg_gen_movi_i64(cpu_ir
[rc
], lit
);
891 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
893 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
899 if (likely(rc
!= 31)) {
902 tcg_gen_subi_i64(cpu_ir
[rc
], cpu_ir
[ra
], lit
);
904 tcg_gen_sub_i64(cpu_ir
[rc
], cpu_ir
[ra
], cpu_ir
[rb
]);
906 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
909 tcg_gen_movi_i64(cpu_ir
[rc
], -lit
);
911 tcg_gen_neg_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
913 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
919 if (likely(rc
!= 31)) {
921 if (islit
|| rb
!= 31) {
922 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
923 tcg_gen_shli_i64(tmp
, cpu_ir
[ra
], 2);
925 tcg_gen_subi_i64(cpu_ir
[rc
], tmp
, lit
);
927 tcg_gen_sub_i64(cpu_ir
[rc
], tmp
, cpu_ir
[rb
]);
930 tcg_gen_shli_i64(cpu_ir
[rc
], cpu_ir
[ra
], 2);
933 tcg_gen_movi_i64(cpu_ir
[rc
], -lit
);
935 tcg_gen_neg_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
937 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
943 gen_arith3(ctx
, &gen_op_cmpeq
, ra
, rb
, rc
, islit
, lit
);
947 if (likely(rc
!= 31)) {
949 if (islit
|| rb
!= 31) {
950 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
951 tcg_gen_shli_i64(tmp
, cpu_ir
[ra
], 3);
953 tcg_gen_addi_i64(cpu_ir
[rc
], tmp
, lit
);
955 tcg_gen_add_i64(cpu_ir
[rc
], tmp
, cpu_ir
[rb
]);
958 tcg_gen_shli_i64(cpu_ir
[rc
], cpu_ir
[ra
], 3);
961 tcg_gen_movi_i64(cpu_ir
[rc
], lit
);
963 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
965 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
971 if (likely(rc
!= 31)) {
973 if (islit
|| rb
!= 31) {
974 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
975 tcg_gen_shli_i64(tmp
, cpu_ir
[ra
], 3);
977 tcg_gen_subi_i64(cpu_ir
[rc
], tmp
, lit
);
979 tcg_gen_sub_i64(cpu_ir
[rc
], tmp
, cpu_ir
[rb
]);
982 tcg_gen_shli_i64(cpu_ir
[rc
], cpu_ir
[ra
], 3);
985 tcg_gen_movi_i64(cpu_ir
[rc
], -lit
);
987 tcg_gen_neg_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
989 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
995 gen_arith3(ctx
, &gen_op_cmpule
, ra
, rb
, rc
, islit
, lit
);
999 gen_arith3(ctx
, &gen_op_addlv
, ra
, rb
, rc
, islit
, lit
);
1003 gen_arith3(ctx
, &gen_op_sublv
, ra
, rb
, rc
, islit
, lit
);
1007 gen_arith3(ctx
, &gen_op_cmplt
, ra
, rb
, rc
, islit
, lit
);
1011 gen_arith3(ctx
, &gen_op_addqv
, ra
, rb
, rc
, islit
, lit
);
1015 gen_arith3(ctx
, &gen_op_subqv
, ra
, rb
, rc
, islit
, lit
);
1019 gen_arith3(ctx
, &gen_op_cmple
, ra
, rb
, rc
, islit
, lit
);
1029 if (likely(rc
!= 31)) {
1030 if (ra
== 31 || (rb
== 31 && !islit
))
1031 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1033 tcg_gen_andi_i64(cpu_ir
[rc
], cpu_ir
[ra
], lit
);
1035 tcg_gen_and_i64(cpu_ir
[rc
], cpu_ir
[ra
], cpu_ir
[rb
]);
1040 if (likely(rc
!= 31)) {
1043 tcg_gen_andi_i64(cpu_ir
[rc
], cpu_ir
[ra
], ~lit
);
1044 else if (rb
!= 31) {
1045 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
1046 tcg_gen_not_i64(tmp
, cpu_ir
[rb
]);
1047 tcg_gen_and_i64(cpu_ir
[rc
], cpu_ir
[ra
], tmp
);
1050 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
1052 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1057 gen_cmov(ctx
, TCG_COND_EQ
, ra
, rb
, rc
, islit
, lit
, 1);
1061 gen_cmov(ctx
, TCG_COND_NE
, ra
, rb
, rc
, islit
, lit
, 1);
1065 if (likely(rc
!= 31)) {
1068 tcg_gen_ori_i64(cpu_ir
[rc
], cpu_ir
[ra
], lit
);
1070 tcg_gen_or_i64(cpu_ir
[rc
], cpu_ir
[ra
], cpu_ir
[rb
]);
1072 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
1075 tcg_gen_movi_i64(cpu_ir
[rc
], lit
);
1077 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
1079 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1085 gen_cmov(ctx
, TCG_COND_NE
, ra
, rb
, rc
, islit
, lit
, 0);
1089 gen_cmov(ctx
, TCG_COND_EQ
, ra
, rb
, rc
, islit
, lit
, 0);
1093 if (likely(rc
!= 31)) {
1094 if (rb
== 31 && !islit
)
1095 tcg_gen_movi_i64(cpu_ir
[rc
], ~0);
1096 else if (ra
!= 31) {
1098 tcg_gen_ori_i64(cpu_ir
[rc
], cpu_ir
[ra
], ~lit
);
1100 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
1101 tcg_gen_not_i64(tmp
, cpu_ir
[rb
]);
1102 tcg_gen_or_i64(cpu_ir
[rc
], cpu_ir
[ra
], tmp
);
1107 tcg_gen_movi_i64(cpu_ir
[rc
], ~lit
);
1109 tcg_gen_not_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
1115 if (likely(rc
!= 31)) {
1118 tcg_gen_xori_i64(cpu_ir
[rc
], cpu_ir
[ra
], lit
);
1120 tcg_gen_xor_i64(cpu_ir
[rc
], cpu_ir
[ra
], cpu_ir
[rb
]);
1122 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
1125 tcg_gen_movi_i64(cpu_ir
[rc
], lit
);
1127 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
1129 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1135 gen_cmov(ctx
, TCG_COND_GE
, ra
, rb
, rc
, islit
, lit
, 0);
1139 gen_cmov(ctx
, TCG_COND_LT
, ra
, rb
, rc
, islit
, lit
, 0);
1143 if (likely(rc
!= 31)) {
1146 tcg_gen_xori_i64(cpu_ir
[rc
], cpu_ir
[ra
], ~lit
);
1147 else if (rb
!= 31) {
1148 TCGv tmp
= tcg_temp_new(TCG_TYPE_I64
);
1149 tcg_gen_not_i64(tmp
, cpu_ir
[rb
]);
1150 tcg_gen_xor_i64(cpu_ir
[rc
], cpu_ir
[ra
], tmp
);
1153 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
1156 tcg_gen_movi_i64(cpu_ir
[rc
], ~lit
);
1158 tcg_gen_not_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
1160 tcg_gen_movi_i64(cpu_ir
[rc
], ~0);
1166 if (likely(rc
!= 31)) {
1168 tcg_gen_movi_i64(cpu_ir
[rc
], helper_amask(lit
));
1170 tcg_gen_helper_1_1(helper_amask
, cpu_ir
[rc
], cpu_ir
[rb
]);
1172 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1177 gen_cmov(ctx
, TCG_COND_GT
, ra
, rb
, rc
, islit
, lit
, 0);
1181 gen_cmov(ctx
, TCG_COND_LE
, ra
, rb
, rc
, islit
, lit
, 0);
1186 tcg_gen_helper_1_0(helper_load_implver
, cpu_ir
[rc
]);
1196 gen_byte_manipulation(helper_mskbl
, ra
, rb
, rc
, islit
, lit
);
1200 gen_ext_l(&tcg_gen_ext8u_i64
, ra
, rb
, rc
, islit
, lit
);
1204 gen_byte_manipulation(helper_insbl
, ra
, rb
, rc
, islit
, lit
);
1208 gen_byte_manipulation(helper_mskwl
, ra
, rb
, rc
, islit
, lit
);
1212 gen_ext_l(&tcg_gen_ext16u_i64
, ra
, rb
, rc
, islit
, lit
);
1216 gen_byte_manipulation(helper_inswl
, ra
, rb
, rc
, islit
, lit
);
1220 gen_byte_manipulation(helper_mskll
, ra
, rb
, rc
, islit
, lit
);
1224 gen_ext_l(&tcg_gen_ext32u_i64
, ra
, rb
, rc
, islit
, lit
);
1228 gen_byte_manipulation(helper_insll
, ra
, rb
, rc
, islit
, lit
);
1232 gen_byte_manipulation(helper_zap
, ra
, rb
, rc
, islit
, lit
);
1236 gen_byte_manipulation(helper_zapnot
, ra
, rb
, rc
, islit
, lit
);
1240 gen_byte_manipulation(helper_mskql
, ra
, rb
, rc
, islit
, lit
);
1244 if (likely(rc
!= 31)) {
1247 tcg_gen_shri_i64(cpu_ir
[rc
], cpu_ir
[ra
], lit
& 0x3f);
1248 else if (rb
!= 31) {
1249 TCGv shift
= tcg_temp_new(TCG_TYPE_I64
);
1250 tcg_gen_andi_i64(shift
, cpu_ir
[rb
], 0x3f);
1251 tcg_gen_shr_i64(cpu_ir
[rc
], cpu_ir
[ra
], shift
);
1252 tcg_temp_free(shift
);
1254 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
1256 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1261 gen_ext_l(NULL
, ra
, rb
, rc
, islit
, lit
);
1265 if (likely(rc
!= 31)) {
1268 tcg_gen_shli_i64(cpu_ir
[rc
], cpu_ir
[ra
], lit
& 0x3f);
1269 else if (rb
!= 31) {
1270 TCGv shift
= tcg_temp_new(TCG_TYPE_I64
);
1271 tcg_gen_andi_i64(shift
, cpu_ir
[rb
], 0x3f);
1272 tcg_gen_shl_i64(cpu_ir
[rc
], cpu_ir
[ra
], shift
);
1273 tcg_temp_free(shift
);
1275 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
1277 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1282 gen_byte_manipulation(helper_insql
, ra
, rb
, rc
, islit
, lit
);
1286 if (likely(rc
!= 31)) {
1289 tcg_gen_sari_i64(cpu_ir
[rc
], cpu_ir
[ra
], lit
& 0x3f);
1290 else if (rb
!= 31) {
1291 TCGv shift
= tcg_temp_new(TCG_TYPE_I64
);
1292 tcg_gen_andi_i64(shift
, cpu_ir
[rb
], 0x3f);
1293 tcg_gen_sar_i64(cpu_ir
[rc
], cpu_ir
[ra
], shift
);
1294 tcg_temp_free(shift
);
1296 tcg_gen_mov_i64(cpu_ir
[rc
], cpu_ir
[ra
]);
1298 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1303 gen_byte_manipulation(helper_mskwh
, ra
, rb
, rc
, islit
, lit
);
1307 gen_byte_manipulation(helper_inswh
, ra
, rb
, rc
, islit
, lit
);
1311 gen_ext_h(&tcg_gen_ext16u_i64
, ra
, rb
, rc
, islit
, lit
);
1315 gen_byte_manipulation(helper_msklh
, ra
, rb
, rc
, islit
, lit
);
1319 gen_byte_manipulation(helper_inslh
, ra
, rb
, rc
, islit
, lit
);
1323 gen_ext_h(&tcg_gen_ext16u_i64
, ra
, rb
, rc
, islit
, lit
);
1327 gen_byte_manipulation(helper_mskqh
, ra
, rb
, rc
, islit
, lit
);
1331 gen_byte_manipulation(helper_insqh
, ra
, rb
, rc
, islit
, lit
);
1335 gen_ext_h(NULL
, ra
, rb
, rc
, islit
, lit
);
1345 if (likely(rc
!= 31)) {
1346 if (ra
== 31 || (rb
== 31 && !islit
))
1347 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1350 tcg_gen_muli_i64(cpu_ir
[rc
], cpu_ir
[ra
], lit
);
1352 tcg_gen_mul_i64(cpu_ir
[rc
], cpu_ir
[ra
], cpu_ir
[rb
]);
1353 tcg_gen_ext32s_i64(cpu_ir
[rc
], cpu_ir
[rc
]);
1359 if (likely(rc
!= 31)) {
1360 if (ra
== 31 || (rb
== 31 && !islit
))
1361 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1363 tcg_gen_muli_i64(cpu_ir
[rc
], cpu_ir
[ra
], lit
);
1365 tcg_gen_mul_i64(cpu_ir
[rc
], cpu_ir
[ra
], cpu_ir
[rb
]);
1370 gen_arith3(ctx
, &gen_op_umulh
, ra
, rb
, rc
, islit
, lit
);
1374 gen_arith3(ctx
, &gen_op_mullv
, ra
, rb
, rc
, islit
, lit
);
1378 gen_arith3(ctx
, &gen_op_mulqv
, ra
, rb
, rc
, islit
, lit
);
1385 switch (fpfn
) { /* f11 & 0x3F */
1388 if (!(ctx
->amask
& AMASK_FIX
))
1390 gen_itf(ctx
, &gen_op_itofs
, ra
, rc
);
1394 if (!(ctx
->amask
& AMASK_FIX
))
1396 gen_farith2(ctx
, &gen_op_sqrtf
, rb
, rc
);
1400 if (!(ctx
->amask
& AMASK_FIX
))
1402 gen_farith2(ctx
, &gen_op_sqrts
, rb
, rc
);
1406 if (!(ctx
->amask
& AMASK_FIX
))
1409 gen_itf(ctx
, &gen_op_itoff
, ra
, rc
);
1416 if (!(ctx
->amask
& AMASK_FIX
))
1418 gen_itf(ctx
, &gen_op_itoft
, ra
, rc
);
1422 if (!(ctx
->amask
& AMASK_FIX
))
1424 gen_farith2(ctx
, &gen_op_sqrtg
, rb
, rc
);
1428 if (!(ctx
->amask
& AMASK_FIX
))
1430 gen_farith2(ctx
, &gen_op_sqrtt
, rb
, rc
);
1437 /* VAX floating point */
1438 /* XXX: rounding mode and trap are ignored (!) */
1439 switch (fpfn
) { /* f11 & 0x3F */
1442 gen_farith3(ctx
, &gen_op_addf
, ra
, rb
, rc
);
1446 gen_farith3(ctx
, &gen_op_subf
, ra
, rb
, rc
);
1450 gen_farith3(ctx
, &gen_op_mulf
, ra
, rb
, rc
);
1454 gen_farith3(ctx
, &gen_op_divf
, ra
, rb
, rc
);
1459 gen_farith2(ctx
, &gen_op_cvtdg
, rb
, rc
);
1466 gen_farith3(ctx
, &gen_op_addg
, ra
, rb
, rc
);
1470 gen_farith3(ctx
, &gen_op_subg
, ra
, rb
, rc
);
1474 gen_farith3(ctx
, &gen_op_mulg
, ra
, rb
, rc
);
1478 gen_farith3(ctx
, &gen_op_divg
, ra
, rb
, rc
);
1482 gen_farith3(ctx
, &gen_op_cmpgeq
, ra
, rb
, rc
);
1486 gen_farith3(ctx
, &gen_op_cmpglt
, ra
, rb
, rc
);
1490 gen_farith3(ctx
, &gen_op_cmpgle
, ra
, rb
, rc
);
1494 gen_farith2(ctx
, &gen_op_cvtgf
, rb
, rc
);
1499 gen_farith2(ctx
, &gen_op_cvtgd
, rb
, rc
);
1506 gen_farith2(ctx
, &gen_op_cvtgq
, rb
, rc
);
1510 gen_farith2(ctx
, &gen_op_cvtqf
, rb
, rc
);
1514 gen_farith2(ctx
, &gen_op_cvtqg
, rb
, rc
);
1521 /* IEEE floating-point */
1522 /* XXX: rounding mode and traps are ignored (!) */
1523 switch (fpfn
) { /* f11 & 0x3F */
1526 gen_farith3(ctx
, &gen_op_adds
, ra
, rb
, rc
);
1530 gen_farith3(ctx
, &gen_op_subs
, ra
, rb
, rc
);
1534 gen_farith3(ctx
, &gen_op_muls
, ra
, rb
, rc
);
1538 gen_farith3(ctx
, &gen_op_divs
, ra
, rb
, rc
);
1542 gen_farith3(ctx
, &gen_op_addt
, ra
, rb
, rc
);
1546 gen_farith3(ctx
, &gen_op_subt
, ra
, rb
, rc
);
1550 gen_farith3(ctx
, &gen_op_mult
, ra
, rb
, rc
);
1554 gen_farith3(ctx
, &gen_op_divt
, ra
, rb
, rc
);
1558 gen_farith3(ctx
, &gen_op_cmptun
, ra
, rb
, rc
);
1562 gen_farith3(ctx
, &gen_op_cmpteq
, ra
, rb
, rc
);
1566 gen_farith3(ctx
, &gen_op_cmptlt
, ra
, rb
, rc
);
1570 gen_farith3(ctx
, &gen_op_cmptle
, ra
, rb
, rc
);
1573 /* XXX: incorrect */
1574 if (fn11
== 0x2AC) {
1576 gen_farith2(ctx
, &gen_op_cvtst
, rb
, rc
);
1579 gen_farith2(ctx
, &gen_op_cvtts
, rb
, rc
);
1584 gen_farith2(ctx
, &gen_op_cvttq
, rb
, rc
);
1588 gen_farith2(ctx
, &gen_op_cvtqs
, rb
, rc
);
1592 gen_farith2(ctx
, &gen_op_cvtqt
, rb
, rc
);
1602 gen_farith2(ctx
, &gen_op_cvtlq
, rb
, rc
);
1607 if (ra
== 31 && rc
== 31) {
1612 gen_load_fir(ctx
, rb
, 0);
1613 gen_store_fir(ctx
, rc
, 0);
1616 gen_farith3(ctx
, &gen_op_cpys
, ra
, rb
, rc
);
1621 gen_farith2(ctx
, &gen_op_cpysn
, rb
, rc
);
1625 gen_farith2(ctx
, &gen_op_cpyse
, rb
, rc
);
1629 gen_load_fir(ctx
, ra
, 0);
1630 gen_op_store_fpcr();
1635 gen_store_fir(ctx
, ra
, 0);
1639 gen_fcmov(ctx
, &gen_op_cmpfeq
, ra
, rb
, rc
);
1643 gen_fcmov(ctx
, &gen_op_cmpfne
, ra
, rb
, rc
);
1647 gen_fcmov(ctx
, &gen_op_cmpflt
, ra
, rb
, rc
);
1651 gen_fcmov(ctx
, &gen_op_cmpfge
, ra
, rb
, rc
);
1655 gen_fcmov(ctx
, &gen_op_cmpfle
, ra
, rb
, rc
);
1659 gen_fcmov(ctx
, &gen_op_cmpfgt
, ra
, rb
, rc
);
1663 gen_farith2(ctx
, &gen_op_cvtql
, rb
, rc
);
1667 gen_farith2(ctx
, &gen_op_cvtqlv
, rb
, rc
);
1671 gen_farith2(ctx
, &gen_op_cvtqlsv
, rb
, rc
);
1678 switch ((uint16_t)disp16
) {
1681 /* No-op. Just exit from the current tb */
1686 /* No-op. Just exit from the current tb */
1708 tcg_gen_helper_1_0(helper_load_pcc
, cpu_ir
[ra
]);
1713 tcg_gen_helper_1_0(helper_rc
, cpu_ir
[ra
]);
1717 /* XXX: TODO: evict tb cache at address rb */
1727 tcg_gen_helper_1_0(helper_rs
, cpu_ir
[ra
]);
1738 /* HW_MFPR (PALcode) */
1739 #if defined (CONFIG_USER_ONLY)
1744 gen_op_mfpr(insn
& 0xFF);
1746 tcg_gen_mov_i64(cpu_ir
[ra
], cpu_T
[0]);
1751 tcg_gen_movi_i64(cpu_ir
[ra
], ctx
->pc
);
1753 tcg_gen_andi_i64(cpu_pc
, cpu_ir
[rb
], ~3);
1755 tcg_gen_movi_i64(cpu_pc
, 0);
1756 /* Those four jumps only differ by the branch prediction hint */
1774 /* HW_LD (PALcode) */
1775 #if defined (CONFIG_USER_ONLY)
1781 tcg_gen_mov_i64(cpu_T
[0], cpu_ir
[rb
]);
1783 tcg_gen_movi_i64(cpu_T
[0], 0);
1784 tcg_gen_movi_i64(cpu_T
[1], disp12
);
1785 tcg_gen_add_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
1786 switch ((insn
>> 12) & 0xF) {
1788 /* Longword physical access */
1792 /* Quadword physical access */
1796 /* Longword physical access with lock */
1800 /* Quadword physical access with lock */
1804 /* Longword virtual PTE fetch */
1805 gen_op_ldl_kernel();
1808 /* Quadword virtual PTE fetch */
1809 gen_op_ldq_kernel();
1818 /* Longword virtual access */
1819 gen_op_ld_phys_to_virt();
1823 /* Quadword virtual access */
1824 gen_op_ld_phys_to_virt();
1828 /* Longword virtual access with protection check */
1832 /* Quadword virtual access with protection check */
1836 /* Longword virtual access with alternate access mode */
1837 gen_op_set_alt_mode();
1838 gen_op_ld_phys_to_virt();
1840 gen_op_restore_mode();
1843 /* Quadword virtual access with alternate access mode */
1844 gen_op_set_alt_mode();
1845 gen_op_ld_phys_to_virt();
1847 gen_op_restore_mode();
1850 /* Longword virtual access with alternate access mode and
1853 gen_op_set_alt_mode();
1855 gen_op_restore_mode();
1858 /* Quadword virtual access with alternate access mode and
1861 gen_op_set_alt_mode();
1863 gen_op_restore_mode();
1867 tcg_gen_mov_i64(cpu_ir
[ra
], cpu_T
[1]);
1874 if (!(ctx
->amask
& AMASK_BWX
))
1876 if (likely(rc
!= 31)) {
1878 tcg_gen_movi_i64(cpu_ir
[rc
], (int64_t)((int8_t)lit
));
1880 tcg_gen_ext8s_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
1882 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1887 if (!(ctx
->amask
& AMASK_BWX
))
1889 if (likely(rc
!= 31)) {
1891 tcg_gen_movi_i64(cpu_ir
[rc
], (int64_t)((int16_t)lit
));
1893 tcg_gen_ext16s_i64(cpu_ir
[rc
], cpu_ir
[rb
]);
1895 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1900 if (!(ctx
->amask
& AMASK_CIX
))
1902 if (likely(rc
!= 31)) {
1904 tcg_gen_movi_i64(cpu_ir
[rc
], ctpop64(lit
));
1906 tcg_gen_helper_1_1(helper_ctpop
, cpu_ir
[rc
], cpu_ir
[rb
]);
1908 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1913 if (!(ctx
->amask
& AMASK_MVI
))
1920 if (!(ctx
->amask
& AMASK_CIX
))
1922 if (likely(rc
!= 31)) {
1924 tcg_gen_movi_i64(cpu_ir
[rc
], clz64(lit
));
1926 tcg_gen_helper_1_1(helper_ctlz
, cpu_ir
[rc
], cpu_ir
[rb
]);
1928 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1933 if (!(ctx
->amask
& AMASK_CIX
))
1935 if (likely(rc
!= 31)) {
1937 tcg_gen_movi_i64(cpu_ir
[rc
], ctz64(lit
));
1939 tcg_gen_helper_1_1(helper_cttz
, cpu_ir
[rc
], cpu_ir
[rb
]);
1941 tcg_gen_movi_i64(cpu_ir
[rc
], 0);
1946 if (!(ctx
->amask
& AMASK_MVI
))
1953 if (!(ctx
->amask
& AMASK_MVI
))
1960 if (!(ctx
->amask
& AMASK_MVI
))
1967 if (!(ctx
->amask
& AMASK_MVI
))
1974 if (!(ctx
->amask
& AMASK_MVI
))
1981 if (!(ctx
->amask
& AMASK_MVI
))
1988 if (!(ctx
->amask
& AMASK_MVI
))
1995 if (!(ctx
->amask
& AMASK_MVI
))
2002 if (!(ctx
->amask
& AMASK_MVI
))
2009 if (!(ctx
->amask
& AMASK_MVI
))
2016 if (!(ctx
->amask
& AMASK_MVI
))
2023 if (!(ctx
->amask
& AMASK_MVI
))
2030 if (!(ctx
->amask
& AMASK_FIX
))
2032 gen_fti(ctx
, &gen_op_ftoit
, ra
, rb
);
2036 if (!(ctx
->amask
& AMASK_FIX
))
2038 gen_fti(ctx
, &gen_op_ftois
, ra
, rb
);
2045 /* HW_MTPR (PALcode) */
2046 #if defined (CONFIG_USER_ONLY)
2052 tcg_gen_mov_i64(cpu_T
[0], cpu_ir
[ra
]);
2054 tcg_gen_movi_i64(cpu_T
[0], 0);
2055 gen_op_mtpr(insn
& 0xFF);
2060 /* HW_REI (PALcode) */
2061 #if defined (CONFIG_USER_ONLY)
2071 tcg_gen_mov_i64(cpu_T
[0], cpu_ir
[rb
]);
2073 tcg_gen_movi_i64(cpu_T
[0], 0);
2074 tcg_gen_movi_i64(cpu_T
[1], (((int64_t)insn
<< 51) >> 51));
2075 tcg_gen_add_i64(cpu_T
[0], cpu_T
[0], cpu_T
[1]);
2082 /* HW_ST (PALcode) */
2083 #if defined (CONFIG_USER_ONLY)
2089 tcg_gen_addi_i64(cpu_T
[0], cpu_ir
[rb
], disp12
);
2091 tcg_gen_movi_i64(cpu_T
[0], disp12
);
2093 tcg_gen_mov_i64(cpu_T
[1], cpu_ir
[ra
]);
2095 tcg_gen_movi_i64(cpu_T
[1], 0);
2096 switch ((insn
>> 12) & 0xF) {
2098 /* Longword physical access */
2102 /* Quadword physical access */
2106 /* Longword physical access with lock */
2110 /* Quadword physical access with lock */
2114 /* Longword virtual access */
2115 gen_op_st_phys_to_virt();
2119 /* Quadword virtual access */
2120 gen_op_st_phys_to_virt();
2142 /* Longword virtual access with alternate access mode */
2143 gen_op_set_alt_mode();
2144 gen_op_st_phys_to_virt();
2146 gen_op_restore_mode();
2149 /* Quadword virtual access with alternate access mode */
2150 gen_op_set_alt_mode();
2151 gen_op_st_phys_to_virt();
2153 gen_op_restore_mode();
2168 gen_load_fmem(ctx
, &gen_ldf
, ra
, rb
, disp16
);
2176 gen_load_fmem(ctx
, &gen_ldg
, ra
, rb
, disp16
);
2183 gen_load_fmem(ctx
, &gen_lds
, ra
, rb
, disp16
);
2187 gen_load_fmem(ctx
, &gen_ldt
, ra
, rb
, disp16
);
2192 gen_store_fmem(ctx
, &gen_stf
, ra
, rb
, disp16
);
2200 gen_store_fmem(ctx
, &gen_stg
, ra
, rb
, disp16
);
2207 gen_store_fmem(ctx
, &gen_sts
, ra
, rb
, disp16
);
2211 gen_store_fmem(ctx
, &gen_stt
, ra
, rb
, disp16
);
2215 gen_load_mem(ctx
, &gen_ldl
, ra
, rb
, disp16
, 0);
2219 gen_load_mem(ctx
, &gen_ldq
, ra
, rb
, disp16
, 0);
2223 gen_load_mem(ctx
, &gen_ldl_l
, ra
, rb
, disp16
, 0);
2227 gen_load_mem(ctx
, &gen_ldq_l
, ra
, rb
, disp16
, 0);
2231 gen_store_mem(ctx
, &gen_stl
, ra
, rb
, disp16
, 0);
2235 gen_store_mem(ctx
, &gen_stq
, ra
, rb
, disp16
, 0);
2239 gen_store_mem(ctx
, &gen_stl_c
, ra
, rb
, disp16
, 0);
2243 gen_store_mem(ctx
, &gen_stq_c
, ra
, rb
, disp16
, 0);
2248 tcg_gen_movi_i64(cpu_ir
[ra
], ctx
->pc
);
2249 tcg_gen_movi_i64(cpu_pc
, ctx
->pc
+ (int64_t)(disp21
<< 2));
2254 gen_fbcond(ctx
, &gen_op_cmpfeq
, ra
, disp16
);
2259 gen_fbcond(ctx
, &gen_op_cmpflt
, ra
, disp16
);
2264 gen_fbcond(ctx
, &gen_op_cmpfle
, ra
, disp16
);
2270 tcg_gen_movi_i64(cpu_ir
[ra
], ctx
->pc
);
2271 tcg_gen_movi_i64(cpu_pc
, ctx
->pc
+ (int64_t)(disp21
<< 2));
2276 gen_fbcond(ctx
, &gen_op_cmpfne
, ra
, disp16
);
2281 gen_fbcond(ctx
, &gen_op_cmpfge
, ra
, disp16
);
2286 gen_fbcond(ctx
, &gen_op_cmpfgt
, ra
, disp16
);
2291 gen_bcond(ctx
, TCG_COND_EQ
, ra
, disp16
, 1);
2296 gen_bcond(ctx
, TCG_COND_EQ
, ra
, disp16
, 0);
2301 gen_bcond(ctx
, TCG_COND_LT
, ra
, disp16
, 0);
2306 gen_bcond(ctx
, TCG_COND_LE
, ra
, disp16
, 0);
2311 gen_bcond(ctx
, TCG_COND_NE
, ra
, disp16
, 1);
2316 gen_bcond(ctx
, TCG_COND_NE
, ra
, disp16
, 0);
2321 gen_bcond(ctx
, TCG_COND_GE
, ra
, disp16
, 0);
2326 gen_bcond(ctx
, TCG_COND_GT
, ra
, disp16
, 0);
2338 static always_inline
void gen_intermediate_code_internal (CPUState
*env
,
2339 TranslationBlock
*tb
,
2342 #if defined ALPHA_DEBUG_DISAS
2343 static int insn_count
;
2345 DisasContext ctx
, *ctxp
= &ctx
;
2346 target_ulong pc_start
;
2348 uint16_t *gen_opc_end
;
2355 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
2357 ctx
.amask
= env
->amask
;
2358 #if defined (CONFIG_USER_ONLY)
2361 ctx
.mem_idx
= ((env
->ps
>> 3) & 3);
2362 ctx
.pal_mode
= env
->ipr
[IPR_EXC_ADDR
] & 1;
2365 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
2367 max_insns
= CF_COUNT_MASK
;
2370 for (ret
= 0; ret
== 0;) {
2371 if (env
->nb_breakpoints
> 0) {
2372 for(j
= 0; j
< env
->nb_breakpoints
; j
++) {
2373 if (env
->breakpoints
[j
] == ctx
.pc
) {
2374 gen_excp(&ctx
, EXCP_DEBUG
, 0);
2380 j
= gen_opc_ptr
- gen_opc_buf
;
2384 gen_opc_instr_start
[lj
++] = 0;
2385 gen_opc_pc
[lj
] = ctx
.pc
;
2386 gen_opc_instr_start
[lj
] = 1;
2387 gen_opc_icount
[lj
] = num_insns
;
2390 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
))
2392 #if defined ALPHA_DEBUG_DISAS
2394 if (logfile
!= NULL
) {
2395 fprintf(logfile
, "pc " TARGET_FMT_lx
" mem_idx %d\n",
2396 ctx
.pc
, ctx
.mem_idx
);
2399 insn
= ldl_code(ctx
.pc
);
2400 #if defined ALPHA_DEBUG_DISAS
2402 if (logfile
!= NULL
) {
2403 fprintf(logfile
, "opcode %08x %d\n", insn
, insn_count
);
2408 ret
= translate_one(ctxp
, insn
);
2411 /* if we reach a page boundary or are single stepping, stop
2414 if (((ctx
.pc
& (TARGET_PAGE_SIZE
- 1)) == 0) ||
2415 (env
->singlestep_enabled
) ||
2416 num_insns
>= max_insns
) {
2419 #if defined (DO_SINGLE_STEP)
2423 if (ret
!= 1 && ret
!= 3) {
2424 tcg_gen_movi_i64(cpu_pc
, ctx
.pc
);
2426 #if defined (DO_TB_FLUSH)
2427 tcg_gen_helper_0_0(helper_tb_flush
);
2429 if (tb
->cflags
& CF_LAST_IO
)
2431 /* Generate the return instruction */
2433 gen_icount_end(tb
, num_insns
);
2434 *gen_opc_ptr
= INDEX_op_end
;
2436 j
= gen_opc_ptr
- gen_opc_buf
;
2439 gen_opc_instr_start
[lj
++] = 0;
2441 tb
->size
= ctx
.pc
- pc_start
;
2442 tb
->icount
= num_insns
;
2444 #if defined ALPHA_DEBUG_DISAS
2445 if (loglevel
& CPU_LOG_TB_CPU
) {
2446 cpu_dump_state(env
, logfile
, fprintf
, 0);
2448 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
2449 fprintf(logfile
, "IN: %s\n", lookup_symbol(pc_start
));
2450 target_disas(logfile
, pc_start
, ctx
.pc
- pc_start
, 1);
2451 fprintf(logfile
, "\n");
2456 void gen_intermediate_code (CPUState
*env
, struct TranslationBlock
*tb
)
2458 gen_intermediate_code_internal(env
, tb
, 0);
2461 void gen_intermediate_code_pc (CPUState
*env
, struct TranslationBlock
*tb
)
2463 gen_intermediate_code_internal(env
, tb
, 1);
2466 CPUAlphaState
* cpu_alpha_init (const char *cpu_model
)
2471 env
= qemu_mallocz(sizeof(CPUAlphaState
));
2475 alpha_translate_init();
2477 /* XXX: should not be hardcoded */
2478 env
->implver
= IMPLVER_2106x
;
2480 #if defined (CONFIG_USER_ONLY)
2484 /* Initialize IPR */
2485 hwpcb
= env
->ipr
[IPR_PCBB
];
2486 env
->ipr
[IPR_ASN
] = 0;
2487 env
->ipr
[IPR_ASTEN
] = 0;
2488 env
->ipr
[IPR_ASTSR
] = 0;
2489 env
->ipr
[IPR_DATFX
] = 0;
2491 // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
2492 // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
2493 // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
2494 // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
2495 env
->ipr
[IPR_FEN
] = 0;
2496 env
->ipr
[IPR_IPL
] = 31;
2497 env
->ipr
[IPR_MCES
] = 0;
2498 env
->ipr
[IPR_PERFMON
] = 0; /* Implementation specific */
2499 // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
2500 env
->ipr
[IPR_SISR
] = 0;
2501 env
->ipr
[IPR_VIRBND
] = -1ULL;
2506 void gen_pc_load(CPUState
*env
, TranslationBlock
*tb
,
2507 unsigned long searched_pc
, int pc_pos
, void *puc
)
2509 env
->pc
= gen_opc_pc
[pc_pos
];