/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
 */
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "host-utils.h"
#include "tcg-op.h"
#include "qemu-common.h"

#define DO_SINGLE_STEP
#define ALPHA_DEBUG_DISAS
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    uint32_t amask;
};
/* global register indexes */
static TCGv cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_pc;
/* dyngen register indexes */
static TCGv cpu_T[3];

/* register names */
static char cpu_reg_names[10*4+21*5];

#include "gen-icount.h"
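
/* Register the fixed TCG globals used by generated code: the env pointer,
   the 31 integer registers, the PC, and the T0-T2 scratch values (kept in
   host registers or in CPUState fields depending on the host word size). */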
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");

#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                  offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                  offsetof(CPUState, t1), "T1");
    cpu_T[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                  offsetof(CPUState, t2), "T2");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG2, "T1");
    cpu_T[2] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG3, "T2");
#endif

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                       offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;
    }

    cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                offsetof(CPUState, pc), "pc");

    /* register helpers */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
#include "helper.h"

    done_init = 1;
}
static always_inline void gen_op_nop (void)
{
#if defined(GENERATE_NOP)
    gen_op_no_op();
#endif
}

#define GEN32(func, NAME) \
static GenOpFunc *NAME ## _table [32] = { \
    NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
    NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
    NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
    NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
    NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
    NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
    NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
    NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
}; \
static always_inline void func (int n) \
{ \
    NAME ## _table[n](); \
}
/* Special hacks for fir31 */
#define gen_op_load_FT0_fir31 gen_op_reset_FT0
#define gen_op_load_FT1_fir31 gen_op_reset_FT1
#define gen_op_load_FT2_fir31 gen_op_reset_FT2
#define gen_op_store_FT0_fir31 gen_op_nop
#define gen_op_store_FT1_fir31 gen_op_nop
#define gen_op_store_FT2_fir31 gen_op_nop
#define gen_op_cmov_fir31 gen_op_nop
GEN32(gen_op_load_FT0_fir, gen_op_load_FT0_fir);
GEN32(gen_op_load_FT1_fir, gen_op_load_FT1_fir);
GEN32(gen_op_load_FT2_fir, gen_op_load_FT2_fir);
GEN32(gen_op_store_FT0_fir, gen_op_store_FT0_fir);
GEN32(gen_op_store_FT1_fir, gen_op_store_FT1_fir);
GEN32(gen_op_store_FT2_fir, gen_op_store_FT2_fir);
GEN32(gen_op_cmov_fir, gen_op_cmov_fir);
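
/* Each GEN32 invocation above expands to a 32-entry table of per-register
   micro-ops plus a dispatcher, so e.g. gen_op_load_FT0_fir(n) selects the
   op for floating register n at translation time; the fir31 aliases make
   f31 read as zero and turn stores to it into no-ops. */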
static always_inline void gen_load_fir (DisasContext *ctx, int firn, int Tn)
{
    switch (Tn) {
    case 0:
        gen_op_load_FT0_fir(firn);
        break;
    case 1:
        gen_op_load_FT1_fir(firn);
        break;
    case 2:
        gen_op_load_FT2_fir(firn);
        break;
    }
}

static always_inline void gen_store_fir (DisasContext *ctx, int firn, int Tn)
{
    switch (Tn) {
    case 0:
        gen_op_store_FT0_fir(firn);
        break;
    case 1:
        gen_op_store_FT1_fir(firn);
        break;
    case 2:
        gen_op_store_FT2_fir(firn);
        break;
    }
}
#if defined(CONFIG_USER_ONLY)
#define OP_LD_TABLE(width) \
static GenOpFunc *gen_op_ld##width[] = { \
    &gen_op_ld##width##_raw, \
}
#define OP_ST_TABLE(width) \
static GenOpFunc *gen_op_st##width[] = { \
    &gen_op_st##width##_raw, \
}
#else
#define OP_LD_TABLE(width) \
static GenOpFunc *gen_op_ld##width[] = { \
    &gen_op_ld##width##_kernel, \
    &gen_op_ld##width##_executive, \
    &gen_op_ld##width##_supervisor, \
    &gen_op_ld##width##_user, \
}
#define OP_ST_TABLE(width) \
static GenOpFunc *gen_op_st##width[] = { \
    &gen_op_st##width##_kernel, \
    &gen_op_st##width##_executive, \
    &gen_op_st##width##_supervisor, \
    &gen_op_st##width##_user, \
}
#endif

#define GEN_LD(width) \
OP_LD_TABLE(width); \
static always_inline void gen_ld##width (DisasContext *ctx) \
{ \
    (*gen_op_ld##width[ctx->mem_idx])(); \
}

#define GEN_ST(width) \
OP_ST_TABLE(width); \
static always_inline void gen_st##width (DisasContext *ctx) \
{ \
    (*gen_op_st##width[ctx->mem_idx])(); \
}
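
/* The load/store op tables above are indexed by ctx->mem_idx: a single
   "raw" entry in user-mode-only builds, otherwise one entry per processor
   mode (kernel, executive, supervisor, user). */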
#if 0 /* currently unused */
#endif

static always_inline void _gen_op_bcond (DisasContext *ctx)
{
#if 0 // Qemu does not know how to do this...
    gen_op_bcond(ctx->pc);
#else
    gen_op_bcond(ctx->pc >> 32, ctx->pc);
#endif
}
static always_inline void gen_excp (DisasContext *ctx,
                                    int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    gen_op_excp(exception, error_code);
}

static always_inline void gen_invalid (DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
static always_inline void gen_load_mem (DisasContext *ctx,
                                        void (*gen_load_op)(DisasContext *ctx),
                                        int ra, int rb, int32_t disp16,
                                        int clear)
{
    if (ra == 31 && disp16 == 0) {
        /* UNOP */
        gen_op_nop();
    } else {
        if (rb != 31)
            tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
        else
            tcg_gen_movi_i64(cpu_T[0], disp16);
        if (clear)
            tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
        (*gen_load_op)(ctx);
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
    }
}
static always_inline void gen_store_mem (DisasContext *ctx,
                                         void (*gen_store_op)(DisasContext *ctx),
                                         int ra, int rb, int32_t disp16,
                                         int clear)
{
    if (rb != 31)
        tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
    else
        tcg_gen_movi_i64(cpu_T[0], disp16);
    if (clear)
        tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
    if (ra != 31)
        tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
    else
        tcg_gen_movi_i64(cpu_T[1], 0);
    (*gen_store_op)(ctx);
}
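
/* In the two memory helpers above, T0 carries the effective address
   (rb + disp16, optionally aligned to 8 bytes when 'clear' is set) and T1
   carries the data; register 31 reads as zero and is never written. */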
static always_inline void gen_load_fmem (DisasContext *ctx,
                                         void (*gen_load_fop)(DisasContext *ctx),
                                         int ra, int rb, int32_t disp16)
{
    if (rb != 31)
        tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
    else
        tcg_gen_movi_i64(cpu_T[0], disp16);
    (*gen_load_fop)(ctx);
    gen_store_fir(ctx, ra, 1);
}
static always_inline void gen_store_fmem (DisasContext *ctx,
                                          void (*gen_store_fop)(DisasContext *ctx),
                                          int ra, int rb, int32_t disp16)
{
    if (rb != 31)
        tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
    else
        tcg_gen_movi_i64(cpu_T[0], disp16);
    gen_load_fir(ctx, ra, 1);
    (*gen_store_fop)(ctx);
}
static always_inline void gen_bcond (DisasContext *ctx,
                                     TCGCond cond,
                                     int ra, int32_t disp16, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
    gen_set_label(l2);
}
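
/* The branch target is PC-relative: disp16 counts instructions, hence the
   << 2 above; 'mask' selects a test of the register's low bit (the
   BLBC/BLBS forms) instead of a test of the full value. */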
static always_inline void gen_fbcond (DisasContext *ctx,
                                      void (*gen_test_op)(void),
                                      int ra, int32_t disp16)
{
    tcg_gen_movi_i64(cpu_T[1], ctx->pc + (int64_t)(disp16 << 2));
    gen_load_fir(ctx, ra, 0);
    (*gen_test_op)();
    _gen_op_bcond(ctx);
}
static always_inline void gen_arith3 (DisasContext *ctx,
                                      void (*gen_arith_op)(void),
                                      int ra, int rb, int rc,
                                      int islit, uint8_t lit)
{
    if (ra != 31)
        tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
    else
        tcg_gen_movi_i64(cpu_T[0], 0);
    if (islit)
        tcg_gen_movi_i64(cpu_T[1], lit);
    else if (rb != 31)
        tcg_gen_mov_i64(cpu_T[1], cpu_ir[rb]);
    else
        tcg_gen_movi_i64(cpu_T[1], 0);
    (*gen_arith_op)();
    if (rc != 31)
        tcg_gen_mov_i64(cpu_ir[rc], cpu_T[0]);
}
static always_inline void gen_cmov (DisasContext *ctx,
                                    TCGCond inv_cond,
                                    int ra, int rb, int rc,
                                    int islit, int8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
            tcg_temp_free(tmp);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        tcg_temp_free(tmp);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else if (rb != 31)
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    gen_set_label(l1);
}
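
/* Note that the condition passed to gen_cmov is the *inverse* of the
   architectural CMOV condition: the branch above skips the move whenever
   the inverted test succeeds. */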
static always_inline void gen_farith2 (DisasContext *ctx,
                                       void (*gen_arith_fop)(void),
                                       int rb, int rc)
{
    gen_load_fir(ctx, rb, 0);
    (*gen_arith_fop)();
    gen_store_fir(ctx, rc, 0);
}

static always_inline void gen_farith3 (DisasContext *ctx,
                                       void (*gen_arith_fop)(void),
                                       int ra, int rb, int rc)
{
    gen_load_fir(ctx, ra, 0);
    gen_load_fir(ctx, rb, 1);
    (*gen_arith_fop)();
    gen_store_fir(ctx, rc, 0);
}
static always_inline void gen_fcmov (DisasContext *ctx,
                                     void (*gen_test_fop)(void),
                                     int ra, int rb, int rc)
{
    gen_load_fir(ctx, ra, 0);
    gen_load_fir(ctx, rb, 1);
    (*gen_test_fop)();
    gen_op_cmov_fir(rc);
}
static always_inline void gen_fti (DisasContext *ctx,
                                   void (*gen_move_fop)(void),
                                   int ra, int rc)
{
    gen_load_fir(ctx, rc, 0);
    (*gen_move_fop)();
    if (ra != 31)
        tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
}
static always_inline void gen_itf (DisasContext *ctx,
                                   void (*gen_move_fop)(void),
                                   int ra, int rc)
{
    if (ra != 31)
        tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
    else
        tcg_gen_movi_i64(cpu_T[0], 0);
    (*gen_move_fop)();
    gen_store_fir(ctx, rc, 0);
}
/* EXTWH, EXTLH, EXTQH */
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, int8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit)
            tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
        else if (rb != 31) {
            TCGv tmp1, tmp2;
            tmp1 = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tmp2 = tcg_const_i64(64);
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
            tcg_temp_free(tmp2);
            if (tcg_gen_ext_i64) {
                tcg_gen_shl_i64(tmp1, cpu_ir[ra], tmp1);
                tcg_gen_ext_i64(cpu_ir[rc], tmp1);
            } else
                tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
            tcg_temp_free(tmp1);
        } else
            tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
/* EXTBL, EXTWL, EXTLL, EXTQL */
static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, int8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit)
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        else if (rb != 31) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            if (tcg_gen_ext_i64) {
                tcg_gen_shr_i64(tmp, cpu_ir[ra], tmp);
                tcg_gen_ext_i64(cpu_ir[rc], tmp);
            } else
                tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        } else
            tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
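
/* For both extract helpers the optional tcg_gen_ext_i64 callback supplies
   the final zero-extension to the operand width (8/16/32 bits); passing
   NULL selects the quadword form, which needs no truncation. */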
/* Code to call byte manipulation helpers, used by:
   INSWH, INSLH, INSQH, INSBL, INSWL, INSLL, INSQL,
   MSKWH, MSKLH, MSKQH, MSKBL, MSKWL, MSKLL, MSKQL,
   ZAP, ZAPNOT

   WARNING: it assumes that when ra = 31 is used, the result is 0.
*/
static always_inline void gen_byte_manipulation(void *helper,
                                                int ra, int rb, int rc,
                                                int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit || rb == 31) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            if (islit)
                tcg_gen_movi_i64(tmp, lit);
            else
                tcg_gen_movi_i64(tmp, 0);
            tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], tmp);
            tcg_temp_free(tmp);
        } else
            tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
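
/* Decode and translate a single instruction.  A return value of 0 lets the
   main loop keep translating; non-zero values terminate the current
   translation block. */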
static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    int8_t lit;
    int ret;
    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
    ret = 0;
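    /* Alpha instruction word layout, as decoded above:
         [31:26] opc    [25:21] ra    [20:16] rb    [4:0] rc
         bit 12 selects a literal operand, taken from bits [20:13];
         branch formats use disp21/disp16, PALcode the low 26 bits, and
         operate formats carry their function code in fn11/fn7/fn2. */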
#if defined ALPHA_DEBUG_DISAS
    if (logfile != NULL) {
        fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
                opc, ra, rb, rc, disp16);
    }
#endif
        if (palcode >= 0x80 && palcode < 0xC0) {
            /* Unprivileged PAL call */
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
#if !defined (CONFIG_USER_ONLY)
        } else if (palcode < 0x40) {
            /* Privileged PAL code */
            if (ctx->mem_idx & 1)
                goto invalid_opc;
            else
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
#endif
        } else {
            /* Invalid PAL call */
            goto invalid_opc;
        }
        /* LDA */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
        }
        /* LDAH */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
        }
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &gen_ldbu, ra, rb, disp16, 0);
        gen_load_mem(ctx, &gen_ldq_u, ra, rb, disp16, 1);
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &gen_ldwu, ra, rb, disp16, 0);
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_store_mem(ctx, &gen_stw, ra, rb, disp16, 0);
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_store_mem(ctx, &gen_stb, ra, rb, disp16, 0);
        gen_store_mem(ctx, &gen_stq_u, ra, rb, disp16, 1);
        /* ADDL */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit) {
                    tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else if (rb != 31) {
                    tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[ra]);
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int32_t)lit);
                else if (rb != 31)
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* S4ADDL */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit || rb != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 2);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else if (rb != 31)
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* SUBL */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit) {
                    tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else if (rb != 31) {
                    tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[ra]);
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], -lit);
                else if (rb != 31) {
                    tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* S4SUBL */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit || rb != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 2);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], -lit);
                else if (rb != 31) {
                    tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* CMPBGE */
        gen_arith3(ctx, &gen_op_cmpbge, ra, rb, rc, islit, lit);
        /* S8ADDL */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit || rb != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 3);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else if (rb != 31)
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* S8SUBL */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit || rb != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                    tcg_temp_free(tmp);
                } else {
                    tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 3);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], -lit);
                else if (rb != 31) {
                    tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* CMPULT */
        gen_arith3(ctx, &gen_op_cmpult, ra, rb, rc, islit, lit);
        /* ADDQ */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit)
                    tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else if (rb != 31)
                    tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else if (rb != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* S4ADDQ */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit || rb != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else
                    tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 2);
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else if (rb != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* SUBQ */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit)
                    tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else if (rb != 31)
                    tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], -lit);
                else if (rb != 31)
                    tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* S4SUBQ */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit || rb != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else
                    tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 2);
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], -lit);
                else if (rb != 31)
                    tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* CMPEQ */
        gen_arith3(ctx, &gen_op_cmpeq, ra, rb, rc, islit, lit);
        /* S8ADDQ */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit || rb != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else
                    tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 3);
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else if (rb != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* S8SUBQ */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit || rb != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                    tcg_temp_free(tmp);
                } else
                    tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 3);
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], -lit);
                else if (rb != 31)
                    tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* CMPULE */
        gen_arith3(ctx, &gen_op_cmpule, ra, rb, rc, islit, lit);
        gen_arith3(ctx, &gen_op_addlv, ra, rb, rc, islit, lit);
        gen_arith3(ctx, &gen_op_sublv, ra, rb, rc, islit, lit);
        gen_arith3(ctx, &gen_op_cmplt, ra, rb, rc, islit, lit);
        gen_arith3(ctx, &gen_op_addqv, ra, rb, rc, islit, lit);
        gen_arith3(ctx, &gen_op_subqv, ra, rb, rc, islit, lit);
        gen_arith3(ctx, &gen_op_cmple, ra, rb, rc, islit, lit);
        /* AND */
        if (likely(rc != 31)) {
            if (ra == 31 || (rb == 31 && !islit))
                tcg_gen_movi_i64(cpu_ir[rc], 0);
            else if (islit)
                tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        }
        /* BIC */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                else if (rb != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_not_i64(tmp, cpu_ir[rb]);
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], tmp);
                    tcg_temp_free(tmp);
                } else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
            } else
                tcg_gen_movi_i64(cpu_ir[rc], 0);
        }
        /* CMOVLBS */
        gen_cmov(ctx, TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
        /* CMOVLBC */
        gen_cmov(ctx, TCG_COND_NE, ra, rb, rc, islit, lit, 1);
        /* BIS */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit)
                    tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else if (rb != 31)
                    tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else if (rb != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* CMOVEQ */
        gen_cmov(ctx, TCG_COND_NE, ra, rb, rc, islit, lit, 0);
        /* CMOVNE */
        gen_cmov(ctx, TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
        /* ORNOT */
        if (likely(rc != 31)) {
            if (rb == 31 && !islit)
                tcg_gen_movi_i64(cpu_ir[rc], ~0);
            else if (ra != 31) {
                if (islit)
                    tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                else {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_not_i64(tmp, cpu_ir[rb]);
                    tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], tmp);
                    tcg_temp_free(tmp);
                }
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                else
                    tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
            }
        }
        /* XOR */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit)
                    tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else if (rb != 31)
                    tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], lit);
                else if (rb != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
        }
        /* CMOVLT */
        gen_cmov(ctx, TCG_COND_GE, ra, rb, rc, islit, lit, 0);
        /* CMOVGE */
        gen_cmov(ctx, TCG_COND_LT, ra, rb, rc, islit, lit, 0);
        /* EQV */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit)
                    tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                else if (rb != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_not_i64(tmp, cpu_ir[rb]);
                    tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], tmp);
                    tcg_temp_free(tmp);
                } else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
            } else {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                else if (rb != 31)
                    tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], ~0);
            }
        }
        /* AMASK */
        if (likely(rc != 31)) {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
            else if (rb != 31)
                tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
            else
                tcg_gen_movi_i64(cpu_ir[rc], 0);
        }
        /* CMOVLE */
        gen_cmov(ctx, TCG_COND_GT, ra, rb, rc, islit, lit, 0);
        /* CMOVGT */
        gen_cmov(ctx, TCG_COND_LE, ra, rb, rc, islit, lit, 0);
        /* IMPLVER */
        gen_op_load_implver();
        if (rc != 31)
            tcg_gen_mov_i64(cpu_ir[rc], cpu_T[0]);
        gen_byte_manipulation(helper_mskbl, ra, rb, rc, islit, lit);
        /* EXTBL */
        gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_insbl, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_mskwl, ra, rb, rc, islit, lit);
        /* EXTWL */
        gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_inswl, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_mskll, ra, rb, rc, islit, lit);
        /* EXTLL */
        gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_insll, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_zap, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_zapnot, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_mskql, ra, rb, rc, islit, lit);
        /* SRL */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit)
                    tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                else if (rb != 31) {
                    TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                    tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                    tcg_temp_free(shift);
                } else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
            } else
                tcg_gen_movi_i64(cpu_ir[rc], 0);
        }
        /* EXTQL */
        gen_ext_l(NULL, ra, rb, rc, islit, lit);
        /* SLL */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit)
                    tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                else if (rb != 31) {
                    TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                    tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                    tcg_temp_free(shift);
                } else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
            } else
                tcg_gen_movi_i64(cpu_ir[rc], 0);
        }
        gen_byte_manipulation(helper_insql, ra, rb, rc, islit, lit);
        /* SRA */
        if (likely(rc != 31)) {
            if (ra != 31) {
                if (islit)
                    tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                else if (rb != 31) {
                    TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                    tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                    tcg_temp_free(shift);
                } else
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
            } else
                tcg_gen_movi_i64(cpu_ir[rc], 0);
        }
        gen_byte_manipulation(helper_mskwh, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_inswh, ra, rb, rc, islit, lit);
        /* EXTWH */
        gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_msklh, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_inslh, ra, rb, rc, islit, lit);
        /* EXTLH */
        gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_mskqh, ra, rb, rc, islit, lit);
        gen_byte_manipulation(helper_insqh, ra, rb, rc, islit, lit);
        /* EXTQH */
        gen_ext_h(NULL, ra, rb, rc, islit, lit);
        /* MULL */
        if (likely(rc != 31)) {
            if (ra == 31 || (rb == 31 && !islit))
                tcg_gen_movi_i64(cpu_ir[rc], 0);
            else {
                if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
            }
        }
        /* MULQ */
        if (likely(rc != 31)) {
            if (ra == 31 || (rb == 31 && !islit))
                tcg_gen_movi_i64(cpu_ir[rc], 0);
            else if (islit)
                tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        }
        gen_arith3(ctx, &gen_op_umulh, ra, rb, rc, islit, lit);
        gen_arith3(ctx, &gen_op_mullv, ra, rb, rc, islit, lit);
        gen_arith3(ctx, &gen_op_mulqv, ra, rb, rc, islit, lit);
        switch (fpfn) { /* f11 & 0x3F */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_itf(ctx, &gen_op_itofs, ra, rc);
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_farith2(ctx, &gen_op_sqrtf, rb, rc);
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_farith2(ctx, &gen_op_sqrts, rb, rc);
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_itf(ctx, &gen_op_itoff, ra, rc);
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_itf(ctx, &gen_op_itoft, ra, rc);
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_farith2(ctx, &gen_op_sqrtg, rb, rc);
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_farith2(ctx, &gen_op_sqrtt, rb, rc);
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        gen_farith3(ctx, &gen_op_addf, ra, rb, rc);
        gen_farith3(ctx, &gen_op_subf, ra, rb, rc);
        gen_farith3(ctx, &gen_op_mulf, ra, rb, rc);
        gen_farith3(ctx, &gen_op_divf, ra, rb, rc);
        gen_farith2(ctx, &gen_op_cvtdg, rb, rc);
        gen_farith3(ctx, &gen_op_addg, ra, rb, rc);
        gen_farith3(ctx, &gen_op_subg, ra, rb, rc);
        gen_farith3(ctx, &gen_op_mulg, ra, rb, rc);
        gen_farith3(ctx, &gen_op_divg, ra, rb, rc);
        gen_farith3(ctx, &gen_op_cmpgeq, ra, rb, rc);
        gen_farith3(ctx, &gen_op_cmpglt, ra, rb, rc);
        gen_farith3(ctx, &gen_op_cmpgle, ra, rb, rc);
        gen_farith2(ctx, &gen_op_cvtgf, rb, rc);
        gen_farith2(ctx, &gen_op_cvtgd, rb, rc);
        gen_farith2(ctx, &gen_op_cvtgq, rb, rc);
        gen_farith2(ctx, &gen_op_cvtqf, rb, rc);
        gen_farith2(ctx, &gen_op_cvtqg, rb, rc);
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* f11 & 0x3F */
        gen_farith3(ctx, &gen_op_adds, ra, rb, rc);
        gen_farith3(ctx, &gen_op_subs, ra, rb, rc);
        gen_farith3(ctx, &gen_op_muls, ra, rb, rc);
        gen_farith3(ctx, &gen_op_divs, ra, rb, rc);
        gen_farith3(ctx, &gen_op_addt, ra, rb, rc);
        gen_farith3(ctx, &gen_op_subt, ra, rb, rc);
        gen_farith3(ctx, &gen_op_mult, ra, rb, rc);
        gen_farith3(ctx, &gen_op_divt, ra, rb, rc);
        gen_farith3(ctx, &gen_op_cmptun, ra, rb, rc);
        gen_farith3(ctx, &gen_op_cmpteq, ra, rb, rc);
        gen_farith3(ctx, &gen_op_cmptlt, ra, rb, rc);
        gen_farith3(ctx, &gen_op_cmptle, ra, rb, rc);
        /* XXX: incorrect */
        if (fn11 == 0x2AC) {
            gen_farith2(ctx, &gen_op_cvtst, rb, rc);
        } else
            gen_farith2(ctx, &gen_op_cvtts, rb, rc);
        gen_farith2(ctx, &gen_op_cvttq, rb, rc);
        gen_farith2(ctx, &gen_op_cvtqs, rb, rc);
        gen_farith2(ctx, &gen_op_cvtqt, rb, rc);
        gen_farith2(ctx, &gen_op_cvtlq, rb, rc);
        if (ra == 31 && rc == 31) {
            /* NOP */
            gen_op_nop();
        } else if (ra == rb) {
            /* FMOV */
            gen_load_fir(ctx, rb, 0);
            gen_store_fir(ctx, rc, 0);
        } else {
            /* CPYS */
            gen_farith3(ctx, &gen_op_cpys, ra, rb, rc);
        }
        gen_farith2(ctx, &gen_op_cpysn, rb, rc);
        gen_farith2(ctx, &gen_op_cpyse, rb, rc);
        /* MT_FPCR */
        gen_load_fir(ctx, ra, 0);
        gen_op_store_fpcr();
        /* MF_FPCR */
        gen_op_load_fpcr();
        gen_store_fir(ctx, ra, 0);
        gen_fcmov(ctx, &gen_op_cmpfeq, ra, rb, rc);
        gen_fcmov(ctx, &gen_op_cmpfne, ra, rb, rc);
        gen_fcmov(ctx, &gen_op_cmpflt, ra, rb, rc);
        gen_fcmov(ctx, &gen_op_cmpfge, ra, rb, rc);
        gen_fcmov(ctx, &gen_op_cmpfle, ra, rb, rc);
        gen_fcmov(ctx, &gen_op_cmpfgt, ra, rb, rc);
        gen_farith2(ctx, &gen_op_cvtql, rb, rc);
        gen_farith2(ctx, &gen_op_cvtqlv, rb, rc);
        gen_farith2(ctx, &gen_op_cvtqlsv, rb, rc);
        switch ((uint16_t)disp16) {
        /* No-op. Just exit from the current tb */
        /* No-op. Just exit from the current tb */
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
        /* XXX: TODO: evict tb cache at address rb */
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
        /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        gen_op_mfpr(insn & 0xFF);
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
#endif
        /* JMP / JSR / RET / JSR_COROUTINE */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        if (rb != 31)
            tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
        else
            tcg_gen_movi_i64(cpu_pc, 0);
        /* Those four jumps only differ by the branch prediction hint */
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb != 31)
            tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
        else
            tcg_gen_movi_i64(cpu_T[0], 0);
        tcg_gen_movi_i64(cpu_T[1], disp12);
        tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
        switch ((insn >> 12) & 0xF) {
        /* Longword physical access */
        /* Quadword physical access */
        /* Longword physical access with lock */
        /* Quadword physical access with lock */
        /* Longword virtual PTE fetch */
        gen_op_ldl_kernel();
        /* Quadword virtual PTE fetch */
        gen_op_ldq_kernel();
        /* Longword virtual access */
        gen_op_ld_phys_to_virt();
        /* Quadword virtual access */
        gen_op_ld_phys_to_virt();
        /* Longword virtual access with protection check */
        /* Quadword virtual access with protection check */
        /* Longword virtual access with alternate access mode */
        gen_op_set_alt_mode();
        gen_op_ld_phys_to_virt();
        gen_op_restore_mode();
        /* Quadword virtual access with alternate access mode */
        gen_op_set_alt_mode();
        gen_op_ld_phys_to_virt();
        gen_op_restore_mode();
        /* Longword virtual access with alternate access mode and
           protection check */
        gen_op_set_alt_mode();
        gen_op_restore_mode();
        /* Quadword virtual access with alternate access mode and
           protection check */
        gen_op_set_alt_mode();
        gen_op_restore_mode();
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
#endif
        /* SEXTB */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        if (likely(rc != 31)) {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
            else if (rb != 31)
                tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            else
                tcg_gen_movi_i64(cpu_ir[rc], 0);
        }
        /* SEXTW */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        if (likely(rc != 31)) {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
            else if (rb != 31)
                tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            else
                tcg_gen_movi_i64(cpu_ir[rc], 0);
        }
        /* CTPOP */
        if (!(ctx->amask & AMASK_CIX))
            goto invalid_opc;
        if (likely(rc != 31)) {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
            else if (rb != 31)
                tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
            else
                tcg_gen_movi_i64(cpu_ir[rc], 0);
        }
        /* PERR */
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        /* CTLZ */
        if (!(ctx->amask & AMASK_CIX))
            goto invalid_opc;
        if (likely(rc != 31)) {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
            else if (rb != 31)
                tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
            else
                tcg_gen_movi_i64(cpu_ir[rc], 0);
        }
        /* CTTZ */
        if (!(ctx->amask & AMASK_CIX))
            goto invalid_opc;
        if (likely(rc != 31)) {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
            else if (rb != 31)
                tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
            else
                tcg_gen_movi_i64(cpu_ir[rc], 0);
        }
        /* MVI multimedia ops: each one first checks the AMASK_MVI bit */
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        if (!(ctx->amask & AMASK_MVI))
            goto invalid_opc;
        /* FTOIT */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_fti(ctx, &gen_op_ftoit, ra, rb);
        /* FTOIS */
        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_fti(ctx, &gen_op_ftois, ra, rb);
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (ra != 31)
            tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
        else
            tcg_gen_movi_i64(cpu_T[0], 0);
        gen_op_mtpr(insn & 0xFF);
#endif
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb != 31)
            tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
        else
            tcg_gen_movi_i64(cpu_T[0], 0);
        tcg_gen_movi_i64(cpu_T[1], (((int64_t)insn << 51) >> 51));
        tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
#endif
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (!ctx->pal_mode)
            goto invalid_opc;
        if (rb != 31)
            tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp12);
        else
            tcg_gen_movi_i64(cpu_T[0], disp12);
        if (ra != 31)
            tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
        else
            tcg_gen_movi_i64(cpu_T[1], 0);
        switch ((insn >> 12) & 0xF) {
        /* Longword physical access */
        /* Quadword physical access */
        /* Longword physical access with lock */
        /* Quadword physical access with lock */
        /* Longword virtual access */
        gen_op_st_phys_to_virt();
        /* Quadword virtual access */
        gen_op_st_phys_to_virt();
        /* Longword virtual access with alternate access mode */
        gen_op_set_alt_mode();
        gen_op_st_phys_to_virt();
        gen_op_restore_mode();
        /* Quadword virtual access with alternate access mode */
        gen_op_set_alt_mode();
        gen_op_st_phys_to_virt();
        gen_op_restore_mode();
#endif
        gen_load_fmem(ctx, &gen_ldf, ra, rb, disp16);
        gen_load_fmem(ctx, &gen_ldg, ra, rb, disp16);
        gen_load_fmem(ctx, &gen_lds, ra, rb, disp16);
        gen_load_fmem(ctx, &gen_ldt, ra, rb, disp16);
        gen_store_fmem(ctx, &gen_stf, ra, rb, disp16);
        gen_store_fmem(ctx, &gen_stg, ra, rb, disp16);
        gen_store_fmem(ctx, &gen_sts, ra, rb, disp16);
        gen_store_fmem(ctx, &gen_stt, ra, rb, disp16);
        gen_load_mem(ctx, &gen_ldl, ra, rb, disp16, 0);
        gen_load_mem(ctx, &gen_ldq, ra, rb, disp16, 0);
        gen_load_mem(ctx, &gen_ldl_l, ra, rb, disp16, 0);
        gen_load_mem(ctx, &gen_ldq_l, ra, rb, disp16, 0);
        gen_store_mem(ctx, &gen_stl, ra, rb, disp16, 0);
        gen_store_mem(ctx, &gen_stq, ra, rb, disp16, 0);
        gen_store_mem(ctx, &gen_stl_c, ra, rb, disp16, 0);
        gen_store_mem(ctx, &gen_stq_c, ra, rb, disp16, 0);
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        /* FBEQ */
        gen_fbcond(ctx, &gen_op_cmpfeq, ra, disp16);
        /* FBLT */
        gen_fbcond(ctx, &gen_op_cmpflt, ra, disp16);
        /* FBLE */
        gen_fbcond(ctx, &gen_op_cmpfle, ra, disp16);
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        /* FBNE */
        gen_fbcond(ctx, &gen_op_cmpfne, ra, disp16);
        /* FBGE */
        gen_fbcond(ctx, &gen_op_cmpfge, ra, disp16);
        /* FBGT */
        gen_fbcond(ctx, &gen_op_cmpfgt, ra, disp16);
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 1);
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 0);
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp16, 0);
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp16, 0);
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp16, 1);
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp16, 0);
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp16, 0);
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp16, 0);
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}
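
/* Main translation loop: translate one instruction at a time until
   translate_one requests a stop, a page boundary is reached, single
   stepping or a breakpoint intervenes, or the TB instruction budget
   (max_insns) runs out. */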
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = ctx.pc;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        if (logfile != NULL) {
            fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
                    ctx.pc, ctx.mem_idx);
        }
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        if (logfile != NULL) {
            fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
        }
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
            (env->singlestep_enabled) ||
            num_insns >= max_insns) {
            break;
        }
#if defined (DO_SINGLE_STEP)
        break;
#endif
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    tcg_gen_helper_0_0(helper_tb_flush);
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, 0);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
        fprintf(logfile, "\n");
    }
#endif
}
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    if (!env)
        return NULL;
    cpu_exec_init(env);
    alpha_translate_init();
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
#if defined (CONFIG_USER_ONLY)
#endif
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    // env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    // env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    // env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    // env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    // env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    return env;
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}