/*
 * Alpha emulation cpu translation for qemu.
 *
 * Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
 */
#include "host-utils.h"
#include "qemu-common.h"

#define DO_SINGLE_STEP
#define ALPHA_DEBUG_DISAS

typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;
    int mem_idx;
#if !defined (CONFIG_USER_ONLY)
    int pal_mode;
#endif
    uint32_t amask;
};

/* global register indexes */
static TCGv cpu_env;
static TCGv cpu_ir[31];
static TCGv cpu_pc;
/* dyngen register indexes */
static TCGv cpu_T[2];
/* register names */
static char cpu_reg_names[10*4+21*5];

#include "gen-icount.h"

static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    cpu_T[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                  offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                  offsetof(CPUState, t1), "T1");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_TYPE_I64, TCG_AREG2, "T1");
#endif

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                       offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;
    }

    cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                offsetof(CPUState, pc), "pc");

    /* register helpers */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);

static always_inline void gen_op_nop (void)
{
#if defined(GENERATE_NOP)
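
/* GEN32 builds a 32-entry table of per-register dyngen ops plus a wrapper
 * that dispatches on the register number.  Entry 31 is overridden by the
 * fir31 macros below: floating-point register 31 always reads as zero and
 * ignores writes. */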
#define GEN32(func, NAME) \
static GenOpFunc *NAME ## _table [32] = { \
    NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
    NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
    NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
    NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
    NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
    NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
    NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
    NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
}; \
static always_inline void func (int n) \
{ \
    NAME ## _table[n](); \
}

/* Special hacks for fir31 */
#define gen_op_load_FT0_fir31 gen_op_reset_FT0
#define gen_op_load_FT1_fir31 gen_op_reset_FT1
#define gen_op_load_FT2_fir31 gen_op_reset_FT2
#define gen_op_store_FT0_fir31 gen_op_nop
#define gen_op_store_FT1_fir31 gen_op_nop
#define gen_op_store_FT2_fir31 gen_op_nop
#define gen_op_cmov_fir31 gen_op_nop
GEN32(gen_op_load_FT0_fir, gen_op_load_FT0_fir);
GEN32(gen_op_load_FT1_fir, gen_op_load_FT1_fir);
GEN32(gen_op_load_FT2_fir, gen_op_load_FT2_fir);
GEN32(gen_op_store_FT0_fir, gen_op_store_FT0_fir);
GEN32(gen_op_store_FT1_fir, gen_op_store_FT1_fir);
GEN32(gen_op_store_FT2_fir, gen_op_store_FT2_fir);
GEN32(gen_op_cmov_fir, gen_op_cmov_fir);
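
/* Copy floating-point register firn into dyngen temporary FTn (gen_load_fir)
 * or back from it (gen_store_fir).  The fir31 overrides above make register
 * 31 load as zero and turn stores to it into no-ops. */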
static always_inline void gen_load_fir (DisasContext *ctx, int firn, int Tn)
{
    switch (Tn) {
    case 0:
        gen_op_load_FT0_fir(firn);
        break;
    case 1:
        gen_op_load_FT1_fir(firn);
        break;
    case 2:
        gen_op_load_FT2_fir(firn);
        break;
    }
}

static always_inline void gen_store_fir (DisasContext *ctx, int firn, int Tn)
{
    switch (Tn) {
    case 0:
        gen_op_store_FT0_fir(firn);
        break;
    case 1:
        gen_op_store_FT1_fir(firn);
        break;
    case 2:
        gen_op_store_FT2_fir(firn);
        break;
    }
}
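
/* Memory access op tables.  In user mode only the _raw accessors exist; in
 * system mode there is one accessor per privilege level (kernel, executive,
 * supervisor, user), selected at translation time through ctx->mem_idx. */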
#if defined(CONFIG_USER_ONLY)
#define OP_LD_TABLE(width) \
static GenOpFunc *gen_op_ld##width[] = { \
    &gen_op_ld##width##_raw, \
};

#define OP_ST_TABLE(width) \
static GenOpFunc *gen_op_st##width[] = { \
    &gen_op_st##width##_raw, \
};
#else
#define OP_LD_TABLE(width) \
static GenOpFunc *gen_op_ld##width[] = { \
    &gen_op_ld##width##_kernel, \
    &gen_op_ld##width##_executive, \
    &gen_op_ld##width##_supervisor, \
    &gen_op_ld##width##_user, \
};

#define OP_ST_TABLE(width) \
static GenOpFunc *gen_op_st##width[] = { \
    &gen_op_st##width##_kernel, \
    &gen_op_st##width##_executive, \
    &gen_op_st##width##_supervisor, \
    &gen_op_st##width##_user, \
};
#endif

#define GEN_LD(width) \
OP_LD_TABLE(width); \
static always_inline void gen_ld##width (DisasContext *ctx) \
{ \
    (*gen_op_ld##width[ctx->mem_idx])(); \
}

#define GEN_ST(width) \
OP_ST_TABLE(width); \
static always_inline void gen_st##width (DisasContext *ctx) \
{ \
    (*gen_op_st##width[ctx->mem_idx])(); \
}

#if 0 /* currently unused */

static always_inline void _gen_op_bcond (DisasContext *ctx)
{
#if 0 // Qemu does not know how to do this...
    gen_op_bcond(ctx->pc);
#else
    gen_op_bcond(ctx->pc >> 32, ctx->pc);
#endif
}
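
/* Raise an exception: the virtual PC is flushed into cpu_pc first, then the
 * helper_excp helper is called with the exception number and error code.
 * gen_invalid() is the OPCDEC (illegal opcode) case of this. */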
static always_inline void gen_excp (DisasContext *ctx,
                                    int exception, int error_code)
{
    TCGv tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    tcg_gen_helper_0_2(helper_excp, tmp1, tmp2);
}

static always_inline void gen_invalid (DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}
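
/* Load/store generation.  Integer register 31 is the architectural zero
 * register: a load into it is a no-op and a store from it writes zero.  The
 * clear flag masks out the low three address bits (the ~0x7 below), as the
 * unaligned LDQ_U/STQ_U forms require. */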
static always_inline void gen_load_mem_dyngen (DisasContext *ctx,
                                               void (*gen_load_op)(DisasContext *ctx),
                                               int ra, int rb, int32_t disp16,
                                               int clear)
{
    if (ra == 31 && disp16 == 0) {
        /* No-op */
    } else {
        if (rb != 31)
            tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
        else
            tcg_gen_movi_i64(cpu_T[0], disp16);
        if (clear)
            tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
        (*gen_load_op)(ctx);
        if (ra != 31)
            tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
    }
}

static always_inline void gen_load_mem (DisasContext *ctx,
                                        void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1, int flags),
                                        int ra, int rb, int32_t disp16,
                                        int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new(TCG_TYPE_I64);
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        tcg_gen_movi_i64(addr, disp16);
    }
    tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}

static always_inline void gen_store_mem_dyngen (DisasContext *ctx,
                                                void (*gen_store_op)(DisasContext *ctx),
                                                int ra, int rb, int32_t disp16,
                                                int clear)
{
    if (rb != 31)
        tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
    else
        tcg_gen_movi_i64(cpu_T[0], disp16);
    if (clear)
        tcg_gen_andi_i64(cpu_T[0], cpu_T[0], ~0x7);
    if (ra != 31)
        tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
    else
        tcg_gen_movi_i64(cpu_T[1], 0);
    (*gen_store_op)(ctx);
}

static always_inline void gen_store_mem (DisasContext *ctx,
                                         void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1, int flags),
                                         int ra, int rb, int32_t disp16,
                                         int clear)
{
    TCGv addr = tcg_temp_new(TCG_TYPE_I64);

    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        TCGv zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
    }
    tcg_temp_free(addr);
}

static always_inline void gen_load_fmem (DisasContext *ctx,
                                         void (*gen_load_fop)(DisasContext *ctx),
                                         int ra, int rb, int32_t disp16)
{
    if (rb != 31)
        tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
    else
        tcg_gen_movi_i64(cpu_T[0], disp16);
    (*gen_load_fop)(ctx);
    gen_store_fir(ctx, ra, 1);
}

static always_inline void gen_store_fmem (DisasContext *ctx,
                                          void (*gen_store_fop)(DisasContext *ctx),
                                          int ra, int rb, int32_t disp16)
{
    if (rb != 31)
        tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp16);
    else
        tcg_gen_movi_i64(cpu_T[0], disp16);
    gen_load_fir(ctx, ra, 1);
    (*gen_store_fop)(ctx);
}
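
/* Conditional branches.  ctx->pc has already been advanced past the branch,
 * so the target is ctx->pc plus the sign-extended displacement scaled to
 * bytes (the << 2).  The "mask" form tests only bit 0 of ra, which is what
 * BLBC/BLBS need. */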
static always_inline void gen_bcond (DisasContext *ctx, TCGCond cond,
                                     int ra, int32_t disp16, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        } else {
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
        }
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
    gen_set_label(l2);
}

static always_inline void gen_fbcond (DisasContext *ctx,
                                      void (*gen_test_op)(void),
                                      int ra, int32_t disp16)
{
    tcg_gen_movi_i64(cpu_T[1], ctx->pc + (int64_t)(disp16 << 2));
    gen_load_fir(ctx, ra, 0);
    (*gen_test_op)();
    _gen_op_bcond(ctx);
}
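
/* CMOVxx: copy rb (or the 8-bit literal) into rc when the condition on ra
 * holds.  It is generated as a conditional branch over the move, using the
 * inverted condition. */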
static always_inline void gen_cmov (DisasContext *ctx, TCGCond inv_cond,
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        } else {
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
        }
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}

static always_inline void gen_farith2 (DisasContext *ctx,
                                       void (*gen_arith_fop)(void),
                                       int rb, int rc)
{
    gen_load_fir(ctx, rb, 0);
    (*gen_arith_fop)();
    gen_store_fir(ctx, rc, 0);
}

static always_inline void gen_farith3 (DisasContext *ctx,
                                       void (*gen_arith_fop)(void),
                                       int ra, int rb, int rc)
{
    gen_load_fir(ctx, ra, 0);
    gen_load_fir(ctx, rb, 1);
    (*gen_arith_fop)();
    gen_store_fir(ctx, rc, 0);
}

static always_inline void gen_fcmov (DisasContext *ctx,
                                     void (*gen_test_fop)(void),
                                     int ra, int rb, int rc)
{
    gen_load_fir(ctx, ra, 0);
    gen_load_fir(ctx, rb, 1);
    (*gen_test_fop)();
    gen_op_cmov_fir(rc);
}

static always_inline void gen_fti (DisasContext *ctx,
                                   void (*gen_move_fop)(void),
                                   int ra, int rc)
{
    gen_load_fir(ctx, rc, 0);
    (*gen_move_fop)();
    if (ra != 31)
        tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
}

static always_inline void gen_itf (DisasContext *ctx,
                                   void (*gen_move_fop)(void),
                                   int ra, int rc)
{
    if (ra != 31)
        tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
    else
        tcg_gen_movi_i64(cpu_T[0], 0);
    (*gen_move_fop)();
    gen_store_fir(ctx, rc, 0);
}
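
/* The EXTxL/EXTxH instructions extract a byte/word/longword/quadword field
 * from a quadword: the byte offset comes from the low three bits of rb (or
 * of the literal), is scaled to a bit count, and the result is then
 * zero-extended to the operation width.  A NULL extension op means no final
 * narrowing (the quadword forms). */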
/* EXTWH, EXTLH, EXTQH */
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            if (lit != 0)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
        } else {
            TCGv tmp1, tmp2;
            tmp1 = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tmp2 = tcg_const_i64(64);
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else {
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    }
}

/* EXTBL, EXTWL, EXTLL, EXTQL */
static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else {
        tcg_gen_movi_i64(cpu_ir[rc], 0);
    }
}

/* Code to call arith3 helpers */
static always_inline void gen_arith3_helper(void *helper,
                                            int ra, int rb, int rc,
                                            int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            TCGv tmp = tcg_const_i64(lit);
            tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], tmp);
        } else {
            tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        }
    } else {
        TCGv tmp1 = tcg_const_i64(0);
        if (islit) {
            TCGv tmp2 = tcg_const_i64(lit);
            tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, tmp2);
        } else {
            tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, cpu_ir[rb]);
        }
    }
}
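
/* CMPxx: set rc to 1 when the comparison between ra (or zero) and rb (or the
 * literal) holds, and to 0 otherwise, using a conditional branch between the
 * two stores. */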
static always_inline void gen_cmp(TCGCond cond,
                                  int ra, int rb, int rc,
                                  int islit, uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    if (ra != 31) {
        tmp = tcg_temp_new(TCG_TYPE_I64);
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else {
        tmp = tcg_const_i64(0);
    }
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
}

static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    uint8_t lit;
    int ret;

    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;

#if defined ALPHA_DEBUG_DISAS
    if (logfile != NULL) {
        fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
                opc, ra, rb, rc, disp16);
    }
#endif
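
    /* CALL_PAL: codes 0x80-0xBF are unprivileged PAL calls, codes below 0x40
     * are privileged and only allowed outside of user mode (the ctx->mem_idx
     * check below); everything else is an invalid PAL call. */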
    if (palcode >= 0x80 && palcode < 0xC0) {
        /* Unprivileged PAL call */
        gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
#if !defined (CONFIG_USER_ONLY)
    } else if (palcode < 0x40) {
        /* Privileged PAL code */
        if (ctx->mem_idx & 1)
            gen_invalid(ctx);
        else
            gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
#endif
    } else {
        /* Invalid PAL call */
        gen_invalid(ctx);
    }

    if (likely(ra != 31)) {
        if (rb != 31)
            tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
        else
            tcg_gen_movi_i64(cpu_ir[ra], disp16);
    }

    if (likely(ra != 31)) {
        if (rb != 31)
            tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
        else
            tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
    }

    if (!(ctx->amask & AMASK_BWX))
        goto invalid_opc;
    gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0);

    gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1);

    if (!(ctx->amask & AMASK_BWX))
        goto invalid_opc;
    gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 1);

    gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0);

    gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0);

    gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1);

    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit) {
                tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
            } else {
                tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
            }
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
            if (islit)
                tcg_gen_addi_i64(tmp, tmp, lit);
            else
                tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
            tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], -lit);
            else {
                tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
            }
        }
    }

    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
            if (islit)
                tcg_gen_subi_i64(tmp, tmp, lit);
            else
                tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
            tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], -lit);
            else {
                tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
            }
        }
    }

    gen_arith3_helper(helper_cmpbge, ra, rb, rc, islit, lit);

    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
            if (islit)
                tcg_gen_addi_i64(tmp, tmp, lit);
            else
                tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
            tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
            if (islit)
                tcg_gen_subi_i64(tmp, tmp, lit);
            else
                tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
            tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], -lit);
            else {
                tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
            }
        }
    }

    gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);

    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
            if (islit)
                tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
            else
                tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], -lit);
            else
                tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
            if (islit)
                tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
            else
                tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], -lit);
            else
                tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);

    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
            if (islit)
                tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
            else
                tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    if (likely(rc != 31)) {
        if (ra != 31) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
            if (islit)
                tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
            else
                tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], -lit);
            else
                tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_addlv, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_sublv, ra, rb, rc, islit, lit);

    gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_addqv, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_subqv, ra, rb, rc, islit, lit);

    gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);

    if (likely(rc != 31)) {
        if (ra == 31)
            tcg_gen_movi_i64(cpu_ir[rc], 0);
        else if (islit)
            tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
        else
            tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    }

    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
            else {
                TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                tcg_gen_not_i64(tmp, cpu_ir[rb]);
                tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            }
        } else
            tcg_gen_movi_i64(cpu_ir[rc], 0);
    }

    gen_cmov(ctx, TCG_COND_EQ, ra, rb, rc, islit, lit, 1);

    gen_cmov(ctx, TCG_COND_NE, ra, rb, rc, islit, lit, 1);

    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    gen_cmov(ctx, TCG_COND_NE, ra, rb, rc, islit, lit, 0);

    gen_cmov(ctx, TCG_COND_EQ, ra, rb, rc, islit, lit, 0);

    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
            else {
                TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                tcg_gen_not_i64(tmp, cpu_ir[rb]);
                tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            }
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], ~lit);
            else
                tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], lit);
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    gen_cmov(ctx, TCG_COND_GE, ra, rb, rc, islit, lit, 0);

    gen_cmov(ctx, TCG_COND_LT, ra, rb, rc, islit, lit, 0);

    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
            else {
                TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                tcg_gen_not_i64(tmp, cpu_ir[rb]);
                tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], tmp);
            }
        } else {
            if (islit)
                tcg_gen_movi_i64(cpu_ir[rc], ~lit);
            else
                tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
        }
    }

    if (likely(rc != 31)) {
        if (islit)
            tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
        else
            tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
    }

    gen_cmov(ctx, TCG_COND_GT, ra, rb, rc, islit, lit, 0);

    gen_cmov(ctx, TCG_COND_LE, ra, rb, rc, islit, lit, 0);

    tcg_gen_helper_1_0(helper_load_implver, cpu_ir[rc]);

    gen_arith3_helper(helper_mskbl, ra, rb, rc, islit, lit);

    gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_insbl, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_mskwl, ra, rb, rc, islit, lit);

    gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_inswl, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_mskll, ra, rb, rc, islit, lit);

    gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_insll, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_zap, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_zapnot, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_mskql, ra, rb, rc, islit, lit);

    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
            else {
                TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                tcg_temp_free(shift);
            }
        } else
            tcg_gen_movi_i64(cpu_ir[rc], 0);
    }

    gen_ext_l(NULL, ra, rb, rc, islit, lit);

    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
            else {
                TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                tcg_temp_free(shift);
            }
        } else
            tcg_gen_movi_i64(cpu_ir[rc], 0);
    }

    gen_arith3_helper(helper_insql, ra, rb, rc, islit, lit);

    if (likely(rc != 31)) {
        if (ra != 31) {
            if (islit)
                tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
            else {
                TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                tcg_temp_free(shift);
            }
        } else
            tcg_gen_movi_i64(cpu_ir[rc], 0);
    }

    gen_arith3_helper(helper_mskwh, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_inswh, ra, rb, rc, islit, lit);

    gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_msklh, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_inslh, ra, rb, rc, islit, lit);

    gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_mskqh, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_insqh, ra, rb, rc, islit, lit);

    gen_ext_h(NULL, ra, rb, rc, islit, lit);

    if (likely(rc != 31)) {
        if (ra == 31)
            tcg_gen_movi_i64(cpu_ir[rc], 0);
        else {
            if (islit)
                tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
            else
                tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
        }
    }

    if (likely(rc != 31)) {
        if (ra == 31)
            tcg_gen_movi_i64(cpu_ir[rc], 0);
        else if (islit)
            tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
        else
            tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    }

    gen_arith3_helper(helper_umulh, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_mullv, ra, rb, rc, islit, lit);

    gen_arith3_helper(helper_mulqv, ra, rb, rc, islit, lit);

    switch (fpfn) { /* f11 & 0x3F */

        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_itf(ctx, &gen_op_itofs, ra, rc);

        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_farith2(ctx, &gen_op_sqrtf, rb, rc);

        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_farith2(ctx, &gen_op_sqrts, rb, rc);

        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_itf(ctx, &gen_op_itoff, ra, rc);

        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_itf(ctx, &gen_op_itoft, ra, rc);

        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_farith2(ctx, &gen_op_sqrtg, rb, rc);

        if (!(ctx->amask & AMASK_FIX))
            goto invalid_opc;
        gen_farith2(ctx, &gen_op_sqrtt, rb, rc);

    /* VAX floating point */
    /* XXX: rounding mode and trap are ignored (!) */
    switch (fpfn) { /* f11 & 0x3F */

        gen_farith3(ctx, &gen_op_addf, ra, rb, rc);

        gen_farith3(ctx, &gen_op_subf, ra, rb, rc);

        gen_farith3(ctx, &gen_op_mulf, ra, rb, rc);

        gen_farith3(ctx, &gen_op_divf, ra, rb, rc);

        gen_farith2(ctx, &gen_op_cvtdg, rb, rc);

        gen_farith3(ctx, &gen_op_addg, ra, rb, rc);

        gen_farith3(ctx, &gen_op_subg, ra, rb, rc);

        gen_farith3(ctx, &gen_op_mulg, ra, rb, rc);

        gen_farith3(ctx, &gen_op_divg, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmpgeq, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmpglt, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmpgle, ra, rb, rc);

        gen_farith2(ctx, &gen_op_cvtgf, rb, rc);

        gen_farith2(ctx, &gen_op_cvtgd, rb, rc);

        gen_farith2(ctx, &gen_op_cvtgq, rb, rc);

        gen_farith2(ctx, &gen_op_cvtqf, rb, rc);

        gen_farith2(ctx, &gen_op_cvtqg, rb, rc);

    /* IEEE floating-point */
    /* XXX: rounding mode and traps are ignored (!) */
    switch (fpfn) { /* f11 & 0x3F */

        gen_farith3(ctx, &gen_op_adds, ra, rb, rc);

        gen_farith3(ctx, &gen_op_subs, ra, rb, rc);

        gen_farith3(ctx, &gen_op_muls, ra, rb, rc);

        gen_farith3(ctx, &gen_op_divs, ra, rb, rc);

        gen_farith3(ctx, &gen_op_addt, ra, rb, rc);

        gen_farith3(ctx, &gen_op_subt, ra, rb, rc);

        gen_farith3(ctx, &gen_op_mult, ra, rb, rc);

        gen_farith3(ctx, &gen_op_divt, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmptun, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmpteq, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmptlt, ra, rb, rc);

        gen_farith3(ctx, &gen_op_cmptle, ra, rb, rc);

        /* XXX: incorrect */
        if (fn11 == 0x2AC) {
            gen_farith2(ctx, &gen_op_cvtst, rb, rc);
        } else {
            gen_farith2(ctx, &gen_op_cvtts, rb, rc);
        }

        gen_farith2(ctx, &gen_op_cvttq, rb, rc);

        gen_farith2(ctx, &gen_op_cvtqs, rb, rc);

        gen_farith2(ctx, &gen_op_cvtqt, rb, rc);

    gen_farith2(ctx, &gen_op_cvtlq, rb, rc);

    if (ra == 31 && rc == 31) {
        /* FNOP */
    } else if (ra == rb) {
        /* FMOV */
        gen_load_fir(ctx, rb, 0);
        gen_store_fir(ctx, rc, 0);
    } else {
        gen_farith3(ctx, &gen_op_cpys, ra, rb, rc);
    }

    gen_farith2(ctx, &gen_op_cpysn, rb, rc);

    gen_farith2(ctx, &gen_op_cpyse, rb, rc);

    gen_load_fir(ctx, ra, 0);
    gen_op_store_fpcr();

    gen_store_fir(ctx, ra, 0);

    gen_fcmov(ctx, &gen_op_cmpfeq, ra, rb, rc);

    gen_fcmov(ctx, &gen_op_cmpfne, ra, rb, rc);

    gen_fcmov(ctx, &gen_op_cmpflt, ra, rb, rc);

    gen_fcmov(ctx, &gen_op_cmpfge, ra, rb, rc);

    gen_fcmov(ctx, &gen_op_cmpfle, ra, rb, rc);

    gen_fcmov(ctx, &gen_op_cmpfgt, ra, rb, rc);

    gen_farith2(ctx, &gen_op_cvtql, rb, rc);

    gen_farith2(ctx, &gen_op_cvtqlv, rb, rc);

    gen_farith2(ctx, &gen_op_cvtqlsv, rb, rc);

    switch ((uint16_t)disp16) {

        /* No-op. Just exit from the current tb */

        /* No-op. Just exit from the current tb */

        tcg_gen_helper_1_0(helper_load_pcc, cpu_ir[ra]);

        tcg_gen_helper_1_0(helper_rc, cpu_ir[ra]);

        /* XXX: TODO: evict tb cache at address rb */

        tcg_gen_helper_1_0(helper_rs, cpu_ir[ra]);

    /* HW_MFPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
    goto invalid_opc;
#else
    gen_op_mfpr(insn & 0xFF);
    if (ra != 31)
        tcg_gen_mov_i64(cpu_ir[ra], cpu_T[0]);
#endif

    if (ra != 31)
        tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
    if (rb != 31)
        tcg_gen_andi_i64(cpu_pc, cpu_ir[rb], ~3);
    else
        tcg_gen_movi_i64(cpu_pc, 0);
    /* Those four jumps only differ by the branch prediction hint */

    /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
    goto invalid_opc;
#else
    if (rb != 31)
        tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
    else
        tcg_gen_movi_i64(cpu_T[0], 0);
    tcg_gen_movi_i64(cpu_T[1], disp12);
    tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
    switch ((insn >> 12) & 0xF) {

        /* Longword physical access */

        /* Quadword physical access */

        /* Longword physical access with lock */

        /* Quadword physical access with lock */

        /* Longword virtual PTE fetch */
        gen_op_ldl_kernel();

        /* Quadword virtual PTE fetch */
        gen_op_ldq_kernel();

        /* Longword virtual access */
        gen_op_ld_phys_to_virt();

        /* Quadword virtual access */
        gen_op_ld_phys_to_virt();

        /* Longword virtual access with protection check */

        /* Quadword virtual access with protection check */

        /* Longword virtual access with alternate access mode */
        gen_op_set_alt_mode();
        gen_op_ld_phys_to_virt();
        gen_op_restore_mode();

        /* Quadword virtual access with alternate access mode */
        gen_op_set_alt_mode();
        gen_op_ld_phys_to_virt();
        gen_op_restore_mode();

        /* Longword virtual access with alternate access mode and protection checks */
        gen_op_set_alt_mode();
        gen_op_restore_mode();

        /* Quadword virtual access with alternate access mode and protection checks */
        gen_op_set_alt_mode();
        gen_op_restore_mode();
    }
    if (ra != 31)
        tcg_gen_mov_i64(cpu_ir[ra], cpu_T[1]);
#endif

    if (!(ctx->amask & AMASK_BWX))
        goto invalid_opc;
    if (likely(rc != 31)) {
        if (islit)
            tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
        else
            tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
    }

    if (!(ctx->amask & AMASK_BWX))
        goto invalid_opc;
    if (likely(rc != 31)) {
        if (islit)
            tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
        else
            tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
    }

    if (!(ctx->amask & AMASK_CIX))
        goto invalid_opc;
    if (likely(rc != 31)) {
        if (islit)
            tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
        else
            tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
    }

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_CIX))
        goto invalid_opc;
    if (likely(rc != 31)) {
        if (islit)
            tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
        else
            tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
    }

    if (!(ctx->amask & AMASK_CIX))
        goto invalid_opc;
    if (likely(rc != 31)) {
        if (islit)
            tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
        else
            tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
    }

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_MVI))
        goto invalid_opc;

    if (!(ctx->amask & AMASK_FIX))
        goto invalid_opc;
    gen_fti(ctx, &gen_op_ftoit, ra, rb);

    if (!(ctx->amask & AMASK_FIX))
        goto invalid_opc;
    gen_fti(ctx, &gen_op_ftois, ra, rb);

    /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
    goto invalid_opc;
#else
    if (ra != 31)
        tcg_gen_mov_i64(cpu_T[0], cpu_ir[ra]);
    else
        tcg_gen_movi_i64(cpu_T[0], 0);
    gen_op_mtpr(insn & 0xFF);
#endif

    /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
    goto invalid_opc;
#else
    if (rb != 31)
        tcg_gen_mov_i64(cpu_T[0], cpu_ir[rb]);
    else
        tcg_gen_movi_i64(cpu_T[0], 0);
    tcg_gen_movi_i64(cpu_T[1], (((int64_t)insn << 51) >> 51));
    tcg_gen_add_i64(cpu_T[0], cpu_T[0], cpu_T[1]);
#endif

    /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
    goto invalid_opc;
#else
    if (rb != 31)
        tcg_gen_addi_i64(cpu_T[0], cpu_ir[rb], disp12);
    else
        tcg_gen_movi_i64(cpu_T[0], disp12);
    if (ra != 31)
        tcg_gen_mov_i64(cpu_T[1], cpu_ir[ra]);
    else
        tcg_gen_movi_i64(cpu_T[1], 0);
    switch ((insn >> 12) & 0xF) {

        /* Longword physical access */

        /* Quadword physical access */

        /* Longword physical access with lock */

        /* Quadword physical access with lock */

        /* Longword virtual access */
        gen_op_st_phys_to_virt();

        /* Quadword virtual access */
        gen_op_st_phys_to_virt();

        /* Longword virtual access with alternate access mode */
        gen_op_set_alt_mode();
        gen_op_st_phys_to_virt();
        gen_op_restore_mode();

        /* Quadword virtual access with alternate access mode */
        gen_op_set_alt_mode();
        gen_op_st_phys_to_virt();
        gen_op_restore_mode();
    }
#endif

    gen_load_fmem(ctx, &gen_ldf, ra, rb, disp16);

    gen_load_fmem(ctx, &gen_ldg, ra, rb, disp16);

    gen_load_fmem(ctx, &gen_lds, ra, rb, disp16);

    gen_load_fmem(ctx, &gen_ldt, ra, rb, disp16);

    gen_store_fmem(ctx, &gen_stf, ra, rb, disp16);

    gen_store_fmem(ctx, &gen_stg, ra, rb, disp16);

    gen_store_fmem(ctx, &gen_sts, ra, rb, disp16);

    gen_store_fmem(ctx, &gen_stt, ra, rb, disp16);

    gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0);

    gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0);

    gen_load_mem_dyngen(ctx, &gen_ldl_l, ra, rb, disp16, 0);

    gen_load_mem_dyngen(ctx, &gen_ldq_l, ra, rb, disp16, 0);

    gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0);

    gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0);

    gen_store_mem_dyngen(ctx, &gen_stl_c, ra, rb, disp16, 0);

    gen_store_mem_dyngen(ctx, &gen_stq_c, ra, rb, disp16, 0);

    if (ra != 31)
        tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));

    gen_fbcond(ctx, &gen_op_cmpfeq, ra, disp16);

    gen_fbcond(ctx, &gen_op_cmpflt, ra, disp16);

    gen_fbcond(ctx, &gen_op_cmpfle, ra, disp16);

    if (ra != 31)
        tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));

    gen_fbcond(ctx, &gen_op_cmpfne, ra, disp16);

    gen_fbcond(ctx, &gen_op_cmpfge, ra, disp16);

    gen_fbcond(ctx, &gen_op_cmpfgt, ra, disp16);

    gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 1);

    gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 0);

    gen_bcond(ctx, TCG_COND_LT, ra, disp16, 0);

    gen_bcond(ctx, TCG_COND_LE, ra, disp16, 0);

    gen_bcond(ctx, TCG_COND_NE, ra, disp16, 1);

    gen_bcond(ctx, TCG_COND_NE, ra, disp16, 0);

    gen_bcond(ctx, TCG_COND_GE, ra, disp16, 0);

    gen_bcond(ctx, TCG_COND_GT, ra, disp16, 0);
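
/* Main translation loop: instructions are translated one at a time starting
 * at the TB entry point until translate_one signals a control-flow change,
 * a page boundary is crossed or the per-TB instruction limit is hit; the
 * final PC is then flushed and the TB is closed. */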
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    for (ret = 0; ret == 0;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                }
            }
        }
        j = gen_opc_ptr - gen_opc_buf;
        gen_opc_instr_start[lj++] = 0;
        gen_opc_pc[lj] = ctx.pc;
        gen_opc_instr_start[lj] = 1;
        gen_opc_icount[lj] = num_insns;
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        if (logfile != NULL) {
            fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
                    ctx.pc, ctx.mem_idx);
        }
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        if (logfile != NULL) {
            fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
        }
#endif
        ret = translate_one(ctxp, insn);

        /* if we reach a page boundary or are single stepping, stop
           generation */
        if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
            (env->singlestep_enabled) ||
            num_insns >= max_insns) {
            break;
        }
#if defined (DO_SINGLE_STEP)
        break;
#endif
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    tcg_gen_helper_0_0(helper_tb_flush);
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;

    j = gen_opc_ptr - gen_opc_buf;
    gen_opc_instr_start[lj++] = 0;

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

#if defined ALPHA_DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, 0);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
        fprintf(logfile, "\n");
    }
#endif
}

void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
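
/* Allocate and initialize a CPUAlphaState: register the TCG globals once via
 * alpha_translate_init(), pick an implementation version and give the
 * internal processor registers (IPRs) their reset values. */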
CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    alpha_translate_init();
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
#if defined (CONFIG_USER_ONLY)
#endif
    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}