/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
 */
28 #include "host-utils.h"
31 #include "qemu-common.h"
33 #define DO_SINGLE_STEP
34 #define ALPHA_DEBUG_DISAS
37 typedef struct DisasContext DisasContext
;
41 #if !defined (CONFIG_USER_ONLY)
47 /* global register indexes */
49 static TCGv cpu_ir
[31];
50 static TCGv cpu_fir
[31];
55 static char cpu_reg_names
[10*4+21*5 + 10*5+21*6];
57 #include "gen-icount.h"
static void alpha_translate_init(void)
{
    int i;
    char *p;
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");

    p = cpu_reg_names;
    for (i = 0; i < 31; i++) {
        sprintf(p, "ir%d", i);
        cpu_ir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                       offsetof(CPUState, ir[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "fir%d", i);
        cpu_fir[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                        offsetof(CPUState, fir[i]), p);
        p += (i < 10) ? 5 : 6;
    }

    cpu_pc = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                offsetof(CPUState, pc), "pc");

    cpu_lock = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
                                  offsetof(CPUState, lock), "lock");

    /* register helpers */
#define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);

    done_init = 1;
}
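/* Redefining DEF_HELPER as a tcg_register_helper() call lets the helper
   list header (not shown in this excerpt) be included once right here so
   that every helper is registered with TCG by name. */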
static always_inline void gen_excp (DisasContext *ctx,
                                    int exception, int error_code)
{
    TCGv tmp1, tmp2;

    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    tcg_gen_helper_0_2(helper_excp, tmp1, tmp2);
}

static always_inline void gen_invalid (DisasContext *ctx)
{
    gen_excp(ctx, EXCP_OPCDEC, 0);
}

static always_inline void gen_qemu_ldf (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_helper_1_1(helper_memory_to_f, t0, tmp);
}

static always_inline void gen_qemu_ldg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
    tcg_gen_qemu_ld64(tmp, t1, flags);
    tcg_gen_helper_1_1(helper_memory_to_g, t0, tmp);
}

static always_inline void gen_qemu_lds (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_qemu_ld32u(tmp, t1, flags);
    tcg_gen_helper_1_1(helper_memory_to_s, t0, tmp);
}

static always_inline void gen_qemu_ldl_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld32s(t0, t1, flags);
}

static always_inline void gen_qemu_ldq_l (TCGv t0, TCGv t1, int flags)
{
    tcg_gen_mov_i64(cpu_lock, t1);
    tcg_gen_qemu_ld64(t0, t1, flags);
}
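/* Note: LDx_L is modelled by recording the locked address in cpu_lock;
   the matching store-conditional (gen_qemu_stl_c/gen_qemu_stq_c below)
   only performs the store when cpu_lock still equals the store address.
   This is a single-processor approximation of the Alpha lock flag. */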
static always_inline void gen_load_mem (DisasContext *ctx,
                                        void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                                  int flags),
                                        int ra, int rb, int32_t disp16,
                                        int fp, int clear)
{
    TCGv addr;

    if (unlikely(ra == 31))
        return;

    addr = tcg_temp_new(TCG_TYPE_I64);
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        tcg_gen_movi_i64(addr, disp16);
    }
    if (fp)
        tcg_gen_qemu_load(cpu_fir[ra], addr, ctx->mem_idx);
    else
        tcg_gen_qemu_load(cpu_ir[ra], addr, ctx->mem_idx);
    tcg_temp_free(addr);
}

static always_inline void gen_qemu_stf (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_helper_1_1(helper_f_to_memory, tmp, t0);
    tcg_gen_qemu_st32(tmp, t1, flags);
}

static always_inline void gen_qemu_stg (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
    tcg_gen_helper_1_1(helper_g_to_memory, tmp, t0);
    tcg_gen_qemu_st64(tmp, t1, flags);
}

static always_inline void gen_qemu_sts (TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
    tcg_gen_helper_1_1(helper_s_to_memory, tmp, t0);
    tcg_gen_qemu_st32(tmp, t1, flags);
}

static always_inline void gen_qemu_stl_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st32(t0, t1, flags);
    tcg_gen_movi_i64(t0, 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 1);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}

static always_inline void gen_qemu_stq_c (TCGv t0, TCGv t1, int flags)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, cpu_lock, t1, l1);
    tcg_gen_qemu_st64(t0, t1, flags);
    tcg_gen_movi_i64(t0, 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(t0, 1);
    gen_set_label(l2);
    tcg_gen_movi_i64(cpu_lock, -1);
}
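/* Whichever path is taken above, the store-conditional ends by setting
   cpu_lock to -1, so a second STx_C without an intervening LDx_L should
   not find a matching lock address. */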
static always_inline void gen_store_mem (DisasContext *ctx,
                                         void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                                    int flags),
                                         int ra, int rb, int32_t disp16,
                                         int fp, int clear)
{
    TCGv addr = tcg_temp_new(TCG_TYPE_I64);
    if (rb != 31) {
        tcg_gen_addi_i64(addr, cpu_ir[rb], disp16);
        if (clear)
            tcg_gen_andi_i64(addr, addr, ~0x7);
    } else {
        tcg_gen_movi_i64(addr, disp16);
    }
    if (ra != 31) {
        if (fp)
            tcg_gen_qemu_store(cpu_fir[ra], addr, ctx->mem_idx);
        else
            tcg_gen_qemu_store(cpu_ir[ra], addr, ctx->mem_idx);
    } else {
        TCGv zero = tcg_const_i64(0);
        tcg_gen_qemu_store(zero, addr, ctx->mem_idx);
    }
    tcg_temp_free(addr);
}

static always_inline void gen_bcond (DisasContext *ctx,
                                     TCGCond cond,
                                     int ra, int32_t disp16, int mask)
{
    int l1, l2;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (likely(ra != 31)) {
        if (mask) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(cond, tmp, 0, l1);
        } else
            tcg_gen_brcondi_i64(cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(cond, tmp, 0, l1);
    }
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
    gen_set_label(l2);
}
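/* ctx->pc has already been advanced past the branch instruction by the
   translation loop, and Alpha branch displacements count 32-bit
   instruction words, hence the "ctx->pc + (disp << 2)" target above. */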
static always_inline void gen_fbcond (DisasContext *ctx, void *func,
                                      int ra, int32_t disp16)
{
    int l1, l2;
    TCGv tmp;

    l1 = gen_new_label();
    l2 = gen_new_label();
    if (ra != 31) {
        tmp = tcg_temp_new(TCG_TYPE_I64);
        tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
    } else {
        tmp = tcg_const_i64(0);
        tcg_gen_helper_1_1(func, tmp, tmp);
    }
    tcg_gen_brcondi_i64(TCG_COND_NE, tmp, 0, l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp16 << 2));
    gen_set_label(l2);
}

static always_inline void gen_cmov (TCGCond inv_cond,
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit, int mask)
{
    int l1;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();

    if (ra != 31) {
        if (mask) {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[ra], 1);
            tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
        } else
            tcg_gen_brcondi_i64(inv_cond, cpu_ir[ra], 0, l1);
    } else {
        /* Very uncommon case - Do not bother to optimize. */
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_brcondi_i64(inv_cond, tmp, 0, l1);
    }

    if (islit)
        tcg_gen_movi_i64(cpu_ir[rc], lit);
    else
        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
    gen_set_label(l1);
}
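/* gen_cmov is called with the *inverted* condition: the brcond above
   skips the move into cpu_ir[rc] when the architectural condition does
   not hold, so only the true path pays for the move. */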
static always_inline void gen_farith2 (void *helper,
                                       int rb, int rc)
{
    if (unlikely(rc == 31))
        return;

    if (rb != 31)
        tcg_gen_helper_1_1(helper, cpu_fir[rc], cpu_fir[rb]);
    else {
        TCGv tmp = tcg_const_i64(0);
        tcg_gen_helper_1_1(helper, cpu_fir[rc], tmp);
    }
}

static always_inline void gen_farith3 (void *helper,
                                       int ra, int rb, int rc)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (rb != 31)
            tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], cpu_fir[rb]);
        else {
            TCGv tmp = tcg_const_i64(0);
            tcg_gen_helper_1_2(helper, cpu_fir[rc], cpu_fir[ra], tmp);
        }
    } else {
        TCGv tmp = tcg_const_i64(0);
        if (rb != 31)
            tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, cpu_fir[rb]);
        else
            tcg_gen_helper_1_2(helper, cpu_fir[rc], tmp, tmp);
    }
}

static always_inline void gen_fcmov (void *func,
                                     int ra, int rb, int rc)
{
    int l1;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    tmp = tcg_temp_new(TCG_TYPE_I64);
    if (ra != 31) {
        tmp = tcg_temp_new(TCG_TYPE_I64);
        tcg_gen_helper_1_1(func, tmp, cpu_fir[ra]);
    } else {
        tmp = tcg_const_i64(0);
        tcg_gen_helper_1_1(func, tmp, tmp);
    }
    tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
    if (rb != 31)
        tcg_gen_mov_i64(cpu_fir[rc], cpu_fir[ra]);
    else
        tcg_gen_movi_i64(cpu_fir[rc], 0);
    gen_set_label(l1);
}

/* EXTWH, EXTLH, EXTQH */
static always_inline void gen_ext_h(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            if (lit != 0)
                tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], 64 - ((lit & 7) * 8));
            else
                tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[ra]);
        } else {
            TCGv tmp1, tmp2;
            tmp1 = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp1, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp1, tmp1, 3);
            tmp2 = tcg_const_i64(64);
            tcg_gen_sub_i64(tmp1, tmp2, tmp1);
            tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], tmp1);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}

/* EXTBL, EXTWL, EXTLL, EXTQL */
static always_inline void gen_ext_l(void (*tcg_gen_ext_i64)(TCGv t0, TCGv t1),
                                    int ra, int rb, int rc,
                                    int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], (lit & 7) * 8);
        } else {
            TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
            tcg_gen_andi_i64(tmp, cpu_ir[rb], 7);
            tcg_gen_shli_i64(tmp, tmp, 3);
            tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], tmp);
        }
        if (tcg_gen_ext_i64)
            tcg_gen_ext_i64(cpu_ir[rc], cpu_ir[rc]);
    } else
        tcg_gen_movi_i64(cpu_ir[rc], 0);
}
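/* The EXTxH/EXTxL helpers differ only in shift direction: both derive
   the shift amount from the low three bits of rb (a byte count, hence
   the "* 8"/"<< 3"), and the optional tcg_gen_ext_i64 callback then
   truncates to the operand width; passing NULL (EXTQL/EXTQH) keeps all
   64 bits. */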
/* Code to call arith3 helpers */
static always_inline void gen_arith3 (void *helper,
                                      int ra, int rb, int rc,
                                      int islit, uint8_t lit)
{
    if (unlikely(rc == 31))
        return;

    if (ra != 31) {
        if (islit) {
            TCGv tmp = tcg_const_i64(lit);
            tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], tmp);
        } else
            tcg_gen_helper_1_2(helper, cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
    } else {
        TCGv tmp1 = tcg_const_i64(0);
        if (islit) {
            TCGv tmp2 = tcg_const_i64(lit);
            tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, tmp2);
        } else
            tcg_gen_helper_1_2(helper, cpu_ir[rc], tmp1, cpu_ir[rb]);
    }
}

static always_inline void gen_cmp(TCGCond cond,
                                  int ra, int rb, int rc,
                                  int islit, uint8_t lit)
{
    int l1, l2;
    TCGv tmp;

    if (unlikely(rc == 31))
        return;

    l1 = gen_new_label();
    l2 = gen_new_label();

    if (ra != 31) {
        tmp = tcg_temp_new(TCG_TYPE_I64);
        tcg_gen_mov_i64(tmp, cpu_ir[ra]);
    } else
        tmp = tcg_const_i64(0);
    if (islit)
        tcg_gen_brcondi_i64(cond, tmp, lit, l1);
    else
        tcg_gen_brcond_i64(cond, tmp, cpu_ir[rb], l1);

    tcg_gen_movi_i64(cpu_ir[rc], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_movi_i64(cpu_ir[rc], 1);
    gen_set_label(l2);
}
static always_inline int translate_one (DisasContext *ctx, uint32_t insn)
{
    uint32_t palcode;
    int32_t disp21, disp16, disp12;
    uint16_t fn11, fn16;
    uint8_t opc, ra, rb, rc, sbz, fpfn, fn7, fn2, islit;
    uint8_t lit;
    int ret;

    /* Decode all instruction fields */
    opc = insn >> 26;
    ra = (insn >> 21) & 0x1F;
    rb = (insn >> 16) & 0x1F;
    rc = insn & 0x1F;
    sbz = (insn >> 13) & 0x07;
    islit = (insn >> 12) & 1;
    if (rb == 31 && !islit) {
        /* rb = r31 reads as zero: treat it as a literal 0 */
        islit = 1;
        lit = 0;
    } else
        lit = (insn >> 13) & 0xFF;
    palcode = insn & 0x03FFFFFF;
    disp21 = ((int32_t)((insn & 0x001FFFFF) << 11)) >> 11;
    disp16 = (int16_t)(insn & 0x0000FFFF);
    disp12 = (int32_t)((insn & 0x00000FFF) << 20) >> 20;
    fn16 = insn & 0x0000FFFF;
    fn11 = (insn >> 5) & 0x000007FF;
    fpfn = fn11 & 0x3F;
    fn7 = (insn >> 5) & 0x0000007F;
    fn2 = (insn >> 5) & 0x00000003;
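    /* Field layout implied by the masks above (Alpha instruction formats):
     *   opc     insn<31:26>   ra     insn<25:21>   rb     insn<20:16>
     *   disp16  insn<15:0>    disp21 insn<20:0>    disp12 insn<11:0>
     *   lit     insn<20:13>   islit  insn<12>      sbz    insn<15:13>
     *   fn11    insn<15:5>    fn7    insn<11:5>    fn2    insn<6:5>
     *   palcode insn<25:0>    (displacements are sign-extended)
     */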
#if defined ALPHA_DEBUG_DISAS
    if (logfile != NULL) {
        fprintf(logfile, "opc %02x ra %d rb %d rc %d disp16 %04x\n",
                opc, ra, rb, rc, disp16);
    }
#endif
    ret = 0;
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        if (palcode >= 0x80 && palcode < 0xC0) {
            /* Unprivileged PAL call */
            gen_excp(ctx, EXCP_CALL_PAL + ((palcode & 0x1F) << 6), 0);
#if !defined (CONFIG_USER_ONLY)
        } else if (palcode < 0x40) {
            /* Privileged PAL code */
            if (ctx->mem_idx & 1)
                goto invalid_opc;
            else
                gen_excp(ctx, EXCP_CALL_PALP + ((palcode & 0x1F) << 6), 0);
#endif
        } else {
            /* Invalid PAL call */
            goto invalid_opc;
        }
        ret = 3;
        break;
    case 0x08:
        /* LDA */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16);
        }
        break;
    case 0x09:
        /* LDAH */
        if (likely(ra != 31)) {
            if (rb != 31)
                tcg_gen_addi_i64(cpu_ir[ra], cpu_ir[rb], disp16 << 16);
            else
                tcg_gen_movi_i64(cpu_ir[ra], disp16 << 16);
        }
        break;
    case 0x0A:
        /* LDBU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        if (!(ctx->amask & AMASK_BWX))
            goto invalid_opc;
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 1);
        break;
    case 0x0D:
        /* STW */
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
        break;
    case 0x0E:
        /* STB */
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
        break;
    case 0x10:
        switch (fn7) {
        case 0x00:
            /* ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else {
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x02:
            /* S4ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x09:
            /* SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit) {
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    } else {
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0B:
            /* S4SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x0F:
            /* CMPBGE */
            gen_arith3(helper_cmpbge, ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* S8ADDL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_add_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x1B:
            /* S8SUBL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(tmp, tmp, lit);
                    else
                        tcg_gen_sub_i64(tmp, tmp, cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], tmp);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else {
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                        tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                    }
                }
            }
            break;
        case 0x1D:
            /* CMPULT */
            gen_cmp(TCG_COND_LTU, ra, rb, rc, islit, lit);
            break;
        case 0x20:
            /* ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x22:
            /* S4ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x29:
            /* SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2B:
            /* S4SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 2);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x2D:
            /* CMPEQ */
            gen_cmp(TCG_COND_EQ, ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* S8ADDQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_addi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_add_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3B:
            /* S8SUBQ */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                    tcg_gen_shli_i64(tmp, cpu_ir[ra], 3);
                    if (islit)
                        tcg_gen_subi_i64(cpu_ir[rc], tmp, lit);
                    else
                        tcg_gen_sub_i64(cpu_ir[rc], tmp, cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], -lit);
                    else
                        tcg_gen_neg_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x3D:
            /* CMPULE */
            gen_cmp(TCG_COND_LEU, ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* ADDL/V */
            gen_arith3(helper_addlv, ra, rb, rc, islit, lit);
            break;
        case 0x49:
            /* SUBL/V */
            gen_arith3(helper_sublv, ra, rb, rc, islit, lit);
            break;
        case 0x4D:
            /* CMPLT */
            gen_cmp(TCG_COND_LT, ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* ADDQ/V */
            gen_arith3(helper_addqv, ra, rb, rc, islit, lit);
            break;
        case 0x69:
            /* SUBQ/V */
            gen_arith3(helper_subqv, ra, rb, rc, islit, lit);
            break;
        case 0x6D:
            /* CMPLE */
            gen_cmp(TCG_COND_LE, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x11:
        switch (fn7) {
        case 0x00:
            /* AND */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x08:
            /* BIC */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_andi_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_not_i64(tmp, cpu_ir[rb]);
                        tcg_gen_and_i64(cpu_ir[rc], cpu_ir[ra], tmp);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x14:
            /* CMOVLBS */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 1);
            break;
        case 0x16:
            /* CMOVLBC */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 1);
            break;
        case 0x20:
            /* BIS */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x24:
            /* CMOVEQ */
            gen_cmov(TCG_COND_NE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x26:
            /* CMOVNE */
            gen_cmov(TCG_COND_EQ, ra, rb, rc, islit, lit, 0);
            break;
        case 0x28:
            /* ORNOT */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_ori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_not_i64(tmp, cpu_ir[rb]);
                        tcg_gen_or_i64(cpu_ir[rc], cpu_ir[ra], tmp);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x40:
            /* XOR */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], lit);
                    else
                        tcg_gen_mov_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x44:
            /* CMOVLT */
            gen_cmov(TCG_COND_GE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x46:
            /* CMOVGE */
            gen_cmov(TCG_COND_LT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x48:
            /* EQV */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_xori_i64(cpu_ir[rc], cpu_ir[ra], ~lit);
                    else {
                        TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_not_i64(tmp, cpu_ir[rb]);
                        tcg_gen_xor_i64(cpu_ir[rc], cpu_ir[ra], tmp);
                    }
                } else {
                    if (islit)
                        tcg_gen_movi_i64(cpu_ir[rc], ~lit);
                    else
                        tcg_gen_not_i64(cpu_ir[rc], cpu_ir[rb]);
                }
            }
            break;
        case 0x61:
            /* AMASK */
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], helper_amask(lit));
                else
                    tcg_gen_helper_1_1(helper_amask, cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x64:
            /* CMOVLE */
            gen_cmov(TCG_COND_GT, ra, rb, rc, islit, lit, 0);
            break;
        case 0x66:
            /* CMOVGT */
            gen_cmov(TCG_COND_LE, ra, rb, rc, islit, lit, 0);
            break;
        case 0x6C:
            /* IMPLVER */
            if (rc != 31)
                tcg_gen_helper_1_0(helper_load_implver, cpu_ir[rc]);
            break;
        default:
            goto invalid_opc;
        }
        break;
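        /* AMASK and IMPLVER let software query CPU features; both are
           answered from translator state instead of trapping, which is why
           AMASK with a literal operand can even be folded at translation
           time by calling helper_amask(lit) directly on the host. */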
    case 0x12:
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_arith3(helper_mskbl, ra, rb, rc, islit, lit);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(&tcg_gen_ext8u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x0B:
            /* INSBL */
            gen_arith3(helper_insbl, ra, rb, rc, islit, lit);
            break;
        case 0x12:
            /* MSKWL */
            gen_arith3(helper_mskwl, ra, rb, rc, islit, lit);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x1B:
            /* INSWL */
            gen_arith3(helper_inswl, ra, rb, rc, islit, lit);
            break;
        case 0x22:
            /* MSKLL */
            gen_arith3(helper_mskll, ra, rb, rc, islit, lit);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x2B:
            /* INSLL */
            gen_arith3(helper_insll, ra, rb, rc, islit, lit);
            break;
        case 0x30:
            /* ZAP */
            gen_arith3(helper_zap, ra, rb, rc, islit, lit);
            break;
        case 0x31:
            /* ZAPNOT */
            gen_arith3(helper_zapnot, ra, rb, rc, islit, lit);
            break;
        case 0x32:
            /* MSKQL */
            gen_arith3(helper_mskql, ra, rb, rc, islit, lit);
            break;
        case 0x34:
            /* SRL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shri_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shr_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(NULL, ra, rb, rc, islit, lit);
            break;
        case 0x39:
            /* SLL */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_shli_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_shl_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_arith3(helper_insql, ra, rb, rc, islit, lit);
            break;
        case 0x3C:
            /* SRA */
            if (likely(rc != 31)) {
                if (ra != 31) {
                    if (islit)
                        tcg_gen_sari_i64(cpu_ir[rc], cpu_ir[ra], lit & 0x3f);
                    else {
                        TCGv shift = tcg_temp_new(TCG_TYPE_I64);
                        tcg_gen_andi_i64(shift, cpu_ir[rb], 0x3f);
                        tcg_gen_sar_i64(cpu_ir[rc], cpu_ir[ra], shift);
                        tcg_temp_free(shift);
                    }
                } else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_arith3(helper_mskwh, ra, rb, rc, islit, lit);
            break;
        case 0x57:
            /* INSWH */
            gen_arith3(helper_inswh, ra, rb, rc, islit, lit);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(&tcg_gen_ext16u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x62:
            /* MSKLH */
            gen_arith3(helper_msklh, ra, rb, rc, islit, lit);
            break;
        case 0x67:
            /* INSLH */
            gen_arith3(helper_inslh, ra, rb, rc, islit, lit);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(&tcg_gen_ext32u_i64, ra, rb, rc, islit, lit);
            break;
        case 0x72:
            /* MSKQH */
            gen_arith3(helper_mskqh, ra, rb, rc, islit, lit);
            break;
        case 0x77:
            /* INSQH */
            gen_arith3(helper_insqh, ra, rb, rc, islit, lit);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(NULL, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x13:
        switch (fn7) {
        case 0x00:
            /* MULL */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else {
                    if (islit)
                        tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                    else
                        tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
                    tcg_gen_ext32s_i64(cpu_ir[rc], cpu_ir[rc]);
                }
            }
            break;
        case 0x20:
            /* MULQ */
            if (likely(rc != 31)) {
                if (ra == 31)
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
                else if (islit)
                    tcg_gen_muli_i64(cpu_ir[rc], cpu_ir[ra], lit);
                else
                    tcg_gen_mul_i64(cpu_ir[rc], cpu_ir[ra], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* UMULH */
            gen_arith3(helper_umulh, ra, rb, rc, islit, lit);
            break;
        case 0x40:
            /* MULL/V */
            gen_arith3(helper_mullv, ra, rb, rc, islit, lit);
            break;
        case 0x60:
            /* MULQ/V */
            gen_arith3(helper_mulqv, ra, rb, rc, islit, lit);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x14:
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x04:
            /* ITOFS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    tcg_gen_helper_1_1(helper_memory_to_s, cpu_fir[rc], tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x0A:
            /* SQRTF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_farith2(&helper_sqrtf, rb, rc);
            break;
        case 0x0B:
            /* SQRTS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_farith2(&helper_sqrts, rb, rc);
            break;
        case 0x14:
            /* ITOFF */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31) {
                    TCGv tmp = tcg_temp_new(TCG_TYPE_I32);
                    tcg_gen_trunc_i64_i32(tmp, cpu_ir[ra]);
                    tcg_gen_helper_1_1(helper_memory_to_f, cpu_fir[rc], tmp);
                } else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x24:
            /* ITOFT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
                else
                    tcg_gen_movi_i64(cpu_fir[rc], 0);
            }
            break;
        case 0x2A:
            /* SQRTG */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_farith2(&helper_sqrtg, rb, rc);
            break;
        case 0x2B:
            /* SQRTT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            gen_farith2(&helper_sqrtt, rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_farith3(&helper_addf, ra, rb, rc);
            break;
        case 0x01:
            /* SUBF */
            gen_farith3(&helper_subf, ra, rb, rc);
            break;
        case 0x02:
            /* MULF */
            gen_farith3(&helper_mulf, ra, rb, rc);
            break;
        case 0x03:
            /* DIVF */
            gen_farith3(&helper_divf, ra, rb, rc);
            break;
        case 0x1E:
            /* CVTDG */
            gen_farith2(&helper_cvtdg, rb, rc);
            break;
        case 0x20:
            /* ADDG */
            gen_farith3(&helper_addg, ra, rb, rc);
            break;
        case 0x21:
            /* SUBG */
            gen_farith3(&helper_subg, ra, rb, rc);
            break;
        case 0x22:
            /* MULG */
            gen_farith3(&helper_mulg, ra, rb, rc);
            break;
        case 0x23:
            /* DIVG */
            gen_farith3(&helper_divg, ra, rb, rc);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_farith3(&helper_cmpgeq, ra, rb, rc);
            break;
        case 0x26:
            /* CMPGLT */
            gen_farith3(&helper_cmpglt, ra, rb, rc);
            break;
        case 0x27:
            /* CMPGLE */
            gen_farith3(&helper_cmpgle, ra, rb, rc);
            break;
        case 0x2C:
            /* CVTGF */
            gen_farith2(&helper_cvtgf, rb, rc);
            break;
        case 0x2D:
            /* CVTGD */
            gen_farith2(&helper_cvtgd, rb, rc);
            break;
        case 0x2F:
            /* CVTGQ */
            gen_farith2(&helper_cvtgq, rb, rc);
            break;
        case 0x3C:
            /* CVTQF */
            gen_farith2(&helper_cvtqf, rb, rc);
            break;
        case 0x3E:
            /* CVTQG */
            gen_farith2(&helper_cvtqg, rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x16:
        /* IEEE floating-point */
        /* XXX: rounding mode and traps are ignored (!) */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_farith3(&helper_adds, ra, rb, rc);
            break;
        case 0x01:
            /* SUBS */
            gen_farith3(&helper_subs, ra, rb, rc);
            break;
        case 0x02:
            /* MULS */
            gen_farith3(&helper_muls, ra, rb, rc);
            break;
        case 0x03:
            /* DIVS */
            gen_farith3(&helper_divs, ra, rb, rc);
            break;
        case 0x20:
            /* ADDT */
            gen_farith3(&helper_addt, ra, rb, rc);
            break;
        case 0x21:
            /* SUBT */
            gen_farith3(&helper_subt, ra, rb, rc);
            break;
        case 0x22:
            /* MULT */
            gen_farith3(&helper_mult, ra, rb, rc);
            break;
        case 0x23:
            /* DIVT */
            gen_farith3(&helper_divt, ra, rb, rc);
            break;
        case 0x24:
            /* CMPTUN */
            gen_farith3(&helper_cmptun, ra, rb, rc);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_farith3(&helper_cmpteq, ra, rb, rc);
            break;
        case 0x26:
            /* CMPTLT */
            gen_farith3(&helper_cmptlt, ra, rb, rc);
            break;
        case 0x27:
            /* CMPTLE */
            gen_farith3(&helper_cmptle, ra, rb, rc);
            break;
        case 0x2C:
            /* XXX: incorrect */
            if (fn11 == 0x2AC) {
                /* CVTST */
                gen_farith2(&helper_cvtst, rb, rc);
            } else {
                /* CVTTS */
                gen_farith2(&helper_cvtts, rb, rc);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            gen_farith2(&helper_cvttq, rb, rc);
            break;
        case 0x3C:
            /* CVTQS */
            gen_farith2(&helper_cvtqs, rb, rc);
            break;
        case 0x3E:
            /* CVTQT */
            gen_farith2(&helper_cvtqt, rb, rc);
            break;
        default:
            goto invalid_opc;
        }
        break;
1502 if (likely(rc
!= 31)) {
1505 tcg_gen_mov_i64(cpu_fir
[rc
], cpu_fir
[ra
]);
1508 gen_farith3(&helper_cpys
, ra
, rb
, rc
);
1513 gen_farith3(&helper_cpysn
, ra
, rb
, rc
);
1517 gen_farith3(&helper_cpyse
, ra
, rb
, rc
);
1521 if (likely(ra
!= 31))
1522 tcg_gen_helper_0_1(helper_store_fpcr
, cpu_fir
[ra
]);
1524 TCGv tmp
= tcg_const_i64(0);
1525 tcg_gen_helper_0_1(helper_store_fpcr
, tmp
);
1531 if (likely(ra
!= 31))
1532 tcg_gen_helper_1_0(helper_load_fpcr
, cpu_fir
[ra
]);
1536 gen_fcmov(&helper_cmpfeq
, ra
, rb
, rc
);
1540 gen_fcmov(&helper_cmpfne
, ra
, rb
, rc
);
1544 gen_fcmov(&helper_cmpflt
, ra
, rb
, rc
);
1548 gen_fcmov(&helper_cmpfge
, ra
, rb
, rc
);
1552 gen_fcmov(&helper_cmpfle
, ra
, rb
, rc
);
1556 gen_fcmov(&helper_cmpfgt
, ra
, rb
, rc
);
1560 gen_farith2(&helper_cvtql
, rb
, rc
);
1564 gen_farith2(&helper_cvtqlv
, rb
, rc
);
1568 gen_farith2(&helper_cvtqlsv
, rb
, rc
);
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. Just exit from the current tb */
            ret = 2;
            break;
        case 0xC000:
            /* RPCC */
            if (ra != 31)
                tcg_gen_helper_1_0(helper_load_pcc, cpu_ir[ra]);
            break;
        case 0xE000:
            /* RC */
            if (ra != 31)
                tcg_gen_helper_1_0(helper_rc, cpu_ir[ra]);
            break;
        case 0xE800:
            /* ECB */
            /* XXX: TODO: evict tb cache at address rb */
            goto invalid_opc;
        case 0xF000:
            /* RS */
            if (ra != 31)
                tcg_gen_helper_1_0(helper_rs, cpu_ir[ra]);
            break;
        default:
            goto invalid_opc;
        }
        break;
1636 #if defined (CONFIG_USER_ONLY)
1642 TCGv tmp
= tcg_const_i32(insn
& 0xFF);
1643 tcg_gen_helper_1_2(helper_mfpr
, cpu_ir
[ra
], tmp
, cpu_ir
[ra
]);
1650 tcg_gen_movi_i64(cpu_ir
[ra
], ctx
->pc
);
1652 tcg_gen_andi_i64(cpu_pc
, cpu_ir
[rb
], ~3);
1654 tcg_gen_movi_i64(cpu_pc
, 0);
1655 /* Those four jumps only differ by the branch prediction hint */
    case 0x1B:
        /* HW_LD (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        {
            TCGv addr = tcg_temp_new(TCG_TYPE_I64);
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
                break;
            case 0x1:
                /* Quadword physical access */
                tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                tcg_gen_helper_0_2(helper_ldl_l_raw, cpu_ir[ra], addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                tcg_gen_helper_0_2(helper_ldq_l_raw, cpu_ir[ra], addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch */
                tcg_gen_helper_0_2(helper_ldl_kernel, cpu_ir[ra], addr);
                break;
            case 0x5:
                /* Quadword virtual PTE fetch */
                tcg_gen_helper_0_2(helper_ldq_kernel, cpu_ir[ra], addr);
                break;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access */
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
                break;
            case 0x9:
                /* Quadword virtual access */
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
                break;
            case 0xA:
                /* Longword virtual access with protection check */
                tcg_gen_qemu_ld32s(cpu_ir[ra], addr, ctx->flags);
                break;
            case 0xB:
                /* Quadword virtual access with protection check */
                tcg_gen_qemu_ld64(cpu_ir[ra], addr, ctx->flags);
                break;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                tcg_gen_helper_0_0(helper_set_alt_mode);
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_ldl_raw, cpu_ir[ra], addr);
                tcg_gen_helper_0_0(helper_restore_mode);
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                tcg_gen_helper_0_0(helper_set_alt_mode);
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_ldq_raw, cpu_ir[ra], addr);
                tcg_gen_helper_0_0(helper_restore_mode);
                break;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                 * protection checks
                 */
                tcg_gen_helper_0_0(helper_set_alt_mode);
                tcg_gen_helper_0_2(helper_ldl_data, cpu_ir[ra], addr);
                tcg_gen_helper_0_0(helper_restore_mode);
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                 * protection checks
                 */
                tcg_gen_helper_0_0(helper_set_alt_mode);
                tcg_gen_helper_0_2(helper_ldq_data, cpu_ir[ra], addr);
                tcg_gen_helper_0_0(helper_restore_mode);
                break;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
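        /* The (insn >> 12) & 0xF field of HW_LD (and of HW_ST below)
           selects among physical, locked, PTE and alternate-access-mode
           variants; these forms are only meant for PALcode, hence the
           CONFIG_USER_ONLY guard above. */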
    case 0x1C:
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
                else
                    tcg_gen_ext8s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x01:
            /* SEXTW */
            if (!(ctx->amask & AMASK_BWX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
                else
                    tcg_gen_ext16s_i64(cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x30:
            /* CTPOP */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
                else
                    tcg_gen_helper_1_1(helper_ctpop, cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x31:
            /* PERR */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x32:
            /* CTLZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
                else
                    tcg_gen_helper_1_1(helper_ctlz, cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x33:
            /* CTTZ */
            if (!(ctx->amask & AMASK_CIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (islit)
                    tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
                else
                    tcg_gen_helper_1_1(helper_cttz, cpu_ir[rc], cpu_ir[rb]);
            }
            break;
        case 0x34:
            /* UNPKBW */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x35:
            /* UNPKBL */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x36:
            /* PKWB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x37:
            /* PKLB */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x38:
            /* MINSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x39:
            /* MINSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3A:
            /* MINUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3B:
            /* MINUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3C:
            /* MAXUB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3D:
            /* MAXUW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3E:
            /* MAXSB8 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x3F:
            /* MAXSW4 */
            if (!(ctx->amask & AMASK_MVI))
                goto invalid_opc;
            /* XXX: TODO */
            goto invalid_opc;
        case 0x70:
            /* FTOIT */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (likely(rc != 31)) {
                if (ra != 31)
                    tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
                else
                    tcg_gen_movi_i64(cpu_ir[rc], 0);
            }
            break;
        case 0x78:
            /* FTOIS */
            if (!(ctx->amask & AMASK_FIX))
                goto invalid_opc;
            if (rc != 31) {
                TCGv tmp1 = tcg_temp_new(TCG_TYPE_I32);
                if (ra != 31)
                    tcg_gen_helper_1_1(helper_s_to_memory, tmp1, cpu_fir[ra]);
                else {
                    TCGv tmp2 = tcg_const_i64(0);
                    tcg_gen_helper_1_1(helper_s_to_memory, tmp1, tmp2);
                    tcg_temp_free(tmp2);
                }
                tcg_gen_ext_i32_i64(cpu_ir[rc], tmp1);
                tcg_temp_free(tmp1);
            }
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        {
            TCGv tmp1 = tcg_const_i32(insn & 0xFF);
            if (ra != 31)
                tcg_gen_helper_0_2(helper_mtpr, tmp1, cpu_ir[ra]);
            else {
                TCGv tmp2 = tcg_const_i64(0);
                tcg_gen_helper_0_2(helper_mtpr, tmp1, tmp2);
                tcg_temp_free(tmp2);
            }
            tcg_temp_free(tmp1);
        }
        break;
#endif
    case 0x1E:
        /* HW_REI (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        if (rb == 31) {
            /* "Old" alpha */
            tcg_gen_helper_0_0(helper_hw_rei);
        } else {
            TCGv tmp;

            if (ra != 31) {
                tmp = tcg_temp_new(TCG_TYPE_I64);
                tcg_gen_addi_i64(tmp, cpu_ir[rb], (((int64_t)insn << 51) >> 51));
            } else
                tmp = tcg_const_i64(((int64_t)insn << 51) >> 51);
            tcg_gen_helper_0_1(helper_hw_ret, tmp);
        }
        ret = 2;
        break;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#if defined (CONFIG_USER_ONLY)
        goto invalid_opc;
#else
        {
            TCGv addr, val;

            addr = tcg_temp_new(TCG_TYPE_I64);
            if (rb != 31)
                tcg_gen_addi_i64(addr, cpu_ir[rb], disp12);
            else
                tcg_gen_movi_i64(addr, disp12);
            if (ra != 31)
                val = cpu_ir[ra];
            else {
                val = tcg_temp_new(TCG_TYPE_I64);
                tcg_gen_movi_i64(val, 0);
            }
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                tcg_gen_helper_0_2(helper_stl_raw, val, addr);
                break;
            case 0x1:
                /* Quadword physical access */
                tcg_gen_helper_0_2(helper_stq_raw, val, addr);
                break;
            case 0x2:
                /* Longword physical access with lock */
                tcg_gen_helper_1_2(helper_stl_c_raw, val, val, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                tcg_gen_helper_1_2(helper_stq_c_raw, val, val, addr);
                break;
            case 0x4:
                /* Longword virtual access */
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_stl_raw, val, addr);
                break;
            case 0x5:
                /* Quadword virtual access */
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_stq_raw, val, addr);
                break;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                tcg_gen_helper_0_0(helper_set_alt_mode);
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_stl_raw, val, addr);
                tcg_gen_helper_0_0(helper_restore_mode);
                break;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                tcg_gen_helper_0_0(helper_set_alt_mode);
                tcg_gen_helper_1_1(helper_st_virt_to_phys, addr, addr);
                tcg_gen_helper_0_2(helper_stq_raw, val, addr);
                tcg_gen_helper_0_0(helper_restore_mode);
                break;
            default:
                goto invalid_opc;
            }
            tcg_temp_free(addr);
        }
        break;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        gen_store_mem(ctx, &gen_qemu_stl_c, ra, rb, disp16, 0, 0);
        break;
    case 0x2F:
        /* STQ_C */
        gen_store_mem(ctx, &gen_qemu_stq_c, ra, rb, disp16, 0, 0);
        break;
    case 0x30:
        /* BR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x31:
        /* FBEQ */
        gen_fbcond(ctx, &helper_cmpfeq, ra, disp16);
        ret = 1;
        break;
    case 0x32:
        /* FBLT */
        gen_fbcond(ctx, &helper_cmpflt, ra, disp16);
        ret = 1;
        break;
    case 0x33:
        /* FBLE */
        gen_fbcond(ctx, &helper_cmpfle, ra, disp16);
        ret = 1;
        break;
    case 0x34:
        /* BSR */
        if (ra != 31)
            tcg_gen_movi_i64(cpu_ir[ra], ctx->pc);
        tcg_gen_movi_i64(cpu_pc, ctx->pc + (int64_t)(disp21 << 2));
        ret = 1;
        break;
    case 0x35:
        /* FBNE */
        gen_fbcond(ctx, &helper_cmpfne, ra, disp16);
        ret = 1;
        break;
    case 0x36:
        /* FBGE */
        gen_fbcond(ctx, &helper_cmpfge, ra, disp16);
        ret = 1;
        break;
    case 0x37:
        /* FBGT */
        gen_fbcond(ctx, &helper_cmpfgt, ra, disp16);
        ret = 1;
        break;
    case 0x38:
        /* BLBC */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 1);
        ret = 1;
        break;
    case 0x39:
        /* BEQ */
        gen_bcond(ctx, TCG_COND_EQ, ra, disp16, 0);
        ret = 1;
        break;
    case 0x3A:
        /* BLT */
        gen_bcond(ctx, TCG_COND_LT, ra, disp16, 0);
        ret = 1;
        break;
    case 0x3B:
        /* BLE */
        gen_bcond(ctx, TCG_COND_LE, ra, disp16, 0);
        ret = 1;
        break;
    case 0x3C:
        /* BLBS */
        gen_bcond(ctx, TCG_COND_NE, ra, disp16, 1);
        ret = 1;
        break;
    case 0x3D:
        /* BNE */
        gen_bcond(ctx, TCG_COND_NE, ra, disp16, 0);
        ret = 1;
        break;
    case 0x3E:
        /* BGE */
        gen_bcond(ctx, TCG_COND_GE, ra, disp16, 0);
        ret = 1;
        break;
    case 0x3F:
        /* BGT */
        gen_bcond(ctx, TCG_COND_GT, ra, disp16, 0);
        ret = 1;
        break;
    default:
        goto invalid_opc;
    invalid_opc:
        gen_invalid(ctx);
        ret = 3;
        break;
    }

    return ret;
}
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
#if defined ALPHA_DEBUG_DISAS
    static int insn_count;
#endif
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    uint32_t insn;
    uint16_t *gen_opc_end;
    int j, lj = -1;
    int ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.amask = env->amask;
#if defined (CONFIG_USER_ONLY)
    ctx.mem_idx = 0;
#else
    ctx.mem_idx = ((env->ps >> 3) & 3);
    ctx.pal_mode = env->ipr[IPR_EXC_ADDR] & 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    for (ret = 0; ret == 0;) {
        if (env->nb_breakpoints > 0) {
            for(j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == ctx.pc) {
                    gen_excp(&ctx, EXCP_DEBUG, 0);
                    break;
                }
            }
        }
        if (search_pc) {
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
            }
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if defined ALPHA_DEBUG_DISAS
        insn_count++;
        if (logfile != NULL) {
            fprintf(logfile, "pc " TARGET_FMT_lx " mem_idx %d\n",
                    ctx.pc, ctx.mem_idx);
        }
#endif
        insn = ldl_code(ctx.pc);
#if defined ALPHA_DEBUG_DISAS
        if (logfile != NULL) {
            fprintf(logfile, "opcode %08x %d\n", insn, insn_count);
        }
#endif
        num_insns++;
        ctx.pc += 4;
        ret = translate_one(ctxp, insn);
        /* if we reach a page boundary or are single stepping, stop
         * generation
         */
        if (((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0) ||
            (env->singlestep_enabled) ||
            num_insns >= max_insns) {
            break;
        }
#if defined (DO_SINGLE_STEP)
        break;
#endif
    }
    if (ret != 1 && ret != 3) {
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
    }
#if defined (DO_TB_FLUSH)
    tcg_gen_helper_0_0(helper_tb_flush);
#endif
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Generate the return instruction */
    tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined ALPHA_DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_CPU) {
        cpu_dump_state(env, logfile, fprintf, 0);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, ctx.pc - pc_start, 1);
        fprintf(logfile, "\n");
    }
#endif
}
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}

CPUAlphaState * cpu_alpha_init (const char *cpu_model)
{
    CPUAlphaState *env;
    uint64_t hwpcb;

    env = qemu_mallocz(sizeof(CPUAlphaState));
    if (!env)
        return NULL;
    cpu_exec_init(env);
    alpha_translate_init();
    /* XXX: should not be hardcoded */
    env->implver = IMPLVER_2106x;
#if defined (CONFIG_USER_ONLY)
    /* ... */
#endif

    /* Initialize IPR */
    hwpcb = env->ipr[IPR_PCBB];
    env->ipr[IPR_ASN] = 0;
    env->ipr[IPR_ASTEN] = 0;
    env->ipr[IPR_ASTSR] = 0;
    env->ipr[IPR_DATFX] = 0;
    /* XXX: fix this */
    //    env->ipr[IPR_ESP] = ldq_raw(hwpcb + 8);
    //    env->ipr[IPR_KSP] = ldq_raw(hwpcb + 0);
    //    env->ipr[IPR_SSP] = ldq_raw(hwpcb + 16);
    //    env->ipr[IPR_USP] = ldq_raw(hwpcb + 24);
    env->ipr[IPR_FEN] = 0;
    env->ipr[IPR_IPL] = 31;
    env->ipr[IPR_MCES] = 0;
    env->ipr[IPR_PERFMON] = 0; /* Implementation specific */
    //    env->ipr[IPR_PTBR] = ldq_raw(hwpcb + 32);
    env->ipr[IPR_SISR] = 0;
    env->ipr[IPR_VIRBND] = -1ULL;

    return env;
}
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
{
    env->pc = gen_opc_pc[pc_pos];
}