4 * Copyright (c) 2005 Samuel Tardieu
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 #define SH4_DEBUG_DISAS
22 //#define SH4_SINGLE_STEP
32 typedef struct DisasContext
{
33 struct TranslationBlock
*tb
;
42 int singlestep_enabled
;
47 #if defined(CONFIG_USER_ONLY)
48 #define IS_USER(ctx) 1
50 #define IS_USER(ctx) (!(ctx->sr & SR_MD))
54 BS_NONE
= 0, /* We go out of the TB without reaching a branch or an
57 BS_STOP
= 1, /* We want to stop translation for any reason */
58 BS_BRANCH
= 2, /* We reached a branch condition */
59 BS_EXCP
= 3, /* We reached an exception condition */
62 /* global register indexes */
63 static TCGv_ptr cpu_env
;
64 static TCGv cpu_gregs
[24];
65 static TCGv cpu_pc
, cpu_sr
, cpu_ssr
, cpu_spc
, cpu_gbr
;
66 static TCGv cpu_vbr
, cpu_sgr
, cpu_dbr
, cpu_mach
, cpu_macl
;
67 static TCGv cpu_pr
, cpu_fpscr
, cpu_fpul
, cpu_ldst
;
68 static TCGv cpu_fregs
[32];
70 /* internal register indexes */
71 static TCGv cpu_flags
, cpu_delayed_pc
;
73 static uint32_t gen_opc_hflags
[OPC_BUF_SIZE
];
75 #include "gen-icount.h"
77 static void sh4_translate_init(void)
80 static int done_init
= 0;
81 static const char * const gregnames
[24] = {
82 "R0_BANK0", "R1_BANK0", "R2_BANK0", "R3_BANK0",
83 "R4_BANK0", "R5_BANK0", "R6_BANK0", "R7_BANK0",
84 "R8", "R9", "R10", "R11", "R12", "R13", "R14", "R15",
85 "R0_BANK1", "R1_BANK1", "R2_BANK1", "R3_BANK1",
86 "R4_BANK1", "R5_BANK1", "R6_BANK1", "R7_BANK1"
88 static const char * const fregnames
[32] = {
89 "FPR0_BANK0", "FPR1_BANK0", "FPR2_BANK0", "FPR3_BANK0",
90 "FPR4_BANK0", "FPR5_BANK0", "FPR6_BANK0", "FPR7_BANK0",
91 "FPR8_BANK0", "FPR9_BANK0", "FPR10_BANK0", "FPR11_BANK0",
92 "FPR12_BANK0", "FPR13_BANK0", "FPR14_BANK0", "FPR15_BANK0",
93 "FPR0_BANK1", "FPR1_BANK1", "FPR2_BANK1", "FPR3_BANK1",
94 "FPR4_BANK1", "FPR5_BANK1", "FPR6_BANK1", "FPR7_BANK1",
95 "FPR8_BANK1", "FPR9_BANK1", "FPR10_BANK1", "FPR11_BANK1",
96 "FPR12_BANK1", "FPR13_BANK1", "FPR14_BANK1", "FPR15_BANK1",
102 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
104 for (i
= 0; i
< 24; i
++)
105 cpu_gregs
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
106 offsetof(CPUSH4State
, gregs
[i
]),
109 cpu_pc
= tcg_global_mem_new_i32(TCG_AREG0
,
110 offsetof(CPUSH4State
, pc
), "PC");
111 cpu_sr
= tcg_global_mem_new_i32(TCG_AREG0
,
112 offsetof(CPUSH4State
, sr
), "SR");
113 cpu_ssr
= tcg_global_mem_new_i32(TCG_AREG0
,
114 offsetof(CPUSH4State
, ssr
), "SSR");
115 cpu_spc
= tcg_global_mem_new_i32(TCG_AREG0
,
116 offsetof(CPUSH4State
, spc
), "SPC");
117 cpu_gbr
= tcg_global_mem_new_i32(TCG_AREG0
,
118 offsetof(CPUSH4State
, gbr
), "GBR");
119 cpu_vbr
= tcg_global_mem_new_i32(TCG_AREG0
,
120 offsetof(CPUSH4State
, vbr
), "VBR");
121 cpu_sgr
= tcg_global_mem_new_i32(TCG_AREG0
,
122 offsetof(CPUSH4State
, sgr
), "SGR");
123 cpu_dbr
= tcg_global_mem_new_i32(TCG_AREG0
,
124 offsetof(CPUSH4State
, dbr
), "DBR");
125 cpu_mach
= tcg_global_mem_new_i32(TCG_AREG0
,
126 offsetof(CPUSH4State
, mach
), "MACH");
127 cpu_macl
= tcg_global_mem_new_i32(TCG_AREG0
,
128 offsetof(CPUSH4State
, macl
), "MACL");
129 cpu_pr
= tcg_global_mem_new_i32(TCG_AREG0
,
130 offsetof(CPUSH4State
, pr
), "PR");
131 cpu_fpscr
= tcg_global_mem_new_i32(TCG_AREG0
,
132 offsetof(CPUSH4State
, fpscr
), "FPSCR");
133 cpu_fpul
= tcg_global_mem_new_i32(TCG_AREG0
,
134 offsetof(CPUSH4State
, fpul
), "FPUL");
136 cpu_flags
= tcg_global_mem_new_i32(TCG_AREG0
,
137 offsetof(CPUSH4State
, flags
), "_flags_");
138 cpu_delayed_pc
= tcg_global_mem_new_i32(TCG_AREG0
,
139 offsetof(CPUSH4State
, delayed_pc
),
141 cpu_ldst
= tcg_global_mem_new_i32(TCG_AREG0
,
142 offsetof(CPUSH4State
, ldst
), "_ldst_");
144 for (i
= 0; i
< 32; i
++)
145 cpu_fregs
[i
] = tcg_global_mem_new_i32(TCG_AREG0
,
146 offsetof(CPUSH4State
, fregs
[i
]),
149 /* register helpers */
156 void cpu_dump_state(CPUSH4State
* env
, FILE * f
,
157 int (*cpu_fprintf
) (FILE * f
, const char *fmt
, ...),
161 cpu_fprintf(f
, "pc=0x%08x sr=0x%08x pr=0x%08x fpscr=0x%08x\n",
162 env
->pc
, env
->sr
, env
->pr
, env
->fpscr
);
163 cpu_fprintf(f
, "spc=0x%08x ssr=0x%08x gbr=0x%08x vbr=0x%08x\n",
164 env
->spc
, env
->ssr
, env
->gbr
, env
->vbr
);
165 cpu_fprintf(f
, "sgr=0x%08x dbr=0x%08x delayed_pc=0x%08x fpul=0x%08x\n",
166 env
->sgr
, env
->dbr
, env
->delayed_pc
, env
->fpul
);
167 for (i
= 0; i
< 24; i
+= 4) {
168 cpu_fprintf(f
, "r%d=0x%08x r%d=0x%08x r%d=0x%08x r%d=0x%08x\n",
169 i
, env
->gregs
[i
], i
+ 1, env
->gregs
[i
+ 1],
170 i
+ 2, env
->gregs
[i
+ 2], i
+ 3, env
->gregs
[i
+ 3]);
172 if (env
->flags
& DELAY_SLOT
) {
173 cpu_fprintf(f
, "in delay slot (delayed_pc=0x%08x)\n",
175 } else if (env
->flags
& DELAY_SLOT_CONDITIONAL
) {
176 cpu_fprintf(f
, "in conditional delay slot (delayed_pc=0x%08x)\n",
181 void cpu_state_reset(CPUSH4State
*env
)
183 if (qemu_loglevel_mask(CPU_LOG_RESET
)) {
184 qemu_log("CPU Reset (CPU %d)\n", env
->cpu_index
);
185 log_cpu_state(env
, 0);
188 memset(env
, 0, offsetof(CPUSH4State
, breakpoints
));
191 env
->pc
= 0xA0000000;
192 #if defined(CONFIG_USER_ONLY)
193 env
->fpscr
= FPSCR_PR
; /* value for userspace according to the kernel */
194 set_float_rounding_mode(float_round_nearest_even
, &env
->fp_status
); /* ?! */
196 env
->sr
= SR_MD
| SR_RB
| SR_BL
| SR_I3
| SR_I2
| SR_I1
| SR_I0
;
197 env
->fpscr
= FPSCR_DN
| FPSCR_RM_ZERO
; /* CPU reset value according to SH4 manual */
198 set_float_rounding_mode(float_round_to_zero
, &env
->fp_status
);
199 set_flush_to_zero(1, &env
->fp_status
);
201 set_default_nan_mode(1, &env
->fp_status
);
213 static sh4_def_t sh4_defs
[] = {
216 .id
= SH_CPU_SH7750R
,
220 .features
= SH_FEATURE_BCR3_AND_BCR4
,
223 .id
= SH_CPU_SH7751R
,
226 .cvr
= 0x00110000, /* Neutered caches, should be 0x20480000 */
227 .features
= SH_FEATURE_BCR3_AND_BCR4
,
234 .features
= SH_FEATURE_SH4A
,
238 static const sh4_def_t
*cpu_sh4_find_by_name(const char *name
)
242 if (strcasecmp(name
, "any") == 0)
245 for (i
= 0; i
< ARRAY_SIZE(sh4_defs
); i
++)
246 if (strcasecmp(name
, sh4_defs
[i
].name
) == 0)
252 void sh4_cpu_list(FILE *f
, fprintf_function cpu_fprintf
)
256 for (i
= 0; i
< ARRAY_SIZE(sh4_defs
); i
++)
257 (*cpu_fprintf
)(f
, "%s\n", sh4_defs
[i
].name
);
260 static void cpu_register(CPUSH4State
*env
, const sh4_def_t
*def
)
268 CPUSH4State
*cpu_sh4_init(const char *cpu_model
)
272 const sh4_def_t
*def
;
274 def
= cpu_sh4_find_by_name(cpu_model
);
277 cpu
= SUPERH_CPU(object_new(TYPE_SUPERH_CPU
));
279 env
->features
= def
->features
;
281 env
->movcal_backup_tail
= &(env
->movcal_backup
);
282 sh4_translate_init();
283 env
->cpu_model_str
= cpu_model
;
284 cpu_state_reset(env
);
285 cpu_register(env
, def
);
290 static void gen_goto_tb(DisasContext
* ctx
, int n
, target_ulong dest
)
292 TranslationBlock
*tb
;
295 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
296 !ctx
->singlestep_enabled
) {
297 /* Use a direct jump if in same page and singlestep not enabled */
299 tcg_gen_movi_i32(cpu_pc
, dest
);
300 tcg_gen_exit_tb((tcg_target_long
)tb
+ n
);
302 tcg_gen_movi_i32(cpu_pc
, dest
);
303 if (ctx
->singlestep_enabled
)
309 static void gen_jump(DisasContext
* ctx
)
311 if (ctx
->delayed_pc
== (uint32_t) - 1) {
312 /* Target is not statically known, it comes necessarily from a
313 delayed jump as immediate jump are conditinal jumps */
314 tcg_gen_mov_i32(cpu_pc
, cpu_delayed_pc
);
315 if (ctx
->singlestep_enabled
)
319 gen_goto_tb(ctx
, 0, ctx
->delayed_pc
);
323 static inline void gen_branch_slot(uint32_t delayed_pc
, int t
)
326 int label
= gen_new_label();
327 tcg_gen_movi_i32(cpu_delayed_pc
, delayed_pc
);
329 tcg_gen_andi_i32(sr
, cpu_sr
, SR_T
);
330 tcg_gen_brcondi_i32(t
? TCG_COND_EQ
:TCG_COND_NE
, sr
, 0, label
);
331 tcg_gen_ori_i32(cpu_flags
, cpu_flags
, DELAY_SLOT_TRUE
);
332 gen_set_label(label
);
335 /* Immediate conditional jump (bt or bf) */
336 static void gen_conditional_jump(DisasContext
* ctx
,
337 target_ulong ift
, target_ulong ifnott
)
342 l1
= gen_new_label();
344 tcg_gen_andi_i32(sr
, cpu_sr
, SR_T
);
345 tcg_gen_brcondi_i32(TCG_COND_NE
, sr
, 0, l1
);
346 gen_goto_tb(ctx
, 0, ifnott
);
348 gen_goto_tb(ctx
, 1, ift
);
351 /* Delayed conditional jump (bt or bf) */
352 static void gen_delayed_conditional_jump(DisasContext
* ctx
)
357 l1
= gen_new_label();
359 tcg_gen_andi_i32(ds
, cpu_flags
, DELAY_SLOT_TRUE
);
360 tcg_gen_brcondi_i32(TCG_COND_NE
, ds
, 0, l1
);
361 gen_goto_tb(ctx
, 1, ctx
->pc
+ 2);
363 tcg_gen_andi_i32(cpu_flags
, cpu_flags
, ~DELAY_SLOT_TRUE
);
367 static inline void gen_set_t(void)
369 tcg_gen_ori_i32(cpu_sr
, cpu_sr
, SR_T
);
372 static inline void gen_clr_t(void)
374 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_T
);
377 static inline void gen_cmp(int cond
, TCGv t0
, TCGv t1
)
382 tcg_gen_setcond_i32(cond
, t
, t1
, t0
);
383 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_T
);
384 tcg_gen_or_i32(cpu_sr
, cpu_sr
, t
);
389 static inline void gen_cmp_imm(int cond
, TCGv t0
, int32_t imm
)
394 tcg_gen_setcondi_i32(cond
, t
, t0
, imm
);
395 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_T
);
396 tcg_gen_or_i32(cpu_sr
, cpu_sr
, t
);
401 static inline void gen_store_flags(uint32_t flags
)
403 tcg_gen_andi_i32(cpu_flags
, cpu_flags
, DELAY_SLOT_TRUE
);
404 tcg_gen_ori_i32(cpu_flags
, cpu_flags
, flags
);
407 static inline void gen_copy_bit_i32(TCGv t0
, int p0
, TCGv t1
, int p1
)
409 TCGv tmp
= tcg_temp_new();
414 tcg_gen_andi_i32(tmp
, t1
, (1 << p1
));
415 tcg_gen_andi_i32(t0
, t0
, ~(1 << p0
));
417 tcg_gen_shri_i32(tmp
, tmp
, p1
- p0
);
419 tcg_gen_shli_i32(tmp
, tmp
, p0
- p1
);
420 tcg_gen_or_i32(t0
, t0
, tmp
);
425 static inline void gen_load_fpr64(TCGv_i64 t
, int reg
)
427 tcg_gen_concat_i32_i64(t
, cpu_fregs
[reg
+ 1], cpu_fregs
[reg
]);
430 static inline void gen_store_fpr64 (TCGv_i64 t
, int reg
)
432 TCGv_i32 tmp
= tcg_temp_new_i32();
433 tcg_gen_trunc_i64_i32(tmp
, t
);
434 tcg_gen_mov_i32(cpu_fregs
[reg
+ 1], tmp
);
435 tcg_gen_shri_i64(t
, t
, 32);
436 tcg_gen_trunc_i64_i32(tmp
, t
);
437 tcg_gen_mov_i32(cpu_fregs
[reg
], tmp
);
438 tcg_temp_free_i32(tmp
);
441 #define B3_0 (ctx->opcode & 0xf)
442 #define B6_4 ((ctx->opcode >> 4) & 0x7)
443 #define B7_4 ((ctx->opcode >> 4) & 0xf)
444 #define B7_0 (ctx->opcode & 0xff)
445 #define B7_0s ((int32_t) (int8_t) (ctx->opcode & 0xff))
446 #define B11_0s (ctx->opcode & 0x800 ? 0xfffff000 | (ctx->opcode & 0xfff) : \
447 (ctx->opcode & 0xfff))
448 #define B11_8 ((ctx->opcode >> 8) & 0xf)
449 #define B15_12 ((ctx->opcode >> 12) & 0xf)
451 #define REG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) == (SR_MD | SR_RB) ? \
452 (cpu_gregs[x + 16]) : (cpu_gregs[x]))
454 #define ALTREG(x) ((x) < 8 && (ctx->sr & (SR_MD | SR_RB)) != (SR_MD | SR_RB) \
455 ? (cpu_gregs[x + 16]) : (cpu_gregs[x]))
457 #define FREG(x) (ctx->fpscr & FPSCR_FR ? (x) ^ 0x10 : (x))
458 #define XHACK(x) ((((x) & 1 ) << 4) | ((x) & 0xe))
459 #define XREG(x) (ctx->fpscr & FPSCR_FR ? XHACK(x) ^ 0x10 : XHACK(x))
460 #define DREG(x) FREG(x) /* Assumes lsb of (x) is always 0 */
462 #define CHECK_NOT_DELAY_SLOT \
463 if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) \
465 gen_helper_raise_slot_illegal_instruction(); \
466 ctx->bstate = BS_EXCP; \
470 #define CHECK_PRIVILEGED \
471 if (IS_USER(ctx)) { \
472 if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
473 gen_helper_raise_slot_illegal_instruction(); \
475 gen_helper_raise_illegal_instruction(); \
477 ctx->bstate = BS_EXCP; \
481 #define CHECK_FPU_ENABLED \
482 if (ctx->flags & SR_FD) { \
483 if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) { \
484 gen_helper_raise_slot_fpu_disable(); \
486 gen_helper_raise_fpu_disable(); \
488 ctx->bstate = BS_EXCP; \
492 static void _decode_opc(DisasContext
* ctx
)
494 /* This code tries to make movcal emulation sufficiently
495 accurate for Linux purposes. This instruction writes
496 memory, and prior to that, always allocates a cache line.
497 It is used in two contexts:
498 - in memcpy, where data is copied in blocks, the first write
499 of to a block uses movca.l for performance.
500 - in arch/sh/mm/cache-sh4.c, movcal.l + ocbi combination is used
501 to flush the cache. Here, the data written by movcal.l is never
502 written to memory, and the data written is just bogus.
504 To simulate this, we simulate movcal.l, we store the value to memory,
505 but we also remember the previous content. If we see ocbi, we check
506 if movcal.l for that address was done previously. If so, the write should
507 not have hit the memory, so we restore the previous content.
508 When we see an instruction that is neither movca.l
509 nor ocbi, the previous content is discarded.
511 To optimize, we only try to flush stores when we're at the start of
512 TB, or if we already saw movca.l in this TB and did not flush stores
516 int opcode
= ctx
->opcode
& 0xf0ff;
517 if (opcode
!= 0x0093 /* ocbi */
518 && opcode
!= 0x00c3 /* movca.l */)
520 gen_helper_discard_movcal_backup ();
526 fprintf(stderr
, "Translating opcode 0x%04x\n", ctx
->opcode
);
529 switch (ctx
->opcode
) {
530 case 0x0019: /* div0u */
531 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~(SR_M
| SR_Q
| SR_T
));
533 case 0x000b: /* rts */
535 tcg_gen_mov_i32(cpu_delayed_pc
, cpu_pr
);
536 ctx
->flags
|= DELAY_SLOT
;
537 ctx
->delayed_pc
= (uint32_t) - 1;
539 case 0x0028: /* clrmac */
540 tcg_gen_movi_i32(cpu_mach
, 0);
541 tcg_gen_movi_i32(cpu_macl
, 0);
543 case 0x0048: /* clrs */
544 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_S
);
546 case 0x0008: /* clrt */
549 case 0x0038: /* ldtlb */
553 case 0x002b: /* rte */
556 tcg_gen_mov_i32(cpu_sr
, cpu_ssr
);
557 tcg_gen_mov_i32(cpu_delayed_pc
, cpu_spc
);
558 ctx
->flags
|= DELAY_SLOT
;
559 ctx
->delayed_pc
= (uint32_t) - 1;
561 case 0x0058: /* sets */
562 tcg_gen_ori_i32(cpu_sr
, cpu_sr
, SR_S
);
564 case 0x0018: /* sett */
567 case 0xfbfd: /* frchg */
568 tcg_gen_xori_i32(cpu_fpscr
, cpu_fpscr
, FPSCR_FR
);
569 ctx
->bstate
= BS_STOP
;
571 case 0xf3fd: /* fschg */
572 tcg_gen_xori_i32(cpu_fpscr
, cpu_fpscr
, FPSCR_SZ
);
573 ctx
->bstate
= BS_STOP
;
575 case 0x0009: /* nop */
577 case 0x001b: /* sleep */
579 gen_helper_sleep(tcg_const_i32(ctx
->pc
+ 2));
583 switch (ctx
->opcode
& 0xf000) {
584 case 0x1000: /* mov.l Rm,@(disp,Rn) */
586 TCGv addr
= tcg_temp_new();
587 tcg_gen_addi_i32(addr
, REG(B11_8
), B3_0
* 4);
588 tcg_gen_qemu_st32(REG(B7_4
), addr
, ctx
->memidx
);
592 case 0x5000: /* mov.l @(disp,Rm),Rn */
594 TCGv addr
= tcg_temp_new();
595 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
* 4);
596 tcg_gen_qemu_ld32s(REG(B11_8
), addr
, ctx
->memidx
);
600 case 0xe000: /* mov #imm,Rn */
601 tcg_gen_movi_i32(REG(B11_8
), B7_0s
);
603 case 0x9000: /* mov.w @(disp,PC),Rn */
605 TCGv addr
= tcg_const_i32(ctx
->pc
+ 4 + B7_0
* 2);
606 tcg_gen_qemu_ld16s(REG(B11_8
), addr
, ctx
->memidx
);
610 case 0xd000: /* mov.l @(disp,PC),Rn */
612 TCGv addr
= tcg_const_i32((ctx
->pc
+ 4 + B7_0
* 4) & ~3);
613 tcg_gen_qemu_ld32s(REG(B11_8
), addr
, ctx
->memidx
);
617 case 0x7000: /* add #imm,Rn */
618 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), B7_0s
);
620 case 0xa000: /* bra disp */
622 ctx
->delayed_pc
= ctx
->pc
+ 4 + B11_0s
* 2;
623 tcg_gen_movi_i32(cpu_delayed_pc
, ctx
->delayed_pc
);
624 ctx
->flags
|= DELAY_SLOT
;
626 case 0xb000: /* bsr disp */
628 tcg_gen_movi_i32(cpu_pr
, ctx
->pc
+ 4);
629 ctx
->delayed_pc
= ctx
->pc
+ 4 + B11_0s
* 2;
630 tcg_gen_movi_i32(cpu_delayed_pc
, ctx
->delayed_pc
);
631 ctx
->flags
|= DELAY_SLOT
;
635 switch (ctx
->opcode
& 0xf00f) {
636 case 0x6003: /* mov Rm,Rn */
637 tcg_gen_mov_i32(REG(B11_8
), REG(B7_4
));
639 case 0x2000: /* mov.b Rm,@Rn */
640 tcg_gen_qemu_st8(REG(B7_4
), REG(B11_8
), ctx
->memidx
);
642 case 0x2001: /* mov.w Rm,@Rn */
643 tcg_gen_qemu_st16(REG(B7_4
), REG(B11_8
), ctx
->memidx
);
645 case 0x2002: /* mov.l Rm,@Rn */
646 tcg_gen_qemu_st32(REG(B7_4
), REG(B11_8
), ctx
->memidx
);
648 case 0x6000: /* mov.b @Rm,Rn */
649 tcg_gen_qemu_ld8s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
651 case 0x6001: /* mov.w @Rm,Rn */
652 tcg_gen_qemu_ld16s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
654 case 0x6002: /* mov.l @Rm,Rn */
655 tcg_gen_qemu_ld32s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
657 case 0x2004: /* mov.b Rm,@-Rn */
659 TCGv addr
= tcg_temp_new();
660 tcg_gen_subi_i32(addr
, REG(B11_8
), 1);
661 tcg_gen_qemu_st8(REG(B7_4
), addr
, ctx
->memidx
); /* might cause re-execution */
662 tcg_gen_mov_i32(REG(B11_8
), addr
); /* modify register status */
666 case 0x2005: /* mov.w Rm,@-Rn */
668 TCGv addr
= tcg_temp_new();
669 tcg_gen_subi_i32(addr
, REG(B11_8
), 2);
670 tcg_gen_qemu_st16(REG(B7_4
), addr
, ctx
->memidx
);
671 tcg_gen_mov_i32(REG(B11_8
), addr
);
675 case 0x2006: /* mov.l Rm,@-Rn */
677 TCGv addr
= tcg_temp_new();
678 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
679 tcg_gen_qemu_st32(REG(B7_4
), addr
, ctx
->memidx
);
680 tcg_gen_mov_i32(REG(B11_8
), addr
);
683 case 0x6004: /* mov.b @Rm+,Rn */
684 tcg_gen_qemu_ld8s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
686 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 1);
688 case 0x6005: /* mov.w @Rm+,Rn */
689 tcg_gen_qemu_ld16s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
691 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 2);
693 case 0x6006: /* mov.l @Rm+,Rn */
694 tcg_gen_qemu_ld32s(REG(B11_8
), REG(B7_4
), ctx
->memidx
);
696 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 4);
698 case 0x0004: /* mov.b Rm,@(R0,Rn) */
700 TCGv addr
= tcg_temp_new();
701 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
702 tcg_gen_qemu_st8(REG(B7_4
), addr
, ctx
->memidx
);
706 case 0x0005: /* mov.w Rm,@(R0,Rn) */
708 TCGv addr
= tcg_temp_new();
709 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
710 tcg_gen_qemu_st16(REG(B7_4
), addr
, ctx
->memidx
);
714 case 0x0006: /* mov.l Rm,@(R0,Rn) */
716 TCGv addr
= tcg_temp_new();
717 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
718 tcg_gen_qemu_st32(REG(B7_4
), addr
, ctx
->memidx
);
722 case 0x000c: /* mov.b @(R0,Rm),Rn */
724 TCGv addr
= tcg_temp_new();
725 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
726 tcg_gen_qemu_ld8s(REG(B11_8
), addr
, ctx
->memidx
);
730 case 0x000d: /* mov.w @(R0,Rm),Rn */
732 TCGv addr
= tcg_temp_new();
733 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
734 tcg_gen_qemu_ld16s(REG(B11_8
), addr
, ctx
->memidx
);
738 case 0x000e: /* mov.l @(R0,Rm),Rn */
740 TCGv addr
= tcg_temp_new();
741 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
742 tcg_gen_qemu_ld32s(REG(B11_8
), addr
, ctx
->memidx
);
746 case 0x6008: /* swap.b Rm,Rn */
749 high
= tcg_temp_new();
750 tcg_gen_andi_i32(high
, REG(B7_4
), 0xffff0000);
751 low
= tcg_temp_new();
752 tcg_gen_ext16u_i32(low
, REG(B7_4
));
753 tcg_gen_bswap16_i32(low
, low
);
754 tcg_gen_or_i32(REG(B11_8
), high
, low
);
759 case 0x6009: /* swap.w Rm,Rn */
762 high
= tcg_temp_new();
763 tcg_gen_shli_i32(high
, REG(B7_4
), 16);
764 low
= tcg_temp_new();
765 tcg_gen_shri_i32(low
, REG(B7_4
), 16);
766 tcg_gen_ext16u_i32(low
, low
);
767 tcg_gen_or_i32(REG(B11_8
), high
, low
);
772 case 0x200d: /* xtrct Rm,Rn */
775 high
= tcg_temp_new();
776 tcg_gen_shli_i32(high
, REG(B7_4
), 16);
777 low
= tcg_temp_new();
778 tcg_gen_shri_i32(low
, REG(B11_8
), 16);
779 tcg_gen_ext16u_i32(low
, low
);
780 tcg_gen_or_i32(REG(B11_8
), high
, low
);
785 case 0x300c: /* add Rm,Rn */
786 tcg_gen_add_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
788 case 0x300e: /* addc Rm,Rn */
789 gen_helper_addc(REG(B11_8
), REG(B7_4
), REG(B11_8
));
791 case 0x300f: /* addv Rm,Rn */
792 gen_helper_addv(REG(B11_8
), REG(B7_4
), REG(B11_8
));
794 case 0x2009: /* and Rm,Rn */
795 tcg_gen_and_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
797 case 0x3000: /* cmp/eq Rm,Rn */
798 gen_cmp(TCG_COND_EQ
, REG(B7_4
), REG(B11_8
));
800 case 0x3003: /* cmp/ge Rm,Rn */
801 gen_cmp(TCG_COND_GE
, REG(B7_4
), REG(B11_8
));
803 case 0x3007: /* cmp/gt Rm,Rn */
804 gen_cmp(TCG_COND_GT
, REG(B7_4
), REG(B11_8
));
806 case 0x3006: /* cmp/hi Rm,Rn */
807 gen_cmp(TCG_COND_GTU
, REG(B7_4
), REG(B11_8
));
809 case 0x3002: /* cmp/hs Rm,Rn */
810 gen_cmp(TCG_COND_GEU
, REG(B7_4
), REG(B11_8
));
812 case 0x200c: /* cmp/str Rm,Rn */
814 TCGv cmp1
= tcg_temp_new();
815 TCGv cmp2
= tcg_temp_new();
816 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_T
);
817 tcg_gen_xor_i32(cmp1
, REG(B7_4
), REG(B11_8
));
818 tcg_gen_andi_i32(cmp2
, cmp1
, 0xff000000);
819 tcg_gen_setcondi_i32(TCG_COND_EQ
, cmp2
, cmp2
, 0);
820 tcg_gen_or_i32(cpu_sr
, cpu_sr
, cmp2
);
821 tcg_gen_andi_i32(cmp2
, cmp1
, 0x00ff0000);
822 tcg_gen_setcondi_i32(TCG_COND_EQ
, cmp2
, cmp2
, 0);
823 tcg_gen_or_i32(cpu_sr
, cpu_sr
, cmp2
);
824 tcg_gen_andi_i32(cmp2
, cmp1
, 0x0000ff00);
825 tcg_gen_setcondi_i32(TCG_COND_EQ
, cmp2
, cmp2
, 0);
826 tcg_gen_or_i32(cpu_sr
, cpu_sr
, cmp2
);
827 tcg_gen_andi_i32(cmp2
, cmp1
, 0x000000ff);
828 tcg_gen_setcondi_i32(TCG_COND_EQ
, cmp2
, cmp2
, 0);
829 tcg_gen_or_i32(cpu_sr
, cpu_sr
, cmp2
);
834 case 0x2007: /* div0s Rm,Rn */
836 gen_copy_bit_i32(cpu_sr
, 8, REG(B11_8
), 31); /* SR_Q */
837 gen_copy_bit_i32(cpu_sr
, 9, REG(B7_4
), 31); /* SR_M */
838 TCGv val
= tcg_temp_new();
839 tcg_gen_xor_i32(val
, REG(B7_4
), REG(B11_8
));
840 gen_copy_bit_i32(cpu_sr
, 0, val
, 31); /* SR_T */
844 case 0x3004: /* div1 Rm,Rn */
845 gen_helper_div1(REG(B11_8
), REG(B7_4
), REG(B11_8
));
847 case 0x300d: /* dmuls.l Rm,Rn */
849 TCGv_i64 tmp1
= tcg_temp_new_i64();
850 TCGv_i64 tmp2
= tcg_temp_new_i64();
852 tcg_gen_ext_i32_i64(tmp1
, REG(B7_4
));
853 tcg_gen_ext_i32_i64(tmp2
, REG(B11_8
));
854 tcg_gen_mul_i64(tmp1
, tmp1
, tmp2
);
855 tcg_gen_trunc_i64_i32(cpu_macl
, tmp1
);
856 tcg_gen_shri_i64(tmp1
, tmp1
, 32);
857 tcg_gen_trunc_i64_i32(cpu_mach
, tmp1
);
859 tcg_temp_free_i64(tmp2
);
860 tcg_temp_free_i64(tmp1
);
863 case 0x3005: /* dmulu.l Rm,Rn */
865 TCGv_i64 tmp1
= tcg_temp_new_i64();
866 TCGv_i64 tmp2
= tcg_temp_new_i64();
868 tcg_gen_extu_i32_i64(tmp1
, REG(B7_4
));
869 tcg_gen_extu_i32_i64(tmp2
, REG(B11_8
));
870 tcg_gen_mul_i64(tmp1
, tmp1
, tmp2
);
871 tcg_gen_trunc_i64_i32(cpu_macl
, tmp1
);
872 tcg_gen_shri_i64(tmp1
, tmp1
, 32);
873 tcg_gen_trunc_i64_i32(cpu_mach
, tmp1
);
875 tcg_temp_free_i64(tmp2
);
876 tcg_temp_free_i64(tmp1
);
879 case 0x600e: /* exts.b Rm,Rn */
880 tcg_gen_ext8s_i32(REG(B11_8
), REG(B7_4
));
882 case 0x600f: /* exts.w Rm,Rn */
883 tcg_gen_ext16s_i32(REG(B11_8
), REG(B7_4
));
885 case 0x600c: /* extu.b Rm,Rn */
886 tcg_gen_ext8u_i32(REG(B11_8
), REG(B7_4
));
888 case 0x600d: /* extu.w Rm,Rn */
889 tcg_gen_ext16u_i32(REG(B11_8
), REG(B7_4
));
891 case 0x000f: /* mac.l @Rm+,@Rn+ */
894 arg0
= tcg_temp_new();
895 tcg_gen_qemu_ld32s(arg0
, REG(B7_4
), ctx
->memidx
);
896 arg1
= tcg_temp_new();
897 tcg_gen_qemu_ld32s(arg1
, REG(B11_8
), ctx
->memidx
);
898 gen_helper_macl(arg0
, arg1
);
901 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 4);
902 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
905 case 0x400f: /* mac.w @Rm+,@Rn+ */
908 arg0
= tcg_temp_new();
909 tcg_gen_qemu_ld32s(arg0
, REG(B7_4
), ctx
->memidx
);
910 arg1
= tcg_temp_new();
911 tcg_gen_qemu_ld32s(arg1
, REG(B11_8
), ctx
->memidx
);
912 gen_helper_macw(arg0
, arg1
);
915 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 2);
916 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 2);
919 case 0x0007: /* mul.l Rm,Rn */
920 tcg_gen_mul_i32(cpu_macl
, REG(B7_4
), REG(B11_8
));
922 case 0x200f: /* muls.w Rm,Rn */
925 arg0
= tcg_temp_new();
926 tcg_gen_ext16s_i32(arg0
, REG(B7_4
));
927 arg1
= tcg_temp_new();
928 tcg_gen_ext16s_i32(arg1
, REG(B11_8
));
929 tcg_gen_mul_i32(cpu_macl
, arg0
, arg1
);
934 case 0x200e: /* mulu.w Rm,Rn */
937 arg0
= tcg_temp_new();
938 tcg_gen_ext16u_i32(arg0
, REG(B7_4
));
939 arg1
= tcg_temp_new();
940 tcg_gen_ext16u_i32(arg1
, REG(B11_8
));
941 tcg_gen_mul_i32(cpu_macl
, arg0
, arg1
);
946 case 0x600b: /* neg Rm,Rn */
947 tcg_gen_neg_i32(REG(B11_8
), REG(B7_4
));
949 case 0x600a: /* negc Rm,Rn */
953 tcg_gen_neg_i32(t0
, REG(B7_4
));
955 tcg_gen_andi_i32(t1
, cpu_sr
, SR_T
);
956 tcg_gen_sub_i32(REG(B11_8
), t0
, t1
);
957 tcg_gen_andi_i32(cpu_sr
, cpu_sr
, ~SR_T
);
958 tcg_gen_setcondi_i32(TCG_COND_GTU
, t1
, t0
, 0);
959 tcg_gen_or_i32(cpu_sr
, cpu_sr
, t1
);
960 tcg_gen_setcond_i32(TCG_COND_GTU
, t1
, REG(B11_8
), t0
);
961 tcg_gen_or_i32(cpu_sr
, cpu_sr
, t1
);
966 case 0x6007: /* not Rm,Rn */
967 tcg_gen_not_i32(REG(B11_8
), REG(B7_4
));
969 case 0x200b: /* or Rm,Rn */
970 tcg_gen_or_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
972 case 0x400c: /* shad Rm,Rn */
974 int label1
= gen_new_label();
975 int label2
= gen_new_label();
976 int label3
= gen_new_label();
977 int label4
= gen_new_label();
979 tcg_gen_brcondi_i32(TCG_COND_LT
, REG(B7_4
), 0, label1
);
980 /* Rm positive, shift to the left */
981 shift
= tcg_temp_new();
982 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
983 tcg_gen_shl_i32(REG(B11_8
), REG(B11_8
), shift
);
984 tcg_temp_free(shift
);
986 /* Rm negative, shift to the right */
987 gen_set_label(label1
);
988 shift
= tcg_temp_new();
989 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
990 tcg_gen_brcondi_i32(TCG_COND_EQ
, shift
, 0, label2
);
991 tcg_gen_not_i32(shift
, REG(B7_4
));
992 tcg_gen_andi_i32(shift
, shift
, 0x1f);
993 tcg_gen_addi_i32(shift
, shift
, 1);
994 tcg_gen_sar_i32(REG(B11_8
), REG(B11_8
), shift
);
995 tcg_temp_free(shift
);
998 gen_set_label(label2
);
999 tcg_gen_brcondi_i32(TCG_COND_LT
, REG(B11_8
), 0, label3
);
1000 tcg_gen_movi_i32(REG(B11_8
), 0);
1002 gen_set_label(label3
);
1003 tcg_gen_movi_i32(REG(B11_8
), 0xffffffff);
1004 gen_set_label(label4
);
1007 case 0x400d: /* shld Rm,Rn */
1009 int label1
= gen_new_label();
1010 int label2
= gen_new_label();
1011 int label3
= gen_new_label();
1013 tcg_gen_brcondi_i32(TCG_COND_LT
, REG(B7_4
), 0, label1
);
1014 /* Rm positive, shift to the left */
1015 shift
= tcg_temp_new();
1016 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
1017 tcg_gen_shl_i32(REG(B11_8
), REG(B11_8
), shift
);
1018 tcg_temp_free(shift
);
1020 /* Rm negative, shift to the right */
1021 gen_set_label(label1
);
1022 shift
= tcg_temp_new();
1023 tcg_gen_andi_i32(shift
, REG(B7_4
), 0x1f);
1024 tcg_gen_brcondi_i32(TCG_COND_EQ
, shift
, 0, label2
);
1025 tcg_gen_not_i32(shift
, REG(B7_4
));
1026 tcg_gen_andi_i32(shift
, shift
, 0x1f);
1027 tcg_gen_addi_i32(shift
, shift
, 1);
1028 tcg_gen_shr_i32(REG(B11_8
), REG(B11_8
), shift
);
1029 tcg_temp_free(shift
);
1032 gen_set_label(label2
);
1033 tcg_gen_movi_i32(REG(B11_8
), 0);
1034 gen_set_label(label3
);
1037 case 0x3008: /* sub Rm,Rn */
1038 tcg_gen_sub_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
1040 case 0x300a: /* subc Rm,Rn */
1041 gen_helper_subc(REG(B11_8
), REG(B7_4
), REG(B11_8
));
1043 case 0x300b: /* subv Rm,Rn */
1044 gen_helper_subv(REG(B11_8
), REG(B7_4
), REG(B11_8
));
1046 case 0x2008: /* tst Rm,Rn */
1048 TCGv val
= tcg_temp_new();
1049 tcg_gen_and_i32(val
, REG(B7_4
), REG(B11_8
));
1050 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1054 case 0x200a: /* xor Rm,Rn */
1055 tcg_gen_xor_i32(REG(B11_8
), REG(B11_8
), REG(B7_4
));
1057 case 0xf00c: /* fmov {F,D,X}Rm,{F,D,X}Rn - FPSCR: Nothing */
1059 if (ctx
->fpscr
& FPSCR_SZ
) {
1060 TCGv_i64 fp
= tcg_temp_new_i64();
1061 gen_load_fpr64(fp
, XREG(B7_4
));
1062 gen_store_fpr64(fp
, XREG(B11_8
));
1063 tcg_temp_free_i64(fp
);
1065 tcg_gen_mov_i32(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1068 case 0xf00a: /* fmov {F,D,X}Rm,@Rn - FPSCR: Nothing */
1070 if (ctx
->fpscr
& FPSCR_SZ
) {
1071 TCGv addr_hi
= tcg_temp_new();
1072 int fr
= XREG(B7_4
);
1073 tcg_gen_addi_i32(addr_hi
, REG(B11_8
), 4);
1074 tcg_gen_qemu_st32(cpu_fregs
[fr
], REG(B11_8
), ctx
->memidx
);
1075 tcg_gen_qemu_st32(cpu_fregs
[fr
+1], addr_hi
, ctx
->memidx
);
1076 tcg_temp_free(addr_hi
);
1078 tcg_gen_qemu_st32(cpu_fregs
[FREG(B7_4
)], REG(B11_8
), ctx
->memidx
);
1081 case 0xf008: /* fmov @Rm,{F,D,X}Rn - FPSCR: Nothing */
1083 if (ctx
->fpscr
& FPSCR_SZ
) {
1084 TCGv addr_hi
= tcg_temp_new();
1085 int fr
= XREG(B11_8
);
1086 tcg_gen_addi_i32(addr_hi
, REG(B7_4
), 4);
1087 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], REG(B7_4
), ctx
->memidx
);
1088 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr_hi
, ctx
->memidx
);
1089 tcg_temp_free(addr_hi
);
1091 tcg_gen_qemu_ld32u(cpu_fregs
[FREG(B11_8
)], REG(B7_4
), ctx
->memidx
);
1094 case 0xf009: /* fmov @Rm+,{F,D,X}Rn - FPSCR: Nothing */
1096 if (ctx
->fpscr
& FPSCR_SZ
) {
1097 TCGv addr_hi
= tcg_temp_new();
1098 int fr
= XREG(B11_8
);
1099 tcg_gen_addi_i32(addr_hi
, REG(B7_4
), 4);
1100 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], REG(B7_4
), ctx
->memidx
);
1101 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr_hi
, ctx
->memidx
);
1102 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 8);
1103 tcg_temp_free(addr_hi
);
1105 tcg_gen_qemu_ld32u(cpu_fregs
[FREG(B11_8
)], REG(B7_4
), ctx
->memidx
);
1106 tcg_gen_addi_i32(REG(B7_4
), REG(B7_4
), 4);
1109 case 0xf00b: /* fmov {F,D,X}Rm,@-Rn - FPSCR: Nothing */
1111 if (ctx
->fpscr
& FPSCR_SZ
) {
1112 TCGv addr
= tcg_temp_new_i32();
1113 int fr
= XREG(B7_4
);
1114 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1115 tcg_gen_qemu_st32(cpu_fregs
[fr
+1], addr
, ctx
->memidx
);
1116 tcg_gen_subi_i32(addr
, addr
, 4);
1117 tcg_gen_qemu_st32(cpu_fregs
[fr
], addr
, ctx
->memidx
);
1118 tcg_gen_mov_i32(REG(B11_8
), addr
);
1119 tcg_temp_free(addr
);
1122 addr
= tcg_temp_new_i32();
1123 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1124 tcg_gen_qemu_st32(cpu_fregs
[FREG(B7_4
)], addr
, ctx
->memidx
);
1125 tcg_gen_mov_i32(REG(B11_8
), addr
);
1126 tcg_temp_free(addr
);
1129 case 0xf006: /* fmov @(R0,Rm),{F,D,X}Rm - FPSCR: Nothing */
1132 TCGv addr
= tcg_temp_new_i32();
1133 tcg_gen_add_i32(addr
, REG(B7_4
), REG(0));
1134 if (ctx
->fpscr
& FPSCR_SZ
) {
1135 int fr
= XREG(B11_8
);
1136 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], addr
, ctx
->memidx
);
1137 tcg_gen_addi_i32(addr
, addr
, 4);
1138 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr
, ctx
->memidx
);
1140 tcg_gen_qemu_ld32u(cpu_fregs
[FREG(B11_8
)], addr
, ctx
->memidx
);
1142 tcg_temp_free(addr
);
1145 case 0xf007: /* fmov {F,D,X}Rn,@(R0,Rn) - FPSCR: Nothing */
1148 TCGv addr
= tcg_temp_new();
1149 tcg_gen_add_i32(addr
, REG(B11_8
), REG(0));
1150 if (ctx
->fpscr
& FPSCR_SZ
) {
1151 int fr
= XREG(B7_4
);
1152 tcg_gen_qemu_ld32u(cpu_fregs
[fr
], addr
, ctx
->memidx
);
1153 tcg_gen_addi_i32(addr
, addr
, 4);
1154 tcg_gen_qemu_ld32u(cpu_fregs
[fr
+1], addr
, ctx
->memidx
);
1156 tcg_gen_qemu_st32(cpu_fregs
[FREG(B7_4
)], addr
, ctx
->memidx
);
1158 tcg_temp_free(addr
);
1161 case 0xf000: /* fadd Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1162 case 0xf001: /* fsub Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1163 case 0xf002: /* fmul Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1164 case 0xf003: /* fdiv Rm,Rn - FPSCR: R[PR,Enable.O/U/I]/W[Cause,Flag] */
1165 case 0xf004: /* fcmp/eq Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1166 case 0xf005: /* fcmp/gt Rm,Rn - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1169 if (ctx
->fpscr
& FPSCR_PR
) {
1172 if (ctx
->opcode
& 0x0110)
1173 break; /* illegal instruction */
1174 fp0
= tcg_temp_new_i64();
1175 fp1
= tcg_temp_new_i64();
1176 gen_load_fpr64(fp0
, DREG(B11_8
));
1177 gen_load_fpr64(fp1
, DREG(B7_4
));
1178 switch (ctx
->opcode
& 0xf00f) {
1179 case 0xf000: /* fadd Rm,Rn */
1180 gen_helper_fadd_DT(fp0
, fp0
, fp1
);
1182 case 0xf001: /* fsub Rm,Rn */
1183 gen_helper_fsub_DT(fp0
, fp0
, fp1
);
1185 case 0xf002: /* fmul Rm,Rn */
1186 gen_helper_fmul_DT(fp0
, fp0
, fp1
);
1188 case 0xf003: /* fdiv Rm,Rn */
1189 gen_helper_fdiv_DT(fp0
, fp0
, fp1
);
1191 case 0xf004: /* fcmp/eq Rm,Rn */
1192 gen_helper_fcmp_eq_DT(fp0
, fp1
);
1194 case 0xf005: /* fcmp/gt Rm,Rn */
1195 gen_helper_fcmp_gt_DT(fp0
, fp1
);
1198 gen_store_fpr64(fp0
, DREG(B11_8
));
1199 tcg_temp_free_i64(fp0
);
1200 tcg_temp_free_i64(fp1
);
1202 switch (ctx
->opcode
& 0xf00f) {
1203 case 0xf000: /* fadd Rm,Rn */
1204 gen_helper_fadd_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1206 case 0xf001: /* fsub Rm,Rn */
1207 gen_helper_fsub_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1209 case 0xf002: /* fmul Rm,Rn */
1210 gen_helper_fmul_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1212 case 0xf003: /* fdiv Rm,Rn */
1213 gen_helper_fdiv_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1215 case 0xf004: /* fcmp/eq Rm,Rn */
1216 gen_helper_fcmp_eq_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1218 case 0xf005: /* fcmp/gt Rm,Rn */
1219 gen_helper_fcmp_gt_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B7_4
)]);
1225 case 0xf00e: /* fmac FR0,RM,Rn */
1228 if (ctx
->fpscr
& FPSCR_PR
) {
1229 break; /* illegal instruction */
1231 gen_helper_fmac_FT(cpu_fregs
[FREG(B11_8
)],
1232 cpu_fregs
[FREG(0)], cpu_fregs
[FREG(B7_4
)], cpu_fregs
[FREG(B11_8
)]);
1238 switch (ctx
->opcode
& 0xff00) {
1239 case 0xc900: /* and #imm,R0 */
1240 tcg_gen_andi_i32(REG(0), REG(0), B7_0
);
1242 case 0xcd00: /* and.b #imm,@(R0,GBR) */
1245 addr
= tcg_temp_new();
1246 tcg_gen_add_i32(addr
, REG(0), cpu_gbr
);
1247 val
= tcg_temp_new();
1248 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1249 tcg_gen_andi_i32(val
, val
, B7_0
);
1250 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1252 tcg_temp_free(addr
);
1255 case 0x8b00: /* bf label */
1256 CHECK_NOT_DELAY_SLOT
1257 gen_conditional_jump(ctx
, ctx
->pc
+ 2,
1258 ctx
->pc
+ 4 + B7_0s
* 2);
1259 ctx
->bstate
= BS_BRANCH
;
1261 case 0x8f00: /* bf/s label */
1262 CHECK_NOT_DELAY_SLOT
1263 gen_branch_slot(ctx
->delayed_pc
= ctx
->pc
+ 4 + B7_0s
* 2, 0);
1264 ctx
->flags
|= DELAY_SLOT_CONDITIONAL
;
1266 case 0x8900: /* bt label */
1267 CHECK_NOT_DELAY_SLOT
1268 gen_conditional_jump(ctx
, ctx
->pc
+ 4 + B7_0s
* 2,
1270 ctx
->bstate
= BS_BRANCH
;
1272 case 0x8d00: /* bt/s label */
1273 CHECK_NOT_DELAY_SLOT
1274 gen_branch_slot(ctx
->delayed_pc
= ctx
->pc
+ 4 + B7_0s
* 2, 1);
1275 ctx
->flags
|= DELAY_SLOT_CONDITIONAL
;
1277 case 0x8800: /* cmp/eq #imm,R0 */
1278 gen_cmp_imm(TCG_COND_EQ
, REG(0), B7_0s
);
1280 case 0xc400: /* mov.b @(disp,GBR),R0 */
1282 TCGv addr
= tcg_temp_new();
1283 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
);
1284 tcg_gen_qemu_ld8s(REG(0), addr
, ctx
->memidx
);
1285 tcg_temp_free(addr
);
1288 case 0xc500: /* mov.w @(disp,GBR),R0 */
1290 TCGv addr
= tcg_temp_new();
1291 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 2);
1292 tcg_gen_qemu_ld16s(REG(0), addr
, ctx
->memidx
);
1293 tcg_temp_free(addr
);
1296 case 0xc600: /* mov.l @(disp,GBR),R0 */
1298 TCGv addr
= tcg_temp_new();
1299 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 4);
1300 tcg_gen_qemu_ld32s(REG(0), addr
, ctx
->memidx
);
1301 tcg_temp_free(addr
);
1304 case 0xc000: /* mov.b R0,@(disp,GBR) */
1306 TCGv addr
= tcg_temp_new();
1307 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
);
1308 tcg_gen_qemu_st8(REG(0), addr
, ctx
->memidx
);
1309 tcg_temp_free(addr
);
1312 case 0xc100: /* mov.w R0,@(disp,GBR) */
1314 TCGv addr
= tcg_temp_new();
1315 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 2);
1316 tcg_gen_qemu_st16(REG(0), addr
, ctx
->memidx
);
1317 tcg_temp_free(addr
);
1320 case 0xc200: /* mov.l R0,@(disp,GBR) */
1322 TCGv addr
= tcg_temp_new();
1323 tcg_gen_addi_i32(addr
, cpu_gbr
, B7_0
* 4);
1324 tcg_gen_qemu_st32(REG(0), addr
, ctx
->memidx
);
1325 tcg_temp_free(addr
);
1328 case 0x8000: /* mov.b R0,@(disp,Rn) */
1330 TCGv addr
= tcg_temp_new();
1331 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
);
1332 tcg_gen_qemu_st8(REG(0), addr
, ctx
->memidx
);
1333 tcg_temp_free(addr
);
1336 case 0x8100: /* mov.w R0,@(disp,Rn) */
1338 TCGv addr
= tcg_temp_new();
1339 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
* 2);
1340 tcg_gen_qemu_st16(REG(0), addr
, ctx
->memidx
);
1341 tcg_temp_free(addr
);
1344 case 0x8400: /* mov.b @(disp,Rn),R0 */
1346 TCGv addr
= tcg_temp_new();
1347 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
);
1348 tcg_gen_qemu_ld8s(REG(0), addr
, ctx
->memidx
);
1349 tcg_temp_free(addr
);
1352 case 0x8500: /* mov.w @(disp,Rn),R0 */
1354 TCGv addr
= tcg_temp_new();
1355 tcg_gen_addi_i32(addr
, REG(B7_4
), B3_0
* 2);
1356 tcg_gen_qemu_ld16s(REG(0), addr
, ctx
->memidx
);
1357 tcg_temp_free(addr
);
1360 case 0xc700: /* mova @(disp,PC),R0 */
1361 tcg_gen_movi_i32(REG(0), ((ctx
->pc
& 0xfffffffc) + 4 + B7_0
* 4) & ~3);
1363 case 0xcb00: /* or #imm,R0 */
1364 tcg_gen_ori_i32(REG(0), REG(0), B7_0
);
1366 case 0xcf00: /* or.b #imm,@(R0,GBR) */
1369 addr
= tcg_temp_new();
1370 tcg_gen_add_i32(addr
, REG(0), cpu_gbr
);
1371 val
= tcg_temp_new();
1372 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1373 tcg_gen_ori_i32(val
, val
, B7_0
);
1374 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1376 tcg_temp_free(addr
);
1379 case 0xc300: /* trapa #imm */
1382 CHECK_NOT_DELAY_SLOT
1383 imm
= tcg_const_i32(B7_0
);
1384 gen_helper_trapa(imm
);
1386 ctx
->bstate
= BS_BRANCH
;
1389 case 0xc800: /* tst #imm,R0 */
1391 TCGv val
= tcg_temp_new();
1392 tcg_gen_andi_i32(val
, REG(0), B7_0
);
1393 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1397 case 0xcc00: /* tst.b #imm,@(R0,GBR) */
1399 TCGv val
= tcg_temp_new();
1400 tcg_gen_add_i32(val
, REG(0), cpu_gbr
);
1401 tcg_gen_qemu_ld8u(val
, val
, ctx
->memidx
);
1402 tcg_gen_andi_i32(val
, val
, B7_0
);
1403 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1407 case 0xca00: /* xor #imm,R0 */
1408 tcg_gen_xori_i32(REG(0), REG(0), B7_0
);
1410 case 0xce00: /* xor.b #imm,@(R0,GBR) */
1413 addr
= tcg_temp_new();
1414 tcg_gen_add_i32(addr
, REG(0), cpu_gbr
);
1415 val
= tcg_temp_new();
1416 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1417 tcg_gen_xori_i32(val
, val
, B7_0
);
1418 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1420 tcg_temp_free(addr
);
1425 switch (ctx
->opcode
& 0xf08f) {
1426 case 0x408e: /* ldc Rm,Rn_BANK */
1428 tcg_gen_mov_i32(ALTREG(B6_4
), REG(B11_8
));
1430 case 0x4087: /* ldc.l @Rm+,Rn_BANK */
1432 tcg_gen_qemu_ld32s(ALTREG(B6_4
), REG(B11_8
), ctx
->memidx
);
1433 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1435 case 0x0082: /* stc Rm_BANK,Rn */
1437 tcg_gen_mov_i32(REG(B11_8
), ALTREG(B6_4
));
1439 case 0x4083: /* stc.l Rm_BANK,@-Rn */
1442 TCGv addr
= tcg_temp_new();
1443 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1444 tcg_gen_qemu_st32(ALTREG(B6_4
), addr
, ctx
->memidx
);
1445 tcg_gen_mov_i32(REG(B11_8
), addr
);
1446 tcg_temp_free(addr
);
1451 switch (ctx
->opcode
& 0xf0ff) {
1452 case 0x0023: /* braf Rn */
1453 CHECK_NOT_DELAY_SLOT
1454 tcg_gen_addi_i32(cpu_delayed_pc
, REG(B11_8
), ctx
->pc
+ 4);
1455 ctx
->flags
|= DELAY_SLOT
;
1456 ctx
->delayed_pc
= (uint32_t) - 1;
1458 case 0x0003: /* bsrf Rn */
1459 CHECK_NOT_DELAY_SLOT
1460 tcg_gen_movi_i32(cpu_pr
, ctx
->pc
+ 4);
1461 tcg_gen_add_i32(cpu_delayed_pc
, REG(B11_8
), cpu_pr
);
1462 ctx
->flags
|= DELAY_SLOT
;
1463 ctx
->delayed_pc
= (uint32_t) - 1;
1465 case 0x4015: /* cmp/pl Rn */
1466 gen_cmp_imm(TCG_COND_GT
, REG(B11_8
), 0);
1468 case 0x4011: /* cmp/pz Rn */
1469 gen_cmp_imm(TCG_COND_GE
, REG(B11_8
), 0);
1471 case 0x4010: /* dt Rn */
1472 tcg_gen_subi_i32(REG(B11_8
), REG(B11_8
), 1);
1473 gen_cmp_imm(TCG_COND_EQ
, REG(B11_8
), 0);
1475 case 0x402b: /* jmp @Rn */
1476 CHECK_NOT_DELAY_SLOT
1477 tcg_gen_mov_i32(cpu_delayed_pc
, REG(B11_8
));
1478 ctx
->flags
|= DELAY_SLOT
;
1479 ctx
->delayed_pc
= (uint32_t) - 1;
1481 case 0x400b: /* jsr @Rn */
1482 CHECK_NOT_DELAY_SLOT
1483 tcg_gen_movi_i32(cpu_pr
, ctx
->pc
+ 4);
1484 tcg_gen_mov_i32(cpu_delayed_pc
, REG(B11_8
));
1485 ctx
->flags
|= DELAY_SLOT
;
1486 ctx
->delayed_pc
= (uint32_t) - 1;
1488 case 0x400e: /* ldc Rm,SR */
1490 tcg_gen_andi_i32(cpu_sr
, REG(B11_8
), 0x700083f3);
1491 ctx
->bstate
= BS_STOP
;
1493 case 0x4007: /* ldc.l @Rm+,SR */
1496 TCGv val
= tcg_temp_new();
1497 tcg_gen_qemu_ld32s(val
, REG(B11_8
), ctx
->memidx
);
1498 tcg_gen_andi_i32(cpu_sr
, val
, 0x700083f3);
1500 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1501 ctx
->bstate
= BS_STOP
;
1504 case 0x0002: /* stc SR,Rn */
1506 tcg_gen_mov_i32(REG(B11_8
), cpu_sr
);
1508 case 0x4003: /* stc SR,@-Rn */
1511 TCGv addr
= tcg_temp_new();
1512 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1513 tcg_gen_qemu_st32(cpu_sr
, addr
, ctx
->memidx
);
1514 tcg_gen_mov_i32(REG(B11_8
), addr
);
1515 tcg_temp_free(addr
);
1518 #define LD(reg,ldnum,ldpnum,prechk) \
1521 tcg_gen_mov_i32 (cpu_##reg, REG(B11_8)); \
1525 tcg_gen_qemu_ld32s (cpu_##reg, REG(B11_8), ctx->memidx); \
1526 tcg_gen_addi_i32(REG(B11_8), REG(B11_8), 4); \
1528 #define ST(reg,stnum,stpnum,prechk) \
1531 tcg_gen_mov_i32 (REG(B11_8), cpu_##reg); \
1536 TCGv addr = tcg_temp_new(); \
1537 tcg_gen_subi_i32(addr, REG(B11_8), 4); \
1538 tcg_gen_qemu_st32 (cpu_##reg, addr, ctx->memidx); \
1539 tcg_gen_mov_i32(REG(B11_8), addr); \
1540 tcg_temp_free(addr); \
1543 #define LDST(reg,ldnum,ldpnum,stnum,stpnum,prechk) \
1544 LD(reg,ldnum,ldpnum,prechk) \
1545 ST(reg,stnum,stpnum,prechk)
1546 LDST(gbr
, 0x401e, 0x4017, 0x0012, 0x4013, {})
1547 LDST(vbr
, 0x402e, 0x4027, 0x0022, 0x4023, CHECK_PRIVILEGED
)
1548 LDST(ssr
, 0x403e, 0x4037, 0x0032, 0x4033, CHECK_PRIVILEGED
)
1549 LDST(spc
, 0x404e, 0x4047, 0x0042, 0x4043, CHECK_PRIVILEGED
)
1550 ST(sgr
, 0x003a, 0x4032, CHECK_PRIVILEGED
)
1551 LD(sgr
, 0x403a, 0x4036, CHECK_PRIVILEGED
if (!(ctx
->features
& SH_FEATURE_SH4A
)) break;)
1552 LDST(dbr
, 0x40fa, 0x40f6, 0x00fa, 0x40f2, CHECK_PRIVILEGED
)
1553 LDST(mach
, 0x400a, 0x4006, 0x000a, 0x4002, {})
1554 LDST(macl
, 0x401a, 0x4016, 0x001a, 0x4012, {})
1555 LDST(pr
, 0x402a, 0x4026, 0x002a, 0x4022, {})
1556 LDST(fpul
, 0x405a, 0x4056, 0x005a, 0x4052, {CHECK_FPU_ENABLED
})
1557 case 0x406a: /* lds Rm,FPSCR */
1559 gen_helper_ld_fpscr(REG(B11_8
));
1560 ctx
->bstate
= BS_STOP
;
1562 case 0x4066: /* lds.l @Rm+,FPSCR */
1565 TCGv addr
= tcg_temp_new();
1566 tcg_gen_qemu_ld32s(addr
, REG(B11_8
), ctx
->memidx
);
1567 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1568 gen_helper_ld_fpscr(addr
);
1569 tcg_temp_free(addr
);
1570 ctx
->bstate
= BS_STOP
;
1573 case 0x006a: /* sts FPSCR,Rn */
1575 tcg_gen_andi_i32(REG(B11_8
), cpu_fpscr
, 0x003fffff);
1577 case 0x4062: /* sts FPSCR,@-Rn */
1581 val
= tcg_temp_new();
1582 tcg_gen_andi_i32(val
, cpu_fpscr
, 0x003fffff);
1583 addr
= tcg_temp_new();
1584 tcg_gen_subi_i32(addr
, REG(B11_8
), 4);
1585 tcg_gen_qemu_st32(val
, addr
, ctx
->memidx
);
1586 tcg_gen_mov_i32(REG(B11_8
), addr
);
1587 tcg_temp_free(addr
);
1591 case 0x00c3: /* movca.l R0,@Rm */
1593 TCGv val
= tcg_temp_new();
1594 tcg_gen_qemu_ld32u(val
, REG(B11_8
), ctx
->memidx
);
1595 gen_helper_movcal (REG(B11_8
), val
);
1596 tcg_gen_qemu_st32(REG(0), REG(B11_8
), ctx
->memidx
);
1598 ctx
->has_movcal
= 1;
1601 /* MOVUA.L @Rm,R0 (Rm) -> R0
1602 Load non-boundary-aligned data */
1603 tcg_gen_qemu_ld32u(REG(0), REG(B11_8
), ctx
->memidx
);
1606 /* MOVUA.L @Rm+,R0 (Rm) -> R0, Rm + 4 -> Rm
1607 Load non-boundary-aligned data */
1608 tcg_gen_qemu_ld32u(REG(0), REG(B11_8
), ctx
->memidx
);
1609 tcg_gen_addi_i32(REG(B11_8
), REG(B11_8
), 4);
1611 case 0x0029: /* movt Rn */
1612 tcg_gen_andi_i32(REG(B11_8
), cpu_sr
, SR_T
);
1617 If (T == 1) R0 -> (Rn)
1620 if (ctx
->features
& SH_FEATURE_SH4A
) {
1621 int label
= gen_new_label();
1623 tcg_gen_or_i32(cpu_sr
, cpu_sr
, cpu_ldst
);
1624 tcg_gen_brcondi_i32(TCG_COND_EQ
, cpu_ldst
, 0, label
);
1625 tcg_gen_qemu_st32(REG(0), REG(B11_8
), ctx
->memidx
);
1626 gen_set_label(label
);
1627 tcg_gen_movi_i32(cpu_ldst
, 0);
1635 When interrupt/exception
1638 if (ctx
->features
& SH_FEATURE_SH4A
) {
1639 tcg_gen_movi_i32(cpu_ldst
, 0);
1640 tcg_gen_qemu_ld32s(REG(0), REG(B11_8
), ctx
->memidx
);
1641 tcg_gen_movi_i32(cpu_ldst
, 1);
1645 case 0x0093: /* ocbi @Rn */
1647 gen_helper_ocbi (REG(B11_8
));
1650 case 0x00a3: /* ocbp @Rn */
1651 case 0x00b3: /* ocbwb @Rn */
1652 /* These instructions are supposed to do nothing in case of
1653 a cache miss. Given that we only partially emulate caches
1654 it is safe to simply ignore them. */
1656 case 0x0083: /* pref @Rn */
1658 case 0x00d3: /* prefi @Rn */
1659 if (ctx
->features
& SH_FEATURE_SH4A
)
1663 case 0x00e3: /* icbi @Rn */
1664 if (ctx
->features
& SH_FEATURE_SH4A
)
1668 case 0x00ab: /* synco */
1669 if (ctx
->features
& SH_FEATURE_SH4A
)
1673 case 0x4024: /* rotcl Rn */
1675 TCGv tmp
= tcg_temp_new();
1676 tcg_gen_mov_i32(tmp
, cpu_sr
);
1677 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 31);
1678 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 1);
1679 gen_copy_bit_i32(REG(B11_8
), 0, tmp
, 0);
1683 case 0x4025: /* rotcr Rn */
1685 TCGv tmp
= tcg_temp_new();
1686 tcg_gen_mov_i32(tmp
, cpu_sr
);
1687 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1688 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 1);
1689 gen_copy_bit_i32(REG(B11_8
), 31, tmp
, 0);
1693 case 0x4004: /* rotl Rn */
1694 tcg_gen_rotli_i32(REG(B11_8
), REG(B11_8
), 1);
1695 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1697 case 0x4005: /* rotr Rn */
1698 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1699 tcg_gen_rotri_i32(REG(B11_8
), REG(B11_8
), 1);
1701 case 0x4000: /* shll Rn */
1702 case 0x4020: /* shal Rn */
1703 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 31);
1704 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 1);
1706 case 0x4021: /* shar Rn */
1707 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1708 tcg_gen_sari_i32(REG(B11_8
), REG(B11_8
), 1);
1710 case 0x4001: /* shlr Rn */
1711 gen_copy_bit_i32(cpu_sr
, 0, REG(B11_8
), 0);
1712 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 1);
1714 case 0x4008: /* shll2 Rn */
1715 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 2);
1717 case 0x4018: /* shll8 Rn */
1718 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 8);
1720 case 0x4028: /* shll16 Rn */
1721 tcg_gen_shli_i32(REG(B11_8
), REG(B11_8
), 16);
1723 case 0x4009: /* shlr2 Rn */
1724 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 2);
1726 case 0x4019: /* shlr8 Rn */
1727 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 8);
1729 case 0x4029: /* shlr16 Rn */
1730 tcg_gen_shri_i32(REG(B11_8
), REG(B11_8
), 16);
1732 case 0x401b: /* tas.b @Rn */
1735 addr
= tcg_temp_local_new();
1736 tcg_gen_mov_i32(addr
, REG(B11_8
));
1737 val
= tcg_temp_local_new();
1738 tcg_gen_qemu_ld8u(val
, addr
, ctx
->memidx
);
1739 gen_cmp_imm(TCG_COND_EQ
, val
, 0);
1740 tcg_gen_ori_i32(val
, val
, 0x80);
1741 tcg_gen_qemu_st8(val
, addr
, ctx
->memidx
);
1743 tcg_temp_free(addr
);
1746 case 0xf00d: /* fsts FPUL,FRn - FPSCR: Nothing */
1748 tcg_gen_mov_i32(cpu_fregs
[FREG(B11_8
)], cpu_fpul
);
1750 case 0xf01d: /* flds FRm,FPUL - FPSCR: Nothing */
1752 tcg_gen_mov_i32(cpu_fpul
, cpu_fregs
[FREG(B11_8
)]);
1754 case 0xf02d: /* float FPUL,FRn/DRn - FPSCR: R[PR,Enable.I]/W[Cause,Flag] */
1756 if (ctx
->fpscr
& FPSCR_PR
) {
1758 if (ctx
->opcode
& 0x0100)
1759 break; /* illegal instruction */
1760 fp
= tcg_temp_new_i64();
1761 gen_helper_float_DT(fp
, cpu_fpul
);
1762 gen_store_fpr64(fp
, DREG(B11_8
));
1763 tcg_temp_free_i64(fp
);
1766 gen_helper_float_FT(cpu_fregs
[FREG(B11_8
)], cpu_fpul
);
1769 case 0xf03d: /* ftrc FRm/DRm,FPUL - FPSCR: R[PR,Enable.V]/W[Cause,Flag] */
1771 if (ctx
->fpscr
& FPSCR_PR
) {
1773 if (ctx
->opcode
& 0x0100)
1774 break; /* illegal instruction */
1775 fp
= tcg_temp_new_i64();
1776 gen_load_fpr64(fp
, DREG(B11_8
));
1777 gen_helper_ftrc_DT(cpu_fpul
, fp
);
1778 tcg_temp_free_i64(fp
);
1781 gen_helper_ftrc_FT(cpu_fpul
, cpu_fregs
[FREG(B11_8
)]);
1784 case 0xf04d: /* fneg FRn/DRn - FPSCR: Nothing */
1787 gen_helper_fneg_T(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)]);
1790 case 0xf05d: /* fabs FRn/DRn */
1792 if (ctx
->fpscr
& FPSCR_PR
) {
1793 if (ctx
->opcode
& 0x0100)
1794 break; /* illegal instruction */
1795 TCGv_i64 fp
= tcg_temp_new_i64();
1796 gen_load_fpr64(fp
, DREG(B11_8
));
1797 gen_helper_fabs_DT(fp
, fp
);
1798 gen_store_fpr64(fp
, DREG(B11_8
));
1799 tcg_temp_free_i64(fp
);
1801 gen_helper_fabs_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)]);
1804 case 0xf06d: /* fsqrt FRn */
1806 if (ctx
->fpscr
& FPSCR_PR
) {
1807 if (ctx
->opcode
& 0x0100)
1808 break; /* illegal instruction */
1809 TCGv_i64 fp
= tcg_temp_new_i64();
1810 gen_load_fpr64(fp
, DREG(B11_8
));
1811 gen_helper_fsqrt_DT(fp
, fp
);
1812 gen_store_fpr64(fp
, DREG(B11_8
));
1813 tcg_temp_free_i64(fp
);
1815 gen_helper_fsqrt_FT(cpu_fregs
[FREG(B11_8
)], cpu_fregs
[FREG(B11_8
)]);
1818 case 0xf07d: /* fsrra FRn */
1821 case 0xf08d: /* fldi0 FRn - FPSCR: R[PR] */
1823 if (!(ctx
->fpscr
& FPSCR_PR
)) {
1824 tcg_gen_movi_i32(cpu_fregs
[FREG(B11_8
)], 0);
1827 case 0xf09d: /* fldi1 FRn - FPSCR: R[PR] */
1829 if (!(ctx
->fpscr
& FPSCR_PR
)) {
1830 tcg_gen_movi_i32(cpu_fregs
[FREG(B11_8
)], 0x3f800000);
1833 case 0xf0ad: /* fcnvsd FPUL,DRn */
1836 TCGv_i64 fp
= tcg_temp_new_i64();
1837 gen_helper_fcnvsd_FT_DT(fp
, cpu_fpul
);
1838 gen_store_fpr64(fp
, DREG(B11_8
));
1839 tcg_temp_free_i64(fp
);
1842 case 0xf0bd: /* fcnvds DRn,FPUL */
1845 TCGv_i64 fp
= tcg_temp_new_i64();
1846 gen_load_fpr64(fp
, DREG(B11_8
));
1847 gen_helper_fcnvds_DT_FT(cpu_fpul
, fp
);
1848 tcg_temp_free_i64(fp
);
1851 case 0xf0ed: /* fipr FVm,FVn */
1853 if ((ctx
->fpscr
& FPSCR_PR
) == 0) {
1855 m
= tcg_const_i32((ctx
->opcode
>> 8) & 3);
1856 n
= tcg_const_i32((ctx
->opcode
>> 10) & 3);
1857 gen_helper_fipr(m
, n
);
1863 case 0xf0fd: /* ftrv XMTRX,FVn */
1865 if ((ctx
->opcode
& 0x0300) == 0x0100 &&
1866 (ctx
->fpscr
& FPSCR_PR
) == 0) {
1868 n
= tcg_const_i32((ctx
->opcode
>> 10) & 3);
1876 fprintf(stderr
, "unknown instruction 0x%04x at pc 0x%08x\n",
1877 ctx
->opcode
, ctx
->pc
);
1880 if (ctx
->flags
& (DELAY_SLOT
| DELAY_SLOT_CONDITIONAL
)) {
1881 gen_helper_raise_slot_illegal_instruction();
1883 gen_helper_raise_illegal_instruction();
1885 ctx
->bstate
= BS_EXCP
;
/* Translate a single guest instruction and perform SH4 delay-slot
   bookkeeping around it.  _decode_opc() emits the TCG ops for the
   instruction itself; this wrapper detects whether we just translated
   the instruction *inside* a delay slot (old_flags) or whether the
   instruction we translated *opened* a new delay slot (ctx->flags),
   and stores the flag state the next TB needs in either case.  */
static void decode_opc(DisasContext * ctx)
{
    /* Snapshot the flags before translation: _decode_opc() mutates
       ctx->flags when the instruction is a delayed branch.  */
    uint32_t old_flags = ctx->flags;

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP))) {
        tcg_gen_debug_insn_start(ctx->pc);
    }

    _decode_opc(ctx);

    if (old_flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL)) {
        /* The instruction just translated sat in a delay slot: the
           pending (possibly conditional) branch is now taken.  */
        if (ctx->flags & DELAY_SLOT_CLEARME) {
            gen_store_flags(0);
        } else {
            /* go out of the delay slot */
            uint32_t new_flags = ctx->flags;
            new_flags &= ~(DELAY_SLOT | DELAY_SLOT_CONDITIONAL);
            gen_store_flags(new_flags);
        }
        ctx->flags = 0;
        /* A taken delayed branch always terminates the TB.  */
        ctx->bstate = BS_BRANCH;
        if (old_flags & DELAY_SLOT_CONDITIONAL) {
            gen_delayed_conditional_jump(ctx);
        } else if (old_flags & DELAY_SLOT) {
            gen_jump(ctx);
        }

    }

    /* go into a delay slot: persist the delay-slot flags so that an
       exception taken inside the slot can restore them.  */
    if (ctx->flags & (DELAY_SLOT | DELAY_SLOT_CONDITIONAL))
        gen_store_flags(ctx->flags);
}
/* Translate a block of guest code starting at tb->pc into TCG ops.
   When search_pc is non-zero we are re-translating an existing TB in
   order to rebuild the pc/flags side tables (gen_opc_pc,
   gen_opc_hflags, gen_opc_instr_start, gen_opc_icount) used by
   restore_state_to_opc() after an exception; no host code is kept.  */
static inline void
gen_intermediate_code_internal(CPUSH4State * env, TranslationBlock * tb,
                               int search_pc)
{
    DisasContext ctx;
    target_ulong pc_start;
    static uint16_t *gen_opc_end;
    CPUBreakpoint *bp;
    int i, ii;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.pc = pc_start;
    ctx.flags = (uint32_t)tb->flags;
    ctx.bstate = BS_NONE;
    ctx.sr = env->sr;
    ctx.fpscr = env->fpscr;
    /* memidx 0 is privileged mode, 1 is user mode (SR.MD clear).  */
    ctx.memidx = (env->sr & SR_MD) == 0 ? 1 : 0;
    /* We don't know if the delayed pc came from a dynamic or static branch,
       so assume it is a dynamic branch.  */
    ctx.delayed_pc = -1; /* use delayed pc from env pointer */
    ctx.tb = tb;
    ctx.singlestep_enabled = env->singlestep_enabled;
    ctx.features = env->features;
    ctx.has_movcal = (tb->flags & TB_FLAG_PENDING_MOVCA);

    ii = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    gen_icount_start();
    while (ctx.bstate == BS_NONE && gen_opc_ptr < gen_opc_end) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (ctx.pc == bp->pc) {
                    /* We have hit a breakpoint - make sure PC is up-to-date */
                    tcg_gen_movi_i32(cpu_pc, ctx.pc);
                    gen_helper_debug();
                    ctx.bstate = BS_EXCP;
                    break;
                }
            }
        }
        if (search_pc) {
            /* Record the mapping from opc index to guest pc/flags.  */
            i = gen_opc_ptr - gen_opc_buf;
            if (ii < i) {
                ii++;
                while (ii < i)
                    gen_opc_instr_start[ii++] = 0;
            }
            gen_opc_pc[ii] = ctx.pc;
            gen_opc_hflags[ii] = ctx.flags;
            gen_opc_instr_start[ii] = 1;
            gen_opc_icount[ii] = num_insns;
        }
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
#if 0
        fprintf(stderr, "Loading opcode at address 0x%08x\n", ctx.pc);
        fflush(stderr);
#endif
        ctx.opcode = lduw_code(ctx.pc);
        decode_opc(&ctx);
        num_insns++;
        ctx.pc += 2;
        /* Stop at guest page boundaries so a TB never spans two pages.  */
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        if (env->singlestep_enabled)
            break;
        if (num_insns >= max_insns)
            break;
        if (singlestep)
            break;
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (env->singlestep_enabled) {
        /* Raise a debug exception after every instruction.  */
        tcg_gen_movi_i32(cpu_pc, ctx.pc);
        gen_helper_debug();
    } else {
        switch (ctx.bstate) {
        case BS_STOP:
            /* gen_op_interrupt_restart(); */
            /* fall through */
        case BS_NONE:
            if (ctx.flags) {
                gen_store_flags(ctx.flags | DELAY_SLOT_CLEARME);
            }
            gen_goto_tb(&ctx, 0, ctx.pc);
            break;
        case BS_EXCP:
            /* gen_op_interrupt_restart(); */
            tcg_gen_exit_tb(0);
            break;
        case BS_BRANCH:
        default:
            /* The branch code already exited the TB.  */
            break;
        }
    }

    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        /* Zero-fill the tail of the instr_start side table.  */
        i = gen_opc_ptr - gen_opc_buf;
        ii++;
        while (ii <= i)
            gen_opc_instr_start[ii++] = 0;
    } else {
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
    }

#ifdef DEBUG_DISAS
#ifdef SH4_DEBUG_DISAS
    qemu_log_mask(CPU_LOG_TB_IN_ASM, "\n");
#endif
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN:\n");      /* , lookup_symbol(pc_start)); */
        log_target_disas(pc_start, ctx.pc - pc_start, 0);
        qemu_log("\n");
    }
#endif
}
/* Public entry point: translate a TB for execution (no pc side tables).  */
void gen_intermediate_code(CPUSH4State * env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
/* Public entry point: re-translate a TB only to rebuild the pc side
   tables used when restoring CPU state after an exception.  */
void gen_intermediate_code_pc(CPUSH4State * env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
/* Restore the CPU pc and translator flags for the opc at index pc_pos,
   using the side tables filled in by gen_intermediate_code_pc().  */
void restore_state_to_opc(CPUSH4State *env, TranslationBlock *tb, int pc_pos)
{
    env->pc = gen_opc_pc[pc_pos];
    env->flags = gen_opc_hflags[pc_pos];
}