4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 /* #define DEBUG_INLINE_BRANCHES */
22 #define S390X_DEBUG_DISAS
23 /* #define S390X_DEBUG_DISAS_VERBOSE */
25 #ifdef S390X_DEBUG_DISAS_VERBOSE
26 # define LOG_DISAS(...) qemu_log(__VA_ARGS__)
28 # define LOG_DISAS(...) do { } while (0)
32 #include "disas/disas.h"
36 /* global register indexes */
37 static TCGv_ptr cpu_env
;
39 #include "exec/gen-icount.h"
45 /* Information that (most) every instruction needs to manipulate. */
46 typedef struct DisasContext DisasContext
;
47 typedef struct DisasInsn DisasInsn
;
48 typedef struct DisasFields DisasFields
;
51 struct TranslationBlock
*tb
;
52 const DisasInsn
*insn
;
56 bool singlestep_enabled
;
60 /* Information carried about a condition to be evaluated. */
67 struct { TCGv_i64 a
, b
; } s64
;
68 struct { TCGv_i32 a
, b
; } s32
;
74 static void gen_op_calc_cc(DisasContext
*s
);
76 #ifdef DEBUG_INLINE_BRANCHES
77 static uint64_t inline_branch_hit
[CC_OP_MAX
];
78 static uint64_t inline_branch_miss
[CC_OP_MAX
];
/* Trace the raw instruction word (no-op unless verbose disas is enabled). */
static inline void debug_insn(uint64_t insn)
{
    LOG_DISAS("insn: 0x%" PRIx64 "\n", insn);
}
86 static inline uint64_t pc_to_link_info(DisasContext
*s
, uint64_t pc
)
88 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
89 if (s
->tb
->flags
& FLAG_MASK_32
) {
90 return pc
| 0x80000000;
96 void cpu_dump_state(CPUS390XState
*env
, FILE *f
, fprintf_function cpu_fprintf
,
101 if (env
->cc_op
> 3) {
102 cpu_fprintf(f
, "PSW=mask %016" PRIx64
" addr %016" PRIx64
" cc %15s\n",
103 env
->psw
.mask
, env
->psw
.addr
, cc_name(env
->cc_op
));
105 cpu_fprintf(f
, "PSW=mask %016" PRIx64
" addr %016" PRIx64
" cc %02x\n",
106 env
->psw
.mask
, env
->psw
.addr
, env
->cc_op
);
109 for (i
= 0; i
< 16; i
++) {
110 cpu_fprintf(f
, "R%02d=%016" PRIx64
, i
, env
->regs
[i
]);
112 cpu_fprintf(f
, "\n");
118 for (i
= 0; i
< 16; i
++) {
119 cpu_fprintf(f
, "F%02d=%016" PRIx64
, i
, env
->fregs
[i
].ll
);
121 cpu_fprintf(f
, "\n");
127 #ifndef CONFIG_USER_ONLY
128 for (i
= 0; i
< 16; i
++) {
129 cpu_fprintf(f
, "C%02d=%016" PRIx64
, i
, env
->cregs
[i
]);
131 cpu_fprintf(f
, "\n");
138 #ifdef DEBUG_INLINE_BRANCHES
139 for (i
= 0; i
< CC_OP_MAX
; i
++) {
140 cpu_fprintf(f
, " %15s = %10ld\t%10ld\n", cc_name(i
),
141 inline_branch_miss
[i
], inline_branch_hit
[i
]);
145 cpu_fprintf(f
, "\n");
148 static TCGv_i64 psw_addr
;
149 static TCGv_i64 psw_mask
;
151 static TCGv_i32 cc_op
;
152 static TCGv_i64 cc_src
;
153 static TCGv_i64 cc_dst
;
154 static TCGv_i64 cc_vr
;
156 static char cpu_reg_names
[32][4];
157 static TCGv_i64 regs
[16];
158 static TCGv_i64 fregs
[16];
160 static uint8_t gen_opc_cc_op
[OPC_BUF_SIZE
];
162 void s390x_translate_init(void)
166 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
167 psw_addr
= tcg_global_mem_new_i64(TCG_AREG0
,
168 offsetof(CPUS390XState
, psw
.addr
),
170 psw_mask
= tcg_global_mem_new_i64(TCG_AREG0
,
171 offsetof(CPUS390XState
, psw
.mask
),
174 cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUS390XState
, cc_op
),
176 cc_src
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUS390XState
, cc_src
),
178 cc_dst
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUS390XState
, cc_dst
),
180 cc_vr
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUS390XState
, cc_vr
),
183 for (i
= 0; i
< 16; i
++) {
184 snprintf(cpu_reg_names
[i
], sizeof(cpu_reg_names
[0]), "r%d", i
);
185 regs
[i
] = tcg_global_mem_new(TCG_AREG0
,
186 offsetof(CPUS390XState
, regs
[i
]),
190 for (i
= 0; i
< 16; i
++) {
191 snprintf(cpu_reg_names
[i
+ 16], sizeof(cpu_reg_names
[0]), "f%d", i
);
192 fregs
[i
] = tcg_global_mem_new(TCG_AREG0
,
193 offsetof(CPUS390XState
, fregs
[i
].d
),
194 cpu_reg_names
[i
+ 16]);
197 /* register helpers */
202 static inline TCGv_i64
load_reg(int reg
)
204 TCGv_i64 r
= tcg_temp_new_i64();
205 tcg_gen_mov_i64(r
, regs
[reg
]);
209 static inline TCGv_i64
load_freg(int reg
)
211 TCGv_i64 r
= tcg_temp_new_i64();
212 tcg_gen_mov_i64(r
, fregs
[reg
]);
216 static inline TCGv_i32
load_freg32(int reg
)
218 TCGv_i32 r
= tcg_temp_new_i32();
219 #if HOST_LONG_BITS == 32
220 tcg_gen_mov_i32(r
, TCGV_HIGH(fregs
[reg
]));
222 tcg_gen_shri_i64(MAKE_TCGV_I64(GET_TCGV_I32(r
)), fregs
[reg
], 32);
227 static inline TCGv_i32
load_reg32(int reg
)
229 TCGv_i32 r
= tcg_temp_new_i32();
230 tcg_gen_trunc_i64_i32(r
, regs
[reg
]);
234 static inline TCGv_i64
load_reg32_i64(int reg
)
236 TCGv_i64 r
= tcg_temp_new_i64();
237 tcg_gen_ext32s_i64(r
, regs
[reg
]);
241 static inline void store_reg(int reg
, TCGv_i64 v
)
243 tcg_gen_mov_i64(regs
[reg
], v
);
246 static inline void store_freg(int reg
, TCGv_i64 v
)
248 tcg_gen_mov_i64(fregs
[reg
], v
);
251 static inline void store_reg32(int reg
, TCGv_i32 v
)
253 /* 32 bit register writes keep the upper half */
254 #if HOST_LONG_BITS == 32
255 tcg_gen_mov_i32(TCGV_LOW(regs
[reg
]), v
);
257 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
],
258 MAKE_TCGV_I64(GET_TCGV_I32(v
)), 0, 32);
262 static inline void store_reg32_i64(int reg
, TCGv_i64 v
)
264 /* 32 bit register writes keep the upper half */
265 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], v
, 0, 32);
268 static inline void store_reg16(int reg
, TCGv_i32 v
)
270 /* 16 bit register writes keep the upper bytes */
271 #if HOST_LONG_BITS == 32
272 tcg_gen_deposit_i32(TCGV_LOW(regs
[reg
]), TCGV_LOW(regs
[reg
]), v
, 0, 16);
274 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
],
275 MAKE_TCGV_I64(GET_TCGV_I32(v
)), 0, 16);
279 static inline void store_reg8(int reg
, TCGv_i64 v
)
281 /* 8 bit register writes keep the upper bytes */
282 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], v
, 0, 8);
285 static inline void store_freg32(int reg
, TCGv_i32 v
)
287 /* 32 bit register writes keep the lower half */
288 #if HOST_LONG_BITS == 32
289 tcg_gen_mov_i32(TCGV_HIGH(fregs
[reg
]), v
);
291 tcg_gen_deposit_i64(fregs
[reg
], fregs
[reg
],
292 MAKE_TCGV_I64(GET_TCGV_I32(v
)), 32, 32);
296 static inline void update_psw_addr(DisasContext
*s
)
299 tcg_gen_movi_i64(psw_addr
, s
->pc
);
302 static inline void potential_page_fault(DisasContext
*s
)
304 #ifndef CONFIG_USER_ONLY
310 static inline uint64_t ld_code2(CPUS390XState
*env
, uint64_t pc
)
312 return (uint64_t)cpu_lduw_code(env
, pc
);
315 static inline uint64_t ld_code4(CPUS390XState
*env
, uint64_t pc
)
317 return (uint64_t)(uint32_t)cpu_ldl_code(env
, pc
);
320 static inline uint64_t ld_code6(CPUS390XState
*env
, uint64_t pc
)
322 return (ld_code2(env
, pc
) << 32) | ld_code4(env
, pc
+ 2);
325 static inline int get_mem_index(DisasContext
*s
)
327 switch (s
->tb
->flags
& FLAG_MASK_ASC
) {
328 case PSW_ASC_PRIMARY
>> 32:
330 case PSW_ASC_SECONDARY
>> 32:
332 case PSW_ASC_HOME
>> 32:
340 static void gen_exception(int excp
)
342 TCGv_i32 tmp
= tcg_const_i32(excp
);
343 gen_helper_exception(cpu_env
, tmp
);
344 tcg_temp_free_i32(tmp
);
347 static void gen_program_exception(DisasContext
*s
, int code
)
351 /* Remember what pgm exeption this was. */
352 tmp
= tcg_const_i32(code
);
353 tcg_gen_st_i32(tmp
, cpu_env
, offsetof(CPUS390XState
, int_pgm_code
));
354 tcg_temp_free_i32(tmp
);
356 tmp
= tcg_const_i32(s
->next_pc
- s
->pc
);
357 tcg_gen_st_i32(tmp
, cpu_env
, offsetof(CPUS390XState
, int_pgm_ilen
));
358 tcg_temp_free_i32(tmp
);
360 /* Advance past instruction. */
367 /* Trigger exception. */
368 gen_exception(EXCP_PGM
);
371 s
->is_jmp
= DISAS_EXCP
;
374 static inline void gen_illegal_opcode(DisasContext
*s
)
376 gen_program_exception(s
, PGM_SPECIFICATION
);
379 static inline void check_privileged(DisasContext
*s
)
381 if (s
->tb
->flags
& (PSW_MASK_PSTATE
>> 32)) {
382 gen_program_exception(s
, PGM_PRIVILEGED
);
386 static TCGv_i64
get_address(DisasContext
*s
, int x2
, int b2
, int d2
)
390 /* 31-bitify the immediate part; register contents are dealt with below */
391 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
397 tmp
= tcg_const_i64(d2
);
398 tcg_gen_add_i64(tmp
, tmp
, regs
[x2
]);
403 tcg_gen_add_i64(tmp
, tmp
, regs
[b2
]);
407 tmp
= tcg_const_i64(d2
);
408 tcg_gen_add_i64(tmp
, tmp
, regs
[b2
]);
413 tmp
= tcg_const_i64(d2
);
416 /* 31-bit mode mask if there are values loaded from registers */
417 if (!(s
->tb
->flags
& FLAG_MASK_64
) && (x2
|| b2
)) {
418 tcg_gen_andi_i64(tmp
, tmp
, 0x7fffffffUL
);
424 static void gen_op_movi_cc(DisasContext
*s
, uint32_t val
)
426 s
->cc_op
= CC_OP_CONST0
+ val
;
429 static void gen_op_update1_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 dst
)
431 tcg_gen_discard_i64(cc_src
);
432 tcg_gen_mov_i64(cc_dst
, dst
);
433 tcg_gen_discard_i64(cc_vr
);
437 static void gen_op_update1_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 dst
)
439 tcg_gen_discard_i64(cc_src
);
440 tcg_gen_extu_i32_i64(cc_dst
, dst
);
441 tcg_gen_discard_i64(cc_vr
);
445 static void gen_op_update2_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 src
,
448 tcg_gen_mov_i64(cc_src
, src
);
449 tcg_gen_mov_i64(cc_dst
, dst
);
450 tcg_gen_discard_i64(cc_vr
);
454 static void gen_op_update2_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 src
,
457 tcg_gen_extu_i32_i64(cc_src
, src
);
458 tcg_gen_extu_i32_i64(cc_dst
, dst
);
459 tcg_gen_discard_i64(cc_vr
);
463 static void gen_op_update3_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 src
,
464 TCGv_i64 dst
, TCGv_i64 vr
)
466 tcg_gen_mov_i64(cc_src
, src
);
467 tcg_gen_mov_i64(cc_dst
, dst
);
468 tcg_gen_mov_i64(cc_vr
, vr
);
472 static void gen_op_update3_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 src
,
473 TCGv_i32 dst
, TCGv_i32 vr
)
475 tcg_gen_extu_i32_i64(cc_src
, src
);
476 tcg_gen_extu_i32_i64(cc_dst
, dst
);
477 tcg_gen_extu_i32_i64(cc_vr
, vr
);
481 static inline void set_cc_nz_u32(DisasContext
*s
, TCGv_i32 val
)
483 gen_op_update1_cc_i32(s
, CC_OP_NZ
, val
);
486 static inline void set_cc_nz_u64(DisasContext
*s
, TCGv_i64 val
)
488 gen_op_update1_cc_i64(s
, CC_OP_NZ
, val
);
491 static inline void cmp_32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
494 gen_op_update2_cc_i32(s
, cond
, v1
, v2
);
497 static inline void cmp_64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
500 gen_op_update2_cc_i64(s
, cond
, v1
, v2
);
503 static inline void cmp_s32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
)
505 cmp_32(s
, v1
, v2
, CC_OP_LTGT_32
);
508 static inline void cmp_u32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
)
510 cmp_32(s
, v1
, v2
, CC_OP_LTUGTU_32
);
513 static inline void cmp_s32c(DisasContext
*s
, TCGv_i32 v1
, int32_t v2
)
515 /* XXX optimize for the constant? put it in s? */
516 TCGv_i32 tmp
= tcg_const_i32(v2
);
517 cmp_32(s
, v1
, tmp
, CC_OP_LTGT_32
);
518 tcg_temp_free_i32(tmp
);
521 static inline void cmp_u32c(DisasContext
*s
, TCGv_i32 v1
, uint32_t v2
)
523 TCGv_i32 tmp
= tcg_const_i32(v2
);
524 cmp_32(s
, v1
, tmp
, CC_OP_LTUGTU_32
);
525 tcg_temp_free_i32(tmp
);
528 static inline void cmp_s64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
)
530 cmp_64(s
, v1
, v2
, CC_OP_LTGT_64
);
533 static inline void cmp_u64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
)
535 cmp_64(s
, v1
, v2
, CC_OP_LTUGTU_64
);
538 static inline void cmp_s64c(DisasContext
*s
, TCGv_i64 v1
, int64_t v2
)
540 TCGv_i64 tmp
= tcg_const_i64(v2
);
542 tcg_temp_free_i64(tmp
);
545 static inline void cmp_u64c(DisasContext
*s
, TCGv_i64 v1
, uint64_t v2
)
547 TCGv_i64 tmp
= tcg_const_i64(v2
);
549 tcg_temp_free_i64(tmp
);
552 static inline void set_cc_s32(DisasContext
*s
, TCGv_i32 val
)
554 gen_op_update1_cc_i32(s
, CC_OP_LTGT0_32
, val
);
557 static inline void set_cc_s64(DisasContext
*s
, TCGv_i64 val
)
559 gen_op_update1_cc_i64(s
, CC_OP_LTGT0_64
, val
);
562 static void set_cc_addu64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
565 gen_op_update3_cc_i64(s
, CC_OP_ADDU_64
, v1
, v2
, vr
);
568 static void set_cc_abs64(DisasContext
*s
, TCGv_i64 v1
)
570 gen_op_update1_cc_i64(s
, CC_OP_ABS_64
, v1
);
573 static void set_cc_nabs64(DisasContext
*s
, TCGv_i64 v1
)
575 gen_op_update1_cc_i64(s
, CC_OP_NABS_64
, v1
);
578 static void set_cc_addu32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
581 gen_op_update3_cc_i32(s
, CC_OP_ADDU_32
, v1
, v2
, vr
);
584 static void set_cc_abs32(DisasContext
*s
, TCGv_i32 v1
)
586 gen_op_update1_cc_i32(s
, CC_OP_ABS_32
, v1
);
589 static void set_cc_nabs32(DisasContext
*s
, TCGv_i32 v1
)
591 gen_op_update1_cc_i32(s
, CC_OP_NABS_32
, v1
);
594 static void set_cc_comp32(DisasContext
*s
, TCGv_i32 v1
)
596 gen_op_update1_cc_i32(s
, CC_OP_COMP_32
, v1
);
599 static void set_cc_comp64(DisasContext
*s
, TCGv_i64 v1
)
601 gen_op_update1_cc_i64(s
, CC_OP_COMP_64
, v1
);
604 static void set_cc_icm(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
)
606 gen_op_update2_cc_i32(s
, CC_OP_ICM
, v1
, v2
);
609 static void set_cc_cmp_f32_i64(DisasContext
*s
, TCGv_i32 v1
, TCGv_i64 v2
)
611 tcg_gen_extu_i32_i64(cc_src
, v1
);
612 tcg_gen_mov_i64(cc_dst
, v2
);
613 tcg_gen_discard_i64(cc_vr
);
614 s
->cc_op
= CC_OP_LTGT_F32
;
617 static void gen_set_cc_nz_f32(DisasContext
*s
, TCGv_i32 v1
)
619 gen_op_update1_cc_i32(s
, CC_OP_NZ_F32
, v1
);
622 /* CC value is in env->cc_op */
623 static inline void set_cc_static(DisasContext
*s
)
625 tcg_gen_discard_i64(cc_src
);
626 tcg_gen_discard_i64(cc_dst
);
627 tcg_gen_discard_i64(cc_vr
);
628 s
->cc_op
= CC_OP_STATIC
;
631 static inline void gen_op_set_cc_op(DisasContext
*s
)
633 if (s
->cc_op
!= CC_OP_DYNAMIC
&& s
->cc_op
!= CC_OP_STATIC
) {
634 tcg_gen_movi_i32(cc_op
, s
->cc_op
);
638 static inline void gen_update_cc_op(DisasContext
*s
)
643 /* calculates cc into cc_op */
644 static void gen_op_calc_cc(DisasContext
*s
)
646 TCGv_i32 local_cc_op
= tcg_const_i32(s
->cc_op
);
647 TCGv_i64 dummy
= tcg_const_i64(0);
654 /* s->cc_op is the cc value */
655 tcg_gen_movi_i32(cc_op
, s
->cc_op
- CC_OP_CONST0
);
658 /* env->cc_op already is the cc value */
672 gen_helper_calc_cc(cc_op
, cpu_env
, local_cc_op
, dummy
, cc_dst
, dummy
);
677 case CC_OP_LTUGTU_32
:
678 case CC_OP_LTUGTU_64
:
685 gen_helper_calc_cc(cc_op
, cpu_env
, local_cc_op
, cc_src
, cc_dst
, dummy
);
696 gen_helper_calc_cc(cc_op
, cpu_env
, local_cc_op
, cc_src
, cc_dst
, cc_vr
);
699 /* unknown operation - assume 3 arguments and cc_op in env */
700 gen_helper_calc_cc(cc_op
, cpu_env
, cc_op
, cc_src
, cc_dst
, cc_vr
);
706 tcg_temp_free_i32(local_cc_op
);
707 tcg_temp_free_i64(dummy
);
709 /* We now have cc in cc_op as constant */
713 static inline void decode_rr(DisasContext
*s
, uint64_t insn
, int *r1
, int *r2
)
717 *r1
= (insn
>> 4) & 0xf;
721 static inline TCGv_i64
decode_rx(DisasContext
*s
, uint64_t insn
, int *r1
,
722 int *x2
, int *b2
, int *d2
)
726 *r1
= (insn
>> 20) & 0xf;
727 *x2
= (insn
>> 16) & 0xf;
728 *b2
= (insn
>> 12) & 0xf;
731 return get_address(s
, *x2
, *b2
, *d2
);
734 static inline void decode_rs(DisasContext
*s
, uint64_t insn
, int *r1
, int *r3
,
739 *r1
= (insn
>> 20) & 0xf;
741 *r3
= (insn
>> 16) & 0xf;
742 *b2
= (insn
>> 12) & 0xf;
746 static inline TCGv_i64
decode_si(DisasContext
*s
, uint64_t insn
, int *i2
,
751 *i2
= (insn
>> 16) & 0xff;
752 *b1
= (insn
>> 12) & 0xf;
755 return get_address(s
, 0, *b1
, *d1
);
758 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
, target_ulong pc
)
760 TranslationBlock
*tb
;
765 /* NOTE: we handle the case where the TB spans two pages here */
766 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) ||
767 (pc
& TARGET_PAGE_MASK
) == ((s
->pc
- 1) & TARGET_PAGE_MASK
)) {
768 /* jump to same page: we can use a direct jump */
769 tcg_gen_goto_tb(tb_num
);
770 tcg_gen_movi_i64(psw_addr
, pc
);
771 tcg_gen_exit_tb((tcg_target_long
)tb
+ tb_num
);
773 /* jump to another page: currently not optimized */
774 tcg_gen_movi_i64(psw_addr
, pc
);
779 static inline void account_noninline_branch(DisasContext
*s
, int cc_op
)
781 #ifdef DEBUG_INLINE_BRANCHES
782 inline_branch_miss
[cc_op
]++;
786 static inline void account_inline_branch(DisasContext
*s
, int cc_op
)
788 #ifdef DEBUG_INLINE_BRANCHES
789 inline_branch_hit
[cc_op
]++;
793 /* Table of mask values to comparison codes, given a comparison as input.
794 For a true comparison CC=3 will never be set, but we treat this
795 conservatively for possible use when CC=3 indicates overflow. */
796 static const TCGCond ltgt_cond
[16] = {
797 TCG_COND_NEVER
, TCG_COND_NEVER
, /* | | | x */
798 TCG_COND_GT
, TCG_COND_NEVER
, /* | | GT | x */
799 TCG_COND_LT
, TCG_COND_NEVER
, /* | LT | | x */
800 TCG_COND_NE
, TCG_COND_NEVER
, /* | LT | GT | x */
801 TCG_COND_EQ
, TCG_COND_NEVER
, /* EQ | | | x */
802 TCG_COND_GE
, TCG_COND_NEVER
, /* EQ | | GT | x */
803 TCG_COND_LE
, TCG_COND_NEVER
, /* EQ | LT | | x */
804 TCG_COND_ALWAYS
, TCG_COND_ALWAYS
, /* EQ | LT | GT | x */
807 /* Table of mask values to comparison codes, given a logic op as input.
808 For such, only CC=0 and CC=1 should be possible. */
809 static const TCGCond nz_cond
[16] = {
811 TCG_COND_NEVER
, TCG_COND_NEVER
, TCG_COND_NEVER
, TCG_COND_NEVER
,
813 TCG_COND_NE
, TCG_COND_NE
, TCG_COND_NE
, TCG_COND_NE
,
815 TCG_COND_EQ
, TCG_COND_EQ
, TCG_COND_EQ
, TCG_COND_EQ
,
816 /* EQ | NE | x | x */
817 TCG_COND_ALWAYS
, TCG_COND_ALWAYS
, TCG_COND_ALWAYS
, TCG_COND_ALWAYS
,
820 /* Interpret MASK in terms of S->CC_OP, and fill in C with all the
821 details required to generate a TCG comparison. */
822 static void disas_jcc(DisasContext
*s
, DisasCompare
*c
, uint32_t mask
)
825 enum cc_op old_cc_op
= s
->cc_op
;
827 if (mask
== 15 || mask
== 0) {
828 c
->cond
= (mask
? TCG_COND_ALWAYS
: TCG_COND_NEVER
);
831 c
->g1
= c
->g2
= true;
836 /* Find the TCG condition for the mask + cc op. */
842 cond
= ltgt_cond
[mask
];
843 if (cond
== TCG_COND_NEVER
) {
846 account_inline_branch(s
, old_cc_op
);
849 case CC_OP_LTUGTU_32
:
850 case CC_OP_LTUGTU_64
:
851 cond
= tcg_unsigned_cond(ltgt_cond
[mask
]);
852 if (cond
== TCG_COND_NEVER
) {
855 account_inline_branch(s
, old_cc_op
);
859 cond
= nz_cond
[mask
];
860 if (cond
== TCG_COND_NEVER
) {
863 account_inline_branch(s
, old_cc_op
);
878 account_inline_branch(s
, old_cc_op
);
893 account_inline_branch(s
, old_cc_op
);
898 /* Calculate cc value. */
903 /* Jump based on CC. We'll load up the real cond below;
904 the assignment here merely avoids a compiler warning. */
905 account_noninline_branch(s
, old_cc_op
);
906 old_cc_op
= CC_OP_STATIC
;
907 cond
= TCG_COND_NEVER
;
911 /* Load up the arguments of the comparison. */
913 c
->g1
= c
->g2
= false;
917 c
->u
.s32
.a
= tcg_temp_new_i32();
918 tcg_gen_trunc_i64_i32(c
->u
.s32
.a
, cc_dst
);
919 c
->u
.s32
.b
= tcg_const_i32(0);
922 case CC_OP_LTUGTU_32
:
924 c
->u
.s32
.a
= tcg_temp_new_i32();
925 tcg_gen_trunc_i64_i32(c
->u
.s32
.a
, cc_src
);
926 c
->u
.s32
.b
= tcg_temp_new_i32();
927 tcg_gen_trunc_i64_i32(c
->u
.s32
.b
, cc_dst
);
934 c
->u
.s64
.b
= tcg_const_i64(0);
938 case CC_OP_LTUGTU_64
:
941 c
->g1
= c
->g2
= true;
946 c
->u
.s64
.a
= tcg_temp_new_i64();
947 c
->u
.s64
.b
= tcg_const_i64(0);
948 tcg_gen_and_i64(c
->u
.s64
.a
, cc_src
, cc_dst
);
956 case 0x8 | 0x4 | 0x2: /* cc != 3 */
958 c
->u
.s32
.b
= tcg_const_i32(3);
960 case 0x8 | 0x4 | 0x1: /* cc != 2 */
962 c
->u
.s32
.b
= tcg_const_i32(2);
964 case 0x8 | 0x2 | 0x1: /* cc != 1 */
966 c
->u
.s32
.b
= tcg_const_i32(1);
968 case 0x8 | 0x2: /* cc == 0 ||Â cc == 2 => (cc & 1) == 0 */
971 c
->u
.s32
.a
= tcg_temp_new_i32();
972 c
->u
.s32
.b
= tcg_const_i32(0);
973 tcg_gen_andi_i32(c
->u
.s32
.a
, cc_op
, 1);
975 case 0x8 | 0x4: /* cc < 2 */
977 c
->u
.s32
.b
= tcg_const_i32(2);
979 case 0x8: /* cc == 0 */
981 c
->u
.s32
.b
= tcg_const_i32(0);
983 case 0x4 | 0x2 | 0x1: /* cc != 0 */
985 c
->u
.s32
.b
= tcg_const_i32(0);
987 case 0x4 | 0x1: /* cc == 1 ||Â cc == 3 => (cc & 1) != 0 */
990 c
->u
.s32
.a
= tcg_temp_new_i32();
991 c
->u
.s32
.b
= tcg_const_i32(0);
992 tcg_gen_andi_i32(c
->u
.s32
.a
, cc_op
, 1);
994 case 0x4: /* cc == 1 */
996 c
->u
.s32
.b
= tcg_const_i32(1);
998 case 0x2 | 0x1: /* cc > 1 */
1000 c
->u
.s32
.b
= tcg_const_i32(1);
1002 case 0x2: /* cc == 2 */
1004 c
->u
.s32
.b
= tcg_const_i32(2);
1006 case 0x1: /* cc == 3 */
1008 c
->u
.s32
.b
= tcg_const_i32(3);
1011 /* CC is masked by something else: (8 >> cc) & mask. */
1014 c
->u
.s32
.a
= tcg_const_i32(8);
1015 c
->u
.s32
.b
= tcg_const_i32(0);
1016 tcg_gen_shr_i32(c
->u
.s32
.a
, c
->u
.s32
.a
, cc_op
);
1017 tcg_gen_andi_i32(c
->u
.s32
.a
, c
->u
.s32
.a
, mask
);
1028 static void free_compare(DisasCompare
*c
)
1032 tcg_temp_free_i64(c
->u
.s64
.a
);
1034 tcg_temp_free_i32(c
->u
.s32
.a
);
1039 tcg_temp_free_i64(c
->u
.s64
.b
);
1041 tcg_temp_free_i32(c
->u
.s32
.b
);
1046 static void gen_jcc(DisasContext
*s
, uint32_t mask
, int skip
)
1051 disas_jcc(s
, &c
, mask
);
1052 cond
= tcg_invert_cond(c
.cond
);
1055 tcg_gen_brcond_i64(cond
, c
.u
.s64
.a
, c
.u
.s64
.b
, skip
);
1057 tcg_gen_brcond_i32(cond
, c
.u
.s32
.a
, c
.u
.s32
.b
, skip
);
1063 static void gen_bcr(DisasContext
*s
, uint32_t mask
, TCGv_i64 target
,
1070 gen_update_cc_op(s
);
1071 tcg_gen_mov_i64(psw_addr
, target
);
1073 } else if (mask
== 0) {
1074 /* ignore cc and never match */
1075 gen_goto_tb(s
, 0, offset
+ 2);
1077 TCGv_i64 new_addr
= tcg_temp_local_new_i64();
1079 tcg_gen_mov_i64(new_addr
, target
);
1080 skip
= gen_new_label();
1081 gen_jcc(s
, mask
, skip
);
1082 gen_update_cc_op(s
);
1083 tcg_gen_mov_i64(psw_addr
, new_addr
);
1084 tcg_temp_free_i64(new_addr
);
1086 gen_set_label(skip
);
1087 tcg_temp_free_i64(new_addr
);
1088 gen_goto_tb(s
, 1, offset
+ 2);
1092 static void gen_brc(uint32_t mask
, DisasContext
*s
, int32_t offset
)
1098 gen_goto_tb(s
, 0, s
->pc
+ offset
);
1099 } else if (mask
== 0) {
1100 /* ignore cc and never match */
1101 gen_goto_tb(s
, 0, s
->pc
+ 4);
1103 skip
= gen_new_label();
1104 gen_jcc(s
, mask
, skip
);
1105 gen_goto_tb(s
, 0, s
->pc
+ offset
);
1106 gen_set_label(skip
);
1107 gen_goto_tb(s
, 1, s
->pc
+ 4);
1109 s
->is_jmp
= DISAS_TB_JUMP
;
1112 static void gen_op_mvc(DisasContext
*s
, int l
, TCGv_i64 s1
, TCGv_i64 s2
)
1116 int l_memset
= gen_new_label();
1117 int l_out
= gen_new_label();
1118 TCGv_i64 dest
= tcg_temp_local_new_i64();
1119 TCGv_i64 src
= tcg_temp_local_new_i64();
1122 /* Find out if we should use the inline version of mvc */
1137 /* Fall back to helper */
1138 vl
= tcg_const_i32(l
);
1139 potential_page_fault(s
);
1140 gen_helper_mvc(cpu_env
, vl
, s1
, s2
);
1141 tcg_temp_free_i32(vl
);
1145 tcg_gen_mov_i64(dest
, s1
);
1146 tcg_gen_mov_i64(src
, s2
);
1148 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
1149 /* XXX what if we overflow while moving? */
1150 tcg_gen_andi_i64(dest
, dest
, 0x7fffffffUL
);
1151 tcg_gen_andi_i64(src
, src
, 0x7fffffffUL
);
1154 tmp
= tcg_temp_new_i64();
1155 tcg_gen_addi_i64(tmp
, src
, 1);
1156 tcg_gen_brcond_i64(TCG_COND_EQ
, dest
, tmp
, l_memset
);
1157 tcg_temp_free_i64(tmp
);
1161 tmp
= tcg_temp_new_i64();
1163 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1164 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1166 tcg_temp_free_i64(tmp
);
1169 tmp
= tcg_temp_new_i64();
1171 tcg_gen_qemu_ld16u(tmp
, src
, get_mem_index(s
));
1172 tcg_gen_qemu_st16(tmp
, dest
, get_mem_index(s
));
1174 tcg_temp_free_i64(tmp
);
1177 tmp
= tcg_temp_new_i64();
1179 tcg_gen_qemu_ld32u(tmp
, src
, get_mem_index(s
));
1180 tcg_gen_qemu_st32(tmp
, dest
, get_mem_index(s
));
1182 tcg_temp_free_i64(tmp
);
1185 tmp
= tcg_temp_new_i64();
1186 tmp2
= tcg_temp_new_i64();
1188 tcg_gen_qemu_ld32u(tmp
, src
, get_mem_index(s
));
1189 tcg_gen_addi_i64(src
, src
, 4);
1190 tcg_gen_qemu_ld8u(tmp2
, src
, get_mem_index(s
));
1191 tcg_gen_qemu_st32(tmp
, dest
, get_mem_index(s
));
1192 tcg_gen_addi_i64(dest
, dest
, 4);
1193 tcg_gen_qemu_st8(tmp2
, dest
, get_mem_index(s
));
1195 tcg_temp_free_i64(tmp
);
1196 tcg_temp_free_i64(tmp2
);
1199 tmp
= tcg_temp_new_i64();
1201 tcg_gen_qemu_ld64(tmp
, src
, get_mem_index(s
));
1202 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1204 tcg_temp_free_i64(tmp
);
1207 /* The inline version can become too big for too uneven numbers, only
1208 use it on known good lengths */
1209 tmp
= tcg_temp_new_i64();
1210 tmp2
= tcg_const_i64(8);
1211 for (i
= 0; (i
+ 7) <= l
; i
+= 8) {
1212 tcg_gen_qemu_ld64(tmp
, src
, get_mem_index(s
));
1213 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1215 tcg_gen_add_i64(src
, src
, tmp2
);
1216 tcg_gen_add_i64(dest
, dest
, tmp2
);
1219 tcg_temp_free_i64(tmp2
);
1220 tmp2
= tcg_const_i64(1);
1222 for (; i
<= l
; i
++) {
1223 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1224 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1226 tcg_gen_add_i64(src
, src
, tmp2
);
1227 tcg_gen_add_i64(dest
, dest
, tmp2
);
1230 tcg_temp_free_i64(tmp2
);
1231 tcg_temp_free_i64(tmp
);
1237 gen_set_label(l_memset
);
1238 /* memset case (dest == (src + 1)) */
1240 tmp
= tcg_temp_new_i64();
1241 tmp2
= tcg_temp_new_i64();
1242 /* fill tmp with the byte */
1243 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1244 tcg_gen_shli_i64(tmp2
, tmp
, 8);
1245 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1246 tcg_gen_shli_i64(tmp2
, tmp
, 16);
1247 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1248 tcg_gen_shli_i64(tmp2
, tmp
, 32);
1249 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1250 tcg_temp_free_i64(tmp2
);
1252 tmp2
= tcg_const_i64(8);
1254 for (i
= 0; (i
+ 7) <= l
; i
+= 8) {
1255 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1256 tcg_gen_addi_i64(dest
, dest
, 8);
1259 tcg_temp_free_i64(tmp2
);
1260 tmp2
= tcg_const_i64(1);
1262 for (; i
<= l
; i
++) {
1263 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1264 tcg_gen_addi_i64(dest
, dest
, 1);
1267 tcg_temp_free_i64(tmp2
);
1268 tcg_temp_free_i64(tmp
);
1270 gen_set_label(l_out
);
1272 tcg_temp_free(dest
);
1276 static void gen_op_clc(DisasContext
*s
, int l
, TCGv_i64 s1
, TCGv_i64 s2
)
1282 /* check for simple 32bit or 64bit match */
1285 tmp
= tcg_temp_new_i64();
1286 tmp2
= tcg_temp_new_i64();
1288 tcg_gen_qemu_ld8u(tmp
, s1
, get_mem_index(s
));
1289 tcg_gen_qemu_ld8u(tmp2
, s2
, get_mem_index(s
));
1290 cmp_u64(s
, tmp
, tmp2
);
1292 tcg_temp_free_i64(tmp
);
1293 tcg_temp_free_i64(tmp2
);
1296 tmp
= tcg_temp_new_i64();
1297 tmp2
= tcg_temp_new_i64();
1299 tcg_gen_qemu_ld16u(tmp
, s1
, get_mem_index(s
));
1300 tcg_gen_qemu_ld16u(tmp2
, s2
, get_mem_index(s
));
1301 cmp_u64(s
, tmp
, tmp2
);
1303 tcg_temp_free_i64(tmp
);
1304 tcg_temp_free_i64(tmp2
);
1307 tmp
= tcg_temp_new_i64();
1308 tmp2
= tcg_temp_new_i64();
1310 tcg_gen_qemu_ld32u(tmp
, s1
, get_mem_index(s
));
1311 tcg_gen_qemu_ld32u(tmp2
, s2
, get_mem_index(s
));
1312 cmp_u64(s
, tmp
, tmp2
);
1314 tcg_temp_free_i64(tmp
);
1315 tcg_temp_free_i64(tmp2
);
1318 tmp
= tcg_temp_new_i64();
1319 tmp2
= tcg_temp_new_i64();
1321 tcg_gen_qemu_ld64(tmp
, s1
, get_mem_index(s
));
1322 tcg_gen_qemu_ld64(tmp2
, s2
, get_mem_index(s
));
1323 cmp_u64(s
, tmp
, tmp2
);
1325 tcg_temp_free_i64(tmp
);
1326 tcg_temp_free_i64(tmp2
);
1330 potential_page_fault(s
);
1331 vl
= tcg_const_i32(l
);
1332 gen_helper_clc(cc_op
, cpu_env
, vl
, s1
, s2
);
1333 tcg_temp_free_i32(vl
);
1337 static void disas_e3(CPUS390XState
*env
, DisasContext
* s
, int op
, int r1
,
1338 int x2
, int b2
, int d2
)
1340 TCGv_i64 addr
, tmp
, tmp2
, tmp3
, tmp4
;
1341 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
1343 LOG_DISAS("disas_e3: op 0x%x r1 %d x2 %d b2 %d d2 %d\n",
1344 op
, r1
, x2
, b2
, d2
);
1345 addr
= get_address(s
, x2
, b2
, d2
);
1347 case 0x2: /* LTG R1,D2(X2,B2) [RXY] */
1348 case 0x4: /* lg r1,d2(x2,b2) */
1349 tcg_gen_qemu_ld64(regs
[r1
], addr
, get_mem_index(s
));
1351 set_cc_s64(s
, regs
[r1
]);
1354 case 0x12: /* LT R1,D2(X2,B2) [RXY] */
1355 tmp2
= tcg_temp_new_i64();
1356 tmp32_1
= tcg_temp_new_i32();
1357 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1358 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1359 store_reg32(r1
, tmp32_1
);
1360 set_cc_s32(s
, tmp32_1
);
1361 tcg_temp_free_i64(tmp2
);
1362 tcg_temp_free_i32(tmp32_1
);
1364 case 0xd: /* DSG R1,D2(X2,B2) [RXY] */
1365 case 0x1d: /* DSGF R1,D2(X2,B2) [RXY] */
1366 tmp2
= tcg_temp_new_i64();
1368 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1370 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1372 tmp4
= load_reg(r1
+ 1);
1373 tmp3
= tcg_temp_new_i64();
1374 tcg_gen_div_i64(tmp3
, tmp4
, tmp2
);
1375 store_reg(r1
+ 1, tmp3
);
1376 tcg_gen_rem_i64(tmp3
, tmp4
, tmp2
);
1377 store_reg(r1
, tmp3
);
1378 tcg_temp_free_i64(tmp2
);
1379 tcg_temp_free_i64(tmp3
);
1380 tcg_temp_free_i64(tmp4
);
1382 case 0xf: /* LRVG R1,D2(X2,B2) [RXE] */
1383 tmp2
= tcg_temp_new_i64();
1384 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1385 tcg_gen_bswap64_i64(tmp2
, tmp2
);
1386 store_reg(r1
, tmp2
);
1387 tcg_temp_free_i64(tmp2
);
1389 case 0x14: /* LGF R1,D2(X2,B2) [RXY] */
1390 case 0x16: /* LLGF R1,D2(X2,B2) [RXY] */
1391 tmp2
= tcg_temp_new_i64();
1392 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1394 tcg_gen_ext32s_i64(tmp2
, tmp2
);
1396 store_reg(r1
, tmp2
);
1397 tcg_temp_free_i64(tmp2
);
1399 case 0x15: /* LGH R1,D2(X2,B2) [RXY] */
1400 tmp2
= tcg_temp_new_i64();
1401 tcg_gen_qemu_ld16s(tmp2
, addr
, get_mem_index(s
));
1402 store_reg(r1
, tmp2
);
1403 tcg_temp_free_i64(tmp2
);
1405 case 0x17: /* LLGT R1,D2(X2,B2) [RXY] */
1406 tmp2
= tcg_temp_new_i64();
1407 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1408 tcg_gen_andi_i64(tmp2
, tmp2
, 0x7fffffffULL
);
1409 store_reg(r1
, tmp2
);
1410 tcg_temp_free_i64(tmp2
);
1412 case 0x1e: /* LRV R1,D2(X2,B2) [RXY] */
1413 tmp2
= tcg_temp_new_i64();
1414 tmp32_1
= tcg_temp_new_i32();
1415 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1416 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1417 tcg_temp_free_i64(tmp2
);
1418 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
1419 store_reg32(r1
, tmp32_1
);
1420 tcg_temp_free_i32(tmp32_1
);
1422 case 0x1f: /* LRVH R1,D2(X2,B2) [RXY] */
1423 tmp2
= tcg_temp_new_i64();
1424 tmp32_1
= tcg_temp_new_i32();
1425 tcg_gen_qemu_ld16u(tmp2
, addr
, get_mem_index(s
));
1426 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1427 tcg_temp_free_i64(tmp2
);
1428 tcg_gen_bswap16_i32(tmp32_1
, tmp32_1
);
1429 store_reg16(r1
, tmp32_1
);
1430 tcg_temp_free_i32(tmp32_1
);
1432 case 0x20: /* CG R1,D2(X2,B2) [RXY] */
1433 case 0x21: /* CLG R1,D2(X2,B2) */
1434 case 0x30: /* CGF R1,D2(X2,B2) [RXY] */
1435 case 0x31: /* CLGF R1,D2(X2,B2) [RXY] */
1436 tmp2
= tcg_temp_new_i64();
1440 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1443 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1446 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1454 cmp_s64(s
, regs
[r1
], tmp2
);
1458 cmp_u64(s
, regs
[r1
], tmp2
);
1463 tcg_temp_free_i64(tmp2
);
1465 case 0x24: /* stg r1, d2(x2,b2) */
1466 tcg_gen_qemu_st64(regs
[r1
], addr
, get_mem_index(s
));
1468 case 0x3e: /* STRV R1,D2(X2,B2) [RXY] */
1469 tmp32_1
= load_reg32(r1
);
1470 tmp2
= tcg_temp_new_i64();
1471 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
1472 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
1473 tcg_temp_free_i32(tmp32_1
);
1474 tcg_gen_qemu_st32(tmp2
, addr
, get_mem_index(s
));
1475 tcg_temp_free_i64(tmp2
);
1477 case 0x50: /* STY R1,D2(X2,B2) [RXY] */
1478 tmp32_1
= load_reg32(r1
);
1479 tmp2
= tcg_temp_new_i64();
1480 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
1481 tcg_temp_free_i32(tmp32_1
);
1482 tcg_gen_qemu_st32(tmp2
, addr
, get_mem_index(s
));
1483 tcg_temp_free_i64(tmp2
);
1485 case 0x57: /* XY R1,D2(X2,B2) [RXY] */
1486 tmp32_1
= load_reg32(r1
);
1487 tmp32_2
= tcg_temp_new_i32();
1488 tmp2
= tcg_temp_new_i64();
1489 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1490 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1491 tcg_temp_free_i64(tmp2
);
1492 tcg_gen_xor_i32(tmp32_2
, tmp32_1
, tmp32_2
);
1493 store_reg32(r1
, tmp32_2
);
1494 set_cc_nz_u32(s
, tmp32_2
);
1495 tcg_temp_free_i32(tmp32_1
);
1496 tcg_temp_free_i32(tmp32_2
);
1498 case 0x58: /* LY R1,D2(X2,B2) [RXY] */
1499 tmp3
= tcg_temp_new_i64();
1500 tcg_gen_qemu_ld32u(tmp3
, addr
, get_mem_index(s
));
1501 store_reg32_i64(r1
, tmp3
);
1502 tcg_temp_free_i64(tmp3
);
1504 case 0x71: /* LAY R1,D2(X2,B2) [RXY] */
1505 store_reg(r1
, addr
);
1507 case 0x72: /* STCY R1,D2(X2,B2) [RXY] */
1508 tmp32_1
= load_reg32(r1
);
1509 tmp2
= tcg_temp_new_i64();
1510 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
1511 tcg_gen_qemu_st8(tmp2
, addr
, get_mem_index(s
));
1512 tcg_temp_free_i32(tmp32_1
);
1513 tcg_temp_free_i64(tmp2
);
1515 case 0x73: /* ICY R1,D2(X2,B2) [RXY] */
1516 tmp3
= tcg_temp_new_i64();
1517 tcg_gen_qemu_ld8u(tmp3
, addr
, get_mem_index(s
));
1518 store_reg8(r1
, tmp3
);
1519 tcg_temp_free_i64(tmp3
);
1521 case 0x76: /* LB R1,D2(X2,B2) [RXY] */
1522 case 0x77: /* LGB R1,D2(X2,B2) [RXY] */
1523 tmp2
= tcg_temp_new_i64();
1524 tcg_gen_qemu_ld8s(tmp2
, addr
, get_mem_index(s
));
1527 tcg_gen_ext8s_i64(tmp2
, tmp2
);
1528 store_reg32_i64(r1
, tmp2
);
1531 tcg_gen_ext8s_i64(tmp2
, tmp2
);
1532 store_reg(r1
, tmp2
);
1537 tcg_temp_free_i64(tmp2
);
1539 case 0x78: /* LHY R1,D2(X2,B2) [RXY] */
1540 tmp2
= tcg_temp_new_i64();
1541 tcg_gen_qemu_ld16s(tmp2
, addr
, get_mem_index(s
));
1542 store_reg32_i64(r1
, tmp2
);
1543 tcg_temp_free_i64(tmp2
);
1545 case 0x80: /* NG R1,D2(X2,B2) [RXY] */
1546 case 0x81: /* OG R1,D2(X2,B2) [RXY] */
1547 case 0x82: /* XG R1,D2(X2,B2) [RXY] */
1548 tmp3
= tcg_temp_new_i64();
1549 tcg_gen_qemu_ld64(tmp3
, addr
, get_mem_index(s
));
1552 tcg_gen_and_i64(regs
[r1
], regs
[r1
], tmp3
);
1555 tcg_gen_or_i64(regs
[r1
], regs
[r1
], tmp3
);
1558 tcg_gen_xor_i64(regs
[r1
], regs
[r1
], tmp3
);
1563 set_cc_nz_u64(s
, regs
[r1
]);
1564 tcg_temp_free_i64(tmp3
);
1566 case 0x86: /* MLG R1,D2(X2,B2) [RXY] */
1567 tmp2
= tcg_temp_new_i64();
1568 tmp32_1
= tcg_const_i32(r1
);
1569 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1570 gen_helper_mlg(cpu_env
, tmp32_1
, tmp2
);
1571 tcg_temp_free_i64(tmp2
);
1572 tcg_temp_free_i32(tmp32_1
);
1574 case 0x87: /* DLG R1,D2(X2,B2) [RXY] */
1575 tmp2
= tcg_temp_new_i64();
1576 tmp32_1
= tcg_const_i32(r1
);
1577 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1578 gen_helper_dlg(cpu_env
, tmp32_1
, tmp2
);
1579 tcg_temp_free_i64(tmp2
);
1580 tcg_temp_free_i32(tmp32_1
);
1582 case 0x88: /* ALCG R1,D2(X2,B2) [RXY] */
1583 tmp2
= tcg_temp_new_i64();
1584 tmp3
= tcg_temp_new_i64();
1585 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1586 /* XXX possible optimization point */
1588 tcg_gen_extu_i32_i64(tmp3
, cc_op
);
1589 tcg_gen_shri_i64(tmp3
, tmp3
, 1);
1590 tcg_gen_andi_i64(tmp3
, tmp3
, 1);
1591 tcg_gen_add_i64(tmp3
, tmp2
, tmp3
);
1592 tcg_gen_add_i64(tmp3
, regs
[r1
], tmp3
);
1593 store_reg(r1
, tmp3
);
1594 set_cc_addu64(s
, regs
[r1
], tmp2
, tmp3
);
1595 tcg_temp_free_i64(tmp2
);
1596 tcg_temp_free_i64(tmp3
);
1598 case 0x89: /* SLBG R1,D2(X2,B2) [RXY] */
1599 tmp2
= tcg_temp_new_i64();
1600 tmp32_1
= tcg_const_i32(r1
);
1601 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1602 /* XXX possible optimization point */
1604 gen_helper_slbg(cc_op
, cpu_env
, cc_op
, tmp32_1
, regs
[r1
], tmp2
);
1606 tcg_temp_free_i64(tmp2
);
1607 tcg_temp_free_i32(tmp32_1
);
1609 case 0x90: /* LLGC R1,D2(X2,B2) [RXY] */
1610 tcg_gen_qemu_ld8u(regs
[r1
], addr
, get_mem_index(s
));
1612 case 0x91: /* LLGH R1,D2(X2,B2) [RXY] */
1613 tcg_gen_qemu_ld16u(regs
[r1
], addr
, get_mem_index(s
));
1615 case 0x94: /* LLC R1,D2(X2,B2) [RXY] */
1616 tmp2
= tcg_temp_new_i64();
1617 tcg_gen_qemu_ld8u(tmp2
, addr
, get_mem_index(s
));
1618 store_reg32_i64(r1
, tmp2
);
1619 tcg_temp_free_i64(tmp2
);
1621 case 0x95: /* LLH R1,D2(X2,B2) [RXY] */
1622 tmp2
= tcg_temp_new_i64();
1623 tcg_gen_qemu_ld16u(tmp2
, addr
, get_mem_index(s
));
1624 store_reg32_i64(r1
, tmp2
);
1625 tcg_temp_free_i64(tmp2
);
1627 case 0x96: /* ML R1,D2(X2,B2) [RXY] */
1628 tmp2
= tcg_temp_new_i64();
1629 tmp3
= load_reg((r1
+ 1) & 15);
1630 tcg_gen_ext32u_i64(tmp3
, tmp3
);
1631 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1632 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
1633 store_reg32_i64((r1
+ 1) & 15, tmp2
);
1634 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
1635 store_reg32_i64(r1
, tmp2
);
1636 tcg_temp_free_i64(tmp2
);
1637 tcg_temp_free_i64(tmp3
);
1639 case 0x97: /* DL R1,D2(X2,B2) [RXY] */
1640 /* reg(r1) = reg(r1, r1+1) % ld32(addr) */
1641 /* reg(r1+1) = reg(r1, r1+1) / ld32(addr) */
1643 tmp2
= tcg_temp_new_i64();
1644 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1645 tmp3
= load_reg((r1
+ 1) & 15);
1646 tcg_gen_ext32u_i64(tmp2
, tmp2
);
1647 tcg_gen_ext32u_i64(tmp3
, tmp3
);
1648 tcg_gen_shli_i64(tmp
, tmp
, 32);
1649 tcg_gen_or_i64(tmp
, tmp
, tmp3
);
1651 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
1652 tcg_gen_div_i64(tmp
, tmp
, tmp2
);
1653 store_reg32_i64((r1
+ 1) & 15, tmp
);
1654 store_reg32_i64(r1
, tmp3
);
1655 tcg_temp_free_i64(tmp
);
1656 tcg_temp_free_i64(tmp2
);
1657 tcg_temp_free_i64(tmp3
);
1659 case 0x98: /* ALC R1,D2(X2,B2) [RXY] */
1660 tmp2
= tcg_temp_new_i64();
1661 tmp32_1
= load_reg32(r1
);
1662 tmp32_2
= tcg_temp_new_i32();
1663 tmp32_3
= tcg_temp_new_i32();
1664 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1665 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1666 /* XXX possible optimization point */
1668 gen_helper_addc_u32(tmp32_3
, cc_op
, tmp32_1
, tmp32_2
);
1669 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
1670 store_reg32(r1
, tmp32_3
);
1671 tcg_temp_free_i64(tmp2
);
1672 tcg_temp_free_i32(tmp32_1
);
1673 tcg_temp_free_i32(tmp32_2
);
1674 tcg_temp_free_i32(tmp32_3
);
1676 case 0x99: /* SLB R1,D2(X2,B2) [RXY] */
1677 tmp2
= tcg_temp_new_i64();
1678 tmp32_1
= tcg_const_i32(r1
);
1679 tmp32_2
= tcg_temp_new_i32();
1680 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1681 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1682 /* XXX possible optimization point */
1684 gen_helper_slb(cc_op
, cpu_env
, cc_op
, tmp32_1
, tmp32_2
);
1686 tcg_temp_free_i64(tmp2
);
1687 tcg_temp_free_i32(tmp32_1
);
1688 tcg_temp_free_i32(tmp32_2
);
1691 LOG_DISAS("illegal e3 operation 0x%x\n", op
);
1692 gen_illegal_opcode(s
);
1695 tcg_temp_free_i64(addr
);
1698 #ifndef CONFIG_USER_ONLY
1699 static void disas_e5(CPUS390XState
*env
, DisasContext
* s
, uint64_t insn
)
1702 int op
= (insn
>> 32) & 0xff;
1704 tmp
= get_address(s
, 0, (insn
>> 28) & 0xf, (insn
>> 16) & 0xfff);
1705 tmp2
= get_address(s
, 0, (insn
>> 12) & 0xf, insn
& 0xfff);
1707 LOG_DISAS("disas_e5: insn %" PRIx64
"\n", insn
);
1709 case 0x01: /* TPROT D1(B1),D2(B2) [SSE] */
1710 /* Test Protection */
1711 potential_page_fault(s
);
1712 gen_helper_tprot(cc_op
, tmp
, tmp2
);
1716 LOG_DISAS("illegal e5 operation 0x%x\n", op
);
1717 gen_illegal_opcode(s
);
1721 tcg_temp_free_i64(tmp
);
1722 tcg_temp_free_i64(tmp2
);
1726 static void disas_eb(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
1727 int r3
, int b2
, int d2
)
1729 TCGv_i64 tmp
, tmp2
, tmp3
, tmp4
;
1730 TCGv_i32 tmp32_1
, tmp32_2
;
1733 LOG_DISAS("disas_eb: op 0x%x r1 %d r3 %d b2 %d d2 0x%x\n",
1734 op
, r1
, r3
, b2
, d2
);
1736 case 0xc: /* SRLG R1,R3,D2(B2) [RSY] */
1737 case 0xd: /* SLLG R1,R3,D2(B2) [RSY] */
1738 case 0xa: /* SRAG R1,R3,D2(B2) [RSY] */
1739 case 0xb: /* SLAG R1,R3,D2(B2) [RSY] */
1740 case 0x1c: /* RLLG R1,R3,D2(B2) [RSY] */
1742 tmp
= get_address(s
, 0, b2
, d2
);
1743 tcg_gen_andi_i64(tmp
, tmp
, 0x3f);
1745 tmp
= tcg_const_i64(d2
& 0x3f);
1749 tcg_gen_shr_i64(regs
[r1
], regs
[r3
], tmp
);
1752 tcg_gen_shl_i64(regs
[r1
], regs
[r3
], tmp
);
1755 tcg_gen_sar_i64(regs
[r1
], regs
[r3
], tmp
);
1758 tmp2
= tcg_temp_new_i64();
1759 tmp3
= tcg_temp_new_i64();
1760 gen_op_update2_cc_i64(s
, CC_OP_SLAG
, regs
[r3
], tmp
);
1761 tcg_gen_shl_i64(tmp2
, regs
[r3
], tmp
);
1762 /* override sign bit with source sign */
1763 tcg_gen_andi_i64(tmp2
, tmp2
, ~0x8000000000000000ULL
);
1764 tcg_gen_andi_i64(tmp3
, regs
[r3
], 0x8000000000000000ULL
);
1765 tcg_gen_or_i64(regs
[r1
], tmp2
, tmp3
);
1766 tcg_temp_free_i64(tmp2
);
1767 tcg_temp_free_i64(tmp3
);
1770 tcg_gen_rotl_i64(regs
[r1
], regs
[r3
], tmp
);
1777 set_cc_s64(s
, regs
[r1
]);
1779 tcg_temp_free_i64(tmp
);
1781 case 0x1d: /* RLL R1,R3,D2(B2) [RSY] */
1783 tmp
= get_address(s
, 0, b2
, d2
);
1784 tcg_gen_andi_i64(tmp
, tmp
, 0x3f);
1786 tmp
= tcg_const_i64(d2
& 0x3f);
1788 tmp32_1
= tcg_temp_new_i32();
1789 tmp32_2
= load_reg32(r3
);
1790 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
1793 tcg_gen_rotl_i32(tmp32_1
, tmp32_2
, tmp32_1
);
1799 store_reg32(r1
, tmp32_1
);
1800 tcg_temp_free_i64(tmp
);
1801 tcg_temp_free_i32(tmp32_1
);
1802 tcg_temp_free_i32(tmp32_2
);
1804 case 0x4: /* LMG R1,R3,D2(B2) [RSE] */
1805 case 0x24: /* STMG R1,R3,D2(B2) [RSE] */
1808 case 0x26: /* STMH R1,R3,D2(B2) [RSE] */
1809 case 0x96: /* LMH R1,R3,D2(B2) [RSE] */
1812 /* Apparently, unrolling lmg/stmg of any size gains performance -
1813 even for very long ones... */
1814 tmp
= get_address(s
, 0, b2
, d2
);
1815 tmp3
= tcg_const_i64(stm_len
);
1816 tmp4
= tcg_const_i64(op
== 0x26 ? 32 : 4);
1817 for (i
= r1
;; i
= (i
+ 1) % 16) {
1820 tcg_gen_qemu_ld64(regs
[i
], tmp
, get_mem_index(s
));
1823 tmp2
= tcg_temp_new_i64();
1824 #if HOST_LONG_BITS == 32
1825 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
1826 tcg_gen_trunc_i64_i32(TCGV_HIGH(regs
[i
]), tmp2
);
1828 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
1829 tcg_gen_shl_i64(tmp2
, tmp2
, tmp4
);
1830 tcg_gen_ext32u_i64(regs
[i
], regs
[i
]);
1831 tcg_gen_or_i64(regs
[i
], regs
[i
], tmp2
);
1833 tcg_temp_free_i64(tmp2
);
1836 tcg_gen_qemu_st64(regs
[i
], tmp
, get_mem_index(s
));
1839 tmp2
= tcg_temp_new_i64();
1840 tcg_gen_shr_i64(tmp2
, regs
[i
], tmp4
);
1841 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
1842 tcg_temp_free_i64(tmp2
);
1850 tcg_gen_add_i64(tmp
, tmp
, tmp3
);
1852 tcg_temp_free_i64(tmp
);
1853 tcg_temp_free_i64(tmp3
);
1854 tcg_temp_free_i64(tmp4
);
1856 case 0x2c: /* STCMH R1,M3,D2(B2) [RSY] */
1857 tmp
= get_address(s
, 0, b2
, d2
);
1858 tmp32_1
= tcg_const_i32(r1
);
1859 tmp32_2
= tcg_const_i32(r3
);
1860 potential_page_fault(s
);
1861 gen_helper_stcmh(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
1862 tcg_temp_free_i64(tmp
);
1863 tcg_temp_free_i32(tmp32_1
);
1864 tcg_temp_free_i32(tmp32_2
);
1866 #ifndef CONFIG_USER_ONLY
1867 case 0x2f: /* LCTLG R1,R3,D2(B2) [RSE] */
1869 check_privileged(s
);
1870 tmp
= get_address(s
, 0, b2
, d2
);
1871 tmp32_1
= tcg_const_i32(r1
);
1872 tmp32_2
= tcg_const_i32(r3
);
1873 potential_page_fault(s
);
1874 gen_helper_lctlg(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
1875 tcg_temp_free_i64(tmp
);
1876 tcg_temp_free_i32(tmp32_1
);
1877 tcg_temp_free_i32(tmp32_2
);
1879 case 0x25: /* STCTG R1,R3,D2(B2) [RSE] */
1881 check_privileged(s
);
1882 tmp
= get_address(s
, 0, b2
, d2
);
1883 tmp32_1
= tcg_const_i32(r1
);
1884 tmp32_2
= tcg_const_i32(r3
);
1885 potential_page_fault(s
);
1886 gen_helper_stctg(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
1887 tcg_temp_free_i64(tmp
);
1888 tcg_temp_free_i32(tmp32_1
);
1889 tcg_temp_free_i32(tmp32_2
);
1892 case 0x30: /* CSG R1,R3,D2(B2) [RSY] */
1893 tmp
= get_address(s
, 0, b2
, d2
);
1894 tmp32_1
= tcg_const_i32(r1
);
1895 tmp32_2
= tcg_const_i32(r3
);
1896 potential_page_fault(s
);
1897 /* XXX rewrite in tcg */
1898 gen_helper_csg(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
1900 tcg_temp_free_i64(tmp
);
1901 tcg_temp_free_i32(tmp32_1
);
1902 tcg_temp_free_i32(tmp32_2
);
1904 case 0x3e: /* CDSG R1,R3,D2(B2) [RSY] */
1905 tmp
= get_address(s
, 0, b2
, d2
);
1906 tmp32_1
= tcg_const_i32(r1
);
1907 tmp32_2
= tcg_const_i32(r3
);
1908 potential_page_fault(s
);
1909 /* XXX rewrite in tcg */
1910 gen_helper_cdsg(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
1912 tcg_temp_free_i64(tmp
);
1913 tcg_temp_free_i32(tmp32_1
);
1914 tcg_temp_free_i32(tmp32_2
);
1916 case 0x51: /* TMY D1(B1),I2 [SIY] */
1917 tmp
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the destination */
1918 tmp2
= tcg_const_i64((r1
<< 4) | r3
);
1919 tcg_gen_qemu_ld8u(tmp
, tmp
, get_mem_index(s
));
1920 /* yes, this is a 32 bit operation with 64 bit tcg registers, because
1921 that incurs less conversions */
1922 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_32
);
1923 tcg_temp_free_i64(tmp
);
1924 tcg_temp_free_i64(tmp2
);
1926 case 0x52: /* MVIY D1(B1),I2 [SIY] */
1927 tmp
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the destination */
1928 tmp2
= tcg_const_i64((r1
<< 4) | r3
);
1929 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
1930 tcg_temp_free_i64(tmp
);
1931 tcg_temp_free_i64(tmp2
);
1933 case 0x55: /* CLIY D1(B1),I2 [SIY] */
1934 tmp3
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the 1st operand */
1935 tmp
= tcg_temp_new_i64();
1936 tmp32_1
= tcg_temp_new_i32();
1937 tcg_gen_qemu_ld8u(tmp
, tmp3
, get_mem_index(s
));
1938 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
1939 cmp_u32c(s
, tmp32_1
, (r1
<< 4) | r3
);
1940 tcg_temp_free_i64(tmp
);
1941 tcg_temp_free_i64(tmp3
);
1942 tcg_temp_free_i32(tmp32_1
);
1944 case 0x80: /* ICMH R1,M3,D2(B2) [RSY] */
1945 tmp
= get_address(s
, 0, b2
, d2
);
1946 tmp32_1
= tcg_const_i32(r1
);
1947 tmp32_2
= tcg_const_i32(r3
);
1948 potential_page_fault(s
);
1949 /* XXX split CC calculation out */
1950 gen_helper_icmh(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
1952 tcg_temp_free_i64(tmp
);
1953 tcg_temp_free_i32(tmp32_1
);
1954 tcg_temp_free_i32(tmp32_2
);
1957 LOG_DISAS("illegal eb operation 0x%x\n", op
);
1958 gen_illegal_opcode(s
);
1963 static void disas_ed(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
1964 int x2
, int b2
, int d2
, int r1b
)
1966 TCGv_i32 tmp_r1
, tmp32
;
1968 addr
= get_address(s
, x2
, b2
, d2
);
1969 tmp_r1
= tcg_const_i32(r1
);
1971 case 0x4: /* LDEB R1,D2(X2,B2) [RXE] */
1972 potential_page_fault(s
);
1973 gen_helper_ldeb(cpu_env
, tmp_r1
, addr
);
1975 case 0x5: /* LXDB R1,D2(X2,B2) [RXE] */
1976 potential_page_fault(s
);
1977 gen_helper_lxdb(cpu_env
, tmp_r1
, addr
);
1979 case 0x9: /* CEB R1,D2(X2,B2) [RXE] */
1980 tmp
= tcg_temp_new_i64();
1981 tmp32
= load_freg32(r1
);
1982 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
1983 set_cc_cmp_f32_i64(s
, tmp32
, tmp
);
1984 tcg_temp_free_i64(tmp
);
1985 tcg_temp_free_i32(tmp32
);
1987 case 0xa: /* AEB R1,D2(X2,B2) [RXE] */
1988 tmp
= tcg_temp_new_i64();
1989 tmp32
= tcg_temp_new_i32();
1990 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
1991 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
1992 gen_helper_aeb(cpu_env
, tmp_r1
, tmp32
);
1993 tcg_temp_free_i64(tmp
);
1994 tcg_temp_free_i32(tmp32
);
1996 tmp32
= load_freg32(r1
);
1997 gen_set_cc_nz_f32(s
, tmp32
);
1998 tcg_temp_free_i32(tmp32
);
2000 case 0xb: /* SEB R1,D2(X2,B2) [RXE] */
2001 tmp
= tcg_temp_new_i64();
2002 tmp32
= tcg_temp_new_i32();
2003 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2004 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2005 gen_helper_seb(cpu_env
, tmp_r1
, tmp32
);
2006 tcg_temp_free_i64(tmp
);
2007 tcg_temp_free_i32(tmp32
);
2009 tmp32
= load_freg32(r1
);
2010 gen_set_cc_nz_f32(s
, tmp32
);
2011 tcg_temp_free_i32(tmp32
);
2013 case 0xd: /* DEB R1,D2(X2,B2) [RXE] */
2014 tmp
= tcg_temp_new_i64();
2015 tmp32
= tcg_temp_new_i32();
2016 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2017 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2018 gen_helper_deb(cpu_env
, tmp_r1
, tmp32
);
2019 tcg_temp_free_i64(tmp
);
2020 tcg_temp_free_i32(tmp32
);
2022 case 0x10: /* TCEB R1,D2(X2,B2) [RXE] */
2023 potential_page_fault(s
);
2024 gen_helper_tceb(cc_op
, cpu_env
, tmp_r1
, addr
);
2027 case 0x11: /* TCDB R1,D2(X2,B2) [RXE] */
2028 potential_page_fault(s
);
2029 gen_helper_tcdb(cc_op
, cpu_env
, tmp_r1
, addr
);
2032 case 0x12: /* TCXB R1,D2(X2,B2) [RXE] */
2033 potential_page_fault(s
);
2034 gen_helper_tcxb(cc_op
, cpu_env
, tmp_r1
, addr
);
2037 case 0x17: /* MEEB R1,D2(X2,B2) [RXE] */
2038 tmp
= tcg_temp_new_i64();
2039 tmp32
= tcg_temp_new_i32();
2040 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2041 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2042 gen_helper_meeb(cpu_env
, tmp_r1
, tmp32
);
2043 tcg_temp_free_i64(tmp
);
2044 tcg_temp_free_i32(tmp32
);
2046 case 0x19: /* CDB R1,D2(X2,B2) [RXE] */
2047 potential_page_fault(s
);
2048 gen_helper_cdb(cc_op
, cpu_env
, tmp_r1
, addr
);
2051 case 0x1a: /* ADB R1,D2(X2,B2) [RXE] */
2052 potential_page_fault(s
);
2053 gen_helper_adb(cc_op
, cpu_env
, tmp_r1
, addr
);
2056 case 0x1b: /* SDB R1,D2(X2,B2) [RXE] */
2057 potential_page_fault(s
);
2058 gen_helper_sdb(cc_op
, cpu_env
, tmp_r1
, addr
);
2061 case 0x1c: /* MDB R1,D2(X2,B2) [RXE] */
2062 potential_page_fault(s
);
2063 gen_helper_mdb(cpu_env
, tmp_r1
, addr
);
2065 case 0x1d: /* DDB R1,D2(X2,B2) [RXE] */
2066 potential_page_fault(s
);
2067 gen_helper_ddb(cpu_env
, tmp_r1
, addr
);
2069 case 0x1e: /* MADB R1,R3,D2(X2,B2) [RXF] */
2070 /* for RXF insns, r1 is R3 and r1b is R1 */
2071 tmp32
= tcg_const_i32(r1b
);
2072 potential_page_fault(s
);
2073 gen_helper_madb(cpu_env
, tmp32
, addr
, tmp_r1
);
2074 tcg_temp_free_i32(tmp32
);
2077 LOG_DISAS("illegal ed operation 0x%x\n", op
);
2078 gen_illegal_opcode(s
);
2081 tcg_temp_free_i32(tmp_r1
);
2082 tcg_temp_free_i64(addr
);
2085 static void disas_a5(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
2090 LOG_DISAS("disas_a5: op 0x%x r1 %d i2 0x%x\n", op
, r1
, i2
);
2092 case 0x0: /* IIHH R1,I2 [RI] */
2093 tmp
= tcg_const_i64(i2
);
2094 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 48, 16);
2095 tcg_temp_free_i64(tmp
);
2097 case 0x1: /* IIHL R1,I2 [RI] */
2098 tmp
= tcg_const_i64(i2
);
2099 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 32, 16);
2100 tcg_temp_free_i64(tmp
);
2102 case 0x2: /* IILH R1,I2 [RI] */
2103 tmp
= tcg_const_i64(i2
);
2104 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 16, 16);
2105 tcg_temp_free_i64(tmp
);
2107 case 0x3: /* IILL R1,I2 [RI] */
2108 tmp
= tcg_const_i64(i2
);
2109 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 0, 16);
2110 tcg_temp_free_i64(tmp
);
2112 case 0x4: /* NIHH R1,I2 [RI] */
2113 case 0x8: /* OIHH R1,I2 [RI] */
2115 tmp32
= tcg_temp_new_i32();
2118 tmp2
= tcg_const_i64((((uint64_t)i2
) << 48)
2119 | 0x0000ffffffffffffULL
);
2120 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2123 tmp2
= tcg_const_i64(((uint64_t)i2
) << 48);
2124 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2130 tcg_gen_shri_i64(tmp2
, tmp
, 48);
2131 tcg_gen_trunc_i64_i32(tmp32
, tmp2
);
2132 set_cc_nz_u32(s
, tmp32
);
2133 tcg_temp_free_i64(tmp2
);
2134 tcg_temp_free_i32(tmp32
);
2135 tcg_temp_free_i64(tmp
);
2137 case 0x5: /* NIHL R1,I2 [RI] */
2138 case 0x9: /* OIHL R1,I2 [RI] */
2140 tmp32
= tcg_temp_new_i32();
2143 tmp2
= tcg_const_i64((((uint64_t)i2
) << 32)
2144 | 0xffff0000ffffffffULL
);
2145 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2148 tmp2
= tcg_const_i64(((uint64_t)i2
) << 32);
2149 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2155 tcg_gen_shri_i64(tmp2
, tmp
, 32);
2156 tcg_gen_trunc_i64_i32(tmp32
, tmp2
);
2157 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2158 set_cc_nz_u32(s
, tmp32
);
2159 tcg_temp_free_i64(tmp2
);
2160 tcg_temp_free_i32(tmp32
);
2161 tcg_temp_free_i64(tmp
);
2163 case 0x6: /* NILH R1,I2 [RI] */
2164 case 0xa: /* OILH R1,I2 [RI] */
2166 tmp32
= tcg_temp_new_i32();
2169 tmp2
= tcg_const_i64((((uint64_t)i2
) << 16)
2170 | 0xffffffff0000ffffULL
);
2171 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2174 tmp2
= tcg_const_i64(((uint64_t)i2
) << 16);
2175 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2181 tcg_gen_shri_i64(tmp
, tmp
, 16);
2182 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2183 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2184 set_cc_nz_u32(s
, tmp32
);
2185 tcg_temp_free_i64(tmp2
);
2186 tcg_temp_free_i32(tmp32
);
2187 tcg_temp_free_i64(tmp
);
2189 case 0x7: /* NILL R1,I2 [RI] */
2190 case 0xb: /* OILL R1,I2 [RI] */
2192 tmp32
= tcg_temp_new_i32();
2195 tmp2
= tcg_const_i64(i2
| 0xffffffffffff0000ULL
);
2196 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2199 tmp2
= tcg_const_i64(i2
);
2200 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2206 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2207 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2208 set_cc_nz_u32(s
, tmp32
); /* signedness should not matter here */
2209 tcg_temp_free_i64(tmp2
);
2210 tcg_temp_free_i32(tmp32
);
2211 tcg_temp_free_i64(tmp
);
2213 case 0xc: /* LLIHH R1,I2 [RI] */
2214 tmp
= tcg_const_i64( ((uint64_t)i2
) << 48 );
2216 tcg_temp_free_i64(tmp
);
2218 case 0xd: /* LLIHL R1,I2 [RI] */
2219 tmp
= tcg_const_i64( ((uint64_t)i2
) << 32 );
2221 tcg_temp_free_i64(tmp
);
2223 case 0xe: /* LLILH R1,I2 [RI] */
2224 tmp
= tcg_const_i64( ((uint64_t)i2
) << 16 );
2226 tcg_temp_free_i64(tmp
);
2228 case 0xf: /* LLILL R1,I2 [RI] */
2229 tmp
= tcg_const_i64(i2
);
2231 tcg_temp_free_i64(tmp
);
2234 LOG_DISAS("illegal a5 operation 0x%x\n", op
);
2235 gen_illegal_opcode(s
);
2240 static void disas_a7(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
2247 LOG_DISAS("disas_a7: op 0x%x r1 %d i2 0x%x\n", op
, r1
, i2
);
2249 case 0x0: /* TMLH or TMH R1,I2 [RI] */
2250 case 0x1: /* TMLL or TML R1,I2 [RI] */
2251 case 0x2: /* TMHH R1,I2 [RI] */
2252 case 0x3: /* TMHL R1,I2 [RI] */
2254 tmp2
= tcg_const_i64((uint16_t)i2
);
2257 tcg_gen_shri_i64(tmp
, tmp
, 16);
2262 tcg_gen_shri_i64(tmp
, tmp
, 48);
2265 tcg_gen_shri_i64(tmp
, tmp
, 32);
2268 tcg_gen_andi_i64(tmp
, tmp
, 0xffff);
2269 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_64
);
2270 tcg_temp_free_i64(tmp
);
2271 tcg_temp_free_i64(tmp2
);
2273 case 0x4: /* brc m1, i2 */
2274 gen_brc(r1
, s
, i2
* 2LL);
2276 case 0x5: /* BRAS R1,I2 [RI] */
2277 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 4));
2279 tcg_temp_free_i64(tmp
);
2280 gen_goto_tb(s
, 0, s
->pc
+ i2
* 2LL);
2281 s
->is_jmp
= DISAS_TB_JUMP
;
2283 case 0x6: /* BRCT R1,I2 [RI] */
2284 tmp32_1
= load_reg32(r1
);
2285 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
2286 store_reg32(r1
, tmp32_1
);
2287 gen_update_cc_op(s
);
2288 l1
= gen_new_label();
2289 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp32_1
, 0, l1
);
2290 gen_goto_tb(s
, 0, s
->pc
+ (i2
* 2LL));
2292 gen_goto_tb(s
, 1, s
->pc
+ 4);
2293 s
->is_jmp
= DISAS_TB_JUMP
;
2294 tcg_temp_free_i32(tmp32_1
);
2296 case 0x7: /* BRCTG R1,I2 [RI] */
2298 tcg_gen_subi_i64(tmp
, tmp
, 1);
2300 gen_update_cc_op(s
);
2301 l1
= gen_new_label();
2302 tcg_gen_brcondi_i64(TCG_COND_EQ
, tmp
, 0, l1
);
2303 gen_goto_tb(s
, 0, s
->pc
+ (i2
* 2LL));
2305 gen_goto_tb(s
, 1, s
->pc
+ 4);
2306 s
->is_jmp
= DISAS_TB_JUMP
;
2307 tcg_temp_free_i64(tmp
);
2309 case 0x8: /* lhi r1, i2 */
2310 tmp32_1
= tcg_const_i32(i2
);
2311 store_reg32(r1
, tmp32_1
);
2312 tcg_temp_free_i32(tmp32_1
);
2314 case 0x9: /* lghi r1, i2 */
2315 tmp
= tcg_const_i64(i2
);
2317 tcg_temp_free_i64(tmp
);
2319 case 0xe: /* CHI R1,I2 [RI] */
2320 tmp32_1
= load_reg32(r1
);
2321 cmp_s32c(s
, tmp32_1
, i2
);
2322 tcg_temp_free_i32(tmp32_1
);
2324 case 0xf: /* CGHI R1,I2 [RI] */
2326 cmp_s64c(s
, tmp
, i2
);
2327 tcg_temp_free_i64(tmp
);
2330 LOG_DISAS("illegal a7 operation 0x%x\n", op
);
2331 gen_illegal_opcode(s
);
2336 static void disas_b2(CPUS390XState
*env
, DisasContext
*s
, int op
,
2339 TCGv_i64 tmp
, tmp2
, tmp3
;
2340 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
2342 #ifndef CONFIG_USER_ONLY
2346 r1
= (insn
>> 4) & 0xf;
2349 LOG_DISAS("disas_b2: op 0x%x r1 %d r2 %d\n", op
, r1
, r2
);
2352 case 0x22: /* IPM R1 [RRE] */
2353 tmp32_1
= tcg_const_i32(r1
);
2355 gen_helper_ipm(cpu_env
, cc_op
, tmp32_1
);
2356 tcg_temp_free_i32(tmp32_1
);
2358 case 0x41: /* CKSM R1,R2 [RRE] */
2359 tmp32_1
= tcg_const_i32(r1
);
2360 tmp32_2
= tcg_const_i32(r2
);
2361 potential_page_fault(s
);
2362 gen_helper_cksm(cpu_env
, tmp32_1
, tmp32_2
);
2363 tcg_temp_free_i32(tmp32_1
);
2364 tcg_temp_free_i32(tmp32_2
);
2365 gen_op_movi_cc(s
, 0);
2367 case 0x4e: /* SAR R1,R2 [RRE] */
2368 tmp32_1
= load_reg32(r2
);
2369 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, aregs
[r1
]));
2370 tcg_temp_free_i32(tmp32_1
);
2372 case 0x4f: /* EAR R1,R2 [RRE] */
2373 tmp32_1
= tcg_temp_new_i32();
2374 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, aregs
[r2
]));
2375 store_reg32(r1
, tmp32_1
);
2376 tcg_temp_free_i32(tmp32_1
);
2378 case 0x54: /* MVPG R1,R2 [RRE] */
2380 tmp2
= load_reg(r1
);
2381 tmp3
= load_reg(r2
);
2382 potential_page_fault(s
);
2383 gen_helper_mvpg(cpu_env
, tmp
, tmp2
, tmp3
);
2384 tcg_temp_free_i64(tmp
);
2385 tcg_temp_free_i64(tmp2
);
2386 tcg_temp_free_i64(tmp3
);
2387 /* XXX check CCO bit and set CC accordingly */
2388 gen_op_movi_cc(s
, 0);
2390 case 0x55: /* MVST R1,R2 [RRE] */
2391 tmp32_1
= load_reg32(0);
2392 tmp32_2
= tcg_const_i32(r1
);
2393 tmp32_3
= tcg_const_i32(r2
);
2394 potential_page_fault(s
);
2395 gen_helper_mvst(cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2396 tcg_temp_free_i32(tmp32_1
);
2397 tcg_temp_free_i32(tmp32_2
);
2398 tcg_temp_free_i32(tmp32_3
);
2399 gen_op_movi_cc(s
, 1);
2401 case 0x5d: /* CLST R1,R2 [RRE] */
2402 tmp32_1
= load_reg32(0);
2403 tmp32_2
= tcg_const_i32(r1
);
2404 tmp32_3
= tcg_const_i32(r2
);
2405 potential_page_fault(s
);
2406 gen_helper_clst(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2408 tcg_temp_free_i32(tmp32_1
);
2409 tcg_temp_free_i32(tmp32_2
);
2410 tcg_temp_free_i32(tmp32_3
);
2412 case 0x5e: /* SRST R1,R2 [RRE] */
2413 tmp32_1
= load_reg32(0);
2414 tmp32_2
= tcg_const_i32(r1
);
2415 tmp32_3
= tcg_const_i32(r2
);
2416 potential_page_fault(s
);
2417 gen_helper_srst(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2419 tcg_temp_free_i32(tmp32_1
);
2420 tcg_temp_free_i32(tmp32_2
);
2421 tcg_temp_free_i32(tmp32_3
);
2424 #ifndef CONFIG_USER_ONLY
2425 case 0x02: /* STIDP D2(B2) [S] */
2427 check_privileged(s
);
2428 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2429 tmp
= get_address(s
, 0, b2
, d2
);
2430 potential_page_fault(s
);
2431 gen_helper_stidp(cpu_env
, tmp
);
2432 tcg_temp_free_i64(tmp
);
2434 case 0x04: /* SCK D2(B2) [S] */
2436 check_privileged(s
);
2437 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2438 tmp
= get_address(s
, 0, b2
, d2
);
2439 potential_page_fault(s
);
2440 gen_helper_sck(cc_op
, tmp
);
2442 tcg_temp_free_i64(tmp
);
2444 case 0x05: /* STCK D2(B2) [S] */
2446 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2447 tmp
= get_address(s
, 0, b2
, d2
);
2448 potential_page_fault(s
);
2449 gen_helper_stck(cc_op
, cpu_env
, tmp
);
2451 tcg_temp_free_i64(tmp
);
2453 case 0x06: /* SCKC D2(B2) [S] */
2454 /* Set Clock Comparator */
2455 check_privileged(s
);
2456 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2457 tmp
= get_address(s
, 0, b2
, d2
);
2458 potential_page_fault(s
);
2459 gen_helper_sckc(cpu_env
, tmp
);
2460 tcg_temp_free_i64(tmp
);
2462 case 0x07: /* STCKC D2(B2) [S] */
2463 /* Store Clock Comparator */
2464 check_privileged(s
);
2465 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2466 tmp
= get_address(s
, 0, b2
, d2
);
2467 potential_page_fault(s
);
2468 gen_helper_stckc(cpu_env
, tmp
);
2469 tcg_temp_free_i64(tmp
);
2471 case 0x08: /* SPT D2(B2) [S] */
2473 check_privileged(s
);
2474 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2475 tmp
= get_address(s
, 0, b2
, d2
);
2476 potential_page_fault(s
);
2477 gen_helper_spt(cpu_env
, tmp
);
2478 tcg_temp_free_i64(tmp
);
2480 case 0x09: /* STPT D2(B2) [S] */
2481 /* Store CPU Timer */
2482 check_privileged(s
);
2483 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2484 tmp
= get_address(s
, 0, b2
, d2
);
2485 potential_page_fault(s
);
2486 gen_helper_stpt(cpu_env
, tmp
);
2487 tcg_temp_free_i64(tmp
);
2489 case 0x0a: /* SPKA D2(B2) [S] */
2490 /* Set PSW Key from Address */
2491 check_privileged(s
);
2492 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2493 tmp
= get_address(s
, 0, b2
, d2
);
2494 tmp2
= tcg_temp_new_i64();
2495 tcg_gen_andi_i64(tmp2
, psw_mask
, ~PSW_MASK_KEY
);
2496 tcg_gen_shli_i64(tmp
, tmp
, PSW_SHIFT_KEY
- 4);
2497 tcg_gen_or_i64(psw_mask
, tmp2
, tmp
);
2498 tcg_temp_free_i64(tmp2
);
2499 tcg_temp_free_i64(tmp
);
2501 case 0x0d: /* PTLB [S] */
2503 check_privileged(s
);
2504 gen_helper_ptlb(cpu_env
);
2506 case 0x10: /* SPX D2(B2) [S] */
2507 /* Set Prefix Register */
2508 check_privileged(s
);
2509 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2510 tmp
= get_address(s
, 0, b2
, d2
);
2511 potential_page_fault(s
);
2512 gen_helper_spx(cpu_env
, tmp
);
2513 tcg_temp_free_i64(tmp
);
2515 case 0x11: /* STPX D2(B2) [S] */
2517 check_privileged(s
);
2518 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2519 tmp
= get_address(s
, 0, b2
, d2
);
2520 tmp2
= tcg_temp_new_i64();
2521 tcg_gen_ld_i64(tmp2
, cpu_env
, offsetof(CPUS390XState
, psa
));
2522 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2523 tcg_temp_free_i64(tmp
);
2524 tcg_temp_free_i64(tmp2
);
2526 case 0x12: /* STAP D2(B2) [S] */
2527 /* Store CPU Address */
2528 check_privileged(s
);
2529 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2530 tmp
= get_address(s
, 0, b2
, d2
);
2531 tmp2
= tcg_temp_new_i64();
2532 tmp32_1
= tcg_temp_new_i32();
2533 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, cpu_num
));
2534 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
2535 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2536 tcg_temp_free_i64(tmp
);
2537 tcg_temp_free_i64(tmp2
);
2538 tcg_temp_free_i32(tmp32_1
);
2540 case 0x21: /* IPTE R1,R2 [RRE] */
2541 /* Invalidate PTE */
2542 check_privileged(s
);
2543 r1
= (insn
>> 4) & 0xf;
2546 tmp2
= load_reg(r2
);
2547 gen_helper_ipte(cpu_env
, tmp
, tmp2
);
2548 tcg_temp_free_i64(tmp
);
2549 tcg_temp_free_i64(tmp2
);
2551 case 0x29: /* ISKE R1,R2 [RRE] */
2552 /* Insert Storage Key Extended */
2553 check_privileged(s
);
2554 r1
= (insn
>> 4) & 0xf;
2557 tmp2
= tcg_temp_new_i64();
2558 gen_helper_iske(tmp2
, cpu_env
, tmp
);
2559 store_reg(r1
, tmp2
);
2560 tcg_temp_free_i64(tmp
);
2561 tcg_temp_free_i64(tmp2
);
2563 case 0x2a: /* RRBE R1,R2 [RRE] */
2564 /* Set Storage Key Extended */
2565 check_privileged(s
);
2566 r1
= (insn
>> 4) & 0xf;
2568 tmp32_1
= load_reg32(r1
);
2570 gen_helper_rrbe(cc_op
, cpu_env
, tmp32_1
, tmp
);
2572 tcg_temp_free_i32(tmp32_1
);
2573 tcg_temp_free_i64(tmp
);
2575 case 0x2b: /* SSKE R1,R2 [RRE] */
2576 /* Set Storage Key Extended */
2577 check_privileged(s
);
2578 r1
= (insn
>> 4) & 0xf;
2580 tmp32_1
= load_reg32(r1
);
2582 gen_helper_sske(cpu_env
, tmp32_1
, tmp
);
2583 tcg_temp_free_i32(tmp32_1
);
2584 tcg_temp_free_i64(tmp
);
2586 case 0x34: /* STCH ? */
2587 /* Store Subchannel */
2588 check_privileged(s
);
2589 gen_op_movi_cc(s
, 3);
2591 case 0x46: /* STURA R1,R2 [RRE] */
2592 /* Store Using Real Address */
2593 check_privileged(s
);
2594 r1
= (insn
>> 4) & 0xf;
2596 tmp32_1
= load_reg32(r1
);
2598 potential_page_fault(s
);
2599 gen_helper_stura(cpu_env
, tmp
, tmp32_1
);
2600 tcg_temp_free_i32(tmp32_1
);
2601 tcg_temp_free_i64(tmp
);
2603 case 0x50: /* CSP R1,R2 [RRE] */
2604 /* Compare And Swap And Purge */
2605 check_privileged(s
);
2606 r1
= (insn
>> 4) & 0xf;
2608 tmp32_1
= tcg_const_i32(r1
);
2609 tmp32_2
= tcg_const_i32(r2
);
2610 gen_helper_csp(cc_op
, cpu_env
, tmp32_1
, tmp32_2
);
2612 tcg_temp_free_i32(tmp32_1
);
2613 tcg_temp_free_i32(tmp32_2
);
2615 case 0x5f: /* CHSC ? */
2616 /* Channel Subsystem Call */
2617 check_privileged(s
);
2618 gen_op_movi_cc(s
, 3);
2620 case 0x78: /* STCKE D2(B2) [S] */
2621 /* Store Clock Extended */
2622 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2623 tmp
= get_address(s
, 0, b2
, d2
);
2624 potential_page_fault(s
);
2625 gen_helper_stcke(cc_op
, cpu_env
, tmp
);
2627 tcg_temp_free_i64(tmp
);
2629 case 0x79: /* SACF D2(B2) [S] */
2630 /* Set Address Space Control Fast */
2631 check_privileged(s
);
2632 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2633 tmp
= get_address(s
, 0, b2
, d2
);
2634 potential_page_fault(s
);
2635 gen_helper_sacf(cpu_env
, tmp
);
2636 tcg_temp_free_i64(tmp
);
2637 /* addressing mode has changed, so end the block */
2640 s
->is_jmp
= DISAS_JUMP
;
2642 case 0x7d: /* STSI D2,(B2) [S] */
2643 check_privileged(s
);
2644 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2645 tmp
= get_address(s
, 0, b2
, d2
);
2646 tmp32_1
= load_reg32(0);
2647 tmp32_2
= load_reg32(1);
2648 potential_page_fault(s
);
2649 gen_helper_stsi(cc_op
, cpu_env
, tmp
, tmp32_1
, tmp32_2
);
2651 tcg_temp_free_i64(tmp
);
2652 tcg_temp_free_i32(tmp32_1
);
2653 tcg_temp_free_i32(tmp32_2
);
2655 case 0x9d: /* LFPC D2(B2) [S] */
2656 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2657 tmp
= get_address(s
, 0, b2
, d2
);
2658 tmp2
= tcg_temp_new_i64();
2659 tmp32_1
= tcg_temp_new_i32();
2660 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
2661 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
2662 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, fpc
));
2663 tcg_temp_free_i64(tmp
);
2664 tcg_temp_free_i64(tmp2
);
2665 tcg_temp_free_i32(tmp32_1
);
2667 case 0xb1: /* STFL D2(B2) [S] */
2668 /* Store Facility List (CPU features) at 200 */
2669 check_privileged(s
);
2670 tmp2
= tcg_const_i64(0xc0000000);
2671 tmp
= tcg_const_i64(200);
2672 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2673 tcg_temp_free_i64(tmp2
);
2674 tcg_temp_free_i64(tmp
);
2676 case 0xb2: /* LPSWE D2(B2) [S] */
2677 /* Load PSW Extended */
2678 check_privileged(s
);
2679 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2680 tmp
= get_address(s
, 0, b2
, d2
);
2681 tmp2
= tcg_temp_new_i64();
2682 tmp3
= tcg_temp_new_i64();
2683 tcg_gen_qemu_ld64(tmp2
, tmp
, get_mem_index(s
));
2684 tcg_gen_addi_i64(tmp
, tmp
, 8);
2685 tcg_gen_qemu_ld64(tmp3
, tmp
, get_mem_index(s
));
2686 gen_helper_load_psw(cpu_env
, tmp2
, tmp3
);
2687 /* we need to keep cc_op intact */
2688 s
->is_jmp
= DISAS_JUMP
;
2689 tcg_temp_free_i64(tmp
);
2690 tcg_temp_free_i64(tmp2
);
2691 tcg_temp_free_i64(tmp3
);
2693 case 0x20: /* SERVC R1,R2 [RRE] */
2694 /* SCLP Service call (PV hypercall) */
2695 check_privileged(s
);
2696 potential_page_fault(s
);
2697 tmp32_1
= load_reg32(r2
);
2699 gen_helper_servc(cc_op
, cpu_env
, tmp32_1
, tmp
);
2701 tcg_temp_free_i32(tmp32_1
);
2702 tcg_temp_free_i64(tmp
);
2706 LOG_DISAS("illegal b2 operation 0x%x\n", op
);
2707 gen_illegal_opcode(s
);
/*
 * Translate one instruction of the s390x 0xB3 opcode family (binary
 * floating-point register-register ops, RRE and RRF formats).
 *
 * NOTE(review): this extract is lossy -- the embedded original line
 * numbers jump (2713-2715, 2724, 2729, 2732-2733, ...), so the
 * signature tail (r1/r2 params), the opening brace, the
 * `switch (op) {` header, the `break;` statements, store_reg calls
 * and closing braces are NOT visible here.  Code is left
 * byte-identical; comments only.
 */
2712 static void disas_b3(CPUS390XState
*env
, DisasContext
*s
, int op
, int m3
,
2716 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
2717 LOG_DISAS("disas_b3: op 0x%x m3 0x%x r1 %d r2 %d\n", op
, m3
, r1
, r2
);
/*
 * Boilerplate expanders: call gen_helper_<i>(...) on constant (r1, r2)
 * operand numbers; the _CC variant also writes the helper's result
 * into cc_op.  (Comments must not be placed inside these macros --
 * the backslash continuations would be cut short.)
 */
2718 #define FP_HELPER(i) \
2719 tmp32_1 = tcg_const_i32(r1); \
2720 tmp32_2 = tcg_const_i32(r2); \
2721 gen_helper_ ## i(cpu_env, tmp32_1, tmp32_2); \
2722 tcg_temp_free_i32(tmp32_1); \
2723 tcg_temp_free_i32(tmp32_2);
2725 #define FP_HELPER_CC(i) \
2726 tmp32_1 = tcg_const_i32(r1); \
2727 tmp32_2 = tcg_const_i32(r2); \
2728 gen_helper_ ## i(cc_op, cpu_env, tmp32_1, tmp32_2); \
2730 tcg_temp_free_i32(tmp32_1); \
2731 tcg_temp_free_i32(tmp32_2);
/* Per-opcode dispatch (the surrounding `switch (op)` is not in view). */
2734 case 0x0: /* LPEBR R1,R2 [RRE] */
2735 FP_HELPER_CC(lpebr
);
2737 case 0x2: /* LTEBR R1,R2 [RRE] */
2738 FP_HELPER_CC(ltebr
);
2740 case 0x3: /* LCEBR R1,R2 [RRE] */
2741 FP_HELPER_CC(lcebr
);
/* NOTE(review): the FP_HELPER(...) bodies of the following case arms
 * (2744-2760, 2771-2790) were dropped by the extraction. */
2743 case 0x4: /* LDEBR R1,R2 [RRE] */
2746 case 0x5: /* LXDBR R1,R2 [RRE] */
2749 case 0x9: /* CEBR R1,R2 [RRE] */
2752 case 0xa: /* AEBR R1,R2 [RRE] */
2755 case 0xb: /* SEBR R1,R2 [RRE] */
2758 case 0xd: /* DEBR R1,R2 [RRE] */
2761 case 0x10: /* LPDBR R1,R2 [RRE] */
2762 FP_HELPER_CC(lpdbr
);
2764 case 0x12: /* LTDBR R1,R2 [RRE] */
2765 FP_HELPER_CC(ltdbr
);
2767 case 0x13: /* LCDBR R1,R2 [RRE] */
2768 FP_HELPER_CC(lcdbr
);
2770 case 0x15: /* SQBDR R1,R2 [RRE] */
2773 case 0x17: /* MEEBR R1,R2 [RRE] */
2776 case 0x19: /* CDBR R1,R2 [RRE] */
2779 case 0x1a: /* ADBR R1,R2 [RRE] */
2782 case 0x1b: /* SDBR R1,R2 [RRE] */
2785 case 0x1c: /* MDBR R1,R2 [RRE] */
2788 case 0x1d: /* DDBR R1,R2 [RRE] */
2791 case 0xe: /* MAEBR R1,R3,R2 [RRF] */
2792 case 0x1e: /* MADBR R1,R3,R2 [RRF] */
2793 case 0x1f: /* MSDBR R1,R3,R2 [RRF] */
2794 /* for RRF insns, m3 is R1, r1 is R3, and r2 is R2 */
2795 tmp32_1
= tcg_const_i32(m3
);
2796 tmp32_2
= tcg_const_i32(r2
);
2797 tmp32_3
= tcg_const_i32(r1
);
/* NOTE(review): the inner `switch (op)` selecting among the three
 * multiply-and-add/subtract helpers is missing from this extract. */
2800 gen_helper_maebr(cpu_env
, tmp32_1
, tmp32_3
, tmp32_2
);
2803 gen_helper_madbr(cpu_env
, tmp32_1
, tmp32_3
, tmp32_2
);
2806 gen_helper_msdbr(cpu_env
, tmp32_1
, tmp32_3
, tmp32_2
);
2811 tcg_temp_free_i32(tmp32_1
);
2812 tcg_temp_free_i32(tmp32_2
);
2813 tcg_temp_free_i32(tmp32_3
);
2815 case 0x40: /* LPXBR R1,R2 [RRE] */
2816 FP_HELPER_CC(lpxbr
);
2818 case 0x42: /* LTXBR R1,R2 [RRE] */
2819 FP_HELPER_CC(ltxbr
);
2821 case 0x43: /* LCXBR R1,R2 [RRE] */
2822 FP_HELPER_CC(lcxbr
);
2824 case 0x44: /* LEDBR R1,R2 [RRE] */
2827 case 0x45: /* LDXBR R1,R2 [RRE] */
2830 case 0x46: /* LEXBR R1,R2 [RRE] */
2833 case 0x49: /* CXBR R1,R2 [RRE] */
2836 case 0x4a: /* AXBR R1,R2 [RRE] */
2839 case 0x4b: /* SXBR R1,R2 [RRE] */
2842 case 0x4c: /* MXBR R1,R2 [RRE] */
2845 case 0x4d: /* DXBR R1,R2 [RRE] */
/* LXR copies a 128-bit FP register pair: fregs r2/r2+2 -> r1/r1+2. */
2848 case 0x65: /* LXR R1,R2 [RRE] */
2849 tmp
= load_freg(r2
);
2850 store_freg(r1
, tmp
);
2851 tcg_temp_free_i64(tmp
);
2852 tmp
= load_freg(r2
+ 2);
2853 store_freg(r1
+ 2, tmp
);
2854 tcg_temp_free_i64(tmp
);
2856 case 0x74: /* LZER R1 [RRE] */
2857 tmp32_1
= tcg_const_i32(r1
);
2858 gen_helper_lzer(cpu_env
, tmp32_1
);
2859 tcg_temp_free_i32(tmp32_1
);
2861 case 0x75: /* LZDR R1 [RRE] */
2862 tmp32_1
= tcg_const_i32(r1
);
2863 gen_helper_lzdr(cpu_env
, tmp32_1
);
2864 tcg_temp_free_i32(tmp32_1
);
2866 case 0x76: /* LZXR R1 [RRE] */
2867 tmp32_1
= tcg_const_i32(r1
);
2868 gen_helper_lzxr(cpu_env
, tmp32_1
);
2869 tcg_temp_free_i32(tmp32_1
);
/* SFPC/EFPC move the FP control word between gpr r1 and env->fpc. */
2871 case 0x84: /* SFPC R1 [RRE] */
2872 tmp32_1
= load_reg32(r1
);
2873 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, fpc
));
2874 tcg_temp_free_i32(tmp32_1
);
2876 case 0x8c: /* EFPC R1 [RRE] */
2877 tmp32_1
= tcg_temp_new_i32();
2878 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, fpc
));
2879 store_reg32(r1
, tmp32_1
);
2880 tcg_temp_free_i32(tmp32_1
);
/* Convert from (32-bit) fixed to short/long/extended BFP. */
2882 case 0x94: /* CEFBR R1,R2 [RRE] */
2883 case 0x95: /* CDFBR R1,R2 [RRE] */
2884 case 0x96: /* CXFBR R1,R2 [RRE] */
2885 tmp32_1
= tcg_const_i32(r1
);
2886 tmp32_2
= load_reg32(r2
);
2889 gen_helper_cefbr(cpu_env
, tmp32_1
, tmp32_2
);
2892 gen_helper_cdfbr(cpu_env
, tmp32_1
, tmp32_2
);
2895 gen_helper_cxfbr(cpu_env
, tmp32_1
, tmp32_2
);
2900 tcg_temp_free_i32(tmp32_1
);
2901 tcg_temp_free_i32(tmp32_2
);
/* Convert BFP to 32-bit fixed; m3 carries the rounding mode. */
2903 case 0x98: /* CFEBR R1,R2 [RRE] */
2904 case 0x99: /* CFDBR R1,R2 [RRE] */
2905 case 0x9a: /* CFXBR R1,R2 [RRE] */
2906 tmp32_1
= tcg_const_i32(r1
);
2907 tmp32_2
= tcg_const_i32(r2
);
2908 tmp32_3
= tcg_const_i32(m3
);
2911 gen_helper_cfebr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2914 gen_helper_cfdbr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2917 gen_helper_cfxbr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2923 tcg_temp_free_i32(tmp32_1
);
2924 tcg_temp_free_i32(tmp32_2
);
2925 tcg_temp_free_i32(tmp32_3
);
/* Convert from 64-bit fixed to BFP (tmp presumably loaded from r2 in
 * lines 2930-2932/2946, missing from this extract -- TODO confirm). */
2927 case 0xa4: /* CEGBR R1,R2 [RRE] */
2928 case 0xa5: /* CDGBR R1,R2 [RRE] */
2929 tmp32_1
= tcg_const_i32(r1
);
2933 gen_helper_cegbr(cpu_env
, tmp32_1
, tmp
);
2936 gen_helper_cdgbr(cpu_env
, tmp32_1
, tmp
);
2941 tcg_temp_free_i32(tmp32_1
);
2942 tcg_temp_free_i64(tmp
);
2944 case 0xa6: /* CXGBR R1,R2 [RRE] */
2945 tmp32_1
= tcg_const_i32(r1
);
2947 gen_helper_cxgbr(cpu_env
, tmp32_1
, tmp
);
2948 tcg_temp_free_i32(tmp32_1
);
2949 tcg_temp_free_i64(tmp
);
/* Convert BFP to 64-bit fixed; m3 again carries the rounding mode. */
2951 case 0xa8: /* CGEBR R1,R2 [RRE] */
2952 tmp32_1
= tcg_const_i32(r1
);
2953 tmp32_2
= tcg_const_i32(r2
);
2954 tmp32_3
= tcg_const_i32(m3
);
2955 gen_helper_cgebr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2957 tcg_temp_free_i32(tmp32_1
);
2958 tcg_temp_free_i32(tmp32_2
);
2959 tcg_temp_free_i32(tmp32_3
);
2961 case 0xa9: /* CGDBR R1,R2 [RRE] */
2962 tmp32_1
= tcg_const_i32(r1
);
2963 tmp32_2
= tcg_const_i32(r2
);
2964 tmp32_3
= tcg_const_i32(m3
);
2965 gen_helper_cgdbr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2967 tcg_temp_free_i32(tmp32_1
);
2968 tcg_temp_free_i32(tmp32_2
);
2969 tcg_temp_free_i32(tmp32_3
);
2971 case 0xaa: /* CGXBR R1,R2 [RRE] */
2972 tmp32_1
= tcg_const_i32(r1
);
2973 tmp32_2
= tcg_const_i32(r2
);
2974 tmp32_3
= tcg_const_i32(m3
);
2975 gen_helper_cgxbr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2977 tcg_temp_free_i32(tmp32_1
);
2978 tcg_temp_free_i32(tmp32_2
);
2979 tcg_temp_free_i32(tmp32_3
);
/* Unknown sub-opcode: log and raise an illegal-operation program check. */
2982 LOG_DISAS("illegal b3 operation 0x%x\n", op
);
2983 gen_illegal_opcode(s
);
/*
 * Translate one instruction of the s390x 0xB9 opcode family (mostly
 * 64-bit register-register ops, RRE format).
 *
 * NOTE(review): lossy extract -- embedded original line numbers jump
 * (2992-2993, 2996, 2998, 3007, 3009-3012, ...), so the signature
 * tail (r2), the `switch (op) {` headers, `break;` lines,
 * store_reg calls and closing braces are NOT visible here.  Code is
 * left byte-identical; comments only.
 */
2991 static void disas_b9(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
2994 TCGv_i64 tmp
, tmp2
, tmp3
;
2995 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
2997 LOG_DISAS("disas_b9: op 0x%x r1 %d r2 %d\n", op
, r1
, r2
);
/*
 * Load positive/negative/test/complement; the 0x1x variants are the
 * 32->64 sign-extending forms (the extract is missing the inner
 * switch that selects between load_reg and load_reg32_i64).
 */
2999 case 0x0: /* LPGR R1,R2 [RRE] */
3000 case 0x1: /* LNGR R1,R2 [RRE] */
3001 case 0x2: /* LTGR R1,R2 [RRE] */
3002 case 0x3: /* LCGR R1,R2 [RRE] */
3003 case 0x10: /* LPGFR R1,R2 [RRE] */
3004 case 0x11: /* LNFGR R1,R2 [RRE] */
3005 case 0x12: /* LTGFR R1,R2 [RRE] */
3006 case 0x13: /* LCGFR R1,R2 [RRE] */
3008 tmp
= load_reg32_i64(r2
);
3013 case 0x0: /* LP?GR */
3014 set_cc_abs64(s
, tmp
);
3015 gen_helper_abs_i64(tmp
, tmp
);
3018 case 0x1: /* LN?GR */
3019 set_cc_nabs64(s
, tmp
);
3020 gen_helper_nabs_i64(tmp
, tmp
);
3023 case 0x2: /* LT?GR */
3029 case 0x3: /* LC?GR */
3030 tcg_gen_neg_i64(regs
[r1
], tmp
);
3031 set_cc_comp64(s
, regs
[r1
]);
3034 tcg_temp_free_i64(tmp
);
3036 case 0x4: /* LGR R1,R2 [RRE] */
3037 store_reg(r1
, regs
[r2
]);
3039 case 0x6: /* LGBR R1,R2 [RRE] */
3040 tmp2
= load_reg(r2
);
3041 tcg_gen_ext8s_i64(tmp2
, tmp2
);
3042 store_reg(r1
, tmp2
);
3043 tcg_temp_free_i64(tmp2
);
/* Divide single: quotient is stored to r1+1, remainder to r1.
 * NOTE(review): division by zero is not guarded here -- tcg_gen_div
 * on a zero divisor is host-dependent, not a s390x program check. */
3045 case 0xd: /* DSGR R1,R2 [RRE] */
3046 case 0x1d: /* DSGFR R1,R2 [RRE] */
3047 tmp
= load_reg(r1
+ 1);
3049 tmp2
= load_reg(r2
);
3051 tmp32_1
= load_reg32(r2
);
3052 tmp2
= tcg_temp_new_i64();
3053 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
3054 tcg_temp_free_i32(tmp32_1
);
3056 tmp3
= tcg_temp_new_i64();
3057 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
3058 store_reg(r1
+ 1, tmp3
);
3059 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
3060 store_reg(r1
, tmp3
);
3061 tcg_temp_free_i64(tmp
);
3062 tcg_temp_free_i64(tmp2
);
3063 tcg_temp_free_i64(tmp3
);
3065 case 0x14: /* LGFR R1,R2 [RRE] */
3066 tmp32_1
= load_reg32(r2
);
3067 tmp
= tcg_temp_new_i64();
3068 tcg_gen_ext_i32_i64(tmp
, tmp32_1
);
3070 tcg_temp_free_i32(tmp32_1
);
3071 tcg_temp_free_i64(tmp
);
3073 case 0x16: /* LLGFR R1,R2 [RRE] */
3074 tmp32_1
= load_reg32(r2
);
3075 tmp
= tcg_temp_new_i64();
3076 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3078 tcg_temp_free_i32(tmp32_1
);
3079 tcg_temp_free_i64(tmp
);
/* LLGTR: zero-extend r2 with bit 32 (the "31-bit address" top bit)
 * cleared, hence the 0x7fffffff mask. */
3081 case 0x17: /* LLGTR R1,R2 [RRE] */
3082 tmp32_1
= load_reg32(r2
);
3083 tmp
= tcg_temp_new_i64();
3084 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0x7fffffffUL
);
3085 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3087 tcg_temp_free_i32(tmp32_1
);
3088 tcg_temp_free_i64(tmp
);
3090 case 0x0f: /* LRVGR R1,R2 [RRE] */
3091 tcg_gen_bswap64_i64(regs
[r1
], regs
[r2
]);
3093 case 0x1f: /* LRVR R1,R2 [RRE] */
3094 tmp32_1
= load_reg32(r2
);
3095 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
3096 store_reg32(r1
, tmp32_1
);
3097 tcg_temp_free_i32(tmp32_1
);
/* Signed compare; 0x30 CGFR first sign-extends the 32-bit r2. */
3099 case 0x20: /* CGR R1,R2 [RRE] */
3100 case 0x30: /* CGFR R1,R2 [RRE] */
3101 tmp2
= load_reg(r2
);
3103 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3106 cmp_s64(s
, tmp
, tmp2
);
3107 tcg_temp_free_i64(tmp
);
3108 tcg_temp_free_i64(tmp2
);
/* Unsigned compare; 0x31 CLGFR first zero-extends the 32-bit r2. */
3110 case 0x21: /* CLGR R1,R2 [RRE] */
3111 case 0x31: /* CLGFR R1,R2 [RRE] */
3112 tmp2
= load_reg(r2
);
3114 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3117 cmp_u64(s
, tmp
, tmp2
);
3118 tcg_temp_free_i64(tmp
);
3119 tcg_temp_free_i64(tmp2
);
3121 case 0x26: /* LBR R1,R2 [RRE] */
3122 tmp32_1
= load_reg32(r2
);
3123 tcg_gen_ext8s_i32(tmp32_1
, tmp32_1
);
3124 store_reg32(r1
, tmp32_1
);
3125 tcg_temp_free_i32(tmp32_1
);
3127 case 0x27: /* LHR R1,R2 [RRE] */
3128 tmp32_1
= load_reg32(r2
);
3129 tcg_gen_ext16s_i32(tmp32_1
, tmp32_1
);
3130 store_reg32(r1
, tmp32_1
);
3131 tcg_temp_free_i32(tmp32_1
);
/* 64-bit AND/OR/XOR; result also sets cc via nonzero test. */
3133 case 0x80: /* NGR R1,R2 [RRE] */
3134 case 0x81: /* OGR R1,R2 [RRE] */
3135 case 0x82: /* XGR R1,R2 [RRE] */
3137 tmp2
= load_reg(r2
);
3140 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
3143 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
3146 tcg_gen_xor_i64(tmp
, tmp
, tmp2
);
3152 set_cc_nz_u64(s
, tmp
);
3153 tcg_temp_free_i64(tmp
);
3154 tcg_temp_free_i64(tmp2
);
3156 case 0x83: /* FLOGR R1,R2 [RRE] */
3158 tmp32_1
= tcg_const_i32(r1
);
3159 gen_helper_flogr(cc_op
, cpu_env
, tmp32_1
, tmp
);
3161 tcg_temp_free_i64(tmp
);
3162 tcg_temp_free_i32(tmp32_1
);
3164 case 0x84: /* LLGCR R1,R2 [RRE] */
3166 tcg_gen_andi_i64(tmp
, tmp
, 0xff);
3168 tcg_temp_free_i64(tmp
);
3170 case 0x85: /* LLGHR R1,R2 [RRE] */
3172 tcg_gen_andi_i64(tmp
, tmp
, 0xffff);
3174 tcg_temp_free_i64(tmp
);
3176 case 0x87: /* DLGR R1,R2 [RRE] */
3177 tmp32_1
= tcg_const_i32(r1
);
3179 gen_helper_dlg(cpu_env
, tmp32_1
, tmp
);
3180 tcg_temp_free_i64(tmp
);
3181 tcg_temp_free_i32(tmp32_1
);
/* Add logical with carry: folds in (cc >> 1) & 1 as the carry bit,
 * then recomputes cc from the unsigned add. */
3183 case 0x88: /* ALCGR R1,R2 [RRE] */
3185 tmp2
= load_reg(r2
);
3186 tmp3
= tcg_temp_new_i64();
3188 tcg_gen_extu_i32_i64(tmp3
, cc_op
);
3189 tcg_gen_shri_i64(tmp3
, tmp3
, 1);
3190 tcg_gen_andi_i64(tmp3
, tmp3
, 1);
3191 tcg_gen_add_i64(tmp3
, tmp2
, tmp3
);
3192 tcg_gen_add_i64(tmp3
, tmp
, tmp3
);
3193 store_reg(r1
, tmp3
);
3194 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3195 tcg_temp_free_i64(tmp
);
3196 tcg_temp_free_i64(tmp2
);
3197 tcg_temp_free_i64(tmp3
);
3199 case 0x89: /* SLBGR R1,R2 [RRE] */
3201 tmp2
= load_reg(r2
);
3202 tmp32_1
= tcg_const_i32(r1
);
3204 gen_helper_slbg(cc_op
, cpu_env
, cc_op
, tmp32_1
, tmp
, tmp2
);
3206 tcg_temp_free_i64(tmp
);
3207 tcg_temp_free_i64(tmp2
);
3208 tcg_temp_free_i32(tmp32_1
);
3210 case 0x94: /* LLCR R1,R2 [RRE] */
3211 tmp32_1
= load_reg32(r2
);
3212 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0xff);
3213 store_reg32(r1
, tmp32_1
);
3214 tcg_temp_free_i32(tmp32_1
);
3216 case 0x95: /* LLHR R1,R2 [RRE] */
3217 tmp32_1
= load_reg32(r2
);
3218 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0xffff);
3219 store_reg32(r1
, tmp32_1
);
3220 tcg_temp_free_i32(tmp32_1
);
/* 32x32->64 unsigned multiply into the even/odd register pair. */
3222 case 0x96: /* MLR R1,R2 [RRE] */
3223 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3224 tmp2
= load_reg(r2
);
3225 tmp3
= load_reg((r1
+ 1) & 15);
3226 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3227 tcg_gen_ext32u_i64(tmp3
, tmp3
);
3228 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
3229 store_reg32_i64((r1
+ 1) & 15, tmp2
);
3230 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
3231 store_reg32_i64(r1
, tmp2
);
3232 tcg_temp_free_i64(tmp2
);
3233 tcg_temp_free_i64(tmp3
);
/* 64/32 unsigned divide of the r1:r1+1 pair. */
3235 case 0x97: /* DLR R1,R2 [RRE] */
3236 /* reg(r1) = reg(r1, r1+1) % reg(r2) */
3237 /* reg(r1+1) = reg(r1, r1+1) / reg(r2) */
3239 tmp2
= load_reg(r2
);
3240 tmp3
= load_reg((r1
+ 1) & 15);
3241 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3242 tcg_gen_ext32u_i64(tmp3
, tmp3
);
3243 tcg_gen_shli_i64(tmp
, tmp
, 32);
3244 tcg_gen_or_i64(tmp
, tmp
, tmp3
);
3246 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
3247 tcg_gen_div_i64(tmp
, tmp
, tmp2
);
3248 store_reg32_i64((r1
+ 1) & 15, tmp
);
3249 store_reg32_i64(r1
, tmp3
);
3250 tcg_temp_free_i64(tmp
);
3251 tcg_temp_free_i64(tmp2
);
3252 tcg_temp_free_i64(tmp3
);
3254 case 0x98: /* ALCR R1,R2 [RRE] */
3255 tmp32_1
= load_reg32(r1
);
3256 tmp32_2
= load_reg32(r2
);
3257 tmp32_3
= tcg_temp_new_i32();
3258 /* XXX possible optimization point */
3260 gen_helper_addc_u32(tmp32_3
, cc_op
, tmp32_1
, tmp32_2
);
3261 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3262 store_reg32(r1
, tmp32_3
);
3263 tcg_temp_free_i32(tmp32_1
);
3264 tcg_temp_free_i32(tmp32_2
);
3265 tcg_temp_free_i32(tmp32_3
);
3267 case 0x99: /* SLBR R1,R2 [RRE] */
3268 tmp32_1
= load_reg32(r2
);
3269 tmp32_2
= tcg_const_i32(r1
);
3271 gen_helper_slb(cc_op
, cpu_env
, cc_op
, tmp32_2
, tmp32_1
);
3273 tcg_temp_free_i32(tmp32_1
);
3274 tcg_temp_free_i32(tmp32_2
);
/* Unknown sub-opcode: log and raise an illegal-operation program check. */
3277 LOG_DISAS("illegal b9 operation 0x%x\n", op
);
3278 gen_illegal_opcode(s
);
/*
 * Translate one instruction of the s390x 0xC0 opcode family (RIL
 * format: register/mask + 32-bit immediate or pc-relative offset).
 * `target` is pc-relative: i2 counts halfwords, hence the `* 2LL`.
 *
 * NOTE(review): lossy extract -- the embedded original line numbers
 * jump (3284-3285, 3288-3289, 3291-3292, 3295, ...), so the `switch
 * (op) {` header, `break;` lines, several store_reg calls, label
 * emissions and closing braces are NOT visible here.  Code is left
 * byte-identical; comments only.
 */
3283 static void disas_c0(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
, int i2
)
3286 TCGv_i32 tmp32_1
, tmp32_2
;
3287 uint64_t target
= s
->pc
+ i2
* 2LL;
3290 LOG_DISAS("disas_c0: op 0x%x r1 %d i2 %d\n", op
, r1
, i2
);
/* LARL: load the pc-relative target address into r1 (store_reg on the
 * missing line 3295 -- TODO confirm). */
3293 case 0: /* larl r1, i2 */
3294 tmp
= tcg_const_i64(target
);
3296 tcg_temp_free_i64(tmp
);
3298 case 0x1: /* LGFI R1,I2 [RIL] */
3299 tmp
= tcg_const_i64((int64_t)i2
);
3301 tcg_temp_free_i64(tmp
);
/* BRCL: branch relative on condition long; r1 carries the mask m1.
 * m1 == 0xf means branch always; otherwise test cc at runtime. */
3303 case 0x4: /* BRCL M1,I2 [RIL] */
3304 if (r1
== 15) { /* m1 == r1 */
3305 gen_goto_tb(s
, 0, target
);
3306 s
->is_jmp
= DISAS_TB_JUMP
;
3309 /* m1 & (1 << (3 - cc)) */
3310 tmp32_1
= tcg_const_i32(3);
3311 tmp32_2
= tcg_const_i32(1);
3313 tcg_gen_sub_i32(tmp32_1
, tmp32_1
, cc_op
);
3314 tcg_gen_shl_i32(tmp32_2
, tmp32_2
, tmp32_1
);
3315 tcg_temp_free_i32(tmp32_1
);
3316 tmp32_1
= tcg_const_i32(r1
); /* m1 == r1 */
3317 tcg_gen_and_i32(tmp32_1
, tmp32_1
, tmp32_2
);
3318 l1
= gen_new_label();
3319 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp32_1
, 0, l1
);
3320 gen_goto_tb(s
, 0, target
);
/* (gen_set_label(l1) presumably sat on the missing line 3321.) */
3322 gen_goto_tb(s
, 1, s
->pc
+ 6);
3323 s
->is_jmp
= DISAS_TB_JUMP
;
3324 tcg_temp_free_i32(tmp32_1
);
3325 tcg_temp_free_i32(tmp32_2
);
/* BRASL: save return address (next insn, pc+6) then branch. */
3327 case 0x5: /* brasl r1, i2 */
3328 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 6));
3330 tcg_temp_free_i64(tmp
);
3331 gen_goto_tb(s
, 0, target
);
3332 s
->is_jmp
= DISAS_TB_JUMP
;
/* 32-bit immediate XOR/AND/OR on the low word of r1; sets cc. */
3334 case 0x7: /* XILF R1,I2 [RIL] */
3335 case 0xb: /* NILF R1,I2 [RIL] */
3336 case 0xd: /* OILF R1,I2 [RIL] */
3337 tmp32_1
= load_reg32(r1
);
3340 tcg_gen_xori_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3343 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3346 tcg_gen_ori_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3351 store_reg32(r1
, tmp32_1
);
3352 set_cc_nz_u32(s
, tmp32_1
);
3353 tcg_temp_free_i32(tmp32_1
);
3355 case 0x9: /* IILF R1,I2 [RIL] */
3356 tmp32_1
= tcg_const_i32((uint32_t)i2
);
3357 store_reg32(r1
, tmp32_1
);
3358 tcg_temp_free_i32(tmp32_1
);
/* NIHF: AND the immediate against the HIGH word of r1 (immediate
 * shifted up 32; the low-half mask from line 3364 is missing). */
3360 case 0xa: /* NIHF R1,I2 [RIL] */
3362 tmp32_1
= tcg_temp_new_i32();
3363 tcg_gen_andi_i64(tmp
, tmp
, (((uint64_t)((uint32_t)i2
)) << 32)
3366 tcg_gen_shri_i64(tmp
, tmp
, 32);
3367 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
3368 set_cc_nz_u32(s
, tmp32_1
);
3369 tcg_temp_free_i64(tmp
);
3370 tcg_temp_free_i32(tmp32_1
);
3372 case 0xe: /* LLIHF R1,I2 [RIL] */
3373 tmp
= tcg_const_i64(((uint64_t)(uint32_t)i2
) << 32);
3375 tcg_temp_free_i64(tmp
);
3377 case 0xf: /* LLILF R1,I2 [RIL] */
3378 tmp
= tcg_const_i64((uint32_t)i2
);
3380 tcg_temp_free_i64(tmp
);
/* Unknown sub-opcode: log and raise an illegal-operation program check. */
3383 LOG_DISAS("illegal c0 operation 0x%x\n", op
);
3384 gen_illegal_opcode(s
);
/*
 * Translate one instruction of the s390x 0xC2 opcode family (RIL
 * compare-with-immediate ops); each arm sets the condition code via
 * a cmp_* helper against the 32-bit immediate i2.
 *
 * NOTE(review): lossy extract -- the signature tail (i2), the opening
 * brace, the `switch (op) {` header, the load_reg calls feeding `tmp`
 * (3397/3402), `break;` lines and closing braces are NOT visible
 * here.  Code is left byte-identical; comments only.
 */
3389 static void disas_c2(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
/* 64-bit signed compare against sign-extended i2. */
3396 case 0xc: /* CGFI R1,I2 [RIL] */
3398 cmp_s64c(s
, tmp
, (int64_t)i2
);
3399 tcg_temp_free_i64(tmp
);
/* 64-bit unsigned compare against zero-extended i2. */
3401 case 0xe: /* CLGFI R1,I2 [RIL] */
3403 cmp_u64c(s
, tmp
, (uint64_t)(uint32_t)i2
);
3404 tcg_temp_free_i64(tmp
);
/* 32-bit signed / unsigned compares on the low word of r1. */
3406 case 0xd: /* CFI R1,I2 [RIL] */
3407 tmp32_1
= load_reg32(r1
);
3408 cmp_s32c(s
, tmp32_1
, i2
);
3409 tcg_temp_free_i32(tmp32_1
);
3411 case 0xf: /* CLFI R1,I2 [RIL] */
3412 tmp32_1
= load_reg32(r1
);
3413 cmp_u32c(s
, tmp32_1
, i2
);
3414 tcg_temp_free_i32(tmp32_1
);
/* Unknown sub-opcode: log and raise an illegal-operation program check. */
3417 LOG_DISAS("illegal c2 operation 0x%x\n", op
);
3418 gen_illegal_opcode(s
);
/*
 * Emit tmp = tmp (AND|OR|XOR) tmp2, selected by the low nibble of the
 * opcode.  The visible callers pass opcodes 0x14/0x16/0x17 (NR/OR/XR)
 * and 0x54/0x56/0x57 (N/O/X), i.e. nibbles 0x4/0x6/0x7.
 *
 * NOTE(review): lossy extract -- the opening brace, the `case` labels,
 * `break;` lines, the default arm and the closing braces are NOT
 * visible here; only the switch header and the three tcg ops remain.
 * Code is left byte-identical; comments only.
 */
3423 static void gen_and_or_xor_i32(int opc
, TCGv_i32 tmp
, TCGv_i32 tmp2
)
3425 switch (opc
& 0xf) {
3427 tcg_gen_and_i32(tmp
, tmp
, tmp2
);
3430 tcg_gen_or_i32(tmp
, tmp
, tmp2
);
3433 tcg_gen_xor_i32(tmp
, tmp
, tmp2
);
3440 static void disas_s390_insn(CPUS390XState
*env
, DisasContext
*s
)
3442 TCGv_i64 tmp
, tmp2
, tmp3
, tmp4
;
3443 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
, tmp32_4
;
3446 int op
, r1
, r2
, r3
, d1
, d2
, x2
, b1
, b2
, i
, i2
, r1b
;
3450 opc
= cpu_ldub_code(env
, s
->pc
);
3451 LOG_DISAS("opc 0x%x\n", opc
);
3454 #ifndef CONFIG_USER_ONLY
3455 case 0x01: /* SAM */
3456 insn
= ld_code2(env
, s
->pc
);
3457 /* set addressing mode, but we only do 64bit anyways */
3460 case 0x6: /* BCTR R1,R2 [RR] */
3461 insn
= ld_code2(env
, s
->pc
);
3462 decode_rr(s
, insn
, &r1
, &r2
);
3463 tmp32_1
= load_reg32(r1
);
3464 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
3465 store_reg32(r1
, tmp32_1
);
3468 gen_update_cc_op(s
);
3469 l1
= gen_new_label();
3470 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp32_1
, 0, l1
);
3472 /* not taking the branch, jump to after the instruction */
3473 gen_goto_tb(s
, 0, s
->pc
+ 2);
3476 /* take the branch, move R2 into psw.addr */
3477 tmp32_1
= load_reg32(r2
);
3478 tmp
= tcg_temp_new_i64();
3479 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3480 tcg_gen_mov_i64(psw_addr
, tmp
);
3481 s
->is_jmp
= DISAS_JUMP
;
3482 tcg_temp_free_i32(tmp32_1
);
3483 tcg_temp_free_i64(tmp
);
3486 case 0x7: /* BCR M1,R2 [RR] */
3487 insn
= ld_code2(env
, s
->pc
);
3488 decode_rr(s
, insn
, &r1
, &r2
);
3491 gen_bcr(s
, r1
, tmp
, s
->pc
);
3492 tcg_temp_free_i64(tmp
);
3493 s
->is_jmp
= DISAS_TB_JUMP
;
3495 /* XXX: "serialization and checkpoint-synchronization function"? */
3498 case 0xa: /* SVC I [RR] */
3499 insn
= ld_code2(env
, s
->pc
);
3504 tmp32_1
= tcg_const_i32(i
);
3505 tmp32_2
= tcg_const_i32(s
->next_pc
- s
->pc
);
3506 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, int_svc_code
));
3507 tcg_gen_st_i32(tmp32_2
, cpu_env
, offsetof(CPUS390XState
, int_svc_ilen
));
3508 gen_exception(EXCP_SVC
);
3509 s
->is_jmp
= DISAS_EXCP
;
3510 tcg_temp_free_i32(tmp32_1
);
3511 tcg_temp_free_i32(tmp32_2
);
3513 case 0xd: /* BASR R1,R2 [RR] */
3514 insn
= ld_code2(env
, s
->pc
);
3515 decode_rr(s
, insn
, &r1
, &r2
);
3516 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 2));
3519 tmp2
= load_reg(r2
);
3520 tcg_gen_mov_i64(psw_addr
, tmp2
);
3521 tcg_temp_free_i64(tmp2
);
3522 s
->is_jmp
= DISAS_JUMP
;
3524 tcg_temp_free_i64(tmp
);
3526 case 0xe: /* MVCL R1,R2 [RR] */
3527 insn
= ld_code2(env
, s
->pc
);
3528 decode_rr(s
, insn
, &r1
, &r2
);
3529 tmp32_1
= tcg_const_i32(r1
);
3530 tmp32_2
= tcg_const_i32(r2
);
3531 potential_page_fault(s
);
3532 gen_helper_mvcl(cc_op
, cpu_env
, tmp32_1
, tmp32_2
);
3534 tcg_temp_free_i32(tmp32_1
);
3535 tcg_temp_free_i32(tmp32_2
);
3537 case 0x10: /* LPR R1,R2 [RR] */
3538 insn
= ld_code2(env
, s
->pc
);
3539 decode_rr(s
, insn
, &r1
, &r2
);
3540 tmp32_1
= load_reg32(r2
);
3541 set_cc_abs32(s
, tmp32_1
);
3542 gen_helper_abs_i32(tmp32_1
, tmp32_1
);
3543 store_reg32(r1
, tmp32_1
);
3544 tcg_temp_free_i32(tmp32_1
);
3546 case 0x11: /* LNR R1,R2 [RR] */
3547 insn
= ld_code2(env
, s
->pc
);
3548 decode_rr(s
, insn
, &r1
, &r2
);
3549 tmp32_1
= load_reg32(r2
);
3550 set_cc_nabs32(s
, tmp32_1
);
3551 gen_helper_nabs_i32(tmp32_1
, tmp32_1
);
3552 store_reg32(r1
, tmp32_1
);
3553 tcg_temp_free_i32(tmp32_1
);
3555 case 0x12: /* LTR R1,R2 [RR] */
3556 insn
= ld_code2(env
, s
->pc
);
3557 decode_rr(s
, insn
, &r1
, &r2
);
3558 tmp32_1
= load_reg32(r2
);
3560 store_reg32(r1
, tmp32_1
);
3562 set_cc_s32(s
, tmp32_1
);
3563 tcg_temp_free_i32(tmp32_1
);
3565 case 0x13: /* LCR R1,R2 [RR] */
3566 insn
= ld_code2(env
, s
->pc
);
3567 decode_rr(s
, insn
, &r1
, &r2
);
3568 tmp32_1
= load_reg32(r2
);
3569 tcg_gen_neg_i32(tmp32_1
, tmp32_1
);
3570 store_reg32(r1
, tmp32_1
);
3571 set_cc_comp32(s
, tmp32_1
);
3572 tcg_temp_free_i32(tmp32_1
);
3574 case 0x14: /* NR R1,R2 [RR] */
3575 case 0x16: /* OR R1,R2 [RR] */
3576 case 0x17: /* XR R1,R2 [RR] */
3577 insn
= ld_code2(env
, s
->pc
);
3578 decode_rr(s
, insn
, &r1
, &r2
);
3579 tmp32_2
= load_reg32(r2
);
3580 tmp32_1
= load_reg32(r1
);
3581 gen_and_or_xor_i32(opc
, tmp32_1
, tmp32_2
);
3582 store_reg32(r1
, tmp32_1
);
3583 set_cc_nz_u32(s
, tmp32_1
);
3584 tcg_temp_free_i32(tmp32_1
);
3585 tcg_temp_free_i32(tmp32_2
);
3587 case 0x18: /* LR R1,R2 [RR] */
3588 insn
= ld_code2(env
, s
->pc
);
3589 decode_rr(s
, insn
, &r1
, &r2
);
3590 tmp32_1
= load_reg32(r2
);
3591 store_reg32(r1
, tmp32_1
);
3592 tcg_temp_free_i32(tmp32_1
);
3594 case 0x15: /* CLR R1,R2 [RR] */
3595 case 0x19: /* CR R1,R2 [RR] */
3596 insn
= ld_code2(env
, s
->pc
);
3597 decode_rr(s
, insn
, &r1
, &r2
);
3598 tmp32_1
= load_reg32(r1
);
3599 tmp32_2
= load_reg32(r2
);
3601 cmp_u32(s
, tmp32_1
, tmp32_2
);
3603 cmp_s32(s
, tmp32_1
, tmp32_2
);
3605 tcg_temp_free_i32(tmp32_1
);
3606 tcg_temp_free_i32(tmp32_2
);
3608 case 0x1c: /* MR R1,R2 [RR] */
3609 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3610 insn
= ld_code2(env
, s
->pc
);
3611 decode_rr(s
, insn
, &r1
, &r2
);
3612 tmp2
= load_reg(r2
);
3613 tmp3
= load_reg((r1
+ 1) & 15);
3614 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3615 tcg_gen_ext32s_i64(tmp3
, tmp3
);
3616 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
3617 store_reg32_i64((r1
+ 1) & 15, tmp2
);
3618 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
3619 store_reg32_i64(r1
, tmp2
);
3620 tcg_temp_free_i64(tmp2
);
3621 tcg_temp_free_i64(tmp3
);
3623 case 0x1d: /* DR R1,R2 [RR] */
3624 insn
= ld_code2(env
, s
->pc
);
3625 decode_rr(s
, insn
, &r1
, &r2
);
3626 tmp32_1
= load_reg32(r1
);
3627 tmp32_2
= load_reg32(r1
+ 1);
3628 tmp32_3
= load_reg32(r2
);
3630 tmp
= tcg_temp_new_i64(); /* dividend */
3631 tmp2
= tcg_temp_new_i64(); /* divisor */
3632 tmp3
= tcg_temp_new_i64();
3634 /* dividend is r(r1 << 32) | r(r1 + 1) */
3635 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3636 tcg_gen_extu_i32_i64(tmp2
, tmp32_2
);
3637 tcg_gen_shli_i64(tmp
, tmp
, 32);
3638 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
3640 /* divisor is r(r2) */
3641 tcg_gen_ext_i32_i64(tmp2
, tmp32_3
);
3643 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
3644 tcg_gen_rem_i64(tmp
, tmp
, tmp2
);
3646 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
3647 tcg_gen_trunc_i64_i32(tmp32_2
, tmp3
);
3649 store_reg32(r1
, tmp32_1
); /* remainder */
3650 store_reg32(r1
+ 1, tmp32_2
); /* quotient */
3651 tcg_temp_free_i32(tmp32_1
);
3652 tcg_temp_free_i32(tmp32_2
);
3653 tcg_temp_free_i32(tmp32_3
);
3654 tcg_temp_free_i64(tmp
);
3655 tcg_temp_free_i64(tmp2
);
3656 tcg_temp_free_i64(tmp3
);
3658 case 0x28: /* LDR R1,R2 [RR] */
3659 insn
= ld_code2(env
, s
->pc
);
3660 decode_rr(s
, insn
, &r1
, &r2
);
3661 tmp
= load_freg(r2
);
3662 store_freg(r1
, tmp
);
3663 tcg_temp_free_i64(tmp
);
3665 case 0x38: /* LER R1,R2 [RR] */
3666 insn
= ld_code2(env
, s
->pc
);
3667 decode_rr(s
, insn
, &r1
, &r2
);
3668 tmp32_1
= load_freg32(r2
);
3669 store_freg32(r1
, tmp32_1
);
3670 tcg_temp_free_i32(tmp32_1
);
3672 case 0x40: /* STH R1,D2(X2,B2) [RX] */
3673 insn
= ld_code4(env
, s
->pc
);
3674 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3675 tmp2
= load_reg(r1
);
3676 tcg_gen_qemu_st16(tmp2
, tmp
, get_mem_index(s
));
3677 tcg_temp_free_i64(tmp
);
3678 tcg_temp_free_i64(tmp2
);
3681 insn
= ld_code4(env
, s
->pc
);
3682 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3683 store_reg(r1
, tmp
); /* FIXME: 31/24-bit addressing */
3684 tcg_temp_free_i64(tmp
);
3686 case 0x42: /* STC R1,D2(X2,B2) [RX] */
3687 insn
= ld_code4(env
, s
->pc
);
3688 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3689 tmp2
= load_reg(r1
);
3690 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
3691 tcg_temp_free_i64(tmp
);
3692 tcg_temp_free_i64(tmp2
);
3694 case 0x43: /* IC R1,D2(X2,B2) [RX] */
3695 insn
= ld_code4(env
, s
->pc
);
3696 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3697 tmp2
= tcg_temp_new_i64();
3698 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
3699 store_reg8(r1
, tmp2
);
3700 tcg_temp_free_i64(tmp
);
3701 tcg_temp_free_i64(tmp2
);
3703 case 0x44: /* EX R1,D2(X2,B2) [RX] */
3704 insn
= ld_code4(env
, s
->pc
);
3705 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3706 tmp2
= load_reg(r1
);
3707 tmp3
= tcg_const_i64(s
->pc
+ 4);
3710 gen_helper_ex(cc_op
, cpu_env
, cc_op
, tmp2
, tmp
, tmp3
);
3712 tcg_temp_free_i64(tmp
);
3713 tcg_temp_free_i64(tmp2
);
3714 tcg_temp_free_i64(tmp3
);
3716 case 0x46: /* BCT R1,D2(X2,B2) [RX] */
3717 insn
= ld_code4(env
, s
->pc
);
3718 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3719 tcg_temp_free_i64(tmp
);
3721 tmp32_1
= load_reg32(r1
);
3722 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
3723 store_reg32(r1
, tmp32_1
);
3725 gen_update_cc_op(s
);
3726 l1
= gen_new_label();
3727 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp32_1
, 0, l1
);
3729 /* not taking the branch, jump to after the instruction */
3730 gen_goto_tb(s
, 0, s
->pc
+ 4);
3733 /* take the branch, move R2 into psw.addr */
3734 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3735 tcg_gen_mov_i64(psw_addr
, tmp
);
3736 s
->is_jmp
= DISAS_JUMP
;
3737 tcg_temp_free_i32(tmp32_1
);
3738 tcg_temp_free_i64(tmp
);
3740 case 0x47: /* BC M1,D2(X2,B2) [RX] */
3741 insn
= ld_code4(env
, s
->pc
);
3742 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3743 gen_bcr(s
, r1
, tmp
, s
->pc
+ 4);
3744 tcg_temp_free_i64(tmp
);
3745 s
->is_jmp
= DISAS_TB_JUMP
;
3747 case 0x48: /* LH R1,D2(X2,B2) [RX] */
3748 insn
= ld_code4(env
, s
->pc
);
3749 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3750 tmp2
= tcg_temp_new_i64();
3751 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
3752 store_reg32_i64(r1
, tmp2
);
3753 tcg_temp_free_i64(tmp
);
3754 tcg_temp_free_i64(tmp2
);
3756 case 0x49: /* CH R1,D2(X2,B2) [RX] */
3757 insn
= ld_code4(env
, s
->pc
);
3758 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3759 tmp32_1
= load_reg32(r1
);
3760 tmp32_2
= tcg_temp_new_i32();
3761 tmp2
= tcg_temp_new_i64();
3762 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
3763 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
3764 cmp_s32(s
, tmp32_1
, tmp32_2
);
3765 tcg_temp_free_i32(tmp32_1
);
3766 tcg_temp_free_i32(tmp32_2
);
3767 tcg_temp_free_i64(tmp
);
3768 tcg_temp_free_i64(tmp2
);
3770 case 0x4d: /* BAS R1,D2(X2,B2) [RX] */
3771 insn
= ld_code4(env
, s
->pc
);
3772 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3773 tmp2
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 4));
3774 store_reg(r1
, tmp2
);
3775 tcg_gen_mov_i64(psw_addr
, tmp
);
3776 tcg_temp_free_i64(tmp
);
3777 tcg_temp_free_i64(tmp2
);
3778 s
->is_jmp
= DISAS_JUMP
;
3780 case 0x4e: /* CVD R1,D2(X2,B2) [RX] */
3781 insn
= ld_code4(env
, s
->pc
);
3782 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3783 tmp2
= tcg_temp_new_i64();
3784 tmp32_1
= tcg_temp_new_i32();
3785 tcg_gen_trunc_i64_i32(tmp32_1
, regs
[r1
]);
3786 gen_helper_cvd(tmp2
, tmp32_1
);
3787 tcg_gen_qemu_st64(tmp2
, tmp
, get_mem_index(s
));
3788 tcg_temp_free_i64(tmp
);
3789 tcg_temp_free_i64(tmp2
);
3790 tcg_temp_free_i32(tmp32_1
);
3792 case 0x50: /* st r1, d2(x2, b2) */
3793 insn
= ld_code4(env
, s
->pc
);
3794 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3795 tmp2
= load_reg(r1
);
3796 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
3797 tcg_temp_free_i64(tmp
);
3798 tcg_temp_free_i64(tmp2
);
3800 case 0x55: /* CL R1,D2(X2,B2) [RX] */
3801 insn
= ld_code4(env
, s
->pc
);
3802 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3803 tmp2
= tcg_temp_new_i64();
3804 tmp32_1
= tcg_temp_new_i32();
3805 tmp32_2
= load_reg32(r1
);
3806 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
3807 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
3808 cmp_u32(s
, tmp32_2
, tmp32_1
);
3809 tcg_temp_free_i64(tmp
);
3810 tcg_temp_free_i64(tmp2
);
3811 tcg_temp_free_i32(tmp32_1
);
3812 tcg_temp_free_i32(tmp32_2
);
3814 case 0x54: /* N R1,D2(X2,B2) [RX] */
3815 case 0x56: /* O R1,D2(X2,B2) [RX] */
3816 case 0x57: /* X R1,D2(X2,B2) [RX] */
3817 insn
= ld_code4(env
, s
->pc
);
3818 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3819 tmp2
= tcg_temp_new_i64();
3820 tmp32_1
= load_reg32(r1
);
3821 tmp32_2
= tcg_temp_new_i32();
3822 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
3823 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
3824 gen_and_or_xor_i32(opc
, tmp32_1
, tmp32_2
);
3825 store_reg32(r1
, tmp32_1
);
3826 set_cc_nz_u32(s
, tmp32_1
);
3827 tcg_temp_free_i64(tmp
);
3828 tcg_temp_free_i64(tmp2
);
3829 tcg_temp_free_i32(tmp32_1
);
3830 tcg_temp_free_i32(tmp32_2
);
3832 case 0x58: /* l r1, d2(x2, b2) */
3833 insn
= ld_code4(env
, s
->pc
);
3834 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3835 tmp2
= tcg_temp_new_i64();
3836 tmp32_1
= tcg_temp_new_i32();
3837 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
3838 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
3839 store_reg32(r1
, tmp32_1
);
3840 tcg_temp_free_i64(tmp
);
3841 tcg_temp_free_i64(tmp2
);
3842 tcg_temp_free_i32(tmp32_1
);
3844 case 0x59: /* C R1,D2(X2,B2) [RX] */
3845 insn
= ld_code4(env
, s
->pc
);
3846 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3847 tmp2
= tcg_temp_new_i64();
3848 tmp32_1
= tcg_temp_new_i32();
3849 tmp32_2
= load_reg32(r1
);
3850 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
3851 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
3852 cmp_s32(s
, tmp32_2
, tmp32_1
);
3853 tcg_temp_free_i64(tmp
);
3854 tcg_temp_free_i64(tmp2
);
3855 tcg_temp_free_i32(tmp32_1
);
3856 tcg_temp_free_i32(tmp32_2
);
3858 case 0x5c: /* M R1,D2(X2,B2) [RX] */
3859 /* reg(r1, r1+1) = reg(r1+1) * *(s32*)addr */
3860 insn
= ld_code4(env
, s
->pc
);
3861 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3862 tmp2
= tcg_temp_new_i64();
3863 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
3864 tmp3
= load_reg((r1
+ 1) & 15);
3865 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3866 tcg_gen_ext32s_i64(tmp3
, tmp3
);
3867 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
3868 store_reg32_i64((r1
+ 1) & 15, tmp2
);
3869 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
3870 store_reg32_i64(r1
, tmp2
);
3871 tcg_temp_free_i64(tmp
);
3872 tcg_temp_free_i64(tmp2
);
3873 tcg_temp_free_i64(tmp3
);
3875 case 0x5d: /* D R1,D2(X2,B2) [RX] */
3876 insn
= ld_code4(env
, s
->pc
);
3877 tmp3
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3878 tmp32_1
= load_reg32(r1
);
3879 tmp32_2
= load_reg32(r1
+ 1);
3881 tmp
= tcg_temp_new_i64();
3882 tmp2
= tcg_temp_new_i64();
3884 /* dividend is r(r1 << 32) | r(r1 + 1) */
3885 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3886 tcg_gen_extu_i32_i64(tmp2
, tmp32_2
);
3887 tcg_gen_shli_i64(tmp
, tmp
, 32);
3888 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
3890 /* divisor is in memory */
3891 tcg_gen_qemu_ld32s(tmp2
, tmp3
, get_mem_index(s
));
3893 /* XXX divisor == 0 -> FixP divide exception */
3895 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
3896 tcg_gen_rem_i64(tmp
, tmp
, tmp2
);
3898 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
3899 tcg_gen_trunc_i64_i32(tmp32_2
, tmp3
);
3901 store_reg32(r1
, tmp32_1
); /* remainder */
3902 store_reg32(r1
+ 1, tmp32_2
); /* quotient */
3903 tcg_temp_free_i32(tmp32_1
);
3904 tcg_temp_free_i32(tmp32_2
);
3905 tcg_temp_free_i64(tmp
);
3906 tcg_temp_free_i64(tmp2
);
3907 tcg_temp_free_i64(tmp3
);
3909 case 0x60: /* STD R1,D2(X2,B2) [RX] */
3910 insn
= ld_code4(env
, s
->pc
);
3911 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3912 tmp2
= load_freg(r1
);
3913 tcg_gen_qemu_st64(tmp2
, tmp
, get_mem_index(s
));
3914 tcg_temp_free_i64(tmp
);
3915 tcg_temp_free_i64(tmp2
);
3917 case 0x68: /* LD R1,D2(X2,B2) [RX] */
3918 insn
= ld_code4(env
, s
->pc
);
3919 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3920 tmp2
= tcg_temp_new_i64();
3921 tcg_gen_qemu_ld64(tmp2
, tmp
, get_mem_index(s
));
3922 store_freg(r1
, tmp2
);
3923 tcg_temp_free_i64(tmp
);
3924 tcg_temp_free_i64(tmp2
);
3926 case 0x70: /* STE R1,D2(X2,B2) [RX] */
3927 insn
= ld_code4(env
, s
->pc
);
3928 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3929 tmp2
= tcg_temp_new_i64();
3930 tmp32_1
= load_freg32(r1
);
3931 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
3932 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
3933 tcg_temp_free_i64(tmp
);
3934 tcg_temp_free_i64(tmp2
);
3935 tcg_temp_free_i32(tmp32_1
);
3937 case 0x78: /* LE R1,D2(X2,B2) [RX] */
3938 insn
= ld_code4(env
, s
->pc
);
3939 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3940 tmp2
= tcg_temp_new_i64();
3941 tmp32_1
= tcg_temp_new_i32();
3942 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
3943 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
3944 store_freg32(r1
, tmp32_1
);
3945 tcg_temp_free_i64(tmp
);
3946 tcg_temp_free_i64(tmp2
);
3947 tcg_temp_free_i32(tmp32_1
);
3949 #ifndef CONFIG_USER_ONLY
3950 case 0x80: /* SSM D2(B2) [S] */
3951 /* Set System Mask */
3952 check_privileged(s
);
3953 insn
= ld_code4(env
, s
->pc
);
3954 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
3955 tmp
= get_address(s
, 0, b2
, d2
);
3956 tmp2
= tcg_temp_new_i64();
3957 tmp3
= tcg_temp_new_i64();
3958 tcg_gen_andi_i64(tmp3
, psw_mask
, ~0xff00000000000000ULL
);
3959 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
3960 tcg_gen_shli_i64(tmp2
, tmp2
, 56);
3961 tcg_gen_or_i64(psw_mask
, tmp3
, tmp2
);
3962 tcg_temp_free_i64(tmp
);
3963 tcg_temp_free_i64(tmp2
);
3964 tcg_temp_free_i64(tmp3
);
3966 case 0x82: /* LPSW D2(B2) [S] */
3968 check_privileged(s
);
3969 insn
= ld_code4(env
, s
->pc
);
3970 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
3971 tmp
= get_address(s
, 0, b2
, d2
);
3972 tmp2
= tcg_temp_new_i64();
3973 tmp3
= tcg_temp_new_i64();
3974 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
3975 tcg_gen_addi_i64(tmp
, tmp
, 4);
3976 tcg_gen_qemu_ld32u(tmp3
, tmp
, get_mem_index(s
));
3977 /* Convert the 32-bit PSW_MASK into the 64-bit PSW_MASK. */
3978 tcg_gen_shli_i64(tmp2
, tmp2
, 32);
3979 gen_helper_load_psw(cpu_env
, tmp2
, tmp3
);
3980 tcg_temp_free_i64(tmp
);
3981 tcg_temp_free_i64(tmp2
);
3982 tcg_temp_free_i64(tmp3
);
3983 /* we need to keep cc_op intact */
3984 s
->is_jmp
= DISAS_JUMP
;
3986 case 0x83: /* DIAG R1,R3,D2 [RS] */
3987 /* Diagnose call (KVM hypercall) */
3988 check_privileged(s
);
3989 potential_page_fault(s
);
3990 insn
= ld_code4(env
, s
->pc
);
3991 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
3992 tmp32_1
= tcg_const_i32(insn
& 0xfff);
3995 gen_helper_diag(tmp2
, cpu_env
, tmp32_1
, tmp2
, tmp3
);
3997 tcg_temp_free_i32(tmp32_1
);
3998 tcg_temp_free_i64(tmp2
);
3999 tcg_temp_free_i64(tmp3
);
4002 case 0x88: /* SRL R1,D2(B2) [RS] */
4003 case 0x89: /* SLL R1,D2(B2) [RS] */
4004 case 0x8a: /* SRA R1,D2(B2) [RS] */
4005 insn
= ld_code4(env
, s
->pc
);
4006 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4007 tmp
= get_address(s
, 0, b2
, d2
);
4008 tmp32_1
= load_reg32(r1
);
4009 tmp32_2
= tcg_temp_new_i32();
4010 tcg_gen_trunc_i64_i32(tmp32_2
, tmp
);
4011 tcg_gen_andi_i32(tmp32_2
, tmp32_2
, 0x3f);
4014 tcg_gen_shr_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4017 tcg_gen_shl_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4020 tcg_gen_sar_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4021 set_cc_s32(s
, tmp32_1
);
4026 store_reg32(r1
, tmp32_1
);
4027 tcg_temp_free_i64(tmp
);
4028 tcg_temp_free_i32(tmp32_1
);
4029 tcg_temp_free_i32(tmp32_2
);
4031 case 0x8c: /* SRDL R1,D2(B2) [RS] */
4032 case 0x8d: /* SLDL R1,D2(B2) [RS] */
4033 case 0x8e: /* SRDA R1,D2(B2) [RS] */
4034 insn
= ld_code4(env
, s
->pc
);
4035 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4036 tmp
= get_address(s
, 0, b2
, d2
); /* shift */
4037 tmp2
= tcg_temp_new_i64();
4038 tmp32_1
= load_reg32(r1
);
4039 tmp32_2
= load_reg32(r1
+ 1);
4040 tcg_gen_concat_i32_i64(tmp2
, tmp32_2
, tmp32_1
); /* operand */
4043 tcg_gen_shr_i64(tmp2
, tmp2
, tmp
);
4046 tcg_gen_shl_i64(tmp2
, tmp2
, tmp
);
4049 tcg_gen_sar_i64(tmp2
, tmp2
, tmp
);
4050 set_cc_s64(s
, tmp2
);
4053 tcg_gen_shri_i64(tmp
, tmp2
, 32);
4054 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4055 store_reg32(r1
, tmp32_1
);
4056 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4057 store_reg32(r1
+ 1, tmp32_2
);
4058 tcg_temp_free_i64(tmp
);
4059 tcg_temp_free_i64(tmp2
);
4061 case 0x98: /* LM R1,R3,D2(B2) [RS] */
4062 case 0x90: /* STM R1,R3,D2(B2) [RS] */
4063 insn
= ld_code4(env
, s
->pc
);
4064 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4066 tmp
= get_address(s
, 0, b2
, d2
);
4067 tmp2
= tcg_temp_new_i64();
4068 tmp3
= tcg_const_i64(4);
4069 tmp4
= tcg_const_i64(0xffffffff00000000ULL
);
4070 for (i
= r1
;; i
= (i
+ 1) % 16) {
4072 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4073 tcg_gen_and_i64(regs
[i
], regs
[i
], tmp4
);
4074 tcg_gen_or_i64(regs
[i
], regs
[i
], tmp2
);
4076 tcg_gen_qemu_st32(regs
[i
], tmp
, get_mem_index(s
));
4081 tcg_gen_add_i64(tmp
, tmp
, tmp3
);
4083 tcg_temp_free_i64(tmp
);
4084 tcg_temp_free_i64(tmp2
);
4085 tcg_temp_free_i64(tmp3
);
4086 tcg_temp_free_i64(tmp4
);
4088 case 0x91: /* TM D1(B1),I2 [SI] */
4089 insn
= ld_code4(env
, s
->pc
);
4090 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4091 tmp2
= tcg_const_i64(i2
);
4092 tcg_gen_qemu_ld8u(tmp
, tmp
, get_mem_index(s
));
4093 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_32
);
4094 tcg_temp_free_i64(tmp
);
4095 tcg_temp_free_i64(tmp2
);
4097 case 0x92: /* MVI D1(B1),I2 [SI] */
4098 insn
= ld_code4(env
, s
->pc
);
4099 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4100 tmp2
= tcg_const_i64(i2
);
4101 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4102 tcg_temp_free_i64(tmp
);
4103 tcg_temp_free_i64(tmp2
);
4105 case 0x94: /* NI D1(B1),I2 [SI] */
4106 case 0x96: /* OI D1(B1),I2 [SI] */
4107 case 0x97: /* XI D1(B1),I2 [SI] */
4108 insn
= ld_code4(env
, s
->pc
);
4109 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4110 tmp2
= tcg_temp_new_i64();
4111 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4114 tcg_gen_andi_i64(tmp2
, tmp2
, i2
);
4117 tcg_gen_ori_i64(tmp2
, tmp2
, i2
);
4120 tcg_gen_xori_i64(tmp2
, tmp2
, i2
);
4125 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4126 set_cc_nz_u64(s
, tmp2
);
4127 tcg_temp_free_i64(tmp
);
4128 tcg_temp_free_i64(tmp2
);
4130 case 0x95: /* CLI D1(B1),I2 [SI] */
4131 insn
= ld_code4(env
, s
->pc
);
4132 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4133 tmp2
= tcg_temp_new_i64();
4134 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4135 cmp_u64c(s
, tmp2
, i2
);
4136 tcg_temp_free_i64(tmp
);
4137 tcg_temp_free_i64(tmp2
);
4139 case 0x9a: /* LAM R1,R3,D2(B2) [RS] */
4140 insn
= ld_code4(env
, s
->pc
);
4141 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4142 tmp
= get_address(s
, 0, b2
, d2
);
4143 tmp32_1
= tcg_const_i32(r1
);
4144 tmp32_2
= tcg_const_i32(r3
);
4145 potential_page_fault(s
);
4146 gen_helper_lam(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4147 tcg_temp_free_i64(tmp
);
4148 tcg_temp_free_i32(tmp32_1
);
4149 tcg_temp_free_i32(tmp32_2
);
4151 case 0x9b: /* STAM R1,R3,D2(B2) [RS] */
4152 insn
= ld_code4(env
, s
->pc
);
4153 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4154 tmp
= get_address(s
, 0, b2
, d2
);
4155 tmp32_1
= tcg_const_i32(r1
);
4156 tmp32_2
= tcg_const_i32(r3
);
4157 potential_page_fault(s
);
4158 gen_helper_stam(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4159 tcg_temp_free_i64(tmp
);
4160 tcg_temp_free_i32(tmp32_1
);
4161 tcg_temp_free_i32(tmp32_2
);
4164 insn
= ld_code4(env
, s
->pc
);
4165 r1
= (insn
>> 20) & 0xf;
4166 op
= (insn
>> 16) & 0xf;
4168 disas_a5(env
, s
, op
, r1
, i2
);
4171 insn
= ld_code4(env
, s
->pc
);
4172 r1
= (insn
>> 20) & 0xf;
4173 op
= (insn
>> 16) & 0xf;
4175 disas_a7(env
, s
, op
, r1
, i2
);
4177 case 0xa8: /* MVCLE R1,R3,D2(B2) [RS] */
4178 insn
= ld_code4(env
, s
->pc
);
4179 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4180 tmp
= get_address(s
, 0, b2
, d2
);
4181 tmp32_1
= tcg_const_i32(r1
);
4182 tmp32_2
= tcg_const_i32(r3
);
4183 potential_page_fault(s
);
4184 gen_helper_mvcle(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4186 tcg_temp_free_i64(tmp
);
4187 tcg_temp_free_i32(tmp32_1
);
4188 tcg_temp_free_i32(tmp32_2
);
4190 case 0xa9: /* CLCLE R1,R3,D2(B2) [RS] */
4191 insn
= ld_code4(env
, s
->pc
);
4192 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4193 tmp
= get_address(s
, 0, b2
, d2
);
4194 tmp32_1
= tcg_const_i32(r1
);
4195 tmp32_2
= tcg_const_i32(r3
);
4196 potential_page_fault(s
);
4197 gen_helper_clcle(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4199 tcg_temp_free_i64(tmp
);
4200 tcg_temp_free_i32(tmp32_1
);
4201 tcg_temp_free_i32(tmp32_2
);
4203 #ifndef CONFIG_USER_ONLY
4204 case 0xac: /* STNSM D1(B1),I2 [SI] */
4205 case 0xad: /* STOSM D1(B1),I2 [SI] */
4206 check_privileged(s
);
4207 insn
= ld_code4(env
, s
->pc
);
4208 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4209 tmp2
= tcg_temp_new_i64();
4210 tcg_gen_shri_i64(tmp2
, psw_mask
, 56);
4211 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4213 tcg_gen_andi_i64(psw_mask
, psw_mask
,
4214 ((uint64_t)i2
<< 56) | 0x00ffffffffffffffULL
);
4216 tcg_gen_ori_i64(psw_mask
, psw_mask
, (uint64_t)i2
<< 56);
4218 tcg_temp_free_i64(tmp
);
4219 tcg_temp_free_i64(tmp2
);
4221 case 0xae: /* SIGP R1,R3,D2(B2) [RS] */
4222 check_privileged(s
);
4223 insn
= ld_code4(env
, s
->pc
);
4224 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4225 tmp
= get_address(s
, 0, b2
, d2
);
4226 tmp2
= load_reg(r3
);
4227 tmp32_1
= tcg_const_i32(r1
);
4228 potential_page_fault(s
);
4229 gen_helper_sigp(cc_op
, cpu_env
, tmp
, tmp32_1
, tmp2
);
4231 tcg_temp_free_i64(tmp
);
4232 tcg_temp_free_i64(tmp2
);
4233 tcg_temp_free_i32(tmp32_1
);
4235 case 0xb1: /* LRA R1,D2(X2, B2) [RX] */
4236 check_privileged(s
);
4237 insn
= ld_code4(env
, s
->pc
);
4238 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4239 tmp32_1
= tcg_const_i32(r1
);
4240 potential_page_fault(s
);
4241 gen_helper_lra(cc_op
, cpu_env
, tmp
, tmp32_1
);
4243 tcg_temp_free_i64(tmp
);
4244 tcg_temp_free_i32(tmp32_1
);
4248 insn
= ld_code4(env
, s
->pc
);
4249 op
= (insn
>> 16) & 0xff;
4251 case 0x9c: /* STFPC D2(B2) [S] */
4253 b2
= (insn
>> 12) & 0xf;
4254 tmp32_1
= tcg_temp_new_i32();
4255 tmp
= tcg_temp_new_i64();
4256 tmp2
= get_address(s
, 0, b2
, d2
);
4257 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, fpc
));
4258 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
4259 tcg_gen_qemu_st32(tmp
, tmp2
, get_mem_index(s
));
4260 tcg_temp_free_i32(tmp32_1
);
4261 tcg_temp_free_i64(tmp
);
4262 tcg_temp_free_i64(tmp2
);
4265 disas_b2(env
, s
, op
, insn
);
4270 insn
= ld_code4(env
, s
->pc
);
4271 op
= (insn
>> 16) & 0xff;
4272 r3
= (insn
>> 12) & 0xf; /* aka m3 */
4273 r1
= (insn
>> 4) & 0xf;
4275 disas_b3(env
, s
, op
, r3
, r1
, r2
);
4277 #ifndef CONFIG_USER_ONLY
4278 case 0xb6: /* STCTL R1,R3,D2(B2) [RS] */
4280 check_privileged(s
);
4281 insn
= ld_code4(env
, s
->pc
);
4282 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4283 tmp
= get_address(s
, 0, b2
, d2
);
4284 tmp32_1
= tcg_const_i32(r1
);
4285 tmp32_2
= tcg_const_i32(r3
);
4286 potential_page_fault(s
);
4287 gen_helper_stctl(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4288 tcg_temp_free_i64(tmp
);
4289 tcg_temp_free_i32(tmp32_1
);
4290 tcg_temp_free_i32(tmp32_2
);
4292 case 0xb7: /* LCTL R1,R3,D2(B2) [RS] */
4294 check_privileged(s
);
4295 insn
= ld_code4(env
, s
->pc
);
4296 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4297 tmp
= get_address(s
, 0, b2
, d2
);
4298 tmp32_1
= tcg_const_i32(r1
);
4299 tmp32_2
= tcg_const_i32(r3
);
4300 potential_page_fault(s
);
4301 gen_helper_lctl(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4302 tcg_temp_free_i64(tmp
);
4303 tcg_temp_free_i32(tmp32_1
);
4304 tcg_temp_free_i32(tmp32_2
);
4308 insn
= ld_code4(env
, s
->pc
);
4309 r1
= (insn
>> 4) & 0xf;
4311 op
= (insn
>> 16) & 0xff;
4312 disas_b9(env
, s
, op
, r1
, r2
);
4314 case 0xba: /* CS R1,R3,D2(B2) [RS] */
4315 insn
= ld_code4(env
, s
->pc
);
4316 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4317 tmp
= get_address(s
, 0, b2
, d2
);
4318 tmp32_1
= tcg_const_i32(r1
);
4319 tmp32_2
= tcg_const_i32(r3
);
4320 potential_page_fault(s
);
4321 gen_helper_cs(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4323 tcg_temp_free_i64(tmp
);
4324 tcg_temp_free_i32(tmp32_1
);
4325 tcg_temp_free_i32(tmp32_2
);
4327 case 0xbd: /* CLM R1,M3,D2(B2) [RS] */
4328 insn
= ld_code4(env
, s
->pc
);
4329 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4330 tmp
= get_address(s
, 0, b2
, d2
);
4331 tmp32_1
= load_reg32(r1
);
4332 tmp32_2
= tcg_const_i32(r3
);
4333 potential_page_fault(s
);
4334 gen_helper_clm(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp
);
4336 tcg_temp_free_i64(tmp
);
4337 tcg_temp_free_i32(tmp32_1
);
4338 tcg_temp_free_i32(tmp32_2
);
4340 case 0xbe: /* STCM R1,M3,D2(B2) [RS] */
4341 insn
= ld_code4(env
, s
->pc
);
4342 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4343 tmp
= get_address(s
, 0, b2
, d2
);
4344 tmp32_1
= load_reg32(r1
);
4345 tmp32_2
= tcg_const_i32(r3
);
4346 potential_page_fault(s
);
4347 gen_helper_stcm(cpu_env
, tmp32_1
, tmp32_2
, tmp
);
4348 tcg_temp_free_i64(tmp
);
4349 tcg_temp_free_i32(tmp32_1
);
4350 tcg_temp_free_i32(tmp32_2
);
4352 case 0xbf: /* ICM R1,M3,D2(B2) [RS] */
4353 insn
= ld_code4(env
, s
->pc
);
4354 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4356 /* effectively a 32-bit load */
4357 tmp
= get_address(s
, 0, b2
, d2
);
4358 tmp32_1
= tcg_temp_new_i32();
4359 tmp32_2
= tcg_const_i32(r3
);
4360 tcg_gen_qemu_ld32u(tmp
, tmp
, get_mem_index(s
));
4361 store_reg32_i64(r1
, tmp
);
4362 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4363 set_cc_icm(s
, tmp32_2
, tmp32_1
);
4364 tcg_temp_free_i64(tmp
);
4365 tcg_temp_free_i32(tmp32_1
);
4366 tcg_temp_free_i32(tmp32_2
);
4368 uint32_t mask
= 0x00ffffffUL
;
4369 uint32_t shift
= 24;
4371 tmp
= get_address(s
, 0, b2
, d2
);
4372 tmp2
= tcg_temp_new_i64();
4373 tmp32_1
= load_reg32(r1
);
4374 tmp32_2
= tcg_temp_new_i32();
4375 tmp32_3
= tcg_const_i32(r3
);
4376 tmp32_4
= tcg_const_i32(0);
4379 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4380 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4382 tcg_gen_shli_i32(tmp32_2
, tmp32_2
, shift
);
4384 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, mask
);
4385 tcg_gen_or_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4386 tcg_gen_or_i32(tmp32_4
, tmp32_4
, tmp32_2
);
4387 tcg_gen_addi_i64(tmp
, tmp
, 1);
4389 m3
= (m3
<< 1) & 0xf;
4390 mask
= (mask
>> 8) | 0xff000000UL
;
4393 store_reg32(r1
, tmp32_1
);
4394 set_cc_icm(s
, tmp32_3
, tmp32_4
);
4395 tcg_temp_free_i64(tmp
);
4396 tcg_temp_free_i64(tmp2
);
4397 tcg_temp_free_i32(tmp32_1
);
4398 tcg_temp_free_i32(tmp32_2
);
4399 tcg_temp_free_i32(tmp32_3
);
4400 tcg_temp_free_i32(tmp32_4
);
4402 /* i.e. env->cc = 0 */
4403 gen_op_movi_cc(s
, 0);
4408 insn
= ld_code6(env
, s
->pc
);
4409 r1
= (insn
>> 36) & 0xf;
4410 op
= (insn
>> 32) & 0xf;
4414 disas_c0(env
, s
, op
, r1
, i2
);
4417 disas_c2(env
, s
, op
, r1
, i2
);
4423 case 0xd2: /* MVC D1(L,B1),D2(B2) [SS] */
4424 case 0xd4: /* NC D1(L,B1),D2(B2) [SS] */
4425 case 0xd5: /* CLC D1(L,B1),D2(B2) [SS] */
4426 case 0xd6: /* OC D1(L,B1),D2(B2) [SS] */
4427 case 0xd7: /* XC D1(L,B1),D2(B2) [SS] */
4428 case 0xdc: /* TR D1(L,B1),D2(B2) [SS] */
4429 case 0xf3: /* UNPK D1(L1,B1),D2(L2,B2) [SS] */
4430 insn
= ld_code6(env
, s
->pc
);
4431 vl
= tcg_const_i32((insn
>> 32) & 0xff);
4432 b1
= (insn
>> 28) & 0xf;
4433 b2
= (insn
>> 12) & 0xf;
4434 d1
= (insn
>> 16) & 0xfff;
4436 tmp
= get_address(s
, 0, b1
, d1
);
4437 tmp2
= get_address(s
, 0, b2
, d2
);
4440 gen_op_mvc(s
, (insn
>> 32) & 0xff, tmp
, tmp2
);
4443 potential_page_fault(s
);
4444 gen_helper_nc(cc_op
, cpu_env
, vl
, tmp
, tmp2
);
4448 gen_op_clc(s
, (insn
>> 32) & 0xff, tmp
, tmp2
);
4451 potential_page_fault(s
);
4452 gen_helper_oc(cc_op
, cpu_env
, vl
, tmp
, tmp2
);
4456 potential_page_fault(s
);
4457 gen_helper_xc(cc_op
, cpu_env
, vl
, tmp
, tmp2
);
4461 potential_page_fault(s
);
4462 gen_helper_tr(cpu_env
, vl
, tmp
, tmp2
);
4466 potential_page_fault(s
);
4467 gen_helper_unpk(cpu_env
, vl
, tmp
, tmp2
);
4472 tcg_temp_free_i64(tmp
);
4473 tcg_temp_free_i64(tmp2
);
4475 #ifndef CONFIG_USER_ONLY
4476 case 0xda: /* MVCP D1(R1,B1),D2(B2),R3 [SS] */
4477 case 0xdb: /* MVCS D1(R1,B1),D2(B2),R3 [SS] */
4478 check_privileged(s
);
4479 potential_page_fault(s
);
4480 insn
= ld_code6(env
, s
->pc
);
4481 r1
= (insn
>> 36) & 0xf;
4482 r3
= (insn
>> 32) & 0xf;
4483 b1
= (insn
>> 28) & 0xf;
4484 d1
= (insn
>> 16) & 0xfff;
4485 b2
= (insn
>> 12) & 0xf;
4489 tmp2
= get_address(s
, 0, b1
, d1
);
4490 tmp3
= get_address(s
, 0, b2
, d2
);
4492 gen_helper_mvcp(cc_op
, cpu_env
, tmp
, tmp2
, tmp3
);
4494 gen_helper_mvcs(cc_op
, cpu_env
, tmp
, tmp2
, tmp3
);
4497 tcg_temp_free_i64(tmp
);
4498 tcg_temp_free_i64(tmp2
);
4499 tcg_temp_free_i64(tmp3
);
4503 insn
= ld_code6(env
, s
->pc
);
4506 r1
= (insn
>> 36) & 0xf;
4507 x2
= (insn
>> 32) & 0xf;
4508 b2
= (insn
>> 28) & 0xf;
4509 d2
= ((int)((((insn
>> 16) & 0xfff)
4510 | ((insn
<< 4) & 0xff000)) << 12)) >> 12;
4511 disas_e3(env
, s
, op
, r1
, x2
, b2
, d2
);
4513 #ifndef CONFIG_USER_ONLY
4515 /* Test Protection */
4516 check_privileged(s
);
4517 insn
= ld_code6(env
, s
->pc
);
4519 disas_e5(env
, s
, insn
);
4523 insn
= ld_code6(env
, s
->pc
);
4526 r1
= (insn
>> 36) & 0xf;
4527 r3
= (insn
>> 32) & 0xf;
4528 b2
= (insn
>> 28) & 0xf;
4529 d2
= ((int)((((insn
>> 16) & 0xfff)
4530 | ((insn
<< 4) & 0xff000)) << 12)) >> 12;
4531 disas_eb(env
, s
, op
, r1
, r3
, b2
, d2
);
4534 insn
= ld_code6(env
, s
->pc
);
4537 r1
= (insn
>> 36) & 0xf;
4538 x2
= (insn
>> 32) & 0xf;
4539 b2
= (insn
>> 28) & 0xf;
4540 d2
= (short)((insn
>> 16) & 0xfff);
4541 r1b
= (insn
>> 12) & 0xf;
4542 disas_ed(env
, s
, op
, r1
, x2
, b2
, d2
, r1b
);
4545 qemu_log_mask(LOG_UNIMP
, "unimplemented opcode 0x%x\n", opc
);
4546 gen_illegal_opcode(s
);
4551 /* ====================================================================== */
4552 /* Define the insn format enumeration. */
4553 #define F0(N) FMT_##N,
4554 #define F1(N, X1) F0(N)
4555 #define F2(N, X1, X2) F0(N)
4556 #define F3(N, X1, X2, X3) F0(N)
4557 #define F4(N, X1, X2, X3, X4) F0(N)
4558 #define F5(N, X1, X2, X3, X4, X5) F0(N)
4561 #include "insn-format.def"
4571 /* Define a structure to hold the decoded fields. We'll store each inside
4572 an array indexed by an enum. In order to conserve memory, we'll arrange
4573 for fields that do not exist at the same time to overlap, thus the "C"
4574 for compact. For checking purposes there is an "O" for original index
4575 as well that will be applied to availability bitmaps. */
4577 enum DisasFieldIndexO
{
4600 enum DisasFieldIndexC
{
4631 struct DisasFields
{
4634 unsigned presentC
:16;
4635 unsigned int presentO
;
4639 /* This is the way fields are to be accessed out of DisasFields. */
4640 #define have_field(S, F) have_field1((S), FLD_O_##F)
4641 #define get_field(S, F) get_field1((S), FLD_O_##F, FLD_C_##F)
4643 static bool have_field1(const DisasFields
*f
, enum DisasFieldIndexO c
)
4645 return (f
->presentO
>> c
) & 1;
4648 static int get_field1(const DisasFields
*f
, enum DisasFieldIndexO o
,
4649 enum DisasFieldIndexC c
)
4651 assert(have_field1(f
, o
));
4655 /* Describe the layout of each field in each format. */
4656 typedef struct DisasField
{
4658 unsigned int size
:8;
4659 unsigned int type
:2;
4660 unsigned int indexC
:6;
4661 enum DisasFieldIndexO indexO
:8;
4664 typedef struct DisasFormatInfo
{
4665 DisasField op
[NUM_C_FIELD
];
4668 #define R(N, B) { B, 4, 0, FLD_C_r##N, FLD_O_r##N }
4669 #define M(N, B) { B, 4, 0, FLD_C_m##N, FLD_O_m##N }
4670 #define BD(N, BB, BD) { BB, 4, 0, FLD_C_b##N, FLD_O_b##N }, \
4671 { BD, 12, 0, FLD_C_d##N, FLD_O_d##N }
4672 #define BXD(N) { 16, 4, 0, FLD_C_b##N, FLD_O_b##N }, \
4673 { 12, 4, 0, FLD_C_x##N, FLD_O_x##N }, \
4674 { 20, 12, 0, FLD_C_d##N, FLD_O_d##N }
4675 #define BDL(N) { 16, 4, 0, FLD_C_b##N, FLD_O_b##N }, \
4676 { 20, 20, 2, FLD_C_d##N, FLD_O_d##N }
4677 #define BXDL(N) { 16, 4, 0, FLD_C_b##N, FLD_O_b##N }, \
4678 { 12, 4, 0, FLD_C_x##N, FLD_O_x##N }, \
4679 { 20, 20, 2, FLD_C_d##N, FLD_O_d##N }
4680 #define I(N, B, S) { B, S, 1, FLD_C_i##N, FLD_O_i##N }
4681 #define L(N, B, S) { B, S, 0, FLD_C_l##N, FLD_O_l##N }
4683 #define F0(N) { { } },
4684 #define F1(N, X1) { { X1 } },
4685 #define F2(N, X1, X2) { { X1, X2 } },
4686 #define F3(N, X1, X2, X3) { { X1, X2, X3 } },
4687 #define F4(N, X1, X2, X3, X4) { { X1, X2, X3, X4 } },
4688 #define F5(N, X1, X2, X3, X4, X5) { { X1, X2, X3, X4, X5 } },
4690 static const DisasFormatInfo format_info
[] = {
4691 #include "insn-format.def"
4709 /* Generally, we'll extract operands into this structures, operate upon
4710 them, and store them back. See the "in1", "in2", "prep", "wout" sets
4711 of routines below for more details. */
4713 bool g_out
, g_out2
, g_in1
, g_in2
;
4714 TCGv_i64 out
, out2
, in1
, in2
;
4718 /* Return values from translate_one, indicating the state of the TB. */
4720 /* Continue the TB. */
4722 /* We have emitted one or more goto_tb. No fixup required. */
4724 /* We are not using a goto_tb (for whatever reason), but have updated
4725 the PC (for whatever reason), so there's no need to do it again on
4728 /* We are exiting the TB, but have neither emitted a goto_tb, nor
4729 updated the PC for the next instruction to be executed. */
4731 /* We are ending the TB with a noreturn function call, e.g. longjmp.
4732 No following code will be executed. */
4736 typedef enum DisasFacility
{
4737 FAC_Z
, /* zarch (default) */
4738 FAC_CASS
, /* compare and swap and store */
4739 FAC_CASS2
, /* compare and swap and store 2*/
4740 FAC_DFP
, /* decimal floating point */
4741 FAC_DFPR
, /* decimal floating point rounding */
4742 FAC_DO
, /* distinct operands */
4743 FAC_EE
, /* execute extensions */
4744 FAC_EI
, /* extended immediate */
4745 FAC_FPE
, /* floating point extension */
4746 FAC_FPSSH
, /* floating point support sign handling */
4747 FAC_FPRGR
, /* FPR-GR transfer */
4748 FAC_GIE
, /* general instructions extension */
4749 FAC_HFP_MA
, /* HFP multiply-and-add/subtract */
4750 FAC_HW
, /* high-word */
4751 FAC_IEEEE_SIM
, /* IEEE exception sumilation */
4752 FAC_LOC
, /* load/store on condition */
4753 FAC_LD
, /* long displacement */
4754 FAC_PC
, /* population count */
4755 FAC_SCF
, /* store clock fast */
4756 FAC_SFLE
, /* store facility list extended */
4762 DisasFacility fac
:6;
4766 void (*help_in1
)(DisasContext
*, DisasFields
*, DisasOps
*);
4767 void (*help_in2
)(DisasContext
*, DisasFields
*, DisasOps
*);
4768 void (*help_prep
)(DisasContext
*, DisasFields
*, DisasOps
*);
4769 void (*help_wout
)(DisasContext
*, DisasFields
*, DisasOps
*);
4770 void (*help_cout
)(DisasContext
*, DisasOps
*);
4771 ExitStatus (*help_op
)(DisasContext
*, DisasOps
*);
4776 /* ====================================================================== */
4777 /* The operations. These perform the bulk of the work for any insn,
4778 usually after the operands have been loaded and output initialized. */
4780 static ExitStatus
op_add(DisasContext
*s
, DisasOps
*o
)
4782 tcg_gen_add_i64(o
->out
, o
->in1
, o
->in2
);
4786 static ExitStatus
op_mul(DisasContext
*s
, DisasOps
*o
)
4788 tcg_gen_mul_i64(o
->out
, o
->in1
, o
->in2
);
4792 static ExitStatus
op_sub(DisasContext
*s
, DisasOps
*o
)
4794 tcg_gen_sub_i64(o
->out
, o
->in1
, o
->in2
);
4798 /* ====================================================================== */
4799 /* The "Cc OUTput" generators. Given the generated output (and in some cases
4800 the original inputs), update the various cc data structures in order to
4801 be able to compute the new condition code. */
4803 static void cout_adds32(DisasContext
*s
, DisasOps
*o
)
4805 gen_op_update3_cc_i64(s
, CC_OP_ADD_32
, o
->in1
, o
->in2
, o
->out
);
4808 static void cout_adds64(DisasContext
*s
, DisasOps
*o
)
4810 gen_op_update3_cc_i64(s
, CC_OP_ADD_64
, o
->in1
, o
->in2
, o
->out
);
4813 static void cout_addu32(DisasContext
*s
, DisasOps
*o
)
4815 gen_op_update3_cc_i64(s
, CC_OP_ADDU_32
, o
->in1
, o
->in2
, o
->out
);
4818 static void cout_addu64(DisasContext
*s
, DisasOps
*o
)
4820 gen_op_update3_cc_i64(s
, CC_OP_ADDU_64
, o
->in1
, o
->in2
, o
->out
);
4823 static void cout_subs32(DisasContext
*s
, DisasOps
*o
)
4825 gen_op_update3_cc_i64(s
, CC_OP_SUB_32
, o
->in1
, o
->in2
, o
->out
);
4828 static void cout_subs64(DisasContext
*s
, DisasOps
*o
)
4830 gen_op_update3_cc_i64(s
, CC_OP_SUB_64
, o
->in1
, o
->in2
, o
->out
);
4833 static void cout_subu32(DisasContext
*s
, DisasOps
*o
)
4835 gen_op_update3_cc_i64(s
, CC_OP_SUBU_32
, o
->in1
, o
->in2
, o
->out
);
4838 static void cout_subu64(DisasContext
*s
, DisasOps
*o
)
4840 gen_op_update3_cc_i64(s
, CC_OP_SUBU_64
, o
->in1
, o
->in2
, o
->out
);
4843 /* ====================================================================== */
4844 /* The "PREPeration" generators. These initialize the DisasOps.OUT fields
4845 with the TCG register to which we will write. Used in combination with
4846 the "wout" generators, in some cases we need a new temporary, and in
4847 some cases we can write to a TCG global. */
4849 static void prep_new(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4851 o
->out
= tcg_temp_new_i64();
4854 static void prep_r1(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4856 o
->out
= regs
[get_field(f
, r1
)];
4860 /* ====================================================================== */
4861 /* The "Write OUTput" generators. These generally perform some non-trivial
4862 copy of data to TCG globals, or to main memory. The trivial cases are
4863 generally handled by having a "prep" generator install the TCG global
4864 as the destination of the operation. */
4866 static void wout_r1_32(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4868 store_reg32_i64(get_field(f
, r1
), o
->out
);
4871 static void wout_m1_32(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4873 tcg_gen_qemu_st32(o
->out
, o
->addr1
, get_mem_index(s
));
4876 static void wout_m1_64(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4878 tcg_gen_qemu_st64(o
->out
, o
->addr1
, get_mem_index(s
));
4881 /* ====================================================================== */
4882 /* The "INput 1" generators. These load the first operand to an insn. */
4884 static void in1_r1(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4886 o
->in1
= load_reg(get_field(f
, r1
));
4889 static void in1_r1_o(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4891 o
->in1
= regs
[get_field(f
, r1
)];
4895 static void in1_r2(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4897 o
->in1
= load_reg(get_field(f
, r2
));
4900 static void in1_r3(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4902 o
->in1
= load_reg(get_field(f
, r3
));
4905 static void in1_la1(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4907 o
->addr1
= get_address(s
, 0, get_field(f
, b1
), get_field(f
, d1
));
4910 static void in1_m1_32s(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4913 o
->in1
= tcg_temp_new_i64();
4914 tcg_gen_qemu_ld32s(o
->in1
, o
->addr1
, get_mem_index(s
));
4917 static void in1_m1_32u(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4920 o
->in1
= tcg_temp_new_i64();
4921 tcg_gen_qemu_ld32u(o
->in1
, o
->addr1
, get_mem_index(s
));
4924 static void in1_m1_64(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4927 o
->in1
= tcg_temp_new_i64();
4928 tcg_gen_qemu_ld64(o
->in1
, o
->addr1
, get_mem_index(s
));
4931 /* ====================================================================== */
4932 /* The "INput 2" generators. These load the second operand to an insn. */
4934 static void in2_r2(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4936 o
->in2
= load_reg(get_field(f
, r2
));
4939 static void in2_r2_o(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4941 o
->in2
= regs
[get_field(f
, r2
)];
4945 static void in2_r3(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4947 o
->in2
= load_reg(get_field(f
, r3
));
4950 static void in2_r2_32s(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4952 o
->in2
= tcg_temp_new_i64();
4953 tcg_gen_ext32s_i64(o
->in2
, regs
[get_field(f
, r2
)]);
4956 static void in2_r2_32u(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4958 o
->in2
= tcg_temp_new_i64();
4959 tcg_gen_ext32u_i64(o
->in2
, regs
[get_field(f
, r2
)]);
4962 static void in2_a2(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4964 int x2
= have_field(f
, x2
) ? get_field(f
, x2
) : 0;
4965 o
->in2
= get_address(s
, x2
, get_field(f
, b2
), get_field(f
, d2
));
4968 static void in2_m2_16s(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4971 tcg_gen_qemu_ld16s(o
->in2
, o
->in2
, get_mem_index(s
));
4974 static void in2_m2_32s(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4977 tcg_gen_qemu_ld32s(o
->in2
, o
->in2
, get_mem_index(s
));
4980 static void in2_m2_32u(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4983 tcg_gen_qemu_ld32u(o
->in2
, o
->in2
, get_mem_index(s
));
4986 static void in2_m2_64(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4989 tcg_gen_qemu_ld64(o
->in2
, o
->in2
, get_mem_index(s
));
4992 static void in2_i2(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4994 o
->in2
= tcg_const_i64(get_field(f
, i2
));
4997 static void in2_i2_32u(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4999 o
->in2
= tcg_const_i64((uint32_t)get_field(f
, i2
));
/* NOTE(review): this region is extraction-garbled -- the original file's
   line numbers (5002, 5004, ...) are fused into the text, and many
   intervening lines (braces, #undef lines, the remaining DisasInsn table
   fields, the switch framing of lookup_opc) are missing.  The comments
   below annotate only what is visible; recover the full text from the
   upstream file before compiling. */
5002 /* ====================================================================== */
5004 /* Find opc within the table of insns. This is formulated as a switch
5005 statement so that (1) we get compile-time notice of cut-paste errors
5006 for duplicated opcodes, and (2) the compiler generates the binary
5007 search tree, rather than us having to post-process the table. */
/* C() is the common entry point; it forwards to D() with a 0 data arg. */
5009 #define C(OPC, NM, FT, FC, I1, I2, P, W, OP, CC) \
5010 D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, 0)
/* First expansion of insn-data.def: one enumerator per insn name. */
5012 #define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) insn_ ## NM,
5014 enum DisasInsnEnum
{
5015 #include "insn-data.def"
/* Second expansion: a DisasInsn initializer per insn; the helper function
   pointers are wired up by token pasting (in1_##I1, in2_##I2, ...). */
5019 #define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) { \
5024 .help_in1 = in1_##I1, \
5025 .help_in2 = in2_##I2, \
5026 .help_prep = prep_##P, \
5027 .help_wout = wout_##W, \
5028 .help_cout = cout_##CC, \
5029 .help_op = op_##OP, \
5033 /* Allow 0 to be used for NULL in the table below. */
5041 static const DisasInsn insn_info
[] = {
5042 #include "insn-data.def"
/* Third expansion: one switch case per opcode, returning the table entry;
   this is the body of lookup_opc below. */
5046 #define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) \
5047 case OPC: return &insn_info[insn_ ## NM];
5049 static const DisasInsn
*lookup_opc(uint16_t opc
)
5052 #include "insn-data.def"
5061 /* Extract a field from the insn. The INSN should be left-aligned in
5062 the uint64_t so that we can more easily utilize the big-bit-endian
5063 definitions we extract from the Principals of Operation. */
5065 static void extract_field(DisasFields
*o
, const DisasField
*f
, uint64_t insn
)
5073 /* Zero extract the field from the insn. */
5074 r
= (insn
<< f
->beg
) >> (64 - f
->size
);
5076 /* Sign-extend, or un-swap the field as necessary. */
5078 case 0: /* unsigned */
5080 case 1: /* signed */
5081 assert(f
->size
<= 32);
5082 m
= 1u << (f
->size
- 1);
5085 case 2: /* dl+dh split, signed 20 bit. */
5086 r
= ((int8_t)r
<< 12) | (r
>> 8);
5092 /* Validate that the "compressed" encoding we selected above is valid.
5093 I.e. we havn't make two different original fields overlap. */
5094 assert(((o
->presentC
>> f
->indexC
) & 1) == 0);
5095 o
->presentC
|= 1 << f
->indexC
;
5096 o
->presentO
|= 1 << f
->indexO
;
5098 o
->c
[f
->indexC
] = r
;
/* NOTE(review): extraction-garbled region -- original line numbers are
   fused into the text and the framing of both switch statements, the
   declarations of op/op2/ilen/i, and several case labels are missing.
   Comments below annotate only the visible fragments; recover the full
   function from the upstream file. */
5101 /* Lookup the insn at the current PC, extracting the operands into O and
5102 returning the info struct for the insn. Returns NULL for invalid insn. */
5104 static const DisasInsn
*extract_insn(CPUS390XState
*env
, DisasContext
*s
,
5107 uint64_t insn
, pc
= s
->pc
;
5109 const DisasInsn
*info
;
/* Read the first halfword; the top byte determines the insn length. */
5111 insn
= ld_code2(env
, pc
);
5112 op
= (insn
>> 8) & 0xff;
5113 ilen
= get_ilen(op
);
5114 s
->next_pc
= s
->pc
+ ilen
;
/* 4-byte insn: reload as one word, left-aligned in the uint64_t. */
5121 insn
= ld_code4(env
, pc
) << 32;
/* 6-byte insn: first halfword plus the following word. */
5124 insn
= (insn
<< 48) | (ld_code4(env
, pc
+ 2) << 16);
5130 /* We can't actually determine the insn format until we've looked up
5131 the full insn opcode. Which we can't do without locating the
5132 secondary opcode. Assume by default that OP2 is at bit 40; for
5133 those smaller insns that don't actually have a secondary opcode
5134 this will correctly result in OP2 = 0. */
5140 case 0xb2: /* S, RRF, RRE */
5141 case 0xb3: /* RRE, RRD, RRF */
5142 case 0xb9: /* RRE, RRF */
5143 case 0xe5: /* SSE, SIL */
/* Secondary opcode in bits 8..15 of the left-aligned insn. */
5144 op2
= (insn
<< 8) >> 56;
5148 case 0xc0: /* RIL */
5149 case 0xc2: /* RIL */
5150 case 0xc4: /* RIL */
5151 case 0xc6: /* RIL */
5152 case 0xc8: /* SSF */
5153 case 0xcc: /* RIL */
/* Secondary opcode is the 4-bit field at bits 12..15. */
5154 op2
= (insn
<< 12) >> 60;
5156 case 0xd0 ... 0xdf: /* SS */
5162 case 0xee ... 0xf3: /* SS */
5163 case 0xf8 ... 0xfd: /* SS */
/* Default: OP2 at bit 40, per the comment above. */
5167 op2
= (insn
<< 40) >> 56;
/* Start with all fields absent. */
5171 memset(f
, 0, sizeof(*f
));
5175 /* Lookup the instruction. */
5176 info
= lookup_opc(op
<< 8 | op2
);
5178 /* If we found it, extract the operands. */
5180 DisasFormat fmt
= info
->fmt
;
5183 for (i
= 0; i
< NUM_C_FIELD
; ++i
) {
5184 extract_field(f
, &format_info
[fmt
].op
[i
], insn
);
/* NOTE(review): extraction-garbled region -- original line numbers fused
   into the text; the if/switch framing, the DisasFields/DisasOps locals,
   and several branch arms are missing.  Comments annotate only what is
   visible; recover the full function from the upstream file. */
5190 static ExitStatus
translate_one(CPUS390XState
*env
, DisasContext
*s
)
5192 const DisasInsn
*insn
;
5193 ExitStatus ret
= NO_EXIT
;
/* Decode the insn at s->pc and extract its operand fields into f. */
5197 insn
= extract_insn(env
, s
, &f
);
5199 /* If not found, try the old interpreter. This includes ILLOPC. */
5201 disas_s390_insn(env
, s
);
/* Map the legacy interpreter's is_jmp result onto an ExitStatus. */
5202 switch (s
->is_jmp
) {
5210 ret
= EXIT_PC_UPDATED
;
5213 ret
= EXIT_NORETURN
;
5223 /* Set up the structures we use to communicate with the helpers. */
5226 o
.g_out
= o
.g_out2
= o
.g_in1
= o
.g_in2
= false;
5227 TCGV_UNUSED_I64(o
.out
);
5228 TCGV_UNUSED_I64(o
.out2
);
5229 TCGV_UNUSED_I64(o
.in1
);
5230 TCGV_UNUSED_I64(o
.in2
);
5231 TCGV_UNUSED_I64(o
.addr1
);
5233 /* Implement the instruction. */
/* Each help_* hook is optional; run them in fixed order:
   in1, in2, prep, op, wout, cout. */
5234 if (insn
->help_in1
) {
5235 insn
->help_in1(s
, &f
, &o
);
5237 if (insn
->help_in2
) {
5238 insn
->help_in2(s
, &f
, &o
);
5240 if (insn
->help_prep
) {
5241 insn
->help_prep(s
, &f
, &o
);
/* Only help_op produces the ExitStatus for the insn proper. */
5243 if (insn
->help_op
) {
5244 ret
= insn
->help_op(s
, &o
);
5246 if (insn
->help_wout
) {
5247 insn
->help_wout(s
, &f
, &o
);
5249 if (insn
->help_cout
) {
5250 insn
->help_cout(s
, &o
);
5253 /* Free any temporaries created by the helpers. */
/* The g_* flags mark TCGv's that alias globals and must not be freed. */
5254 if (!TCGV_IS_UNUSED_I64(o
.out
) && !o
.g_out
) {
5255 tcg_temp_free_i64(o
.out
);
5257 if (!TCGV_IS_UNUSED_I64(o
.out2
) && !o
.g_out2
) {
5258 tcg_temp_free_i64(o
.out2
);
5260 if (!TCGV_IS_UNUSED_I64(o
.in1
) && !o
.g_in1
) {
5261 tcg_temp_free_i64(o
.in1
);
5263 if (!TCGV_IS_UNUSED_I64(o
.in2
) && !o
.g_in2
) {
5264 tcg_temp_free_i64(o
.in2
);
5266 if (!TCGV_IS_UNUSED_I64(o
.addr1
)) {
5267 tcg_temp_free_i64(o
.addr1
);
5270 /* Advance to the next instruction. */
/* NOTE(review): extraction-garbled region -- original line numbers fused
   into the text; the do/while framing, several locals (dc, status, j, lj,
   bp, do_debug), the search_pc parameter, and multiple branch arms are
   missing.  Comments annotate only the visible fragments; recover the
   full function from the upstream file. */
5275 static inline void gen_intermediate_code_internal(CPUS390XState
*env
,
5276 TranslationBlock
*tb
,
5280 target_ulong pc_start
;
5281 uint64_t next_page_start
;
5282 uint16_t *gen_opc_end
;
5284 int num_insns
, max_insns
;
/* 31-bit mode: addresses are truncated to 31 bits. */
5292 if (!(tb
->flags
& FLAG_MASK_64
)) {
5293 pc_start
&= 0x7fffffff;
/* Initialize the per-TB disassembly context. */
5298 dc
.cc_op
= CC_OP_DYNAMIC
;
5299 do_debug
= dc
.singlestep_enabled
= env
->singlestep_enabled
;
5300 dc
.is_jmp
= DISAS_NEXT
;
5302 gen_opc_end
= tcg_ctx
.gen_opc_buf
+ OPC_MAX_SIZE
;
5304 next_page_start
= (pc_start
& TARGET_PAGE_MASK
) + TARGET_PAGE_SIZE
;
5307 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
5308 if (max_insns
== 0) {
5309 max_insns
= CF_COUNT_MASK
;
/* search_pc bookkeeping: record pc/cc_op/icount per generated op. */
5316 j
= tcg_ctx
.gen_opc_ptr
- tcg_ctx
.gen_opc_buf
;
5320 tcg_ctx
.gen_opc_instr_start
[lj
++] = 0;
5323 tcg_ctx
.gen_opc_pc
[lj
] = dc
.pc
;
5324 gen_opc_cc_op
[lj
] = dc
.cc_op
;
5325 tcg_ctx
.gen_opc_instr_start
[lj
] = 1;
5326 tcg_ctx
.gen_opc_icount
[lj
] = num_insns
;
5328 if (++num_insns
== max_insns
&& (tb
->cflags
& CF_LAST_IO
)) {
5332 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
| CPU_LOG_TB_OP_OPT
))) {
5333 tcg_gen_debug_insn_start(dc
.pc
);
/* Stop translation at a guest breakpoint. */
5337 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
5338 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
5339 if (bp
->pc
== dc
.pc
) {
5340 status
= EXIT_PC_STALE
;
5346 if (status
== NO_EXIT
) {
5347 status
= translate_one(env
, &dc
);
5350 /* If we reach a page boundary, are single stepping,
5351 or exhaust instruction count, stop generation. */
5352 if (status
== NO_EXIT
5353 && (dc
.pc
>= next_page_start
5354 || tcg_ctx
.gen_opc_ptr
>= gen_opc_end
5355 || num_insns
>= max_insns
5357 || env
->singlestep_enabled
)) {
5358 status
= EXIT_PC_STALE
;
5360 } while (status
== NO_EXIT
);
5362 if (tb
->cflags
& CF_LAST_IO
) {
/* EXIT_PC_STALE path: psw.addr must be brought up to date. */
5371 update_psw_addr(&dc
);
5373 case EXIT_PC_UPDATED
:
5374 if (singlestep
&& dc
.cc_op
!= CC_OP_DYNAMIC
) {
5375 gen_op_calc_cc(&dc
);
5377 /* Next TB starts off with CC_OP_DYNAMIC,
5378 so make sure the cc op type is in env */
5379 gen_op_set_cc_op(&dc
);
/* Debug single-step: raise EXCP_DEBUG instead of chaining TBs. */
5382 gen_exception(EXCP_DEBUG
);
5384 /* Generate the return instruction */
5392 gen_icount_end(tb
, num_insns
);
5393 *tcg_ctx
.gen_opc_ptr
= INDEX_op_end
;
/* search_pc epilogue: pad gen_opc_instr_start out to the final op. */
5395 j
= tcg_ctx
.gen_opc_ptr
- tcg_ctx
.gen_opc_buf
;
5398 tcg_ctx
.gen_opc_instr_start
[lj
++] = 0;
5401 tb
->size
= dc
.pc
- pc_start
;
5402 tb
->icount
= num_insns
;
5405 #if defined(S390X_DEBUG_DISAS)
5406 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
5407 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
5408 log_target_disas(env
, pc_start
, dc
.pc
- pc_start
, 1);
5414 void gen_intermediate_code (CPUS390XState
*env
, struct TranslationBlock
*tb
)
5416 gen_intermediate_code_internal(env
, tb
, 0);
5419 void gen_intermediate_code_pc (CPUS390XState
*env
, struct TranslationBlock
*tb
)
5421 gen_intermediate_code_internal(env
, tb
, 1);
5424 void restore_state_to_opc(CPUS390XState
*env
, TranslationBlock
*tb
, int pc_pos
)
5427 env
->psw
.addr
= tcg_ctx
.gen_opc_pc
[pc_pos
];
5428 cc_op
= gen_opc_cc_op
[pc_pos
];
5429 if ((cc_op
!= CC_OP_DYNAMIC
) && (cc_op
!= CC_OP_STATIC
)) {