4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 /* #define DEBUG_INLINE_BRANCHES */
22 #define S390X_DEBUG_DISAS
23 /* #define S390X_DEBUG_DISAS_VERBOSE */
25 #ifdef S390X_DEBUG_DISAS_VERBOSE
26 # define LOG_DISAS(...) qemu_log(__VA_ARGS__)
28 # define LOG_DISAS(...) do { } while (0)
32 #include "disas/disas.h"
36 /* global register indexes */
37 static TCGv_ptr cpu_env
;
39 #include "exec/gen-icount.h"
45 /* Information that (most) every instruction needs to manipulate. */
46 typedef struct DisasContext DisasContext
;
47 typedef struct DisasInsn DisasInsn
;
48 typedef struct DisasFields DisasFields
;
51 struct TranslationBlock
*tb
;
52 const DisasInsn
*insn
;
56 bool singlestep_enabled
;
60 /* Information carried about a condition to be evaluated. */
67 struct { TCGv_i64 a
, b
; } s64
;
68 struct { TCGv_i32 a
, b
; } s32
;
74 static void gen_op_calc_cc(DisasContext
*s
);
76 #ifdef DEBUG_INLINE_BRANCHES
77 static uint64_t inline_branch_hit
[CC_OP_MAX
];
78 static uint64_t inline_branch_miss
[CC_OP_MAX
];
/* Dump the raw instruction image to the disassembly log.  Compiles to a
   no-op unless S390X_DEBUG_DISAS_VERBOSE is defined (see LOG_DISAS). */
static inline void debug_insn(uint64_t insn)
{
    LOG_DISAS("insn: 0x%" PRIx64 "\n", insn);
}
86 static inline uint64_t pc_to_link_info(DisasContext
*s
, uint64_t pc
)
88 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
89 if (s
->tb
->flags
& FLAG_MASK_32
) {
90 return pc
| 0x80000000;
96 void cpu_dump_state(CPUS390XState
*env
, FILE *f
, fprintf_function cpu_fprintf
,
101 if (env
->cc_op
> 3) {
102 cpu_fprintf(f
, "PSW=mask %016" PRIx64
" addr %016" PRIx64
" cc %15s\n",
103 env
->psw
.mask
, env
->psw
.addr
, cc_name(env
->cc_op
));
105 cpu_fprintf(f
, "PSW=mask %016" PRIx64
" addr %016" PRIx64
" cc %02x\n",
106 env
->psw
.mask
, env
->psw
.addr
, env
->cc_op
);
109 for (i
= 0; i
< 16; i
++) {
110 cpu_fprintf(f
, "R%02d=%016" PRIx64
, i
, env
->regs
[i
]);
112 cpu_fprintf(f
, "\n");
118 for (i
= 0; i
< 16; i
++) {
119 cpu_fprintf(f
, "F%02d=%016" PRIx64
, i
, env
->fregs
[i
].ll
);
121 cpu_fprintf(f
, "\n");
127 #ifndef CONFIG_USER_ONLY
128 for (i
= 0; i
< 16; i
++) {
129 cpu_fprintf(f
, "C%02d=%016" PRIx64
, i
, env
->cregs
[i
]);
131 cpu_fprintf(f
, "\n");
138 #ifdef DEBUG_INLINE_BRANCHES
139 for (i
= 0; i
< CC_OP_MAX
; i
++) {
140 cpu_fprintf(f
, " %15s = %10ld\t%10ld\n", cc_name(i
),
141 inline_branch_miss
[i
], inline_branch_hit
[i
]);
145 cpu_fprintf(f
, "\n");
148 static TCGv_i64 psw_addr
;
149 static TCGv_i64 psw_mask
;
151 static TCGv_i32 cc_op
;
152 static TCGv_i64 cc_src
;
153 static TCGv_i64 cc_dst
;
154 static TCGv_i64 cc_vr
;
156 static char cpu_reg_names
[32][4];
157 static TCGv_i64 regs
[16];
158 static TCGv_i64 fregs
[16];
160 static uint8_t gen_opc_cc_op
[OPC_BUF_SIZE
];
162 void s390x_translate_init(void)
166 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
167 psw_addr
= tcg_global_mem_new_i64(TCG_AREG0
,
168 offsetof(CPUS390XState
, psw
.addr
),
170 psw_mask
= tcg_global_mem_new_i64(TCG_AREG0
,
171 offsetof(CPUS390XState
, psw
.mask
),
174 cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUS390XState
, cc_op
),
176 cc_src
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUS390XState
, cc_src
),
178 cc_dst
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUS390XState
, cc_dst
),
180 cc_vr
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUS390XState
, cc_vr
),
183 for (i
= 0; i
< 16; i
++) {
184 snprintf(cpu_reg_names
[i
], sizeof(cpu_reg_names
[0]), "r%d", i
);
185 regs
[i
] = tcg_global_mem_new(TCG_AREG0
,
186 offsetof(CPUS390XState
, regs
[i
]),
190 for (i
= 0; i
< 16; i
++) {
191 snprintf(cpu_reg_names
[i
+ 16], sizeof(cpu_reg_names
[0]), "f%d", i
);
192 fregs
[i
] = tcg_global_mem_new(TCG_AREG0
,
193 offsetof(CPUS390XState
, fregs
[i
].d
),
194 cpu_reg_names
[i
+ 16]);
197 /* register helpers */
202 static inline TCGv_i64
load_reg(int reg
)
204 TCGv_i64 r
= tcg_temp_new_i64();
205 tcg_gen_mov_i64(r
, regs
[reg
]);
209 static inline TCGv_i64
load_freg(int reg
)
211 TCGv_i64 r
= tcg_temp_new_i64();
212 tcg_gen_mov_i64(r
, fregs
[reg
]);
216 static inline TCGv_i32
load_freg32(int reg
)
218 TCGv_i32 r
= tcg_temp_new_i32();
219 #if HOST_LONG_BITS == 32
220 tcg_gen_mov_i32(r
, TCGV_HIGH(fregs
[reg
]));
222 tcg_gen_shri_i64(MAKE_TCGV_I64(GET_TCGV_I32(r
)), fregs
[reg
], 32);
227 static inline TCGv_i32
load_reg32(int reg
)
229 TCGv_i32 r
= tcg_temp_new_i32();
230 tcg_gen_trunc_i64_i32(r
, regs
[reg
]);
234 static inline TCGv_i64
load_reg32_i64(int reg
)
236 TCGv_i64 r
= tcg_temp_new_i64();
237 tcg_gen_ext32s_i64(r
, regs
[reg
]);
/* Full 64-bit write of v into general register <reg>. */
static inline void store_reg(int reg, TCGv_i64 v)
{
    tcg_gen_mov_i64(regs[reg], v);
}
/* Full 64-bit write of v into floating-point register <reg>. */
static inline void store_freg(int reg, TCGv_i64 v)
{
    tcg_gen_mov_i64(fregs[reg], v);
}
/* Write the 32-bit value v into the low half of general register <reg>. */
static inline void store_reg32(int reg, TCGv_i32 v)
{
    /* 32 bit register writes keep the upper half */
#if HOST_LONG_BITS == 32
    /* On a 32-bit host an i64 global is a register pair; write the low word. */
    tcg_gen_mov_i32(TCGV_LOW(regs[reg]), v);
#else
    /* Deposit bits [31:0], leaving bits [63:32] untouched. */
    tcg_gen_deposit_i64(regs[reg], regs[reg],
                        MAKE_TCGV_I64(GET_TCGV_I32(v)), 0, 32);
#endif
}
/* As store_reg32, but the source value is already held in an i64 temp
   (only its low 32 bits are used). */
static inline void store_reg32_i64(int reg, TCGv_i64 v)
{
    /* 32 bit register writes keep the upper half */
    tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 32);
}
/* Write the low 16 bits of v into bits [15:0] of general register <reg>. */
static inline void store_reg16(int reg, TCGv_i32 v)
{
    /* 16 bit register writes keep the upper bytes */
#if HOST_LONG_BITS == 32
    /* 32-bit host: the i64 global is a pair; deposit into the low word. */
    tcg_gen_deposit_i32(TCGV_LOW(regs[reg]), TCGV_LOW(regs[reg]), v, 0, 16);
#else
    tcg_gen_deposit_i64(regs[reg], regs[reg],
                        MAKE_TCGV_I64(GET_TCGV_I32(v)), 0, 16);
#endif
}
/* Write the low 8 bits of v into bits [7:0] of general register <reg>. */
static inline void store_reg8(int reg, TCGv_i64 v)
{
    /* 8 bit register writes keep the upper bytes */
    tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 8);
}
/* Write the 32-bit float value v into the high half of fp register <reg>
   (short FP operands live in bits [63:32] of the register). */
static inline void store_freg32(int reg, TCGv_i32 v)
{
    /* 32 bit register writes keep the lower half */
#if HOST_LONG_BITS == 32
    /* 32-bit host: the i64 global is a pair; the high word holds the value. */
    tcg_gen_mov_i32(TCGV_HIGH(fregs[reg]), v);
#else
    /* Deposit into bits [63:32], leaving bits [31:0] untouched. */
    tcg_gen_deposit_i64(fregs[reg], fregs[reg],
                        MAKE_TCGV_I64(GET_TCGV_I32(v)), 32, 32);
#endif
}
/* Flush the translator's current pc into the architectural psw.addr global. */
static inline void update_psw_addr(DisasContext *s)
{
    tcg_gen_movi_i64(psw_addr, s->pc);
}
302 static inline void potential_page_fault(DisasContext
*s
)
304 #ifndef CONFIG_USER_ONLY
/* Fetch a 2-byte instruction halfword at guest pc, zero-extended to 64 bits. */
static inline uint64_t ld_code2(CPUS390XState *env, uint64_t pc)
{
    return (uint64_t)cpu_lduw_code(env, pc);
}
/* Fetch a 4-byte instruction word at guest pc, zero-extended to 64 bits.
   The intermediate uint32_t cast prevents sign extension of the i32 load. */
static inline uint64_t ld_code4(CPUS390XState *env, uint64_t pc)
{
    return (uint64_t)(uint32_t)cpu_ldl_code(env, pc);
}
/* Fetch a 6-byte instruction: the first halfword lands in bits [47:32],
   the following word in bits [31:0]. */
static inline uint64_t ld_code6(CPUS390XState *env, uint64_t pc)
{
    return (ld_code2(env, pc) << 32) | ld_code4(env, pc + 2);
}
325 static inline int get_mem_index(DisasContext
*s
)
327 switch (s
->tb
->flags
& FLAG_MASK_ASC
) {
328 case PSW_ASC_PRIMARY
>> 32:
330 case PSW_ASC_SECONDARY
>> 32:
332 case PSW_ASC_HOME
>> 32:
/* Emit a call to the exception helper raising exception number excp. */
static void gen_exception(int excp)
{
    TCGv_i32 tmp = tcg_const_i32(excp);
    gen_helper_exception(cpu_env, tmp);
    tcg_temp_free_i32(tmp);
}
347 static void gen_program_exception(DisasContext
*s
, int code
)
351 /* Remember what pgm exeption this was. */
352 tmp
= tcg_const_i32(code
);
353 tcg_gen_st_i32(tmp
, cpu_env
, offsetof(CPUS390XState
, int_pgm_code
));
354 tcg_temp_free_i32(tmp
);
356 tmp
= tcg_const_i32(s
->next_pc
- s
->pc
);
357 tcg_gen_st_i32(tmp
, cpu_env
, offsetof(CPUS390XState
, int_pgm_ilen
));
358 tcg_temp_free_i32(tmp
);
360 /* Advance past instruction. */
367 /* Trigger exception. */
368 gen_exception(EXCP_PGM
);
371 s
->is_jmp
= DISAS_EXCP
;
/* Raise a program exception for an unrecognized/illegal instruction.
   NOTE(review): architecturally an illegal opcode raises an *operation*
   exception; PGM_SPECIFICATION here may be deliberate or a bug — verify
   against the Principles of Operation before relying on it. */
static inline void gen_illegal_opcode(DisasContext *s)
{
    gen_program_exception(s, PGM_SPECIFICATION);
}
/* If the guest is in problem (user) state, raise a privileged-operation
   program exception.  The PSW problem-state bit is cached in tb->flags,
   shifted down by 32. */
static inline void check_privileged(DisasContext *s)
{
    if (s->tb->flags & (PSW_MASK_PSTATE >> 32)) {
        gen_program_exception(s, PGM_PRIVILEGED);
    }
}
386 static TCGv_i64
get_address(DisasContext
*s
, int x2
, int b2
, int d2
)
390 /* 31-bitify the immediate part; register contents are dealt with below */
391 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
397 tmp
= tcg_const_i64(d2
);
398 tcg_gen_add_i64(tmp
, tmp
, regs
[x2
]);
403 tcg_gen_add_i64(tmp
, tmp
, regs
[b2
]);
407 tmp
= tcg_const_i64(d2
);
408 tcg_gen_add_i64(tmp
, tmp
, regs
[b2
]);
413 tmp
= tcg_const_i64(d2
);
416 /* 31-bit mode mask if there are values loaded from registers */
417 if (!(s
->tb
->flags
& FLAG_MASK_64
) && (x2
|| b2
)) {
418 tcg_gen_andi_i64(tmp
, tmp
, 0x7fffffffUL
);
/* Record a compile-time-constant condition code: CC_OP_CONST0..CONST3 are
   consecutive, so CC_OP_CONST0 + val selects the right op.
   Assumes val is in 0..3 — TODO confirm at all call sites. */
static void gen_op_movi_cc(DisasContext *s, uint32_t val)
{
    s->cc_op = CC_OP_CONST0 + val;
}
429 static void gen_op_update1_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 dst
)
431 tcg_gen_discard_i64(cc_src
);
432 tcg_gen_mov_i64(cc_dst
, dst
);
433 tcg_gen_discard_i64(cc_vr
);
437 static void gen_op_update1_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 dst
)
439 tcg_gen_discard_i64(cc_src
);
440 tcg_gen_extu_i32_i64(cc_dst
, dst
);
441 tcg_gen_discard_i64(cc_vr
);
445 static void gen_op_update2_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 src
,
448 tcg_gen_mov_i64(cc_src
, src
);
449 tcg_gen_mov_i64(cc_dst
, dst
);
450 tcg_gen_discard_i64(cc_vr
);
454 static void gen_op_update2_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 src
,
457 tcg_gen_extu_i32_i64(cc_src
, src
);
458 tcg_gen_extu_i32_i64(cc_dst
, dst
);
459 tcg_gen_discard_i64(cc_vr
);
463 static void gen_op_update3_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 src
,
464 TCGv_i64 dst
, TCGv_i64 vr
)
466 tcg_gen_mov_i64(cc_src
, src
);
467 tcg_gen_mov_i64(cc_dst
, dst
);
468 tcg_gen_mov_i64(cc_vr
, vr
);
472 static void gen_op_update3_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 src
,
473 TCGv_i32 dst
, TCGv_i32 vr
)
475 tcg_gen_extu_i32_i64(cc_src
, src
);
476 tcg_gen_extu_i32_i64(cc_dst
, dst
);
477 tcg_gen_extu_i32_i64(cc_vr
, vr
);
/* Set cc from a 32-bit logical result: zero / nonzero. */
static inline void set_cc_nz_u32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ, val);
}
/* Set cc from a 64-bit logical result: zero / nonzero. */
static inline void set_cc_nz_u64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_NZ, val);
}
491 static inline void cmp_32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
494 gen_op_update2_cc_i32(s
, cond
, v1
, v2
);
497 static inline void cmp_64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
500 gen_op_update2_cc_i64(s
, cond
, v1
, v2
);
/* Signed 32-bit compare of v1 with v2 for later cc evaluation. */
static inline void cmp_s32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTGT_32);
}
/* Unsigned (logical) 32-bit compare of v1 with v2 for later cc evaluation. */
static inline void cmp_u32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTUGTU_32);
}
513 static inline void cmp_s32c(DisasContext
*s
, TCGv_i32 v1
, int32_t v2
)
515 /* XXX optimize for the constant? put it in s? */
516 TCGv_i32 tmp
= tcg_const_i32(v2
);
517 cmp_32(s
, v1
, tmp
, CC_OP_LTGT_32
);
518 tcg_temp_free_i32(tmp
);
521 static inline void cmp_u32c(DisasContext
*s
, TCGv_i32 v1
, uint32_t v2
)
523 TCGv_i32 tmp
= tcg_const_i32(v2
);
524 cmp_32(s
, v1
, tmp
, CC_OP_LTUGTU_32
);
525 tcg_temp_free_i32(tmp
);
/* Signed 64-bit compare of v1 with v2 for later cc evaluation. */
static inline void cmp_s64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTGT_64);
}
/* Unsigned (logical) 64-bit compare of v1 with v2 for later cc evaluation. */
static inline void cmp_u64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTUGTU_64);
}
538 static inline void cmp_s64c(DisasContext
*s
, TCGv_i64 v1
, int64_t v2
)
540 TCGv_i64 tmp
= tcg_const_i64(v2
);
542 tcg_temp_free_i64(tmp
);
545 static inline void cmp_u64c(DisasContext
*s
, TCGv_i64 v1
, uint64_t v2
)
547 TCGv_i64 tmp
= tcg_const_i64(v2
);
549 tcg_temp_free_i64(tmp
);
/* Set cc from a signed 32-bit result compared against zero. */
static inline void set_cc_s32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_LTGT0_32, val);
}
/* Set cc from a signed 64-bit result compared against zero. */
static inline void set_cc_s64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_LTGT0_64, val);
}
562 static void set_cc_addu64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
565 gen_op_update3_cc_i64(s
, CC_OP_ADDU_64
, v1
, v2
, vr
);
/* Set cc for a 64-bit LOAD POSITIVE style result. */
static void set_cc_abs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_ABS_64, v1);
}
/* Set cc for a 64-bit LOAD NEGATIVE style result. */
static void set_cc_nabs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_NABS_64, v1);
}
/* Set cc for a signed 32-bit addition: v1 + v2 with result vr. */
static void set_cc_add32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_ADD_32, v1, v2, vr);
}
583 static void set_cc_addu32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
586 gen_op_update3_cc_i32(s
, CC_OP_ADDU_32
, v1
, v2
, vr
);
/* Set cc for a signed 32-bit subtraction: v1 - v2 with result vr. */
static void set_cc_sub32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_SUB_32, v1, v2, vr);
}
/* Set cc for a 32-bit LOAD POSITIVE style result. */
static void set_cc_abs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_ABS_32, v1);
}
/* Set cc for a 32-bit LOAD NEGATIVE style result. */
static void set_cc_nabs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NABS_32, v1);
}
/* Set cc for a 32-bit LOAD COMPLEMENT style result. */
static void set_cc_comp32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_COMP_32, v1);
}
/* Set cc for a 64-bit LOAD COMPLEMENT style result. */
static void set_cc_comp64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_COMP_64, v1);
}
/* Set cc for INSERT CHARACTERS UNDER MASK: v1 is the mask, v2 the
   inserted bits (semantics resolved later by the CC_OP_ICM evaluator). */
static void set_cc_icm(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    gen_op_update2_cc_i32(s, CC_OP_ICM, v1, v2);
}
/* Set cc for a 32-bit float compare: stash the raw i32 float bits in
   cc_src (zero-extended) and the i64 operand in cc_dst, deferring the
   actual comparison to the CC_OP_LTGT_F32 evaluator. */
static void set_cc_cmp_f32_i64(DisasContext *s, TCGv_i32 v1, TCGv_i64 v2)
{
    tcg_gen_extu_i32_i64(cc_src, v1);
    tcg_gen_mov_i64(cc_dst, v2);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_LTGT_F32;
}
/* Set cc from a 32-bit float result: zero / nonzero classification. */
static void gen_set_cc_nz_f32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ_F32, v1);
}
/* CC value is in env->cc_op */
/* Mark the condition code as already computed into env->cc_op (e.g. by a
   helper); discard the deferred-evaluation inputs so nothing stale is used. */
static inline void set_cc_static(DisasContext *s)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_discard_i64(cc_dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_STATIC;
}
/* Materialize the translator's compile-time cc_op into the cc_op global.
   Skipped when dynamic (already there) or static (value lives in env). */
static inline void gen_op_set_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC && s->cc_op != CC_OP_STATIC) {
        tcg_gen_movi_i32(cc_op, s->cc_op);
    }
}
648 static inline void gen_update_cc_op(DisasContext
*s
)
653 /* calculates cc into cc_op */
654 static void gen_op_calc_cc(DisasContext
*s
)
656 TCGv_i32 local_cc_op
= tcg_const_i32(s
->cc_op
);
657 TCGv_i64 dummy
= tcg_const_i64(0);
664 /* s->cc_op is the cc value */
665 tcg_gen_movi_i32(cc_op
, s
->cc_op
- CC_OP_CONST0
);
668 /* env->cc_op already is the cc value */
682 gen_helper_calc_cc(cc_op
, cpu_env
, local_cc_op
, dummy
, cc_dst
, dummy
);
687 case CC_OP_LTUGTU_32
:
688 case CC_OP_LTUGTU_64
:
695 gen_helper_calc_cc(cc_op
, cpu_env
, local_cc_op
, cc_src
, cc_dst
, dummy
);
706 gen_helper_calc_cc(cc_op
, cpu_env
, local_cc_op
, cc_src
, cc_dst
, cc_vr
);
709 /* unknown operation - assume 3 arguments and cc_op in env */
710 gen_helper_calc_cc(cc_op
, cpu_env
, cc_op
, cc_src
, cc_dst
, cc_vr
);
716 tcg_temp_free_i32(local_cc_op
);
717 tcg_temp_free_i64(dummy
);
719 /* We now have cc in cc_op as constant */
723 static inline void decode_rr(DisasContext
*s
, uint64_t insn
, int *r1
, int *r2
)
727 *r1
= (insn
>> 4) & 0xf;
731 static inline TCGv_i64
decode_rx(DisasContext
*s
, uint64_t insn
, int *r1
,
732 int *x2
, int *b2
, int *d2
)
736 *r1
= (insn
>> 20) & 0xf;
737 *x2
= (insn
>> 16) & 0xf;
738 *b2
= (insn
>> 12) & 0xf;
741 return get_address(s
, *x2
, *b2
, *d2
);
744 static inline void decode_rs(DisasContext
*s
, uint64_t insn
, int *r1
, int *r3
,
749 *r1
= (insn
>> 20) & 0xf;
751 *r3
= (insn
>> 16) & 0xf;
752 *b2
= (insn
>> 12) & 0xf;
756 static inline TCGv_i64
decode_si(DisasContext
*s
, uint64_t insn
, int *i2
,
761 *i2
= (insn
>> 16) & 0xff;
762 *b1
= (insn
>> 12) & 0xf;
765 return get_address(s
, 0, *b1
, *d1
);
768 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
, target_ulong pc
)
770 TranslationBlock
*tb
;
775 /* NOTE: we handle the case where the TB spans two pages here */
776 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) ||
777 (pc
& TARGET_PAGE_MASK
) == ((s
->pc
- 1) & TARGET_PAGE_MASK
)) {
778 /* jump to same page: we can use a direct jump */
779 tcg_gen_goto_tb(tb_num
);
780 tcg_gen_movi_i64(psw_addr
, pc
);
781 tcg_gen_exit_tb((tcg_target_long
)tb
+ tb_num
);
783 /* jump to another page: currently not optimized */
784 tcg_gen_movi_i64(psw_addr
, pc
);
/* Statistics: count a branch whose condition could not be folded inline
   (no-op unless DEBUG_INLINE_BRANCHES is defined). */
static inline void account_noninline_branch(DisasContext *s, int cc_op)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_miss[cc_op]++;
#endif
}
/* Statistics: count a branch whose condition was folded inline
   (no-op unless DEBUG_INLINE_BRANCHES is defined). */
static inline void account_inline_branch(DisasContext *s, int cc_op)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_hit[cc_op]++;
#endif
}
/* Table of mask values to comparison codes, given a comparison as input.
   For a true comparison CC=3 will never be set, but we treat this
   conservatively for possible use when CC=3 indicates overflow.
   Index is the 4-bit branch mask (bit 8=EQ, 4=LT, 2=GT, 1=CC3). */
static const TCGCond ltgt_cond[16] = {
    TCG_COND_NEVER,  TCG_COND_NEVER,      /*    |    |    | x */
    TCG_COND_GT,     TCG_COND_NEVER,      /*    |    | GT | x */
    TCG_COND_LT,     TCG_COND_NEVER,      /*    | LT |    | x */
    TCG_COND_NE,     TCG_COND_NEVER,      /*    | LT | GT | x */
    TCG_COND_EQ,     TCG_COND_NEVER,      /* EQ |    |    | x */
    TCG_COND_GE,     TCG_COND_NEVER,      /* EQ |    | GT | x */
    TCG_COND_LE,     TCG_COND_NEVER,      /* EQ | LT |    | x */
    TCG_COND_ALWAYS, TCG_COND_ALWAYS,     /* EQ | LT | GT | x */
};
/* Table of mask values to comparison codes, given a logic op as input.
   For such, only CC=0 and CC=1 should be possible. */
static const TCGCond nz_cond[16] = {
    /*    |    | x | x */
    TCG_COND_NEVER, TCG_COND_NEVER, TCG_COND_NEVER, TCG_COND_NEVER,
    /*    | NE | x | x */
    TCG_COND_NE, TCG_COND_NE, TCG_COND_NE, TCG_COND_NE,
    /* EQ |    | x | x */
    TCG_COND_EQ, TCG_COND_EQ, TCG_COND_EQ, TCG_COND_EQ,
    /* EQ | NE | x | x */
    TCG_COND_ALWAYS, TCG_COND_ALWAYS, TCG_COND_ALWAYS, TCG_COND_ALWAYS,
};
830 /* Interpret MASK in terms of S->CC_OP, and fill in C with all the
831 details required to generate a TCG comparison. */
832 static void disas_jcc(DisasContext
*s
, DisasCompare
*c
, uint32_t mask
)
835 enum cc_op old_cc_op
= s
->cc_op
;
837 if (mask
== 15 || mask
== 0) {
838 c
->cond
= (mask
? TCG_COND_ALWAYS
: TCG_COND_NEVER
);
841 c
->g1
= c
->g2
= true;
846 /* Find the TCG condition for the mask + cc op. */
852 cond
= ltgt_cond
[mask
];
853 if (cond
== TCG_COND_NEVER
) {
856 account_inline_branch(s
, old_cc_op
);
859 case CC_OP_LTUGTU_32
:
860 case CC_OP_LTUGTU_64
:
861 cond
= tcg_unsigned_cond(ltgt_cond
[mask
]);
862 if (cond
== TCG_COND_NEVER
) {
865 account_inline_branch(s
, old_cc_op
);
869 cond
= nz_cond
[mask
];
870 if (cond
== TCG_COND_NEVER
) {
873 account_inline_branch(s
, old_cc_op
);
888 account_inline_branch(s
, old_cc_op
);
903 account_inline_branch(s
, old_cc_op
);
908 /* Calculate cc value. */
913 /* Jump based on CC. We'll load up the real cond below;
914 the assignment here merely avoids a compiler warning. */
915 account_noninline_branch(s
, old_cc_op
);
916 old_cc_op
= CC_OP_STATIC
;
917 cond
= TCG_COND_NEVER
;
921 /* Load up the arguments of the comparison. */
923 c
->g1
= c
->g2
= false;
927 c
->u
.s32
.a
= tcg_temp_new_i32();
928 tcg_gen_trunc_i64_i32(c
->u
.s32
.a
, cc_dst
);
929 c
->u
.s32
.b
= tcg_const_i32(0);
932 case CC_OP_LTUGTU_32
:
934 c
->u
.s32
.a
= tcg_temp_new_i32();
935 tcg_gen_trunc_i64_i32(c
->u
.s32
.a
, cc_src
);
936 c
->u
.s32
.b
= tcg_temp_new_i32();
937 tcg_gen_trunc_i64_i32(c
->u
.s32
.b
, cc_dst
);
944 c
->u
.s64
.b
= tcg_const_i64(0);
948 case CC_OP_LTUGTU_64
:
951 c
->g1
= c
->g2
= true;
956 c
->u
.s64
.a
= tcg_temp_new_i64();
957 c
->u
.s64
.b
= tcg_const_i64(0);
958 tcg_gen_and_i64(c
->u
.s64
.a
, cc_src
, cc_dst
);
966 case 0x8 | 0x4 | 0x2: /* cc != 3 */
968 c
->u
.s32
.b
= tcg_const_i32(3);
970 case 0x8 | 0x4 | 0x1: /* cc != 2 */
972 c
->u
.s32
.b
= tcg_const_i32(2);
974 case 0x8 | 0x2 | 0x1: /* cc != 1 */
976 c
->u
.s32
.b
= tcg_const_i32(1);
978 case 0x8 | 0x2: /* cc == 0 ||Â cc == 2 => (cc & 1) == 0 */
981 c
->u
.s32
.a
= tcg_temp_new_i32();
982 c
->u
.s32
.b
= tcg_const_i32(0);
983 tcg_gen_andi_i32(c
->u
.s32
.a
, cc_op
, 1);
985 case 0x8 | 0x4: /* cc < 2 */
987 c
->u
.s32
.b
= tcg_const_i32(2);
989 case 0x8: /* cc == 0 */
991 c
->u
.s32
.b
= tcg_const_i32(0);
993 case 0x4 | 0x2 | 0x1: /* cc != 0 */
995 c
->u
.s32
.b
= tcg_const_i32(0);
997 case 0x4 | 0x1: /* cc == 1 ||Â cc == 3 => (cc & 1) != 0 */
1000 c
->u
.s32
.a
= tcg_temp_new_i32();
1001 c
->u
.s32
.b
= tcg_const_i32(0);
1002 tcg_gen_andi_i32(c
->u
.s32
.a
, cc_op
, 1);
1004 case 0x4: /* cc == 1 */
1006 c
->u
.s32
.b
= tcg_const_i32(1);
1008 case 0x2 | 0x1: /* cc > 1 */
1009 cond
= TCG_COND_GTU
;
1010 c
->u
.s32
.b
= tcg_const_i32(1);
1012 case 0x2: /* cc == 2 */
1014 c
->u
.s32
.b
= tcg_const_i32(2);
1016 case 0x1: /* cc == 3 */
1018 c
->u
.s32
.b
= tcg_const_i32(3);
1021 /* CC is masked by something else: (8 >> cc) & mask. */
1024 c
->u
.s32
.a
= tcg_const_i32(8);
1025 c
->u
.s32
.b
= tcg_const_i32(0);
1026 tcg_gen_shr_i32(c
->u
.s32
.a
, c
->u
.s32
.a
, cc_op
);
1027 tcg_gen_andi_i32(c
->u
.s32
.a
, c
->u
.s32
.a
, mask
);
1038 static void free_compare(DisasCompare
*c
)
1042 tcg_temp_free_i64(c
->u
.s64
.a
);
1044 tcg_temp_free_i32(c
->u
.s32
.a
);
1049 tcg_temp_free_i64(c
->u
.s64
.b
);
1051 tcg_temp_free_i32(c
->u
.s32
.b
);
1056 static void gen_jcc(DisasContext
*s
, uint32_t mask
, int skip
)
1061 disas_jcc(s
, &c
, mask
);
1062 cond
= tcg_invert_cond(c
.cond
);
1065 tcg_gen_brcond_i64(cond
, c
.u
.s64
.a
, c
.u
.s64
.b
, skip
);
1067 tcg_gen_brcond_i32(cond
, c
.u
.s32
.a
, c
.u
.s32
.b
, skip
);
1073 static void gen_bcr(DisasContext
*s
, uint32_t mask
, TCGv_i64 target
,
1080 gen_update_cc_op(s
);
1081 tcg_gen_mov_i64(psw_addr
, target
);
1083 } else if (mask
== 0) {
1084 /* ignore cc and never match */
1085 gen_goto_tb(s
, 0, offset
+ 2);
1087 TCGv_i64 new_addr
= tcg_temp_local_new_i64();
1089 tcg_gen_mov_i64(new_addr
, target
);
1090 skip
= gen_new_label();
1091 gen_jcc(s
, mask
, skip
);
1092 gen_update_cc_op(s
);
1093 tcg_gen_mov_i64(psw_addr
, new_addr
);
1094 tcg_temp_free_i64(new_addr
);
1096 gen_set_label(skip
);
1097 tcg_temp_free_i64(new_addr
);
1098 gen_goto_tb(s
, 1, offset
+ 2);
1102 static void gen_brc(uint32_t mask
, DisasContext
*s
, int32_t offset
)
1108 gen_goto_tb(s
, 0, s
->pc
+ offset
);
1109 } else if (mask
== 0) {
1110 /* ignore cc and never match */
1111 gen_goto_tb(s
, 0, s
->pc
+ 4);
1113 skip
= gen_new_label();
1114 gen_jcc(s
, mask
, skip
);
1115 gen_goto_tb(s
, 0, s
->pc
+ offset
);
1116 gen_set_label(skip
);
1117 gen_goto_tb(s
, 1, s
->pc
+ 4);
1119 s
->is_jmp
= DISAS_TB_JUMP
;
1122 static void gen_op_mvc(DisasContext
*s
, int l
, TCGv_i64 s1
, TCGv_i64 s2
)
1126 int l_memset
= gen_new_label();
1127 int l_out
= gen_new_label();
1128 TCGv_i64 dest
= tcg_temp_local_new_i64();
1129 TCGv_i64 src
= tcg_temp_local_new_i64();
1132 /* Find out if we should use the inline version of mvc */
1147 /* Fall back to helper */
1148 vl
= tcg_const_i32(l
);
1149 potential_page_fault(s
);
1150 gen_helper_mvc(cpu_env
, vl
, s1
, s2
);
1151 tcg_temp_free_i32(vl
);
1155 tcg_gen_mov_i64(dest
, s1
);
1156 tcg_gen_mov_i64(src
, s2
);
1158 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
1159 /* XXX what if we overflow while moving? */
1160 tcg_gen_andi_i64(dest
, dest
, 0x7fffffffUL
);
1161 tcg_gen_andi_i64(src
, src
, 0x7fffffffUL
);
1164 tmp
= tcg_temp_new_i64();
1165 tcg_gen_addi_i64(tmp
, src
, 1);
1166 tcg_gen_brcond_i64(TCG_COND_EQ
, dest
, tmp
, l_memset
);
1167 tcg_temp_free_i64(tmp
);
1171 tmp
= tcg_temp_new_i64();
1173 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1174 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1176 tcg_temp_free_i64(tmp
);
1179 tmp
= tcg_temp_new_i64();
1181 tcg_gen_qemu_ld16u(tmp
, src
, get_mem_index(s
));
1182 tcg_gen_qemu_st16(tmp
, dest
, get_mem_index(s
));
1184 tcg_temp_free_i64(tmp
);
1187 tmp
= tcg_temp_new_i64();
1189 tcg_gen_qemu_ld32u(tmp
, src
, get_mem_index(s
));
1190 tcg_gen_qemu_st32(tmp
, dest
, get_mem_index(s
));
1192 tcg_temp_free_i64(tmp
);
1195 tmp
= tcg_temp_new_i64();
1196 tmp2
= tcg_temp_new_i64();
1198 tcg_gen_qemu_ld32u(tmp
, src
, get_mem_index(s
));
1199 tcg_gen_addi_i64(src
, src
, 4);
1200 tcg_gen_qemu_ld8u(tmp2
, src
, get_mem_index(s
));
1201 tcg_gen_qemu_st32(tmp
, dest
, get_mem_index(s
));
1202 tcg_gen_addi_i64(dest
, dest
, 4);
1203 tcg_gen_qemu_st8(tmp2
, dest
, get_mem_index(s
));
1205 tcg_temp_free_i64(tmp
);
1206 tcg_temp_free_i64(tmp2
);
1209 tmp
= tcg_temp_new_i64();
1211 tcg_gen_qemu_ld64(tmp
, src
, get_mem_index(s
));
1212 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1214 tcg_temp_free_i64(tmp
);
1217 /* The inline version can become too big for too uneven numbers, only
1218 use it on known good lengths */
1219 tmp
= tcg_temp_new_i64();
1220 tmp2
= tcg_const_i64(8);
1221 for (i
= 0; (i
+ 7) <= l
; i
+= 8) {
1222 tcg_gen_qemu_ld64(tmp
, src
, get_mem_index(s
));
1223 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1225 tcg_gen_add_i64(src
, src
, tmp2
);
1226 tcg_gen_add_i64(dest
, dest
, tmp2
);
1229 tcg_temp_free_i64(tmp2
);
1230 tmp2
= tcg_const_i64(1);
1232 for (; i
<= l
; i
++) {
1233 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1234 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1236 tcg_gen_add_i64(src
, src
, tmp2
);
1237 tcg_gen_add_i64(dest
, dest
, tmp2
);
1240 tcg_temp_free_i64(tmp2
);
1241 tcg_temp_free_i64(tmp
);
1247 gen_set_label(l_memset
);
1248 /* memset case (dest == (src + 1)) */
1250 tmp
= tcg_temp_new_i64();
1251 tmp2
= tcg_temp_new_i64();
1252 /* fill tmp with the byte */
1253 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1254 tcg_gen_shli_i64(tmp2
, tmp
, 8);
1255 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1256 tcg_gen_shli_i64(tmp2
, tmp
, 16);
1257 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1258 tcg_gen_shli_i64(tmp2
, tmp
, 32);
1259 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1260 tcg_temp_free_i64(tmp2
);
1262 tmp2
= tcg_const_i64(8);
1264 for (i
= 0; (i
+ 7) <= l
; i
+= 8) {
1265 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1266 tcg_gen_addi_i64(dest
, dest
, 8);
1269 tcg_temp_free_i64(tmp2
);
1270 tmp2
= tcg_const_i64(1);
1272 for (; i
<= l
; i
++) {
1273 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1274 tcg_gen_addi_i64(dest
, dest
, 1);
1277 tcg_temp_free_i64(tmp2
);
1278 tcg_temp_free_i64(tmp
);
1280 gen_set_label(l_out
);
1282 tcg_temp_free(dest
);
1286 static void gen_op_clc(DisasContext
*s
, int l
, TCGv_i64 s1
, TCGv_i64 s2
)
1292 /* check for simple 32bit or 64bit match */
1295 tmp
= tcg_temp_new_i64();
1296 tmp2
= tcg_temp_new_i64();
1298 tcg_gen_qemu_ld8u(tmp
, s1
, get_mem_index(s
));
1299 tcg_gen_qemu_ld8u(tmp2
, s2
, get_mem_index(s
));
1300 cmp_u64(s
, tmp
, tmp2
);
1302 tcg_temp_free_i64(tmp
);
1303 tcg_temp_free_i64(tmp2
);
1306 tmp
= tcg_temp_new_i64();
1307 tmp2
= tcg_temp_new_i64();
1309 tcg_gen_qemu_ld16u(tmp
, s1
, get_mem_index(s
));
1310 tcg_gen_qemu_ld16u(tmp2
, s2
, get_mem_index(s
));
1311 cmp_u64(s
, tmp
, tmp2
);
1313 tcg_temp_free_i64(tmp
);
1314 tcg_temp_free_i64(tmp2
);
1317 tmp
= tcg_temp_new_i64();
1318 tmp2
= tcg_temp_new_i64();
1320 tcg_gen_qemu_ld32u(tmp
, s1
, get_mem_index(s
));
1321 tcg_gen_qemu_ld32u(tmp2
, s2
, get_mem_index(s
));
1322 cmp_u64(s
, tmp
, tmp2
);
1324 tcg_temp_free_i64(tmp
);
1325 tcg_temp_free_i64(tmp2
);
1328 tmp
= tcg_temp_new_i64();
1329 tmp2
= tcg_temp_new_i64();
1331 tcg_gen_qemu_ld64(tmp
, s1
, get_mem_index(s
));
1332 tcg_gen_qemu_ld64(tmp2
, s2
, get_mem_index(s
));
1333 cmp_u64(s
, tmp
, tmp2
);
1335 tcg_temp_free_i64(tmp
);
1336 tcg_temp_free_i64(tmp2
);
1340 potential_page_fault(s
);
1341 vl
= tcg_const_i32(l
);
1342 gen_helper_clc(cc_op
, cpu_env
, vl
, s1
, s2
);
1343 tcg_temp_free_i32(vl
);
1347 static void disas_e3(CPUS390XState
*env
, DisasContext
* s
, int op
, int r1
,
1348 int x2
, int b2
, int d2
)
1350 TCGv_i64 addr
, tmp
, tmp2
, tmp3
, tmp4
;
1351 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
1353 LOG_DISAS("disas_e3: op 0x%x r1 %d x2 %d b2 %d d2 %d\n",
1354 op
, r1
, x2
, b2
, d2
);
1355 addr
= get_address(s
, x2
, b2
, d2
);
1357 case 0x2: /* LTG R1,D2(X2,B2) [RXY] */
1358 case 0x4: /* lg r1,d2(x2,b2) */
1359 tcg_gen_qemu_ld64(regs
[r1
], addr
, get_mem_index(s
));
1361 set_cc_s64(s
, regs
[r1
]);
1364 case 0x12: /* LT R1,D2(X2,B2) [RXY] */
1365 tmp2
= tcg_temp_new_i64();
1366 tmp32_1
= tcg_temp_new_i32();
1367 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1368 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1369 store_reg32(r1
, tmp32_1
);
1370 set_cc_s32(s
, tmp32_1
);
1371 tcg_temp_free_i64(tmp2
);
1372 tcg_temp_free_i32(tmp32_1
);
1374 case 0xc: /* MSG R1,D2(X2,B2) [RXY] */
1375 case 0x1c: /* MSGF R1,D2(X2,B2) [RXY] */
1376 tmp2
= tcg_temp_new_i64();
1378 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1380 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1382 tcg_gen_mul_i64(regs
[r1
], regs
[r1
], tmp2
);
1383 tcg_temp_free_i64(tmp2
);
1385 case 0xd: /* DSG R1,D2(X2,B2) [RXY] */
1386 case 0x1d: /* DSGF R1,D2(X2,B2) [RXY] */
1387 tmp2
= tcg_temp_new_i64();
1389 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1391 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1393 tmp4
= load_reg(r1
+ 1);
1394 tmp3
= tcg_temp_new_i64();
1395 tcg_gen_div_i64(tmp3
, tmp4
, tmp2
);
1396 store_reg(r1
+ 1, tmp3
);
1397 tcg_gen_rem_i64(tmp3
, tmp4
, tmp2
);
1398 store_reg(r1
, tmp3
);
1399 tcg_temp_free_i64(tmp2
);
1400 tcg_temp_free_i64(tmp3
);
1401 tcg_temp_free_i64(tmp4
);
1403 case 0xf: /* LRVG R1,D2(X2,B2) [RXE] */
1404 tmp2
= tcg_temp_new_i64();
1405 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1406 tcg_gen_bswap64_i64(tmp2
, tmp2
);
1407 store_reg(r1
, tmp2
);
1408 tcg_temp_free_i64(tmp2
);
1410 case 0x14: /* LGF R1,D2(X2,B2) [RXY] */
1411 case 0x16: /* LLGF R1,D2(X2,B2) [RXY] */
1412 tmp2
= tcg_temp_new_i64();
1413 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1415 tcg_gen_ext32s_i64(tmp2
, tmp2
);
1417 store_reg(r1
, tmp2
);
1418 tcg_temp_free_i64(tmp2
);
1420 case 0x15: /* LGH R1,D2(X2,B2) [RXY] */
1421 tmp2
= tcg_temp_new_i64();
1422 tcg_gen_qemu_ld16s(tmp2
, addr
, get_mem_index(s
));
1423 store_reg(r1
, tmp2
);
1424 tcg_temp_free_i64(tmp2
);
1426 case 0x17: /* LLGT R1,D2(X2,B2) [RXY] */
1427 tmp2
= tcg_temp_new_i64();
1428 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1429 tcg_gen_andi_i64(tmp2
, tmp2
, 0x7fffffffULL
);
1430 store_reg(r1
, tmp2
);
1431 tcg_temp_free_i64(tmp2
);
1433 case 0x1e: /* LRV R1,D2(X2,B2) [RXY] */
1434 tmp2
= tcg_temp_new_i64();
1435 tmp32_1
= tcg_temp_new_i32();
1436 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1437 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1438 tcg_temp_free_i64(tmp2
);
1439 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
1440 store_reg32(r1
, tmp32_1
);
1441 tcg_temp_free_i32(tmp32_1
);
1443 case 0x1f: /* LRVH R1,D2(X2,B2) [RXY] */
1444 tmp2
= tcg_temp_new_i64();
1445 tmp32_1
= tcg_temp_new_i32();
1446 tcg_gen_qemu_ld16u(tmp2
, addr
, get_mem_index(s
));
1447 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1448 tcg_temp_free_i64(tmp2
);
1449 tcg_gen_bswap16_i32(tmp32_1
, tmp32_1
);
1450 store_reg16(r1
, tmp32_1
);
1451 tcg_temp_free_i32(tmp32_1
);
1453 case 0x20: /* CG R1,D2(X2,B2) [RXY] */
1454 case 0x21: /* CLG R1,D2(X2,B2) */
1455 case 0x30: /* CGF R1,D2(X2,B2) [RXY] */
1456 case 0x31: /* CLGF R1,D2(X2,B2) [RXY] */
1457 tmp2
= tcg_temp_new_i64();
1461 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1464 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1467 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1475 cmp_s64(s
, regs
[r1
], tmp2
);
1479 cmp_u64(s
, regs
[r1
], tmp2
);
1484 tcg_temp_free_i64(tmp2
);
1486 case 0x24: /* stg r1, d2(x2,b2) */
1487 tcg_gen_qemu_st64(regs
[r1
], addr
, get_mem_index(s
));
1489 case 0x3e: /* STRV R1,D2(X2,B2) [RXY] */
1490 tmp32_1
= load_reg32(r1
);
1491 tmp2
= tcg_temp_new_i64();
1492 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
1493 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
1494 tcg_temp_free_i32(tmp32_1
);
1495 tcg_gen_qemu_st32(tmp2
, addr
, get_mem_index(s
));
1496 tcg_temp_free_i64(tmp2
);
1498 case 0x50: /* STY R1,D2(X2,B2) [RXY] */
1499 tmp32_1
= load_reg32(r1
);
1500 tmp2
= tcg_temp_new_i64();
1501 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
1502 tcg_temp_free_i32(tmp32_1
);
1503 tcg_gen_qemu_st32(tmp2
, addr
, get_mem_index(s
));
1504 tcg_temp_free_i64(tmp2
);
1506 case 0x57: /* XY R1,D2(X2,B2) [RXY] */
1507 tmp32_1
= load_reg32(r1
);
1508 tmp32_2
= tcg_temp_new_i32();
1509 tmp2
= tcg_temp_new_i64();
1510 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1511 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1512 tcg_temp_free_i64(tmp2
);
1513 tcg_gen_xor_i32(tmp32_2
, tmp32_1
, tmp32_2
);
1514 store_reg32(r1
, tmp32_2
);
1515 set_cc_nz_u32(s
, tmp32_2
);
1516 tcg_temp_free_i32(tmp32_1
);
1517 tcg_temp_free_i32(tmp32_2
);
1519 case 0x58: /* LY R1,D2(X2,B2) [RXY] */
1520 tmp3
= tcg_temp_new_i64();
1521 tcg_gen_qemu_ld32u(tmp3
, addr
, get_mem_index(s
));
1522 store_reg32_i64(r1
, tmp3
);
1523 tcg_temp_free_i64(tmp3
);
1525 case 0x71: /* LAY R1,D2(X2,B2) [RXY] */
1526 store_reg(r1
, addr
);
1528 case 0x72: /* STCY R1,D2(X2,B2) [RXY] */
1529 tmp32_1
= load_reg32(r1
);
1530 tmp2
= tcg_temp_new_i64();
1531 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
1532 tcg_gen_qemu_st8(tmp2
, addr
, get_mem_index(s
));
1533 tcg_temp_free_i32(tmp32_1
);
1534 tcg_temp_free_i64(tmp2
);
1536 case 0x73: /* ICY R1,D2(X2,B2) [RXY] */
1537 tmp3
= tcg_temp_new_i64();
1538 tcg_gen_qemu_ld8u(tmp3
, addr
, get_mem_index(s
));
1539 store_reg8(r1
, tmp3
);
1540 tcg_temp_free_i64(tmp3
);
1542 case 0x76: /* LB R1,D2(X2,B2) [RXY] */
1543 case 0x77: /* LGB R1,D2(X2,B2) [RXY] */
1544 tmp2
= tcg_temp_new_i64();
1545 tcg_gen_qemu_ld8s(tmp2
, addr
, get_mem_index(s
));
1548 tcg_gen_ext8s_i64(tmp2
, tmp2
);
1549 store_reg32_i64(r1
, tmp2
);
1552 tcg_gen_ext8s_i64(tmp2
, tmp2
);
1553 store_reg(r1
, tmp2
);
1558 tcg_temp_free_i64(tmp2
);
1560 case 0x78: /* LHY R1,D2(X2,B2) [RXY] */
1561 tmp2
= tcg_temp_new_i64();
1562 tcg_gen_qemu_ld16s(tmp2
, addr
, get_mem_index(s
));
1563 store_reg32_i64(r1
, tmp2
);
1564 tcg_temp_free_i64(tmp2
);
1566 case 0x80: /* NG R1,D2(X2,B2) [RXY] */
1567 case 0x81: /* OG R1,D2(X2,B2) [RXY] */
1568 case 0x82: /* XG R1,D2(X2,B2) [RXY] */
1569 tmp3
= tcg_temp_new_i64();
1570 tcg_gen_qemu_ld64(tmp3
, addr
, get_mem_index(s
));
1573 tcg_gen_and_i64(regs
[r1
], regs
[r1
], tmp3
);
1576 tcg_gen_or_i64(regs
[r1
], regs
[r1
], tmp3
);
1579 tcg_gen_xor_i64(regs
[r1
], regs
[r1
], tmp3
);
1584 set_cc_nz_u64(s
, regs
[r1
]);
1585 tcg_temp_free_i64(tmp3
);
1587 case 0x86: /* MLG R1,D2(X2,B2) [RXY] */
1588 tmp2
= tcg_temp_new_i64();
1589 tmp32_1
= tcg_const_i32(r1
);
1590 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1591 gen_helper_mlg(cpu_env
, tmp32_1
, tmp2
);
1592 tcg_temp_free_i64(tmp2
);
1593 tcg_temp_free_i32(tmp32_1
);
1595 case 0x87: /* DLG R1,D2(X2,B2) [RXY] */
1596 tmp2
= tcg_temp_new_i64();
1597 tmp32_1
= tcg_const_i32(r1
);
1598 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1599 gen_helper_dlg(cpu_env
, tmp32_1
, tmp2
);
1600 tcg_temp_free_i64(tmp2
);
1601 tcg_temp_free_i32(tmp32_1
);
1603 case 0x88: /* ALCG R1,D2(X2,B2) [RXY] */
1604 tmp2
= tcg_temp_new_i64();
1605 tmp3
= tcg_temp_new_i64();
1606 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1607 /* XXX possible optimization point */
1609 tcg_gen_extu_i32_i64(tmp3
, cc_op
);
1610 tcg_gen_shri_i64(tmp3
, tmp3
, 1);
1611 tcg_gen_andi_i64(tmp3
, tmp3
, 1);
1612 tcg_gen_add_i64(tmp3
, tmp2
, tmp3
);
1613 tcg_gen_add_i64(tmp3
, regs
[r1
], tmp3
);
1614 store_reg(r1
, tmp3
);
1615 set_cc_addu64(s
, regs
[r1
], tmp2
, tmp3
);
1616 tcg_temp_free_i64(tmp2
);
1617 tcg_temp_free_i64(tmp3
);
1619 case 0x89: /* SLBG R1,D2(X2,B2) [RXY] */
1620 tmp2
= tcg_temp_new_i64();
1621 tmp32_1
= tcg_const_i32(r1
);
1622 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1623 /* XXX possible optimization point */
1625 gen_helper_slbg(cc_op
, cpu_env
, cc_op
, tmp32_1
, regs
[r1
], tmp2
);
1627 tcg_temp_free_i64(tmp2
);
1628 tcg_temp_free_i32(tmp32_1
);
1630 case 0x90: /* LLGC R1,D2(X2,B2) [RXY] */
1631 tcg_gen_qemu_ld8u(regs
[r1
], addr
, get_mem_index(s
));
1633 case 0x91: /* LLGH R1,D2(X2,B2) [RXY] */
1634 tcg_gen_qemu_ld16u(regs
[r1
], addr
, get_mem_index(s
));
1636 case 0x94: /* LLC R1,D2(X2,B2) [RXY] */
1637 tmp2
= tcg_temp_new_i64();
1638 tcg_gen_qemu_ld8u(tmp2
, addr
, get_mem_index(s
));
1639 store_reg32_i64(r1
, tmp2
);
1640 tcg_temp_free_i64(tmp2
);
1642 case 0x95: /* LLH R1,D2(X2,B2) [RXY] */
1643 tmp2
= tcg_temp_new_i64();
1644 tcg_gen_qemu_ld16u(tmp2
, addr
, get_mem_index(s
));
1645 store_reg32_i64(r1
, tmp2
);
1646 tcg_temp_free_i64(tmp2
);
1648 case 0x96: /* ML R1,D2(X2,B2) [RXY] */
1649 tmp2
= tcg_temp_new_i64();
1650 tmp3
= load_reg((r1
+ 1) & 15);
1651 tcg_gen_ext32u_i64(tmp3
, tmp3
);
1652 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1653 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
1654 store_reg32_i64((r1
+ 1) & 15, tmp2
);
1655 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
1656 store_reg32_i64(r1
, tmp2
);
1657 tcg_temp_free_i64(tmp2
);
1658 tcg_temp_free_i64(tmp3
);
1660 case 0x97: /* DL R1,D2(X2,B2) [RXY] */
1661 /* reg(r1) = reg(r1, r1+1) % ld32(addr) */
1662 /* reg(r1+1) = reg(r1, r1+1) / ld32(addr) */
1664 tmp2
= tcg_temp_new_i64();
1665 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1666 tmp3
= load_reg((r1
+ 1) & 15);
1667 tcg_gen_ext32u_i64(tmp2
, tmp2
);
1668 tcg_gen_ext32u_i64(tmp3
, tmp3
);
1669 tcg_gen_shli_i64(tmp
, tmp
, 32);
1670 tcg_gen_or_i64(tmp
, tmp
, tmp3
);
1672 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
1673 tcg_gen_div_i64(tmp
, tmp
, tmp2
);
1674 store_reg32_i64((r1
+ 1) & 15, tmp
);
1675 store_reg32_i64(r1
, tmp3
);
1676 tcg_temp_free_i64(tmp
);
1677 tcg_temp_free_i64(tmp2
);
1678 tcg_temp_free_i64(tmp3
);
1680 case 0x98: /* ALC R1,D2(X2,B2) [RXY] */
1681 tmp2
= tcg_temp_new_i64();
1682 tmp32_1
= load_reg32(r1
);
1683 tmp32_2
= tcg_temp_new_i32();
1684 tmp32_3
= tcg_temp_new_i32();
1685 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1686 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1687 /* XXX possible optimization point */
1689 gen_helper_addc_u32(tmp32_3
, cc_op
, tmp32_1
, tmp32_2
);
1690 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
1691 store_reg32(r1
, tmp32_3
);
1692 tcg_temp_free_i64(tmp2
);
1693 tcg_temp_free_i32(tmp32_1
);
1694 tcg_temp_free_i32(tmp32_2
);
1695 tcg_temp_free_i32(tmp32_3
);
1697 case 0x99: /* SLB R1,D2(X2,B2) [RXY] */
1698 tmp2
= tcg_temp_new_i64();
1699 tmp32_1
= tcg_const_i32(r1
);
1700 tmp32_2
= tcg_temp_new_i32();
1701 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1702 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1703 /* XXX possible optimization point */
1705 gen_helper_slb(cc_op
, cpu_env
, cc_op
, tmp32_1
, tmp32_2
);
1707 tcg_temp_free_i64(tmp2
);
1708 tcg_temp_free_i32(tmp32_1
);
1709 tcg_temp_free_i32(tmp32_2
);
1712 LOG_DISAS("illegal e3 operation 0x%x\n", op
);
1713 gen_illegal_opcode(s
);
1716 tcg_temp_free_i64(addr
);
1719 #ifndef CONFIG_USER_ONLY
/*
 * Decode the E5 (SSE-format) opcode group.
 * Extracts the sub-opcode from bits 32..39 of the 48-bit instruction and
 * computes both storage-operand addresses (B1/D1 and B2/D2), then dispatches.
 * Only TPROT is handled here; everything else raises an illegal opcode.
 *
 * NOTE(review): this chunk is a lossy extraction — the `switch (op)` header,
 * `break;` statements, braces and local declarations (`tmp`, `tmp2`) are not
 * visible; confirm against the pristine file before editing at line level.
 */
1720 static void disas_e5(CPUS390XState
*env
, DisasContext
* s
, uint64_t insn
)
/* sub-opcode: bits 32..39 of the instruction word */
1723 int op
= (insn
>> 32) & 0xff;
/* first operand address: base B1 (bits 28..31), displacement D1 (bits 16..27) */
1725 tmp
= get_address(s
, 0, (insn
>> 28) & 0xf, (insn
>> 16) & 0xfff);
/* second operand address: base B2 (bits 12..15), displacement D2 (bits 0..11) */
1726 tmp2
= get_address(s
, 0, (insn
>> 12) & 0xf, insn
& 0xfff);
1728 LOG_DISAS("disas_e5: insn %" PRIx64
"\n", insn
);
1730 case 0x01: /* TPROT D1(B1),D2(B2) [SSE] */
1731 /* Test Protection */
/* helper may touch guest memory, so mark a potential page fault first */
1732 potential_page_fault(s
);
1733 gen_helper_tprot(cc_op
, tmp
, tmp2
);
1737 LOG_DISAS("illegal e5 operation 0x%x\n", op
);
1738 gen_illegal_opcode(s
);
/* release both address temporaries on every path */
1742 tcg_temp_free_i64(tmp
);
1743 tcg_temp_free_i64(tmp2
);
/*
 * Decode the EB opcode group (mostly RSY/RSE/SIY formats): 64-bit shifts
 * and rotates, load/store-multiple, compare-and-swap variants, control
 * register load/store, and SIY immediate memory ops.
 *
 * NOTE(review): lossy extraction — the `switch (op)` header, inner
 * `switch`es, `break;` statements and braces are missing from this view
 * (see the gaps in the embedded original line numbers). The annotations
 * below describe only what the visible statements do; confirm control
 * flow against the pristine file.
 */
1747 static void disas_eb(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
1748 int r3
, int b2
, int d2
)
1750 TCGv_i64 tmp
, tmp2
, tmp3
, tmp4
;
1751 TCGv_i32 tmp32_1
, tmp32_2
;
1754 LOG_DISAS("disas_eb: op 0x%x r1 %d r3 %d b2 %d d2 0x%x\n",
1755 op
, r1
, r3
, b2
, d2
);
/* 64-bit shift/rotate family: shift amount is either an address
   computation (register form) or the immediate displacement, in both
   cases masked to 6 bits */
1757 case 0xc: /* SRLG R1,R3,D2(B2) [RSY] */
1758 case 0xd: /* SLLG R1,R3,D2(B2) [RSY] */
1759 case 0xa: /* SRAG R1,R3,D2(B2) [RSY] */
1760 case 0xb: /* SLAG R1,R3,D2(B2) [RSY] */
1761 case 0x1c: /* RLLG R1,R3,D2(B2) [RSY] */
1763 tmp
= get_address(s
, 0, b2
, d2
);
1764 tcg_gen_andi_i64(tmp
, tmp
, 0x3f);
1766 tmp
= tcg_const_i64(d2
& 0x3f);
1770 tcg_gen_shr_i64(regs
[r1
], regs
[r3
], tmp
);
1773 tcg_gen_shl_i64(regs
[r1
], regs
[r3
], tmp
);
1776 tcg_gen_sar_i64(regs
[r1
], regs
[r3
], tmp
);
/* SLAG: shift left arithmetic — shift, then re-insert the source's
   sign bit and record CC_OP_SLAG for overflow detection */
1779 tmp2
= tcg_temp_new_i64();
1780 tmp3
= tcg_temp_new_i64();
1781 gen_op_update2_cc_i64(s
, CC_OP_SLAG
, regs
[r3
], tmp
);
1782 tcg_gen_shl_i64(tmp2
, regs
[r3
], tmp
);
1783 /* override sign bit with source sign */
1784 tcg_gen_andi_i64(tmp2
, tmp2
, ~0x8000000000000000ULL
);
1785 tcg_gen_andi_i64(tmp3
, regs
[r3
], 0x8000000000000000ULL
);
1786 tcg_gen_or_i64(regs
[r1
], tmp2
, tmp3
);
1787 tcg_temp_free_i64(tmp2
);
1788 tcg_temp_free_i64(tmp3
);
1791 tcg_gen_rotl_i64(regs
[r1
], regs
[r3
], tmp
);
1798 set_cc_s64(s
, regs
[r1
]);
1800 tcg_temp_free_i64(tmp
);
/* RLL: 32-bit rotate-left of r3 by a 6-bit amount, result into r1 */
1802 case 0x1d: /* RLL R1,R3,D2(B2) [RSY] */
1804 tmp
= get_address(s
, 0, b2
, d2
);
1805 tcg_gen_andi_i64(tmp
, tmp
, 0x3f);
1807 tmp
= tcg_const_i64(d2
& 0x3f);
1809 tmp32_1
= tcg_temp_new_i32();
1810 tmp32_2
= load_reg32(r3
);
1811 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
1814 tcg_gen_rotl_i32(tmp32_1
, tmp32_2
, tmp32_1
);
1820 store_reg32(r1
, tmp32_1
);
1821 tcg_temp_free_i64(tmp
);
1822 tcg_temp_free_i32(tmp32_1
);
1823 tcg_temp_free_i32(tmp32_2
);
/* load/store-multiple: iterate registers r1..r3 (mod 16), advancing the
   address by stm_len per register; tmp4 holds a shift count used for the
   high-half (LMH/STMH) variants */
1825 case 0x4: /* LMG R1,R3,D2(B2) [RSE] */
1826 case 0x24: /* STMG R1,R3,D2(B2) [RSE] */
1829 case 0x26: /* STMH R1,R3,D2(B2) [RSE] */
1830 case 0x96: /* LMH R1,R3,D2(B2) [RSE] */
1833 /* Apparently, unrolling lmg/stmg of any size gains performance -
1834 even for very long ones... */
1835 tmp
= get_address(s
, 0, b2
, d2
);
1836 tmp3
= tcg_const_i64(stm_len
);
1837 tmp4
= tcg_const_i64(op
== 0x26 ? 32 : 4);
1838 for (i
= r1
;; i
= (i
+ 1) % 16) {
1841 tcg_gen_qemu_ld64(regs
[i
], tmp
, get_mem_index(s
));
1844 tmp2
= tcg_temp_new_i64();
1845 #if HOST_LONG_BITS == 32
1846 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
1847 tcg_gen_trunc_i64_i32(TCGV_HIGH(regs
[i
]), tmp2
);
1849 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
1850 tcg_gen_shl_i64(tmp2
, tmp2
, tmp4
);
1851 tcg_gen_ext32u_i64(regs
[i
], regs
[i
]);
1852 tcg_gen_or_i64(regs
[i
], regs
[i
], tmp2
);
1854 tcg_temp_free_i64(tmp2
);
1857 tcg_gen_qemu_st64(regs
[i
], tmp
, get_mem_index(s
));
1860 tmp2
= tcg_temp_new_i64();
1861 tcg_gen_shr_i64(tmp2
, regs
[i
], tmp4
);
1862 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
1863 tcg_temp_free_i64(tmp2
);
/* advance the effective address to the next register's slot */
1871 tcg_gen_add_i64(tmp
, tmp
, tmp3
);
1873 tcg_temp_free_i64(tmp
);
1874 tcg_temp_free_i64(tmp3
);
1875 tcg_temp_free_i64(tmp4
);
1877 case 0x2c: /* STCMH R1,M3,D2(B2) [RSY] */
1878 tmp
= get_address(s
, 0, b2
, d2
);
1879 tmp32_1
= tcg_const_i32(r1
);
1880 tmp32_2
= tcg_const_i32(r3
);
1881 potential_page_fault(s
);
1882 gen_helper_stcmh(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
1883 tcg_temp_free_i64(tmp
);
1884 tcg_temp_free_i32(tmp32_1
);
1885 tcg_temp_free_i32(tmp32_2
);
1887 #ifndef CONFIG_USER_ONLY
/* privileged control-register load/store */
1888 case 0x2f: /* LCTLG R1,R3,D2(B2) [RSE] */
1890 check_privileged(s
);
1891 tmp
= get_address(s
, 0, b2
, d2
);
1892 tmp32_1
= tcg_const_i32(r1
);
1893 tmp32_2
= tcg_const_i32(r3
);
1894 potential_page_fault(s
);
1895 gen_helper_lctlg(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
1896 tcg_temp_free_i64(tmp
);
1897 tcg_temp_free_i32(tmp32_1
);
1898 tcg_temp_free_i32(tmp32_2
);
1900 case 0x25: /* STCTG R1,R3,D2(B2) [RSE] */
1902 check_privileged(s
);
1903 tmp
= get_address(s
, 0, b2
, d2
);
1904 tmp32_1
= tcg_const_i32(r1
);
1905 tmp32_2
= tcg_const_i32(r3
);
1906 potential_page_fault(s
);
1907 gen_helper_stctg(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
1908 tcg_temp_free_i64(tmp
);
1909 tcg_temp_free_i32(tmp32_1
);
1910 tcg_temp_free_i32(tmp32_2
);
/* compare-and-swap variants implemented entirely in helpers */
1913 case 0x30: /* CSG R1,R3,D2(B2) [RSY] */
1914 tmp
= get_address(s
, 0, b2
, d2
);
1915 tmp32_1
= tcg_const_i32(r1
);
1916 tmp32_2
= tcg_const_i32(r3
);
1917 potential_page_fault(s
);
1918 /* XXX rewrite in tcg */
1919 gen_helper_csg(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
1921 tcg_temp_free_i64(tmp
);
1922 tcg_temp_free_i32(tmp32_1
);
1923 tcg_temp_free_i32(tmp32_2
);
1925 case 0x3e: /* CDSG R1,R3,D2(B2) [RSY] */
1926 tmp
= get_address(s
, 0, b2
, d2
);
1927 tmp32_1
= tcg_const_i32(r1
);
1928 tmp32_2
= tcg_const_i32(r3
);
1929 potential_page_fault(s
);
1930 /* XXX rewrite in tcg */
1931 gen_helper_cdsg(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
1933 tcg_temp_free_i64(tmp
);
1934 tcg_temp_free_i32(tmp32_1
);
1935 tcg_temp_free_i32(tmp32_2
);
/* SIY-format ops: for SIY, the (r1 << 4) | r3 nibbles form the I2
   immediate byte */
1937 case 0x51: /* TMY D1(B1),I2 [SIY] */
1938 tmp
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the destination */
1939 tmp2
= tcg_const_i64((r1
<< 4) | r3
);
1940 tcg_gen_qemu_ld8u(tmp
, tmp
, get_mem_index(s
));
1941 /* yes, this is a 32 bit operation with 64 bit tcg registers, because
1942 that incurs less conversions */
1943 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_32
);
1944 tcg_temp_free_i64(tmp
);
1945 tcg_temp_free_i64(tmp2
);
1947 case 0x52: /* MVIY D1(B1),I2 [SIY] */
1948 tmp
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the destination */
1949 tmp2
= tcg_const_i64((r1
<< 4) | r3
);
1950 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
1951 tcg_temp_free_i64(tmp
);
1952 tcg_temp_free_i64(tmp2
);
1954 case 0x55: /* CLIY D1(B1),I2 [SIY] */
1955 tmp3
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the 1st operand */
1956 tmp
= tcg_temp_new_i64();
1957 tmp32_1
= tcg_temp_new_i32();
1958 tcg_gen_qemu_ld8u(tmp
, tmp3
, get_mem_index(s
));
1959 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
1960 cmp_u32c(s
, tmp32_1
, (r1
<< 4) | r3
);
1961 tcg_temp_free_i64(tmp
);
1962 tcg_temp_free_i64(tmp3
);
1963 tcg_temp_free_i32(tmp32_1
);
1965 case 0x80: /* ICMH R1,M3,D2(B2) [RSY] */
1966 tmp
= get_address(s
, 0, b2
, d2
);
1967 tmp32_1
= tcg_const_i32(r1
);
1968 tmp32_2
= tcg_const_i32(r3
);
1969 potential_page_fault(s
);
1970 /* XXX split CC calculation out */
1971 gen_helper_icmh(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
1973 tcg_temp_free_i64(tmp
);
1974 tcg_temp_free_i32(tmp32_1
);
1975 tcg_temp_free_i32(tmp32_2
);
1978 LOG_DISAS("illegal eb operation 0x%x\n", op
);
1979 gen_illegal_opcode(s
);
/*
 * Decode the ED opcode group: short/long BFP (binary floating point)
 * memory operations in RXE/RXF format. Computes the X2/B2/D2 effective
 * address once, passes r1 as a constant to the FP helpers, and frees
 * both at the end.
 *
 * NOTE(review): lossy extraction — the `switch (op)` header, `break;`
 * statements and braces are not visible here; some cases (AEB/SEB)
 * visibly fall into a shared CC-setting tail whose boundaries cannot be
 * confirmed from this view. Verify against the pristine file.
 */
1984 static void disas_ed(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
1985 int x2
, int b2
, int d2
, int r1b
)
1987 TCGv_i32 tmp_r1
, tmp32
;
1989 addr
= get_address(s
, x2
, b2
, d2
);
1990 tmp_r1
= tcg_const_i32(r1
);
1992 case 0x4: /* LDEB R1,D2(X2,B2) [RXE] */
1993 potential_page_fault(s
);
1994 gen_helper_ldeb(cpu_env
, tmp_r1
, addr
);
1996 case 0x5: /* LXDB R1,D2(X2,B2) [RXE] */
1997 potential_page_fault(s
);
1998 gen_helper_lxdb(cpu_env
, tmp_r1
, addr
);
2000 case 0x9: /* CEB R1,D2(X2,B2) [RXE] */
2001 tmp
= tcg_temp_new_i64();
2002 tmp32
= load_freg32(r1
);
2003 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2004 set_cc_cmp_f32_i64(s
, tmp32
, tmp
);
2005 tcg_temp_free_i64(tmp
);
2006 tcg_temp_free_i32(tmp32
);
/* short-FP arithmetic: load the 32-bit second operand from memory,
   truncate, and hand it to the per-op helper */
2008 case 0xa: /* AEB R1,D2(X2,B2) [RXE] */
2009 tmp
= tcg_temp_new_i64();
2010 tmp32
= tcg_temp_new_i32();
2011 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2012 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2013 gen_helper_aeb(cpu_env
, tmp_r1
, tmp32
);
2014 tcg_temp_free_i64(tmp
);
2015 tcg_temp_free_i32(tmp32
);
/* re-read the result register to set the condition code */
2017 tmp32
= load_freg32(r1
);
2018 gen_set_cc_nz_f32(s
, tmp32
);
2019 tcg_temp_free_i32(tmp32
);
2021 case 0xb: /* SEB R1,D2(X2,B2) [RXE] */
2022 tmp
= tcg_temp_new_i64();
2023 tmp32
= tcg_temp_new_i32();
2024 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2025 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2026 gen_helper_seb(cpu_env
, tmp_r1
, tmp32
);
2027 tcg_temp_free_i64(tmp
);
2028 tcg_temp_free_i32(tmp32
);
2030 tmp32
= load_freg32(r1
);
2031 gen_set_cc_nz_f32(s
, tmp32
);
2032 tcg_temp_free_i32(tmp32
);
2034 case 0xd: /* DEB R1,D2(X2,B2) [RXE] */
2035 tmp
= tcg_temp_new_i64();
2036 tmp32
= tcg_temp_new_i32();
2037 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2038 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2039 gen_helper_deb(cpu_env
, tmp_r1
, tmp32
);
2040 tcg_temp_free_i64(tmp
);
2041 tcg_temp_free_i32(tmp32
);
/* test-data-class family: helper computes the condition code */
2043 case 0x10: /* TCEB R1,D2(X2,B2) [RXE] */
2044 potential_page_fault(s
);
2045 gen_helper_tceb(cc_op
, cpu_env
, tmp_r1
, addr
);
2048 case 0x11: /* TCDB R1,D2(X2,B2) [RXE] */
2049 potential_page_fault(s
);
2050 gen_helper_tcdb(cc_op
, cpu_env
, tmp_r1
, addr
);
2053 case 0x12: /* TCXB R1,D2(X2,B2) [RXE] */
2054 potential_page_fault(s
);
2055 gen_helper_tcxb(cc_op
, cpu_env
, tmp_r1
, addr
);
2058 case 0x17: /* MEEB R1,D2(X2,B2) [RXE] */
2059 tmp
= tcg_temp_new_i64();
2060 tmp32
= tcg_temp_new_i32();
2061 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2062 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2063 gen_helper_meeb(cpu_env
, tmp_r1
, tmp32
);
2064 tcg_temp_free_i64(tmp
);
2065 tcg_temp_free_i32(tmp32
);
/* long-FP memory ops delegated entirely to helpers */
2067 case 0x19: /* CDB R1,D2(X2,B2) [RXE] */
2068 potential_page_fault(s
);
2069 gen_helper_cdb(cc_op
, cpu_env
, tmp_r1
, addr
);
2072 case 0x1a: /* ADB R1,D2(X2,B2) [RXE] */
2073 potential_page_fault(s
);
2074 gen_helper_adb(cc_op
, cpu_env
, tmp_r1
, addr
);
2077 case 0x1b: /* SDB R1,D2(X2,B2) [RXE] */
2078 potential_page_fault(s
);
2079 gen_helper_sdb(cc_op
, cpu_env
, tmp_r1
, addr
);
2082 case 0x1c: /* MDB R1,D2(X2,B2) [RXE] */
2083 potential_page_fault(s
);
2084 gen_helper_mdb(cpu_env
, tmp_r1
, addr
);
2086 case 0x1d: /* DDB R1,D2(X2,B2) [RXE] */
2087 potential_page_fault(s
);
2088 gen_helper_ddb(cpu_env
, tmp_r1
, addr
);
2090 case 0x1e: /* MADB R1,R3,D2(X2,B2) [RXF] */
2091 /* for RXF insns, r1 is R3 and r1b is R1 */
2092 tmp32
= tcg_const_i32(r1b
);
2093 potential_page_fault(s
);
2094 gen_helper_madb(cpu_env
, tmp32
, addr
, tmp_r1
);
2095 tcg_temp_free_i32(tmp32
);
2098 LOG_DISAS("illegal ed operation 0x%x\n", op
);
2099 gen_illegal_opcode(s
);
/* common cleanup for every case */
2102 tcg_temp_free_i32(tmp_r1
);
2103 tcg_temp_free_i64(addr
);
/*
 * Decode the A5 opcode group (RI format): insert-immediate (IIxx),
 * and/or-immediate (NIxx/OIxx) and load-logical-immediate (LLIxx)
 * on one of the four 16-bit halfwords of a 64-bit register, selected
 * by the sub-opcode.
 *
 * NOTE(review): lossy extraction — the `switch (op)` header, `break;`
 * statements, braces, local declarations and the loads/stores of `tmp`
 * around each NIxx/OIxx body are not visible here. Verify control flow
 * against the pristine file before editing.
 */
2106 static void disas_a5(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
2111 LOG_DISAS("disas_a5: op 0x%x r1 %d i2 0x%x\n", op
, r1
, i2
);
/* IIHH/IIHL/IILH/IILL: deposit the 16-bit immediate into bits
   48/32/16/0 of r1 respectively */
2113 case 0x0: /* IIHH R1,I2 [RI] */
2114 tmp
= tcg_const_i64(i2
);
2115 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 48, 16);
2116 tcg_temp_free_i64(tmp
);
2118 case 0x1: /* IIHL R1,I2 [RI] */
2119 tmp
= tcg_const_i64(i2
);
2120 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 32, 16);
2121 tcg_temp_free_i64(tmp
);
2123 case 0x2: /* IILH R1,I2 [RI] */
2124 tmp
= tcg_const_i64(i2
);
2125 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 16, 16);
2126 tcg_temp_free_i64(tmp
);
2128 case 0x3: /* IILL R1,I2 [RI] */
2129 tmp
= tcg_const_i64(i2
);
2130 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 0, 16);
2131 tcg_temp_free_i64(tmp
);
/* NIxx/OIxx: AND masks keep all other bits 1, OR masks keep them 0;
   afterwards the affected halfword is extracted to set the CC */
2133 case 0x4: /* NIHH R1,I2 [RI] */
2134 case 0x8: /* OIHH R1,I2 [RI] */
2136 tmp32
= tcg_temp_new_i32();
2139 tmp2
= tcg_const_i64((((uint64_t)i2
) << 48)
2140 | 0x0000ffffffffffffULL
);
2141 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2144 tmp2
= tcg_const_i64(((uint64_t)i2
) << 48);
2145 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2151 tcg_gen_shri_i64(tmp2
, tmp
, 48);
2152 tcg_gen_trunc_i64_i32(tmp32
, tmp2
);
2153 set_cc_nz_u32(s
, tmp32
);
2154 tcg_temp_free_i64(tmp2
);
2155 tcg_temp_free_i32(tmp32
);
2156 tcg_temp_free_i64(tmp
);
2158 case 0x5: /* NIHL R1,I2 [RI] */
2159 case 0x9: /* OIHL R1,I2 [RI] */
2161 tmp32
= tcg_temp_new_i32();
2164 tmp2
= tcg_const_i64((((uint64_t)i2
) << 32)
2165 | 0xffff0000ffffffffULL
);
2166 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2169 tmp2
= tcg_const_i64(((uint64_t)i2
) << 32);
2170 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2176 tcg_gen_shri_i64(tmp2
, tmp
, 32);
2177 tcg_gen_trunc_i64_i32(tmp32
, tmp2
);
2178 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2179 set_cc_nz_u32(s
, tmp32
);
2180 tcg_temp_free_i64(tmp2
);
2181 tcg_temp_free_i32(tmp32
);
2182 tcg_temp_free_i64(tmp
);
2184 case 0x6: /* NILH R1,I2 [RI] */
2185 case 0xa: /* OILH R1,I2 [RI] */
2187 tmp32
= tcg_temp_new_i32();
2190 tmp2
= tcg_const_i64((((uint64_t)i2
) << 16)
2191 | 0xffffffff0000ffffULL
);
2192 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2195 tmp2
= tcg_const_i64(((uint64_t)i2
) << 16);
2196 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2202 tcg_gen_shri_i64(tmp
, tmp
, 16);
2203 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2204 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2205 set_cc_nz_u32(s
, tmp32
);
2206 tcg_temp_free_i64(tmp2
);
2207 tcg_temp_free_i32(tmp32
);
2208 tcg_temp_free_i64(tmp
);
2210 case 0x7: /* NILL R1,I2 [RI] */
2211 case 0xb: /* OILL R1,I2 [RI] */
2213 tmp32
= tcg_temp_new_i32();
2216 tmp2
= tcg_const_i64(i2
| 0xffffffffffff0000ULL
);
2217 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2220 tmp2
= tcg_const_i64(i2
);
2221 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2227 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2228 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2229 set_cc_nz_u32(s
, tmp32
); /* signedness should not matter here */
2230 tcg_temp_free_i64(tmp2
);
2231 tcg_temp_free_i32(tmp32
);
2232 tcg_temp_free_i64(tmp
);
/* LLIxx: load the immediate into the selected halfword, zeroing the
   rest (the store_reg call is not visible in this extraction) */
2234 case 0xc: /* LLIHH R1,I2 [RI] */
2235 tmp
= tcg_const_i64( ((uint64_t)i2
) << 48 );
2237 tcg_temp_free_i64(tmp
);
2239 case 0xd: /* LLIHL R1,I2 [RI] */
2240 tmp
= tcg_const_i64( ((uint64_t)i2
) << 32 );
2242 tcg_temp_free_i64(tmp
);
2244 case 0xe: /* LLILH R1,I2 [RI] */
2245 tmp
= tcg_const_i64( ((uint64_t)i2
) << 16 );
2247 tcg_temp_free_i64(tmp
);
2249 case 0xf: /* LLILL R1,I2 [RI] */
2250 tmp
= tcg_const_i64(i2
);
2252 tcg_temp_free_i64(tmp
);
2255 LOG_DISAS("illegal a5 operation 0x%x\n", op
);
2256 gen_illegal_opcode(s
);
/*
 * Decode the A7 opcode group (RI format): test-under-mask (TMxx),
 * relative branches (BRC/BRAS/BRCT/BRCTG), and halfword-immediate
 * load/multiply/compare (LHI/LGHI/MHI/MGHI/CHI/CGHI).
 * Branch cases end the translation block (is_jmp = DISAS_TB_JUMP).
 * Relative offsets are i2 * 2 because i2 counts halfwords.
 *
 * NOTE(review): lossy extraction — the `switch (op)` header, `break;`
 * statements, labels (`gen_set_label(l1)`), braces and several
 * load/store_reg lines are not visible here. Verify against the
 * pristine file before editing.
 */
2261 static void disas_a7(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
2268 LOG_DISAS("disas_a7: op 0x%x r1 %d i2 0x%x\n", op
, r1
, i2
);
/* TM family: shift the tested halfword of r1 down to bits 0..15, mask,
   and compare against the 16-bit immediate with the TM condition-code op */
2270 case 0x0: /* TMLH or TMH R1,I2 [RI] */
2271 case 0x1: /* TMLL or TML R1,I2 [RI] */
2272 case 0x2: /* TMHH R1,I2 [RI] */
2273 case 0x3: /* TMHL R1,I2 [RI] */
2275 tmp2
= tcg_const_i64((uint16_t)i2
);
2278 tcg_gen_shri_i64(tmp
, tmp
, 16);
2283 tcg_gen_shri_i64(tmp
, tmp
, 48);
2286 tcg_gen_shri_i64(tmp
, tmp
, 32);
2289 tcg_gen_andi_i64(tmp
, tmp
, 0xffff);
2290 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_64
);
2291 tcg_temp_free_i64(tmp
);
2292 tcg_temp_free_i64(tmp2
);
2294 case 0x4: /* brc m1, i2 */
2295 gen_brc(r1
, s
, i2
* 2LL);
/* BRAS: save the link info for the next instruction, then jump */
2297 case 0x5: /* BRAS R1,I2 [RI] */
2298 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 4));
2300 tcg_temp_free_i64(tmp
);
2301 gen_goto_tb(s
, 0, s
->pc
+ i2
* 2LL);
2302 s
->is_jmp
= DISAS_TB_JUMP
;
/* BRCT: decrement the 32-bit counter in r1 and branch while non-zero */
2304 case 0x6: /* BRCT R1,I2 [RI] */
2305 tmp32_1
= load_reg32(r1
);
2306 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
2307 store_reg32(r1
, tmp32_1
);
2308 gen_update_cc_op(s
);
2309 l1
= gen_new_label();
2310 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp32_1
, 0, l1
);
2311 gen_goto_tb(s
, 0, s
->pc
+ (i2
* 2LL));
2313 gen_goto_tb(s
, 1, s
->pc
+ 4);
2314 s
->is_jmp
= DISAS_TB_JUMP
;
2315 tcg_temp_free_i32(tmp32_1
);
/* BRCTG: 64-bit variant of BRCT */
2317 case 0x7: /* BRCTG R1,I2 [RI] */
2319 tcg_gen_subi_i64(tmp
, tmp
, 1);
2321 gen_update_cc_op(s
);
2322 l1
= gen_new_label();
2323 tcg_gen_brcondi_i64(TCG_COND_EQ
, tmp
, 0, l1
);
2324 gen_goto_tb(s
, 0, s
->pc
+ (i2
* 2LL));
2326 gen_goto_tb(s
, 1, s
->pc
+ 4);
2327 s
->is_jmp
= DISAS_TB_JUMP
;
2328 tcg_temp_free_i64(tmp
);
2330 case 0x8: /* lhi r1, i2 */
2331 tmp32_1
= tcg_const_i32(i2
);
2332 store_reg32(r1
, tmp32_1
);
2333 tcg_temp_free_i32(tmp32_1
);
2335 case 0x9: /* lghi r1, i2 */
2336 tmp
= tcg_const_i64(i2
);
2338 tcg_temp_free_i64(tmp
);
2340 case 0xc: /* MHI R1,I2 [RI] */
2341 tmp32_1
= load_reg32(r1
);
2342 tcg_gen_muli_i32(tmp32_1
, tmp32_1
, i2
);
2343 store_reg32(r1
, tmp32_1
);
2344 tcg_temp_free_i32(tmp32_1
);
2346 case 0xd: /* MGHI R1,I2 [RI] */
2348 tcg_gen_muli_i64(tmp
, tmp
, i2
);
2350 tcg_temp_free_i64(tmp
);
2352 case 0xe: /* CHI R1,I2 [RI] */
2353 tmp32_1
= load_reg32(r1
);
2354 cmp_s32c(s
, tmp32_1
, i2
);
2355 tcg_temp_free_i32(tmp32_1
);
2357 case 0xf: /* CGHI R1,I2 [RI] */
2359 cmp_s64c(s
, tmp
, i2
);
2360 tcg_temp_free_i64(tmp
);
2363 LOG_DISAS("illegal a7 operation 0x%x\n", op
);
2364 gen_illegal_opcode(s
);
2369 static void disas_b2(CPUS390XState
*env
, DisasContext
*s
, int op
,
2372 TCGv_i64 tmp
, tmp2
, tmp3
;
2373 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
2375 #ifndef CONFIG_USER_ONLY
2379 r1
= (insn
>> 4) & 0xf;
2382 LOG_DISAS("disas_b2: op 0x%x r1 %d r2 %d\n", op
, r1
, r2
);
2385 case 0x22: /* IPM R1 [RRE] */
2386 tmp32_1
= tcg_const_i32(r1
);
2388 gen_helper_ipm(cpu_env
, cc_op
, tmp32_1
);
2389 tcg_temp_free_i32(tmp32_1
);
2391 case 0x41: /* CKSM R1,R2 [RRE] */
2392 tmp32_1
= tcg_const_i32(r1
);
2393 tmp32_2
= tcg_const_i32(r2
);
2394 potential_page_fault(s
);
2395 gen_helper_cksm(cpu_env
, tmp32_1
, tmp32_2
);
2396 tcg_temp_free_i32(tmp32_1
);
2397 tcg_temp_free_i32(tmp32_2
);
2398 gen_op_movi_cc(s
, 0);
2400 case 0x4e: /* SAR R1,R2 [RRE] */
2401 tmp32_1
= load_reg32(r2
);
2402 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, aregs
[r1
]));
2403 tcg_temp_free_i32(tmp32_1
);
2405 case 0x4f: /* EAR R1,R2 [RRE] */
2406 tmp32_1
= tcg_temp_new_i32();
2407 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, aregs
[r2
]));
2408 store_reg32(r1
, tmp32_1
);
2409 tcg_temp_free_i32(tmp32_1
);
2411 case 0x52: /* MSR R1,R2 [RRE] */
2412 tmp32_1
= load_reg32(r1
);
2413 tmp32_2
= load_reg32(r2
);
2414 tcg_gen_mul_i32(tmp32_1
, tmp32_1
, tmp32_2
);
2415 store_reg32(r1
, tmp32_1
);
2416 tcg_temp_free_i32(tmp32_1
);
2417 tcg_temp_free_i32(tmp32_2
);
2419 case 0x54: /* MVPG R1,R2 [RRE] */
2421 tmp2
= load_reg(r1
);
2422 tmp3
= load_reg(r2
);
2423 potential_page_fault(s
);
2424 gen_helper_mvpg(cpu_env
, tmp
, tmp2
, tmp3
);
2425 tcg_temp_free_i64(tmp
);
2426 tcg_temp_free_i64(tmp2
);
2427 tcg_temp_free_i64(tmp3
);
2428 /* XXX check CCO bit and set CC accordingly */
2429 gen_op_movi_cc(s
, 0);
2431 case 0x55: /* MVST R1,R2 [RRE] */
2432 tmp32_1
= load_reg32(0);
2433 tmp32_2
= tcg_const_i32(r1
);
2434 tmp32_3
= tcg_const_i32(r2
);
2435 potential_page_fault(s
);
2436 gen_helper_mvst(cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2437 tcg_temp_free_i32(tmp32_1
);
2438 tcg_temp_free_i32(tmp32_2
);
2439 tcg_temp_free_i32(tmp32_3
);
2440 gen_op_movi_cc(s
, 1);
2442 case 0x5d: /* CLST R1,R2 [RRE] */
2443 tmp32_1
= load_reg32(0);
2444 tmp32_2
= tcg_const_i32(r1
);
2445 tmp32_3
= tcg_const_i32(r2
);
2446 potential_page_fault(s
);
2447 gen_helper_clst(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2449 tcg_temp_free_i32(tmp32_1
);
2450 tcg_temp_free_i32(tmp32_2
);
2451 tcg_temp_free_i32(tmp32_3
);
2453 case 0x5e: /* SRST R1,R2 [RRE] */
2454 tmp32_1
= load_reg32(0);
2455 tmp32_2
= tcg_const_i32(r1
);
2456 tmp32_3
= tcg_const_i32(r2
);
2457 potential_page_fault(s
);
2458 gen_helper_srst(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2460 tcg_temp_free_i32(tmp32_1
);
2461 tcg_temp_free_i32(tmp32_2
);
2462 tcg_temp_free_i32(tmp32_3
);
2465 #ifndef CONFIG_USER_ONLY
2466 case 0x02: /* STIDP D2(B2) [S] */
2468 check_privileged(s
);
2469 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2470 tmp
= get_address(s
, 0, b2
, d2
);
2471 potential_page_fault(s
);
2472 gen_helper_stidp(cpu_env
, tmp
);
2473 tcg_temp_free_i64(tmp
);
2475 case 0x04: /* SCK D2(B2) [S] */
2477 check_privileged(s
);
2478 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2479 tmp
= get_address(s
, 0, b2
, d2
);
2480 potential_page_fault(s
);
2481 gen_helper_sck(cc_op
, tmp
);
2483 tcg_temp_free_i64(tmp
);
2485 case 0x05: /* STCK D2(B2) [S] */
2487 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2488 tmp
= get_address(s
, 0, b2
, d2
);
2489 potential_page_fault(s
);
2490 gen_helper_stck(cc_op
, cpu_env
, tmp
);
2492 tcg_temp_free_i64(tmp
);
2494 case 0x06: /* SCKC D2(B2) [S] */
2495 /* Set Clock Comparator */
2496 check_privileged(s
);
2497 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2498 tmp
= get_address(s
, 0, b2
, d2
);
2499 potential_page_fault(s
);
2500 gen_helper_sckc(cpu_env
, tmp
);
2501 tcg_temp_free_i64(tmp
);
2503 case 0x07: /* STCKC D2(B2) [S] */
2504 /* Store Clock Comparator */
2505 check_privileged(s
);
2506 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2507 tmp
= get_address(s
, 0, b2
, d2
);
2508 potential_page_fault(s
);
2509 gen_helper_stckc(cpu_env
, tmp
);
2510 tcg_temp_free_i64(tmp
);
2512 case 0x08: /* SPT D2(B2) [S] */
2514 check_privileged(s
);
2515 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2516 tmp
= get_address(s
, 0, b2
, d2
);
2517 potential_page_fault(s
);
2518 gen_helper_spt(cpu_env
, tmp
);
2519 tcg_temp_free_i64(tmp
);
2521 case 0x09: /* STPT D2(B2) [S] */
2522 /* Store CPU Timer */
2523 check_privileged(s
);
2524 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2525 tmp
= get_address(s
, 0, b2
, d2
);
2526 potential_page_fault(s
);
2527 gen_helper_stpt(cpu_env
, tmp
);
2528 tcg_temp_free_i64(tmp
);
2530 case 0x0a: /* SPKA D2(B2) [S] */
2531 /* Set PSW Key from Address */
2532 check_privileged(s
);
2533 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2534 tmp
= get_address(s
, 0, b2
, d2
);
2535 tmp2
= tcg_temp_new_i64();
2536 tcg_gen_andi_i64(tmp2
, psw_mask
, ~PSW_MASK_KEY
);
2537 tcg_gen_shli_i64(tmp
, tmp
, PSW_SHIFT_KEY
- 4);
2538 tcg_gen_or_i64(psw_mask
, tmp2
, tmp
);
2539 tcg_temp_free_i64(tmp2
);
2540 tcg_temp_free_i64(tmp
);
2542 case 0x0d: /* PTLB [S] */
2544 check_privileged(s
);
2545 gen_helper_ptlb(cpu_env
);
2547 case 0x10: /* SPX D2(B2) [S] */
2548 /* Set Prefix Register */
2549 check_privileged(s
);
2550 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2551 tmp
= get_address(s
, 0, b2
, d2
);
2552 potential_page_fault(s
);
2553 gen_helper_spx(cpu_env
, tmp
);
2554 tcg_temp_free_i64(tmp
);
2556 case 0x11: /* STPX D2(B2) [S] */
2558 check_privileged(s
);
2559 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2560 tmp
= get_address(s
, 0, b2
, d2
);
2561 tmp2
= tcg_temp_new_i64();
2562 tcg_gen_ld_i64(tmp2
, cpu_env
, offsetof(CPUS390XState
, psa
));
2563 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2564 tcg_temp_free_i64(tmp
);
2565 tcg_temp_free_i64(tmp2
);
2567 case 0x12: /* STAP D2(B2) [S] */
2568 /* Store CPU Address */
2569 check_privileged(s
);
2570 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2571 tmp
= get_address(s
, 0, b2
, d2
);
2572 tmp2
= tcg_temp_new_i64();
2573 tmp32_1
= tcg_temp_new_i32();
2574 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, cpu_num
));
2575 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
2576 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2577 tcg_temp_free_i64(tmp
);
2578 tcg_temp_free_i64(tmp2
);
2579 tcg_temp_free_i32(tmp32_1
);
2581 case 0x21: /* IPTE R1,R2 [RRE] */
2582 /* Invalidate PTE */
2583 check_privileged(s
);
2584 r1
= (insn
>> 4) & 0xf;
2587 tmp2
= load_reg(r2
);
2588 gen_helper_ipte(cpu_env
, tmp
, tmp2
);
2589 tcg_temp_free_i64(tmp
);
2590 tcg_temp_free_i64(tmp2
);
2592 case 0x29: /* ISKE R1,R2 [RRE] */
2593 /* Insert Storage Key Extended */
2594 check_privileged(s
);
2595 r1
= (insn
>> 4) & 0xf;
2598 tmp2
= tcg_temp_new_i64();
2599 gen_helper_iske(tmp2
, cpu_env
, tmp
);
2600 store_reg(r1
, tmp2
);
2601 tcg_temp_free_i64(tmp
);
2602 tcg_temp_free_i64(tmp2
);
2604 case 0x2a: /* RRBE R1,R2 [RRE] */
2605 /* Set Storage Key Extended */
2606 check_privileged(s
);
2607 r1
= (insn
>> 4) & 0xf;
2609 tmp32_1
= load_reg32(r1
);
2611 gen_helper_rrbe(cc_op
, cpu_env
, tmp32_1
, tmp
);
2613 tcg_temp_free_i32(tmp32_1
);
2614 tcg_temp_free_i64(tmp
);
2616 case 0x2b: /* SSKE R1,R2 [RRE] */
2617 /* Set Storage Key Extended */
2618 check_privileged(s
);
2619 r1
= (insn
>> 4) & 0xf;
2621 tmp32_1
= load_reg32(r1
);
2623 gen_helper_sske(cpu_env
, tmp32_1
, tmp
);
2624 tcg_temp_free_i32(tmp32_1
);
2625 tcg_temp_free_i64(tmp
);
2627 case 0x34: /* STCH ? */
2628 /* Store Subchannel */
2629 check_privileged(s
);
2630 gen_op_movi_cc(s
, 3);
2632 case 0x46: /* STURA R1,R2 [RRE] */
2633 /* Store Using Real Address */
2634 check_privileged(s
);
2635 r1
= (insn
>> 4) & 0xf;
2637 tmp32_1
= load_reg32(r1
);
2639 potential_page_fault(s
);
2640 gen_helper_stura(cpu_env
, tmp
, tmp32_1
);
2641 tcg_temp_free_i32(tmp32_1
);
2642 tcg_temp_free_i64(tmp
);
2644 case 0x50: /* CSP R1,R2 [RRE] */
2645 /* Compare And Swap And Purge */
2646 check_privileged(s
);
2647 r1
= (insn
>> 4) & 0xf;
2649 tmp32_1
= tcg_const_i32(r1
);
2650 tmp32_2
= tcg_const_i32(r2
);
2651 gen_helper_csp(cc_op
, cpu_env
, tmp32_1
, tmp32_2
);
2653 tcg_temp_free_i32(tmp32_1
);
2654 tcg_temp_free_i32(tmp32_2
);
2656 case 0x5f: /* CHSC ? */
2657 /* Channel Subsystem Call */
2658 check_privileged(s
);
2659 gen_op_movi_cc(s
, 3);
2661 case 0x78: /* STCKE D2(B2) [S] */
2662 /* Store Clock Extended */
2663 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2664 tmp
= get_address(s
, 0, b2
, d2
);
2665 potential_page_fault(s
);
2666 gen_helper_stcke(cc_op
, cpu_env
, tmp
);
2668 tcg_temp_free_i64(tmp
);
2670 case 0x79: /* SACF D2(B2) [S] */
2671 /* Set Address Space Control Fast */
2672 check_privileged(s
);
2673 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2674 tmp
= get_address(s
, 0, b2
, d2
);
2675 potential_page_fault(s
);
2676 gen_helper_sacf(cpu_env
, tmp
);
2677 tcg_temp_free_i64(tmp
);
2678 /* addressing mode has changed, so end the block */
2681 s
->is_jmp
= DISAS_JUMP
;
2683 case 0x7d: /* STSI D2,(B2) [S] */
2684 check_privileged(s
);
2685 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2686 tmp
= get_address(s
, 0, b2
, d2
);
2687 tmp32_1
= load_reg32(0);
2688 tmp32_2
= load_reg32(1);
2689 potential_page_fault(s
);
2690 gen_helper_stsi(cc_op
, cpu_env
, tmp
, tmp32_1
, tmp32_2
);
2692 tcg_temp_free_i64(tmp
);
2693 tcg_temp_free_i32(tmp32_1
);
2694 tcg_temp_free_i32(tmp32_2
);
2696 case 0x9d: /* LFPC D2(B2) [S] */
2697 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2698 tmp
= get_address(s
, 0, b2
, d2
);
2699 tmp2
= tcg_temp_new_i64();
2700 tmp32_1
= tcg_temp_new_i32();
2701 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
2702 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
2703 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, fpc
));
2704 tcg_temp_free_i64(tmp
);
2705 tcg_temp_free_i64(tmp2
);
2706 tcg_temp_free_i32(tmp32_1
);
2708 case 0xb1: /* STFL D2(B2) [S] */
2709 /* Store Facility List (CPU features) at 200 */
2710 check_privileged(s
);
2711 tmp2
= tcg_const_i64(0xc0000000);
2712 tmp
= tcg_const_i64(200);
2713 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2714 tcg_temp_free_i64(tmp2
);
2715 tcg_temp_free_i64(tmp
);
2717 case 0xb2: /* LPSWE D2(B2) [S] */
2718 /* Load PSW Extended */
2719 check_privileged(s
);
2720 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2721 tmp
= get_address(s
, 0, b2
, d2
);
2722 tmp2
= tcg_temp_new_i64();
2723 tmp3
= tcg_temp_new_i64();
2724 tcg_gen_qemu_ld64(tmp2
, tmp
, get_mem_index(s
));
2725 tcg_gen_addi_i64(tmp
, tmp
, 8);
2726 tcg_gen_qemu_ld64(tmp3
, tmp
, get_mem_index(s
));
2727 gen_helper_load_psw(cpu_env
, tmp2
, tmp3
);
2728 /* we need to keep cc_op intact */
2729 s
->is_jmp
= DISAS_JUMP
;
2730 tcg_temp_free_i64(tmp
);
2731 tcg_temp_free_i64(tmp2
);
2732 tcg_temp_free_i64(tmp3
);
2734 case 0x20: /* SERVC R1,R2 [RRE] */
2735 /* SCLP Service call (PV hypercall) */
2736 check_privileged(s
);
2737 potential_page_fault(s
);
2738 tmp32_1
= load_reg32(r2
);
2740 gen_helper_servc(cc_op
, cpu_env
, tmp32_1
, tmp
);
2742 tcg_temp_free_i32(tmp32_1
);
2743 tcg_temp_free_i64(tmp
);
2747 LOG_DISAS("illegal b2 operation 0x%x\n", op
);
2748 gen_illegal_opcode(s
);
2753 static void disas_b3(CPUS390XState
*env
, DisasContext
*s
, int op
, int m3
,
2757 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
2758 LOG_DISAS("disas_b3: op 0x%x m3 0x%x r1 %d r2 %d\n", op
, m3
, r1
, r2
);
2759 #define FP_HELPER(i) \
2760 tmp32_1 = tcg_const_i32(r1); \
2761 tmp32_2 = tcg_const_i32(r2); \
2762 gen_helper_ ## i(cpu_env, tmp32_1, tmp32_2); \
2763 tcg_temp_free_i32(tmp32_1); \
2764 tcg_temp_free_i32(tmp32_2);
2766 #define FP_HELPER_CC(i) \
2767 tmp32_1 = tcg_const_i32(r1); \
2768 tmp32_2 = tcg_const_i32(r2); \
2769 gen_helper_ ## i(cc_op, cpu_env, tmp32_1, tmp32_2); \
2771 tcg_temp_free_i32(tmp32_1); \
2772 tcg_temp_free_i32(tmp32_2);
2775 case 0x0: /* LPEBR R1,R2 [RRE] */
2776 FP_HELPER_CC(lpebr
);
2778 case 0x2: /* LTEBR R1,R2 [RRE] */
2779 FP_HELPER_CC(ltebr
);
2781 case 0x3: /* LCEBR R1,R2 [RRE] */
2782 FP_HELPER_CC(lcebr
);
2784 case 0x4: /* LDEBR R1,R2 [RRE] */
2787 case 0x5: /* LXDBR R1,R2 [RRE] */
2790 case 0x9: /* CEBR R1,R2 [RRE] */
2793 case 0xa: /* AEBR R1,R2 [RRE] */
2796 case 0xb: /* SEBR R1,R2 [RRE] */
2799 case 0xd: /* DEBR R1,R2 [RRE] */
2802 case 0x10: /* LPDBR R1,R2 [RRE] */
2803 FP_HELPER_CC(lpdbr
);
2805 case 0x12: /* LTDBR R1,R2 [RRE] */
2806 FP_HELPER_CC(ltdbr
);
2808 case 0x13: /* LCDBR R1,R2 [RRE] */
2809 FP_HELPER_CC(lcdbr
);
2811 case 0x15: /* SQBDR R1,R2 [RRE] */
2814 case 0x17: /* MEEBR R1,R2 [RRE] */
2817 case 0x19: /* CDBR R1,R2 [RRE] */
2820 case 0x1a: /* ADBR R1,R2 [RRE] */
2823 case 0x1b: /* SDBR R1,R2 [RRE] */
2826 case 0x1c: /* MDBR R1,R2 [RRE] */
2829 case 0x1d: /* DDBR R1,R2 [RRE] */
2832 case 0xe: /* MAEBR R1,R3,R2 [RRF] */
2833 case 0x1e: /* MADBR R1,R3,R2 [RRF] */
2834 case 0x1f: /* MSDBR R1,R3,R2 [RRF] */
2835 /* for RRF insns, m3 is R1, r1 is R3, and r2 is R2 */
2836 tmp32_1
= tcg_const_i32(m3
);
2837 tmp32_2
= tcg_const_i32(r2
);
2838 tmp32_3
= tcg_const_i32(r1
);
2841 gen_helper_maebr(cpu_env
, tmp32_1
, tmp32_3
, tmp32_2
);
2844 gen_helper_madbr(cpu_env
, tmp32_1
, tmp32_3
, tmp32_2
);
2847 gen_helper_msdbr(cpu_env
, tmp32_1
, tmp32_3
, tmp32_2
);
2852 tcg_temp_free_i32(tmp32_1
);
2853 tcg_temp_free_i32(tmp32_2
);
2854 tcg_temp_free_i32(tmp32_3
);
2856 case 0x40: /* LPXBR R1,R2 [RRE] */
2857 FP_HELPER_CC(lpxbr
);
2859 case 0x42: /* LTXBR R1,R2 [RRE] */
2860 FP_HELPER_CC(ltxbr
);
2862 case 0x43: /* LCXBR R1,R2 [RRE] */
2863 FP_HELPER_CC(lcxbr
);
2865 case 0x44: /* LEDBR R1,R2 [RRE] */
2868 case 0x45: /* LDXBR R1,R2 [RRE] */
2871 case 0x46: /* LEXBR R1,R2 [RRE] */
2874 case 0x49: /* CXBR R1,R2 [RRE] */
2877 case 0x4a: /* AXBR R1,R2 [RRE] */
2880 case 0x4b: /* SXBR R1,R2 [RRE] */
2883 case 0x4c: /* MXBR R1,R2 [RRE] */
2886 case 0x4d: /* DXBR R1,R2 [RRE] */
2889 case 0x65: /* LXR R1,R2 [RRE] */
2890 tmp
= load_freg(r2
);
2891 store_freg(r1
, tmp
);
2892 tcg_temp_free_i64(tmp
);
2893 tmp
= load_freg(r2
+ 2);
2894 store_freg(r1
+ 2, tmp
);
2895 tcg_temp_free_i64(tmp
);
2897 case 0x74: /* LZER R1 [RRE] */
2898 tmp32_1
= tcg_const_i32(r1
);
2899 gen_helper_lzer(cpu_env
, tmp32_1
);
2900 tcg_temp_free_i32(tmp32_1
);
2902 case 0x75: /* LZDR R1 [RRE] */
2903 tmp32_1
= tcg_const_i32(r1
);
2904 gen_helper_lzdr(cpu_env
, tmp32_1
);
2905 tcg_temp_free_i32(tmp32_1
);
2907 case 0x76: /* LZXR R1 [RRE] */
2908 tmp32_1
= tcg_const_i32(r1
);
2909 gen_helper_lzxr(cpu_env
, tmp32_1
);
2910 tcg_temp_free_i32(tmp32_1
);
2912 case 0x84: /* SFPC R1 [RRE] */
2913 tmp32_1
= load_reg32(r1
);
2914 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, fpc
));
2915 tcg_temp_free_i32(tmp32_1
);
2917 case 0x8c: /* EFPC R1 [RRE] */
2918 tmp32_1
= tcg_temp_new_i32();
2919 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, fpc
));
2920 store_reg32(r1
, tmp32_1
);
2921 tcg_temp_free_i32(tmp32_1
);
2923 case 0x94: /* CEFBR R1,R2 [RRE] */
2924 case 0x95: /* CDFBR R1,R2 [RRE] */
2925 case 0x96: /* CXFBR R1,R2 [RRE] */
2926 tmp32_1
= tcg_const_i32(r1
);
2927 tmp32_2
= load_reg32(r2
);
2930 gen_helper_cefbr(cpu_env
, tmp32_1
, tmp32_2
);
2933 gen_helper_cdfbr(cpu_env
, tmp32_1
, tmp32_2
);
2936 gen_helper_cxfbr(cpu_env
, tmp32_1
, tmp32_2
);
2941 tcg_temp_free_i32(tmp32_1
);
2942 tcg_temp_free_i32(tmp32_2
);
2944 case 0x98: /* CFEBR R1,R2 [RRE] */
2945 case 0x99: /* CFDBR R1,R2 [RRE] */
2946 case 0x9a: /* CFXBR R1,R2 [RRE] */
2947 tmp32_1
= tcg_const_i32(r1
);
2948 tmp32_2
= tcg_const_i32(r2
);
2949 tmp32_3
= tcg_const_i32(m3
);
2952 gen_helper_cfebr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2955 gen_helper_cfdbr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2958 gen_helper_cfxbr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2964 tcg_temp_free_i32(tmp32_1
);
2965 tcg_temp_free_i32(tmp32_2
);
2966 tcg_temp_free_i32(tmp32_3
);
2968 case 0xa4: /* CEGBR R1,R2 [RRE] */
2969 case 0xa5: /* CDGBR R1,R2 [RRE] */
2970 tmp32_1
= tcg_const_i32(r1
);
2974 gen_helper_cegbr(cpu_env
, tmp32_1
, tmp
);
2977 gen_helper_cdgbr(cpu_env
, tmp32_1
, tmp
);
2982 tcg_temp_free_i32(tmp32_1
);
2983 tcg_temp_free_i64(tmp
);
2985 case 0xa6: /* CXGBR R1,R2 [RRE] */
2986 tmp32_1
= tcg_const_i32(r1
);
2988 gen_helper_cxgbr(cpu_env
, tmp32_1
, tmp
);
2989 tcg_temp_free_i32(tmp32_1
);
2990 tcg_temp_free_i64(tmp
);
2992 case 0xa8: /* CGEBR R1,R2 [RRE] */
2993 tmp32_1
= tcg_const_i32(r1
);
2994 tmp32_2
= tcg_const_i32(r2
);
2995 tmp32_3
= tcg_const_i32(m3
);
2996 gen_helper_cgebr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2998 tcg_temp_free_i32(tmp32_1
);
2999 tcg_temp_free_i32(tmp32_2
);
3000 tcg_temp_free_i32(tmp32_3
);
3002 case 0xa9: /* CGDBR R1,R2 [RRE] */
3003 tmp32_1
= tcg_const_i32(r1
);
3004 tmp32_2
= tcg_const_i32(r2
);
3005 tmp32_3
= tcg_const_i32(m3
);
3006 gen_helper_cgdbr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
3008 tcg_temp_free_i32(tmp32_1
);
3009 tcg_temp_free_i32(tmp32_2
);
3010 tcg_temp_free_i32(tmp32_3
);
3012 case 0xaa: /* CGXBR R1,R2 [RRE] */
3013 tmp32_1
= tcg_const_i32(r1
);
3014 tmp32_2
= tcg_const_i32(r2
);
3015 tmp32_3
= tcg_const_i32(m3
);
3016 gen_helper_cgxbr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
3018 tcg_temp_free_i32(tmp32_1
);
3019 tcg_temp_free_i32(tmp32_2
);
3020 tcg_temp_free_i32(tmp32_3
);
3023 LOG_DISAS("illegal b3 operation 0x%x\n", op
);
3024 gen_illegal_opcode(s
);
3032 static void disas_b9(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
3035 TCGv_i64 tmp
, tmp2
, tmp3
;
3036 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
3038 LOG_DISAS("disas_b9: op 0x%x r1 %d r2 %d\n", op
, r1
, r2
);
3040 case 0x0: /* LPGR R1,R2 [RRE] */
3041 case 0x1: /* LNGR R1,R2 [RRE] */
3042 case 0x2: /* LTGR R1,R2 [RRE] */
3043 case 0x3: /* LCGR R1,R2 [RRE] */
3044 case 0x10: /* LPGFR R1,R2 [RRE] */
3045 case 0x11: /* LNFGR R1,R2 [RRE] */
3046 case 0x12: /* LTGFR R1,R2 [RRE] */
3047 case 0x13: /* LCGFR R1,R2 [RRE] */
3049 tmp
= load_reg32_i64(r2
);
3054 case 0x0: /* LP?GR */
3055 set_cc_abs64(s
, tmp
);
3056 gen_helper_abs_i64(tmp
, tmp
);
3059 case 0x1: /* LN?GR */
3060 set_cc_nabs64(s
, tmp
);
3061 gen_helper_nabs_i64(tmp
, tmp
);
3064 case 0x2: /* LT?GR */
3070 case 0x3: /* LC?GR */
3071 tcg_gen_neg_i64(regs
[r1
], tmp
);
3072 set_cc_comp64(s
, regs
[r1
]);
3075 tcg_temp_free_i64(tmp
);
3077 case 0x4: /* LGR R1,R2 [RRE] */
3078 store_reg(r1
, regs
[r2
]);
3080 case 0x6: /* LGBR R1,R2 [RRE] */
3081 tmp2
= load_reg(r2
);
3082 tcg_gen_ext8s_i64(tmp2
, tmp2
);
3083 store_reg(r1
, tmp2
);
3084 tcg_temp_free_i64(tmp2
);
3086 case 0xc: /* MSGR R1,R2 [RRE] */
3087 case 0x1c: /* MSGFR R1,R2 [RRE] */
3089 tmp2
= load_reg(r2
);
3091 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3093 tcg_gen_mul_i64(tmp
, tmp
, tmp2
);
3095 tcg_temp_free_i64(tmp
);
3096 tcg_temp_free_i64(tmp2
);
3098 case 0xd: /* DSGR R1,R2 [RRE] */
3099 case 0x1d: /* DSGFR R1,R2 [RRE] */
3100 tmp
= load_reg(r1
+ 1);
3102 tmp2
= load_reg(r2
);
3104 tmp32_1
= load_reg32(r2
);
3105 tmp2
= tcg_temp_new_i64();
3106 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
3107 tcg_temp_free_i32(tmp32_1
);
3109 tmp3
= tcg_temp_new_i64();
3110 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
3111 store_reg(r1
+ 1, tmp3
);
3112 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
3113 store_reg(r1
, tmp3
);
3114 tcg_temp_free_i64(tmp
);
3115 tcg_temp_free_i64(tmp2
);
3116 tcg_temp_free_i64(tmp3
);
3118 case 0x14: /* LGFR R1,R2 [RRE] */
3119 tmp32_1
= load_reg32(r2
);
3120 tmp
= tcg_temp_new_i64();
3121 tcg_gen_ext_i32_i64(tmp
, tmp32_1
);
3123 tcg_temp_free_i32(tmp32_1
);
3124 tcg_temp_free_i64(tmp
);
3126 case 0x16: /* LLGFR R1,R2 [RRE] */
3127 tmp32_1
= load_reg32(r2
);
3128 tmp
= tcg_temp_new_i64();
3129 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3131 tcg_temp_free_i32(tmp32_1
);
3132 tcg_temp_free_i64(tmp
);
3134 case 0x17: /* LLGTR R1,R2 [RRE] */
3135 tmp32_1
= load_reg32(r2
);
3136 tmp
= tcg_temp_new_i64();
3137 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0x7fffffffUL
);
3138 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3140 tcg_temp_free_i32(tmp32_1
);
3141 tcg_temp_free_i64(tmp
);
3143 case 0x0f: /* LRVGR R1,R2 [RRE] */
3144 tcg_gen_bswap64_i64(regs
[r1
], regs
[r2
]);
3146 case 0x1f: /* LRVR R1,R2 [RRE] */
3147 tmp32_1
= load_reg32(r2
);
3148 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
3149 store_reg32(r1
, tmp32_1
);
3150 tcg_temp_free_i32(tmp32_1
);
3152 case 0x20: /* CGR R1,R2 [RRE] */
3153 case 0x30: /* CGFR R1,R2 [RRE] */
3154 tmp2
= load_reg(r2
);
3156 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3159 cmp_s64(s
, tmp
, tmp2
);
3160 tcg_temp_free_i64(tmp
);
3161 tcg_temp_free_i64(tmp2
);
3163 case 0x21: /* CLGR R1,R2 [RRE] */
3164 case 0x31: /* CLGFR R1,R2 [RRE] */
3165 tmp2
= load_reg(r2
);
3167 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3170 cmp_u64(s
, tmp
, tmp2
);
3171 tcg_temp_free_i64(tmp
);
3172 tcg_temp_free_i64(tmp2
);
3174 case 0x26: /* LBR R1,R2 [RRE] */
3175 tmp32_1
= load_reg32(r2
);
3176 tcg_gen_ext8s_i32(tmp32_1
, tmp32_1
);
3177 store_reg32(r1
, tmp32_1
);
3178 tcg_temp_free_i32(tmp32_1
);
3180 case 0x27: /* LHR R1,R2 [RRE] */
3181 tmp32_1
= load_reg32(r2
);
3182 tcg_gen_ext16s_i32(tmp32_1
, tmp32_1
);
3183 store_reg32(r1
, tmp32_1
);
3184 tcg_temp_free_i32(tmp32_1
);
3186 case 0x80: /* NGR R1,R2 [RRE] */
3187 case 0x81: /* OGR R1,R2 [RRE] */
3188 case 0x82: /* XGR R1,R2 [RRE] */
3190 tmp2
= load_reg(r2
);
3193 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
3196 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
3199 tcg_gen_xor_i64(tmp
, tmp
, tmp2
);
3205 set_cc_nz_u64(s
, tmp
);
3206 tcg_temp_free_i64(tmp
);
3207 tcg_temp_free_i64(tmp2
);
3209 case 0x83: /* FLOGR R1,R2 [RRE] */
3211 tmp32_1
= tcg_const_i32(r1
);
3212 gen_helper_flogr(cc_op
, cpu_env
, tmp32_1
, tmp
);
3214 tcg_temp_free_i64(tmp
);
3215 tcg_temp_free_i32(tmp32_1
);
3217 case 0x84: /* LLGCR R1,R2 [RRE] */
3219 tcg_gen_andi_i64(tmp
, tmp
, 0xff);
3221 tcg_temp_free_i64(tmp
);
3223 case 0x85: /* LLGHR R1,R2 [RRE] */
3225 tcg_gen_andi_i64(tmp
, tmp
, 0xffff);
3227 tcg_temp_free_i64(tmp
);
3229 case 0x87: /* DLGR R1,R2 [RRE] */
3230 tmp32_1
= tcg_const_i32(r1
);
3232 gen_helper_dlg(cpu_env
, tmp32_1
, tmp
);
3233 tcg_temp_free_i64(tmp
);
3234 tcg_temp_free_i32(tmp32_1
);
3236 case 0x88: /* ALCGR R1,R2 [RRE] */
3238 tmp2
= load_reg(r2
);
3239 tmp3
= tcg_temp_new_i64();
3241 tcg_gen_extu_i32_i64(tmp3
, cc_op
);
3242 tcg_gen_shri_i64(tmp3
, tmp3
, 1);
3243 tcg_gen_andi_i64(tmp3
, tmp3
, 1);
3244 tcg_gen_add_i64(tmp3
, tmp2
, tmp3
);
3245 tcg_gen_add_i64(tmp3
, tmp
, tmp3
);
3246 store_reg(r1
, tmp3
);
3247 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3248 tcg_temp_free_i64(tmp
);
3249 tcg_temp_free_i64(tmp2
);
3250 tcg_temp_free_i64(tmp3
);
3252 case 0x89: /* SLBGR R1,R2 [RRE] */
3254 tmp2
= load_reg(r2
);
3255 tmp32_1
= tcg_const_i32(r1
);
3257 gen_helper_slbg(cc_op
, cpu_env
, cc_op
, tmp32_1
, tmp
, tmp2
);
3259 tcg_temp_free_i64(tmp
);
3260 tcg_temp_free_i64(tmp2
);
3261 tcg_temp_free_i32(tmp32_1
);
3263 case 0x94: /* LLCR R1,R2 [RRE] */
3264 tmp32_1
= load_reg32(r2
);
3265 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0xff);
3266 store_reg32(r1
, tmp32_1
);
3267 tcg_temp_free_i32(tmp32_1
);
3269 case 0x95: /* LLHR R1,R2 [RRE] */
3270 tmp32_1
= load_reg32(r2
);
3271 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0xffff);
3272 store_reg32(r1
, tmp32_1
);
3273 tcg_temp_free_i32(tmp32_1
);
3275 case 0x96: /* MLR R1,R2 [RRE] */
3276 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3277 tmp2
= load_reg(r2
);
3278 tmp3
= load_reg((r1
+ 1) & 15);
3279 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3280 tcg_gen_ext32u_i64(tmp3
, tmp3
);
3281 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
3282 store_reg32_i64((r1
+ 1) & 15, tmp2
);
3283 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
3284 store_reg32_i64(r1
, tmp2
);
3285 tcg_temp_free_i64(tmp2
);
3286 tcg_temp_free_i64(tmp3
);
3288 case 0x97: /* DLR R1,R2 [RRE] */
3289 /* reg(r1) = reg(r1, r1+1) % reg(r2) */
3290 /* reg(r1+1) = reg(r1, r1+1) / reg(r2) */
3292 tmp2
= load_reg(r2
);
3293 tmp3
= load_reg((r1
+ 1) & 15);
3294 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3295 tcg_gen_ext32u_i64(tmp3
, tmp3
);
3296 tcg_gen_shli_i64(tmp
, tmp
, 32);
3297 tcg_gen_or_i64(tmp
, tmp
, tmp3
);
3299 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
3300 tcg_gen_div_i64(tmp
, tmp
, tmp2
);
3301 store_reg32_i64((r1
+ 1) & 15, tmp
);
3302 store_reg32_i64(r1
, tmp3
);
3303 tcg_temp_free_i64(tmp
);
3304 tcg_temp_free_i64(tmp2
);
3305 tcg_temp_free_i64(tmp3
);
3307 case 0x98: /* ALCR R1,R2 [RRE] */
3308 tmp32_1
= load_reg32(r1
);
3309 tmp32_2
= load_reg32(r2
);
3310 tmp32_3
= tcg_temp_new_i32();
3311 /* XXX possible optimization point */
3313 gen_helper_addc_u32(tmp32_3
, cc_op
, tmp32_1
, tmp32_2
);
3314 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3315 store_reg32(r1
, tmp32_3
);
3316 tcg_temp_free_i32(tmp32_1
);
3317 tcg_temp_free_i32(tmp32_2
);
3318 tcg_temp_free_i32(tmp32_3
);
3320 case 0x99: /* SLBR R1,R2 [RRE] */
3321 tmp32_1
= load_reg32(r2
);
3322 tmp32_2
= tcg_const_i32(r1
);
3324 gen_helper_slb(cc_op
, cpu_env
, cc_op
, tmp32_2
, tmp32_1
);
3326 tcg_temp_free_i32(tmp32_1
);
3327 tcg_temp_free_i32(tmp32_2
);
3330 LOG_DISAS("illegal b9 operation 0x%x\n", op
);
3331 gen_illegal_opcode(s
);
3336 static void disas_c0(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
, int i2
)
3339 TCGv_i32 tmp32_1
, tmp32_2
;
3340 uint64_t target
= s
->pc
+ i2
* 2LL;
3343 LOG_DISAS("disas_c0: op 0x%x r1 %d i2 %d\n", op
, r1
, i2
);
3346 case 0: /* larl r1, i2 */
3347 tmp
= tcg_const_i64(target
);
3349 tcg_temp_free_i64(tmp
);
3351 case 0x1: /* LGFI R1,I2 [RIL] */
3352 tmp
= tcg_const_i64((int64_t)i2
);
3354 tcg_temp_free_i64(tmp
);
3356 case 0x4: /* BRCL M1,I2 [RIL] */
3357 if (r1
== 15) { /* m1 == r1 */
3358 gen_goto_tb(s
, 0, target
);
3359 s
->is_jmp
= DISAS_TB_JUMP
;
3362 /* m1 & (1 << (3 - cc)) */
3363 tmp32_1
= tcg_const_i32(3);
3364 tmp32_2
= tcg_const_i32(1);
3366 tcg_gen_sub_i32(tmp32_1
, tmp32_1
, cc_op
);
3367 tcg_gen_shl_i32(tmp32_2
, tmp32_2
, tmp32_1
);
3368 tcg_temp_free_i32(tmp32_1
);
3369 tmp32_1
= tcg_const_i32(r1
); /* m1 == r1 */
3370 tcg_gen_and_i32(tmp32_1
, tmp32_1
, tmp32_2
);
3371 l1
= gen_new_label();
3372 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp32_1
, 0, l1
);
3373 gen_goto_tb(s
, 0, target
);
3375 gen_goto_tb(s
, 1, s
->pc
+ 6);
3376 s
->is_jmp
= DISAS_TB_JUMP
;
3377 tcg_temp_free_i32(tmp32_1
);
3378 tcg_temp_free_i32(tmp32_2
);
3380 case 0x5: /* brasl r1, i2 */
3381 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 6));
3383 tcg_temp_free_i64(tmp
);
3384 gen_goto_tb(s
, 0, target
);
3385 s
->is_jmp
= DISAS_TB_JUMP
;
3387 case 0x7: /* XILF R1,I2 [RIL] */
3388 case 0xb: /* NILF R1,I2 [RIL] */
3389 case 0xd: /* OILF R1,I2 [RIL] */
3390 tmp32_1
= load_reg32(r1
);
3393 tcg_gen_xori_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3396 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3399 tcg_gen_ori_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3404 store_reg32(r1
, tmp32_1
);
3405 set_cc_nz_u32(s
, tmp32_1
);
3406 tcg_temp_free_i32(tmp32_1
);
3408 case 0x9: /* IILF R1,I2 [RIL] */
3409 tmp32_1
= tcg_const_i32((uint32_t)i2
);
3410 store_reg32(r1
, tmp32_1
);
3411 tcg_temp_free_i32(tmp32_1
);
3413 case 0xa: /* NIHF R1,I2 [RIL] */
3415 tmp32_1
= tcg_temp_new_i32();
3416 tcg_gen_andi_i64(tmp
, tmp
, (((uint64_t)((uint32_t)i2
)) << 32)
3419 tcg_gen_shri_i64(tmp
, tmp
, 32);
3420 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
3421 set_cc_nz_u32(s
, tmp32_1
);
3422 tcg_temp_free_i64(tmp
);
3423 tcg_temp_free_i32(tmp32_1
);
3425 case 0xe: /* LLIHF R1,I2 [RIL] */
3426 tmp
= tcg_const_i64(((uint64_t)(uint32_t)i2
) << 32);
3428 tcg_temp_free_i64(tmp
);
3430 case 0xf: /* LLILF R1,I2 [RIL] */
3431 tmp
= tcg_const_i64((uint32_t)i2
);
3433 tcg_temp_free_i64(tmp
);
3436 LOG_DISAS("illegal c0 operation 0x%x\n", op
);
3437 gen_illegal_opcode(s
);
3442 static void disas_c2(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
3449 case 0xc: /* CGFI R1,I2 [RIL] */
3451 cmp_s64c(s
, tmp
, (int64_t)i2
);
3452 tcg_temp_free_i64(tmp
);
3454 case 0xe: /* CLGFI R1,I2 [RIL] */
3456 cmp_u64c(s
, tmp
, (uint64_t)(uint32_t)i2
);
3457 tcg_temp_free_i64(tmp
);
3459 case 0xd: /* CFI R1,I2 [RIL] */
3460 tmp32_1
= load_reg32(r1
);
3461 cmp_s32c(s
, tmp32_1
, i2
);
3462 tcg_temp_free_i32(tmp32_1
);
3464 case 0xf: /* CLFI R1,I2 [RIL] */
3465 tmp32_1
= load_reg32(r1
);
3466 cmp_u32c(s
, tmp32_1
, i2
);
3467 tcg_temp_free_i32(tmp32_1
);
3470 LOG_DISAS("illegal c2 operation 0x%x\n", op
);
3471 gen_illegal_opcode(s
);
3476 static void gen_and_or_xor_i32(int opc
, TCGv_i32 tmp
, TCGv_i32 tmp2
)
3478 switch (opc
& 0xf) {
3480 tcg_gen_and_i32(tmp
, tmp
, tmp2
);
3483 tcg_gen_or_i32(tmp
, tmp
, tmp2
);
3486 tcg_gen_xor_i32(tmp
, tmp
, tmp2
);
3493 static void disas_s390_insn(CPUS390XState
*env
, DisasContext
*s
)
3495 TCGv_i64 tmp
, tmp2
, tmp3
, tmp4
;
3496 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
, tmp32_4
;
3499 int op
, r1
, r2
, r3
, d1
, d2
, x2
, b1
, b2
, i
, i2
, r1b
;
3503 opc
= cpu_ldub_code(env
, s
->pc
);
3504 LOG_DISAS("opc 0x%x\n", opc
);
3507 #ifndef CONFIG_USER_ONLY
3508 case 0x01: /* SAM */
3509 insn
= ld_code2(env
, s
->pc
);
3510 /* set addressing mode, but we only do 64bit anyways */
3513 case 0x6: /* BCTR R1,R2 [RR] */
3514 insn
= ld_code2(env
, s
->pc
);
3515 decode_rr(s
, insn
, &r1
, &r2
);
3516 tmp32_1
= load_reg32(r1
);
3517 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
3518 store_reg32(r1
, tmp32_1
);
3521 gen_update_cc_op(s
);
3522 l1
= gen_new_label();
3523 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp32_1
, 0, l1
);
3525 /* not taking the branch, jump to after the instruction */
3526 gen_goto_tb(s
, 0, s
->pc
+ 2);
3529 /* take the branch, move R2 into psw.addr */
3530 tmp32_1
= load_reg32(r2
);
3531 tmp
= tcg_temp_new_i64();
3532 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3533 tcg_gen_mov_i64(psw_addr
, tmp
);
3534 s
->is_jmp
= DISAS_JUMP
;
3535 tcg_temp_free_i32(tmp32_1
);
3536 tcg_temp_free_i64(tmp
);
3539 case 0x7: /* BCR M1,R2 [RR] */
3540 insn
= ld_code2(env
, s
->pc
);
3541 decode_rr(s
, insn
, &r1
, &r2
);
3544 gen_bcr(s
, r1
, tmp
, s
->pc
);
3545 tcg_temp_free_i64(tmp
);
3546 s
->is_jmp
= DISAS_TB_JUMP
;
3548 /* XXX: "serialization and checkpoint-synchronization function"? */
3551 case 0xa: /* SVC I [RR] */
3552 insn
= ld_code2(env
, s
->pc
);
3557 tmp32_1
= tcg_const_i32(i
);
3558 tmp32_2
= tcg_const_i32(s
->next_pc
- s
->pc
);
3559 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, int_svc_code
));
3560 tcg_gen_st_i32(tmp32_2
, cpu_env
, offsetof(CPUS390XState
, int_svc_ilen
));
3561 gen_exception(EXCP_SVC
);
3562 s
->is_jmp
= DISAS_EXCP
;
3563 tcg_temp_free_i32(tmp32_1
);
3564 tcg_temp_free_i32(tmp32_2
);
3566 case 0xd: /* BASR R1,R2 [RR] */
3567 insn
= ld_code2(env
, s
->pc
);
3568 decode_rr(s
, insn
, &r1
, &r2
);
3569 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 2));
3572 tmp2
= load_reg(r2
);
3573 tcg_gen_mov_i64(psw_addr
, tmp2
);
3574 tcg_temp_free_i64(tmp2
);
3575 s
->is_jmp
= DISAS_JUMP
;
3577 tcg_temp_free_i64(tmp
);
3579 case 0xe: /* MVCL R1,R2 [RR] */
3580 insn
= ld_code2(env
, s
->pc
);
3581 decode_rr(s
, insn
, &r1
, &r2
);
3582 tmp32_1
= tcg_const_i32(r1
);
3583 tmp32_2
= tcg_const_i32(r2
);
3584 potential_page_fault(s
);
3585 gen_helper_mvcl(cc_op
, cpu_env
, tmp32_1
, tmp32_2
);
3587 tcg_temp_free_i32(tmp32_1
);
3588 tcg_temp_free_i32(tmp32_2
);
3590 case 0x10: /* LPR R1,R2 [RR] */
3591 insn
= ld_code2(env
, s
->pc
);
3592 decode_rr(s
, insn
, &r1
, &r2
);
3593 tmp32_1
= load_reg32(r2
);
3594 set_cc_abs32(s
, tmp32_1
);
3595 gen_helper_abs_i32(tmp32_1
, tmp32_1
);
3596 store_reg32(r1
, tmp32_1
);
3597 tcg_temp_free_i32(tmp32_1
);
3599 case 0x11: /* LNR R1,R2 [RR] */
3600 insn
= ld_code2(env
, s
->pc
);
3601 decode_rr(s
, insn
, &r1
, &r2
);
3602 tmp32_1
= load_reg32(r2
);
3603 set_cc_nabs32(s
, tmp32_1
);
3604 gen_helper_nabs_i32(tmp32_1
, tmp32_1
);
3605 store_reg32(r1
, tmp32_1
);
3606 tcg_temp_free_i32(tmp32_1
);
3608 case 0x12: /* LTR R1,R2 [RR] */
3609 insn
= ld_code2(env
, s
->pc
);
3610 decode_rr(s
, insn
, &r1
, &r2
);
3611 tmp32_1
= load_reg32(r2
);
3613 store_reg32(r1
, tmp32_1
);
3615 set_cc_s32(s
, tmp32_1
);
3616 tcg_temp_free_i32(tmp32_1
);
3618 case 0x13: /* LCR R1,R2 [RR] */
3619 insn
= ld_code2(env
, s
->pc
);
3620 decode_rr(s
, insn
, &r1
, &r2
);
3621 tmp32_1
= load_reg32(r2
);
3622 tcg_gen_neg_i32(tmp32_1
, tmp32_1
);
3623 store_reg32(r1
, tmp32_1
);
3624 set_cc_comp32(s
, tmp32_1
);
3625 tcg_temp_free_i32(tmp32_1
);
3627 case 0x14: /* NR R1,R2 [RR] */
3628 case 0x16: /* OR R1,R2 [RR] */
3629 case 0x17: /* XR R1,R2 [RR] */
3630 insn
= ld_code2(env
, s
->pc
);
3631 decode_rr(s
, insn
, &r1
, &r2
);
3632 tmp32_2
= load_reg32(r2
);
3633 tmp32_1
= load_reg32(r1
);
3634 gen_and_or_xor_i32(opc
, tmp32_1
, tmp32_2
);
3635 store_reg32(r1
, tmp32_1
);
3636 set_cc_nz_u32(s
, tmp32_1
);
3637 tcg_temp_free_i32(tmp32_1
);
3638 tcg_temp_free_i32(tmp32_2
);
3640 case 0x18: /* LR R1,R2 [RR] */
3641 insn
= ld_code2(env
, s
->pc
);
3642 decode_rr(s
, insn
, &r1
, &r2
);
3643 tmp32_1
= load_reg32(r2
);
3644 store_reg32(r1
, tmp32_1
);
3645 tcg_temp_free_i32(tmp32_1
);
3647 case 0x15: /* CLR R1,R2 [RR] */
3648 case 0x19: /* CR R1,R2 [RR] */
3649 insn
= ld_code2(env
, s
->pc
);
3650 decode_rr(s
, insn
, &r1
, &r2
);
3651 tmp32_1
= load_reg32(r1
);
3652 tmp32_2
= load_reg32(r2
);
3654 cmp_u32(s
, tmp32_1
, tmp32_2
);
3656 cmp_s32(s
, tmp32_1
, tmp32_2
);
3658 tcg_temp_free_i32(tmp32_1
);
3659 tcg_temp_free_i32(tmp32_2
);
3661 case 0x1c: /* MR R1,R2 [RR] */
3662 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3663 insn
= ld_code2(env
, s
->pc
);
3664 decode_rr(s
, insn
, &r1
, &r2
);
3665 tmp2
= load_reg(r2
);
3666 tmp3
= load_reg((r1
+ 1) & 15);
3667 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3668 tcg_gen_ext32s_i64(tmp3
, tmp3
);
3669 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
3670 store_reg32_i64((r1
+ 1) & 15, tmp2
);
3671 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
3672 store_reg32_i64(r1
, tmp2
);
3673 tcg_temp_free_i64(tmp2
);
3674 tcg_temp_free_i64(tmp3
);
3676 case 0x1d: /* DR R1,R2 [RR] */
3677 insn
= ld_code2(env
, s
->pc
);
3678 decode_rr(s
, insn
, &r1
, &r2
);
3679 tmp32_1
= load_reg32(r1
);
3680 tmp32_2
= load_reg32(r1
+ 1);
3681 tmp32_3
= load_reg32(r2
);
3683 tmp
= tcg_temp_new_i64(); /* dividend */
3684 tmp2
= tcg_temp_new_i64(); /* divisor */
3685 tmp3
= tcg_temp_new_i64();
3687 /* dividend is r(r1 << 32) | r(r1 + 1) */
3688 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3689 tcg_gen_extu_i32_i64(tmp2
, tmp32_2
);
3690 tcg_gen_shli_i64(tmp
, tmp
, 32);
3691 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
3693 /* divisor is r(r2) */
3694 tcg_gen_ext_i32_i64(tmp2
, tmp32_3
);
3696 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
3697 tcg_gen_rem_i64(tmp
, tmp
, tmp2
);
3699 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
3700 tcg_gen_trunc_i64_i32(tmp32_2
, tmp3
);
3702 store_reg32(r1
, tmp32_1
); /* remainder */
3703 store_reg32(r1
+ 1, tmp32_2
); /* quotient */
3704 tcg_temp_free_i32(tmp32_1
);
3705 tcg_temp_free_i32(tmp32_2
);
3706 tcg_temp_free_i32(tmp32_3
);
3707 tcg_temp_free_i64(tmp
);
3708 tcg_temp_free_i64(tmp2
);
3709 tcg_temp_free_i64(tmp3
);
3711 case 0x28: /* LDR R1,R2 [RR] */
3712 insn
= ld_code2(env
, s
->pc
);
3713 decode_rr(s
, insn
, &r1
, &r2
);
3714 tmp
= load_freg(r2
);
3715 store_freg(r1
, tmp
);
3716 tcg_temp_free_i64(tmp
);
3718 case 0x38: /* LER R1,R2 [RR] */
3719 insn
= ld_code2(env
, s
->pc
);
3720 decode_rr(s
, insn
, &r1
, &r2
);
3721 tmp32_1
= load_freg32(r2
);
3722 store_freg32(r1
, tmp32_1
);
3723 tcg_temp_free_i32(tmp32_1
);
3725 case 0x40: /* STH R1,D2(X2,B2) [RX] */
3726 insn
= ld_code4(env
, s
->pc
);
3727 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3728 tmp2
= load_reg(r1
);
3729 tcg_gen_qemu_st16(tmp2
, tmp
, get_mem_index(s
));
3730 tcg_temp_free_i64(tmp
);
3731 tcg_temp_free_i64(tmp2
);
3734 insn
= ld_code4(env
, s
->pc
);
3735 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3736 store_reg(r1
, tmp
); /* FIXME: 31/24-bit addressing */
3737 tcg_temp_free_i64(tmp
);
3739 case 0x42: /* STC R1,D2(X2,B2) [RX] */
3740 insn
= ld_code4(env
, s
->pc
);
3741 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3742 tmp2
= load_reg(r1
);
3743 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
3744 tcg_temp_free_i64(tmp
);
3745 tcg_temp_free_i64(tmp2
);
3747 case 0x43: /* IC R1,D2(X2,B2) [RX] */
3748 insn
= ld_code4(env
, s
->pc
);
3749 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3750 tmp2
= tcg_temp_new_i64();
3751 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
3752 store_reg8(r1
, tmp2
);
3753 tcg_temp_free_i64(tmp
);
3754 tcg_temp_free_i64(tmp2
);
3756 case 0x44: /* EX R1,D2(X2,B2) [RX] */
3757 insn
= ld_code4(env
, s
->pc
);
3758 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3759 tmp2
= load_reg(r1
);
3760 tmp3
= tcg_const_i64(s
->pc
+ 4);
3763 gen_helper_ex(cc_op
, cpu_env
, cc_op
, tmp2
, tmp
, tmp3
);
3765 tcg_temp_free_i64(tmp
);
3766 tcg_temp_free_i64(tmp2
);
3767 tcg_temp_free_i64(tmp3
);
3769 case 0x46: /* BCT R1,D2(X2,B2) [RX] */
3770 insn
= ld_code4(env
, s
->pc
);
3771 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3772 tcg_temp_free_i64(tmp
);
3774 tmp32_1
= load_reg32(r1
);
3775 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
3776 store_reg32(r1
, tmp32_1
);
3778 gen_update_cc_op(s
);
3779 l1
= gen_new_label();
3780 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp32_1
, 0, l1
);
3782 /* not taking the branch, jump to after the instruction */
3783 gen_goto_tb(s
, 0, s
->pc
+ 4);
3786 /* take the branch, move R2 into psw.addr */
3787 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3788 tcg_gen_mov_i64(psw_addr
, tmp
);
3789 s
->is_jmp
= DISAS_JUMP
;
3790 tcg_temp_free_i32(tmp32_1
);
3791 tcg_temp_free_i64(tmp
);
3793 case 0x47: /* BC M1,D2(X2,B2) [RX] */
3794 insn
= ld_code4(env
, s
->pc
);
3795 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3796 gen_bcr(s
, r1
, tmp
, s
->pc
+ 4);
3797 tcg_temp_free_i64(tmp
);
3798 s
->is_jmp
= DISAS_TB_JUMP
;
3800 case 0x48: /* LH R1,D2(X2,B2) [RX] */
3801 insn
= ld_code4(env
, s
->pc
);
3802 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3803 tmp2
= tcg_temp_new_i64();
3804 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
3805 store_reg32_i64(r1
, tmp2
);
3806 tcg_temp_free_i64(tmp
);
3807 tcg_temp_free_i64(tmp2
);
3809 case 0x49: /* CH R1,D2(X2,B2) [RX] */
3810 insn
= ld_code4(env
, s
->pc
);
3811 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3812 tmp32_1
= load_reg32(r1
);
3813 tmp32_2
= tcg_temp_new_i32();
3814 tmp2
= tcg_temp_new_i64();
3815 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
3816 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
3817 cmp_s32(s
, tmp32_1
, tmp32_2
);
3818 tcg_temp_free_i32(tmp32_1
);
3819 tcg_temp_free_i32(tmp32_2
);
3820 tcg_temp_free_i64(tmp
);
3821 tcg_temp_free_i64(tmp2
);
3823 case 0x4a: /* AH R1,D2(X2,B2) [RX] */
3824 case 0x4b: /* SH R1,D2(X2,B2) [RX] */
3825 case 0x4c: /* MH R1,D2(X2,B2) [RX] */
3826 insn
= ld_code4(env
, s
->pc
);
3827 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3828 tmp2
= tcg_temp_new_i64();
3829 tmp32_1
= load_reg32(r1
);
3830 tmp32_2
= tcg_temp_new_i32();
3831 tmp32_3
= tcg_temp_new_i32();
3833 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
3834 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
3837 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
3838 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3841 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
3842 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3845 tcg_gen_mul_i32(tmp32_3
, tmp32_1
, tmp32_2
);
3850 store_reg32(r1
, tmp32_3
);
3852 tcg_temp_free_i32(tmp32_1
);
3853 tcg_temp_free_i32(tmp32_2
);
3854 tcg_temp_free_i32(tmp32_3
);
3855 tcg_temp_free_i64(tmp
);
3856 tcg_temp_free_i64(tmp2
);
3858 case 0x4d: /* BAS R1,D2(X2,B2) [RX] */
3859 insn
= ld_code4(env
, s
->pc
);
3860 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3861 tmp2
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 4));
3862 store_reg(r1
, tmp2
);
3863 tcg_gen_mov_i64(psw_addr
, tmp
);
3864 tcg_temp_free_i64(tmp
);
3865 tcg_temp_free_i64(tmp2
);
3866 s
->is_jmp
= DISAS_JUMP
;
3868 case 0x4e: /* CVD R1,D2(X2,B2) [RX] */
3869 insn
= ld_code4(env
, s
->pc
);
3870 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3871 tmp2
= tcg_temp_new_i64();
3872 tmp32_1
= tcg_temp_new_i32();
3873 tcg_gen_trunc_i64_i32(tmp32_1
, regs
[r1
]);
3874 gen_helper_cvd(tmp2
, tmp32_1
);
3875 tcg_gen_qemu_st64(tmp2
, tmp
, get_mem_index(s
));
3876 tcg_temp_free_i64(tmp
);
3877 tcg_temp_free_i64(tmp2
);
3878 tcg_temp_free_i32(tmp32_1
);
3880 case 0x50: /* st r1, d2(x2, b2) */
3881 insn
= ld_code4(env
, s
->pc
);
3882 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3883 tmp2
= load_reg(r1
);
3884 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
3885 tcg_temp_free_i64(tmp
);
3886 tcg_temp_free_i64(tmp2
);
3888 case 0x55: /* CL R1,D2(X2,B2) [RX] */
3889 insn
= ld_code4(env
, s
->pc
);
3890 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3891 tmp2
= tcg_temp_new_i64();
3892 tmp32_1
= tcg_temp_new_i32();
3893 tmp32_2
= load_reg32(r1
);
3894 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
3895 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
3896 cmp_u32(s
, tmp32_2
, tmp32_1
);
3897 tcg_temp_free_i64(tmp
);
3898 tcg_temp_free_i64(tmp2
);
3899 tcg_temp_free_i32(tmp32_1
);
3900 tcg_temp_free_i32(tmp32_2
);
3902 case 0x54: /* N R1,D2(X2,B2) [RX] */
3903 case 0x56: /* O R1,D2(X2,B2) [RX] */
3904 case 0x57: /* X R1,D2(X2,B2) [RX] */
3905 insn
= ld_code4(env
, s
->pc
);
3906 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3907 tmp2
= tcg_temp_new_i64();
3908 tmp32_1
= load_reg32(r1
);
3909 tmp32_2
= tcg_temp_new_i32();
3910 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
3911 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
3912 gen_and_or_xor_i32(opc
, tmp32_1
, tmp32_2
);
3913 store_reg32(r1
, tmp32_1
);
3914 set_cc_nz_u32(s
, tmp32_1
);
3915 tcg_temp_free_i64(tmp
);
3916 tcg_temp_free_i64(tmp2
);
3917 tcg_temp_free_i32(tmp32_1
);
3918 tcg_temp_free_i32(tmp32_2
);
3920 case 0x58: /* l r1, d2(x2, b2) */
3921 insn
= ld_code4(env
, s
->pc
);
3922 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3923 tmp2
= tcg_temp_new_i64();
3924 tmp32_1
= tcg_temp_new_i32();
3925 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
3926 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
3927 store_reg32(r1
, tmp32_1
);
3928 tcg_temp_free_i64(tmp
);
3929 tcg_temp_free_i64(tmp2
);
3930 tcg_temp_free_i32(tmp32_1
);
3932 case 0x59: /* C R1,D2(X2,B2) [RX] */
3933 insn
= ld_code4(env
, s
->pc
);
3934 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3935 tmp2
= tcg_temp_new_i64();
3936 tmp32_1
= tcg_temp_new_i32();
3937 tmp32_2
= load_reg32(r1
);
3938 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
3939 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
3940 cmp_s32(s
, tmp32_2
, tmp32_1
);
3941 tcg_temp_free_i64(tmp
);
3942 tcg_temp_free_i64(tmp2
);
3943 tcg_temp_free_i32(tmp32_1
);
3944 tcg_temp_free_i32(tmp32_2
);
3946 case 0x5c: /* M R1,D2(X2,B2) [RX] */
3947 /* reg(r1, r1+1) = reg(r1+1) * *(s32*)addr */
3948 insn
= ld_code4(env
, s
->pc
);
3949 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3950 tmp2
= tcg_temp_new_i64();
3951 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
3952 tmp3
= load_reg((r1
+ 1) & 15);
3953 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3954 tcg_gen_ext32s_i64(tmp3
, tmp3
);
3955 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
3956 store_reg32_i64((r1
+ 1) & 15, tmp2
);
3957 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
3958 store_reg32_i64(r1
, tmp2
);
3959 tcg_temp_free_i64(tmp
);
3960 tcg_temp_free_i64(tmp2
);
3961 tcg_temp_free_i64(tmp3
);
3963 case 0x5d: /* D R1,D2(X2,B2) [RX] */
3964 insn
= ld_code4(env
, s
->pc
);
3965 tmp3
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
3966 tmp32_1
= load_reg32(r1
);
3967 tmp32_2
= load_reg32(r1
+ 1);
3969 tmp
= tcg_temp_new_i64();
3970 tmp2
= tcg_temp_new_i64();
3972 /* dividend is r(r1 << 32) | r(r1 + 1) */
3973 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3974 tcg_gen_extu_i32_i64(tmp2
, tmp32_2
);
3975 tcg_gen_shli_i64(tmp
, tmp
, 32);
3976 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
3978 /* divisor is in memory */
3979 tcg_gen_qemu_ld32s(tmp2
, tmp3
, get_mem_index(s
));
3981 /* XXX divisor == 0 -> FixP divide exception */
3983 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
3984 tcg_gen_rem_i64(tmp
, tmp
, tmp2
);
3986 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
3987 tcg_gen_trunc_i64_i32(tmp32_2
, tmp3
);
3989 store_reg32(r1
, tmp32_1
); /* remainder */
3990 store_reg32(r1
+ 1, tmp32_2
); /* quotient */
3991 tcg_temp_free_i32(tmp32_1
);
3992 tcg_temp_free_i32(tmp32_2
);
3993 tcg_temp_free_i64(tmp
);
3994 tcg_temp_free_i64(tmp2
);
3995 tcg_temp_free_i64(tmp3
);
3997 case 0x60: /* STD R1,D2(X2,B2) [RX] */
3998 insn
= ld_code4(env
, s
->pc
);
3999 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4000 tmp2
= load_freg(r1
);
4001 tcg_gen_qemu_st64(tmp2
, tmp
, get_mem_index(s
));
4002 tcg_temp_free_i64(tmp
);
4003 tcg_temp_free_i64(tmp2
);
4005 case 0x68: /* LD R1,D2(X2,B2) [RX] */
4006 insn
= ld_code4(env
, s
->pc
);
4007 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4008 tmp2
= tcg_temp_new_i64();
4009 tcg_gen_qemu_ld64(tmp2
, tmp
, get_mem_index(s
));
4010 store_freg(r1
, tmp2
);
4011 tcg_temp_free_i64(tmp
);
4012 tcg_temp_free_i64(tmp2
);
4014 case 0x70: /* STE R1,D2(X2,B2) [RX] */
4015 insn
= ld_code4(env
, s
->pc
);
4016 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4017 tmp2
= tcg_temp_new_i64();
4018 tmp32_1
= load_freg32(r1
);
4019 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
4020 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
4021 tcg_temp_free_i64(tmp
);
4022 tcg_temp_free_i64(tmp2
);
4023 tcg_temp_free_i32(tmp32_1
);
4025 case 0x71: /* MS R1,D2(X2,B2) [RX] */
4026 insn
= ld_code4(env
, s
->pc
);
4027 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4028 tmp2
= tcg_temp_new_i64();
4029 tmp32_1
= load_reg32(r1
);
4030 tmp32_2
= tcg_temp_new_i32();
4031 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
4032 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4033 tcg_gen_mul_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4034 store_reg32(r1
, tmp32_1
);
4035 tcg_temp_free_i64(tmp
);
4036 tcg_temp_free_i64(tmp2
);
4037 tcg_temp_free_i32(tmp32_1
);
4038 tcg_temp_free_i32(tmp32_2
);
4040 case 0x78: /* LE R1,D2(X2,B2) [RX] */
4041 insn
= ld_code4(env
, s
->pc
);
4042 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4043 tmp2
= tcg_temp_new_i64();
4044 tmp32_1
= tcg_temp_new_i32();
4045 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4046 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4047 store_freg32(r1
, tmp32_1
);
4048 tcg_temp_free_i64(tmp
);
4049 tcg_temp_free_i64(tmp2
);
4050 tcg_temp_free_i32(tmp32_1
);
4052 #ifndef CONFIG_USER_ONLY
4053 case 0x80: /* SSM D2(B2) [S] */
4054 /* Set System Mask */
4055 check_privileged(s
);
4056 insn
= ld_code4(env
, s
->pc
);
4057 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4058 tmp
= get_address(s
, 0, b2
, d2
);
4059 tmp2
= tcg_temp_new_i64();
4060 tmp3
= tcg_temp_new_i64();
4061 tcg_gen_andi_i64(tmp3
, psw_mask
, ~0xff00000000000000ULL
);
4062 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4063 tcg_gen_shli_i64(tmp2
, tmp2
, 56);
4064 tcg_gen_or_i64(psw_mask
, tmp3
, tmp2
);
4065 tcg_temp_free_i64(tmp
);
4066 tcg_temp_free_i64(tmp2
);
4067 tcg_temp_free_i64(tmp3
);
4069 case 0x82: /* LPSW D2(B2) [S] */
4071 check_privileged(s
);
4072 insn
= ld_code4(env
, s
->pc
);
4073 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4074 tmp
= get_address(s
, 0, b2
, d2
);
4075 tmp2
= tcg_temp_new_i64();
4076 tmp3
= tcg_temp_new_i64();
4077 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4078 tcg_gen_addi_i64(tmp
, tmp
, 4);
4079 tcg_gen_qemu_ld32u(tmp3
, tmp
, get_mem_index(s
));
4080 /* Convert the 32-bit PSW_MASK into the 64-bit PSW_MASK. */
4081 tcg_gen_shli_i64(tmp2
, tmp2
, 32);
4082 gen_helper_load_psw(cpu_env
, tmp2
, tmp3
);
4083 tcg_temp_free_i64(tmp
);
4084 tcg_temp_free_i64(tmp2
);
4085 tcg_temp_free_i64(tmp3
);
4086 /* we need to keep cc_op intact */
4087 s
->is_jmp
= DISAS_JUMP
;
4089 case 0x83: /* DIAG R1,R3,D2 [RS] */
4090 /* Diagnose call (KVM hypercall) */
4091 check_privileged(s
);
4092 potential_page_fault(s
);
4093 insn
= ld_code4(env
, s
->pc
);
4094 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4095 tmp32_1
= tcg_const_i32(insn
& 0xfff);
4098 gen_helper_diag(tmp2
, cpu_env
, tmp32_1
, tmp2
, tmp3
);
4100 tcg_temp_free_i32(tmp32_1
);
4101 tcg_temp_free_i64(tmp2
);
4102 tcg_temp_free_i64(tmp3
);
4105 case 0x88: /* SRL R1,D2(B2) [RS] */
4106 case 0x89: /* SLL R1,D2(B2) [RS] */
4107 case 0x8a: /* SRA R1,D2(B2) [RS] */
4108 insn
= ld_code4(env
, s
->pc
);
4109 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4110 tmp
= get_address(s
, 0, b2
, d2
);
4111 tmp32_1
= load_reg32(r1
);
4112 tmp32_2
= tcg_temp_new_i32();
4113 tcg_gen_trunc_i64_i32(tmp32_2
, tmp
);
4114 tcg_gen_andi_i32(tmp32_2
, tmp32_2
, 0x3f);
4117 tcg_gen_shr_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4120 tcg_gen_shl_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4123 tcg_gen_sar_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4124 set_cc_s32(s
, tmp32_1
);
4129 store_reg32(r1
, tmp32_1
);
4130 tcg_temp_free_i64(tmp
);
4131 tcg_temp_free_i32(tmp32_1
);
4132 tcg_temp_free_i32(tmp32_2
);
4134 case 0x8c: /* SRDL R1,D2(B2) [RS] */
4135 case 0x8d: /* SLDL R1,D2(B2) [RS] */
4136 case 0x8e: /* SRDA R1,D2(B2) [RS] */
4137 insn
= ld_code4(env
, s
->pc
);
4138 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4139 tmp
= get_address(s
, 0, b2
, d2
); /* shift */
4140 tmp2
= tcg_temp_new_i64();
4141 tmp32_1
= load_reg32(r1
);
4142 tmp32_2
= load_reg32(r1
+ 1);
4143 tcg_gen_concat_i32_i64(tmp2
, tmp32_2
, tmp32_1
); /* operand */
4146 tcg_gen_shr_i64(tmp2
, tmp2
, tmp
);
4149 tcg_gen_shl_i64(tmp2
, tmp2
, tmp
);
4152 tcg_gen_sar_i64(tmp2
, tmp2
, tmp
);
4153 set_cc_s64(s
, tmp2
);
4156 tcg_gen_shri_i64(tmp
, tmp2
, 32);
4157 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4158 store_reg32(r1
, tmp32_1
);
4159 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4160 store_reg32(r1
+ 1, tmp32_2
);
4161 tcg_temp_free_i64(tmp
);
4162 tcg_temp_free_i64(tmp2
);
4164 case 0x98: /* LM R1,R3,D2(B2) [RS] */
4165 case 0x90: /* STM R1,R3,D2(B2) [RS] */
4166 insn
= ld_code4(env
, s
->pc
);
4167 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4169 tmp
= get_address(s
, 0, b2
, d2
);
4170 tmp2
= tcg_temp_new_i64();
4171 tmp3
= tcg_const_i64(4);
4172 tmp4
= tcg_const_i64(0xffffffff00000000ULL
);
4173 for (i
= r1
;; i
= (i
+ 1) % 16) {
4175 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4176 tcg_gen_and_i64(regs
[i
], regs
[i
], tmp4
);
4177 tcg_gen_or_i64(regs
[i
], regs
[i
], tmp2
);
4179 tcg_gen_qemu_st32(regs
[i
], tmp
, get_mem_index(s
));
4184 tcg_gen_add_i64(tmp
, tmp
, tmp3
);
4186 tcg_temp_free_i64(tmp
);
4187 tcg_temp_free_i64(tmp2
);
4188 tcg_temp_free_i64(tmp3
);
4189 tcg_temp_free_i64(tmp4
);
4191 case 0x91: /* TM D1(B1),I2 [SI] */
4192 insn
= ld_code4(env
, s
->pc
);
4193 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4194 tmp2
= tcg_const_i64(i2
);
4195 tcg_gen_qemu_ld8u(tmp
, tmp
, get_mem_index(s
));
4196 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_32
);
4197 tcg_temp_free_i64(tmp
);
4198 tcg_temp_free_i64(tmp2
);
4200 case 0x92: /* MVI D1(B1),I2 [SI] */
4201 insn
= ld_code4(env
, s
->pc
);
4202 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4203 tmp2
= tcg_const_i64(i2
);
4204 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4205 tcg_temp_free_i64(tmp
);
4206 tcg_temp_free_i64(tmp2
);
4208 case 0x94: /* NI D1(B1),I2 [SI] */
4209 case 0x96: /* OI D1(B1),I2 [SI] */
4210 case 0x97: /* XI D1(B1),I2 [SI] */
4211 insn
= ld_code4(env
, s
->pc
);
4212 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4213 tmp2
= tcg_temp_new_i64();
4214 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4217 tcg_gen_andi_i64(tmp2
, tmp2
, i2
);
4220 tcg_gen_ori_i64(tmp2
, tmp2
, i2
);
4223 tcg_gen_xori_i64(tmp2
, tmp2
, i2
);
4228 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4229 set_cc_nz_u64(s
, tmp2
);
4230 tcg_temp_free_i64(tmp
);
4231 tcg_temp_free_i64(tmp2
);
4233 case 0x95: /* CLI D1(B1),I2 [SI] */
4234 insn
= ld_code4(env
, s
->pc
);
4235 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4236 tmp2
= tcg_temp_new_i64();
4237 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4238 cmp_u64c(s
, tmp2
, i2
);
4239 tcg_temp_free_i64(tmp
);
4240 tcg_temp_free_i64(tmp2
);
4242 case 0x9a: /* LAM R1,R3,D2(B2) [RS] */
4243 insn
= ld_code4(env
, s
->pc
);
4244 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4245 tmp
= get_address(s
, 0, b2
, d2
);
4246 tmp32_1
= tcg_const_i32(r1
);
4247 tmp32_2
= tcg_const_i32(r3
);
4248 potential_page_fault(s
);
4249 gen_helper_lam(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4250 tcg_temp_free_i64(tmp
);
4251 tcg_temp_free_i32(tmp32_1
);
4252 tcg_temp_free_i32(tmp32_2
);
4254 case 0x9b: /* STAM R1,R3,D2(B2) [RS] */
4255 insn
= ld_code4(env
, s
->pc
);
4256 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4257 tmp
= get_address(s
, 0, b2
, d2
);
4258 tmp32_1
= tcg_const_i32(r1
);
4259 tmp32_2
= tcg_const_i32(r3
);
4260 potential_page_fault(s
);
4261 gen_helper_stam(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4262 tcg_temp_free_i64(tmp
);
4263 tcg_temp_free_i32(tmp32_1
);
4264 tcg_temp_free_i32(tmp32_2
);
4267 insn
= ld_code4(env
, s
->pc
);
4268 r1
= (insn
>> 20) & 0xf;
4269 op
= (insn
>> 16) & 0xf;
4271 disas_a5(env
, s
, op
, r1
, i2
);
4274 insn
= ld_code4(env
, s
->pc
);
4275 r1
= (insn
>> 20) & 0xf;
4276 op
= (insn
>> 16) & 0xf;
4278 disas_a7(env
, s
, op
, r1
, i2
);
4280 case 0xa8: /* MVCLE R1,R3,D2(B2) [RS] */
4281 insn
= ld_code4(env
, s
->pc
);
4282 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4283 tmp
= get_address(s
, 0, b2
, d2
);
4284 tmp32_1
= tcg_const_i32(r1
);
4285 tmp32_2
= tcg_const_i32(r3
);
4286 potential_page_fault(s
);
4287 gen_helper_mvcle(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4289 tcg_temp_free_i64(tmp
);
4290 tcg_temp_free_i32(tmp32_1
);
4291 tcg_temp_free_i32(tmp32_2
);
4293 case 0xa9: /* CLCLE R1,R3,D2(B2) [RS] */
4294 insn
= ld_code4(env
, s
->pc
);
4295 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4296 tmp
= get_address(s
, 0, b2
, d2
);
4297 tmp32_1
= tcg_const_i32(r1
);
4298 tmp32_2
= tcg_const_i32(r3
);
4299 potential_page_fault(s
);
4300 gen_helper_clcle(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4302 tcg_temp_free_i64(tmp
);
4303 tcg_temp_free_i32(tmp32_1
);
4304 tcg_temp_free_i32(tmp32_2
);
4306 #ifndef CONFIG_USER_ONLY
4307 case 0xac: /* STNSM D1(B1),I2 [SI] */
4308 case 0xad: /* STOSM D1(B1),I2 [SI] */
4309 check_privileged(s
);
4310 insn
= ld_code4(env
, s
->pc
);
4311 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4312 tmp2
= tcg_temp_new_i64();
4313 tcg_gen_shri_i64(tmp2
, psw_mask
, 56);
4314 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4316 tcg_gen_andi_i64(psw_mask
, psw_mask
,
4317 ((uint64_t)i2
<< 56) | 0x00ffffffffffffffULL
);
4319 tcg_gen_ori_i64(psw_mask
, psw_mask
, (uint64_t)i2
<< 56);
4321 tcg_temp_free_i64(tmp
);
4322 tcg_temp_free_i64(tmp2
);
4324 case 0xae: /* SIGP R1,R3,D2(B2) [RS] */
4325 check_privileged(s
);
4326 insn
= ld_code4(env
, s
->pc
);
4327 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4328 tmp
= get_address(s
, 0, b2
, d2
);
4329 tmp2
= load_reg(r3
);
4330 tmp32_1
= tcg_const_i32(r1
);
4331 potential_page_fault(s
);
4332 gen_helper_sigp(cc_op
, cpu_env
, tmp
, tmp32_1
, tmp2
);
4334 tcg_temp_free_i64(tmp
);
4335 tcg_temp_free_i64(tmp2
);
4336 tcg_temp_free_i32(tmp32_1
);
4338 case 0xb1: /* LRA R1,D2(X2, B2) [RX] */
4339 check_privileged(s
);
4340 insn
= ld_code4(env
, s
->pc
);
4341 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4342 tmp32_1
= tcg_const_i32(r1
);
4343 potential_page_fault(s
);
4344 gen_helper_lra(cc_op
, cpu_env
, tmp
, tmp32_1
);
4346 tcg_temp_free_i64(tmp
);
4347 tcg_temp_free_i32(tmp32_1
);
4351 insn
= ld_code4(env
, s
->pc
);
4352 op
= (insn
>> 16) & 0xff;
4354 case 0x9c: /* STFPC D2(B2) [S] */
4356 b2
= (insn
>> 12) & 0xf;
4357 tmp32_1
= tcg_temp_new_i32();
4358 tmp
= tcg_temp_new_i64();
4359 tmp2
= get_address(s
, 0, b2
, d2
);
4360 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, fpc
));
4361 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
4362 tcg_gen_qemu_st32(tmp
, tmp2
, get_mem_index(s
));
4363 tcg_temp_free_i32(tmp32_1
);
4364 tcg_temp_free_i64(tmp
);
4365 tcg_temp_free_i64(tmp2
);
4368 disas_b2(env
, s
, op
, insn
);
4373 insn
= ld_code4(env
, s
->pc
);
4374 op
= (insn
>> 16) & 0xff;
4375 r3
= (insn
>> 12) & 0xf; /* aka m3 */
4376 r1
= (insn
>> 4) & 0xf;
4378 disas_b3(env
, s
, op
, r3
, r1
, r2
);
4380 #ifndef CONFIG_USER_ONLY
4381 case 0xb6: /* STCTL R1,R3,D2(B2) [RS] */
4383 check_privileged(s
);
4384 insn
= ld_code4(env
, s
->pc
);
4385 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4386 tmp
= get_address(s
, 0, b2
, d2
);
4387 tmp32_1
= tcg_const_i32(r1
);
4388 tmp32_2
= tcg_const_i32(r3
);
4389 potential_page_fault(s
);
4390 gen_helper_stctl(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4391 tcg_temp_free_i64(tmp
);
4392 tcg_temp_free_i32(tmp32_1
);
4393 tcg_temp_free_i32(tmp32_2
);
4395 case 0xb7: /* LCTL R1,R3,D2(B2) [RS] */
4397 check_privileged(s
);
4398 insn
= ld_code4(env
, s
->pc
);
4399 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4400 tmp
= get_address(s
, 0, b2
, d2
);
4401 tmp32_1
= tcg_const_i32(r1
);
4402 tmp32_2
= tcg_const_i32(r3
);
4403 potential_page_fault(s
);
4404 gen_helper_lctl(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4405 tcg_temp_free_i64(tmp
);
4406 tcg_temp_free_i32(tmp32_1
);
4407 tcg_temp_free_i32(tmp32_2
);
4411 insn
= ld_code4(env
, s
->pc
);
4412 r1
= (insn
>> 4) & 0xf;
4414 op
= (insn
>> 16) & 0xff;
4415 disas_b9(env
, s
, op
, r1
, r2
);
4417 case 0xba: /* CS R1,R3,D2(B2) [RS] */
4418 insn
= ld_code4(env
, s
->pc
);
4419 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4420 tmp
= get_address(s
, 0, b2
, d2
);
4421 tmp32_1
= tcg_const_i32(r1
);
4422 tmp32_2
= tcg_const_i32(r3
);
4423 potential_page_fault(s
);
4424 gen_helper_cs(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4426 tcg_temp_free_i64(tmp
);
4427 tcg_temp_free_i32(tmp32_1
);
4428 tcg_temp_free_i32(tmp32_2
);
4430 case 0xbd: /* CLM R1,M3,D2(B2) [RS] */
4431 insn
= ld_code4(env
, s
->pc
);
4432 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4433 tmp
= get_address(s
, 0, b2
, d2
);
4434 tmp32_1
= load_reg32(r1
);
4435 tmp32_2
= tcg_const_i32(r3
);
4436 potential_page_fault(s
);
4437 gen_helper_clm(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp
);
4439 tcg_temp_free_i64(tmp
);
4440 tcg_temp_free_i32(tmp32_1
);
4441 tcg_temp_free_i32(tmp32_2
);
4443 case 0xbe: /* STCM R1,M3,D2(B2) [RS] */
4444 insn
= ld_code4(env
, s
->pc
);
4445 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4446 tmp
= get_address(s
, 0, b2
, d2
);
4447 tmp32_1
= load_reg32(r1
);
4448 tmp32_2
= tcg_const_i32(r3
);
4449 potential_page_fault(s
);
4450 gen_helper_stcm(cpu_env
, tmp32_1
, tmp32_2
, tmp
);
4451 tcg_temp_free_i64(tmp
);
4452 tcg_temp_free_i32(tmp32_1
);
4453 tcg_temp_free_i32(tmp32_2
);
4455 case 0xbf: /* ICM R1,M3,D2(B2) [RS] */
4456 insn
= ld_code4(env
, s
->pc
);
4457 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4459 /* effectively a 32-bit load */
4460 tmp
= get_address(s
, 0, b2
, d2
);
4461 tmp32_1
= tcg_temp_new_i32();
4462 tmp32_2
= tcg_const_i32(r3
);
4463 tcg_gen_qemu_ld32u(tmp
, tmp
, get_mem_index(s
));
4464 store_reg32_i64(r1
, tmp
);
4465 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4466 set_cc_icm(s
, tmp32_2
, tmp32_1
);
4467 tcg_temp_free_i64(tmp
);
4468 tcg_temp_free_i32(tmp32_1
);
4469 tcg_temp_free_i32(tmp32_2
);
4471 uint32_t mask
= 0x00ffffffUL
;
4472 uint32_t shift
= 24;
4474 tmp
= get_address(s
, 0, b2
, d2
);
4475 tmp2
= tcg_temp_new_i64();
4476 tmp32_1
= load_reg32(r1
);
4477 tmp32_2
= tcg_temp_new_i32();
4478 tmp32_3
= tcg_const_i32(r3
);
4479 tmp32_4
= tcg_const_i32(0);
4482 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4483 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4485 tcg_gen_shli_i32(tmp32_2
, tmp32_2
, shift
);
4487 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, mask
);
4488 tcg_gen_or_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4489 tcg_gen_or_i32(tmp32_4
, tmp32_4
, tmp32_2
);
4490 tcg_gen_addi_i64(tmp
, tmp
, 1);
4492 m3
= (m3
<< 1) & 0xf;
4493 mask
= (mask
>> 8) | 0xff000000UL
;
4496 store_reg32(r1
, tmp32_1
);
4497 set_cc_icm(s
, tmp32_3
, tmp32_4
);
4498 tcg_temp_free_i64(tmp
);
4499 tcg_temp_free_i64(tmp2
);
4500 tcg_temp_free_i32(tmp32_1
);
4501 tcg_temp_free_i32(tmp32_2
);
4502 tcg_temp_free_i32(tmp32_3
);
4503 tcg_temp_free_i32(tmp32_4
);
4505 /* i.e. env->cc = 0 */
4506 gen_op_movi_cc(s
, 0);
4511 insn
= ld_code6(env
, s
->pc
);
4512 r1
= (insn
>> 36) & 0xf;
4513 op
= (insn
>> 32) & 0xf;
4517 disas_c0(env
, s
, op
, r1
, i2
);
4520 disas_c2(env
, s
, op
, r1
, i2
);
4526 case 0xd2: /* MVC D1(L,B1),D2(B2) [SS] */
4527 case 0xd4: /* NC D1(L,B1),D2(B2) [SS] */
4528 case 0xd5: /* CLC D1(L,B1),D2(B2) [SS] */
4529 case 0xd6: /* OC D1(L,B1),D2(B2) [SS] */
4530 case 0xd7: /* XC D1(L,B1),D2(B2) [SS] */
4531 case 0xdc: /* TR D1(L,B1),D2(B2) [SS] */
4532 case 0xf3: /* UNPK D1(L1,B1),D2(L2,B2) [SS] */
4533 insn
= ld_code6(env
, s
->pc
);
4534 vl
= tcg_const_i32((insn
>> 32) & 0xff);
4535 b1
= (insn
>> 28) & 0xf;
4536 b2
= (insn
>> 12) & 0xf;
4537 d1
= (insn
>> 16) & 0xfff;
4539 tmp
= get_address(s
, 0, b1
, d1
);
4540 tmp2
= get_address(s
, 0, b2
, d2
);
4543 gen_op_mvc(s
, (insn
>> 32) & 0xff, tmp
, tmp2
);
4546 potential_page_fault(s
);
4547 gen_helper_nc(cc_op
, cpu_env
, vl
, tmp
, tmp2
);
4551 gen_op_clc(s
, (insn
>> 32) & 0xff, tmp
, tmp2
);
4554 potential_page_fault(s
);
4555 gen_helper_oc(cc_op
, cpu_env
, vl
, tmp
, tmp2
);
4559 potential_page_fault(s
);
4560 gen_helper_xc(cc_op
, cpu_env
, vl
, tmp
, tmp2
);
4564 potential_page_fault(s
);
4565 gen_helper_tr(cpu_env
, vl
, tmp
, tmp2
);
4569 potential_page_fault(s
);
4570 gen_helper_unpk(cpu_env
, vl
, tmp
, tmp2
);
4575 tcg_temp_free_i64(tmp
);
4576 tcg_temp_free_i64(tmp2
);
4578 #ifndef CONFIG_USER_ONLY
4579 case 0xda: /* MVCP D1(R1,B1),D2(B2),R3 [SS] */
4580 case 0xdb: /* MVCS D1(R1,B1),D2(B2),R3 [SS] */
4581 check_privileged(s
);
4582 potential_page_fault(s
);
4583 insn
= ld_code6(env
, s
->pc
);
4584 r1
= (insn
>> 36) & 0xf;
4585 r3
= (insn
>> 32) & 0xf;
4586 b1
= (insn
>> 28) & 0xf;
4587 d1
= (insn
>> 16) & 0xfff;
4588 b2
= (insn
>> 12) & 0xf;
4592 tmp2
= get_address(s
, 0, b1
, d1
);
4593 tmp3
= get_address(s
, 0, b2
, d2
);
4595 gen_helper_mvcp(cc_op
, cpu_env
, tmp
, tmp2
, tmp3
);
4597 gen_helper_mvcs(cc_op
, cpu_env
, tmp
, tmp2
, tmp3
);
4600 tcg_temp_free_i64(tmp
);
4601 tcg_temp_free_i64(tmp2
);
4602 tcg_temp_free_i64(tmp3
);
4606 insn
= ld_code6(env
, s
->pc
);
4609 r1
= (insn
>> 36) & 0xf;
4610 x2
= (insn
>> 32) & 0xf;
4611 b2
= (insn
>> 28) & 0xf;
4612 d2
= ((int)((((insn
>> 16) & 0xfff)
4613 | ((insn
<< 4) & 0xff000)) << 12)) >> 12;
4614 disas_e3(env
, s
, op
, r1
, x2
, b2
, d2
);
4616 #ifndef CONFIG_USER_ONLY
4618 /* Test Protection */
4619 check_privileged(s
);
4620 insn
= ld_code6(env
, s
->pc
);
4622 disas_e5(env
, s
, insn
);
4626 insn
= ld_code6(env
, s
->pc
);
4629 r1
= (insn
>> 36) & 0xf;
4630 r3
= (insn
>> 32) & 0xf;
4631 b2
= (insn
>> 28) & 0xf;
4632 d2
= ((int)((((insn
>> 16) & 0xfff)
4633 | ((insn
<< 4) & 0xff000)) << 12)) >> 12;
4634 disas_eb(env
, s
, op
, r1
, r3
, b2
, d2
);
4637 insn
= ld_code6(env
, s
->pc
);
4640 r1
= (insn
>> 36) & 0xf;
4641 x2
= (insn
>> 32) & 0xf;
4642 b2
= (insn
>> 28) & 0xf;
4643 d2
= (short)((insn
>> 16) & 0xfff);
4644 r1b
= (insn
>> 12) & 0xf;
4645 disas_ed(env
, s
, op
, r1
, x2
, b2
, d2
, r1b
);
4648 qemu_log_mask(LOG_UNIMP
, "unimplemented opcode 0x%x\n", opc
);
4649 gen_illegal_opcode(s
);
4654 /* ====================================================================== */
4655 /* Define the insn format enumeration. */
4656 #define F0(N) FMT_##N,
4657 #define F1(N, X1) F0(N)
4658 #define F2(N, X1, X2) F0(N)
4659 #define F3(N, X1, X2, X3) F0(N)
4660 #define F4(N, X1, X2, X3, X4) F0(N)
4661 #define F5(N, X1, X2, X3, X4, X5) F0(N)
4664 #include "insn-format.def"
4674 /* Define a structure to hold the decoded fields. We'll store each inside
4675 an array indexed by an enum. In order to conserve memory, we'll arrange
4676 for fields that do not exist at the same time to overlap, thus the "C"
4677 for compact. For checking purposes there is an "O" for original index
4678 as well that will be applied to availability bitmaps. */
4680 enum DisasFieldIndexO
{
4703 enum DisasFieldIndexC
{
4734 struct DisasFields
{
4737 unsigned presentC
:16;
4738 unsigned int presentO
;
4742 /* This is the way fields are to be accessed out of DisasFields. */
4743 #define have_field(S, F) have_field1((S), FLD_O_##F)
4744 #define get_field(S, F) get_field1((S), FLD_O_##F, FLD_C_##F)
/* have_field1: return true iff operand field 'c' was decoded for this
   insn, by testing bit 'c' of the presentO availability bitmap. */
4746 static bool have_field1(const DisasFields
*f
, enum DisasFieldIndexO c
)
4748 return (f
->presentO
>> c
) & 1;
/* get_field1: fetch decoded field 'c', asserting (via the original 'o'
   index) that the field is actually present for this insn format.
   NOTE(review): the return statement is not visible in this extraction —
   confirm the accessor body against the upstream source. */
4751 static int get_field1(const DisasFields
*f
, enum DisasFieldIndexO o
,
4752 enum DisasFieldIndexC c
)
4754 assert(have_field1(f
, o
));
4758 /* Describe the layout of each field in each format. */
4759 typedef struct DisasField
{
4761 unsigned int size
:8;
4762 unsigned int type
:2;
4763 unsigned int indexC
:6;
4764 enum DisasFieldIndexO indexO
:8;
4767 typedef struct DisasFormatInfo
{
4768 DisasField op
[NUM_C_FIELD
];
4771 #define R(N, B) { B, 4, 0, FLD_C_r##N, FLD_O_r##N }
4772 #define M(N, B) { B, 4, 0, FLD_C_m##N, FLD_O_m##N }
4773 #define BD(N, BB, BD) { BB, 4, 0, FLD_C_b##N, FLD_O_b##N }, \
4774 { BD, 12, 0, FLD_C_d##N, FLD_O_d##N }
4775 #define BXD(N) { 16, 4, 0, FLD_C_b##N, FLD_O_b##N }, \
4776 { 12, 4, 0, FLD_C_x##N, FLD_O_x##N }, \
4777 { 20, 12, 0, FLD_C_d##N, FLD_O_d##N }
4778 #define BDL(N) { 16, 4, 0, FLD_C_b##N, FLD_O_b##N }, \
4779 { 20, 20, 2, FLD_C_d##N, FLD_O_d##N }
4780 #define BXDL(N) { 16, 4, 0, FLD_C_b##N, FLD_O_b##N }, \
4781 { 12, 4, 0, FLD_C_x##N, FLD_O_x##N }, \
4782 { 20, 20, 2, FLD_C_d##N, FLD_O_d##N }
4783 #define I(N, B, S) { B, S, 1, FLD_C_i##N, FLD_O_i##N }
4784 #define L(N, B, S) { B, S, 0, FLD_C_l##N, FLD_O_l##N }
4786 #define F0(N) { { } },
4787 #define F1(N, X1) { { X1 } },
4788 #define F2(N, X1, X2) { { X1, X2 } },
4789 #define F3(N, X1, X2, X3) { { X1, X2, X3 } },
4790 #define F4(N, X1, X2, X3, X4) { { X1, X2, X3, X4 } },
4791 #define F5(N, X1, X2, X3, X4, X5) { { X1, X2, X3, X4, X5 } },
4793 static const DisasFormatInfo format_info
[] = {
4794 #include "insn-format.def"
4812 /* Generally, we'll extract operands into this structures, operate upon
4813 them, and store them back. See the "in1", "in2", "prep", "wout" sets
4814 of routines below for more details. */
4816 bool g_out
, g_out2
, g_in1
, g_in2
;
4817 TCGv_i64 out
, out2
, in1
, in2
;
4821 /* Return values from translate_one, indicating the state of the TB. */
4823 /* Continue the TB. */
4825 /* We have emitted one or more goto_tb. No fixup required. */
4827 /* We are not using a goto_tb (for whatever reason), but have updated
4828 the PC (for whatever reason), so there's no need to do it again on
4831 /* We are exiting the TB, but have neither emitted a goto_tb, nor
4832 updated the PC for the next instruction to be executed. */
4834 /* We are ending the TB with a noreturn function call, e.g. longjmp.
4835 No following code will be executed. */
4839 typedef enum DisasFacility
{
4840 FAC_Z
, /* zarch (default) */
4841 FAC_CASS
, /* compare and swap and store */
4842 FAC_CASS2
, /* compare and swap and store 2*/
4843 FAC_DFP
, /* decimal floating point */
4844 FAC_DFPR
, /* decimal floating point rounding */
4845 FAC_DO
, /* distinct operands */
4846 FAC_EE
, /* execute extensions */
4847 FAC_EI
, /* extended immediate */
4848 FAC_FPE
, /* floating point extension */
4849 FAC_FPSSH
, /* floating point support sign handling */
4850 FAC_FPRGR
, /* FPR-GR transfer */
4851 FAC_GIE
, /* general instructions extension */
4852 FAC_HFP_MA
, /* HFP multiply-and-add/subtract */
4853 FAC_HW
, /* high-word */
4854 FAC_IEEEE_SIM
, /* IEEE exception simulation */
4855 FAC_LOC
, /* load/store on condition */
4856 FAC_LD
, /* long displacement */
4857 FAC_PC
, /* population count */
4858 FAC_SCF
, /* store clock fast */
4859 FAC_SFLE
, /* store facility list extended */
4865 DisasFacility fac
:6;
4869 void (*help_in1
)(DisasContext
*, DisasFields
*, DisasOps
*);
4870 void (*help_in2
)(DisasContext
*, DisasFields
*, DisasOps
*);
4871 void (*help_prep
)(DisasContext
*, DisasFields
*, DisasOps
*);
4872 void (*help_wout
)(DisasContext
*, DisasFields
*, DisasOps
*);
4873 void (*help_cout
)(DisasContext
*, DisasOps
*);
4874 ExitStatus (*help_op
)(DisasContext
*, DisasOps
*);
4879 /* ====================================================================== */
4880 /* The operations. These perform the bulk of the work for any insn,
4881 usually after the operands have been loaded and output initialized. */
/* op_add: emit a 64-bit TCG add, o->out = o->in1 + o->in2.
   Operand load/store and cc update are handled by the in/prep/wout/cout
   helper framework, not here. */
4883 static ExitStatus
op_add(DisasContext
*s
, DisasOps
*o
)
4885 tcg_gen_add_i64(o
->out
, o
->in1
, o
->in2
);
/* op_sub: emit a 64-bit TCG subtract, o->out = o->in1 - o->in2. */
4889 static ExitStatus
op_sub(DisasContext
*s
, DisasOps
*o
)
4891 tcg_gen_sub_i64(o
->out
, o
->in1
, o
->in2
);
4895 /* ====================================================================== */
4896 /* The "Cc OUTput" generators. Given the generated output (and in some cases
4897 the original inputs), update the various cc data structures in order to
4898 be able to compute the new condition code. */
/* cout_adds32: record cc state as CC_OP_ADD_32 with (in1, in2, out)
   as the cc source operands, via gen_op_update3_cc_i64. */
4900 static void cout_adds32(DisasContext
*s
, DisasOps
*o
)
4902 gen_op_update3_cc_i64(s
, CC_OP_ADD_32
, o
->in1
, o
->in2
, o
->out
);
/* cout_adds64: record cc state as CC_OP_ADD_64 with (in1, in2, out). */
4905 static void cout_adds64(DisasContext
*s
, DisasOps
*o
)
4907 gen_op_update3_cc_i64(s
, CC_OP_ADD_64
, o
->in1
, o
->in2
, o
->out
);
/* cout_addu32: record cc state as CC_OP_ADDU_32 with (in1, in2, out). */
4910 static void cout_addu32(DisasContext
*s
, DisasOps
*o
)
4912 gen_op_update3_cc_i64(s
, CC_OP_ADDU_32
, o
->in1
, o
->in2
, o
->out
);
/* cout_addu64: record cc state as CC_OP_ADDU_64 with (in1, in2, out). */
4915 static void cout_addu64(DisasContext
*s
, DisasOps
*o
)
4917 gen_op_update3_cc_i64(s
, CC_OP_ADDU_64
, o
->in1
, o
->in2
, o
->out
);
/* cout_subs32: record cc state as CC_OP_SUB_32 with (in1, in2, out). */
4920 static void cout_subs32(DisasContext
*s
, DisasOps
*o
)
4922 gen_op_update3_cc_i64(s
, CC_OP_SUB_32
, o
->in1
, o
->in2
, o
->out
);
/* cout_subs64: record cc state as CC_OP_SUB_64 with (in1, in2, out). */
4925 static void cout_subs64(DisasContext
*s
, DisasOps
*o
)
4927 gen_op_update3_cc_i64(s
, CC_OP_SUB_64
, o
->in1
, o
->in2
, o
->out
);
/* cout_subu32: record cc state as CC_OP_SUBU_32 with (in1, in2, out). */
4930 static void cout_subu32(DisasContext
*s
, DisasOps
*o
)
4932 gen_op_update3_cc_i64(s
, CC_OP_SUBU_32
, o
->in1
, o
->in2
, o
->out
);
/* cout_subu64: record cc state as CC_OP_SUBU_64 with (in1, in2, out). */
4935 static void cout_subu64(DisasContext
*s
, DisasOps
*o
)
4937 gen_op_update3_cc_i64(s
, CC_OP_SUBU_64
, o
->in1
, o
->in2
, o
->out
);
4940 /* ====================================================================== */
4941 /* The "PREPeration" generators. These initialize the DisasOps.OUT fields
4942 with the TCG register to which we will write. Used in combination with
4943 the "wout" generators, in some cases we need a new temporary, and in
4944 some cases we can write to a TCG global. */
/* prep_new: allocate a fresh 64-bit TCG temporary as the op's output. */
4946 static void prep_new(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4948 o
->out
= tcg_temp_new_i64();
/* prep_r1: direct the op's output at the TCG global for register r1,
   so the operation writes the GPR in place (no copy-back needed). */
4951 static void prep_r1(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4953 o
->out
= regs
[get_field(f
, r1
)];
4957 /* ====================================================================== */
4958 /* The "Write OUTput" generators. These generally perform some non-trivial
4959 copy of data to TCG globals, or to main memory. The trivial cases are
4960 generally handled by having a "prep" generator install the TCG global
4961 as the destination of the operation. */
/* wout_r1_32: write o->out into register r1 via store_reg32_i64
   (presumably only the low 32 bits of the GPR — confirm helper). */
4963 static void wout_r1_32(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4965 store_reg32_i64(get_field(f
, r1
), o
->out
);
/* wout_m1_32: 32-bit store of o->out to the first-operand address
   o->addr1 (set up earlier by an in1 address generator). */
4968 static void wout_m1_32(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4970 tcg_gen_qemu_st32(o
->out
, o
->addr1
, get_mem_index(s
));
/* wout_m1_64: 64-bit store of o->out to the first-operand address
   o->addr1. */
4973 static void wout_m1_64(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4975 tcg_gen_qemu_st64(o
->out
, o
->addr1
, get_mem_index(s
));
4978 /* ====================================================================== */
4979 /* The "INput 1" generators. These load the first operand to an insn. */
/* in1_r1: first operand is the full contents of register r1. */
4981 static void in1_r1(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4983 o
->in1
= load_reg(get_field(f
, r1
));
/* in1_r2: first operand is the full contents of register r2. */
4986 static void in1_r2(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4988 o
->in1
= load_reg(get_field(f
, r2
));
/* in1_r3: first operand is the full contents of register r3. */
4991 static void in1_r3(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4993 o
->in1
= load_reg(get_field(f
, r3
));
/* in1_la1: compute the first-operand effective address from fields
   b1 (base) + d1 (displacement), no index register, into o->addr1. */
4996 static void in1_la1(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
4998 o
->addr1
= get_address(s
, 0, get_field(f
, b1
), get_field(f
, d1
));
/* in1_m1_32s: first operand is a sign-extended 32-bit load from the
   first-operand address o->addr1, into a fresh 64-bit temp.
   NOTE(review): the address-setup call (in1_la1) is not visible in
   this extraction — confirm against upstream. */
5001 static void in1_m1_32s(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5004 o
->in1
= tcg_temp_new_i64();
5005 tcg_gen_qemu_ld32s(o
->in1
, o
->addr1
, get_mem_index(s
));
/* in1_m1_64: first operand is a 64-bit load from the first-operand
   address o->addr1, into a fresh temp.
   NOTE(review): the address-setup call (in1_la1) is not visible in
   this extraction — confirm against upstream. */
5008 static void in1_m1_64(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5011 o
->in1
= tcg_temp_new_i64();
5012 tcg_gen_qemu_ld64(o
->in1
, o
->addr1
, get_mem_index(s
));
5015 /* ====================================================================== */
5016 /* The "INput 2" generators. These load the second operand to an insn. */
5018 static void in2_r2(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5020 o
->in2
= load_reg(get_field(f
, r2
));
5023 static void in2_r3(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5025 o
->in2
= load_reg(get_field(f
, r3
));
5028 static void in2_r2_32s(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5030 o
->in2
= tcg_temp_new_i64();
5031 tcg_gen_ext32s_i64(o
->in2
, regs
[get_field(f
, r2
)]);
5034 static void in2_r2_32u(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5036 o
->in2
= tcg_temp_new_i64();
5037 tcg_gen_ext32u_i64(o
->in2
, regs
[get_field(f
, r2
)]);
5040 static void in2_a2(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5042 int x2
= have_field(f
, x2
) ? get_field(f
, x2
) : 0;
5043 o
->in2
= get_address(s
, x2
, get_field(f
, b2
), get_field(f
, d2
));
/* Load input 2 from memory: replace the address in o->in2 with the
   16-bit sign-extended value it points at (in-place load).
   NOTE(review): the address-computation line (presumably a preceding
   in2_a2 call) is not visible in this chunk -- confirm against the
   full file.  */
5046 static void in2_m2_16s(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5049 tcg_gen_qemu_ld16s(o
->in2
, o
->in2
, get_mem_index(s
));
/* Load input 2 from memory: replace the address in o->in2 with the
   32-bit sign-extended value it points at (in-place load).
   NOTE(review): the address-computation line (presumably a preceding
   in2_a2 call) is not visible in this chunk -- confirm against the
   full file.  */
5052 static void in2_m2_32s(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5055 tcg_gen_qemu_ld32s(o
->in2
, o
->in2
, get_mem_index(s
));
/* Load input 2 from memory: replace the address in o->in2 with the
   32-bit zero-extended value it points at (in-place load).
   NOTE(review): the address-computation line (presumably a preceding
   in2_a2 call) is not visible in this chunk -- confirm against the
   full file.  */
5058 static void in2_m2_32u(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5061 tcg_gen_qemu_ld32u(o
->in2
, o
->in2
, get_mem_index(s
));
/* Load input 2 from memory: replace the address in o->in2 with the
   full 64-bit value it points at (in-place load).
   NOTE(review): the address-computation line (presumably a preceding
   in2_a2 call) is not visible in this chunk -- confirm against the
   full file.  */
5064 static void in2_m2_64(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5067 tcg_gen_qemu_ld64(o
->in2
, o
->in2
, get_mem_index(s
));
5070 static void in2_i2(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5072 o
->in2
= tcg_const_i64(get_field(f
, i2
));
5075 static void in2_i2_32u(DisasContext
*s
, DisasFields
*f
, DisasOps
*o
)
5077 o
->in2
= tcg_const_i64((uint32_t)get_field(f
, i2
));
5080 /* ====================================================================== */
5082 /* Find opc within the table of insns. This is formulated as a switch
5083 statement so that (1) we get compile-time notice of cut-paste errors
5084 for duplicated opcodes, and (2) the compiler generates the binary
5085 search tree, rather than us having to post-process the table. */
/* NOTE(review): the #undef lines, the remaining DisasInsn initializer
   fields, and the switch statement inside lookup_opc are not visible in
   this chunk -- the fragments below are incomplete.  */
/* C() is shorthand for D() with the extra data argument forced to 0.  */
5087 #define C(OPC, NM, FT, FC, I1, I2, P, W, OP, CC) \
5088 D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, 0)
/* First expansion: each table entry becomes an enumerator insn_<NM>,
   giving every instruction an index into insn_info[].  */
5090 #define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) insn_ ## NM,
5092 enum DisasInsnEnum
{
5093 #include "insn-data.def"
/* Second expansion: each entry becomes a DisasInsn initializer whose
   helper pointers are built by token-pasting the per-field tags onto
   the in1_/in2_/prep_/wout_/cout_/op_ generator families above.  */
5097 #define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) { \
5102 .help_in1 = in1_##I1, \
5103 .help_in2 = in2_##I2, \
5104 .help_prep = prep_##P, \
5105 .help_wout = wout_##W, \
5106 .help_cout = cout_##CC, \
5107 .help_op = op_##OP, \
5111 /* Allow 0 to be used for NULL in the table below. */
/* The table itself, one DisasInsn per line of insn-data.def.  */
5119 static const DisasInsn insn_info
[] = {
5120 #include "insn-data.def"
/* Third expansion: each entry becomes a case label, so lookup_opc is a
   compiler-generated binary search over the combined 16-bit opcode.  */
5124 #define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) \
5125 case OPC: return &insn_info[insn_ ## NM];
5127 static const DisasInsn
*lookup_opc(uint16_t opc
)
5130 #include "insn-data.def"
5139 /* Extract a field from the insn. The INSN should be left-aligned in
5140 the uint64_t so that we can more easily utilize the big-bit-endian
5141 definitions we extract from the Principles of Operation. */
/* NOTE(review): the local declarations, the switch head over f->type,
   the break statements and the signed-extension arithmetic line are
   not visible in this chunk -- the fragment below is incomplete.  */
5143 static void extract_field(DisasFields
*o
, const DisasField
*f
, uint64_t insn
)
5151 /* Zero extract the field from the insn. */
/* Shift the field up to the top bit, then down so only f->size bits
   remain, unsigned.  */
5152 r
= (insn
<< f
->beg
) >> (64 - f
->size
);
5154 /* Sign-extend, or un-swap the field as necessary. */
5156 case 0: /* unsigned */
5158 case 1: /* signed */
5159 assert(f
->size
<= 32);
/* m is the sign-bit mask used to sign-extend the extracted value.  */
5160 m
= 1u << (f
->size
- 1);
/* 20-bit displacement stored as 12-bit low part plus 8-bit signed
   high part; re-assemble with the high byte sign-extended.  */
5163 case 2: /* dl+dh split, signed 20 bit. */
5164 r
= ((int8_t)r
<< 12) | (r
>> 8);
5170 /* Validate that the "compressed" encoding we selected above is valid.
5171 I.e. we haven't made two different original fields overlap. */
5172 assert(((o
->presentC
>> f
->indexC
) & 1) == 0);
/* Record which compressed and original field slots are now filled.  */
5173 o
->presentC
|= 1 << f
->indexC
;
5174 o
->presentO
|= 1 << f
->indexO
;
/* Finally stash the extracted value in its compressed slot.  */
5176 o
->c
[f
->indexC
] = r
;
5179 /* Lookup the insn at the current PC, extracting the operands into O and
5180 returning the info struct for the insn. Returns NULL for invalid insn. */
/* NOTE(review): several lines of this function (final parameter, local
   declarations, the ilen switch head/cases, braces and returns) are not
   visible in this chunk -- the fragment below is incomplete.  */
5182 static const DisasInsn
*extract_insn(CPUS390XState
*env
, DisasContext
*s
,
5185 uint64_t insn
, pc
= s
->pc
;
5187 const DisasInsn
*info
;
/* Read the first halfword; its high byte determines the insn length.  */
5189 insn
= ld_code2(env
, pc
);
5190 op
= (insn
>> 8) & 0xff;
5191 ilen
= get_ilen(op
);
5192 s
->next_pc
= s
->pc
+ ilen
;
/* Left-align the fetched bytes in the 64-bit insn word: 4-byte insns
   occupy the top 32 bits, 6-byte insns the top 48 bits.  */
5199 insn
= ld_code4(env
, pc
) << 32;
5202 insn
= (insn
<< 48) | (ld_code4(env
, pc
+ 2) << 16);
5208 /* We can't actually determine the insn format until we've looked up
5209 the full insn opcode. Which we can't do without locating the
5210 secondary opcode. Assume by default that OP2 is at bit 40; for
5211 those smaller insns that don't actually have a secondary opcode
5212 this will correctly result in OP2 = 0. */
/* Formats whose secondary opcode is the second byte.  */
5218 case 0xb2: /* S, RRF, RRE */
5219 case 0xb3: /* RRE, RRD, RRF */
5220 case 0xb9: /* RRE, RRF */
5221 case 0xe5: /* SSE, SIL */
5222 op2
= (insn
<< 8) >> 56;
/* Formats whose secondary opcode is the low nibble of the second
   byte.  */
5226 case 0xc0: /* RIL */
5227 case 0xc2: /* RIL */
5228 case 0xc4: /* RIL */
5229 case 0xc6: /* RIL */
5230 case 0xc8: /* SSF */
5231 case 0xcc: /* RIL */
5232 op2
= (insn
<< 12) >> 60;
/* SS-format insns have no secondary opcode.  */
5234 case 0xd0 ... 0xdf: /* SS */
5240 case 0xee ... 0xf3: /* SS */
5241 case 0xf8 ... 0xfd: /* SS */
/* Default: secondary opcode at bit 40 (byte 5 of a 6-byte insn).  */
5245 op2
= (insn
<< 40) >> 56;
/* Clear the operand-field struct before extraction.  */
5249 memset(f
, 0, sizeof(*f
));
5253 /* Lookup the instruction. */
5254 info
= lookup_opc(op
<< 8 | op2
);
5256 /* If we found it, extract the operands. */
5258 DisasFormat fmt
= info
->fmt
;
/* Walk every compressed-field descriptor for this format.  */
5261 for (i
= 0; i
< NUM_C_FIELD
; ++i
) {
5262 extract_field(f
, &format_info
[fmt
].op
[i
], insn
);
/* Translate a single instruction at s->pc: look it up, run its in/prep/
   op/wout/cout helper pipeline, then release any temporaries.  Returns
   the ExitStatus that tells the main loop whether to continue.
   NOTE(review): numerous lines (braces, local DisasFields/DisasOps
   declarations, several switch cases and the advance-to-next-insn tail)
   are not visible in this chunk -- the fragment below is incomplete.  */
5268 static ExitStatus
translate_one(CPUS390XState
*env
, DisasContext
*s
)
5270 const DisasInsn
*insn
;
5271 ExitStatus ret
= NO_EXIT
;
5275 insn
= extract_insn(env
, s
, &f
);
5277 /* If not found, try the old interpreter. This includes ILLOPC. */
5279 disas_s390_insn(env
, s
);
/* Map the legacy interpreter's is_jmp state onto an ExitStatus.  */
5280 switch (s
->is_jmp
) {
5288 ret
= EXIT_PC_UPDATED
;
5291 ret
= EXIT_NORETURN
;
5301 /* Set up the structures we use to communicate with the helpers. */
/* g_* flags mark operands that alias TCG globals and so must not be
   freed below.  */
5304 o
.g_out
= o
.g_out2
= o
.g_in1
= o
.g_in2
= false;
5305 TCGV_UNUSED_I64(o
.out
);
5306 TCGV_UNUSED_I64(o
.out2
);
5307 TCGV_UNUSED_I64(o
.in1
);
5308 TCGV_UNUSED_I64(o
.in2
);
5309 TCGV_UNUSED_I64(o
.addr1
);
5311 /* Implement the instruction. */
/* Fixed pipeline order: load inputs, prep output, perform the op,
   write the output back, then update the condition code.  */
5312 if (insn
->help_in1
) {
5313 insn
->help_in1(s
, &f
, &o
);
5315 if (insn
->help_in2
) {
5316 insn
->help_in2(s
, &f
, &o
);
5318 if (insn
->help_prep
) {
5319 insn
->help_prep(s
, &f
, &o
);
5321 if (insn
->help_op
) {
5322 ret
= insn
->help_op(s
, &o
);
5324 if (insn
->help_wout
) {
5325 insn
->help_wout(s
, &f
, &o
);
5327 if (insn
->help_cout
) {
5328 insn
->help_cout(s
, &o
);
5331 /* Free any temporaries created by the helpers. */
/* Only free values that were set and are not TCG globals.  */
5332 if (!TCGV_IS_UNUSED_I64(o
.out
) && !o
.g_out
) {
5333 tcg_temp_free_i64(o
.out
);
5335 if (!TCGV_IS_UNUSED_I64(o
.out2
) && !o
.g_out2
) {
5336 tcg_temp_free_i64(o
.out2
);
5338 if (!TCGV_IS_UNUSED_I64(o
.in1
) && !o
.g_in1
) {
5339 tcg_temp_free_i64(o
.in1
);
5341 if (!TCGV_IS_UNUSED_I64(o
.in2
) && !o
.g_in2
) {
5342 tcg_temp_free_i64(o
.in2
);
5344 if (!TCGV_IS_UNUSED_I64(o
.addr1
)) {
5345 tcg_temp_free_i64(o
.addr1
);
5348 /* Advance to the next instruction. */
/* Main translation loop: repeatedly call translate_one over a basic
   block, recording per-insn metadata when search_pc is requested, until
   a page boundary, buffer/insn-count limit, or a jump ends the TB.
   NOTE(review): a large number of lines (final parameter, locals,
   braces, the do-loop head, the status switch, icount start and the
   exit-TB code) are not visible in this chunk -- the fragment below is
   incomplete.  */
5353 static inline void gen_intermediate_code_internal(CPUS390XState
*env
,
5354 TranslationBlock
*tb
,
5358 target_ulong pc_start
;
5359 uint64_t next_page_start
;
5360 uint16_t *gen_opc_end
;
5362 int num_insns
, max_insns
;
/* 31-bit mode: addresses wrap at 2GB, so mask the start PC.  */
5370 if (!(tb
->flags
& FLAG_MASK_64
)) {
5371 pc_start
&= 0x7fffffff;
/* Initialize per-TB disassembly state.  */
5376 dc
.cc_op
= CC_OP_DYNAMIC
;
5377 do_debug
= dc
.singlestep_enabled
= env
->singlestep_enabled
;
5378 dc
.is_jmp
= DISAS_NEXT
;
5380 gen_opc_end
= tcg_ctx
.gen_opc_buf
+ OPC_MAX_SIZE
;
5382 next_page_start
= (pc_start
& TARGET_PAGE_MASK
) + TARGET_PAGE_SIZE
;
5385 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
5386 if (max_insns
== 0) {
5387 max_insns
= CF_COUNT_MASK
;
/* search_pc bookkeeping: pad skipped opc slots, then record the pc,
   cc_op and icount for the insn about to be translated.  */
5394 j
= tcg_ctx
.gen_opc_ptr
- tcg_ctx
.gen_opc_buf
;
5398 tcg_ctx
.gen_opc_instr_start
[lj
++] = 0;
5401 tcg_ctx
.gen_opc_pc
[lj
] = dc
.pc
;
5402 gen_opc_cc_op
[lj
] = dc
.cc_op
;
5403 tcg_ctx
.gen_opc_instr_start
[lj
] = 1;
5404 tcg_ctx
.gen_opc_icount
[lj
] = num_insns
;
5406 if (++num_insns
== max_insns
&& (tb
->cflags
& CF_LAST_IO
)) {
5410 if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP
| CPU_LOG_TB_OP_OPT
))) {
5411 tcg_gen_debug_insn_start(dc
.pc
);
/* Stop translation at any breakpoint on the current pc.  */
5415 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
5416 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
5417 if (bp
->pc
== dc
.pc
) {
5418 status
= EXIT_PC_STALE
;
5424 if (status
== NO_EXIT
) {
5425 status
= translate_one(env
, &dc
);
5428 /* If we reach a page boundary, are single stepping,
5429 or exhaust instruction count, stop generation. */
5430 if (status
== NO_EXIT
5431 && (dc
.pc
>= next_page_start
5432 || tcg_ctx
.gen_opc_ptr
>= gen_opc_end
5433 || num_insns
>= max_insns
5435 || env
->singlestep_enabled
)) {
5436 status
= EXIT_PC_STALE
;
5438 } while (status
== NO_EXIT
);
5440 if (tb
->cflags
& CF_LAST_IO
) {
/* Stale PC: write the current pc back into the PSW before exiting.  */
5449 update_psw_addr(&dc
);
5451 case EXIT_PC_UPDATED
:
5452 if (singlestep
&& dc
.cc_op
!= CC_OP_DYNAMIC
) {
5453 gen_op_calc_cc(&dc
);
5455 /* Next TB starts off with CC_OP_DYNAMIC,
5456 so make sure the cc op type is in env */
5457 gen_op_set_cc_op(&dc
);
5460 gen_exception(EXCP_DEBUG
);
5462 /* Generate the return instruction */
5470 gen_icount_end(tb
, num_insns
);
5471 *tcg_ctx
.gen_opc_ptr
= INDEX_op_end
;
/* search_pc: zero-pad the trailing opc slots.  */
5473 j
= tcg_ctx
.gen_opc_ptr
- tcg_ctx
.gen_opc_buf
;
5476 tcg_ctx
.gen_opc_instr_start
[lj
++] = 0;
5479 tb
->size
= dc
.pc
- pc_start
;
5480 tb
->icount
= num_insns
;
5483 #if defined(S390X_DEBUG_DISAS)
5484 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
5485 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
5486 log_target_disas(env
, pc_start
, dc
.pc
- pc_start
, 1);
5492 void gen_intermediate_code (CPUS390XState
*env
, struct TranslationBlock
*tb
)
5494 gen_intermediate_code_internal(env
, tb
, 0);
5497 void gen_intermediate_code_pc (CPUS390XState
*env
, struct TranslationBlock
*tb
)
5499 gen_intermediate_code_internal(env
, tb
, 1);
5502 void restore_state_to_opc(CPUS390XState
*env
, TranslationBlock
*tb
, int pc_pos
)
5505 env
->psw
.addr
= tcg_ctx
.gen_opc_pc
[pc_pos
];
5506 cc_op
= gen_opc_cc_op
[pc_pos
];
5507 if ((cc_op
!= CC_OP_DYNAMIC
) && (cc_op
!= CC_OP_STATIC
)) {