4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
21 /* #define DEBUG_ILLEGAL_INSTRUCTIONS */
22 /* #define DEBUG_INLINE_BRANCHES */
23 #define S390X_DEBUG_DISAS
24 /* #define S390X_DEBUG_DISAS_VERBOSE */
26 #ifdef S390X_DEBUG_DISAS_VERBOSE
27 # define LOG_DISAS(...) qemu_log(__VA_ARGS__)
29 # define LOG_DISAS(...) do { } while (0)
37 /* global register indexes */
38 static TCGv_ptr cpu_env
;
40 #include "gen-icount.h"
45 typedef struct DisasContext DisasContext
;
50 struct TranslationBlock
*tb
;
55 static void gen_op_calc_cc(DisasContext
*s
);
57 #ifdef DEBUG_INLINE_BRANCHES
58 static uint64_t inline_branch_hit
[CC_OP_MAX
];
59 static uint64_t inline_branch_miss
[CC_OP_MAX
];
/* Log the raw instruction word when verbose disas debugging is enabled;
   compiles to nothing otherwise (LOG_DISAS is a no-op then). */
static inline void debug_insn(uint64_t insn)
{
    LOG_DISAS("insn: 0x%" PRIx64 "\n", insn);
}
67 static inline uint64_t pc_to_link_info(DisasContext
*s
, uint64_t pc
)
69 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
70 if (s
->tb
->flags
& FLAG_MASK_32
) {
71 return pc
| 0x80000000;
77 void cpu_dump_state(CPUS390XState
*env
, FILE *f
, fprintf_function cpu_fprintf
,
82 for (i
= 0; i
< 16; i
++) {
83 cpu_fprintf(f
, "R%02d=%016" PRIx64
, i
, env
->regs
[i
]);
91 for (i
= 0; i
< 16; i
++) {
92 cpu_fprintf(f
, "F%02d=%016" PRIx64
, i
, *(uint64_t *)&env
->fregs
[i
]);
100 cpu_fprintf(f
, "\n");
102 #ifndef CONFIG_USER_ONLY
103 for (i
= 0; i
< 16; i
++) {
104 cpu_fprintf(f
, "C%02d=%016" PRIx64
, i
, env
->cregs
[i
]);
106 cpu_fprintf(f
, "\n");
113 cpu_fprintf(f
, "\n");
115 if (env
->cc_op
> 3) {
116 cpu_fprintf(f
, "PSW=mask %016" PRIx64
" addr %016" PRIx64
" cc %15s\n",
117 env
->psw
.mask
, env
->psw
.addr
, cc_name(env
->cc_op
));
119 cpu_fprintf(f
, "PSW=mask %016" PRIx64
" addr %016" PRIx64
" cc %02x\n",
120 env
->psw
.mask
, env
->psw
.addr
, env
->cc_op
);
123 #ifdef DEBUG_INLINE_BRANCHES
124 for (i
= 0; i
< CC_OP_MAX
; i
++) {
125 cpu_fprintf(f
, " %15s = %10ld\t%10ld\n", cc_name(i
),
126 inline_branch_miss
[i
], inline_branch_hit
[i
]);
131 static TCGv_i64 psw_addr
;
132 static TCGv_i64 psw_mask
;
134 static TCGv_i32 cc_op
;
135 static TCGv_i64 cc_src
;
136 static TCGv_i64 cc_dst
;
137 static TCGv_i64 cc_vr
;
139 static char cpu_reg_names
[10*3 + 6*4];
140 static TCGv_i64 regs
[16];
142 static uint8_t gen_opc_cc_op
[OPC_BUF_SIZE
];
144 void s390x_translate_init(void)
147 size_t cpu_reg_names_size
= sizeof(cpu_reg_names
);
150 cpu_env
= tcg_global_reg_new_ptr(TCG_AREG0
, "env");
151 psw_addr
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUS390XState
, psw
.addr
),
153 psw_mask
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUS390XState
, psw
.mask
),
156 cc_op
= tcg_global_mem_new_i32(TCG_AREG0
, offsetof(CPUS390XState
, cc_op
),
158 cc_src
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUS390XState
, cc_src
),
160 cc_dst
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUS390XState
, cc_dst
),
162 cc_vr
= tcg_global_mem_new_i64(TCG_AREG0
, offsetof(CPUS390XState
, cc_vr
),
166 for (i
= 0; i
< 16; i
++) {
167 snprintf(p
, cpu_reg_names_size
, "r%d", i
);
168 regs
[i
] = tcg_global_mem_new(TCG_AREG0
,
169 offsetof(CPUS390XState
, regs
[i
]), p
);
170 p
+= (i
< 10) ? 3 : 4;
171 cpu_reg_names_size
-= (i
< 10) ? 3 : 4;
175 static inline TCGv_i64
load_reg(int reg
)
177 TCGv_i64 r
= tcg_temp_new_i64();
178 tcg_gen_mov_i64(r
, regs
[reg
]);
182 static inline TCGv_i64
load_freg(int reg
)
184 TCGv_i64 r
= tcg_temp_new_i64();
185 tcg_gen_ld_i64(r
, cpu_env
, offsetof(CPUS390XState
, fregs
[reg
].d
));
189 static inline TCGv_i32
load_freg32(int reg
)
191 TCGv_i32 r
= tcg_temp_new_i32();
192 tcg_gen_ld_i32(r
, cpu_env
, offsetof(CPUS390XState
, fregs
[reg
].l
.upper
));
196 static inline TCGv_i32
load_reg32(int reg
)
198 TCGv_i32 r
= tcg_temp_new_i32();
199 tcg_gen_trunc_i64_i32(r
, regs
[reg
]);
203 static inline TCGv_i64
load_reg32_i64(int reg
)
205 TCGv_i64 r
= tcg_temp_new_i64();
206 tcg_gen_ext32s_i64(r
, regs
[reg
]);
210 static inline void store_reg(int reg
, TCGv_i64 v
)
212 tcg_gen_mov_i64(regs
[reg
], v
);
215 static inline void store_freg(int reg
, TCGv_i64 v
)
217 tcg_gen_st_i64(v
, cpu_env
, offsetof(CPUS390XState
, fregs
[reg
].d
));
220 static inline void store_reg32(int reg
, TCGv_i32 v
)
222 #if HOST_LONG_BITS == 32
223 tcg_gen_mov_i32(TCGV_LOW(regs
[reg
]), v
);
225 TCGv_i64 tmp
= tcg_temp_new_i64();
226 tcg_gen_extu_i32_i64(tmp
, v
);
227 /* 32 bit register writes keep the upper half */
228 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], tmp
, 0, 32);
229 tcg_temp_free_i64(tmp
);
233 static inline void store_reg32_i64(int reg
, TCGv_i64 v
)
235 /* 32 bit register writes keep the upper half */
236 #if HOST_LONG_BITS == 32
237 tcg_gen_mov_i32(TCGV_LOW(regs
[reg
]), TCGV_LOW(v
));
239 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], v
, 0, 32);
243 static inline void store_reg16(int reg
, TCGv_i32 v
)
245 TCGv_i64 tmp
= tcg_temp_new_i64();
246 tcg_gen_extu_i32_i64(tmp
, v
);
247 /* 16 bit register writes keep the upper bytes */
248 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], tmp
, 0, 16);
249 tcg_temp_free_i64(tmp
);
252 static inline void store_reg8(int reg
, TCGv_i64 v
)
254 /* 8 bit register writes keep the upper bytes */
255 tcg_gen_deposit_i64(regs
[reg
], regs
[reg
], v
, 0, 8);
258 static inline void store_freg32(int reg
, TCGv_i32 v
)
260 tcg_gen_st_i32(v
, cpu_env
, offsetof(CPUS390XState
, fregs
[reg
].l
.upper
));
263 static inline void update_psw_addr(DisasContext
*s
)
266 tcg_gen_movi_i64(psw_addr
, s
->pc
);
269 static inline void potential_page_fault(DisasContext
*s
)
271 #ifndef CONFIG_USER_ONLY
277 static inline uint64_t ld_code2(CPUS390XState
*env
, uint64_t pc
)
279 return (uint64_t)cpu_lduw_code(env
, pc
);
282 static inline uint64_t ld_code4(CPUS390XState
*env
, uint64_t pc
)
284 return (uint64_t)cpu_ldl_code(env
, pc
);
287 static inline uint64_t ld_code6(CPUS390XState
*env
, uint64_t pc
)
290 opc
= (uint64_t)cpu_lduw_code(env
, pc
) << 32;
291 opc
|= (uint64_t)(uint32_t)cpu_ldl_code(env
, pc
+ 2);
295 static inline int get_mem_index(DisasContext
*s
)
297 switch (s
->tb
->flags
& FLAG_MASK_ASC
) {
298 case PSW_ASC_PRIMARY
>> 32:
300 case PSW_ASC_SECONDARY
>> 32:
302 case PSW_ASC_HOME
>> 32:
310 static inline void gen_debug(DisasContext
*s
)
312 TCGv_i32 tmp
= tcg_const_i32(EXCP_DEBUG
);
315 gen_helper_exception(cpu_env
, tmp
);
316 tcg_temp_free_i32(tmp
);
317 s
->is_jmp
= DISAS_EXCP
;
320 #ifdef CONFIG_USER_ONLY
322 static void gen_illegal_opcode(CPUS390XState
*env
, DisasContext
*s
, int ilc
)
324 TCGv_i32 tmp
= tcg_const_i32(EXCP_SPEC
);
327 gen_helper_exception(cpu_env
, tmp
);
328 tcg_temp_free_i32(tmp
);
329 s
->is_jmp
= DISAS_EXCP
;
332 #else /* CONFIG_USER_ONLY */
334 static void debug_print_inst(CPUS390XState
*env
, DisasContext
*s
, int ilc
)
336 #ifdef DEBUG_ILLEGAL_INSTRUCTIONS
341 inst
= ld_code2(env
, s
->pc
);
344 inst
= ld_code4(env
, s
->pc
);
347 inst
= ld_code6(env
, s
->pc
);
351 fprintf(stderr
, "Illegal instruction [%d at %016" PRIx64
"]: 0x%016"
352 PRIx64
"\n", ilc
, s
->pc
, inst
);
356 static void gen_program_exception(CPUS390XState
*env
, DisasContext
*s
, int ilc
,
361 debug_print_inst(env
, s
, ilc
);
363 /* remember what pgm exeption this was */
364 tmp
= tcg_const_i32(code
);
365 tcg_gen_st_i32(tmp
, cpu_env
, offsetof(CPUS390XState
, int_pgm_code
));
366 tcg_temp_free_i32(tmp
);
368 tmp
= tcg_const_i32(ilc
);
369 tcg_gen_st_i32(tmp
, cpu_env
, offsetof(CPUS390XState
, int_pgm_ilc
));
370 tcg_temp_free_i32(tmp
);
372 /* advance past instruction */
379 /* trigger exception */
380 tmp
= tcg_const_i32(EXCP_PGM
);
381 gen_helper_exception(cpu_env
, tmp
);
382 tcg_temp_free_i32(tmp
);
385 s
->is_jmp
= DISAS_EXCP
;
389 static void gen_illegal_opcode(CPUS390XState
*env
, DisasContext
*s
, int ilc
)
391 gen_program_exception(env
, s
, ilc
, PGM_SPECIFICATION
);
394 static void gen_privileged_exception(CPUS390XState
*env
, DisasContext
*s
,
397 gen_program_exception(env
, s
, ilc
, PGM_PRIVILEGED
);
400 static void check_privileged(CPUS390XState
*env
, DisasContext
*s
, int ilc
)
402 if (s
->tb
->flags
& (PSW_MASK_PSTATE
>> 32)) {
403 gen_privileged_exception(env
, s
, ilc
);
407 #endif /* CONFIG_USER_ONLY */
409 static TCGv_i64
get_address(DisasContext
*s
, int x2
, int b2
, int d2
)
413 /* 31-bitify the immediate part; register contents are dealt with below */
414 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
420 tmp
= tcg_const_i64(d2
);
421 tcg_gen_add_i64(tmp
, tmp
, regs
[x2
]);
426 tcg_gen_add_i64(tmp
, tmp
, regs
[b2
]);
430 tmp
= tcg_const_i64(d2
);
431 tcg_gen_add_i64(tmp
, tmp
, regs
[b2
]);
436 tmp
= tcg_const_i64(d2
);
439 /* 31-bit mode mask if there are values loaded from registers */
440 if (!(s
->tb
->flags
& FLAG_MASK_64
) && (x2
|| b2
)) {
441 tcg_gen_andi_i64(tmp
, tmp
, 0x7fffffffUL
);
447 static void gen_op_movi_cc(DisasContext
*s
, uint32_t val
)
449 s
->cc_op
= CC_OP_CONST0
+ val
;
452 static void gen_op_update1_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 dst
)
454 tcg_gen_discard_i64(cc_src
);
455 tcg_gen_mov_i64(cc_dst
, dst
);
456 tcg_gen_discard_i64(cc_vr
);
460 static void gen_op_update1_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 dst
)
462 tcg_gen_discard_i64(cc_src
);
463 tcg_gen_extu_i32_i64(cc_dst
, dst
);
464 tcg_gen_discard_i64(cc_vr
);
468 static void gen_op_update2_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 src
,
471 tcg_gen_mov_i64(cc_src
, src
);
472 tcg_gen_mov_i64(cc_dst
, dst
);
473 tcg_gen_discard_i64(cc_vr
);
477 static void gen_op_update2_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 src
,
480 tcg_gen_extu_i32_i64(cc_src
, src
);
481 tcg_gen_extu_i32_i64(cc_dst
, dst
);
482 tcg_gen_discard_i64(cc_vr
);
486 static void gen_op_update3_cc_i64(DisasContext
*s
, enum cc_op op
, TCGv_i64 src
,
487 TCGv_i64 dst
, TCGv_i64 vr
)
489 tcg_gen_mov_i64(cc_src
, src
);
490 tcg_gen_mov_i64(cc_dst
, dst
);
491 tcg_gen_mov_i64(cc_vr
, vr
);
495 static void gen_op_update3_cc_i32(DisasContext
*s
, enum cc_op op
, TCGv_i32 src
,
496 TCGv_i32 dst
, TCGv_i32 vr
)
498 tcg_gen_extu_i32_i64(cc_src
, src
);
499 tcg_gen_extu_i32_i64(cc_dst
, dst
);
500 tcg_gen_extu_i32_i64(cc_vr
, vr
);
504 static inline void set_cc_nz_u32(DisasContext
*s
, TCGv_i32 val
)
506 gen_op_update1_cc_i32(s
, CC_OP_NZ
, val
);
509 static inline void set_cc_nz_u64(DisasContext
*s
, TCGv_i64 val
)
511 gen_op_update1_cc_i64(s
, CC_OP_NZ
, val
);
514 static inline void cmp_32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
517 gen_op_update2_cc_i32(s
, cond
, v1
, v2
);
520 static inline void cmp_64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
523 gen_op_update2_cc_i64(s
, cond
, v1
, v2
);
526 static inline void cmp_s32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
)
528 cmp_32(s
, v1
, v2
, CC_OP_LTGT_32
);
531 static inline void cmp_u32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
)
533 cmp_32(s
, v1
, v2
, CC_OP_LTUGTU_32
);
536 static inline void cmp_s32c(DisasContext
*s
, TCGv_i32 v1
, int32_t v2
)
538 /* XXX optimize for the constant? put it in s? */
539 TCGv_i32 tmp
= tcg_const_i32(v2
);
540 cmp_32(s
, v1
, tmp
, CC_OP_LTGT_32
);
541 tcg_temp_free_i32(tmp
);
544 static inline void cmp_u32c(DisasContext
*s
, TCGv_i32 v1
, uint32_t v2
)
546 TCGv_i32 tmp
= tcg_const_i32(v2
);
547 cmp_32(s
, v1
, tmp
, CC_OP_LTUGTU_32
);
548 tcg_temp_free_i32(tmp
);
551 static inline void cmp_s64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
)
553 cmp_64(s
, v1
, v2
, CC_OP_LTGT_64
);
556 static inline void cmp_u64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
)
558 cmp_64(s
, v1
, v2
, CC_OP_LTUGTU_64
);
561 static inline void cmp_s64c(DisasContext
*s
, TCGv_i64 v1
, int64_t v2
)
563 TCGv_i64 tmp
= tcg_const_i64(v2
);
565 tcg_temp_free_i64(tmp
);
568 static inline void cmp_u64c(DisasContext
*s
, TCGv_i64 v1
, uint64_t v2
)
570 TCGv_i64 tmp
= tcg_const_i64(v2
);
572 tcg_temp_free_i64(tmp
);
575 static inline void set_cc_s32(DisasContext
*s
, TCGv_i32 val
)
577 gen_op_update1_cc_i32(s
, CC_OP_LTGT0_32
, val
);
580 static inline void set_cc_s64(DisasContext
*s
, TCGv_i64 val
)
582 gen_op_update1_cc_i64(s
, CC_OP_LTGT0_64
, val
);
585 static void set_cc_add64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
, TCGv_i64 vr
)
587 gen_op_update3_cc_i64(s
, CC_OP_ADD_64
, v1
, v2
, vr
);
590 static void set_cc_addu64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
593 gen_op_update3_cc_i64(s
, CC_OP_ADDU_64
, v1
, v2
, vr
);
596 static void set_cc_sub64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
, TCGv_i64 vr
)
598 gen_op_update3_cc_i64(s
, CC_OP_SUB_64
, v1
, v2
, vr
);
601 static void set_cc_subu64(DisasContext
*s
, TCGv_i64 v1
, TCGv_i64 v2
,
604 gen_op_update3_cc_i64(s
, CC_OP_SUBU_64
, v1
, v2
, vr
);
607 static void set_cc_abs64(DisasContext
*s
, TCGv_i64 v1
)
609 gen_op_update1_cc_i64(s
, CC_OP_ABS_64
, v1
);
612 static void set_cc_nabs64(DisasContext
*s
, TCGv_i64 v1
)
614 gen_op_update1_cc_i64(s
, CC_OP_NABS_64
, v1
);
617 static void set_cc_add32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
, TCGv_i32 vr
)
619 gen_op_update3_cc_i32(s
, CC_OP_ADD_32
, v1
, v2
, vr
);
622 static void set_cc_addu32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
625 gen_op_update3_cc_i32(s
, CC_OP_ADDU_32
, v1
, v2
, vr
);
628 static void set_cc_sub32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
, TCGv_i32 vr
)
630 gen_op_update3_cc_i32(s
, CC_OP_SUB_32
, v1
, v2
, vr
);
633 static void set_cc_subu32(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
,
636 gen_op_update3_cc_i32(s
, CC_OP_SUBU_32
, v1
, v2
, vr
);
639 static void set_cc_abs32(DisasContext
*s
, TCGv_i32 v1
)
641 gen_op_update1_cc_i32(s
, CC_OP_ABS_32
, v1
);
644 static void set_cc_nabs32(DisasContext
*s
, TCGv_i32 v1
)
646 gen_op_update1_cc_i32(s
, CC_OP_NABS_32
, v1
);
649 static void set_cc_comp32(DisasContext
*s
, TCGv_i32 v1
)
651 gen_op_update1_cc_i32(s
, CC_OP_COMP_32
, v1
);
654 static void set_cc_comp64(DisasContext
*s
, TCGv_i64 v1
)
656 gen_op_update1_cc_i64(s
, CC_OP_COMP_64
, v1
);
659 static void set_cc_icm(DisasContext
*s
, TCGv_i32 v1
, TCGv_i32 v2
)
661 gen_op_update2_cc_i32(s
, CC_OP_ICM
, v1
, v2
);
664 static void set_cc_cmp_f32_i64(DisasContext
*s
, TCGv_i32 v1
, TCGv_i64 v2
)
666 tcg_gen_extu_i32_i64(cc_src
, v1
);
667 tcg_gen_mov_i64(cc_dst
, v2
);
668 tcg_gen_discard_i64(cc_vr
);
669 s
->cc_op
= CC_OP_LTGT_F32
;
672 static void gen_set_cc_nz_f32(DisasContext
*s
, TCGv_i32 v1
)
674 gen_op_update1_cc_i32(s
, CC_OP_NZ_F32
, v1
);
677 /* CC value is in env->cc_op */
678 static inline void set_cc_static(DisasContext
*s
)
680 tcg_gen_discard_i64(cc_src
);
681 tcg_gen_discard_i64(cc_dst
);
682 tcg_gen_discard_i64(cc_vr
);
683 s
->cc_op
= CC_OP_STATIC
;
686 static inline void gen_op_set_cc_op(DisasContext
*s
)
688 if (s
->cc_op
!= CC_OP_DYNAMIC
&& s
->cc_op
!= CC_OP_STATIC
) {
689 tcg_gen_movi_i32(cc_op
, s
->cc_op
);
693 static inline void gen_update_cc_op(DisasContext
*s
)
698 /* calculates cc into cc_op */
699 static void gen_op_calc_cc(DisasContext
*s
)
701 TCGv_i32 local_cc_op
= tcg_const_i32(s
->cc_op
);
702 TCGv_i64 dummy
= tcg_const_i64(0);
709 /* s->cc_op is the cc value */
710 tcg_gen_movi_i32(cc_op
, s
->cc_op
- CC_OP_CONST0
);
713 /* env->cc_op already is the cc value */
727 gen_helper_calc_cc(cc_op
, cpu_env
, local_cc_op
, dummy
, cc_dst
, dummy
);
732 case CC_OP_LTUGTU_32
:
733 case CC_OP_LTUGTU_64
:
740 gen_helper_calc_cc(cc_op
, cpu_env
, local_cc_op
, cc_src
, cc_dst
, dummy
);
751 gen_helper_calc_cc(cc_op
, cpu_env
, local_cc_op
, cc_src
, cc_dst
, cc_vr
);
754 /* unknown operation - assume 3 arguments and cc_op in env */
755 gen_helper_calc_cc(cc_op
, cpu_env
, cc_op
, cc_src
, cc_dst
, cc_vr
);
761 tcg_temp_free_i32(local_cc_op
);
763 /* We now have cc in cc_op as constant */
767 static inline void decode_rr(DisasContext
*s
, uint64_t insn
, int *r1
, int *r2
)
771 *r1
= (insn
>> 4) & 0xf;
775 static inline TCGv_i64
decode_rx(DisasContext
*s
, uint64_t insn
, int *r1
,
776 int *x2
, int *b2
, int *d2
)
780 *r1
= (insn
>> 20) & 0xf;
781 *x2
= (insn
>> 16) & 0xf;
782 *b2
= (insn
>> 12) & 0xf;
785 return get_address(s
, *x2
, *b2
, *d2
);
788 static inline void decode_rs(DisasContext
*s
, uint64_t insn
, int *r1
, int *r3
,
793 *r1
= (insn
>> 20) & 0xf;
795 *r3
= (insn
>> 16) & 0xf;
796 *b2
= (insn
>> 12) & 0xf;
800 static inline TCGv_i64
decode_si(DisasContext
*s
, uint64_t insn
, int *i2
,
805 *i2
= (insn
>> 16) & 0xff;
806 *b1
= (insn
>> 12) & 0xf;
809 return get_address(s
, 0, *b1
, *d1
);
812 static inline void gen_goto_tb(DisasContext
*s
, int tb_num
, target_ulong pc
)
814 TranslationBlock
*tb
;
819 /* NOTE: we handle the case where the TB spans two pages here */
820 if ((pc
& TARGET_PAGE_MASK
) == (tb
->pc
& TARGET_PAGE_MASK
) ||
821 (pc
& TARGET_PAGE_MASK
) == ((s
->pc
- 1) & TARGET_PAGE_MASK
)) {
822 /* jump to same page: we can use a direct jump */
823 tcg_gen_goto_tb(tb_num
);
824 tcg_gen_movi_i64(psw_addr
, pc
);
825 tcg_gen_exit_tb((tcg_target_long
)tb
+ tb_num
);
827 /* jump to another page: currently not optimized */
828 tcg_gen_movi_i64(psw_addr
, pc
);
833 static inline void account_noninline_branch(DisasContext
*s
, int cc_op
)
835 #ifdef DEBUG_INLINE_BRANCHES
836 inline_branch_miss
[cc_op
]++;
840 static inline void account_inline_branch(DisasContext
*s
)
842 #ifdef DEBUG_INLINE_BRANCHES
843 inline_branch_hit
[s
->cc_op
]++;
847 static void gen_jcc(DisasContext
*s
, uint32_t mask
, int skip
)
849 TCGv_i32 tmp
, tmp2
, r
;
855 tmp
= tcg_temp_new_i32();
856 tcg_gen_trunc_i64_i32(tmp
, cc_dst
);
858 case 0x8 | 0x4: /* dst <= 0 */
859 tcg_gen_brcondi_i32(TCG_COND_GT
, tmp
, 0, skip
);
861 case 0x8 | 0x2: /* dst >= 0 */
862 tcg_gen_brcondi_i32(TCG_COND_LT
, tmp
, 0, skip
);
864 case 0x8: /* dst == 0 */
865 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp
, 0, skip
);
867 case 0x7: /* dst != 0 */
868 case 0x6: /* dst != 0 */
869 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp
, 0, skip
);
871 case 0x4: /* dst < 0 */
872 tcg_gen_brcondi_i32(TCG_COND_GE
, tmp
, 0, skip
);
874 case 0x2: /* dst > 0 */
875 tcg_gen_brcondi_i32(TCG_COND_LE
, tmp
, 0, skip
);
878 tcg_temp_free_i32(tmp
);
881 account_inline_branch(s
);
882 tcg_temp_free_i32(tmp
);
886 case 0x8 | 0x4: /* dst <= 0 */
887 tcg_gen_brcondi_i64(TCG_COND_GT
, cc_dst
, 0, skip
);
889 case 0x8 | 0x2: /* dst >= 0 */
890 tcg_gen_brcondi_i64(TCG_COND_LT
, cc_dst
, 0, skip
);
892 case 0x8: /* dst == 0 */
893 tcg_gen_brcondi_i64(TCG_COND_NE
, cc_dst
, 0, skip
);
895 case 0x7: /* dst != 0 */
896 case 0x6: /* dst != 0 */
897 tcg_gen_brcondi_i64(TCG_COND_EQ
, cc_dst
, 0, skip
);
899 case 0x4: /* dst < 0 */
900 tcg_gen_brcondi_i64(TCG_COND_GE
, cc_dst
, 0, skip
);
902 case 0x2: /* dst > 0 */
903 tcg_gen_brcondi_i64(TCG_COND_LE
, cc_dst
, 0, skip
);
908 account_inline_branch(s
);
911 tmp
= tcg_temp_new_i32();
912 tmp2
= tcg_temp_new_i32();
913 tcg_gen_trunc_i64_i32(tmp
, cc_src
);
914 tcg_gen_trunc_i64_i32(tmp2
, cc_dst
);
916 case 0x8 | 0x4: /* src <= dst */
917 tcg_gen_brcond_i32(TCG_COND_GT
, tmp
, tmp2
, skip
);
919 case 0x8 | 0x2: /* src >= dst */
920 tcg_gen_brcond_i32(TCG_COND_LT
, tmp
, tmp2
, skip
);
922 case 0x8: /* src == dst */
923 tcg_gen_brcond_i32(TCG_COND_NE
, tmp
, tmp2
, skip
);
925 case 0x7: /* src != dst */
926 case 0x6: /* src != dst */
927 tcg_gen_brcond_i32(TCG_COND_EQ
, tmp
, tmp2
, skip
);
929 case 0x4: /* src < dst */
930 tcg_gen_brcond_i32(TCG_COND_GE
, tmp
, tmp2
, skip
);
932 case 0x2: /* src > dst */
933 tcg_gen_brcond_i32(TCG_COND_LE
, tmp
, tmp2
, skip
);
936 tcg_temp_free_i32(tmp
);
937 tcg_temp_free_i32(tmp2
);
940 account_inline_branch(s
);
941 tcg_temp_free_i32(tmp
);
942 tcg_temp_free_i32(tmp2
);
946 case 0x8 | 0x4: /* src <= dst */
947 tcg_gen_brcond_i64(TCG_COND_GT
, cc_src
, cc_dst
, skip
);
949 case 0x8 | 0x2: /* src >= dst */
950 tcg_gen_brcond_i64(TCG_COND_LT
, cc_src
, cc_dst
, skip
);
952 case 0x8: /* src == dst */
953 tcg_gen_brcond_i64(TCG_COND_NE
, cc_src
, cc_dst
, skip
);
955 case 0x7: /* src != dst */
956 case 0x6: /* src != dst */
957 tcg_gen_brcond_i64(TCG_COND_EQ
, cc_src
, cc_dst
, skip
);
959 case 0x4: /* src < dst */
960 tcg_gen_brcond_i64(TCG_COND_GE
, cc_src
, cc_dst
, skip
);
962 case 0x2: /* src > dst */
963 tcg_gen_brcond_i64(TCG_COND_LE
, cc_src
, cc_dst
, skip
);
968 account_inline_branch(s
);
970 case CC_OP_LTUGTU_32
:
971 tmp
= tcg_temp_new_i32();
972 tmp2
= tcg_temp_new_i32();
973 tcg_gen_trunc_i64_i32(tmp
, cc_src
);
974 tcg_gen_trunc_i64_i32(tmp2
, cc_dst
);
976 case 0x8 | 0x4: /* src <= dst */
977 tcg_gen_brcond_i32(TCG_COND_GTU
, tmp
, tmp2
, skip
);
979 case 0x8 | 0x2: /* src >= dst */
980 tcg_gen_brcond_i32(TCG_COND_LTU
, tmp
, tmp2
, skip
);
982 case 0x8: /* src == dst */
983 tcg_gen_brcond_i32(TCG_COND_NE
, tmp
, tmp2
, skip
);
985 case 0x7: /* src != dst */
986 case 0x6: /* src != dst */
987 tcg_gen_brcond_i32(TCG_COND_EQ
, tmp
, tmp2
, skip
);
989 case 0x4: /* src < dst */
990 tcg_gen_brcond_i32(TCG_COND_GEU
, tmp
, tmp2
, skip
);
992 case 0x2: /* src > dst */
993 tcg_gen_brcond_i32(TCG_COND_LEU
, tmp
, tmp2
, skip
);
996 tcg_temp_free_i32(tmp
);
997 tcg_temp_free_i32(tmp2
);
1000 account_inline_branch(s
);
1001 tcg_temp_free_i32(tmp
);
1002 tcg_temp_free_i32(tmp2
);
1004 case CC_OP_LTUGTU_64
:
1006 case 0x8 | 0x4: /* src <= dst */
1007 tcg_gen_brcond_i64(TCG_COND_GTU
, cc_src
, cc_dst
, skip
);
1009 case 0x8 | 0x2: /* src >= dst */
1010 tcg_gen_brcond_i64(TCG_COND_LTU
, cc_src
, cc_dst
, skip
);
1012 case 0x8: /* src == dst */
1013 tcg_gen_brcond_i64(TCG_COND_NE
, cc_src
, cc_dst
, skip
);
1015 case 0x7: /* src != dst */
1016 case 0x6: /* src != dst */
1017 tcg_gen_brcond_i64(TCG_COND_EQ
, cc_src
, cc_dst
, skip
);
1019 case 0x4: /* src < dst */
1020 tcg_gen_brcond_i64(TCG_COND_GEU
, cc_src
, cc_dst
, skip
);
1022 case 0x2: /* src > dst */
1023 tcg_gen_brcond_i64(TCG_COND_LEU
, cc_src
, cc_dst
, skip
);
1028 account_inline_branch(s
);
1032 /* dst == 0 || dst != 0 */
1034 case 0x8 | 0x4 | 0x2:
1035 case 0x8 | 0x4 | 0x2 | 0x1:
1036 case 0x8 | 0x4 | 0x1:
1041 case 0x8 | 0x2 | 0x1:
1043 tcg_gen_brcondi_i64(TCG_COND_NE
, cc_dst
, 0, skip
);
1048 case 0x4 | 0x2 | 0x1:
1050 tcg_gen_brcondi_i64(TCG_COND_EQ
, cc_dst
, 0, skip
);
1055 account_inline_branch(s
);
1058 tmp
= tcg_temp_new_i32();
1059 tmp2
= tcg_temp_new_i32();
1061 tcg_gen_trunc_i64_i32(tmp
, cc_src
);
1062 tcg_gen_trunc_i64_i32(tmp2
, cc_dst
);
1063 tcg_gen_and_i32(tmp
, tmp
, tmp2
);
1065 case 0x8: /* val & mask == 0 */
1066 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp
, 0, skip
);
1068 case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
1069 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp
, 0, skip
);
1072 tcg_temp_free_i32(tmp
);
1073 tcg_temp_free_i32(tmp2
);
1076 tcg_temp_free_i32(tmp
);
1077 tcg_temp_free_i32(tmp2
);
1078 account_inline_branch(s
);
1081 tmp64
= tcg_temp_new_i64();
1083 tcg_gen_and_i64(tmp64
, cc_src
, cc_dst
);
1085 case 0x8: /* val & mask == 0 */
1086 tcg_gen_brcondi_i64(TCG_COND_NE
, tmp64
, 0, skip
);
1088 case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
1089 tcg_gen_brcondi_i64(TCG_COND_EQ
, tmp64
, 0, skip
);
1092 tcg_temp_free_i64(tmp64
);
1095 tcg_temp_free_i64(tmp64
);
1096 account_inline_branch(s
);
1100 case 0x8: /* val == 0 */
1101 tcg_gen_brcondi_i64(TCG_COND_NE
, cc_dst
, 0, skip
);
1103 case 0x4 | 0x2 | 0x1: /* val != 0 */
1104 case 0x4 | 0x2: /* val != 0 */
1105 tcg_gen_brcondi_i64(TCG_COND_EQ
, cc_dst
, 0, skip
);
1110 account_inline_branch(s
);
1113 old_cc_op
= s
->cc_op
;
1114 goto do_dynamic_nocccalc
;
1118 old_cc_op
= s
->cc_op
;
1119 /* calculate cc value */
1122 do_dynamic_nocccalc
:
1123 /* jump based on cc */
1124 account_noninline_branch(s
, old_cc_op
);
1127 case 0x8 | 0x4 | 0x2 | 0x1:
1130 case 0x8 | 0x4 | 0x2: /* cc != 3 */
1131 tcg_gen_brcondi_i32(TCG_COND_EQ
, cc_op
, 3, skip
);
1133 case 0x8 | 0x4 | 0x1: /* cc != 2 */
1134 tcg_gen_brcondi_i32(TCG_COND_EQ
, cc_op
, 2, skip
);
1136 case 0x8 | 0x2 | 0x1: /* cc != 1 */
1137 tcg_gen_brcondi_i32(TCG_COND_EQ
, cc_op
, 1, skip
);
1139 case 0x8 | 0x2: /* cc == 0 ||Â cc == 2 */
1140 tmp
= tcg_temp_new_i32();
1141 tcg_gen_andi_i32(tmp
, cc_op
, 1);
1142 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp
, 0, skip
);
1143 tcg_temp_free_i32(tmp
);
1145 case 0x8 | 0x4: /* cc < 2 */
1146 tcg_gen_brcondi_i32(TCG_COND_GEU
, cc_op
, 2, skip
);
1148 case 0x8: /* cc == 0 */
1149 tcg_gen_brcondi_i32(TCG_COND_NE
, cc_op
, 0, skip
);
1151 case 0x4 | 0x2 | 0x1: /* cc != 0 */
1152 tcg_gen_brcondi_i32(TCG_COND_EQ
, cc_op
, 0, skip
);
1154 case 0x4 | 0x1: /* cc == 1 ||Â cc == 3 */
1155 tmp
= tcg_temp_new_i32();
1156 tcg_gen_andi_i32(tmp
, cc_op
, 1);
1157 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp
, 0, skip
);
1158 tcg_temp_free_i32(tmp
);
1160 case 0x4: /* cc == 1 */
1161 tcg_gen_brcondi_i32(TCG_COND_NE
, cc_op
, 1, skip
);
1163 case 0x2 | 0x1: /* cc > 1 */
1164 tcg_gen_brcondi_i32(TCG_COND_LEU
, cc_op
, 1, skip
);
1166 case 0x2: /* cc == 2 */
1167 tcg_gen_brcondi_i32(TCG_COND_NE
, cc_op
, 2, skip
);
1169 case 0x1: /* cc == 3 */
1170 tcg_gen_brcondi_i32(TCG_COND_NE
, cc_op
, 3, skip
);
1172 default: /* cc is masked by something else */
1173 tmp
= tcg_const_i32(3);
1175 tcg_gen_sub_i32(tmp
, tmp
, cc_op
);
1176 tmp2
= tcg_const_i32(1);
1178 tcg_gen_shl_i32(tmp2
, tmp2
, tmp
);
1179 r
= tcg_const_i32(mask
);
1180 /* mask & (1 << (3 - cc)) */
1181 tcg_gen_and_i32(r
, r
, tmp2
);
1182 tcg_temp_free_i32(tmp
);
1183 tcg_temp_free_i32(tmp2
);
1185 tcg_gen_brcondi_i32(TCG_COND_EQ
, r
, 0, skip
);
1186 tcg_temp_free_i32(r
);
1193 static void gen_bcr(DisasContext
*s
, uint32_t mask
, TCGv_i64 target
,
1200 tcg_gen_mov_i64(psw_addr
, target
);
1202 } else if (mask
== 0) {
1203 /* ignore cc and never match */
1204 gen_goto_tb(s
, 0, offset
+ 2);
1206 TCGv_i64 new_addr
= tcg_temp_local_new_i64();
1208 tcg_gen_mov_i64(new_addr
, target
);
1209 skip
= gen_new_label();
1210 gen_jcc(s
, mask
, skip
);
1211 tcg_gen_mov_i64(psw_addr
, new_addr
);
1212 tcg_temp_free_i64(new_addr
);
1214 gen_set_label(skip
);
1215 tcg_temp_free_i64(new_addr
);
1216 gen_goto_tb(s
, 1, offset
+ 2);
1220 static void gen_brc(uint32_t mask
, DisasContext
*s
, int32_t offset
)
1226 gen_goto_tb(s
, 0, s
->pc
+ offset
);
1227 } else if (mask
== 0) {
1228 /* ignore cc and never match */
1229 gen_goto_tb(s
, 0, s
->pc
+ 4);
1231 skip
= gen_new_label();
1232 gen_jcc(s
, mask
, skip
);
1233 gen_goto_tb(s
, 0, s
->pc
+ offset
);
1234 gen_set_label(skip
);
1235 gen_goto_tb(s
, 1, s
->pc
+ 4);
1237 s
->is_jmp
= DISAS_TB_JUMP
;
1240 static void gen_op_mvc(DisasContext
*s
, int l
, TCGv_i64 s1
, TCGv_i64 s2
)
1244 int l_memset
= gen_new_label();
1245 int l_out
= gen_new_label();
1246 TCGv_i64 dest
= tcg_temp_local_new_i64();
1247 TCGv_i64 src
= tcg_temp_local_new_i64();
1250 /* Find out if we should use the inline version of mvc */
1265 /* Fall back to helper */
1266 vl
= tcg_const_i32(l
);
1267 potential_page_fault(s
);
1268 gen_helper_mvc(cpu_env
, vl
, s1
, s2
);
1269 tcg_temp_free_i32(vl
);
1273 tcg_gen_mov_i64(dest
, s1
);
1274 tcg_gen_mov_i64(src
, s2
);
1276 if (!(s
->tb
->flags
& FLAG_MASK_64
)) {
1277 /* XXX what if we overflow while moving? */
1278 tcg_gen_andi_i64(dest
, dest
, 0x7fffffffUL
);
1279 tcg_gen_andi_i64(src
, src
, 0x7fffffffUL
);
1282 tmp
= tcg_temp_new_i64();
1283 tcg_gen_addi_i64(tmp
, src
, 1);
1284 tcg_gen_brcond_i64(TCG_COND_EQ
, dest
, tmp
, l_memset
);
1285 tcg_temp_free_i64(tmp
);
1289 tmp
= tcg_temp_new_i64();
1291 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1292 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1294 tcg_temp_free_i64(tmp
);
1297 tmp
= tcg_temp_new_i64();
1299 tcg_gen_qemu_ld16u(tmp
, src
, get_mem_index(s
));
1300 tcg_gen_qemu_st16(tmp
, dest
, get_mem_index(s
));
1302 tcg_temp_free_i64(tmp
);
1305 tmp
= tcg_temp_new_i64();
1307 tcg_gen_qemu_ld32u(tmp
, src
, get_mem_index(s
));
1308 tcg_gen_qemu_st32(tmp
, dest
, get_mem_index(s
));
1310 tcg_temp_free_i64(tmp
);
1313 tmp
= tcg_temp_new_i64();
1314 tmp2
= tcg_temp_new_i64();
1316 tcg_gen_qemu_ld32u(tmp
, src
, get_mem_index(s
));
1317 tcg_gen_addi_i64(src
, src
, 4);
1318 tcg_gen_qemu_ld8u(tmp2
, src
, get_mem_index(s
));
1319 tcg_gen_qemu_st32(tmp
, dest
, get_mem_index(s
));
1320 tcg_gen_addi_i64(dest
, dest
, 4);
1321 tcg_gen_qemu_st8(tmp2
, dest
, get_mem_index(s
));
1323 tcg_temp_free_i64(tmp
);
1324 tcg_temp_free_i64(tmp2
);
1327 tmp
= tcg_temp_new_i64();
1329 tcg_gen_qemu_ld64(tmp
, src
, get_mem_index(s
));
1330 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1332 tcg_temp_free_i64(tmp
);
1335 /* The inline version can become too big for too uneven numbers, only
1336 use it on known good lengths */
1337 tmp
= tcg_temp_new_i64();
1338 tmp2
= tcg_const_i64(8);
1339 for (i
= 0; (i
+ 7) <= l
; i
+= 8) {
1340 tcg_gen_qemu_ld64(tmp
, src
, get_mem_index(s
));
1341 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1343 tcg_gen_add_i64(src
, src
, tmp2
);
1344 tcg_gen_add_i64(dest
, dest
, tmp2
);
1347 tcg_temp_free_i64(tmp2
);
1348 tmp2
= tcg_const_i64(1);
1350 for (; i
<= l
; i
++) {
1351 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1352 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1354 tcg_gen_add_i64(src
, src
, tmp2
);
1355 tcg_gen_add_i64(dest
, dest
, tmp2
);
1358 tcg_temp_free_i64(tmp2
);
1359 tcg_temp_free_i64(tmp
);
1365 gen_set_label(l_memset
);
1366 /* memset case (dest == (src + 1)) */
1368 tmp
= tcg_temp_new_i64();
1369 tmp2
= tcg_temp_new_i64();
1370 /* fill tmp with the byte */
1371 tcg_gen_qemu_ld8u(tmp
, src
, get_mem_index(s
));
1372 tcg_gen_shli_i64(tmp2
, tmp
, 8);
1373 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1374 tcg_gen_shli_i64(tmp2
, tmp
, 16);
1375 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1376 tcg_gen_shli_i64(tmp2
, tmp
, 32);
1377 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
1378 tcg_temp_free_i64(tmp2
);
1380 tmp2
= tcg_const_i64(8);
1382 for (i
= 0; (i
+ 7) <= l
; i
+= 8) {
1383 tcg_gen_qemu_st64(tmp
, dest
, get_mem_index(s
));
1384 tcg_gen_addi_i64(dest
, dest
, 8);
1387 tcg_temp_free_i64(tmp2
);
1388 tmp2
= tcg_const_i64(1);
1390 for (; i
<= l
; i
++) {
1391 tcg_gen_qemu_st8(tmp
, dest
, get_mem_index(s
));
1392 tcg_gen_addi_i64(dest
, dest
, 1);
1395 tcg_temp_free_i64(tmp2
);
1396 tcg_temp_free_i64(tmp
);
1398 gen_set_label(l_out
);
1400 tcg_temp_free(dest
);
1404 static void gen_op_clc(DisasContext
*s
, int l
, TCGv_i64 s1
, TCGv_i64 s2
)
1410 /* check for simple 32bit or 64bit match */
1413 tmp
= tcg_temp_new_i64();
1414 tmp2
= tcg_temp_new_i64();
1416 tcg_gen_qemu_ld8u(tmp
, s1
, get_mem_index(s
));
1417 tcg_gen_qemu_ld8u(tmp2
, s2
, get_mem_index(s
));
1418 cmp_u64(s
, tmp
, tmp2
);
1420 tcg_temp_free_i64(tmp
);
1421 tcg_temp_free_i64(tmp2
);
1424 tmp
= tcg_temp_new_i64();
1425 tmp2
= tcg_temp_new_i64();
1427 tcg_gen_qemu_ld16u(tmp
, s1
, get_mem_index(s
));
1428 tcg_gen_qemu_ld16u(tmp2
, s2
, get_mem_index(s
));
1429 cmp_u64(s
, tmp
, tmp2
);
1431 tcg_temp_free_i64(tmp
);
1432 tcg_temp_free_i64(tmp2
);
1435 tmp
= tcg_temp_new_i64();
1436 tmp2
= tcg_temp_new_i64();
1438 tcg_gen_qemu_ld32u(tmp
, s1
, get_mem_index(s
));
1439 tcg_gen_qemu_ld32u(tmp2
, s2
, get_mem_index(s
));
1440 cmp_u64(s
, tmp
, tmp2
);
1442 tcg_temp_free_i64(tmp
);
1443 tcg_temp_free_i64(tmp2
);
1446 tmp
= tcg_temp_new_i64();
1447 tmp2
= tcg_temp_new_i64();
1449 tcg_gen_qemu_ld64(tmp
, s1
, get_mem_index(s
));
1450 tcg_gen_qemu_ld64(tmp2
, s2
, get_mem_index(s
));
1451 cmp_u64(s
, tmp
, tmp2
);
1453 tcg_temp_free_i64(tmp
);
1454 tcg_temp_free_i64(tmp2
);
1458 potential_page_fault(s
);
1459 vl
= tcg_const_i32(l
);
1460 gen_helper_clc(cc_op
, cpu_env
, vl
, s1
, s2
);
1461 tcg_temp_free_i32(vl
);
1465 static void disas_e3(CPUS390XState
*env
, DisasContext
* s
, int op
, int r1
,
1466 int x2
, int b2
, int d2
)
1468 TCGv_i64 addr
, tmp
, tmp2
, tmp3
, tmp4
;
1469 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
1471 LOG_DISAS("disas_e3: op 0x%x r1 %d x2 %d b2 %d d2 %d\n",
1472 op
, r1
, x2
, b2
, d2
);
1473 addr
= get_address(s
, x2
, b2
, d2
);
1475 case 0x2: /* LTG R1,D2(X2,B2) [RXY] */
1476 case 0x4: /* lg r1,d2(x2,b2) */
1477 tcg_gen_qemu_ld64(regs
[r1
], addr
, get_mem_index(s
));
1479 set_cc_s64(s
, regs
[r1
]);
1482 case 0x12: /* LT R1,D2(X2,B2) [RXY] */
1483 tmp2
= tcg_temp_new_i64();
1484 tmp32_1
= tcg_temp_new_i32();
1485 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1486 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1487 store_reg32(r1
, tmp32_1
);
1488 set_cc_s32(s
, tmp32_1
);
1489 tcg_temp_free_i64(tmp2
);
1490 tcg_temp_free_i32(tmp32_1
);
1492 case 0xc: /* MSG R1,D2(X2,B2) [RXY] */
1493 case 0x1c: /* MSGF R1,D2(X2,B2) [RXY] */
1494 tmp2
= tcg_temp_new_i64();
1496 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1498 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1500 tcg_gen_mul_i64(regs
[r1
], regs
[r1
], tmp2
);
1501 tcg_temp_free_i64(tmp2
);
1503 case 0xd: /* DSG R1,D2(X2,B2) [RXY] */
1504 case 0x1d: /* DSGF R1,D2(X2,B2) [RXY] */
1505 tmp2
= tcg_temp_new_i64();
1507 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1509 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1511 tmp4
= load_reg(r1
+ 1);
1512 tmp3
= tcg_temp_new_i64();
1513 tcg_gen_div_i64(tmp3
, tmp4
, tmp2
);
1514 store_reg(r1
+ 1, tmp3
);
1515 tcg_gen_rem_i64(tmp3
, tmp4
, tmp2
);
1516 store_reg(r1
, tmp3
);
1517 tcg_temp_free_i64(tmp2
);
1518 tcg_temp_free_i64(tmp3
);
1519 tcg_temp_free_i64(tmp4
);
1521 case 0x8: /* AG R1,D2(X2,B2) [RXY] */
1522 case 0xa: /* ALG R1,D2(X2,B2) [RXY] */
1523 case 0x18: /* AGF R1,D2(X2,B2) [RXY] */
1524 case 0x1a: /* ALGF R1,D2(X2,B2) [RXY] */
1526 tmp2
= tcg_temp_new_i64();
1527 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1528 } else if (op
== 0x18) {
1529 tmp2
= tcg_temp_new_i64();
1530 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1532 tmp2
= tcg_temp_new_i64();
1533 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1535 tmp4
= load_reg(r1
);
1536 tmp3
= tcg_temp_new_i64();
1537 tcg_gen_add_i64(tmp3
, tmp4
, tmp2
);
1538 store_reg(r1
, tmp3
);
1542 set_cc_add64(s
, tmp4
, tmp2
, tmp3
);
1546 set_cc_addu64(s
, tmp4
, tmp2
, tmp3
);
1551 tcg_temp_free_i64(tmp2
);
1552 tcg_temp_free_i64(tmp3
);
1553 tcg_temp_free_i64(tmp4
);
1555 case 0x9: /* SG R1,D2(X2,B2) [RXY] */
1556 case 0xb: /* SLG R1,D2(X2,B2) [RXY] */
1557 case 0x19: /* SGF R1,D2(X2,B2) [RXY] */
1558 case 0x1b: /* SLGF R1,D2(X2,B2) [RXY] */
1559 tmp2
= tcg_temp_new_i64();
1561 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1562 } else if (op
== 0x1b) {
1563 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1565 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1567 tmp4
= load_reg(r1
);
1568 tmp3
= tcg_temp_new_i64();
1569 tcg_gen_sub_i64(tmp3
, tmp4
, tmp2
);
1570 store_reg(r1
, tmp3
);
1574 set_cc_sub64(s
, tmp4
, tmp2
, tmp3
);
1578 set_cc_subu64(s
, tmp4
, tmp2
, tmp3
);
1583 tcg_temp_free_i64(tmp2
);
1584 tcg_temp_free_i64(tmp3
);
1585 tcg_temp_free_i64(tmp4
);
1587 case 0xf: /* LRVG R1,D2(X2,B2) [RXE] */
1588 tmp2
= tcg_temp_new_i64();
1589 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1590 tcg_gen_bswap64_i64(tmp2
, tmp2
);
1591 store_reg(r1
, tmp2
);
1592 tcg_temp_free_i64(tmp2
);
1594 case 0x14: /* LGF R1,D2(X2,B2) [RXY] */
1595 case 0x16: /* LLGF R1,D2(X2,B2) [RXY] */
1596 tmp2
= tcg_temp_new_i64();
1597 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1599 tcg_gen_ext32s_i64(tmp2
, tmp2
);
1601 store_reg(r1
, tmp2
);
1602 tcg_temp_free_i64(tmp2
);
1604 case 0x15: /* LGH R1,D2(X2,B2) [RXY] */
1605 tmp2
= tcg_temp_new_i64();
1606 tcg_gen_qemu_ld16s(tmp2
, addr
, get_mem_index(s
));
1607 store_reg(r1
, tmp2
);
1608 tcg_temp_free_i64(tmp2
);
1610 case 0x17: /* LLGT R1,D2(X2,B2) [RXY] */
1611 tmp2
= tcg_temp_new_i64();
1612 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1613 tcg_gen_andi_i64(tmp2
, tmp2
, 0x7fffffffULL
);
1614 store_reg(r1
, tmp2
);
1615 tcg_temp_free_i64(tmp2
);
1617 case 0x1e: /* LRV R1,D2(X2,B2) [RXY] */
1618 tmp2
= tcg_temp_new_i64();
1619 tmp32_1
= tcg_temp_new_i32();
1620 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1621 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1622 tcg_temp_free_i64(tmp2
);
1623 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
1624 store_reg32(r1
, tmp32_1
);
1625 tcg_temp_free_i32(tmp32_1
);
1627 case 0x1f: /* LRVH R1,D2(X2,B2) [RXY] */
1628 tmp2
= tcg_temp_new_i64();
1629 tmp32_1
= tcg_temp_new_i32();
1630 tcg_gen_qemu_ld16u(tmp2
, addr
, get_mem_index(s
));
1631 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
1632 tcg_temp_free_i64(tmp2
);
1633 tcg_gen_bswap16_i32(tmp32_1
, tmp32_1
);
1634 store_reg16(r1
, tmp32_1
);
1635 tcg_temp_free_i32(tmp32_1
);
1637 case 0x20: /* CG R1,D2(X2,B2) [RXY] */
1638 case 0x21: /* CLG R1,D2(X2,B2) */
1639 case 0x30: /* CGF R1,D2(X2,B2) [RXY] */
1640 case 0x31: /* CLGF R1,D2(X2,B2) [RXY] */
1641 tmp2
= tcg_temp_new_i64();
1645 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1648 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1651 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1659 cmp_s64(s
, regs
[r1
], tmp2
);
1663 cmp_u64(s
, regs
[r1
], tmp2
);
1668 tcg_temp_free_i64(tmp2
);
1670 case 0x24: /* stg r1, d2(x2,b2) */
1671 tcg_gen_qemu_st64(regs
[r1
], addr
, get_mem_index(s
));
1673 case 0x3e: /* STRV R1,D2(X2,B2) [RXY] */
1674 tmp32_1
= load_reg32(r1
);
1675 tmp2
= tcg_temp_new_i64();
1676 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
1677 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
1678 tcg_temp_free_i32(tmp32_1
);
1679 tcg_gen_qemu_st32(tmp2
, addr
, get_mem_index(s
));
1680 tcg_temp_free_i64(tmp2
);
1682 case 0x50: /* STY R1,D2(X2,B2) [RXY] */
1683 tmp32_1
= load_reg32(r1
);
1684 tmp2
= tcg_temp_new_i64();
1685 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
1686 tcg_temp_free_i32(tmp32_1
);
1687 tcg_gen_qemu_st32(tmp2
, addr
, get_mem_index(s
));
1688 tcg_temp_free_i64(tmp2
);
1690 case 0x57: /* XY R1,D2(X2,B2) [RXY] */
1691 tmp32_1
= load_reg32(r1
);
1692 tmp32_2
= tcg_temp_new_i32();
1693 tmp2
= tcg_temp_new_i64();
1694 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1695 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1696 tcg_temp_free_i64(tmp2
);
1697 tcg_gen_xor_i32(tmp32_2
, tmp32_1
, tmp32_2
);
1698 store_reg32(r1
, tmp32_2
);
1699 set_cc_nz_u32(s
, tmp32_2
);
1700 tcg_temp_free_i32(tmp32_1
);
1701 tcg_temp_free_i32(tmp32_2
);
1703 case 0x58: /* LY R1,D2(X2,B2) [RXY] */
1704 tmp3
= tcg_temp_new_i64();
1705 tcg_gen_qemu_ld32u(tmp3
, addr
, get_mem_index(s
));
1706 store_reg32_i64(r1
, tmp3
);
1707 tcg_temp_free_i64(tmp3
);
1709 case 0x5a: /* AY R1,D2(X2,B2) [RXY] */
1710 case 0x5b: /* SY R1,D2(X2,B2) [RXY] */
1711 tmp32_1
= load_reg32(r1
);
1712 tmp32_2
= tcg_temp_new_i32();
1713 tmp32_3
= tcg_temp_new_i32();
1714 tmp2
= tcg_temp_new_i64();
1715 tcg_gen_qemu_ld32s(tmp2
, addr
, get_mem_index(s
));
1716 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1717 tcg_temp_free_i64(tmp2
);
1720 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
1723 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
1728 store_reg32(r1
, tmp32_3
);
1731 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
1734 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
1739 tcg_temp_free_i32(tmp32_1
);
1740 tcg_temp_free_i32(tmp32_2
);
1741 tcg_temp_free_i32(tmp32_3
);
1743 case 0x71: /* LAY R1,D2(X2,B2) [RXY] */
1744 store_reg(r1
, addr
);
1746 case 0x72: /* STCY R1,D2(X2,B2) [RXY] */
1747 tmp32_1
= load_reg32(r1
);
1748 tmp2
= tcg_temp_new_i64();
1749 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
1750 tcg_gen_qemu_st8(tmp2
, addr
, get_mem_index(s
));
1751 tcg_temp_free_i32(tmp32_1
);
1752 tcg_temp_free_i64(tmp2
);
1754 case 0x73: /* ICY R1,D2(X2,B2) [RXY] */
1755 tmp3
= tcg_temp_new_i64();
1756 tcg_gen_qemu_ld8u(tmp3
, addr
, get_mem_index(s
));
1757 store_reg8(r1
, tmp3
);
1758 tcg_temp_free_i64(tmp3
);
1760 case 0x76: /* LB R1,D2(X2,B2) [RXY] */
1761 case 0x77: /* LGB R1,D2(X2,B2) [RXY] */
1762 tmp2
= tcg_temp_new_i64();
1763 tcg_gen_qemu_ld8s(tmp2
, addr
, get_mem_index(s
));
1766 tcg_gen_ext8s_i64(tmp2
, tmp2
);
1767 store_reg32_i64(r1
, tmp2
);
1770 tcg_gen_ext8s_i64(tmp2
, tmp2
);
1771 store_reg(r1
, tmp2
);
1776 tcg_temp_free_i64(tmp2
);
1778 case 0x78: /* LHY R1,D2(X2,B2) [RXY] */
1779 tmp2
= tcg_temp_new_i64();
1780 tcg_gen_qemu_ld16s(tmp2
, addr
, get_mem_index(s
));
1781 store_reg32_i64(r1
, tmp2
);
1782 tcg_temp_free_i64(tmp2
);
1784 case 0x80: /* NG R1,D2(X2,B2) [RXY] */
1785 case 0x81: /* OG R1,D2(X2,B2) [RXY] */
1786 case 0x82: /* XG R1,D2(X2,B2) [RXY] */
1787 tmp3
= tcg_temp_new_i64();
1788 tcg_gen_qemu_ld64(tmp3
, addr
, get_mem_index(s
));
1791 tcg_gen_and_i64(regs
[r1
], regs
[r1
], tmp3
);
1794 tcg_gen_or_i64(regs
[r1
], regs
[r1
], tmp3
);
1797 tcg_gen_xor_i64(regs
[r1
], regs
[r1
], tmp3
);
1802 set_cc_nz_u64(s
, regs
[r1
]);
1803 tcg_temp_free_i64(tmp3
);
1805 case 0x86: /* MLG R1,D2(X2,B2) [RXY] */
1806 tmp2
= tcg_temp_new_i64();
1807 tmp32_1
= tcg_const_i32(r1
);
1808 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1809 gen_helper_mlg(cpu_env
, tmp32_1
, tmp2
);
1810 tcg_temp_free_i64(tmp2
);
1811 tcg_temp_free_i32(tmp32_1
);
1813 case 0x87: /* DLG R1,D2(X2,B2) [RXY] */
1814 tmp2
= tcg_temp_new_i64();
1815 tmp32_1
= tcg_const_i32(r1
);
1816 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1817 gen_helper_dlg(cpu_env
, tmp32_1
, tmp2
);
1818 tcg_temp_free_i64(tmp2
);
1819 tcg_temp_free_i32(tmp32_1
);
1821 case 0x88: /* ALCG R1,D2(X2,B2) [RXY] */
1822 tmp2
= tcg_temp_new_i64();
1823 tmp3
= tcg_temp_new_i64();
1824 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1825 /* XXX possible optimization point */
1827 tcg_gen_extu_i32_i64(tmp3
, cc_op
);
1828 tcg_gen_shri_i64(tmp3
, tmp3
, 1);
1829 tcg_gen_andi_i64(tmp3
, tmp3
, 1);
1830 tcg_gen_add_i64(tmp3
, tmp2
, tmp3
);
1831 tcg_gen_add_i64(tmp3
, regs
[r1
], tmp3
);
1832 store_reg(r1
, tmp3
);
1833 set_cc_addu64(s
, regs
[r1
], tmp2
, tmp3
);
1834 tcg_temp_free_i64(tmp2
);
1835 tcg_temp_free_i64(tmp3
);
1837 case 0x89: /* SLBG R1,D2(X2,B2) [RXY] */
1838 tmp2
= tcg_temp_new_i64();
1839 tmp32_1
= tcg_const_i32(r1
);
1840 tcg_gen_qemu_ld64(tmp2
, addr
, get_mem_index(s
));
1841 /* XXX possible optimization point */
1843 gen_helper_slbg(cc_op
, cpu_env
, cc_op
, tmp32_1
, regs
[r1
], tmp2
);
1845 tcg_temp_free_i64(tmp2
);
1846 tcg_temp_free_i32(tmp32_1
);
1848 case 0x90: /* LLGC R1,D2(X2,B2) [RXY] */
1849 tcg_gen_qemu_ld8u(regs
[r1
], addr
, get_mem_index(s
));
1851 case 0x91: /* LLGH R1,D2(X2,B2) [RXY] */
1852 tcg_gen_qemu_ld16u(regs
[r1
], addr
, get_mem_index(s
));
1854 case 0x94: /* LLC R1,D2(X2,B2) [RXY] */
1855 tmp2
= tcg_temp_new_i64();
1856 tcg_gen_qemu_ld8u(tmp2
, addr
, get_mem_index(s
));
1857 store_reg32_i64(r1
, tmp2
);
1858 tcg_temp_free_i64(tmp2
);
1860 case 0x95: /* LLH R1,D2(X2,B2) [RXY] */
1861 tmp2
= tcg_temp_new_i64();
1862 tcg_gen_qemu_ld16u(tmp2
, addr
, get_mem_index(s
));
1863 store_reg32_i64(r1
, tmp2
);
1864 tcg_temp_free_i64(tmp2
);
1866 case 0x96: /* ML R1,D2(X2,B2) [RXY] */
1867 tmp2
= tcg_temp_new_i64();
1868 tmp3
= load_reg((r1
+ 1) & 15);
1869 tcg_gen_ext32u_i64(tmp3
, tmp3
);
1870 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1871 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
1872 store_reg32_i64((r1
+ 1) & 15, tmp2
);
1873 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
1874 store_reg32_i64(r1
, tmp2
);
1875 tcg_temp_free_i64(tmp2
);
1876 tcg_temp_free_i64(tmp3
);
1878 case 0x97: /* DL R1,D2(X2,B2) [RXY] */
1879 /* reg(r1) = reg(r1, r1+1) % ld32(addr) */
1880 /* reg(r1+1) = reg(r1, r1+1) / ld32(addr) */
1882 tmp2
= tcg_temp_new_i64();
1883 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1884 tmp3
= load_reg((r1
+ 1) & 15);
1885 tcg_gen_ext32u_i64(tmp2
, tmp2
);
1886 tcg_gen_ext32u_i64(tmp3
, tmp3
);
1887 tcg_gen_shli_i64(tmp
, tmp
, 32);
1888 tcg_gen_or_i64(tmp
, tmp
, tmp3
);
1890 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
1891 tcg_gen_div_i64(tmp
, tmp
, tmp2
);
1892 store_reg32_i64((r1
+ 1) & 15, tmp
);
1893 store_reg32_i64(r1
, tmp3
);
1894 tcg_temp_free_i64(tmp
);
1895 tcg_temp_free_i64(tmp2
);
1896 tcg_temp_free_i64(tmp3
);
1898 case 0x98: /* ALC R1,D2(X2,B2) [RXY] */
1899 tmp2
= tcg_temp_new_i64();
1900 tmp32_1
= load_reg32(r1
);
1901 tmp32_2
= tcg_temp_new_i32();
1902 tmp32_3
= tcg_temp_new_i32();
1903 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1904 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1905 /* XXX possible optimization point */
1907 gen_helper_addc_u32(tmp32_3
, cc_op
, tmp32_1
, tmp32_2
);
1908 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
1909 store_reg32(r1
, tmp32_3
);
1910 tcg_temp_free_i64(tmp2
);
1911 tcg_temp_free_i32(tmp32_1
);
1912 tcg_temp_free_i32(tmp32_2
);
1913 tcg_temp_free_i32(tmp32_3
);
1915 case 0x99: /* SLB R1,D2(X2,B2) [RXY] */
1916 tmp2
= tcg_temp_new_i64();
1917 tmp32_1
= tcg_const_i32(r1
);
1918 tmp32_2
= tcg_temp_new_i32();
1919 tcg_gen_qemu_ld32u(tmp2
, addr
, get_mem_index(s
));
1920 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
1921 /* XXX possible optimization point */
1923 gen_helper_slb(cc_op
, cpu_env
, cc_op
, tmp32_1
, tmp32_2
);
1925 tcg_temp_free_i64(tmp2
);
1926 tcg_temp_free_i32(tmp32_1
);
1927 tcg_temp_free_i32(tmp32_2
);
1930 LOG_DISAS("illegal e3 operation 0x%x\n", op
);
1931 gen_illegal_opcode(env
, s
, 3);
1934 tcg_temp_free_i64(addr
);
1937 #ifndef CONFIG_USER_ONLY
1938 static void disas_e5(CPUS390XState
*env
, DisasContext
* s
, uint64_t insn
)
1941 int op
= (insn
>> 32) & 0xff;
1943 tmp
= get_address(s
, 0, (insn
>> 28) & 0xf, (insn
>> 16) & 0xfff);
1944 tmp2
= get_address(s
, 0, (insn
>> 12) & 0xf, insn
& 0xfff);
1946 LOG_DISAS("disas_e5: insn %" PRIx64
"\n", insn
);
1948 case 0x01: /* TPROT D1(B1),D2(B2) [SSE] */
1949 /* Test Protection */
1950 potential_page_fault(s
);
1951 gen_helper_tprot(cc_op
, tmp
, tmp2
);
1955 LOG_DISAS("illegal e5 operation 0x%x\n", op
);
1956 gen_illegal_opcode(env
, s
, 3);
1960 tcg_temp_free_i64(tmp
);
1961 tcg_temp_free_i64(tmp2
);
1965 static void disas_eb(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
1966 int r3
, int b2
, int d2
)
1968 TCGv_i64 tmp
, tmp2
, tmp3
, tmp4
;
1969 TCGv_i32 tmp32_1
, tmp32_2
;
1973 LOG_DISAS("disas_eb: op 0x%x r1 %d r3 %d b2 %d d2 0x%x\n",
1974 op
, r1
, r3
, b2
, d2
);
1976 case 0xc: /* SRLG R1,R3,D2(B2) [RSY] */
1977 case 0xd: /* SLLG R1,R3,D2(B2) [RSY] */
1978 case 0xa: /* SRAG R1,R3,D2(B2) [RSY] */
1979 case 0xb: /* SLAG R1,R3,D2(B2) [RSY] */
1980 case 0x1c: /* RLLG R1,R3,D2(B2) [RSY] */
1982 tmp
= get_address(s
, 0, b2
, d2
);
1983 tcg_gen_andi_i64(tmp
, tmp
, 0x3f);
1985 tmp
= tcg_const_i64(d2
& 0x3f);
1989 tcg_gen_shr_i64(regs
[r1
], regs
[r3
], tmp
);
1992 tcg_gen_shl_i64(regs
[r1
], regs
[r3
], tmp
);
1995 tcg_gen_sar_i64(regs
[r1
], regs
[r3
], tmp
);
1998 tmp2
= tcg_temp_new_i64();
1999 tmp3
= tcg_temp_new_i64();
2000 gen_op_update2_cc_i64(s
, CC_OP_SLAG
, regs
[r3
], tmp
);
2001 tcg_gen_shl_i64(tmp2
, regs
[r3
], tmp
);
2002 /* override sign bit with source sign */
2003 tcg_gen_andi_i64(tmp2
, tmp2
, ~0x8000000000000000ULL
);
2004 tcg_gen_andi_i64(tmp3
, regs
[r3
], 0x8000000000000000ULL
);
2005 tcg_gen_or_i64(regs
[r1
], tmp2
, tmp3
);
2006 tcg_temp_free_i64(tmp2
);
2007 tcg_temp_free_i64(tmp3
);
2010 tcg_gen_rotl_i64(regs
[r1
], regs
[r3
], tmp
);
2017 set_cc_s64(s
, regs
[r1
]);
2019 tcg_temp_free_i64(tmp
);
2021 case 0x1d: /* RLL R1,R3,D2(B2) [RSY] */
2023 tmp
= get_address(s
, 0, b2
, d2
);
2024 tcg_gen_andi_i64(tmp
, tmp
, 0x3f);
2026 tmp
= tcg_const_i64(d2
& 0x3f);
2028 tmp32_1
= tcg_temp_new_i32();
2029 tmp32_2
= load_reg32(r3
);
2030 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
2033 tcg_gen_rotl_i32(tmp32_1
, tmp32_2
, tmp32_1
);
2039 store_reg32(r1
, tmp32_1
);
2040 tcg_temp_free_i64(tmp
);
2041 tcg_temp_free_i32(tmp32_1
);
2042 tcg_temp_free_i32(tmp32_2
);
2044 case 0x4: /* LMG R1,R3,D2(B2) [RSE] */
2045 case 0x24: /* STMG R1,R3,D2(B2) [RSE] */
2048 case 0x26: /* STMH R1,R3,D2(B2) [RSE] */
2049 case 0x96: /* LMH R1,R3,D2(B2) [RSE] */
2052 /* Apparently, unrolling lmg/stmg of any size gains performance -
2053 even for very long ones... */
2054 tmp
= get_address(s
, 0, b2
, d2
);
2055 tmp3
= tcg_const_i64(stm_len
);
2056 tmp4
= tcg_const_i64(op
== 0x26 ? 32 : 4);
2057 for (i
= r1
;; i
= (i
+ 1) % 16) {
2060 tcg_gen_qemu_ld64(regs
[i
], tmp
, get_mem_index(s
));
2063 tmp2
= tcg_temp_new_i64();
2064 #if HOST_LONG_BITS == 32
2065 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
2066 tcg_gen_trunc_i64_i32(TCGV_HIGH(regs
[i
]), tmp2
);
2068 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
2069 tcg_gen_shl_i64(tmp2
, tmp2
, tmp4
);
2070 tcg_gen_ext32u_i64(regs
[i
], regs
[i
]);
2071 tcg_gen_or_i64(regs
[i
], regs
[i
], tmp2
);
2073 tcg_temp_free_i64(tmp2
);
2076 tcg_gen_qemu_st64(regs
[i
], tmp
, get_mem_index(s
));
2079 tmp2
= tcg_temp_new_i64();
2080 tcg_gen_shr_i64(tmp2
, regs
[i
], tmp4
);
2081 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2082 tcg_temp_free_i64(tmp2
);
2090 tcg_gen_add_i64(tmp
, tmp
, tmp3
);
2092 tcg_temp_free_i64(tmp
);
2093 tcg_temp_free_i64(tmp3
);
2094 tcg_temp_free_i64(tmp4
);
2096 case 0x2c: /* STCMH R1,M3,D2(B2) [RSY] */
2097 tmp
= get_address(s
, 0, b2
, d2
);
2098 tmp32_1
= tcg_const_i32(r1
);
2099 tmp32_2
= tcg_const_i32(r3
);
2100 potential_page_fault(s
);
2101 gen_helper_stcmh(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
2102 tcg_temp_free_i64(tmp
);
2103 tcg_temp_free_i32(tmp32_1
);
2104 tcg_temp_free_i32(tmp32_2
);
2106 #ifndef CONFIG_USER_ONLY
2107 case 0x2f: /* LCTLG R1,R3,D2(B2) [RSE] */
2109 check_privileged(env
, s
, ilc
);
2110 tmp
= get_address(s
, 0, b2
, d2
);
2111 tmp32_1
= tcg_const_i32(r1
);
2112 tmp32_2
= tcg_const_i32(r3
);
2113 potential_page_fault(s
);
2114 gen_helper_lctlg(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
2115 tcg_temp_free_i64(tmp
);
2116 tcg_temp_free_i32(tmp32_1
);
2117 tcg_temp_free_i32(tmp32_2
);
2119 case 0x25: /* STCTG R1,R3,D2(B2) [RSE] */
2121 check_privileged(env
, s
, ilc
);
2122 tmp
= get_address(s
, 0, b2
, d2
);
2123 tmp32_1
= tcg_const_i32(r1
);
2124 tmp32_2
= tcg_const_i32(r3
);
2125 potential_page_fault(s
);
2126 gen_helper_stctg(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
2127 tcg_temp_free_i64(tmp
);
2128 tcg_temp_free_i32(tmp32_1
);
2129 tcg_temp_free_i32(tmp32_2
);
2132 case 0x30: /* CSG R1,R3,D2(B2) [RSY] */
2133 tmp
= get_address(s
, 0, b2
, d2
);
2134 tmp32_1
= tcg_const_i32(r1
);
2135 tmp32_2
= tcg_const_i32(r3
);
2136 potential_page_fault(s
);
2137 /* XXX rewrite in tcg */
2138 gen_helper_csg(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
2140 tcg_temp_free_i64(tmp
);
2141 tcg_temp_free_i32(tmp32_1
);
2142 tcg_temp_free_i32(tmp32_2
);
2144 case 0x3e: /* CDSG R1,R3,D2(B2) [RSY] */
2145 tmp
= get_address(s
, 0, b2
, d2
);
2146 tmp32_1
= tcg_const_i32(r1
);
2147 tmp32_2
= tcg_const_i32(r3
);
2148 potential_page_fault(s
);
2149 /* XXX rewrite in tcg */
2150 gen_helper_cdsg(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
2152 tcg_temp_free_i64(tmp
);
2153 tcg_temp_free_i32(tmp32_1
);
2154 tcg_temp_free_i32(tmp32_2
);
2156 case 0x51: /* TMY D1(B1),I2 [SIY] */
2157 tmp
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the destination */
2158 tmp2
= tcg_const_i64((r1
<< 4) | r3
);
2159 tcg_gen_qemu_ld8u(tmp
, tmp
, get_mem_index(s
));
2160 /* yes, this is a 32 bit operation with 64 bit tcg registers, because
2161 that incurs less conversions */
2162 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_32
);
2163 tcg_temp_free_i64(tmp
);
2164 tcg_temp_free_i64(tmp2
);
2166 case 0x52: /* MVIY D1(B1),I2 [SIY] */
2167 tmp
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the destination */
2168 tmp2
= tcg_const_i64((r1
<< 4) | r3
);
2169 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
2170 tcg_temp_free_i64(tmp
);
2171 tcg_temp_free_i64(tmp2
);
2173 case 0x55: /* CLIY D1(B1),I2 [SIY] */
2174 tmp3
= get_address(s
, 0, b2
, d2
); /* SIY -> this is the 1st operand */
2175 tmp
= tcg_temp_new_i64();
2176 tmp32_1
= tcg_temp_new_i32();
2177 tcg_gen_qemu_ld8u(tmp
, tmp3
, get_mem_index(s
));
2178 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
2179 cmp_u32c(s
, tmp32_1
, (r1
<< 4) | r3
);
2180 tcg_temp_free_i64(tmp
);
2181 tcg_temp_free_i64(tmp3
);
2182 tcg_temp_free_i32(tmp32_1
);
2184 case 0x80: /* ICMH R1,M3,D2(B2) [RSY] */
2185 tmp
= get_address(s
, 0, b2
, d2
);
2186 tmp32_1
= tcg_const_i32(r1
);
2187 tmp32_2
= tcg_const_i32(r3
);
2188 potential_page_fault(s
);
2189 /* XXX split CC calculation out */
2190 gen_helper_icmh(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
2192 tcg_temp_free_i64(tmp
);
2193 tcg_temp_free_i32(tmp32_1
);
2194 tcg_temp_free_i32(tmp32_2
);
2197 LOG_DISAS("illegal eb operation 0x%x\n", op
);
2198 gen_illegal_opcode(env
, s
, ilc
);
2203 static void disas_ed(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
2204 int x2
, int b2
, int d2
, int r1b
)
2206 TCGv_i32 tmp_r1
, tmp32
;
2208 addr
= get_address(s
, x2
, b2
, d2
);
2209 tmp_r1
= tcg_const_i32(r1
);
2211 case 0x4: /* LDEB R1,D2(X2,B2) [RXE] */
2212 potential_page_fault(s
);
2213 gen_helper_ldeb(cpu_env
, tmp_r1
, addr
);
2215 case 0x5: /* LXDB R1,D2(X2,B2) [RXE] */
2216 potential_page_fault(s
);
2217 gen_helper_lxdb(cpu_env
, tmp_r1
, addr
);
2219 case 0x9: /* CEB R1,D2(X2,B2) [RXE] */
2220 tmp
= tcg_temp_new_i64();
2221 tmp32
= load_freg32(r1
);
2222 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2223 set_cc_cmp_f32_i64(s
, tmp32
, tmp
);
2224 tcg_temp_free_i64(tmp
);
2225 tcg_temp_free_i32(tmp32
);
2227 case 0xa: /* AEB R1,D2(X2,B2) [RXE] */
2228 tmp
= tcg_temp_new_i64();
2229 tmp32
= tcg_temp_new_i32();
2230 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2231 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2232 gen_helper_aeb(cpu_env
, tmp_r1
, tmp32
);
2233 tcg_temp_free_i64(tmp
);
2234 tcg_temp_free_i32(tmp32
);
2236 tmp32
= load_freg32(r1
);
2237 gen_set_cc_nz_f32(s
, tmp32
);
2238 tcg_temp_free_i32(tmp32
);
2240 case 0xb: /* SEB R1,D2(X2,B2) [RXE] */
2241 tmp
= tcg_temp_new_i64();
2242 tmp32
= tcg_temp_new_i32();
2243 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2244 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2245 gen_helper_seb(cpu_env
, tmp_r1
, tmp32
);
2246 tcg_temp_free_i64(tmp
);
2247 tcg_temp_free_i32(tmp32
);
2249 tmp32
= load_freg32(r1
);
2250 gen_set_cc_nz_f32(s
, tmp32
);
2251 tcg_temp_free_i32(tmp32
);
2253 case 0xd: /* DEB R1,D2(X2,B2) [RXE] */
2254 tmp
= tcg_temp_new_i64();
2255 tmp32
= tcg_temp_new_i32();
2256 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2257 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2258 gen_helper_deb(cpu_env
, tmp_r1
, tmp32
);
2259 tcg_temp_free_i64(tmp
);
2260 tcg_temp_free_i32(tmp32
);
2262 case 0x10: /* TCEB R1,D2(X2,B2) [RXE] */
2263 potential_page_fault(s
);
2264 gen_helper_tceb(cc_op
, cpu_env
, tmp_r1
, addr
);
2267 case 0x11: /* TCDB R1,D2(X2,B2) [RXE] */
2268 potential_page_fault(s
);
2269 gen_helper_tcdb(cc_op
, cpu_env
, tmp_r1
, addr
);
2272 case 0x12: /* TCXB R1,D2(X2,B2) [RXE] */
2273 potential_page_fault(s
);
2274 gen_helper_tcxb(cc_op
, cpu_env
, tmp_r1
, addr
);
2277 case 0x17: /* MEEB R1,D2(X2,B2) [RXE] */
2278 tmp
= tcg_temp_new_i64();
2279 tmp32
= tcg_temp_new_i32();
2280 tcg_gen_qemu_ld32u(tmp
, addr
, get_mem_index(s
));
2281 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2282 gen_helper_meeb(cpu_env
, tmp_r1
, tmp32
);
2283 tcg_temp_free_i64(tmp
);
2284 tcg_temp_free_i32(tmp32
);
2286 case 0x19: /* CDB R1,D2(X2,B2) [RXE] */
2287 potential_page_fault(s
);
2288 gen_helper_cdb(cc_op
, cpu_env
, tmp_r1
, addr
);
2291 case 0x1a: /* ADB R1,D2(X2,B2) [RXE] */
2292 potential_page_fault(s
);
2293 gen_helper_adb(cc_op
, cpu_env
, tmp_r1
, addr
);
2296 case 0x1b: /* SDB R1,D2(X2,B2) [RXE] */
2297 potential_page_fault(s
);
2298 gen_helper_sdb(cc_op
, cpu_env
, tmp_r1
, addr
);
2301 case 0x1c: /* MDB R1,D2(X2,B2) [RXE] */
2302 potential_page_fault(s
);
2303 gen_helper_mdb(cpu_env
, tmp_r1
, addr
);
2305 case 0x1d: /* DDB R1,D2(X2,B2) [RXE] */
2306 potential_page_fault(s
);
2307 gen_helper_ddb(cpu_env
, tmp_r1
, addr
);
2309 case 0x1e: /* MADB R1,R3,D2(X2,B2) [RXF] */
2310 /* for RXF insns, r1 is R3 and r1b is R1 */
2311 tmp32
= tcg_const_i32(r1b
);
2312 potential_page_fault(s
);
2313 gen_helper_madb(cpu_env
, tmp32
, addr
, tmp_r1
);
2314 tcg_temp_free_i32(tmp32
);
2317 LOG_DISAS("illegal ed operation 0x%x\n", op
);
2318 gen_illegal_opcode(env
, s
, 3);
2321 tcg_temp_free_i32(tmp_r1
);
2322 tcg_temp_free_i64(addr
);
2325 static void disas_a5(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
2330 LOG_DISAS("disas_a5: op 0x%x r1 %d i2 0x%x\n", op
, r1
, i2
);
2332 case 0x0: /* IIHH R1,I2 [RI] */
2333 tmp
= tcg_const_i64(i2
);
2334 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 48, 16);
2335 tcg_temp_free_i64(tmp
);
2337 case 0x1: /* IIHL R1,I2 [RI] */
2338 tmp
= tcg_const_i64(i2
);
2339 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 32, 16);
2340 tcg_temp_free_i64(tmp
);
2342 case 0x2: /* IILH R1,I2 [RI] */
2343 tmp
= tcg_const_i64(i2
);
2344 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 16, 16);
2345 tcg_temp_free_i64(tmp
);
2347 case 0x3: /* IILL R1,I2 [RI] */
2348 tmp
= tcg_const_i64(i2
);
2349 tcg_gen_deposit_i64(regs
[r1
], regs
[r1
], tmp
, 0, 16);
2350 tcg_temp_free_i64(tmp
);
2352 case 0x4: /* NIHH R1,I2 [RI] */
2353 case 0x8: /* OIHH R1,I2 [RI] */
2355 tmp32
= tcg_temp_new_i32();
2358 tmp2
= tcg_const_i64((((uint64_t)i2
) << 48)
2359 | 0x0000ffffffffffffULL
);
2360 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2363 tmp2
= tcg_const_i64(((uint64_t)i2
) << 48);
2364 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2370 tcg_gen_shri_i64(tmp2
, tmp
, 48);
2371 tcg_gen_trunc_i64_i32(tmp32
, tmp2
);
2372 set_cc_nz_u32(s
, tmp32
);
2373 tcg_temp_free_i64(tmp2
);
2374 tcg_temp_free_i32(tmp32
);
2375 tcg_temp_free_i64(tmp
);
2377 case 0x5: /* NIHL R1,I2 [RI] */
2378 case 0x9: /* OIHL R1,I2 [RI] */
2380 tmp32
= tcg_temp_new_i32();
2383 tmp2
= tcg_const_i64((((uint64_t)i2
) << 32)
2384 | 0xffff0000ffffffffULL
);
2385 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2388 tmp2
= tcg_const_i64(((uint64_t)i2
) << 32);
2389 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2395 tcg_gen_shri_i64(tmp2
, tmp
, 32);
2396 tcg_gen_trunc_i64_i32(tmp32
, tmp2
);
2397 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2398 set_cc_nz_u32(s
, tmp32
);
2399 tcg_temp_free_i64(tmp2
);
2400 tcg_temp_free_i32(tmp32
);
2401 tcg_temp_free_i64(tmp
);
2403 case 0x6: /* NILH R1,I2 [RI] */
2404 case 0xa: /* OILH R1,I2 [RI] */
2406 tmp32
= tcg_temp_new_i32();
2409 tmp2
= tcg_const_i64((((uint64_t)i2
) << 16)
2410 | 0xffffffff0000ffffULL
);
2411 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2414 tmp2
= tcg_const_i64(((uint64_t)i2
) << 16);
2415 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2421 tcg_gen_shri_i64(tmp
, tmp
, 16);
2422 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2423 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2424 set_cc_nz_u32(s
, tmp32
);
2425 tcg_temp_free_i64(tmp2
);
2426 tcg_temp_free_i32(tmp32
);
2427 tcg_temp_free_i64(tmp
);
2429 case 0x7: /* NILL R1,I2 [RI] */
2430 case 0xb: /* OILL R1,I2 [RI] */
2432 tmp32
= tcg_temp_new_i32();
2435 tmp2
= tcg_const_i64(i2
| 0xffffffffffff0000ULL
);
2436 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
2439 tmp2
= tcg_const_i64(i2
);
2440 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
2446 tcg_gen_trunc_i64_i32(tmp32
, tmp
);
2447 tcg_gen_andi_i32(tmp32
, tmp32
, 0xffff);
2448 set_cc_nz_u32(s
, tmp32
); /* signedness should not matter here */
2449 tcg_temp_free_i64(tmp2
);
2450 tcg_temp_free_i32(tmp32
);
2451 tcg_temp_free_i64(tmp
);
2453 case 0xc: /* LLIHH R1,I2 [RI] */
2454 tmp
= tcg_const_i64( ((uint64_t)i2
) << 48 );
2456 tcg_temp_free_i64(tmp
);
2458 case 0xd: /* LLIHL R1,I2 [RI] */
2459 tmp
= tcg_const_i64( ((uint64_t)i2
) << 32 );
2461 tcg_temp_free_i64(tmp
);
2463 case 0xe: /* LLILH R1,I2 [RI] */
2464 tmp
= tcg_const_i64( ((uint64_t)i2
) << 16 );
2466 tcg_temp_free_i64(tmp
);
2468 case 0xf: /* LLILL R1,I2 [RI] */
2469 tmp
= tcg_const_i64(i2
);
2471 tcg_temp_free_i64(tmp
);
2474 LOG_DISAS("illegal a5 operation 0x%x\n", op
);
2475 gen_illegal_opcode(env
, s
, 2);
2480 static void disas_a7(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
2484 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
2487 LOG_DISAS("disas_a7: op 0x%x r1 %d i2 0x%x\n", op
, r1
, i2
);
2489 case 0x0: /* TMLH or TMH R1,I2 [RI] */
2490 case 0x1: /* TMLL or TML R1,I2 [RI] */
2491 case 0x2: /* TMHH R1,I2 [RI] */
2492 case 0x3: /* TMHL R1,I2 [RI] */
2494 tmp2
= tcg_const_i64((uint16_t)i2
);
2497 tcg_gen_shri_i64(tmp
, tmp
, 16);
2502 tcg_gen_shri_i64(tmp
, tmp
, 48);
2505 tcg_gen_shri_i64(tmp
, tmp
, 32);
2508 tcg_gen_andi_i64(tmp
, tmp
, 0xffff);
2509 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_64
);
2510 tcg_temp_free_i64(tmp
);
2511 tcg_temp_free_i64(tmp2
);
2513 case 0x4: /* brc m1, i2 */
2514 gen_brc(r1
, s
, i2
* 2LL);
2516 case 0x5: /* BRAS R1,I2 [RI] */
2517 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 4));
2519 tcg_temp_free_i64(tmp
);
2520 gen_goto_tb(s
, 0, s
->pc
+ i2
* 2LL);
2521 s
->is_jmp
= DISAS_TB_JUMP
;
2523 case 0x6: /* BRCT R1,I2 [RI] */
2524 tmp32_1
= load_reg32(r1
);
2525 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
2526 store_reg32(r1
, tmp32_1
);
2527 gen_update_cc_op(s
);
2528 l1
= gen_new_label();
2529 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp32_1
, 0, l1
);
2530 gen_goto_tb(s
, 0, s
->pc
+ (i2
* 2LL));
2532 gen_goto_tb(s
, 1, s
->pc
+ 4);
2533 s
->is_jmp
= DISAS_TB_JUMP
;
2534 tcg_temp_free_i32(tmp32_1
);
2536 case 0x7: /* BRCTG R1,I2 [RI] */
2538 tcg_gen_subi_i64(tmp
, tmp
, 1);
2540 gen_update_cc_op(s
);
2541 l1
= gen_new_label();
2542 tcg_gen_brcondi_i64(TCG_COND_EQ
, tmp
, 0, l1
);
2543 gen_goto_tb(s
, 0, s
->pc
+ (i2
* 2LL));
2545 gen_goto_tb(s
, 1, s
->pc
+ 4);
2546 s
->is_jmp
= DISAS_TB_JUMP
;
2547 tcg_temp_free_i64(tmp
);
2549 case 0x8: /* lhi r1, i2 */
2550 tmp32_1
= tcg_const_i32(i2
);
2551 store_reg32(r1
, tmp32_1
);
2552 tcg_temp_free_i32(tmp32_1
);
2554 case 0x9: /* lghi r1, i2 */
2555 tmp
= tcg_const_i64(i2
);
2557 tcg_temp_free_i64(tmp
);
2559 case 0xa: /* AHI R1,I2 [RI] */
2560 tmp32_1
= load_reg32(r1
);
2561 tmp32_2
= tcg_temp_new_i32();
2562 tmp32_3
= tcg_const_i32(i2
);
2565 tcg_gen_subi_i32(tmp32_2
, tmp32_1
, -i2
);
2567 tcg_gen_add_i32(tmp32_2
, tmp32_1
, tmp32_3
);
2570 store_reg32(r1
, tmp32_2
);
2571 set_cc_add32(s
, tmp32_1
, tmp32_3
, tmp32_2
);
2572 tcg_temp_free_i32(tmp32_1
);
2573 tcg_temp_free_i32(tmp32_2
);
2574 tcg_temp_free_i32(tmp32_3
);
2576 case 0xb: /* aghi r1, i2 */
2578 tmp2
= tcg_const_i64(i2
);
2581 tcg_gen_subi_i64(regs
[r1
], tmp
, -i2
);
2583 tcg_gen_add_i64(regs
[r1
], tmp
, tmp2
);
2585 set_cc_add64(s
, tmp
, tmp2
, regs
[r1
]);
2586 tcg_temp_free_i64(tmp
);
2587 tcg_temp_free_i64(tmp2
);
2589 case 0xc: /* MHI R1,I2 [RI] */
2590 tmp32_1
= load_reg32(r1
);
2591 tcg_gen_muli_i32(tmp32_1
, tmp32_1
, i2
);
2592 store_reg32(r1
, tmp32_1
);
2593 tcg_temp_free_i32(tmp32_1
);
2595 case 0xd: /* MGHI R1,I2 [RI] */
2597 tcg_gen_muli_i64(tmp
, tmp
, i2
);
2599 tcg_temp_free_i64(tmp
);
2601 case 0xe: /* CHI R1,I2 [RI] */
2602 tmp32_1
= load_reg32(r1
);
2603 cmp_s32c(s
, tmp32_1
, i2
);
2604 tcg_temp_free_i32(tmp32_1
);
2606 case 0xf: /* CGHI R1,I2 [RI] */
2608 cmp_s64c(s
, tmp
, i2
);
2609 tcg_temp_free_i64(tmp
);
2612 LOG_DISAS("illegal a7 operation 0x%x\n", op
);
2613 gen_illegal_opcode(env
, s
, 2);
2618 static void disas_b2(CPUS390XState
*env
, DisasContext
*s
, int op
,
2621 TCGv_i64 tmp
, tmp2
, tmp3
;
2622 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
2625 #ifndef CONFIG_USER_ONLY
2629 r1
= (insn
>> 4) & 0xf;
2632 LOG_DISAS("disas_b2: op 0x%x r1 %d r2 %d\n", op
, r1
, r2
);
2635 case 0x22: /* IPM R1 [RRE] */
2636 tmp32_1
= tcg_const_i32(r1
);
2638 gen_helper_ipm(cpu_env
, cc_op
, tmp32_1
);
2639 tcg_temp_free_i32(tmp32_1
);
2641 case 0x41: /* CKSM R1,R2 [RRE] */
2642 tmp32_1
= tcg_const_i32(r1
);
2643 tmp32_2
= tcg_const_i32(r2
);
2644 potential_page_fault(s
);
2645 gen_helper_cksm(cpu_env
, tmp32_1
, tmp32_2
);
2646 tcg_temp_free_i32(tmp32_1
);
2647 tcg_temp_free_i32(tmp32_2
);
2648 gen_op_movi_cc(s
, 0);
2650 case 0x4e: /* SAR R1,R2 [RRE] */
2651 tmp32_1
= load_reg32(r2
);
2652 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, aregs
[r1
]));
2653 tcg_temp_free_i32(tmp32_1
);
2655 case 0x4f: /* EAR R1,R2 [RRE] */
2656 tmp32_1
= tcg_temp_new_i32();
2657 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, aregs
[r2
]));
2658 store_reg32(r1
, tmp32_1
);
2659 tcg_temp_free_i32(tmp32_1
);
2661 case 0x52: /* MSR R1,R2 [RRE] */
2662 tmp32_1
= load_reg32(r1
);
2663 tmp32_2
= load_reg32(r2
);
2664 tcg_gen_mul_i32(tmp32_1
, tmp32_1
, tmp32_2
);
2665 store_reg32(r1
, tmp32_1
);
2666 tcg_temp_free_i32(tmp32_1
);
2667 tcg_temp_free_i32(tmp32_2
);
2669 case 0x54: /* MVPG R1,R2 [RRE] */
2671 tmp2
= load_reg(r1
);
2672 tmp3
= load_reg(r2
);
2673 potential_page_fault(s
);
2674 gen_helper_mvpg(cpu_env
, tmp
, tmp2
, tmp3
);
2675 tcg_temp_free_i64(tmp
);
2676 tcg_temp_free_i64(tmp2
);
2677 tcg_temp_free_i64(tmp3
);
2678 /* XXX check CCO bit and set CC accordingly */
2679 gen_op_movi_cc(s
, 0);
2681 case 0x55: /* MVST R1,R2 [RRE] */
2682 tmp32_1
= load_reg32(0);
2683 tmp32_2
= tcg_const_i32(r1
);
2684 tmp32_3
= tcg_const_i32(r2
);
2685 potential_page_fault(s
);
2686 gen_helper_mvst(cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2687 tcg_temp_free_i32(tmp32_1
);
2688 tcg_temp_free_i32(tmp32_2
);
2689 tcg_temp_free_i32(tmp32_3
);
2690 gen_op_movi_cc(s
, 1);
2692 case 0x5d: /* CLST R1,R2 [RRE] */
2693 tmp32_1
= load_reg32(0);
2694 tmp32_2
= tcg_const_i32(r1
);
2695 tmp32_3
= tcg_const_i32(r2
);
2696 potential_page_fault(s
);
2697 gen_helper_clst(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2699 tcg_temp_free_i32(tmp32_1
);
2700 tcg_temp_free_i32(tmp32_2
);
2701 tcg_temp_free_i32(tmp32_3
);
2703 case 0x5e: /* SRST R1,R2 [RRE] */
2704 tmp32_1
= load_reg32(0);
2705 tmp32_2
= tcg_const_i32(r1
);
2706 tmp32_3
= tcg_const_i32(r2
);
2707 potential_page_fault(s
);
2708 gen_helper_srst(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
2710 tcg_temp_free_i32(tmp32_1
);
2711 tcg_temp_free_i32(tmp32_2
);
2712 tcg_temp_free_i32(tmp32_3
);
2715 #ifndef CONFIG_USER_ONLY
2716 case 0x02: /* STIDP D2(B2) [S] */
2718 check_privileged(env
, s
, ilc
);
2719 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2720 tmp
= get_address(s
, 0, b2
, d2
);
2721 potential_page_fault(s
);
2722 gen_helper_stidp(cpu_env
, tmp
);
2723 tcg_temp_free_i64(tmp
);
2725 case 0x04: /* SCK D2(B2) [S] */
2727 check_privileged(env
, s
, ilc
);
2728 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2729 tmp
= get_address(s
, 0, b2
, d2
);
2730 potential_page_fault(s
);
2731 gen_helper_sck(cc_op
, tmp
);
2733 tcg_temp_free_i64(tmp
);
2735 case 0x05: /* STCK D2(B2) [S] */
2737 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2738 tmp
= get_address(s
, 0, b2
, d2
);
2739 potential_page_fault(s
);
2740 gen_helper_stck(cc_op
, cpu_env
, tmp
);
2742 tcg_temp_free_i64(tmp
);
2744 case 0x06: /* SCKC D2(B2) [S] */
2745 /* Set Clock Comparator */
2746 check_privileged(env
, s
, ilc
);
2747 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2748 tmp
= get_address(s
, 0, b2
, d2
);
2749 potential_page_fault(s
);
2750 gen_helper_sckc(cpu_env
, tmp
);
2751 tcg_temp_free_i64(tmp
);
2753 case 0x07: /* STCKC D2(B2) [S] */
2754 /* Store Clock Comparator */
2755 check_privileged(env
, s
, ilc
);
2756 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2757 tmp
= get_address(s
, 0, b2
, d2
);
2758 potential_page_fault(s
);
2759 gen_helper_stckc(cpu_env
, tmp
);
2760 tcg_temp_free_i64(tmp
);
2762 case 0x08: /* SPT D2(B2) [S] */
2764 check_privileged(env
, s
, ilc
);
2765 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2766 tmp
= get_address(s
, 0, b2
, d2
);
2767 potential_page_fault(s
);
2768 gen_helper_spt(cpu_env
, tmp
);
2769 tcg_temp_free_i64(tmp
);
2771 case 0x09: /* STPT D2(B2) [S] */
2772 /* Store CPU Timer */
2773 check_privileged(env
, s
, ilc
);
2774 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2775 tmp
= get_address(s
, 0, b2
, d2
);
2776 potential_page_fault(s
);
2777 gen_helper_stpt(cpu_env
, tmp
);
2778 tcg_temp_free_i64(tmp
);
2780 case 0x0a: /* SPKA D2(B2) [S] */
2781 /* Set PSW Key from Address */
2782 check_privileged(env
, s
, ilc
);
2783 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2784 tmp
= get_address(s
, 0, b2
, d2
);
2785 tmp2
= tcg_temp_new_i64();
2786 tcg_gen_andi_i64(tmp2
, psw_mask
, ~PSW_MASK_KEY
);
2787 tcg_gen_shli_i64(tmp
, tmp
, PSW_SHIFT_KEY
- 4);
2788 tcg_gen_or_i64(psw_mask
, tmp2
, tmp
);
2789 tcg_temp_free_i64(tmp2
);
2790 tcg_temp_free_i64(tmp
);
2792 case 0x0d: /* PTLB [S] */
2794 check_privileged(env
, s
, ilc
);
2795 gen_helper_ptlb(cpu_env
);
2797 case 0x10: /* SPX D2(B2) [S] */
2798 /* Set Prefix Register */
2799 check_privileged(env
, s
, ilc
);
2800 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2801 tmp
= get_address(s
, 0, b2
, d2
);
2802 potential_page_fault(s
);
2803 gen_helper_spx(cpu_env
, tmp
);
2804 tcg_temp_free_i64(tmp
);
2806 case 0x11: /* STPX D2(B2) [S] */
2808 check_privileged(env
, s
, ilc
);
2809 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2810 tmp
= get_address(s
, 0, b2
, d2
);
2811 tmp2
= tcg_temp_new_i64();
2812 tcg_gen_ld_i64(tmp2
, cpu_env
, offsetof(CPUS390XState
, psa
));
2813 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2814 tcg_temp_free_i64(tmp
);
2815 tcg_temp_free_i64(tmp2
);
2817 case 0x12: /* STAP D2(B2) [S] */
2818 /* Store CPU Address */
2819 check_privileged(env
, s
, ilc
);
2820 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2821 tmp
= get_address(s
, 0, b2
, d2
);
2822 tmp2
= tcg_temp_new_i64();
2823 tmp32_1
= tcg_temp_new_i32();
2824 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, cpu_num
));
2825 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
2826 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2827 tcg_temp_free_i64(tmp
);
2828 tcg_temp_free_i64(tmp2
);
2829 tcg_temp_free_i32(tmp32_1
);
2831 case 0x21: /* IPTE R1,R2 [RRE] */
2832 /* Invalidate PTE */
2833 check_privileged(env
, s
, ilc
);
2834 r1
= (insn
>> 4) & 0xf;
2837 tmp2
= load_reg(r2
);
2838 gen_helper_ipte(cpu_env
, tmp
, tmp2
);
2839 tcg_temp_free_i64(tmp
);
2840 tcg_temp_free_i64(tmp2
);
2842 case 0x29: /* ISKE R1,R2 [RRE] */
2843 /* Insert Storage Key Extended */
2844 check_privileged(env
, s
, ilc
);
2845 r1
= (insn
>> 4) & 0xf;
2848 tmp2
= tcg_temp_new_i64();
2849 gen_helper_iske(tmp2
, cpu_env
, tmp
);
2850 store_reg(r1
, tmp2
);
2851 tcg_temp_free_i64(tmp
);
2852 tcg_temp_free_i64(tmp2
);
2854 case 0x2a: /* RRBE R1,R2 [RRE] */
2855 /* Set Storage Key Extended */
2856 check_privileged(env
, s
, ilc
);
2857 r1
= (insn
>> 4) & 0xf;
2859 tmp32_1
= load_reg32(r1
);
2861 gen_helper_rrbe(cc_op
, cpu_env
, tmp32_1
, tmp
);
2863 tcg_temp_free_i32(tmp32_1
);
2864 tcg_temp_free_i64(tmp
);
2866 case 0x2b: /* SSKE R1,R2 [RRE] */
2867 /* Set Storage Key Extended */
2868 check_privileged(env
, s
, ilc
);
2869 r1
= (insn
>> 4) & 0xf;
2871 tmp32_1
= load_reg32(r1
);
2873 gen_helper_sske(cpu_env
, tmp32_1
, tmp
);
2874 tcg_temp_free_i32(tmp32_1
);
2875 tcg_temp_free_i64(tmp
);
2877 case 0x34: /* STCH ? */
2878 /* Store Subchannel */
2879 check_privileged(env
, s
, ilc
);
2880 gen_op_movi_cc(s
, 3);
2882 case 0x46: /* STURA R1,R2 [RRE] */
2883 /* Store Using Real Address */
2884 check_privileged(env
, s
, ilc
);
2885 r1
= (insn
>> 4) & 0xf;
2887 tmp32_1
= load_reg32(r1
);
2889 potential_page_fault(s
);
2890 gen_helper_stura(cpu_env
, tmp
, tmp32_1
);
2891 tcg_temp_free_i32(tmp32_1
);
2892 tcg_temp_free_i64(tmp
);
2894 case 0x50: /* CSP R1,R2 [RRE] */
2895 /* Compare And Swap And Purge */
2896 check_privileged(env
, s
, ilc
);
2897 r1
= (insn
>> 4) & 0xf;
2899 tmp32_1
= tcg_const_i32(r1
);
2900 tmp32_2
= tcg_const_i32(r2
);
2901 gen_helper_csp(cc_op
, cpu_env
, tmp32_1
, tmp32_2
);
2903 tcg_temp_free_i32(tmp32_1
);
2904 tcg_temp_free_i32(tmp32_2
);
2906 case 0x5f: /* CHSC ? */
2907 /* Channel Subsystem Call */
2908 check_privileged(env
, s
, ilc
);
2909 gen_op_movi_cc(s
, 3);
2911 case 0x78: /* STCKE D2(B2) [S] */
2912 /* Store Clock Extended */
2913 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2914 tmp
= get_address(s
, 0, b2
, d2
);
2915 potential_page_fault(s
);
2916 gen_helper_stcke(cc_op
, cpu_env
, tmp
);
2918 tcg_temp_free_i64(tmp
);
2920 case 0x79: /* SACF D2(B2) [S] */
2921 /* Store Clock Extended */
2922 check_privileged(env
, s
, ilc
);
2923 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2924 tmp
= get_address(s
, 0, b2
, d2
);
2925 potential_page_fault(s
);
2926 gen_helper_sacf(cpu_env
, tmp
);
2927 tcg_temp_free_i64(tmp
);
2928 /* addressing mode has changed, so end the block */
2931 s
->is_jmp
= DISAS_EXCP
;
2933 case 0x7d: /* STSI D2,(B2) [S] */
2934 check_privileged(env
, s
, ilc
);
2935 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2936 tmp
= get_address(s
, 0, b2
, d2
);
2937 tmp32_1
= load_reg32(0);
2938 tmp32_2
= load_reg32(1);
2939 potential_page_fault(s
);
2940 gen_helper_stsi(cc_op
, cpu_env
, tmp
, tmp32_1
, tmp32_2
);
2942 tcg_temp_free_i64(tmp
);
2943 tcg_temp_free_i32(tmp32_1
);
2944 tcg_temp_free_i32(tmp32_2
);
2946 case 0x9d: /* LFPC D2(B2) [S] */
2947 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2948 tmp
= get_address(s
, 0, b2
, d2
);
2949 tmp2
= tcg_temp_new_i64();
2950 tmp32_1
= tcg_temp_new_i32();
2951 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
2952 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
2953 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, fpc
));
2954 tcg_temp_free_i64(tmp
);
2955 tcg_temp_free_i64(tmp2
);
2956 tcg_temp_free_i32(tmp32_1
);
2958 case 0xb1: /* STFL D2(B2) [S] */
2959 /* Store Facility List (CPU features) at 200 */
2960 check_privileged(env
, s
, ilc
);
2961 tmp2
= tcg_const_i64(0xc0000000);
2962 tmp
= tcg_const_i64(200);
2963 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
2964 tcg_temp_free_i64(tmp2
);
2965 tcg_temp_free_i64(tmp
);
2967 case 0xb2: /* LPSWE D2(B2) [S] */
2968 /* Load PSW Extended */
2969 check_privileged(env
, s
, ilc
);
2970 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
2971 tmp
= get_address(s
, 0, b2
, d2
);
2972 tmp2
= tcg_temp_new_i64();
2973 tmp3
= tcg_temp_new_i64();
2974 tcg_gen_qemu_ld64(tmp2
, tmp
, get_mem_index(s
));
2975 tcg_gen_addi_i64(tmp
, tmp
, 8);
2976 tcg_gen_qemu_ld64(tmp3
, tmp
, get_mem_index(s
));
2977 gen_helper_load_psw(cpu_env
, tmp2
, tmp3
);
2978 /* we need to keep cc_op intact */
2979 s
->is_jmp
= DISAS_JUMP
;
2980 tcg_temp_free_i64(tmp
);
2981 tcg_temp_free_i64(tmp2
);
2982 tcg_temp_free_i64(tmp3
);
2984 case 0x20: /* SERVC R1,R2 [RRE] */
2985 /* SCLP Service call (PV hypercall) */
2986 check_privileged(env
, s
, ilc
);
2987 potential_page_fault(s
);
2988 tmp32_1
= load_reg32(r2
);
2990 gen_helper_servc(cc_op
, cpu_env
, tmp32_1
, tmp
);
2992 tcg_temp_free_i32(tmp32_1
);
2993 tcg_temp_free_i64(tmp
);
2997 LOG_DISAS("illegal b2 operation 0x%x\n", op
);
2998 gen_illegal_opcode(env
, s
, ilc
);
3003 static void disas_b3(CPUS390XState
*env
, DisasContext
*s
, int op
, int m3
,
3007 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
3008 LOG_DISAS("disas_b3: op 0x%x m3 0x%x r1 %d r2 %d\n", op
, m3
, r1
, r2
);
3009 #define FP_HELPER(i) \
3010 tmp32_1 = tcg_const_i32(r1); \
3011 tmp32_2 = tcg_const_i32(r2); \
3012 gen_helper_ ## i(cpu_env, tmp32_1, tmp32_2); \
3013 tcg_temp_free_i32(tmp32_1); \
3014 tcg_temp_free_i32(tmp32_2);
3016 #define FP_HELPER_CC(i) \
3017 tmp32_1 = tcg_const_i32(r1); \
3018 tmp32_2 = tcg_const_i32(r2); \
3019 gen_helper_ ## i(cc_op, cpu_env, tmp32_1, tmp32_2); \
3021 tcg_temp_free_i32(tmp32_1); \
3022 tcg_temp_free_i32(tmp32_2);
3025 case 0x0: /* LPEBR R1,R2 [RRE] */
3026 FP_HELPER_CC(lpebr
);
3028 case 0x2: /* LTEBR R1,R2 [RRE] */
3029 FP_HELPER_CC(ltebr
);
3031 case 0x3: /* LCEBR R1,R2 [RRE] */
3032 FP_HELPER_CC(lcebr
);
3034 case 0x4: /* LDEBR R1,R2 [RRE] */
3037 case 0x5: /* LXDBR R1,R2 [RRE] */
3040 case 0x9: /* CEBR R1,R2 [RRE] */
3043 case 0xa: /* AEBR R1,R2 [RRE] */
3046 case 0xb: /* SEBR R1,R2 [RRE] */
3049 case 0xd: /* DEBR R1,R2 [RRE] */
3052 case 0x10: /* LPDBR R1,R2 [RRE] */
3053 FP_HELPER_CC(lpdbr
);
3055 case 0x12: /* LTDBR R1,R2 [RRE] */
3056 FP_HELPER_CC(ltdbr
);
3058 case 0x13: /* LCDBR R1,R2 [RRE] */
3059 FP_HELPER_CC(lcdbr
);
3061 case 0x15: /* SQBDR R1,R2 [RRE] */
3064 case 0x17: /* MEEBR R1,R2 [RRE] */
3067 case 0x19: /* CDBR R1,R2 [RRE] */
3070 case 0x1a: /* ADBR R1,R2 [RRE] */
3073 case 0x1b: /* SDBR R1,R2 [RRE] */
3076 case 0x1c: /* MDBR R1,R2 [RRE] */
3079 case 0x1d: /* DDBR R1,R2 [RRE] */
3082 case 0xe: /* MAEBR R1,R3,R2 [RRF] */
3083 case 0x1e: /* MADBR R1,R3,R2 [RRF] */
3084 case 0x1f: /* MSDBR R1,R3,R2 [RRF] */
3085 /* for RRF insns, m3 is R1, r1 is R3, and r2 is R2 */
3086 tmp32_1
= tcg_const_i32(m3
);
3087 tmp32_2
= tcg_const_i32(r2
);
3088 tmp32_3
= tcg_const_i32(r1
);
3091 gen_helper_maebr(cpu_env
, tmp32_1
, tmp32_3
, tmp32_2
);
3094 gen_helper_madbr(cpu_env
, tmp32_1
, tmp32_3
, tmp32_2
);
3097 gen_helper_msdbr(cpu_env
, tmp32_1
, tmp32_3
, tmp32_2
);
3102 tcg_temp_free_i32(tmp32_1
);
3103 tcg_temp_free_i32(tmp32_2
);
3104 tcg_temp_free_i32(tmp32_3
);
3106 case 0x40: /* LPXBR R1,R2 [RRE] */
3107 FP_HELPER_CC(lpxbr
);
3109 case 0x42: /* LTXBR R1,R2 [RRE] */
3110 FP_HELPER_CC(ltxbr
);
3112 case 0x43: /* LCXBR R1,R2 [RRE] */
3113 FP_HELPER_CC(lcxbr
);
3115 case 0x44: /* LEDBR R1,R2 [RRE] */
3118 case 0x45: /* LDXBR R1,R2 [RRE] */
3121 case 0x46: /* LEXBR R1,R2 [RRE] */
3124 case 0x49: /* CXBR R1,R2 [RRE] */
3127 case 0x4a: /* AXBR R1,R2 [RRE] */
3130 case 0x4b: /* SXBR R1,R2 [RRE] */
3133 case 0x4c: /* MXBR R1,R2 [RRE] */
3136 case 0x4d: /* DXBR R1,R2 [RRE] */
3139 case 0x65: /* LXR R1,R2 [RRE] */
3140 tmp
= load_freg(r2
);
3141 store_freg(r1
, tmp
);
3142 tcg_temp_free_i64(tmp
);
3143 tmp
= load_freg(r2
+ 2);
3144 store_freg(r1
+ 2, tmp
);
3145 tcg_temp_free_i64(tmp
);
3147 case 0x74: /* LZER R1 [RRE] */
3148 tmp32_1
= tcg_const_i32(r1
);
3149 gen_helper_lzer(cpu_env
, tmp32_1
);
3150 tcg_temp_free_i32(tmp32_1
);
3152 case 0x75: /* LZDR R1 [RRE] */
3153 tmp32_1
= tcg_const_i32(r1
);
3154 gen_helper_lzdr(cpu_env
, tmp32_1
);
3155 tcg_temp_free_i32(tmp32_1
);
3157 case 0x76: /* LZXR R1 [RRE] */
3158 tmp32_1
= tcg_const_i32(r1
);
3159 gen_helper_lzxr(cpu_env
, tmp32_1
);
3160 tcg_temp_free_i32(tmp32_1
);
3162 case 0x84: /* SFPC R1 [RRE] */
3163 tmp32_1
= load_reg32(r1
);
3164 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, fpc
));
3165 tcg_temp_free_i32(tmp32_1
);
3167 case 0x8c: /* EFPC R1 [RRE] */
3168 tmp32_1
= tcg_temp_new_i32();
3169 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, fpc
));
3170 store_reg32(r1
, tmp32_1
);
3171 tcg_temp_free_i32(tmp32_1
);
3173 case 0x94: /* CEFBR R1,R2 [RRE] */
3174 case 0x95: /* CDFBR R1,R2 [RRE] */
3175 case 0x96: /* CXFBR R1,R2 [RRE] */
3176 tmp32_1
= tcg_const_i32(r1
);
3177 tmp32_2
= load_reg32(r2
);
3180 gen_helper_cefbr(cpu_env
, tmp32_1
, tmp32_2
);
3183 gen_helper_cdfbr(cpu_env
, tmp32_1
, tmp32_2
);
3186 gen_helper_cxfbr(cpu_env
, tmp32_1
, tmp32_2
);
3191 tcg_temp_free_i32(tmp32_1
);
3192 tcg_temp_free_i32(tmp32_2
);
3194 case 0x98: /* CFEBR R1,R2 [RRE] */
3195 case 0x99: /* CFDBR R1,R2 [RRE] */
3196 case 0x9a: /* CFXBR R1,R2 [RRE] */
3197 tmp32_1
= tcg_const_i32(r1
);
3198 tmp32_2
= tcg_const_i32(r2
);
3199 tmp32_3
= tcg_const_i32(m3
);
3202 gen_helper_cfebr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
3205 gen_helper_cfdbr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
3208 gen_helper_cfxbr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
3214 tcg_temp_free_i32(tmp32_1
);
3215 tcg_temp_free_i32(tmp32_2
);
3216 tcg_temp_free_i32(tmp32_3
);
3218 case 0xa4: /* CEGBR R1,R2 [RRE] */
3219 case 0xa5: /* CDGBR R1,R2 [RRE] */
3220 tmp32_1
= tcg_const_i32(r1
);
3224 gen_helper_cegbr(cpu_env
, tmp32_1
, tmp
);
3227 gen_helper_cdgbr(cpu_env
, tmp32_1
, tmp
);
3232 tcg_temp_free_i32(tmp32_1
);
3233 tcg_temp_free_i64(tmp
);
3235 case 0xa6: /* CXGBR R1,R2 [RRE] */
3236 tmp32_1
= tcg_const_i32(r1
);
3238 gen_helper_cxgbr(cpu_env
, tmp32_1
, tmp
);
3239 tcg_temp_free_i32(tmp32_1
);
3240 tcg_temp_free_i64(tmp
);
3242 case 0xa8: /* CGEBR R1,R2 [RRE] */
3243 tmp32_1
= tcg_const_i32(r1
);
3244 tmp32_2
= tcg_const_i32(r2
);
3245 tmp32_3
= tcg_const_i32(m3
);
3246 gen_helper_cgebr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
3248 tcg_temp_free_i32(tmp32_1
);
3249 tcg_temp_free_i32(tmp32_2
);
3250 tcg_temp_free_i32(tmp32_3
);
3252 case 0xa9: /* CGDBR R1,R2 [RRE] */
3253 tmp32_1
= tcg_const_i32(r1
);
3254 tmp32_2
= tcg_const_i32(r2
);
3255 tmp32_3
= tcg_const_i32(m3
);
3256 gen_helper_cgdbr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
3258 tcg_temp_free_i32(tmp32_1
);
3259 tcg_temp_free_i32(tmp32_2
);
3260 tcg_temp_free_i32(tmp32_3
);
3262 case 0xaa: /* CGXBR R1,R2 [RRE] */
3263 tmp32_1
= tcg_const_i32(r1
);
3264 tmp32_2
= tcg_const_i32(r2
);
3265 tmp32_3
= tcg_const_i32(m3
);
3266 gen_helper_cgxbr(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp32_3
);
3268 tcg_temp_free_i32(tmp32_1
);
3269 tcg_temp_free_i32(tmp32_2
);
3270 tcg_temp_free_i32(tmp32_3
);
3273 LOG_DISAS("illegal b3 operation 0x%x\n", op
);
3274 gen_illegal_opcode(env
, s
, 2);
3282 static void disas_b9(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
3285 TCGv_i64 tmp
, tmp2
, tmp3
;
3286 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
3288 LOG_DISAS("disas_b9: op 0x%x r1 %d r2 %d\n", op
, r1
, r2
);
3290 case 0x0: /* LPGR R1,R2 [RRE] */
3291 case 0x1: /* LNGR R1,R2 [RRE] */
3292 case 0x2: /* LTGR R1,R2 [RRE] */
3293 case 0x3: /* LCGR R1,R2 [RRE] */
3294 case 0x10: /* LPGFR R1,R2 [RRE] */
3295 case 0x11: /* LNFGR R1,R2 [RRE] */
3296 case 0x12: /* LTGFR R1,R2 [RRE] */
3297 case 0x13: /* LCGFR R1,R2 [RRE] */
3299 tmp
= load_reg32_i64(r2
);
3304 case 0x0: /* LP?GR */
3305 set_cc_abs64(s
, tmp
);
3306 gen_helper_abs_i64(tmp
, tmp
);
3309 case 0x1: /* LN?GR */
3310 set_cc_nabs64(s
, tmp
);
3311 gen_helper_nabs_i64(tmp
, tmp
);
3314 case 0x2: /* LT?GR */
3320 case 0x3: /* LC?GR */
3321 tcg_gen_neg_i64(regs
[r1
], tmp
);
3322 set_cc_comp64(s
, regs
[r1
]);
3325 tcg_temp_free_i64(tmp
);
3327 case 0x4: /* LGR R1,R2 [RRE] */
3328 store_reg(r1
, regs
[r2
]);
3330 case 0x6: /* LGBR R1,R2 [RRE] */
3331 tmp2
= load_reg(r2
);
3332 tcg_gen_ext8s_i64(tmp2
, tmp2
);
3333 store_reg(r1
, tmp2
);
3334 tcg_temp_free_i64(tmp2
);
3336 case 0x8: /* AGR R1,R2 [RRE] */
3337 case 0xa: /* ALGR R1,R2 [RRE] */
3339 tmp2
= load_reg(r2
);
3340 tmp3
= tcg_temp_new_i64();
3341 tcg_gen_add_i64(tmp3
, tmp
, tmp2
);
3342 store_reg(r1
, tmp3
);
3345 set_cc_add64(s
, tmp
, tmp2
, tmp3
);
3348 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3353 tcg_temp_free_i64(tmp
);
3354 tcg_temp_free_i64(tmp2
);
3355 tcg_temp_free_i64(tmp3
);
3357 case 0x9: /* SGR R1,R2 [RRE] */
3358 case 0xb: /* SLGR R1,R2 [RRE] */
3359 case 0x1b: /* SLGFR R1,R2 [RRE] */
3360 case 0x19: /* SGFR R1,R2 [RRE] */
3364 tmp32_1
= load_reg32(r2
);
3365 tmp2
= tcg_temp_new_i64();
3366 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
3367 tcg_temp_free_i32(tmp32_1
);
3370 tmp32_1
= load_reg32(r2
);
3371 tmp2
= tcg_temp_new_i64();
3372 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
3373 tcg_temp_free_i32(tmp32_1
);
3376 tmp2
= load_reg(r2
);
3379 tmp3
= tcg_temp_new_i64();
3380 tcg_gen_sub_i64(tmp3
, tmp
, tmp2
);
3381 store_reg(r1
, tmp3
);
3385 set_cc_sub64(s
, tmp
, tmp2
, tmp3
);
3389 set_cc_subu64(s
, tmp
, tmp2
, tmp3
);
3394 tcg_temp_free_i64(tmp
);
3395 tcg_temp_free_i64(tmp2
);
3396 tcg_temp_free_i64(tmp3
);
3398 case 0xc: /* MSGR R1,R2 [RRE] */
3399 case 0x1c: /* MSGFR R1,R2 [RRE] */
3401 tmp2
= load_reg(r2
);
3403 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3405 tcg_gen_mul_i64(tmp
, tmp
, tmp2
);
3407 tcg_temp_free_i64(tmp
);
3408 tcg_temp_free_i64(tmp2
);
3410 case 0xd: /* DSGR R1,R2 [RRE] */
3411 case 0x1d: /* DSGFR R1,R2 [RRE] */
3412 tmp
= load_reg(r1
+ 1);
3414 tmp2
= load_reg(r2
);
3416 tmp32_1
= load_reg32(r2
);
3417 tmp2
= tcg_temp_new_i64();
3418 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
3419 tcg_temp_free_i32(tmp32_1
);
3421 tmp3
= tcg_temp_new_i64();
3422 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
3423 store_reg(r1
+ 1, tmp3
);
3424 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
3425 store_reg(r1
, tmp3
);
3426 tcg_temp_free_i64(tmp
);
3427 tcg_temp_free_i64(tmp2
);
3428 tcg_temp_free_i64(tmp3
);
3430 case 0x14: /* LGFR R1,R2 [RRE] */
3431 tmp32_1
= load_reg32(r2
);
3432 tmp
= tcg_temp_new_i64();
3433 tcg_gen_ext_i32_i64(tmp
, tmp32_1
);
3435 tcg_temp_free_i32(tmp32_1
);
3436 tcg_temp_free_i64(tmp
);
3438 case 0x16: /* LLGFR R1,R2 [RRE] */
3439 tmp32_1
= load_reg32(r2
);
3440 tmp
= tcg_temp_new_i64();
3441 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3443 tcg_temp_free_i32(tmp32_1
);
3444 tcg_temp_free_i64(tmp
);
3446 case 0x17: /* LLGTR R1,R2 [RRE] */
3447 tmp32_1
= load_reg32(r2
);
3448 tmp
= tcg_temp_new_i64();
3449 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0x7fffffffUL
);
3450 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3452 tcg_temp_free_i32(tmp32_1
);
3453 tcg_temp_free_i64(tmp
);
3455 case 0x18: /* AGFR R1,R2 [RRE] */
3456 case 0x1a: /* ALGFR R1,R2 [RRE] */
3457 tmp32_1
= load_reg32(r2
);
3458 tmp2
= tcg_temp_new_i64();
3460 tcg_gen_ext_i32_i64(tmp2
, tmp32_1
);
3462 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
3464 tcg_temp_free_i32(tmp32_1
);
3466 tmp3
= tcg_temp_new_i64();
3467 tcg_gen_add_i64(tmp3
, tmp
, tmp2
);
3468 store_reg(r1
, tmp3
);
3470 set_cc_add64(s
, tmp
, tmp2
, tmp3
);
3472 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3474 tcg_temp_free_i64(tmp
);
3475 tcg_temp_free_i64(tmp2
);
3476 tcg_temp_free_i64(tmp3
);
3478 case 0x0f: /* LRVGR R1,R2 [RRE] */
3479 tcg_gen_bswap64_i64(regs
[r1
], regs
[r2
]);
3481 case 0x1f: /* LRVR R1,R2 [RRE] */
3482 tmp32_1
= load_reg32(r2
);
3483 tcg_gen_bswap32_i32(tmp32_1
, tmp32_1
);
3484 store_reg32(r1
, tmp32_1
);
3485 tcg_temp_free_i32(tmp32_1
);
3487 case 0x20: /* CGR R1,R2 [RRE] */
3488 case 0x30: /* CGFR R1,R2 [RRE] */
3489 tmp2
= load_reg(r2
);
3491 tcg_gen_ext32s_i64(tmp2
, tmp2
);
3494 cmp_s64(s
, tmp
, tmp2
);
3495 tcg_temp_free_i64(tmp
);
3496 tcg_temp_free_i64(tmp2
);
3498 case 0x21: /* CLGR R1,R2 [RRE] */
3499 case 0x31: /* CLGFR R1,R2 [RRE] */
3500 tmp2
= load_reg(r2
);
3502 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3505 cmp_u64(s
, tmp
, tmp2
);
3506 tcg_temp_free_i64(tmp
);
3507 tcg_temp_free_i64(tmp2
);
3509 case 0x26: /* LBR R1,R2 [RRE] */
3510 tmp32_1
= load_reg32(r2
);
3511 tcg_gen_ext8s_i32(tmp32_1
, tmp32_1
);
3512 store_reg32(r1
, tmp32_1
);
3513 tcg_temp_free_i32(tmp32_1
);
3515 case 0x27: /* LHR R1,R2 [RRE] */
3516 tmp32_1
= load_reg32(r2
);
3517 tcg_gen_ext16s_i32(tmp32_1
, tmp32_1
);
3518 store_reg32(r1
, tmp32_1
);
3519 tcg_temp_free_i32(tmp32_1
);
3521 case 0x80: /* NGR R1,R2 [RRE] */
3522 case 0x81: /* OGR R1,R2 [RRE] */
3523 case 0x82: /* XGR R1,R2 [RRE] */
3525 tmp2
= load_reg(r2
);
3528 tcg_gen_and_i64(tmp
, tmp
, tmp2
);
3531 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
3534 tcg_gen_xor_i64(tmp
, tmp
, tmp2
);
3540 set_cc_nz_u64(s
, tmp
);
3541 tcg_temp_free_i64(tmp
);
3542 tcg_temp_free_i64(tmp2
);
3544 case 0x83: /* FLOGR R1,R2 [RRE] */
3546 tmp32_1
= tcg_const_i32(r1
);
3547 gen_helper_flogr(cc_op
, cpu_env
, tmp32_1
, tmp
);
3549 tcg_temp_free_i64(tmp
);
3550 tcg_temp_free_i32(tmp32_1
);
3552 case 0x84: /* LLGCR R1,R2 [RRE] */
3554 tcg_gen_andi_i64(tmp
, tmp
, 0xff);
3556 tcg_temp_free_i64(tmp
);
3558 case 0x85: /* LLGHR R1,R2 [RRE] */
3560 tcg_gen_andi_i64(tmp
, tmp
, 0xffff);
3562 tcg_temp_free_i64(tmp
);
3564 case 0x87: /* DLGR R1,R2 [RRE] */
3565 tmp32_1
= tcg_const_i32(r1
);
3567 gen_helper_dlg(cpu_env
, tmp32_1
, tmp
);
3568 tcg_temp_free_i64(tmp
);
3569 tcg_temp_free_i32(tmp32_1
);
3571 case 0x88: /* ALCGR R1,R2 [RRE] */
3573 tmp2
= load_reg(r2
);
3574 tmp3
= tcg_temp_new_i64();
3576 tcg_gen_extu_i32_i64(tmp3
, cc_op
);
3577 tcg_gen_shri_i64(tmp3
, tmp3
, 1);
3578 tcg_gen_andi_i64(tmp3
, tmp3
, 1);
3579 tcg_gen_add_i64(tmp3
, tmp2
, tmp3
);
3580 tcg_gen_add_i64(tmp3
, tmp
, tmp3
);
3581 store_reg(r1
, tmp3
);
3582 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3583 tcg_temp_free_i64(tmp
);
3584 tcg_temp_free_i64(tmp2
);
3585 tcg_temp_free_i64(tmp3
);
3587 case 0x89: /* SLBGR R1,R2 [RRE] */
3589 tmp2
= load_reg(r2
);
3590 tmp32_1
= tcg_const_i32(r1
);
3592 gen_helper_slbg(cc_op
, cpu_env
, cc_op
, tmp32_1
, tmp
, tmp2
);
3594 tcg_temp_free_i64(tmp
);
3595 tcg_temp_free_i64(tmp2
);
3596 tcg_temp_free_i32(tmp32_1
);
3598 case 0x94: /* LLCR R1,R2 [RRE] */
3599 tmp32_1
= load_reg32(r2
);
3600 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0xff);
3601 store_reg32(r1
, tmp32_1
);
3602 tcg_temp_free_i32(tmp32_1
);
3604 case 0x95: /* LLHR R1,R2 [RRE] */
3605 tmp32_1
= load_reg32(r2
);
3606 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, 0xffff);
3607 store_reg32(r1
, tmp32_1
);
3608 tcg_temp_free_i32(tmp32_1
);
3610 case 0x96: /* MLR R1,R2 [RRE] */
3611 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3612 tmp2
= load_reg(r2
);
3613 tmp3
= load_reg((r1
+ 1) & 15);
3614 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3615 tcg_gen_ext32u_i64(tmp3
, tmp3
);
3616 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
3617 store_reg32_i64((r1
+ 1) & 15, tmp2
);
3618 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
3619 store_reg32_i64(r1
, tmp2
);
3620 tcg_temp_free_i64(tmp2
);
3621 tcg_temp_free_i64(tmp3
);
3623 case 0x97: /* DLR R1,R2 [RRE] */
3624 /* reg(r1) = reg(r1, r1+1) % reg(r2) */
3625 /* reg(r1+1) = reg(r1, r1+1) / reg(r2) */
3627 tmp2
= load_reg(r2
);
3628 tmp3
= load_reg((r1
+ 1) & 15);
3629 tcg_gen_ext32u_i64(tmp2
, tmp2
);
3630 tcg_gen_ext32u_i64(tmp3
, tmp3
);
3631 tcg_gen_shli_i64(tmp
, tmp
, 32);
3632 tcg_gen_or_i64(tmp
, tmp
, tmp3
);
3634 tcg_gen_rem_i64(tmp3
, tmp
, tmp2
);
3635 tcg_gen_div_i64(tmp
, tmp
, tmp2
);
3636 store_reg32_i64((r1
+ 1) & 15, tmp
);
3637 store_reg32_i64(r1
, tmp3
);
3638 tcg_temp_free_i64(tmp
);
3639 tcg_temp_free_i64(tmp2
);
3640 tcg_temp_free_i64(tmp3
);
3642 case 0x98: /* ALCR R1,R2 [RRE] */
3643 tmp32_1
= load_reg32(r1
);
3644 tmp32_2
= load_reg32(r2
);
3645 tmp32_3
= tcg_temp_new_i32();
3646 /* XXX possible optimization point */
3648 gen_helper_addc_u32(tmp32_3
, cc_op
, tmp32_1
, tmp32_2
);
3649 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3650 store_reg32(r1
, tmp32_3
);
3651 tcg_temp_free_i32(tmp32_1
);
3652 tcg_temp_free_i32(tmp32_2
);
3653 tcg_temp_free_i32(tmp32_3
);
3655 case 0x99: /* SLBR R1,R2 [RRE] */
3656 tmp32_1
= load_reg32(r2
);
3657 tmp32_2
= tcg_const_i32(r1
);
3659 gen_helper_slb(cc_op
, cpu_env
, cc_op
, tmp32_2
, tmp32_1
);
3661 tcg_temp_free_i32(tmp32_1
);
3662 tcg_temp_free_i32(tmp32_2
);
3665 LOG_DISAS("illegal b9 operation 0x%x\n", op
);
3666 gen_illegal_opcode(env
, s
, 2);
3671 static void disas_c0(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
, int i2
)
3674 TCGv_i32 tmp32_1
, tmp32_2
;
3675 uint64_t target
= s
->pc
+ i2
* 2LL;
3678 LOG_DISAS("disas_c0: op 0x%x r1 %d i2 %d\n", op
, r1
, i2
);
3681 case 0: /* larl r1, i2 */
3682 tmp
= tcg_const_i64(target
);
3684 tcg_temp_free_i64(tmp
);
3686 case 0x1: /* LGFI R1,I2 [RIL] */
3687 tmp
= tcg_const_i64((int64_t)i2
);
3689 tcg_temp_free_i64(tmp
);
3691 case 0x4: /* BRCL M1,I2 [RIL] */
3692 /* m1 & (1 << (3 - cc)) */
3693 tmp32_1
= tcg_const_i32(3);
3694 tmp32_2
= tcg_const_i32(1);
3696 tcg_gen_sub_i32(tmp32_1
, tmp32_1
, cc_op
);
3697 tcg_gen_shl_i32(tmp32_2
, tmp32_2
, tmp32_1
);
3698 tcg_temp_free_i32(tmp32_1
);
3699 tmp32_1
= tcg_const_i32(r1
); /* m1 == r1 */
3700 tcg_gen_and_i32(tmp32_1
, tmp32_1
, tmp32_2
);
3701 l1
= gen_new_label();
3702 tcg_gen_brcondi_i32(TCG_COND_EQ
, tmp32_1
, 0, l1
);
3703 gen_goto_tb(s
, 0, target
);
3705 gen_goto_tb(s
, 1, s
->pc
+ 6);
3706 s
->is_jmp
= DISAS_TB_JUMP
;
3707 tcg_temp_free_i32(tmp32_1
);
3708 tcg_temp_free_i32(tmp32_2
);
3710 case 0x5: /* brasl r1, i2 */
3711 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 6));
3713 tcg_temp_free_i64(tmp
);
3714 gen_goto_tb(s
, 0, target
);
3715 s
->is_jmp
= DISAS_TB_JUMP
;
3717 case 0x7: /* XILF R1,I2 [RIL] */
3718 case 0xb: /* NILF R1,I2 [RIL] */
3719 case 0xd: /* OILF R1,I2 [RIL] */
3720 tmp32_1
= load_reg32(r1
);
3723 tcg_gen_xori_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3726 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3729 tcg_gen_ori_i32(tmp32_1
, tmp32_1
, (uint32_t)i2
);
3734 store_reg32(r1
, tmp32_1
);
3735 set_cc_nz_u32(s
, tmp32_1
);
3736 tcg_temp_free_i32(tmp32_1
);
3738 case 0x9: /* IILF R1,I2 [RIL] */
3739 tmp32_1
= tcg_const_i32((uint32_t)i2
);
3740 store_reg32(r1
, tmp32_1
);
3741 tcg_temp_free_i32(tmp32_1
);
3743 case 0xa: /* NIHF R1,I2 [RIL] */
3745 tmp32_1
= tcg_temp_new_i32();
3746 tcg_gen_andi_i64(tmp
, tmp
, (((uint64_t)((uint32_t)i2
)) << 32)
3749 tcg_gen_shri_i64(tmp
, tmp
, 32);
3750 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
3751 set_cc_nz_u32(s
, tmp32_1
);
3752 tcg_temp_free_i64(tmp
);
3753 tcg_temp_free_i32(tmp32_1
);
3755 case 0xe: /* LLIHF R1,I2 [RIL] */
3756 tmp
= tcg_const_i64(((uint64_t)(uint32_t)i2
) << 32);
3758 tcg_temp_free_i64(tmp
);
3760 case 0xf: /* LLILF R1,I2 [RIL] */
3761 tmp
= tcg_const_i64((uint32_t)i2
);
3763 tcg_temp_free_i64(tmp
);
3766 LOG_DISAS("illegal c0 operation 0x%x\n", op
);
3767 gen_illegal_opcode(env
, s
, 3);
3772 static void disas_c2(CPUS390XState
*env
, DisasContext
*s
, int op
, int r1
,
3775 TCGv_i64 tmp
, tmp2
, tmp3
;
3776 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
;
3779 case 0x4: /* SLGFI R1,I2 [RIL] */
3780 case 0xa: /* ALGFI R1,I2 [RIL] */
3782 tmp2
= tcg_const_i64((uint64_t)(uint32_t)i2
);
3783 tmp3
= tcg_temp_new_i64();
3786 tcg_gen_sub_i64(tmp3
, tmp
, tmp2
);
3787 set_cc_subu64(s
, tmp
, tmp2
, tmp3
);
3790 tcg_gen_add_i64(tmp3
, tmp
, tmp2
);
3791 set_cc_addu64(s
, tmp
, tmp2
, tmp3
);
3796 store_reg(r1
, tmp3
);
3797 tcg_temp_free_i64(tmp
);
3798 tcg_temp_free_i64(tmp2
);
3799 tcg_temp_free_i64(tmp3
);
3801 case 0x5: /* SLFI R1,I2 [RIL] */
3802 case 0xb: /* ALFI R1,I2 [RIL] */
3803 tmp32_1
= load_reg32(r1
);
3804 tmp32_2
= tcg_const_i32(i2
);
3805 tmp32_3
= tcg_temp_new_i32();
3808 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
3809 set_cc_subu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3812 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
3813 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
3818 store_reg32(r1
, tmp32_3
);
3819 tcg_temp_free_i32(tmp32_1
);
3820 tcg_temp_free_i32(tmp32_2
);
3821 tcg_temp_free_i32(tmp32_3
);
3823 case 0xc: /* CGFI R1,I2 [RIL] */
3825 cmp_s64c(s
, tmp
, (int64_t)i2
);
3826 tcg_temp_free_i64(tmp
);
3828 case 0xe: /* CLGFI R1,I2 [RIL] */
3830 cmp_u64c(s
, tmp
, (uint64_t)(uint32_t)i2
);
3831 tcg_temp_free_i64(tmp
);
3833 case 0xd: /* CFI R1,I2 [RIL] */
3834 tmp32_1
= load_reg32(r1
);
3835 cmp_s32c(s
, tmp32_1
, i2
);
3836 tcg_temp_free_i32(tmp32_1
);
3838 case 0xf: /* CLFI R1,I2 [RIL] */
3839 tmp32_1
= load_reg32(r1
);
3840 cmp_u32c(s
, tmp32_1
, i2
);
3841 tcg_temp_free_i32(tmp32_1
);
3844 LOG_DISAS("illegal c2 operation 0x%x\n", op
);
3845 gen_illegal_opcode(env
, s
, 3);
3850 static void gen_and_or_xor_i32(int opc
, TCGv_i32 tmp
, TCGv_i32 tmp2
)
3852 switch (opc
& 0xf) {
3854 tcg_gen_and_i32(tmp
, tmp
, tmp2
);
3857 tcg_gen_or_i32(tmp
, tmp
, tmp2
);
3860 tcg_gen_xor_i32(tmp
, tmp
, tmp2
);
3867 static void disas_s390_insn(CPUS390XState
*env
, DisasContext
*s
)
3869 TCGv_i64 tmp
, tmp2
, tmp3
, tmp4
;
3870 TCGv_i32 tmp32_1
, tmp32_2
, tmp32_3
, tmp32_4
;
3873 int op
, r1
, r2
, r3
, d1
, d2
, x2
, b1
, b2
, i
, i2
, r1b
;
3878 opc
= cpu_ldub_code(env
, s
->pc
);
3879 LOG_DISAS("opc 0x%x\n", opc
);
3884 #ifndef CONFIG_USER_ONLY
3885 case 0x01: /* SAM */
3886 insn
= ld_code2(env
, s
->pc
);
3887 /* set addressing mode, but we only do 64bit anyways */
3890 case 0x6: /* BCTR R1,R2 [RR] */
3891 insn
= ld_code2(env
, s
->pc
);
3892 decode_rr(s
, insn
, &r1
, &r2
);
3893 tmp32_1
= load_reg32(r1
);
3894 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
3895 store_reg32(r1
, tmp32_1
);
3898 gen_update_cc_op(s
);
3899 l1
= gen_new_label();
3900 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp32_1
, 0, l1
);
3902 /* not taking the branch, jump to after the instruction */
3903 gen_goto_tb(s
, 0, s
->pc
+ 2);
3906 /* take the branch, move R2 into psw.addr */
3907 tmp32_1
= load_reg32(r2
);
3908 tmp
= tcg_temp_new_i64();
3909 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
3910 tcg_gen_mov_i64(psw_addr
, tmp
);
3911 s
->is_jmp
= DISAS_JUMP
;
3912 tcg_temp_free_i32(tmp32_1
);
3913 tcg_temp_free_i64(tmp
);
3916 case 0x7: /* BCR M1,R2 [RR] */
3917 insn
= ld_code2(env
, s
->pc
);
3918 decode_rr(s
, insn
, &r1
, &r2
);
3921 gen_bcr(s
, r1
, tmp
, s
->pc
);
3922 tcg_temp_free_i64(tmp
);
3923 s
->is_jmp
= DISAS_TB_JUMP
;
3925 /* XXX: "serialization and checkpoint-synchronization function"? */
3928 case 0xa: /* SVC I [RR] */
3929 insn
= ld_code2(env
, s
->pc
);
3934 tmp32_1
= tcg_const_i32(i
);
3935 tmp32_2
= tcg_const_i32(ilc
* 2);
3936 tmp32_3
= tcg_const_i32(EXCP_SVC
);
3937 tcg_gen_st_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, int_svc_code
));
3938 tcg_gen_st_i32(tmp32_2
, cpu_env
, offsetof(CPUS390XState
, int_svc_ilc
));
3939 gen_helper_exception(cpu_env
, tmp32_3
);
3940 s
->is_jmp
= DISAS_EXCP
;
3941 tcg_temp_free_i32(tmp32_1
);
3942 tcg_temp_free_i32(tmp32_2
);
3943 tcg_temp_free_i32(tmp32_3
);
3945 case 0xd: /* BASR R1,R2 [RR] */
3946 insn
= ld_code2(env
, s
->pc
);
3947 decode_rr(s
, insn
, &r1
, &r2
);
3948 tmp
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 2));
3951 tmp2
= load_reg(r2
);
3952 tcg_gen_mov_i64(psw_addr
, tmp2
);
3953 tcg_temp_free_i64(tmp2
);
3954 s
->is_jmp
= DISAS_JUMP
;
3956 tcg_temp_free_i64(tmp
);
3958 case 0xe: /* MVCL R1,R2 [RR] */
3959 insn
= ld_code2(env
, s
->pc
);
3960 decode_rr(s
, insn
, &r1
, &r2
);
3961 tmp32_1
= tcg_const_i32(r1
);
3962 tmp32_2
= tcg_const_i32(r2
);
3963 potential_page_fault(s
);
3964 gen_helper_mvcl(cc_op
, cpu_env
, tmp32_1
, tmp32_2
);
3966 tcg_temp_free_i32(tmp32_1
);
3967 tcg_temp_free_i32(tmp32_2
);
3969 case 0x10: /* LPR R1,R2 [RR] */
3970 insn
= ld_code2(env
, s
->pc
);
3971 decode_rr(s
, insn
, &r1
, &r2
);
3972 tmp32_1
= load_reg32(r2
);
3973 set_cc_abs32(s
, tmp32_1
);
3974 gen_helper_abs_i32(tmp32_1
, tmp32_1
);
3975 store_reg32(r1
, tmp32_1
);
3976 tcg_temp_free_i32(tmp32_1
);
3978 case 0x11: /* LNR R1,R2 [RR] */
3979 insn
= ld_code2(env
, s
->pc
);
3980 decode_rr(s
, insn
, &r1
, &r2
);
3981 tmp32_1
= load_reg32(r2
);
3982 set_cc_nabs32(s
, tmp32_1
);
3983 gen_helper_nabs_i32(tmp32_1
, tmp32_1
);
3984 store_reg32(r1
, tmp32_1
);
3985 tcg_temp_free_i32(tmp32_1
);
3987 case 0x12: /* LTR R1,R2 [RR] */
3988 insn
= ld_code2(env
, s
->pc
);
3989 decode_rr(s
, insn
, &r1
, &r2
);
3990 tmp32_1
= load_reg32(r2
);
3992 store_reg32(r1
, tmp32_1
);
3994 set_cc_s32(s
, tmp32_1
);
3995 tcg_temp_free_i32(tmp32_1
);
3997 case 0x13: /* LCR R1,R2 [RR] */
3998 insn
= ld_code2(env
, s
->pc
);
3999 decode_rr(s
, insn
, &r1
, &r2
);
4000 tmp32_1
= load_reg32(r2
);
4001 tcg_gen_neg_i32(tmp32_1
, tmp32_1
);
4002 store_reg32(r1
, tmp32_1
);
4003 set_cc_comp32(s
, tmp32_1
);
4004 tcg_temp_free_i32(tmp32_1
);
4006 case 0x14: /* NR R1,R2 [RR] */
4007 case 0x16: /* OR R1,R2 [RR] */
4008 case 0x17: /* XR R1,R2 [RR] */
4009 insn
= ld_code2(env
, s
->pc
);
4010 decode_rr(s
, insn
, &r1
, &r2
);
4011 tmp32_2
= load_reg32(r2
);
4012 tmp32_1
= load_reg32(r1
);
4013 gen_and_or_xor_i32(opc
, tmp32_1
, tmp32_2
);
4014 store_reg32(r1
, tmp32_1
);
4015 set_cc_nz_u32(s
, tmp32_1
);
4016 tcg_temp_free_i32(tmp32_1
);
4017 tcg_temp_free_i32(tmp32_2
);
4019 case 0x18: /* LR R1,R2 [RR] */
4020 insn
= ld_code2(env
, s
->pc
);
4021 decode_rr(s
, insn
, &r1
, &r2
);
4022 tmp32_1
= load_reg32(r2
);
4023 store_reg32(r1
, tmp32_1
);
4024 tcg_temp_free_i32(tmp32_1
);
4026 case 0x15: /* CLR R1,R2 [RR] */
4027 case 0x19: /* CR R1,R2 [RR] */
4028 insn
= ld_code2(env
, s
->pc
);
4029 decode_rr(s
, insn
, &r1
, &r2
);
4030 tmp32_1
= load_reg32(r1
);
4031 tmp32_2
= load_reg32(r2
);
4033 cmp_u32(s
, tmp32_1
, tmp32_2
);
4035 cmp_s32(s
, tmp32_1
, tmp32_2
);
4037 tcg_temp_free_i32(tmp32_1
);
4038 tcg_temp_free_i32(tmp32_2
);
4040 case 0x1a: /* AR R1,R2 [RR] */
4041 case 0x1e: /* ALR R1,R2 [RR] */
4042 insn
= ld_code2(env
, s
->pc
);
4043 decode_rr(s
, insn
, &r1
, &r2
);
4044 tmp32_1
= load_reg32(r1
);
4045 tmp32_2
= load_reg32(r2
);
4046 tmp32_3
= tcg_temp_new_i32();
4047 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4048 store_reg32(r1
, tmp32_3
);
4050 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4052 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4054 tcg_temp_free_i32(tmp32_1
);
4055 tcg_temp_free_i32(tmp32_2
);
4056 tcg_temp_free_i32(tmp32_3
);
4058 case 0x1b: /* SR R1,R2 [RR] */
4059 case 0x1f: /* SLR R1,R2 [RR] */
4060 insn
= ld_code2(env
, s
->pc
);
4061 decode_rr(s
, insn
, &r1
, &r2
);
4062 tmp32_1
= load_reg32(r1
);
4063 tmp32_2
= load_reg32(r2
);
4064 tmp32_3
= tcg_temp_new_i32();
4065 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4066 store_reg32(r1
, tmp32_3
);
4068 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4070 set_cc_subu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4072 tcg_temp_free_i32(tmp32_1
);
4073 tcg_temp_free_i32(tmp32_2
);
4074 tcg_temp_free_i32(tmp32_3
);
4076 case 0x1c: /* MR R1,R2 [RR] */
4077 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
4078 insn
= ld_code2(env
, s
->pc
);
4079 decode_rr(s
, insn
, &r1
, &r2
);
4080 tmp2
= load_reg(r2
);
4081 tmp3
= load_reg((r1
+ 1) & 15);
4082 tcg_gen_ext32s_i64(tmp2
, tmp2
);
4083 tcg_gen_ext32s_i64(tmp3
, tmp3
);
4084 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
4085 store_reg32_i64((r1
+ 1) & 15, tmp2
);
4086 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
4087 store_reg32_i64(r1
, tmp2
);
4088 tcg_temp_free_i64(tmp2
);
4089 tcg_temp_free_i64(tmp3
);
4091 case 0x1d: /* DR R1,R2 [RR] */
4092 insn
= ld_code2(env
, s
->pc
);
4093 decode_rr(s
, insn
, &r1
, &r2
);
4094 tmp32_1
= load_reg32(r1
);
4095 tmp32_2
= load_reg32(r1
+ 1);
4096 tmp32_3
= load_reg32(r2
);
4098 tmp
= tcg_temp_new_i64(); /* dividend */
4099 tmp2
= tcg_temp_new_i64(); /* divisor */
4100 tmp3
= tcg_temp_new_i64();
4102 /* dividend is r(r1 << 32) | r(r1 + 1) */
4103 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
4104 tcg_gen_extu_i32_i64(tmp2
, tmp32_2
);
4105 tcg_gen_shli_i64(tmp
, tmp
, 32);
4106 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
4108 /* divisor is r(r2) */
4109 tcg_gen_ext_i32_i64(tmp2
, tmp32_3
);
4111 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
4112 tcg_gen_rem_i64(tmp
, tmp
, tmp2
);
4114 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4115 tcg_gen_trunc_i64_i32(tmp32_2
, tmp3
);
4117 store_reg32(r1
, tmp32_1
); /* remainder */
4118 store_reg32(r1
+ 1, tmp32_2
); /* quotient */
4119 tcg_temp_free_i32(tmp32_1
);
4120 tcg_temp_free_i32(tmp32_2
);
4121 tcg_temp_free_i32(tmp32_3
);
4122 tcg_temp_free_i64(tmp
);
4123 tcg_temp_free_i64(tmp2
);
4124 tcg_temp_free_i64(tmp3
);
4126 case 0x28: /* LDR R1,R2 [RR] */
4127 insn
= ld_code2(env
, s
->pc
);
4128 decode_rr(s
, insn
, &r1
, &r2
);
4129 tmp
= load_freg(r2
);
4130 store_freg(r1
, tmp
);
4131 tcg_temp_free_i64(tmp
);
4133 case 0x38: /* LER R1,R2 [RR] */
4134 insn
= ld_code2(env
, s
->pc
);
4135 decode_rr(s
, insn
, &r1
, &r2
);
4136 tmp32_1
= load_freg32(r2
);
4137 store_freg32(r1
, tmp32_1
);
4138 tcg_temp_free_i32(tmp32_1
);
4140 case 0x40: /* STH R1,D2(X2,B2) [RX] */
4141 insn
= ld_code4(env
, s
->pc
);
4142 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4143 tmp2
= load_reg(r1
);
4144 tcg_gen_qemu_st16(tmp2
, tmp
, get_mem_index(s
));
4145 tcg_temp_free_i64(tmp
);
4146 tcg_temp_free_i64(tmp2
);
4149 insn
= ld_code4(env
, s
->pc
);
4150 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4151 store_reg(r1
, tmp
); /* FIXME: 31/24-bit addressing */
4152 tcg_temp_free_i64(tmp
);
4154 case 0x42: /* STC R1,D2(X2,B2) [RX] */
4155 insn
= ld_code4(env
, s
->pc
);
4156 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4157 tmp2
= load_reg(r1
);
4158 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4159 tcg_temp_free_i64(tmp
);
4160 tcg_temp_free_i64(tmp2
);
4162 case 0x43: /* IC R1,D2(X2,B2) [RX] */
4163 insn
= ld_code4(env
, s
->pc
);
4164 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4165 tmp2
= tcg_temp_new_i64();
4166 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4167 store_reg8(r1
, tmp2
);
4168 tcg_temp_free_i64(tmp
);
4169 tcg_temp_free_i64(tmp2
);
4171 case 0x44: /* EX R1,D2(X2,B2) [RX] */
4172 insn
= ld_code4(env
, s
->pc
);
4173 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4174 tmp2
= load_reg(r1
);
4175 tmp3
= tcg_const_i64(s
->pc
+ 4);
4178 gen_helper_ex(cc_op
, cpu_env
, cc_op
, tmp2
, tmp
, tmp3
);
4180 tcg_temp_free_i64(tmp
);
4181 tcg_temp_free_i64(tmp2
);
4182 tcg_temp_free_i64(tmp3
);
4184 case 0x46: /* BCT R1,D2(X2,B2) [RX] */
4185 insn
= ld_code4(env
, s
->pc
);
4186 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4187 tcg_temp_free_i64(tmp
);
4189 tmp32_1
= load_reg32(r1
);
4190 tcg_gen_subi_i32(tmp32_1
, tmp32_1
, 1);
4191 store_reg32(r1
, tmp32_1
);
4193 gen_update_cc_op(s
);
4194 l1
= gen_new_label();
4195 tcg_gen_brcondi_i32(TCG_COND_NE
, tmp32_1
, 0, l1
);
4197 /* not taking the branch, jump to after the instruction */
4198 gen_goto_tb(s
, 0, s
->pc
+ 4);
4201 /* take the branch, move R2 into psw.addr */
4202 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4203 tcg_gen_mov_i64(psw_addr
, tmp
);
4204 s
->is_jmp
= DISAS_JUMP
;
4205 tcg_temp_free_i32(tmp32_1
);
4206 tcg_temp_free_i64(tmp
);
4208 case 0x47: /* BC M1,D2(X2,B2) [RX] */
4209 insn
= ld_code4(env
, s
->pc
);
4210 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4211 gen_bcr(s
, r1
, tmp
, s
->pc
+ 4);
4212 tcg_temp_free_i64(tmp
);
4213 s
->is_jmp
= DISAS_TB_JUMP
;
4215 case 0x48: /* LH R1,D2(X2,B2) [RX] */
4216 insn
= ld_code4(env
, s
->pc
);
4217 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4218 tmp2
= tcg_temp_new_i64();
4219 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
4220 store_reg32_i64(r1
, tmp2
);
4221 tcg_temp_free_i64(tmp
);
4222 tcg_temp_free_i64(tmp2
);
4224 case 0x49: /* CH R1,D2(X2,B2) [RX] */
4225 insn
= ld_code4(env
, s
->pc
);
4226 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4227 tmp32_1
= load_reg32(r1
);
4228 tmp32_2
= tcg_temp_new_i32();
4229 tmp2
= tcg_temp_new_i64();
4230 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
4231 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4232 cmp_s32(s
, tmp32_1
, tmp32_2
);
4233 tcg_temp_free_i32(tmp32_1
);
4234 tcg_temp_free_i32(tmp32_2
);
4235 tcg_temp_free_i64(tmp
);
4236 tcg_temp_free_i64(tmp2
);
4238 case 0x4a: /* AH R1,D2(X2,B2) [RX] */
4239 case 0x4b: /* SH R1,D2(X2,B2) [RX] */
4240 case 0x4c: /* MH R1,D2(X2,B2) [RX] */
4241 insn
= ld_code4(env
, s
->pc
);
4242 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4243 tmp2
= tcg_temp_new_i64();
4244 tmp32_1
= load_reg32(r1
);
4245 tmp32_2
= tcg_temp_new_i32();
4246 tmp32_3
= tcg_temp_new_i32();
4248 tcg_gen_qemu_ld16s(tmp2
, tmp
, get_mem_index(s
));
4249 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4252 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4253 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4256 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4257 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4260 tcg_gen_mul_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4265 store_reg32(r1
, tmp32_3
);
4267 tcg_temp_free_i32(tmp32_1
);
4268 tcg_temp_free_i32(tmp32_2
);
4269 tcg_temp_free_i32(tmp32_3
);
4270 tcg_temp_free_i64(tmp
);
4271 tcg_temp_free_i64(tmp2
);
4273 case 0x4d: /* BAS R1,D2(X2,B2) [RX] */
4274 insn
= ld_code4(env
, s
->pc
);
4275 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4276 tmp2
= tcg_const_i64(pc_to_link_info(s
, s
->pc
+ 4));
4277 store_reg(r1
, tmp2
);
4278 tcg_gen_mov_i64(psw_addr
, tmp
);
4279 tcg_temp_free_i64(tmp
);
4280 tcg_temp_free_i64(tmp2
);
4281 s
->is_jmp
= DISAS_JUMP
;
4283 case 0x4e: /* CVD R1,D2(X2,B2) [RX] */
4284 insn
= ld_code4(env
, s
->pc
);
4285 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4286 tmp2
= tcg_temp_new_i64();
4287 tmp32_1
= tcg_temp_new_i32();
4288 tcg_gen_trunc_i64_i32(tmp32_1
, regs
[r1
]);
4289 gen_helper_cvd(tmp2
, tmp32_1
);
4290 tcg_gen_qemu_st64(tmp2
, tmp
, get_mem_index(s
));
4291 tcg_temp_free_i64(tmp
);
4292 tcg_temp_free_i64(tmp2
);
4293 tcg_temp_free_i32(tmp32_1
);
4295 case 0x50: /* st r1, d2(x2, b2) */
4296 insn
= ld_code4(env
, s
->pc
);
4297 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4298 tmp2
= load_reg(r1
);
4299 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
4300 tcg_temp_free_i64(tmp
);
4301 tcg_temp_free_i64(tmp2
);
4303 case 0x55: /* CL R1,D2(X2,B2) [RX] */
4304 insn
= ld_code4(env
, s
->pc
);
4305 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4306 tmp2
= tcg_temp_new_i64();
4307 tmp32_1
= tcg_temp_new_i32();
4308 tmp32_2
= load_reg32(r1
);
4309 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4310 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4311 cmp_u32(s
, tmp32_2
, tmp32_1
);
4312 tcg_temp_free_i64(tmp
);
4313 tcg_temp_free_i64(tmp2
);
4314 tcg_temp_free_i32(tmp32_1
);
4315 tcg_temp_free_i32(tmp32_2
);
4317 case 0x54: /* N R1,D2(X2,B2) [RX] */
4318 case 0x56: /* O R1,D2(X2,B2) [RX] */
4319 case 0x57: /* X R1,D2(X2,B2) [RX] */
4320 insn
= ld_code4(env
, s
->pc
);
4321 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4322 tmp2
= tcg_temp_new_i64();
4323 tmp32_1
= load_reg32(r1
);
4324 tmp32_2
= tcg_temp_new_i32();
4325 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4326 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4327 gen_and_or_xor_i32(opc
, tmp32_1
, tmp32_2
);
4328 store_reg32(r1
, tmp32_1
);
4329 set_cc_nz_u32(s
, tmp32_1
);
4330 tcg_temp_free_i64(tmp
);
4331 tcg_temp_free_i64(tmp2
);
4332 tcg_temp_free_i32(tmp32_1
);
4333 tcg_temp_free_i32(tmp32_2
);
4335 case 0x58: /* l r1, d2(x2, b2) */
4336 insn
= ld_code4(env
, s
->pc
);
4337 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4338 tmp2
= tcg_temp_new_i64();
4339 tmp32_1
= tcg_temp_new_i32();
4340 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4341 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4342 store_reg32(r1
, tmp32_1
);
4343 tcg_temp_free_i64(tmp
);
4344 tcg_temp_free_i64(tmp2
);
4345 tcg_temp_free_i32(tmp32_1
);
4347 case 0x59: /* C R1,D2(X2,B2) [RX] */
4348 insn
= ld_code4(env
, s
->pc
);
4349 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4350 tmp2
= tcg_temp_new_i64();
4351 tmp32_1
= tcg_temp_new_i32();
4352 tmp32_2
= load_reg32(r1
);
4353 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
4354 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4355 cmp_s32(s
, tmp32_2
, tmp32_1
);
4356 tcg_temp_free_i64(tmp
);
4357 tcg_temp_free_i64(tmp2
);
4358 tcg_temp_free_i32(tmp32_1
);
4359 tcg_temp_free_i32(tmp32_2
);
4361 case 0x5a: /* A R1,D2(X2,B2) [RX] */
4362 case 0x5b: /* S R1,D2(X2,B2) [RX] */
4363 case 0x5e: /* AL R1,D2(X2,B2) [RX] */
4364 case 0x5f: /* SL R1,D2(X2,B2) [RX] */
4365 insn
= ld_code4(env
, s
->pc
);
4366 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4367 tmp32_1
= load_reg32(r1
);
4368 tmp32_2
= tcg_temp_new_i32();
4369 tmp32_3
= tcg_temp_new_i32();
4370 tcg_gen_qemu_ld32s(tmp
, tmp
, get_mem_index(s
));
4371 tcg_gen_trunc_i64_i32(tmp32_2
, tmp
);
4375 tcg_gen_add_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4379 tcg_gen_sub_i32(tmp32_3
, tmp32_1
, tmp32_2
);
4384 store_reg32(r1
, tmp32_3
);
4387 set_cc_add32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4390 set_cc_addu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4393 set_cc_sub32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4396 set_cc_subu32(s
, tmp32_1
, tmp32_2
, tmp32_3
);
4401 tcg_temp_free_i64(tmp
);
4402 tcg_temp_free_i32(tmp32_1
);
4403 tcg_temp_free_i32(tmp32_2
);
4404 tcg_temp_free_i32(tmp32_3
);
4406 case 0x5c: /* M R1,D2(X2,B2) [RX] */
4407 /* reg(r1, r1+1) = reg(r1+1) * *(s32*)addr */
4408 insn
= ld_code4(env
, s
->pc
);
4409 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4410 tmp2
= tcg_temp_new_i64();
4411 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
4412 tmp3
= load_reg((r1
+ 1) & 15);
4413 tcg_gen_ext32s_i64(tmp2
, tmp2
);
4414 tcg_gen_ext32s_i64(tmp3
, tmp3
);
4415 tcg_gen_mul_i64(tmp2
, tmp2
, tmp3
);
4416 store_reg32_i64((r1
+ 1) & 15, tmp2
);
4417 tcg_gen_shri_i64(tmp2
, tmp2
, 32);
4418 store_reg32_i64(r1
, tmp2
);
4419 tcg_temp_free_i64(tmp
);
4420 tcg_temp_free_i64(tmp2
);
4421 tcg_temp_free_i64(tmp3
);
4423 case 0x5d: /* D R1,D2(X2,B2) [RX] */
4424 insn
= ld_code4(env
, s
->pc
);
4425 tmp3
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4426 tmp32_1
= load_reg32(r1
);
4427 tmp32_2
= load_reg32(r1
+ 1);
4429 tmp
= tcg_temp_new_i64();
4430 tmp2
= tcg_temp_new_i64();
4432 /* dividend is r(r1 << 32) | r(r1 + 1) */
4433 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
4434 tcg_gen_extu_i32_i64(tmp2
, tmp32_2
);
4435 tcg_gen_shli_i64(tmp
, tmp
, 32);
4436 tcg_gen_or_i64(tmp
, tmp
, tmp2
);
4438 /* divisor is in memory */
4439 tcg_gen_qemu_ld32s(tmp2
, tmp3
, get_mem_index(s
));
4441 /* XXX divisor == 0 -> FixP divide exception */
4443 tcg_gen_div_i64(tmp3
, tmp
, tmp2
);
4444 tcg_gen_rem_i64(tmp
, tmp
, tmp2
);
4446 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4447 tcg_gen_trunc_i64_i32(tmp32_2
, tmp3
);
4449 store_reg32(r1
, tmp32_1
); /* remainder */
4450 store_reg32(r1
+ 1, tmp32_2
); /* quotient */
4451 tcg_temp_free_i32(tmp32_1
);
4452 tcg_temp_free_i32(tmp32_2
);
4453 tcg_temp_free_i64(tmp
);
4454 tcg_temp_free_i64(tmp2
);
4455 tcg_temp_free_i64(tmp3
);
4457 case 0x60: /* STD R1,D2(X2,B2) [RX] */
4458 insn
= ld_code4(env
, s
->pc
);
4459 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4460 tmp2
= load_freg(r1
);
4461 tcg_gen_qemu_st64(tmp2
, tmp
, get_mem_index(s
));
4462 tcg_temp_free_i64(tmp
);
4463 tcg_temp_free_i64(tmp2
);
4465 case 0x68: /* LD R1,D2(X2,B2) [RX] */
4466 insn
= ld_code4(env
, s
->pc
);
4467 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4468 tmp2
= tcg_temp_new_i64();
4469 tcg_gen_qemu_ld64(tmp2
, tmp
, get_mem_index(s
));
4470 store_freg(r1
, tmp2
);
4471 tcg_temp_free_i64(tmp
);
4472 tcg_temp_free_i64(tmp2
);
4474 case 0x70: /* STE R1,D2(X2,B2) [RX] */
4475 insn
= ld_code4(env
, s
->pc
);
4476 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4477 tmp2
= tcg_temp_new_i64();
4478 tmp32_1
= load_freg32(r1
);
4479 tcg_gen_extu_i32_i64(tmp2
, tmp32_1
);
4480 tcg_gen_qemu_st32(tmp2
, tmp
, get_mem_index(s
));
4481 tcg_temp_free_i64(tmp
);
4482 tcg_temp_free_i64(tmp2
);
4483 tcg_temp_free_i32(tmp32_1
);
4485 case 0x71: /* MS R1,D2(X2,B2) [RX] */
4486 insn
= ld_code4(env
, s
->pc
);
4487 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4488 tmp2
= tcg_temp_new_i64();
4489 tmp32_1
= load_reg32(r1
);
4490 tmp32_2
= tcg_temp_new_i32();
4491 tcg_gen_qemu_ld32s(tmp2
, tmp
, get_mem_index(s
));
4492 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4493 tcg_gen_mul_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4494 store_reg32(r1
, tmp32_1
);
4495 tcg_temp_free_i64(tmp
);
4496 tcg_temp_free_i64(tmp2
);
4497 tcg_temp_free_i32(tmp32_1
);
4498 tcg_temp_free_i32(tmp32_2
);
4500 case 0x78: /* LE R1,D2(X2,B2) [RX] */
4501 insn
= ld_code4(env
, s
->pc
);
4502 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4503 tmp2
= tcg_temp_new_i64();
4504 tmp32_1
= tcg_temp_new_i32();
4505 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4506 tcg_gen_trunc_i64_i32(tmp32_1
, tmp2
);
4507 store_freg32(r1
, tmp32_1
);
4508 tcg_temp_free_i64(tmp
);
4509 tcg_temp_free_i64(tmp2
);
4510 tcg_temp_free_i32(tmp32_1
);
4512 #ifndef CONFIG_USER_ONLY
4513 case 0x80: /* SSM D2(B2) [S] */
4514 /* Set System Mask */
4515 check_privileged(env
, s
, ilc
);
4516 insn
= ld_code4(env
, s
->pc
);
4517 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4518 tmp
= get_address(s
, 0, b2
, d2
);
4519 tmp2
= tcg_temp_new_i64();
4520 tmp3
= tcg_temp_new_i64();
4521 tcg_gen_andi_i64(tmp3
, psw_mask
, ~0xff00000000000000ULL
);
4522 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4523 tcg_gen_shli_i64(tmp2
, tmp2
, 56);
4524 tcg_gen_or_i64(psw_mask
, tmp3
, tmp2
);
4525 tcg_temp_free_i64(tmp
);
4526 tcg_temp_free_i64(tmp2
);
4527 tcg_temp_free_i64(tmp3
);
4529 case 0x82: /* LPSW D2(B2) [S] */
4531 check_privileged(env
, s
, ilc
);
4532 insn
= ld_code4(env
, s
->pc
);
4533 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4534 tmp
= get_address(s
, 0, b2
, d2
);
4535 tmp2
= tcg_temp_new_i64();
4536 tmp3
= tcg_temp_new_i64();
4537 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4538 tcg_gen_addi_i64(tmp
, tmp
, 4);
4539 tcg_gen_qemu_ld32u(tmp3
, tmp
, get_mem_index(s
));
4540 gen_helper_load_psw(cpu_env
, tmp2
, tmp3
);
4541 tcg_temp_free_i64(tmp
);
4542 tcg_temp_free_i64(tmp2
);
4543 tcg_temp_free_i64(tmp3
);
4544 /* we need to keep cc_op intact */
4545 s
->is_jmp
= DISAS_JUMP
;
4547 case 0x83: /* DIAG R1,R3,D2 [RS] */
4548 /* Diagnose call (KVM hypercall) */
4549 check_privileged(env
, s
, ilc
);
4550 potential_page_fault(s
);
4551 insn
= ld_code4(env
, s
->pc
);
4552 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4553 tmp32_1
= tcg_const_i32(insn
& 0xfff);
4556 gen_helper_diag(tmp2
, cpu_env
, tmp32_1
, tmp2
, tmp3
);
4558 tcg_temp_free_i32(tmp32_1
);
4559 tcg_temp_free_i64(tmp2
);
4560 tcg_temp_free_i64(tmp3
);
4563 case 0x88: /* SRL R1,D2(B2) [RS] */
4564 case 0x89: /* SLL R1,D2(B2) [RS] */
4565 case 0x8a: /* SRA R1,D2(B2) [RS] */
4566 insn
= ld_code4(env
, s
->pc
);
4567 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4568 tmp
= get_address(s
, 0, b2
, d2
);
4569 tmp32_1
= load_reg32(r1
);
4570 tmp32_2
= tcg_temp_new_i32();
4571 tcg_gen_trunc_i64_i32(tmp32_2
, tmp
);
4572 tcg_gen_andi_i32(tmp32_2
, tmp32_2
, 0x3f);
4575 tcg_gen_shr_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4578 tcg_gen_shl_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4581 tcg_gen_sar_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4582 set_cc_s32(s
, tmp32_1
);
4587 store_reg32(r1
, tmp32_1
);
4588 tcg_temp_free_i64(tmp
);
4589 tcg_temp_free_i32(tmp32_1
);
4590 tcg_temp_free_i32(tmp32_2
);
4592 case 0x8c: /* SRDL R1,D2(B2) [RS] */
4593 case 0x8d: /* SLDL R1,D2(B2) [RS] */
4594 case 0x8e: /* SRDA R1,D2(B2) [RS] */
4595 insn
= ld_code4(env
, s
->pc
);
4596 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4597 tmp
= get_address(s
, 0, b2
, d2
); /* shift */
4598 tmp2
= tcg_temp_new_i64();
4599 tmp32_1
= load_reg32(r1
);
4600 tmp32_2
= load_reg32(r1
+ 1);
4601 tcg_gen_concat_i32_i64(tmp2
, tmp32_2
, tmp32_1
); /* operand */
4604 tcg_gen_shr_i64(tmp2
, tmp2
, tmp
);
4607 tcg_gen_shl_i64(tmp2
, tmp2
, tmp
);
4610 tcg_gen_sar_i64(tmp2
, tmp2
, tmp
);
4611 set_cc_s64(s
, tmp2
);
4614 tcg_gen_shri_i64(tmp
, tmp2
, 32);
4615 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4616 store_reg32(r1
, tmp32_1
);
4617 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4618 store_reg32(r1
+ 1, tmp32_2
);
4619 tcg_temp_free_i64(tmp
);
4620 tcg_temp_free_i64(tmp2
);
4622 case 0x98: /* LM R1,R3,D2(B2) [RS] */
4623 case 0x90: /* STM R1,R3,D2(B2) [RS] */
4624 insn
= ld_code4(env
, s
->pc
);
4625 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4627 tmp
= get_address(s
, 0, b2
, d2
);
4628 tmp2
= tcg_temp_new_i64();
4629 tmp3
= tcg_const_i64(4);
4630 tmp4
= tcg_const_i64(0xffffffff00000000ULL
);
4631 for (i
= r1
;; i
= (i
+ 1) % 16) {
4633 tcg_gen_qemu_ld32u(tmp2
, tmp
, get_mem_index(s
));
4634 tcg_gen_and_i64(regs
[i
], regs
[i
], tmp4
);
4635 tcg_gen_or_i64(regs
[i
], regs
[i
], tmp2
);
4637 tcg_gen_qemu_st32(regs
[i
], tmp
, get_mem_index(s
));
4642 tcg_gen_add_i64(tmp
, tmp
, tmp3
);
4644 tcg_temp_free_i64(tmp
);
4645 tcg_temp_free_i64(tmp2
);
4646 tcg_temp_free_i64(tmp3
);
4647 tcg_temp_free_i64(tmp4
);
4649 case 0x91: /* TM D1(B1),I2 [SI] */
4650 insn
= ld_code4(env
, s
->pc
);
4651 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4652 tmp2
= tcg_const_i64(i2
);
4653 tcg_gen_qemu_ld8u(tmp
, tmp
, get_mem_index(s
));
4654 cmp_64(s
, tmp
, tmp2
, CC_OP_TM_32
);
4655 tcg_temp_free_i64(tmp
);
4656 tcg_temp_free_i64(tmp2
);
4658 case 0x92: /* MVI D1(B1),I2 [SI] */
4659 insn
= ld_code4(env
, s
->pc
);
4660 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4661 tmp2
= tcg_const_i64(i2
);
4662 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4663 tcg_temp_free_i64(tmp
);
4664 tcg_temp_free_i64(tmp2
);
4666 case 0x94: /* NI D1(B1),I2 [SI] */
4667 case 0x96: /* OI D1(B1),I2 [SI] */
4668 case 0x97: /* XI D1(B1),I2 [SI] */
4669 insn
= ld_code4(env
, s
->pc
);
4670 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4671 tmp2
= tcg_temp_new_i64();
4672 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4675 tcg_gen_andi_i64(tmp2
, tmp2
, i2
);
4678 tcg_gen_ori_i64(tmp2
, tmp2
, i2
);
4681 tcg_gen_xori_i64(tmp2
, tmp2
, i2
);
4686 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4687 set_cc_nz_u64(s
, tmp2
);
4688 tcg_temp_free_i64(tmp
);
4689 tcg_temp_free_i64(tmp2
);
4691 case 0x95: /* CLI D1(B1),I2 [SI] */
4692 insn
= ld_code4(env
, s
->pc
);
4693 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4694 tmp2
= tcg_temp_new_i64();
4695 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4696 cmp_u64c(s
, tmp2
, i2
);
4697 tcg_temp_free_i64(tmp
);
4698 tcg_temp_free_i64(tmp2
);
4700 case 0x9a: /* LAM R1,R3,D2(B2) [RS] */
4701 insn
= ld_code4(env
, s
->pc
);
4702 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4703 tmp
= get_address(s
, 0, b2
, d2
);
4704 tmp32_1
= tcg_const_i32(r1
);
4705 tmp32_2
= tcg_const_i32(r3
);
4706 potential_page_fault(s
);
4707 gen_helper_lam(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4708 tcg_temp_free_i64(tmp
);
4709 tcg_temp_free_i32(tmp32_1
);
4710 tcg_temp_free_i32(tmp32_2
);
4712 case 0x9b: /* STAM R1,R3,D2(B2) [RS] */
4713 insn
= ld_code4(env
, s
->pc
);
4714 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4715 tmp
= get_address(s
, 0, b2
, d2
);
4716 tmp32_1
= tcg_const_i32(r1
);
4717 tmp32_2
= tcg_const_i32(r3
);
4718 potential_page_fault(s
);
4719 gen_helper_stam(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4720 tcg_temp_free_i64(tmp
);
4721 tcg_temp_free_i32(tmp32_1
);
4722 tcg_temp_free_i32(tmp32_2
);
4725 insn
= ld_code4(env
, s
->pc
);
4726 r1
= (insn
>> 20) & 0xf;
4727 op
= (insn
>> 16) & 0xf;
4729 disas_a5(env
, s
, op
, r1
, i2
);
4732 insn
= ld_code4(env
, s
->pc
);
4733 r1
= (insn
>> 20) & 0xf;
4734 op
= (insn
>> 16) & 0xf;
4736 disas_a7(env
, s
, op
, r1
, i2
);
4738 case 0xa8: /* MVCLE R1,R3,D2(B2) [RS] */
4739 insn
= ld_code4(env
, s
->pc
);
4740 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4741 tmp
= get_address(s
, 0, b2
, d2
);
4742 tmp32_1
= tcg_const_i32(r1
);
4743 tmp32_2
= tcg_const_i32(r3
);
4744 potential_page_fault(s
);
4745 gen_helper_mvcle(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4747 tcg_temp_free_i64(tmp
);
4748 tcg_temp_free_i32(tmp32_1
);
4749 tcg_temp_free_i32(tmp32_2
);
4751 case 0xa9: /* CLCLE R1,R3,D2(B2) [RS] */
4752 insn
= ld_code4(env
, s
->pc
);
4753 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4754 tmp
= get_address(s
, 0, b2
, d2
);
4755 tmp32_1
= tcg_const_i32(r1
);
4756 tmp32_2
= tcg_const_i32(r3
);
4757 potential_page_fault(s
);
4758 gen_helper_clcle(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4760 tcg_temp_free_i64(tmp
);
4761 tcg_temp_free_i32(tmp32_1
);
4762 tcg_temp_free_i32(tmp32_2
);
4764 #ifndef CONFIG_USER_ONLY
4765 case 0xac: /* STNSM D1(B1),I2 [SI] */
4766 case 0xad: /* STOSM D1(B1),I2 [SI] */
4767 check_privileged(env
, s
, ilc
);
4768 insn
= ld_code4(env
, s
->pc
);
4769 tmp
= decode_si(s
, insn
, &i2
, &b1
, &d1
);
4770 tmp2
= tcg_temp_new_i64();
4771 tcg_gen_shri_i64(tmp2
, psw_mask
, 56);
4772 tcg_gen_qemu_st8(tmp2
, tmp
, get_mem_index(s
));
4774 tcg_gen_andi_i64(psw_mask
, psw_mask
,
4775 ((uint64_t)i2
<< 56) | 0x00ffffffffffffffULL
);
4777 tcg_gen_ori_i64(psw_mask
, psw_mask
, (uint64_t)i2
<< 56);
4779 tcg_temp_free_i64(tmp
);
4780 tcg_temp_free_i64(tmp2
);
4782 case 0xae: /* SIGP R1,R3,D2(B2) [RS] */
4783 check_privileged(env
, s
, ilc
);
4784 insn
= ld_code4(env
, s
->pc
);
4785 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4786 tmp
= get_address(s
, 0, b2
, d2
);
4787 tmp2
= load_reg(r3
);
4788 tmp32_1
= tcg_const_i32(r1
);
4789 potential_page_fault(s
);
4790 gen_helper_sigp(cc_op
, cpu_env
, tmp
, tmp32_1
, tmp2
);
4792 tcg_temp_free_i64(tmp
);
4793 tcg_temp_free_i64(tmp2
);
4794 tcg_temp_free_i32(tmp32_1
);
4796 case 0xb1: /* LRA R1,D2(X2, B2) [RX] */
4797 check_privileged(env
, s
, ilc
);
4798 insn
= ld_code4(env
, s
->pc
);
4799 tmp
= decode_rx(s
, insn
, &r1
, &x2
, &b2
, &d2
);
4800 tmp32_1
= tcg_const_i32(r1
);
4801 potential_page_fault(s
);
4802 gen_helper_lra(cc_op
, cpu_env
, tmp
, tmp32_1
);
4804 tcg_temp_free_i64(tmp
);
4805 tcg_temp_free_i32(tmp32_1
);
4809 insn
= ld_code4(env
, s
->pc
);
4810 op
= (insn
>> 16) & 0xff;
4812 case 0x9c: /* STFPC D2(B2) [S] */
4814 b2
= (insn
>> 12) & 0xf;
4815 tmp32_1
= tcg_temp_new_i32();
4816 tmp
= tcg_temp_new_i64();
4817 tmp2
= get_address(s
, 0, b2
, d2
);
4818 tcg_gen_ld_i32(tmp32_1
, cpu_env
, offsetof(CPUS390XState
, fpc
));
4819 tcg_gen_extu_i32_i64(tmp
, tmp32_1
);
4820 tcg_gen_qemu_st32(tmp
, tmp2
, get_mem_index(s
));
4821 tcg_temp_free_i32(tmp32_1
);
4822 tcg_temp_free_i64(tmp
);
4823 tcg_temp_free_i64(tmp2
);
4826 disas_b2(env
, s
, op
, insn
);
4831 insn
= ld_code4(env
, s
->pc
);
4832 op
= (insn
>> 16) & 0xff;
4833 r3
= (insn
>> 12) & 0xf; /* aka m3 */
4834 r1
= (insn
>> 4) & 0xf;
4836 disas_b3(env
, s
, op
, r3
, r1
, r2
);
4838 #ifndef CONFIG_USER_ONLY
4839 case 0xb6: /* STCTL R1,R3,D2(B2) [RS] */
4841 check_privileged(env
, s
, ilc
);
4842 insn
= ld_code4(env
, s
->pc
);
4843 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4844 tmp
= get_address(s
, 0, b2
, d2
);
4845 tmp32_1
= tcg_const_i32(r1
);
4846 tmp32_2
= tcg_const_i32(r3
);
4847 potential_page_fault(s
);
4848 gen_helper_stctl(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4849 tcg_temp_free_i64(tmp
);
4850 tcg_temp_free_i32(tmp32_1
);
4851 tcg_temp_free_i32(tmp32_2
);
4853 case 0xb7: /* LCTL R1,R3,D2(B2) [RS] */
4855 check_privileged(env
, s
, ilc
);
4856 insn
= ld_code4(env
, s
->pc
);
4857 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4858 tmp
= get_address(s
, 0, b2
, d2
);
4859 tmp32_1
= tcg_const_i32(r1
);
4860 tmp32_2
= tcg_const_i32(r3
);
4861 potential_page_fault(s
);
4862 gen_helper_lctl(cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4863 tcg_temp_free_i64(tmp
);
4864 tcg_temp_free_i32(tmp32_1
);
4865 tcg_temp_free_i32(tmp32_2
);
4869 insn
= ld_code4(env
, s
->pc
);
4870 r1
= (insn
>> 4) & 0xf;
4872 op
= (insn
>> 16) & 0xff;
4873 disas_b9(env
, s
, op
, r1
, r2
);
4875 case 0xba: /* CS R1,R3,D2(B2) [RS] */
4876 insn
= ld_code4(env
, s
->pc
);
4877 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4878 tmp
= get_address(s
, 0, b2
, d2
);
4879 tmp32_1
= tcg_const_i32(r1
);
4880 tmp32_2
= tcg_const_i32(r3
);
4881 potential_page_fault(s
);
4882 gen_helper_cs(cc_op
, cpu_env
, tmp32_1
, tmp
, tmp32_2
);
4884 tcg_temp_free_i64(tmp
);
4885 tcg_temp_free_i32(tmp32_1
);
4886 tcg_temp_free_i32(tmp32_2
);
4888 case 0xbd: /* CLM R1,M3,D2(B2) [RS] */
4889 insn
= ld_code4(env
, s
->pc
);
4890 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4891 tmp
= get_address(s
, 0, b2
, d2
);
4892 tmp32_1
= load_reg32(r1
);
4893 tmp32_2
= tcg_const_i32(r3
);
4894 potential_page_fault(s
);
4895 gen_helper_clm(cc_op
, cpu_env
, tmp32_1
, tmp32_2
, tmp
);
4897 tcg_temp_free_i64(tmp
);
4898 tcg_temp_free_i32(tmp32_1
);
4899 tcg_temp_free_i32(tmp32_2
);
4901 case 0xbe: /* STCM R1,M3,D2(B2) [RS] */
4902 insn
= ld_code4(env
, s
->pc
);
4903 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4904 tmp
= get_address(s
, 0, b2
, d2
);
4905 tmp32_1
= load_reg32(r1
);
4906 tmp32_2
= tcg_const_i32(r3
);
4907 potential_page_fault(s
);
4908 gen_helper_stcm(cpu_env
, tmp32_1
, tmp32_2
, tmp
);
4909 tcg_temp_free_i64(tmp
);
4910 tcg_temp_free_i32(tmp32_1
);
4911 tcg_temp_free_i32(tmp32_2
);
4913 case 0xbf: /* ICM R1,M3,D2(B2) [RS] */
4914 insn
= ld_code4(env
, s
->pc
);
4915 decode_rs(s
, insn
, &r1
, &r3
, &b2
, &d2
);
4917 /* effectively a 32-bit load */
4918 tmp
= get_address(s
, 0, b2
, d2
);
4919 tmp32_1
= tcg_temp_new_i32();
4920 tmp32_2
= tcg_const_i32(r3
);
4921 tcg_gen_qemu_ld32u(tmp
, tmp
, get_mem_index(s
));
4922 store_reg32_i64(r1
, tmp
);
4923 tcg_gen_trunc_i64_i32(tmp32_1
, tmp
);
4924 set_cc_icm(s
, tmp32_2
, tmp32_1
);
4925 tcg_temp_free_i64(tmp
);
4926 tcg_temp_free_i32(tmp32_1
);
4927 tcg_temp_free_i32(tmp32_2
);
4929 uint32_t mask
= 0x00ffffffUL
;
4930 uint32_t shift
= 24;
4932 tmp
= get_address(s
, 0, b2
, d2
);
4933 tmp2
= tcg_temp_new_i64();
4934 tmp32_1
= load_reg32(r1
);
4935 tmp32_2
= tcg_temp_new_i32();
4936 tmp32_3
= tcg_const_i32(r3
);
4937 tmp32_4
= tcg_const_i32(0);
4940 tcg_gen_qemu_ld8u(tmp2
, tmp
, get_mem_index(s
));
4941 tcg_gen_trunc_i64_i32(tmp32_2
, tmp2
);
4943 tcg_gen_shli_i32(tmp32_2
, tmp32_2
, shift
);
4945 tcg_gen_andi_i32(tmp32_1
, tmp32_1
, mask
);
4946 tcg_gen_or_i32(tmp32_1
, tmp32_1
, tmp32_2
);
4947 tcg_gen_or_i32(tmp32_4
, tmp32_4
, tmp32_2
);
4948 tcg_gen_addi_i64(tmp
, tmp
, 1);
4950 m3
= (m3
<< 1) & 0xf;
4951 mask
= (mask
>> 8) | 0xff000000UL
;
4954 store_reg32(r1
, tmp32_1
);
4955 set_cc_icm(s
, tmp32_3
, tmp32_4
);
4956 tcg_temp_free_i64(tmp
);
4957 tcg_temp_free_i64(tmp2
);
4958 tcg_temp_free_i32(tmp32_1
);
4959 tcg_temp_free_i32(tmp32_2
);
4960 tcg_temp_free_i32(tmp32_3
);
4961 tcg_temp_free_i32(tmp32_4
);
4963 /* i.e. env->cc = 0 */
4964 gen_op_movi_cc(s
, 0);
4969 insn
= ld_code6(env
, s
->pc
);
4970 r1
= (insn
>> 36) & 0xf;
4971 op
= (insn
>> 32) & 0xf;
4975 disas_c0(env
, s
, op
, r1
, i2
);
4978 disas_c2(env
, s
, op
, r1
, i2
);
4984 case 0xd2: /* MVC D1(L,B1),D2(B2) [SS] */
4985 case 0xd4: /* NC D1(L,B1),D2(B2) [SS] */
4986 case 0xd5: /* CLC D1(L,B1),D2(B2) [SS] */
4987 case 0xd6: /* OC D1(L,B1),D2(B2) [SS] */
4988 case 0xd7: /* XC D1(L,B1),D2(B2) [SS] */
4989 case 0xdc: /* TR D1(L,B1),D2(B2) [SS] */
4990 case 0xf3: /* UNPK D1(L1,B1),D2(L2,B2) [SS] */
4991 insn
= ld_code6(env
, s
->pc
);
4992 vl
= tcg_const_i32((insn
>> 32) & 0xff);
4993 b1
= (insn
>> 28) & 0xf;
4994 b2
= (insn
>> 12) & 0xf;
4995 d1
= (insn
>> 16) & 0xfff;
4997 tmp
= get_address(s
, 0, b1
, d1
);
4998 tmp2
= get_address(s
, 0, b2
, d2
);
5001 gen_op_mvc(s
, (insn
>> 32) & 0xff, tmp
, tmp2
);
5004 potential_page_fault(s
);
5005 gen_helper_nc(cc_op
, cpu_env
, vl
, tmp
, tmp2
);
5009 gen_op_clc(s
, (insn
>> 32) & 0xff, tmp
, tmp2
);
5012 potential_page_fault(s
);
5013 gen_helper_oc(cc_op
, cpu_env
, vl
, tmp
, tmp2
);
5017 potential_page_fault(s
);
5018 gen_helper_xc(cc_op
, cpu_env
, vl
, tmp
, tmp2
);
5022 potential_page_fault(s
);
5023 gen_helper_tr(cpu_env
, vl
, tmp
, tmp2
);
5027 potential_page_fault(s
);
5028 gen_helper_unpk(cpu_env
, vl
, tmp
, tmp2
);
5033 tcg_temp_free_i64(tmp
);
5034 tcg_temp_free_i64(tmp2
);
5036 #ifndef CONFIG_USER_ONLY
5037 case 0xda: /* MVCP D1(R1,B1),D2(B2),R3 [SS] */
5038 case 0xdb: /* MVCS D1(R1,B1),D2(B2),R3 [SS] */
5039 check_privileged(env
, s
, ilc
);
5040 potential_page_fault(s
);
5041 insn
= ld_code6(env
, s
->pc
);
5042 r1
= (insn
>> 36) & 0xf;
5043 r3
= (insn
>> 32) & 0xf;
5044 b1
= (insn
>> 28) & 0xf;
5045 d1
= (insn
>> 16) & 0xfff;
5046 b2
= (insn
>> 12) & 0xf;
5050 tmp2
= get_address(s
, 0, b1
, d1
);
5051 tmp3
= get_address(s
, 0, b2
, d2
);
5053 gen_helper_mvcp(cc_op
, cpu_env
, tmp
, tmp2
, tmp3
);
5055 gen_helper_mvcs(cc_op
, cpu_env
, tmp
, tmp2
, tmp3
);
5058 tcg_temp_free_i64(tmp
);
5059 tcg_temp_free_i64(tmp2
);
5060 tcg_temp_free_i64(tmp3
);
5064 insn
= ld_code6(env
, s
->pc
);
5067 r1
= (insn
>> 36) & 0xf;
5068 x2
= (insn
>> 32) & 0xf;
5069 b2
= (insn
>> 28) & 0xf;
5070 d2
= ((int)((((insn
>> 16) & 0xfff)
5071 | ((insn
<< 4) & 0xff000)) << 12)) >> 12;
5072 disas_e3(env
, s
, op
, r1
, x2
, b2
, d2
);
5074 #ifndef CONFIG_USER_ONLY
5076 /* Test Protection */
5077 check_privileged(env
, s
, ilc
);
5078 insn
= ld_code6(env
, s
->pc
);
5080 disas_e5(env
, s
, insn
);
5084 insn
= ld_code6(env
, s
->pc
);
5087 r1
= (insn
>> 36) & 0xf;
5088 r3
= (insn
>> 32) & 0xf;
5089 b2
= (insn
>> 28) & 0xf;
5090 d2
= ((int)((((insn
>> 16) & 0xfff)
5091 | ((insn
<< 4) & 0xff000)) << 12)) >> 12;
5092 disas_eb(env
, s
, op
, r1
, r3
, b2
, d2
);
5095 insn
= ld_code6(env
, s
->pc
);
5098 r1
= (insn
>> 36) & 0xf;
5099 x2
= (insn
>> 32) & 0xf;
5100 b2
= (insn
>> 28) & 0xf;
5101 d2
= (short)((insn
>> 16) & 0xfff);
5102 r1b
= (insn
>> 12) & 0xf;
5103 disas_ed(env
, s
, op
, r1
, x2
, b2
, d2
, r1b
);
5106 qemu_log_mask(LOG_UNIMP
, "unimplemented opcode 0x%x\n", opc
);
5107 gen_illegal_opcode(env
, s
, ilc
);
5111 /* Instruction length is encoded in the opcode */
5115 static inline void gen_intermediate_code_internal(CPUS390XState
*env
,
5116 TranslationBlock
*tb
,
5120 target_ulong pc_start
;
5121 uint64_t next_page_start
;
5122 uint16_t *gen_opc_end
;
5124 int num_insns
, max_insns
;
5130 if (!(tb
->flags
& FLAG_MASK_64
)) {
5131 pc_start
&= 0x7fffffff;
5135 dc
.is_jmp
= DISAS_NEXT
;
5137 dc
.cc_op
= CC_OP_DYNAMIC
;
5139 gen_opc_end
= gen_opc_buf
+ OPC_MAX_SIZE
;
5141 next_page_start
= (pc_start
& TARGET_PAGE_MASK
) + TARGET_PAGE_SIZE
;
5144 max_insns
= tb
->cflags
& CF_COUNT_MASK
;
5145 if (max_insns
== 0) {
5146 max_insns
= CF_COUNT_MASK
;
5152 if (unlikely(!QTAILQ_EMPTY(&env
->breakpoints
))) {
5153 QTAILQ_FOREACH(bp
, &env
->breakpoints
, entry
) {
5154 if (bp
->pc
== dc
.pc
) {
5161 j
= gen_opc_ptr
- gen_opc_buf
;
5165 gen_opc_instr_start
[lj
++] = 0;
5168 gen_opc_pc
[lj
] = dc
.pc
;
5169 gen_opc_cc_op
[lj
] = dc
.cc_op
;
5170 gen_opc_instr_start
[lj
] = 1;
5171 gen_opc_icount
[lj
] = num_insns
;
5173 if (num_insns
+ 1 == max_insns
&& (tb
->cflags
& CF_LAST_IO
)) {
5176 #if defined(S390X_DEBUG_DISAS_VERBOSE)
5177 LOG_DISAS("pc " TARGET_FMT_lx
"\n",
5180 disas_s390_insn(env
, &dc
);
5183 if (env
->singlestep_enabled
) {
5186 } while (!dc
.is_jmp
&& gen_opc_ptr
< gen_opc_end
&& dc
.pc
< next_page_start
5187 && num_insns
< max_insns
&& !env
->singlestep_enabled
5191 update_psw_addr(&dc
);
5194 if (singlestep
&& dc
.cc_op
!= CC_OP_DYNAMIC
) {
5195 gen_op_calc_cc(&dc
);
5197 /* next TB starts off with CC_OP_DYNAMIC, so make sure the cc op type
5199 gen_op_set_cc_op(&dc
);
5202 if (tb
->cflags
& CF_LAST_IO
) {
5205 /* Generate the return instruction */
5206 if (dc
.is_jmp
!= DISAS_TB_JUMP
) {
5209 gen_icount_end(tb
, num_insns
);
5210 *gen_opc_ptr
= INDEX_op_end
;
5212 j
= gen_opc_ptr
- gen_opc_buf
;
5215 gen_opc_instr_start
[lj
++] = 0;
5218 tb
->size
= dc
.pc
- pc_start
;
5219 tb
->icount
= num_insns
;
5221 #if defined(S390X_DEBUG_DISAS)
5222 log_cpu_state_mask(CPU_LOG_TB_CPU
, env
, 0);
5223 if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM
)) {
5224 qemu_log("IN: %s\n", lookup_symbol(pc_start
));
5225 log_target_disas(pc_start
, dc
.pc
- pc_start
, 1);
/* Translate a TranslationBlock without recording per-insn PC mappings
 * (the final 0 argument disables the PC-search mode of the internal
 * translator — presumably a "search_pc" flag; confirm against
 * gen_intermediate_code_internal's declaration). */
void gen_intermediate_code (CPUS390XState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
/* Translate a TranslationBlock with per-insn PC mapping enabled
 * (the final 1 argument turns on the internal translator's PC-search
 * mode — presumably "search_pc", used so restore_state_to_opc can map
 * a host PC back to a guest PC; confirm against
 * gen_intermediate_code_internal's declaration). */
void gen_intermediate_code_pc (CPUS390XState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
5241 void restore_state_to_opc(CPUS390XState
*env
, TranslationBlock
*tb
, int pc_pos
)
5244 env
->psw
.addr
= gen_opc_pc
[pc_pos
];
5245 cc_op
= gen_opc_cc_op
[pc_pos
];
5246 if ((cc_op
!= CC_OP_DYNAMIC
) && (cc_op
!= CC_OP_STATIC
)) {