/*
 * Scraper residue removed: this file was captured from the git web view at
 * git.proxmox.com (qemu.git, target-s390x/translate.c, commit
 * "target-s390: Convert MULTIPLY HALFWORD, SINGLE").
 */
1 /*
2 * S/390 translation
3 *
4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
6 *
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
11 *
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19 */
20
21 /* #define DEBUG_INLINE_BRANCHES */
22 #define S390X_DEBUG_DISAS
23 /* #define S390X_DEBUG_DISAS_VERBOSE */
24
25 #ifdef S390X_DEBUG_DISAS_VERBOSE
26 # define LOG_DISAS(...) qemu_log(__VA_ARGS__)
27 #else
28 # define LOG_DISAS(...) do { } while (0)
29 #endif
30
31 #include "cpu.h"
32 #include "disas/disas.h"
33 #include "tcg-op.h"
34 #include "qemu/log.h"
35
36 /* global register indexes */
37 static TCGv_ptr cpu_env;
38
39 #include "exec/gen-icount.h"
40 #include "helper.h"
41 #define GEN_HELPER 1
42 #include "helper.h"
43
44
/* Information that (most) every instruction needs to manipulate. */
typedef struct DisasContext DisasContext;
typedef struct DisasInsn DisasInsn;
typedef struct DisasFields DisasFields;

/* Per-translation state threaded through all of the gen_* helpers below. */
struct DisasContext {
    struct TranslationBlock *tb;   /* the TB being translated */
    const DisasInsn *insn;         /* decode-table entry for the current insn */
    DisasFields *fields;           /* decoded operand fields of that insn */
    uint64_t pc, next_pc;          /* guest PC of the current / following insn */
    enum cc_op cc_op;              /* how the condition code is currently tracked */
    bool singlestep_enabled;
    int is_jmp;                    /* DISAS_* status controlling the translation loop */
};
59
/* Information carried about a condition to be evaluated. */
typedef struct {
    TCGCond cond:8;     /* TCG comparison to apply between a and b */
    bool is_64;         /* true: operands in u.s64, false: in u.s32 */
    bool g1;            /* operand a is a global TCG value — do not free */
    bool g2;            /* operand b is a global TCG value — do not free */
    union {
        struct { TCGv_i64 a, b; } s64;
        struct { TCGv_i32 a, b; } s32;
    } u;
} DisasCompare;
71
72 #define DISAS_EXCP 4
73
74 static void gen_op_calc_cc(DisasContext *s);
75
76 #ifdef DEBUG_INLINE_BRANCHES
77 static uint64_t inline_branch_hit[CC_OP_MAX];
78 static uint64_t inline_branch_miss[CC_OP_MAX];
79 #endif
80
/* Trace a raw instruction word when verbose disas logging is compiled in. */
static inline void debug_insn(uint64_t insn)
{
    LOG_DISAS("insn: 0x%" PRIx64 "\n", insn);
}
85
86 static inline uint64_t pc_to_link_info(DisasContext *s, uint64_t pc)
87 {
88 if (!(s->tb->flags & FLAG_MASK_64)) {
89 if (s->tb->flags & FLAG_MASK_32) {
90 return pc | 0x80000000;
91 }
92 }
93 return pc;
94 }
95
/* Dump the architected CPU state (PSW, general, floating point and --
   for system emulation -- control registers) to F, four registers per
   line, for debug output such as 'info registers'.  */
void cpu_dump_state(CPUS390XState *env, FILE *f, fprintf_function cpu_fprintf,
                    int flags)
{
    int i;

    /* cc_op values above 3 mean the CC is still in symbolic (lazy) form;
       print its name rather than a number.  */
    if (env->cc_op > 3) {
        cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %15s\n",
                    env->psw.mask, env->psw.addr, cc_name(env->cc_op));
    } else {
        cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %02x\n",
                    env->psw.mask, env->psw.addr, env->cc_op);
    }

    /* General registers.  */
    for (i = 0; i < 16; i++) {
        cpu_fprintf(f, "R%02d=%016" PRIx64, i, env->regs[i]);
        if ((i % 4) == 3) {
            cpu_fprintf(f, "\n");
        } else {
            cpu_fprintf(f, " ");
        }
    }

    /* Floating point registers.  */
    for (i = 0; i < 16; i++) {
        cpu_fprintf(f, "F%02d=%016" PRIx64, i, env->fregs[i].ll);
        if ((i % 4) == 3) {
            cpu_fprintf(f, "\n");
        } else {
            cpu_fprintf(f, " ");
        }
    }

#ifndef CONFIG_USER_ONLY
    /* Control registers only exist for system emulation.  */
    for (i = 0; i < 16; i++) {
        cpu_fprintf(f, "C%02d=%016" PRIx64, i, env->cregs[i]);
        if ((i % 4) == 3) {
            cpu_fprintf(f, "\n");
        } else {
            cpu_fprintf(f, " ");
        }
    }
#endif

#ifdef DEBUG_INLINE_BRANCHES
    /* Per-cc_op statistics of branches handled inline vs. via helper.  */
    for (i = 0; i < CC_OP_MAX; i++) {
        cpu_fprintf(f, "  %15s = %10ld\t%10ld\n", cc_name(i),
                    inline_branch_miss[i], inline_branch_hit[i]);
    }
#endif

    cpu_fprintf(f, "\n");
}
147
/* TCG globals mirroring the architected PSW in CPUS390XState.  */
static TCGv_i64 psw_addr;
static TCGv_i64 psw_mask;

/* Lazy condition-code state: cc_op names the pending operation, and
   cc_src/cc_dst/cc_vr hold its operands/result for later evaluation.  */
static TCGv_i32 cc_op;
static TCGv_i64 cc_src;
static TCGv_i64 cc_dst;
static TCGv_i64 cc_vr;

/* Register-name strings for r0-r15 and f0-f15; storage must outlive the
   TCG globals that reference it.  */
static char cpu_reg_names[32][4];
static TCGv_i64 regs[16];
static TCGv_i64 fregs[16];

/* cc_op recorded per generated opcode, for restoring state on a fault.  */
static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];
161
/* Create the TCG globals backing the architected CPU state and register
   the generated helper functions.  Called once at startup.  */
void s390x_translate_init(void)
{
    int i;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    psw_addr = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUS390XState, psw.addr),
                                      "psw_addr");
    psw_mask = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUS390XState, psw.mask),
                                      "psw_mask");

    cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUS390XState, cc_op),
                                   "cc_op");
    cc_src = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_src),
                                    "cc_src");
    cc_dst = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_dst),
                                    "cc_dst");
    cc_vr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_vr),
                                   "cc_vr");

    /* General registers r0-r15.  */
    for (i = 0; i < 16; i++) {
        snprintf(cpu_reg_names[i], sizeof(cpu_reg_names[0]), "r%d", i);
        regs[i] = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUS390XState, regs[i]),
                                     cpu_reg_names[i]);
    }

    /* Floating point registers f0-f15; names stored after the GPR names.  */
    for (i = 0; i < 16; i++) {
        snprintf(cpu_reg_names[i + 16], sizeof(cpu_reg_names[0]), "f%d", i);
        fregs[i] = tcg_global_mem_new(TCG_AREG0,
                                      offsetof(CPUS390XState, fregs[i].d),
                                      cpu_reg_names[i + 16]);
    }

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"
}
201
/* Return a fresh temporary holding the full 64-bit value of GPR REG.  */
static inline TCGv_i64 load_reg(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_mov_i64(r, regs[reg]);
    return r;
}

/* Return a fresh temporary holding the 64-bit contents of FPR REG.  */
static inline TCGv_i64 load_freg(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_mov_i64(r, fregs[reg]);
    return r;
}

/* Return a fresh i32 temporary with the short-float part (the high 32
   bits) of FPR REG.  */
static inline TCGv_i32 load_freg32(int reg)
{
    TCGv_i32 r = tcg_temp_new_i32();
#if HOST_LONG_BITS == 32
    tcg_gen_mov_i32(r, TCGV_HIGH(fregs[reg]));
#else
    /* On 64-bit hosts an i32 temp occupies an i64 slot; reinterpret the
       new temp as i64 and shift the high half down into it.  */
    tcg_gen_shri_i64(MAKE_TCGV_I64(GET_TCGV_I32(r)), fregs[reg], 32);
#endif
    return r;
}

/* Return a fresh i32 temporary with the low 32 bits of GPR REG.  */
static inline TCGv_i32 load_reg32(int reg)
{
    TCGv_i32 r = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(r, regs[reg]);
    return r;
}

/* Return a fresh i64 temporary with the low 32 bits of GPR REG,
   sign-extended to 64 bits.  */
static inline TCGv_i64 load_reg32_i64(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_ext32s_i64(r, regs[reg]);
    return r;
}
240
/* Store all 64 bits of V into GPR REG.  */
static inline void store_reg(int reg, TCGv_i64 v)
{
    tcg_gen_mov_i64(regs[reg], v);
}

/* Store all 64 bits of V into FPR REG.  */
static inline void store_freg(int reg, TCGv_i64 v)
{
    tcg_gen_mov_i64(fregs[reg], v);
}

/* Store V into the low 32 bits of GPR REG.  */
static inline void store_reg32(int reg, TCGv_i32 v)
{
    /* 32 bit register writes keep the upper half */
#if HOST_LONG_BITS == 32
    tcg_gen_mov_i32(TCGV_LOW(regs[reg]), v);
#else
    /* Reinterpret the i32 temp as its underlying i64 slot for deposit.  */
    tcg_gen_deposit_i64(regs[reg], regs[reg],
                        MAKE_TCGV_I64(GET_TCGV_I32(v)), 0, 32);
#endif
}

/* Store the low 32 bits of the i64 V into the low half of GPR REG.  */
static inline void store_reg32_i64(int reg, TCGv_i64 v)
{
    /* 32 bit register writes keep the upper half */
    tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 32);
}

/* Store V into the low 16 bits of GPR REG.  */
static inline void store_reg16(int reg, TCGv_i32 v)
{
    /* 16 bit register writes keep the upper bytes */
#if HOST_LONG_BITS == 32
    tcg_gen_deposit_i32(TCGV_LOW(regs[reg]), TCGV_LOW(regs[reg]), v, 0, 16);
#else
    tcg_gen_deposit_i64(regs[reg], regs[reg],
                        MAKE_TCGV_I64(GET_TCGV_I32(v)), 0, 16);
#endif
}

/* Store the low 8 bits of V into the low byte of GPR REG.  */
static inline void store_reg8(int reg, TCGv_i64 v)
{
    /* 8 bit register writes keep the upper bytes */
    tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 8);
}

/* Store V into the short-float part (high 32 bits) of FPR REG.  */
static inline void store_freg32(int reg, TCGv_i32 v)
{
    /* 32 bit register writes keep the lower half */
#if HOST_LONG_BITS == 32
    tcg_gen_mov_i32(TCGV_HIGH(fregs[reg]), v);
#else
    tcg_gen_deposit_i64(fregs[reg], fregs[reg],
                        MAKE_TCGV_I64(GET_TCGV_I32(v)), 32, 32);
#endif
}
295
/* Write the current translation PC into the architected psw.addr.  */
static inline void update_psw_addr(DisasContext *s)
{
    /* psw.addr */
    tcg_gen_movi_i64(psw_addr, s->pc);
}

/* Before an operation that may fault in system mode, synchronize
   psw.addr and the condition code so the exception path observes
   consistent architected state.  User-only builds need no sync here.  */
static inline void potential_page_fault(DisasContext *s)
{
#ifndef CONFIG_USER_ONLY
    update_psw_addr(s);
    gen_op_calc_cc(s);
#endif
}
309
/* Fetch 2 bytes of instruction text at PC, zero-extended to 64 bits.  */
static inline uint64_t ld_code2(CPUS390XState *env, uint64_t pc)
{
    return (uint64_t)cpu_lduw_code(env, pc);
}

/* Fetch 4 bytes of instruction text at PC, zero-extended to 64 bits.  */
static inline uint64_t ld_code4(CPUS390XState *env, uint64_t pc)
{
    return (uint64_t)(uint32_t)cpu_ldl_code(env, pc);
}

/* Fetch a 6-byte instruction: the first halfword lands in bits 47..32,
   the remaining 4 bytes in bits 31..0.  */
static inline uint64_t ld_code6(CPUS390XState *env, uint64_t pc)
{
    return (ld_code2(env, pc) << 32) | ld_code4(env, pc + 2);
}
324
325 static inline int get_mem_index(DisasContext *s)
326 {
327 switch (s->tb->flags & FLAG_MASK_ASC) {
328 case PSW_ASC_PRIMARY >> 32:
329 return 0;
330 case PSW_ASC_SECONDARY >> 32:
331 return 1;
332 case PSW_ASC_HOME >> 32:
333 return 2;
334 default:
335 tcg_abort();
336 break;
337 }
338 }
339
/* Emit a call to the exception helper with exception number EXCP.  */
static void gen_exception(int excp)
{
    TCGv_i32 tmp = tcg_const_i32(excp);
    gen_helper_exception(cpu_env, tmp);
    tcg_temp_free_i32(tmp);
}
346
/* Raise program exception CODE: record the code and instruction length
   in the CPU state, advance the PSW past the faulting instruction,
   flush the condition code, trigger EXCP_PGM and end the TB.  */
static void gen_program_exception(DisasContext *s, int code)
{
    TCGv_i32 tmp;

    /* Remember what pgm exception this was. */
    tmp = tcg_const_i32(code);
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUS390XState, int_pgm_code));
    tcg_temp_free_i32(tmp);

    /* And its instruction length, derived from the decode window.  */
    tmp = tcg_const_i32(s->next_pc - s->pc);
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUS390XState, int_pgm_ilen));
    tcg_temp_free_i32(tmp);

    /* Advance past instruction. */
    s->pc = s->next_pc;
    update_psw_addr(s);

    /* Save off cc. */
    gen_op_calc_cc(s);

    /* Trigger exception. */
    gen_exception(EXCP_PGM);

    /* End TB here. */
    s->is_jmp = DISAS_EXCP;
}
373
/* Raise a specification program exception for an illegal opcode.  */
static inline void gen_illegal_opcode(DisasContext *s)
{
    gen_program_exception(s, PGM_SPECIFICATION);
}

/* Raise a privileged-operation exception when translating in problem
   state (PSTATE bit set in the TB flags).  */
static inline void check_privileged(DisasContext *s)
{
    if (s->tb->flags & (PSW_MASK_PSTATE >> 32)) {
        gen_program_exception(s, PGM_PRIVILEGED);
    }
}
385
/* Compute the effective address X2 + B2 + D2 into a new temporary.
   Register number 0 means "no register" for both index and base.  In
   24/31-bit addressing mode the result is truncated to 31 bits.  */
static TCGv_i64 get_address(DisasContext *s, int x2, int b2, int d2)
{
    TCGv_i64 tmp;

    /* 31-bitify the immediate part; register contents are dealt with below */
    if (!(s->tb->flags & FLAG_MASK_64)) {
        d2 &= 0x7fffffffUL;
    }

    if (x2) {
        if (d2) {
            tmp = tcg_const_i64(d2);
            tcg_gen_add_i64(tmp, tmp, regs[x2]);
        } else {
            tmp = load_reg(x2);
        }
        if (b2) {
            tcg_gen_add_i64(tmp, tmp, regs[b2]);
        }
    } else if (b2) {
        if (d2) {
            tmp = tcg_const_i64(d2);
            tcg_gen_add_i64(tmp, tmp, regs[b2]);
        } else {
            tmp = load_reg(b2);
        }
    } else {
        /* Neither index nor base: the address is just the displacement.  */
        tmp = tcg_const_i64(d2);
    }

    /* 31-bit mode mask if there are values loaded from registers */
    if (!(s->tb->flags & FLAG_MASK_64) && (x2 || b2)) {
        tcg_gen_andi_i64(tmp, tmp, 0x7fffffffUL);
    }

    return tmp;
}
423
/* Record a compile-time-constant CC value; emits no code.  */
static void gen_op_movi_cc(DisasContext *s, uint32_t val)
{
    s->cc_op = CC_OP_CONST0 + val;
}

/* The gen_op_update{1,2,3}_cc_{i32,i64} helpers stash the operands of a
   CC-setting operation into the cc_src/cc_dst/cc_vr globals and remember
   OP, so the condition code can be computed lazily later.  Unused slots
   are discarded so TCG can optimize them away.  */

static void gen_op_update1_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 dst)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

static void gen_op_update1_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 dst)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

static void gen_op_update2_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
                                  TCGv_i64 dst)
{
    tcg_gen_mov_i64(cc_src, src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

static void gen_op_update2_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
                                  TCGv_i32 dst)
{
    tcg_gen_extu_i32_i64(cc_src, src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

static void gen_op_update3_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
                                  TCGv_i64 dst, TCGv_i64 vr)
{
    tcg_gen_mov_i64(cc_src, src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_mov_i64(cc_vr, vr);
    s->cc_op = op;
}

static void gen_op_update3_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
                                  TCGv_i32 dst, TCGv_i32 vr)
{
    tcg_gen_extu_i32_i64(cc_src, src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_extu_i32_i64(cc_vr, vr);
    s->cc_op = op;
}
480
/* Shorthands that establish the lazy CC state for the common compare
   and zero/nonzero result flavours.  */

/* CC from a 32-bit logical result (zero/nonzero).  */
static inline void set_cc_nz_u32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ, val);
}

/* CC from a 64-bit logical result (zero/nonzero).  */
static inline void set_cc_nz_u64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_NZ, val);
}

/* Record a 32-bit comparison under CC interpretation COND.  */
static inline void cmp_32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          enum cc_op cond)
{
    gen_op_update2_cc_i32(s, cond, v1, v2);
}

/* Record a 64-bit comparison under CC interpretation COND.  */
static inline void cmp_64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          enum cc_op cond)
{
    gen_op_update2_cc_i64(s, cond, v1, v2);
}

/* Signed 32-bit compare.  */
static inline void cmp_s32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTGT_32);
}

/* Unsigned 32-bit compare.  */
static inline void cmp_u32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTUGTU_32);
}

/* Signed 32-bit compare against an immediate.  */
static inline void cmp_s32c(DisasContext *s, TCGv_i32 v1, int32_t v2)
{
    /* XXX optimize for the constant? put it in s? */
    TCGv_i32 tmp = tcg_const_i32(v2);
    cmp_32(s, v1, tmp, CC_OP_LTGT_32);
    tcg_temp_free_i32(tmp);
}

/* Unsigned 32-bit compare against an immediate.  */
static inline void cmp_u32c(DisasContext *s, TCGv_i32 v1, uint32_t v2)
{
    TCGv_i32 tmp = tcg_const_i32(v2);
    cmp_32(s, v1, tmp, CC_OP_LTUGTU_32);
    tcg_temp_free_i32(tmp);
}

/* Signed 64-bit compare.  */
static inline void cmp_s64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTGT_64);
}

/* Unsigned 64-bit compare.  */
static inline void cmp_u64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTUGTU_64);
}

/* Signed 64-bit compare against an immediate.  */
static inline void cmp_s64c(DisasContext *s, TCGv_i64 v1, int64_t v2)
{
    TCGv_i64 tmp = tcg_const_i64(v2);
    cmp_s64(s, v1, tmp);
    tcg_temp_free_i64(tmp);
}

/* Unsigned 64-bit compare against an immediate.  */
static inline void cmp_u64c(DisasContext *s, TCGv_i64 v1, uint64_t v2)
{
    TCGv_i64 tmp = tcg_const_i64(v2);
    cmp_u64(s, v1, tmp);
    tcg_temp_free_i64(tmp);
}
551
/* CC from the sign of a 32-bit result (compare against zero).  */
static inline void set_cc_s32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_LTGT0_32, val);
}

/* CC from the sign of a 64-bit result (compare against zero).  */
static inline void set_cc_s64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_LTGT0_64, val);
}

/* CC for unsigned 64-bit addition; needs both operands and the result.  */
static void set_cc_addu64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_ADDU_64, v1, v2, vr);
}

/* CC from a 64-bit absolute-value (ABS) result.  */
static void set_cc_abs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_ABS_64, v1);
}

/* CC from a 64-bit negated-absolute (NABS) result.  */
static void set_cc_nabs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_NABS_64, v1);
}

/* CC for unsigned 32-bit addition.  */
static void set_cc_addu32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_ADDU_32, v1, v2, vr);
}

/* CC from a 32-bit absolute-value (ABS) result.  */
static void set_cc_abs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_ABS_32, v1);
}

/* CC from a 32-bit negated-absolute (NABS) result.  */
static void set_cc_nabs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NABS_32, v1);
}

/* CC from a 32-bit complement (COMP) result.  */
static void set_cc_comp32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_COMP_32, v1);
}

/* CC from a 64-bit complement (COMP) result.  */
static void set_cc_comp64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_COMP_64, v1);
}

/* CC for an ICM-style result (mask and inserted value).  */
static void set_cc_icm(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    gen_op_update2_cc_i32(s, CC_OP_ICM, v1, v2);
}

/* CC for a 32-bit float compare where the second operand is already held
   in an i64.  */
static void set_cc_cmp_f32_i64(DisasContext *s, TCGv_i32 v1, TCGv_i64 v2)
{
    tcg_gen_extu_i32_i64(cc_src, v1);
    tcg_gen_mov_i64(cc_dst, v2);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_LTGT_F32;
}

/* CC from a 32-bit floating point result (CC_OP_NZ_F32).  */
static void gen_set_cc_nz_f32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ_F32, v1);
}
621
/* CC value is in env->cc_op */
static inline void set_cc_static(DisasContext *s)
{
    /* The computed 0..3 value already lives in the cc_op global; the
       operand globals carry nothing useful any more.  */
    tcg_gen_discard_i64(cc_src);
    tcg_gen_discard_i64(cc_dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_STATIC;
}

/* Write the compile-time-known cc_op into the cc_op global, unless the
   value already lives there (dynamic/static tracking).  */
static inline void gen_op_set_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC && s->cc_op != CC_OP_STATIC) {
        tcg_gen_movi_i32(cc_op, s->cc_op);
    }
}

/* Synchronize the lazily-tracked cc_op before leaving the TB.  */
static inline void gen_update_cc_op(DisasContext *s)
{
    gen_op_set_cc_op(s);
}
642
/* calculates cc into cc_op */
/* Materialize the lazily-tracked condition code: depending on how many
   operands s->cc_op needs, call the calc_cc helper with cc_src/cc_dst/
   cc_vr (padding unused slots with a dummy), leaving the 0..3 CC value
   in the cc_op global, then switch tracking to CC_OP_STATIC.  */
static void gen_op_calc_cc(DisasContext *s)
{
    TCGv_i32 local_cc_op = tcg_const_i32(s->cc_op);
    TCGv_i64 dummy = tcg_const_i64(0);

    switch (s->cc_op) {
    case CC_OP_CONST0:
    case CC_OP_CONST1:
    case CC_OP_CONST2:
    case CC_OP_CONST3:
        /* s->cc_op is the cc value */
        tcg_gen_movi_i32(cc_op, s->cc_op - CC_OP_CONST0);
        break;
    case CC_OP_STATIC:
        /* env->cc_op already is the cc value */
        break;
    case CC_OP_NZ:
    case CC_OP_ABS_64:
    case CC_OP_NABS_64:
    case CC_OP_ABS_32:
    case CC_OP_NABS_32:
    case CC_OP_LTGT0_32:
    case CC_OP_LTGT0_64:
    case CC_OP_COMP_32:
    case CC_OP_COMP_64:
    case CC_OP_NZ_F32:
    case CC_OP_NZ_F64:
        /* 1 argument */
        gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, dummy, cc_dst, dummy);
        break;
    case CC_OP_ICM:
    case CC_OP_LTGT_32:
    case CC_OP_LTGT_64:
    case CC_OP_LTUGTU_32:
    case CC_OP_LTUGTU_64:
    case CC_OP_TM_32:
    case CC_OP_TM_64:
    case CC_OP_LTGT_F32:
    case CC_OP_LTGT_F64:
    case CC_OP_SLAG:
        /* 2 arguments */
        gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, cc_src, cc_dst, dummy);
        break;
    case CC_OP_ADD_64:
    case CC_OP_ADDU_64:
    case CC_OP_SUB_64:
    case CC_OP_SUBU_64:
    case CC_OP_ADD_32:
    case CC_OP_ADDU_32:
    case CC_OP_SUB_32:
    case CC_OP_SUBU_32:
        /* 3 arguments */
        gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, cc_src, cc_dst, cc_vr);
        break;
    case CC_OP_DYNAMIC:
        /* unknown operation - assume 3 arguments and cc_op in env */
        gen_helper_calc_cc(cc_op, cpu_env, cc_op, cc_src, cc_dst, cc_vr);
        break;
    default:
        tcg_abort();
    }

    tcg_temp_free_i32(local_cc_op);
    tcg_temp_free_i64(dummy);

    /* We now have cc in cc_op as constant */
    set_cc_static(s);
}
712
/* Decode an RR-format instruction: two 4-bit register fields.  */
static inline void decode_rr(DisasContext *s, uint64_t insn, int *r1, int *r2)
{
    debug_insn(insn);

    *r1 = (insn >> 4) & 0xf;
    *r2 = insn & 0xf;
}

/* Decode an RX-format instruction and return its base+index+displacement
   effective address in a new temporary.  */
static inline TCGv_i64 decode_rx(DisasContext *s, uint64_t insn, int *r1,
                                 int *x2, int *b2, int *d2)
{
    debug_insn(insn);

    *r1 = (insn >> 20) & 0xf;
    *x2 = (insn >> 16) & 0xf;
    *b2 = (insn >> 12) & 0xf;
    *d2 = insn & 0xfff;

    return get_address(s, *x2, *b2, *d2);
}

/* Decode an RS-format instruction; for some opcodes the r3 field is
   really a mask (m3).  */
static inline void decode_rs(DisasContext *s, uint64_t insn, int *r1, int *r3,
                             int *b2, int *d2)
{
    debug_insn(insn);

    *r1 = (insn >> 20) & 0xf;
    /* aka m3 */
    *r3 = (insn >> 16) & 0xf;
    *b2 = (insn >> 12) & 0xf;
    *d2 = insn & 0xfff;
}

/* Decode an SI-format instruction (8-bit immediate plus base and
   displacement) and return the effective address in a new temporary.  */
static inline TCGv_i64 decode_si(DisasContext *s, uint64_t insn, int *i2,
                                 int *b1, int *d1)
{
    debug_insn(insn);

    *i2 = (insn >> 16) & 0xff;
    *b1 = (insn >> 12) & 0xf;
    *d1 = insn & 0xfff;

    return get_address(s, 0, *b1, *d1);
}
757
/* End the TB with a jump to guest address PC.  If the target is on the
   same guest page as this TB (so the mapping cannot change underneath
   us), emit a direct chained jump through goto_tb slot TB_NUM;
   otherwise fall back to a plain exit with psw_addr updated.  */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong pc)
{
    TranslationBlock *tb;

    gen_update_cc_op(s);

    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_i64(psw_addr, pc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_i64(psw_addr, pc);
        tcg_gen_exit_tb(0);
    }
}
778
/* Statistics hook: count a branch that had to go through the calc_cc
   helper.  Compiled out unless DEBUG_INLINE_BRANCHES is defined.  */
static inline void account_noninline_branch(DisasContext *s, int cc_op)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_miss[cc_op]++;
#endif
}

/* Statistics hook: count a branch handled with an inline comparison.  */
static inline void account_inline_branch(DisasContext *s, int cc_op)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_hit[cc_op]++;
#endif
}
792
/* Table of mask values to comparison codes, given a comparison as input.
   For a true comparison CC=3 will never be set, but we treat this
   conservatively for possible use when CC=3 indicates overflow.
   Indexed by the 4-bit branch mask (8=EQ, 4=LT, 2=GT, 1=CC3).  */
static const TCGCond ltgt_cond[16] = {
    TCG_COND_NEVER,  TCG_COND_NEVER,     /*    |    |    | x */
    TCG_COND_GT,     TCG_COND_NEVER,     /*    |    | GT | x */
    TCG_COND_LT,     TCG_COND_NEVER,     /*    | LT |    | x */
    TCG_COND_NE,     TCG_COND_NEVER,     /*    | LT | GT | x */
    TCG_COND_EQ,     TCG_COND_NEVER,     /* EQ |    |    | x */
    TCG_COND_GE,     TCG_COND_NEVER,     /* EQ |    | GT | x */
    TCG_COND_LE,     TCG_COND_NEVER,     /* EQ | LT |    | x */
    TCG_COND_ALWAYS, TCG_COND_ALWAYS,    /* EQ | LT | GT | x */
};

/* Table of mask values to comparison codes, given a logic op as input.
   For such, only CC=0 and CC=1 should be possible.  */
static const TCGCond nz_cond[16] = {
    /*    |    | x | x */
    TCG_COND_NEVER, TCG_COND_NEVER, TCG_COND_NEVER, TCG_COND_NEVER,
    /*    | NE | x | x */
    TCG_COND_NE, TCG_COND_NE, TCG_COND_NE, TCG_COND_NE,
    /* EQ |    | x | x */
    TCG_COND_EQ, TCG_COND_EQ, TCG_COND_EQ, TCG_COND_EQ,
    /* EQ | NE | x | x */
    TCG_COND_ALWAYS, TCG_COND_ALWAYS, TCG_COND_ALWAYS, TCG_COND_ALWAYS,
};
819
/* Interpret MASK in terms of S->CC_OP, and fill in C with all the
   details required to generate a TCG comparison.  Two phases: first map
   the (cc_op, mask) pair to a TCG condition (falling back to computing
   the CC via helper for combinations we cannot express inline), then
   load the comparison operands from the cc_* globals.  */
static void disas_jcc(DisasContext *s, DisasCompare *c, uint32_t mask)
{
    TCGCond cond;
    enum cc_op old_cc_op = s->cc_op;

    /* Trivially always/never taken: compare cc_op with itself.  */
    if (mask == 15 || mask == 0) {
        c->cond = (mask ? TCG_COND_ALWAYS : TCG_COND_NEVER);
        c->u.s32.a = cc_op;
        c->u.s32.b = cc_op;
        c->g1 = c->g2 = true;
        c->is_64 = false;
        return;
    }

    /* Find the TCG condition for the mask + cc op. */
    switch (old_cc_op) {
    case CC_OP_LTGT0_32:
    case CC_OP_LTGT0_64:
    case CC_OP_LTGT_32:
    case CC_OP_LTGT_64:
        cond = ltgt_cond[mask];
        if (cond == TCG_COND_NEVER) {
            goto do_dynamic;
        }
        account_inline_branch(s, old_cc_op);
        break;

    case CC_OP_LTUGTU_32:
    case CC_OP_LTUGTU_64:
        /* Same mask table, but compare unsigned.  */
        cond = tcg_unsigned_cond(ltgt_cond[mask]);
        if (cond == TCG_COND_NEVER) {
            goto do_dynamic;
        }
        account_inline_branch(s, old_cc_op);
        break;

    case CC_OP_NZ:
        cond = nz_cond[mask];
        if (cond == TCG_COND_NEVER) {
            goto do_dynamic;
        }
        account_inline_branch(s, old_cc_op);
        break;

    case CC_OP_TM_32:
    case CC_OP_TM_64:
        /* TEST UNDER MASK: only "all zero" / "not all zero" are easy.  */
        switch (mask) {
        case 8:
            cond = TCG_COND_EQ;
            break;
        case 4 | 2 | 1:
            cond = TCG_COND_NE;
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s, old_cc_op);
        break;

    case CC_OP_ICM:
        switch (mask) {
        case 8:
            cond = TCG_COND_EQ;
            break;
        case 4 | 2 | 1:
        case 4 | 2:
            cond = TCG_COND_NE;
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s, old_cc_op);
        break;

    default:
    do_dynamic:
        /* Calculate cc value.  */
        gen_op_calc_cc(s);
        /* FALLTHRU */

    case CC_OP_STATIC:
        /* Jump based on CC.  We'll load up the real cond below;
           the assignment here merely avoids a compiler warning.  */
        account_noninline_branch(s, old_cc_op);
        old_cc_op = CC_OP_STATIC;
        cond = TCG_COND_NEVER;
        break;
    }

    /* Load up the arguments of the comparison.  */
    c->is_64 = true;
    c->g1 = c->g2 = false;
    switch (old_cc_op) {
    case CC_OP_LTGT0_32:
        c->is_64 = false;
        c->u.s32.a = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(c->u.s32.a, cc_dst);
        c->u.s32.b = tcg_const_i32(0);
        break;
    case CC_OP_LTGT_32:
    case CC_OP_LTUGTU_32:
        c->is_64 = false;
        c->u.s32.a = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(c->u.s32.a, cc_src);
        c->u.s32.b = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(c->u.s32.b, cc_dst);
        break;

    case CC_OP_LTGT0_64:
    case CC_OP_NZ:
    case CC_OP_ICM:
        c->u.s64.a = cc_dst;
        c->u.s64.b = tcg_const_i64(0);
        c->g1 = true;
        break;
    case CC_OP_LTGT_64:
    case CC_OP_LTUGTU_64:
        c->u.s64.a = cc_src;
        c->u.s64.b = cc_dst;
        c->g1 = c->g2 = true;
        break;

    case CC_OP_TM_32:
    case CC_OP_TM_64:
        /* Compare (value & mask) against zero.  */
        c->u.s64.a = tcg_temp_new_i64();
        c->u.s64.b = tcg_const_i64(0);
        tcg_gen_and_i64(c->u.s64.a, cc_src, cc_dst);
        break;

    case CC_OP_STATIC:
        /* The CC is a computed 0..3 value in the cc_op global; pick the
           cheapest comparison that implements the mask.  */
        c->is_64 = false;
        c->u.s32.a = cc_op;
        c->g1 = true;
        switch (mask) {
        case 0x8 | 0x4 | 0x2: /* cc != 3 */
            cond = TCG_COND_NE;
            c->u.s32.b = tcg_const_i32(3);
            break;
        case 0x8 | 0x4 | 0x1: /* cc != 2 */
            cond = TCG_COND_NE;
            c->u.s32.b = tcg_const_i32(2);
            break;
        case 0x8 | 0x2 | 0x1: /* cc != 1 */
            cond = TCG_COND_NE;
            c->u.s32.b = tcg_const_i32(1);
            break;
        case 0x8 | 0x2: /* cc == 0 || cc == 2 => (cc & 1) == 0 */
            cond = TCG_COND_EQ;
            c->g1 = false;
            c->u.s32.a = tcg_temp_new_i32();
            c->u.s32.b = tcg_const_i32(0);
            tcg_gen_andi_i32(c->u.s32.a, cc_op, 1);
            break;
        case 0x8 | 0x4: /* cc < 2 */
            cond = TCG_COND_LTU;
            c->u.s32.b = tcg_const_i32(2);
            break;
        case 0x8: /* cc == 0 */
            cond = TCG_COND_EQ;
            c->u.s32.b = tcg_const_i32(0);
            break;
        case 0x4 | 0x2 | 0x1: /* cc != 0 */
            cond = TCG_COND_NE;
            c->u.s32.b = tcg_const_i32(0);
            break;
        case 0x4 | 0x1: /* cc == 1 || cc == 3 => (cc & 1) != 0 */
            cond = TCG_COND_NE;
            c->g1 = false;
            c->u.s32.a = tcg_temp_new_i32();
            c->u.s32.b = tcg_const_i32(0);
            tcg_gen_andi_i32(c->u.s32.a, cc_op, 1);
            break;
        case 0x4: /* cc == 1 */
            cond = TCG_COND_EQ;
            c->u.s32.b = tcg_const_i32(1);
            break;
        case 0x2 | 0x1: /* cc > 1 */
            cond = TCG_COND_GTU;
            c->u.s32.b = tcg_const_i32(1);
            break;
        case 0x2: /* cc == 2 */
            cond = TCG_COND_EQ;
            c->u.s32.b = tcg_const_i32(2);
            break;
        case 0x1: /* cc == 3 */
            cond = TCG_COND_EQ;
            c->u.s32.b = tcg_const_i32(3);
            break;
        default:
            /* CC is masked by something else: (8 >> cc) & mask.  */
            cond = TCG_COND_NE;
            c->g1 = false;
            c->u.s32.a = tcg_const_i32(8);
            c->u.s32.b = tcg_const_i32(0);
            tcg_gen_shr_i32(c->u.s32.a, c->u.s32.a, cc_op);
            tcg_gen_andi_i32(c->u.s32.a, c->u.s32.a, mask);
            break;
        }
        break;

    default:
        abort();
    }
    c->cond = cond;
}
1027
1028 static void free_compare(DisasCompare *c)
1029 {
1030 if (!c->g1) {
1031 if (c->is_64) {
1032 tcg_temp_free_i64(c->u.s64.a);
1033 } else {
1034 tcg_temp_free_i32(c->u.s32.a);
1035 }
1036 }
1037 if (!c->g2) {
1038 if (c->is_64) {
1039 tcg_temp_free_i64(c->u.s64.b);
1040 } else {
1041 tcg_temp_free_i32(c->u.s32.b);
1042 }
1043 }
1044 }
1045
/* Emit a branch to label SKIP that is taken when the condition selected
   by MASK is NOT satisfied; execution falls through on a match.  */
static void gen_jcc(DisasContext *s, uint32_t mask, int skip)
{
    DisasCompare c;
    TCGCond cond;

    disas_jcc(s, &c, mask);
    /* We branch around the matched case, so invert the condition.  */
    cond = tcg_invert_cond(c.cond);

    if (c.is_64) {
        tcg_gen_brcond_i64(cond, c.u.s64.a, c.u.s64.b, skip);
    } else {
        tcg_gen_brcond_i32(cond, c.u.s32.a, c.u.s32.b, skip);
    }

    free_compare(&c);
}
1062
1063 static void gen_bcr(DisasContext *s, uint32_t mask, TCGv_i64 target,
1064 uint64_t offset)
1065 {
1066 int skip;
1067
1068 if (mask == 0xf) {
1069 /* unconditional */
1070 gen_update_cc_op(s);
1071 tcg_gen_mov_i64(psw_addr, target);
1072 tcg_gen_exit_tb(0);
1073 } else if (mask == 0) {
1074 /* ignore cc and never match */
1075 gen_goto_tb(s, 0, offset + 2);
1076 } else {
1077 TCGv_i64 new_addr = tcg_temp_local_new_i64();
1078
1079 tcg_gen_mov_i64(new_addr, target);
1080 skip = gen_new_label();
1081 gen_jcc(s, mask, skip);
1082 gen_update_cc_op(s);
1083 tcg_gen_mov_i64(psw_addr, new_addr);
1084 tcg_temp_free_i64(new_addr);
1085 tcg_gen_exit_tb(0);
1086 gen_set_label(skip);
1087 tcg_temp_free_i64(new_addr);
1088 gen_goto_tb(s, 1, offset + 2);
1089 }
1090 }
1091
/* Branch relative on condition: PC-relative OFFSET, 4-byte instruction.
   Emits chained goto_tb exits for both the taken and not-taken paths
   and marks the TB as ended.  */
static void gen_brc(uint32_t mask, DisasContext *s, int32_t offset)
{
    int skip;

    if (mask == 0xf) {
        /* unconditional */
        gen_goto_tb(s, 0, s->pc + offset);
    } else if (mask == 0) {
        /* ignore cc and never match */
        gen_goto_tb(s, 0, s->pc + 4);
    } else {
        skip = gen_new_label();
        gen_jcc(s, mask, skip);
        gen_goto_tb(s, 0, s->pc + offset);
        gen_set_label(skip);
        gen_goto_tb(s, 1, s->pc + 4);
    }
    s->is_jmp = DISAS_TB_JUMP;
}
1111
1112 static void gen_op_mvc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
1113 {
1114 TCGv_i64 tmp, tmp2;
1115 int i;
1116 int l_memset = gen_new_label();
1117 int l_out = gen_new_label();
1118 TCGv_i64 dest = tcg_temp_local_new_i64();
1119 TCGv_i64 src = tcg_temp_local_new_i64();
1120 TCGv_i32 vl;
1121
1122 /* Find out if we should use the inline version of mvc */
1123 switch (l) {
1124 case 0:
1125 case 1:
1126 case 2:
1127 case 3:
1128 case 4:
1129 case 5:
1130 case 6:
1131 case 7:
1132 case 11:
1133 case 15:
1134 /* use inline */
1135 break;
1136 default:
1137 /* Fall back to helper */
1138 vl = tcg_const_i32(l);
1139 potential_page_fault(s);
1140 gen_helper_mvc(cpu_env, vl, s1, s2);
1141 tcg_temp_free_i32(vl);
1142 return;
1143 }
1144
1145 tcg_gen_mov_i64(dest, s1);
1146 tcg_gen_mov_i64(src, s2);
1147
1148 if (!(s->tb->flags & FLAG_MASK_64)) {
1149 /* XXX what if we overflow while moving? */
1150 tcg_gen_andi_i64(dest, dest, 0x7fffffffUL);
1151 tcg_gen_andi_i64(src, src, 0x7fffffffUL);
1152 }
1153
1154 tmp = tcg_temp_new_i64();
1155 tcg_gen_addi_i64(tmp, src, 1);
1156 tcg_gen_brcond_i64(TCG_COND_EQ, dest, tmp, l_memset);
1157 tcg_temp_free_i64(tmp);
1158
1159 switch (l) {
1160 case 0:
1161 tmp = tcg_temp_new_i64();
1162
1163 tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
1164 tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
1165
1166 tcg_temp_free_i64(tmp);
1167 break;
1168 case 1:
1169 tmp = tcg_temp_new_i64();
1170
1171 tcg_gen_qemu_ld16u(tmp, src, get_mem_index(s));
1172 tcg_gen_qemu_st16(tmp, dest, get_mem_index(s));
1173
1174 tcg_temp_free_i64(tmp);
1175 break;
1176 case 3:
1177 tmp = tcg_temp_new_i64();
1178
1179 tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
1180 tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));
1181
1182 tcg_temp_free_i64(tmp);
1183 break;
1184 case 4:
1185 tmp = tcg_temp_new_i64();
1186 tmp2 = tcg_temp_new_i64();
1187
1188 tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
1189 tcg_gen_addi_i64(src, src, 4);
1190 tcg_gen_qemu_ld8u(tmp2, src, get_mem_index(s));
1191 tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));
1192 tcg_gen_addi_i64(dest, dest, 4);
1193 tcg_gen_qemu_st8(tmp2, dest, get_mem_index(s));
1194
1195 tcg_temp_free_i64(tmp);
1196 tcg_temp_free_i64(tmp2);
1197 break;
1198 case 7:
1199 tmp = tcg_temp_new_i64();
1200
1201 tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
1202 tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
1203
1204 tcg_temp_free_i64(tmp);
1205 break;
1206 default:
1207 /* The inline version can become too big for too uneven numbers, only
1208 use it on known good lengths */
1209 tmp = tcg_temp_new_i64();
1210 tmp2 = tcg_const_i64(8);
1211 for (i = 0; (i + 7) <= l; i += 8) {
1212 tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
1213 tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
1214
1215 tcg_gen_add_i64(src, src, tmp2);
1216 tcg_gen_add_i64(dest, dest, tmp2);
1217 }
1218
1219 tcg_temp_free_i64(tmp2);
1220 tmp2 = tcg_const_i64(1);
1221
1222 for (; i <= l; i++) {
1223 tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
1224 tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
1225
1226 tcg_gen_add_i64(src, src, tmp2);
1227 tcg_gen_add_i64(dest, dest, tmp2);
1228 }
1229
1230 tcg_temp_free_i64(tmp2);
1231 tcg_temp_free_i64(tmp);
1232 break;
1233 }
1234
1235 tcg_gen_br(l_out);
1236
1237 gen_set_label(l_memset);
1238 /* memset case (dest == (src + 1)) */
1239
1240 tmp = tcg_temp_new_i64();
1241 tmp2 = tcg_temp_new_i64();
1242 /* fill tmp with the byte */
1243 tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
1244 tcg_gen_shli_i64(tmp2, tmp, 8);
1245 tcg_gen_or_i64(tmp, tmp, tmp2);
1246 tcg_gen_shli_i64(tmp2, tmp, 16);
1247 tcg_gen_or_i64(tmp, tmp, tmp2);
1248 tcg_gen_shli_i64(tmp2, tmp, 32);
1249 tcg_gen_or_i64(tmp, tmp, tmp2);
1250 tcg_temp_free_i64(tmp2);
1251
1252 tmp2 = tcg_const_i64(8);
1253
1254 for (i = 0; (i + 7) <= l; i += 8) {
1255 tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
1256 tcg_gen_addi_i64(dest, dest, 8);
1257 }
1258
1259 tcg_temp_free_i64(tmp2);
1260 tmp2 = tcg_const_i64(1);
1261
1262 for (; i <= l; i++) {
1263 tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
1264 tcg_gen_addi_i64(dest, dest, 1);
1265 }
1266
1267 tcg_temp_free_i64(tmp2);
1268 tcg_temp_free_i64(tmp);
1269
1270 gen_set_label(l_out);
1271
1272 tcg_temp_free(dest);
1273 tcg_temp_free(src);
1274 }
1275
1276 static void gen_op_clc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
1277 {
1278 TCGv_i64 tmp;
1279 TCGv_i64 tmp2;
1280 TCGv_i32 vl;
1281
1282 /* check for simple 32bit or 64bit match */
1283 switch (l) {
1284 case 0:
1285 tmp = tcg_temp_new_i64();
1286 tmp2 = tcg_temp_new_i64();
1287
1288 tcg_gen_qemu_ld8u(tmp, s1, get_mem_index(s));
1289 tcg_gen_qemu_ld8u(tmp2, s2, get_mem_index(s));
1290 cmp_u64(s, tmp, tmp2);
1291
1292 tcg_temp_free_i64(tmp);
1293 tcg_temp_free_i64(tmp2);
1294 return;
1295 case 1:
1296 tmp = tcg_temp_new_i64();
1297 tmp2 = tcg_temp_new_i64();
1298
1299 tcg_gen_qemu_ld16u(tmp, s1, get_mem_index(s));
1300 tcg_gen_qemu_ld16u(tmp2, s2, get_mem_index(s));
1301 cmp_u64(s, tmp, tmp2);
1302
1303 tcg_temp_free_i64(tmp);
1304 tcg_temp_free_i64(tmp2);
1305 return;
1306 case 3:
1307 tmp = tcg_temp_new_i64();
1308 tmp2 = tcg_temp_new_i64();
1309
1310 tcg_gen_qemu_ld32u(tmp, s1, get_mem_index(s));
1311 tcg_gen_qemu_ld32u(tmp2, s2, get_mem_index(s));
1312 cmp_u64(s, tmp, tmp2);
1313
1314 tcg_temp_free_i64(tmp);
1315 tcg_temp_free_i64(tmp2);
1316 return;
1317 case 7:
1318 tmp = tcg_temp_new_i64();
1319 tmp2 = tcg_temp_new_i64();
1320
1321 tcg_gen_qemu_ld64(tmp, s1, get_mem_index(s));
1322 tcg_gen_qemu_ld64(tmp2, s2, get_mem_index(s));
1323 cmp_u64(s, tmp, tmp2);
1324
1325 tcg_temp_free_i64(tmp);
1326 tcg_temp_free_i64(tmp2);
1327 return;
1328 }
1329
1330 potential_page_fault(s);
1331 vl = tcg_const_i32(l);
1332 gen_helper_clc(cc_op, cpu_env, vl, s1, s2);
1333 tcg_temp_free_i32(vl);
1334 set_cc_static(s);
1335 }
1336
1337 static void disas_e3(CPUS390XState *env, DisasContext* s, int op, int r1,
1338 int x2, int b2, int d2)
1339 {
1340 TCGv_i64 addr, tmp, tmp2, tmp3, tmp4;
1341 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
1342
1343 LOG_DISAS("disas_e3: op 0x%x r1 %d x2 %d b2 %d d2 %d\n",
1344 op, r1, x2, b2, d2);
1345 addr = get_address(s, x2, b2, d2);
1346 switch (op) {
1347 case 0x2: /* LTG R1,D2(X2,B2) [RXY] */
1348 case 0x4: /* lg r1,d2(x2,b2) */
1349 tcg_gen_qemu_ld64(regs[r1], addr, get_mem_index(s));
1350 if (op == 0x2) {
1351 set_cc_s64(s, regs[r1]);
1352 }
1353 break;
1354 case 0x12: /* LT R1,D2(X2,B2) [RXY] */
1355 tmp2 = tcg_temp_new_i64();
1356 tmp32_1 = tcg_temp_new_i32();
1357 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1358 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
1359 store_reg32(r1, tmp32_1);
1360 set_cc_s32(s, tmp32_1);
1361 tcg_temp_free_i64(tmp2);
1362 tcg_temp_free_i32(tmp32_1);
1363 break;
1364 case 0xd: /* DSG R1,D2(X2,B2) [RXY] */
1365 case 0x1d: /* DSGF R1,D2(X2,B2) [RXY] */
1366 tmp2 = tcg_temp_new_i64();
1367 if (op == 0x1d) {
1368 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1369 } else {
1370 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1371 }
1372 tmp4 = load_reg(r1 + 1);
1373 tmp3 = tcg_temp_new_i64();
1374 tcg_gen_div_i64(tmp3, tmp4, tmp2);
1375 store_reg(r1 + 1, tmp3);
1376 tcg_gen_rem_i64(tmp3, tmp4, tmp2);
1377 store_reg(r1, tmp3);
1378 tcg_temp_free_i64(tmp2);
1379 tcg_temp_free_i64(tmp3);
1380 tcg_temp_free_i64(tmp4);
1381 break;
1382 case 0xf: /* LRVG R1,D2(X2,B2) [RXE] */
1383 tmp2 = tcg_temp_new_i64();
1384 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1385 tcg_gen_bswap64_i64(tmp2, tmp2);
1386 store_reg(r1, tmp2);
1387 tcg_temp_free_i64(tmp2);
1388 break;
1389 case 0x14: /* LGF R1,D2(X2,B2) [RXY] */
1390 case 0x16: /* LLGF R1,D2(X2,B2) [RXY] */
1391 tmp2 = tcg_temp_new_i64();
1392 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1393 if (op == 0x14) {
1394 tcg_gen_ext32s_i64(tmp2, tmp2);
1395 }
1396 store_reg(r1, tmp2);
1397 tcg_temp_free_i64(tmp2);
1398 break;
1399 case 0x15: /* LGH R1,D2(X2,B2) [RXY] */
1400 tmp2 = tcg_temp_new_i64();
1401 tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
1402 store_reg(r1, tmp2);
1403 tcg_temp_free_i64(tmp2);
1404 break;
1405 case 0x17: /* LLGT R1,D2(X2,B2) [RXY] */
1406 tmp2 = tcg_temp_new_i64();
1407 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1408 tcg_gen_andi_i64(tmp2, tmp2, 0x7fffffffULL);
1409 store_reg(r1, tmp2);
1410 tcg_temp_free_i64(tmp2);
1411 break;
1412 case 0x1e: /* LRV R1,D2(X2,B2) [RXY] */
1413 tmp2 = tcg_temp_new_i64();
1414 tmp32_1 = tcg_temp_new_i32();
1415 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1416 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
1417 tcg_temp_free_i64(tmp2);
1418 tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
1419 store_reg32(r1, tmp32_1);
1420 tcg_temp_free_i32(tmp32_1);
1421 break;
1422 case 0x1f: /* LRVH R1,D2(X2,B2) [RXY] */
1423 tmp2 = tcg_temp_new_i64();
1424 tmp32_1 = tcg_temp_new_i32();
1425 tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
1426 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
1427 tcg_temp_free_i64(tmp2);
1428 tcg_gen_bswap16_i32(tmp32_1, tmp32_1);
1429 store_reg16(r1, tmp32_1);
1430 tcg_temp_free_i32(tmp32_1);
1431 break;
1432 case 0x20: /* CG R1,D2(X2,B2) [RXY] */
1433 case 0x21: /* CLG R1,D2(X2,B2) */
1434 case 0x30: /* CGF R1,D2(X2,B2) [RXY] */
1435 case 0x31: /* CLGF R1,D2(X2,B2) [RXY] */
1436 tmp2 = tcg_temp_new_i64();
1437 switch (op) {
1438 case 0x20:
1439 case 0x21:
1440 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1441 break;
1442 case 0x30:
1443 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1444 break;
1445 case 0x31:
1446 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1447 break;
1448 default:
1449 tcg_abort();
1450 }
1451 switch (op) {
1452 case 0x20:
1453 case 0x30:
1454 cmp_s64(s, regs[r1], tmp2);
1455 break;
1456 case 0x21:
1457 case 0x31:
1458 cmp_u64(s, regs[r1], tmp2);
1459 break;
1460 default:
1461 tcg_abort();
1462 }
1463 tcg_temp_free_i64(tmp2);
1464 break;
1465 case 0x24: /* stg r1, d2(x2,b2) */
1466 tcg_gen_qemu_st64(regs[r1], addr, get_mem_index(s));
1467 break;
1468 case 0x3e: /* STRV R1,D2(X2,B2) [RXY] */
1469 tmp32_1 = load_reg32(r1);
1470 tmp2 = tcg_temp_new_i64();
1471 tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
1472 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
1473 tcg_temp_free_i32(tmp32_1);
1474 tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
1475 tcg_temp_free_i64(tmp2);
1476 break;
1477 case 0x50: /* STY R1,D2(X2,B2) [RXY] */
1478 tmp32_1 = load_reg32(r1);
1479 tmp2 = tcg_temp_new_i64();
1480 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
1481 tcg_temp_free_i32(tmp32_1);
1482 tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
1483 tcg_temp_free_i64(tmp2);
1484 break;
1485 case 0x57: /* XY R1,D2(X2,B2) [RXY] */
1486 tmp32_1 = load_reg32(r1);
1487 tmp32_2 = tcg_temp_new_i32();
1488 tmp2 = tcg_temp_new_i64();
1489 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1490 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
1491 tcg_temp_free_i64(tmp2);
1492 tcg_gen_xor_i32(tmp32_2, tmp32_1, tmp32_2);
1493 store_reg32(r1, tmp32_2);
1494 set_cc_nz_u32(s, tmp32_2);
1495 tcg_temp_free_i32(tmp32_1);
1496 tcg_temp_free_i32(tmp32_2);
1497 break;
1498 case 0x58: /* LY R1,D2(X2,B2) [RXY] */
1499 tmp3 = tcg_temp_new_i64();
1500 tcg_gen_qemu_ld32u(tmp3, addr, get_mem_index(s));
1501 store_reg32_i64(r1, tmp3);
1502 tcg_temp_free_i64(tmp3);
1503 break;
1504 case 0x71: /* LAY R1,D2(X2,B2) [RXY] */
1505 store_reg(r1, addr);
1506 break;
1507 case 0x72: /* STCY R1,D2(X2,B2) [RXY] */
1508 tmp32_1 = load_reg32(r1);
1509 tmp2 = tcg_temp_new_i64();
1510 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
1511 tcg_gen_qemu_st8(tmp2, addr, get_mem_index(s));
1512 tcg_temp_free_i32(tmp32_1);
1513 tcg_temp_free_i64(tmp2);
1514 break;
1515 case 0x73: /* ICY R1,D2(X2,B2) [RXY] */
1516 tmp3 = tcg_temp_new_i64();
1517 tcg_gen_qemu_ld8u(tmp3, addr, get_mem_index(s));
1518 store_reg8(r1, tmp3);
1519 tcg_temp_free_i64(tmp3);
1520 break;
1521 case 0x76: /* LB R1,D2(X2,B2) [RXY] */
1522 case 0x77: /* LGB R1,D2(X2,B2) [RXY] */
1523 tmp2 = tcg_temp_new_i64();
1524 tcg_gen_qemu_ld8s(tmp2, addr, get_mem_index(s));
1525 switch (op) {
1526 case 0x76:
1527 tcg_gen_ext8s_i64(tmp2, tmp2);
1528 store_reg32_i64(r1, tmp2);
1529 break;
1530 case 0x77:
1531 tcg_gen_ext8s_i64(tmp2, tmp2);
1532 store_reg(r1, tmp2);
1533 break;
1534 default:
1535 tcg_abort();
1536 }
1537 tcg_temp_free_i64(tmp2);
1538 break;
1539 case 0x78: /* LHY R1,D2(X2,B2) [RXY] */
1540 tmp2 = tcg_temp_new_i64();
1541 tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
1542 store_reg32_i64(r1, tmp2);
1543 tcg_temp_free_i64(tmp2);
1544 break;
1545 case 0x80: /* NG R1,D2(X2,B2) [RXY] */
1546 case 0x81: /* OG R1,D2(X2,B2) [RXY] */
1547 case 0x82: /* XG R1,D2(X2,B2) [RXY] */
1548 tmp3 = tcg_temp_new_i64();
1549 tcg_gen_qemu_ld64(tmp3, addr, get_mem_index(s));
1550 switch (op) {
1551 case 0x80:
1552 tcg_gen_and_i64(regs[r1], regs[r1], tmp3);
1553 break;
1554 case 0x81:
1555 tcg_gen_or_i64(regs[r1], regs[r1], tmp3);
1556 break;
1557 case 0x82:
1558 tcg_gen_xor_i64(regs[r1], regs[r1], tmp3);
1559 break;
1560 default:
1561 tcg_abort();
1562 }
1563 set_cc_nz_u64(s, regs[r1]);
1564 tcg_temp_free_i64(tmp3);
1565 break;
1566 case 0x86: /* MLG R1,D2(X2,B2) [RXY] */
1567 tmp2 = tcg_temp_new_i64();
1568 tmp32_1 = tcg_const_i32(r1);
1569 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1570 gen_helper_mlg(cpu_env, tmp32_1, tmp2);
1571 tcg_temp_free_i64(tmp2);
1572 tcg_temp_free_i32(tmp32_1);
1573 break;
1574 case 0x87: /* DLG R1,D2(X2,B2) [RXY] */
1575 tmp2 = tcg_temp_new_i64();
1576 tmp32_1 = tcg_const_i32(r1);
1577 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1578 gen_helper_dlg(cpu_env, tmp32_1, tmp2);
1579 tcg_temp_free_i64(tmp2);
1580 tcg_temp_free_i32(tmp32_1);
1581 break;
1582 case 0x88: /* ALCG R1,D2(X2,B2) [RXY] */
1583 tmp2 = tcg_temp_new_i64();
1584 tmp3 = tcg_temp_new_i64();
1585 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1586 /* XXX possible optimization point */
1587 gen_op_calc_cc(s);
1588 tcg_gen_extu_i32_i64(tmp3, cc_op);
1589 tcg_gen_shri_i64(tmp3, tmp3, 1);
1590 tcg_gen_andi_i64(tmp3, tmp3, 1);
1591 tcg_gen_add_i64(tmp3, tmp2, tmp3);
1592 tcg_gen_add_i64(tmp3, regs[r1], tmp3);
1593 store_reg(r1, tmp3);
1594 set_cc_addu64(s, regs[r1], tmp2, tmp3);
1595 tcg_temp_free_i64(tmp2);
1596 tcg_temp_free_i64(tmp3);
1597 break;
1598 case 0x89: /* SLBG R1,D2(X2,B2) [RXY] */
1599 tmp2 = tcg_temp_new_i64();
1600 tmp32_1 = tcg_const_i32(r1);
1601 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1602 /* XXX possible optimization point */
1603 gen_op_calc_cc(s);
1604 gen_helper_slbg(cc_op, cpu_env, cc_op, tmp32_1, regs[r1], tmp2);
1605 set_cc_static(s);
1606 tcg_temp_free_i64(tmp2);
1607 tcg_temp_free_i32(tmp32_1);
1608 break;
1609 case 0x90: /* LLGC R1,D2(X2,B2) [RXY] */
1610 tcg_gen_qemu_ld8u(regs[r1], addr, get_mem_index(s));
1611 break;
1612 case 0x91: /* LLGH R1,D2(X2,B2) [RXY] */
1613 tcg_gen_qemu_ld16u(regs[r1], addr, get_mem_index(s));
1614 break;
1615 case 0x94: /* LLC R1,D2(X2,B2) [RXY] */
1616 tmp2 = tcg_temp_new_i64();
1617 tcg_gen_qemu_ld8u(tmp2, addr, get_mem_index(s));
1618 store_reg32_i64(r1, tmp2);
1619 tcg_temp_free_i64(tmp2);
1620 break;
1621 case 0x95: /* LLH R1,D2(X2,B2) [RXY] */
1622 tmp2 = tcg_temp_new_i64();
1623 tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
1624 store_reg32_i64(r1, tmp2);
1625 tcg_temp_free_i64(tmp2);
1626 break;
1627 case 0x96: /* ML R1,D2(X2,B2) [RXY] */
1628 tmp2 = tcg_temp_new_i64();
1629 tmp3 = load_reg((r1 + 1) & 15);
1630 tcg_gen_ext32u_i64(tmp3, tmp3);
1631 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1632 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
1633 store_reg32_i64((r1 + 1) & 15, tmp2);
1634 tcg_gen_shri_i64(tmp2, tmp2, 32);
1635 store_reg32_i64(r1, tmp2);
1636 tcg_temp_free_i64(tmp2);
1637 tcg_temp_free_i64(tmp3);
1638 break;
1639 case 0x97: /* DL R1,D2(X2,B2) [RXY] */
1640 /* reg(r1) = reg(r1, r1+1) % ld32(addr) */
1641 /* reg(r1+1) = reg(r1, r1+1) / ld32(addr) */
1642 tmp = load_reg(r1);
1643 tmp2 = tcg_temp_new_i64();
1644 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1645 tmp3 = load_reg((r1 + 1) & 15);
1646 tcg_gen_ext32u_i64(tmp2, tmp2);
1647 tcg_gen_ext32u_i64(tmp3, tmp3);
1648 tcg_gen_shli_i64(tmp, tmp, 32);
1649 tcg_gen_or_i64(tmp, tmp, tmp3);
1650
1651 tcg_gen_rem_i64(tmp3, tmp, tmp2);
1652 tcg_gen_div_i64(tmp, tmp, tmp2);
1653 store_reg32_i64((r1 + 1) & 15, tmp);
1654 store_reg32_i64(r1, tmp3);
1655 tcg_temp_free_i64(tmp);
1656 tcg_temp_free_i64(tmp2);
1657 tcg_temp_free_i64(tmp3);
1658 break;
1659 case 0x98: /* ALC R1,D2(X2,B2) [RXY] */
1660 tmp2 = tcg_temp_new_i64();
1661 tmp32_1 = load_reg32(r1);
1662 tmp32_2 = tcg_temp_new_i32();
1663 tmp32_3 = tcg_temp_new_i32();
1664 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1665 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
1666 /* XXX possible optimization point */
1667 gen_op_calc_cc(s);
1668 gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
1669 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
1670 store_reg32(r1, tmp32_3);
1671 tcg_temp_free_i64(tmp2);
1672 tcg_temp_free_i32(tmp32_1);
1673 tcg_temp_free_i32(tmp32_2);
1674 tcg_temp_free_i32(tmp32_3);
1675 break;
1676 case 0x99: /* SLB R1,D2(X2,B2) [RXY] */
1677 tmp2 = tcg_temp_new_i64();
1678 tmp32_1 = tcg_const_i32(r1);
1679 tmp32_2 = tcg_temp_new_i32();
1680 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1681 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
1682 /* XXX possible optimization point */
1683 gen_op_calc_cc(s);
1684 gen_helper_slb(cc_op, cpu_env, cc_op, tmp32_1, tmp32_2);
1685 set_cc_static(s);
1686 tcg_temp_free_i64(tmp2);
1687 tcg_temp_free_i32(tmp32_1);
1688 tcg_temp_free_i32(tmp32_2);
1689 break;
1690 default:
1691 LOG_DISAS("illegal e3 operation 0x%x\n", op);
1692 gen_illegal_opcode(s);
1693 break;
1694 }
1695 tcg_temp_free_i64(addr);
1696 }
1697
1698 #ifndef CONFIG_USER_ONLY
1699 static void disas_e5(CPUS390XState *env, DisasContext* s, uint64_t insn)
1700 {
1701 TCGv_i64 tmp, tmp2;
1702 int op = (insn >> 32) & 0xff;
1703
1704 tmp = get_address(s, 0, (insn >> 28) & 0xf, (insn >> 16) & 0xfff);
1705 tmp2 = get_address(s, 0, (insn >> 12) & 0xf, insn & 0xfff);
1706
1707 LOG_DISAS("disas_e5: insn %" PRIx64 "\n", insn);
1708 switch (op) {
1709 case 0x01: /* TPROT D1(B1),D2(B2) [SSE] */
1710 /* Test Protection */
1711 potential_page_fault(s);
1712 gen_helper_tprot(cc_op, tmp, tmp2);
1713 set_cc_static(s);
1714 break;
1715 default:
1716 LOG_DISAS("illegal e5 operation 0x%x\n", op);
1717 gen_illegal_opcode(s);
1718 break;
1719 }
1720
1721 tcg_temp_free_i64(tmp);
1722 tcg_temp_free_i64(tmp2);
1723 }
1724 #endif
1725
1726 static void disas_eb(CPUS390XState *env, DisasContext *s, int op, int r1,
1727 int r3, int b2, int d2)
1728 {
1729 TCGv_i64 tmp, tmp2, tmp3, tmp4;
1730 TCGv_i32 tmp32_1, tmp32_2;
1731 int i, stm_len;
1732
1733 LOG_DISAS("disas_eb: op 0x%x r1 %d r3 %d b2 %d d2 0x%x\n",
1734 op, r1, r3, b2, d2);
1735 switch (op) {
1736 case 0xc: /* SRLG R1,R3,D2(B2) [RSY] */
1737 case 0xd: /* SLLG R1,R3,D2(B2) [RSY] */
1738 case 0xa: /* SRAG R1,R3,D2(B2) [RSY] */
1739 case 0xb: /* SLAG R1,R3,D2(B2) [RSY] */
1740 case 0x1c: /* RLLG R1,R3,D2(B2) [RSY] */
1741 if (b2) {
1742 tmp = get_address(s, 0, b2, d2);
1743 tcg_gen_andi_i64(tmp, tmp, 0x3f);
1744 } else {
1745 tmp = tcg_const_i64(d2 & 0x3f);
1746 }
1747 switch (op) {
1748 case 0xc:
1749 tcg_gen_shr_i64(regs[r1], regs[r3], tmp);
1750 break;
1751 case 0xd:
1752 tcg_gen_shl_i64(regs[r1], regs[r3], tmp);
1753 break;
1754 case 0xa:
1755 tcg_gen_sar_i64(regs[r1], regs[r3], tmp);
1756 break;
1757 case 0xb:
1758 tmp2 = tcg_temp_new_i64();
1759 tmp3 = tcg_temp_new_i64();
1760 gen_op_update2_cc_i64(s, CC_OP_SLAG, regs[r3], tmp);
1761 tcg_gen_shl_i64(tmp2, regs[r3], tmp);
1762 /* override sign bit with source sign */
1763 tcg_gen_andi_i64(tmp2, tmp2, ~0x8000000000000000ULL);
1764 tcg_gen_andi_i64(tmp3, regs[r3], 0x8000000000000000ULL);
1765 tcg_gen_or_i64(regs[r1], tmp2, tmp3);
1766 tcg_temp_free_i64(tmp2);
1767 tcg_temp_free_i64(tmp3);
1768 break;
1769 case 0x1c:
1770 tcg_gen_rotl_i64(regs[r1], regs[r3], tmp);
1771 break;
1772 default:
1773 tcg_abort();
1774 break;
1775 }
1776 if (op == 0xa) {
1777 set_cc_s64(s, regs[r1]);
1778 }
1779 tcg_temp_free_i64(tmp);
1780 break;
1781 case 0x1d: /* RLL R1,R3,D2(B2) [RSY] */
1782 if (b2) {
1783 tmp = get_address(s, 0, b2, d2);
1784 tcg_gen_andi_i64(tmp, tmp, 0x3f);
1785 } else {
1786 tmp = tcg_const_i64(d2 & 0x3f);
1787 }
1788 tmp32_1 = tcg_temp_new_i32();
1789 tmp32_2 = load_reg32(r3);
1790 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
1791 switch (op) {
1792 case 0x1d:
1793 tcg_gen_rotl_i32(tmp32_1, tmp32_2, tmp32_1);
1794 break;
1795 default:
1796 tcg_abort();
1797 break;
1798 }
1799 store_reg32(r1, tmp32_1);
1800 tcg_temp_free_i64(tmp);
1801 tcg_temp_free_i32(tmp32_1);
1802 tcg_temp_free_i32(tmp32_2);
1803 break;
1804 case 0x4: /* LMG R1,R3,D2(B2) [RSE] */
1805 case 0x24: /* STMG R1,R3,D2(B2) [RSE] */
1806 stm_len = 8;
1807 goto do_mh;
1808 case 0x26: /* STMH R1,R3,D2(B2) [RSE] */
1809 case 0x96: /* LMH R1,R3,D2(B2) [RSE] */
1810 stm_len = 4;
1811 do_mh:
1812 /* Apparently, unrolling lmg/stmg of any size gains performance -
1813 even for very long ones... */
1814 tmp = get_address(s, 0, b2, d2);
1815 tmp3 = tcg_const_i64(stm_len);
1816 tmp4 = tcg_const_i64(op == 0x26 ? 32 : 4);
1817 for (i = r1;; i = (i + 1) % 16) {
1818 switch (op) {
1819 case 0x4:
1820 tcg_gen_qemu_ld64(regs[i], tmp, get_mem_index(s));
1821 break;
1822 case 0x96:
1823 tmp2 = tcg_temp_new_i64();
1824 #if HOST_LONG_BITS == 32
1825 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
1826 tcg_gen_trunc_i64_i32(TCGV_HIGH(regs[i]), tmp2);
1827 #else
1828 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
1829 tcg_gen_shl_i64(tmp2, tmp2, tmp4);
1830 tcg_gen_ext32u_i64(regs[i], regs[i]);
1831 tcg_gen_or_i64(regs[i], regs[i], tmp2);
1832 #endif
1833 tcg_temp_free_i64(tmp2);
1834 break;
1835 case 0x24:
1836 tcg_gen_qemu_st64(regs[i], tmp, get_mem_index(s));
1837 break;
1838 case 0x26:
1839 tmp2 = tcg_temp_new_i64();
1840 tcg_gen_shr_i64(tmp2, regs[i], tmp4);
1841 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
1842 tcg_temp_free_i64(tmp2);
1843 break;
1844 default:
1845 tcg_abort();
1846 }
1847 if (i == r3) {
1848 break;
1849 }
1850 tcg_gen_add_i64(tmp, tmp, tmp3);
1851 }
1852 tcg_temp_free_i64(tmp);
1853 tcg_temp_free_i64(tmp3);
1854 tcg_temp_free_i64(tmp4);
1855 break;
1856 case 0x2c: /* STCMH R1,M3,D2(B2) [RSY] */
1857 tmp = get_address(s, 0, b2, d2);
1858 tmp32_1 = tcg_const_i32(r1);
1859 tmp32_2 = tcg_const_i32(r3);
1860 potential_page_fault(s);
1861 gen_helper_stcmh(cpu_env, tmp32_1, tmp, tmp32_2);
1862 tcg_temp_free_i64(tmp);
1863 tcg_temp_free_i32(tmp32_1);
1864 tcg_temp_free_i32(tmp32_2);
1865 break;
1866 #ifndef CONFIG_USER_ONLY
1867 case 0x2f: /* LCTLG R1,R3,D2(B2) [RSE] */
1868 /* Load Control */
1869 check_privileged(s);
1870 tmp = get_address(s, 0, b2, d2);
1871 tmp32_1 = tcg_const_i32(r1);
1872 tmp32_2 = tcg_const_i32(r3);
1873 potential_page_fault(s);
1874 gen_helper_lctlg(cpu_env, tmp32_1, tmp, tmp32_2);
1875 tcg_temp_free_i64(tmp);
1876 tcg_temp_free_i32(tmp32_1);
1877 tcg_temp_free_i32(tmp32_2);
1878 break;
1879 case 0x25: /* STCTG R1,R3,D2(B2) [RSE] */
1880 /* Store Control */
1881 check_privileged(s);
1882 tmp = get_address(s, 0, b2, d2);
1883 tmp32_1 = tcg_const_i32(r1);
1884 tmp32_2 = tcg_const_i32(r3);
1885 potential_page_fault(s);
1886 gen_helper_stctg(cpu_env, tmp32_1, tmp, tmp32_2);
1887 tcg_temp_free_i64(tmp);
1888 tcg_temp_free_i32(tmp32_1);
1889 tcg_temp_free_i32(tmp32_2);
1890 break;
1891 #endif
1892 case 0x30: /* CSG R1,R3,D2(B2) [RSY] */
1893 tmp = get_address(s, 0, b2, d2);
1894 tmp32_1 = tcg_const_i32(r1);
1895 tmp32_2 = tcg_const_i32(r3);
1896 potential_page_fault(s);
1897 /* XXX rewrite in tcg */
1898 gen_helper_csg(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
1899 set_cc_static(s);
1900 tcg_temp_free_i64(tmp);
1901 tcg_temp_free_i32(tmp32_1);
1902 tcg_temp_free_i32(tmp32_2);
1903 break;
1904 case 0x3e: /* CDSG R1,R3,D2(B2) [RSY] */
1905 tmp = get_address(s, 0, b2, d2);
1906 tmp32_1 = tcg_const_i32(r1);
1907 tmp32_2 = tcg_const_i32(r3);
1908 potential_page_fault(s);
1909 /* XXX rewrite in tcg */
1910 gen_helper_cdsg(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
1911 set_cc_static(s);
1912 tcg_temp_free_i64(tmp);
1913 tcg_temp_free_i32(tmp32_1);
1914 tcg_temp_free_i32(tmp32_2);
1915 break;
1916 case 0x51: /* TMY D1(B1),I2 [SIY] */
1917 tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
1918 tmp2 = tcg_const_i64((r1 << 4) | r3);
1919 tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
1920 /* yes, this is a 32 bit operation with 64 bit tcg registers, because
1921 that incurs less conversions */
1922 cmp_64(s, tmp, tmp2, CC_OP_TM_32);
1923 tcg_temp_free_i64(tmp);
1924 tcg_temp_free_i64(tmp2);
1925 break;
1926 case 0x52: /* MVIY D1(B1),I2 [SIY] */
1927 tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
1928 tmp2 = tcg_const_i64((r1 << 4) | r3);
1929 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
1930 tcg_temp_free_i64(tmp);
1931 tcg_temp_free_i64(tmp2);
1932 break;
1933 case 0x55: /* CLIY D1(B1),I2 [SIY] */
1934 tmp3 = get_address(s, 0, b2, d2); /* SIY -> this is the 1st operand */
1935 tmp = tcg_temp_new_i64();
1936 tmp32_1 = tcg_temp_new_i32();
1937 tcg_gen_qemu_ld8u(tmp, tmp3, get_mem_index(s));
1938 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
1939 cmp_u32c(s, tmp32_1, (r1 << 4) | r3);
1940 tcg_temp_free_i64(tmp);
1941 tcg_temp_free_i64(tmp3);
1942 tcg_temp_free_i32(tmp32_1);
1943 break;
1944 case 0x80: /* ICMH R1,M3,D2(B2) [RSY] */
1945 tmp = get_address(s, 0, b2, d2);
1946 tmp32_1 = tcg_const_i32(r1);
1947 tmp32_2 = tcg_const_i32(r3);
1948 potential_page_fault(s);
1949 /* XXX split CC calculation out */
1950 gen_helper_icmh(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
1951 set_cc_static(s);
1952 tcg_temp_free_i64(tmp);
1953 tcg_temp_free_i32(tmp32_1);
1954 tcg_temp_free_i32(tmp32_2);
1955 break;
1956 default:
1957 LOG_DISAS("illegal eb operation 0x%x\n", op);
1958 gen_illegal_opcode(s);
1959 break;
1960 }
1961 }
1962
1963 static void disas_ed(CPUS390XState *env, DisasContext *s, int op, int r1,
1964 int x2, int b2, int d2, int r1b)
1965 {
1966 TCGv_i32 tmp_r1, tmp32;
1967 TCGv_i64 addr, tmp;
1968 addr = get_address(s, x2, b2, d2);
1969 tmp_r1 = tcg_const_i32(r1);
1970 switch (op) {
1971 case 0x4: /* LDEB R1,D2(X2,B2) [RXE] */
1972 potential_page_fault(s);
1973 gen_helper_ldeb(cpu_env, tmp_r1, addr);
1974 break;
1975 case 0x5: /* LXDB R1,D2(X2,B2) [RXE] */
1976 potential_page_fault(s);
1977 gen_helper_lxdb(cpu_env, tmp_r1, addr);
1978 break;
1979 case 0x9: /* CEB R1,D2(X2,B2) [RXE] */
1980 tmp = tcg_temp_new_i64();
1981 tmp32 = load_freg32(r1);
1982 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
1983 set_cc_cmp_f32_i64(s, tmp32, tmp);
1984 tcg_temp_free_i64(tmp);
1985 tcg_temp_free_i32(tmp32);
1986 break;
1987 case 0xa: /* AEB R1,D2(X2,B2) [RXE] */
1988 tmp = tcg_temp_new_i64();
1989 tmp32 = tcg_temp_new_i32();
1990 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
1991 tcg_gen_trunc_i64_i32(tmp32, tmp);
1992 gen_helper_aeb(cpu_env, tmp_r1, tmp32);
1993 tcg_temp_free_i64(tmp);
1994 tcg_temp_free_i32(tmp32);
1995
1996 tmp32 = load_freg32(r1);
1997 gen_set_cc_nz_f32(s, tmp32);
1998 tcg_temp_free_i32(tmp32);
1999 break;
2000 case 0xb: /* SEB R1,D2(X2,B2) [RXE] */
2001 tmp = tcg_temp_new_i64();
2002 tmp32 = tcg_temp_new_i32();
2003 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2004 tcg_gen_trunc_i64_i32(tmp32, tmp);
2005 gen_helper_seb(cpu_env, tmp_r1, tmp32);
2006 tcg_temp_free_i64(tmp);
2007 tcg_temp_free_i32(tmp32);
2008
2009 tmp32 = load_freg32(r1);
2010 gen_set_cc_nz_f32(s, tmp32);
2011 tcg_temp_free_i32(tmp32);
2012 break;
2013 case 0xd: /* DEB R1,D2(X2,B2) [RXE] */
2014 tmp = tcg_temp_new_i64();
2015 tmp32 = tcg_temp_new_i32();
2016 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2017 tcg_gen_trunc_i64_i32(tmp32, tmp);
2018 gen_helper_deb(cpu_env, tmp_r1, tmp32);
2019 tcg_temp_free_i64(tmp);
2020 tcg_temp_free_i32(tmp32);
2021 break;
2022 case 0x10: /* TCEB R1,D2(X2,B2) [RXE] */
2023 potential_page_fault(s);
2024 gen_helper_tceb(cc_op, cpu_env, tmp_r1, addr);
2025 set_cc_static(s);
2026 break;
2027 case 0x11: /* TCDB R1,D2(X2,B2) [RXE] */
2028 potential_page_fault(s);
2029 gen_helper_tcdb(cc_op, cpu_env, tmp_r1, addr);
2030 set_cc_static(s);
2031 break;
2032 case 0x12: /* TCXB R1,D2(X2,B2) [RXE] */
2033 potential_page_fault(s);
2034 gen_helper_tcxb(cc_op, cpu_env, tmp_r1, addr);
2035 set_cc_static(s);
2036 break;
2037 case 0x17: /* MEEB R1,D2(X2,B2) [RXE] */
2038 tmp = tcg_temp_new_i64();
2039 tmp32 = tcg_temp_new_i32();
2040 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2041 tcg_gen_trunc_i64_i32(tmp32, tmp);
2042 gen_helper_meeb(cpu_env, tmp_r1, tmp32);
2043 tcg_temp_free_i64(tmp);
2044 tcg_temp_free_i32(tmp32);
2045 break;
2046 case 0x19: /* CDB R1,D2(X2,B2) [RXE] */
2047 potential_page_fault(s);
2048 gen_helper_cdb(cc_op, cpu_env, tmp_r1, addr);
2049 set_cc_static(s);
2050 break;
2051 case 0x1a: /* ADB R1,D2(X2,B2) [RXE] */
2052 potential_page_fault(s);
2053 gen_helper_adb(cc_op, cpu_env, tmp_r1, addr);
2054 set_cc_static(s);
2055 break;
2056 case 0x1b: /* SDB R1,D2(X2,B2) [RXE] */
2057 potential_page_fault(s);
2058 gen_helper_sdb(cc_op, cpu_env, tmp_r1, addr);
2059 set_cc_static(s);
2060 break;
2061 case 0x1c: /* MDB R1,D2(X2,B2) [RXE] */
2062 potential_page_fault(s);
2063 gen_helper_mdb(cpu_env, tmp_r1, addr);
2064 break;
2065 case 0x1d: /* DDB R1,D2(X2,B2) [RXE] */
2066 potential_page_fault(s);
2067 gen_helper_ddb(cpu_env, tmp_r1, addr);
2068 break;
2069 case 0x1e: /* MADB R1,R3,D2(X2,B2) [RXF] */
2070 /* for RXF insns, r1 is R3 and r1b is R1 */
2071 tmp32 = tcg_const_i32(r1b);
2072 potential_page_fault(s);
2073 gen_helper_madb(cpu_env, tmp32, addr, tmp_r1);
2074 tcg_temp_free_i32(tmp32);
2075 break;
2076 default:
2077 LOG_DISAS("illegal ed operation 0x%x\n", op);
2078 gen_illegal_opcode(s);
2079 return;
2080 }
2081 tcg_temp_free_i32(tmp_r1);
2082 tcg_temp_free_i64(addr);
2083 }
2084
/* Disassemble one A5-format (RI) instruction: insert-immediate (IIxx),
   and-immediate (NIxx), or-immediate (OIxx) on one 16-bit slice of a
   64-bit register, plus load-logical-immediate (LLIxx).  'i2' is the
   16-bit immediate and 'op' selects the slice and operation.  */
static void disas_a5(CPUS390XState *env, DisasContext *s, int op, int r1,
                     int i2)
{
    TCGv_i64 tmp, tmp2;
    TCGv_i32 tmp32;
    LOG_DISAS("disas_a5: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
    switch (op) {
    case 0x0: /* IIHH R1,I2 [RI] */
        /* insert into bits 48..63 of the register (deposit position 48) */
        tmp = tcg_const_i64(i2);
        tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 48, 16);
        tcg_temp_free_i64(tmp);
        break;
    case 0x1: /* IIHL R1,I2 [RI] */
        tmp = tcg_const_i64(i2);
        tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 32, 16);
        tcg_temp_free_i64(tmp);
        break;
    case 0x2: /* IILH R1,I2 [RI] */
        tmp = tcg_const_i64(i2);
        tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 16, 16);
        tcg_temp_free_i64(tmp);
        break;
    case 0x3: /* IILL R1,I2 [RI] */
        tmp = tcg_const_i64(i2);
        tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 0, 16);
        tcg_temp_free_i64(tmp);
        break;
    case 0x4: /* NIHH R1,I2 [RI] */
    case 0x8: /* OIHH R1,I2 [RI] */
        tmp = load_reg(r1);
        tmp32 = tcg_temp_new_i32();
        switch (op) {
        case 0x4:
            /* AND: immediate in the high halfword, all other bits kept */
            tmp2 = tcg_const_i64((((uint64_t)i2) << 48)
                                 | 0x0000ffffffffffffULL);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0x8:
            tmp2 = tcg_const_i64(((uint64_t)i2) << 48);
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        /* NOTE: tmp2 (a constant temp) is reused as a scratch destination
           here to extract the affected halfword for the cc.  A shift of
           48 already clears the upper bits, so no masking is needed.  */
        tcg_gen_shri_i64(tmp2, tmp, 48);
        tcg_gen_trunc_i64_i32(tmp32, tmp2);
        set_cc_nz_u32(s, tmp32);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32);
        tcg_temp_free_i64(tmp);
        break;
    case 0x5: /* NIHL R1,I2 [RI] */
    case 0x9: /* OIHL R1,I2 [RI] */
        tmp = load_reg(r1);
        tmp32 = tcg_temp_new_i32();
        switch (op) {
        case 0x5:
            tmp2 = tcg_const_i64((((uint64_t)i2) << 32)
                                 | 0xffff0000ffffffffULL);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0x9:
            tmp2 = tcg_const_i64(((uint64_t)i2) << 32);
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        /* cc is set from the 16 bits that were operated on */
        tcg_gen_shri_i64(tmp2, tmp, 32);
        tcg_gen_trunc_i64_i32(tmp32, tmp2);
        tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
        set_cc_nz_u32(s, tmp32);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32);
        tcg_temp_free_i64(tmp);
        break;
    case 0x6: /* NILH R1,I2 [RI] */
    case 0xa: /* OILH R1,I2 [RI] */
        tmp = load_reg(r1);
        tmp32 = tcg_temp_new_i32();
        switch (op) {
        case 0x6:
            tmp2 = tcg_const_i64((((uint64_t)i2) << 16)
                                 | 0xffffffff0000ffffULL);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0xa:
            tmp2 = tcg_const_i64(((uint64_t)i2) << 16);
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        /* cc from bits 32..47 of the result (shift in place in tmp) */
        tcg_gen_shri_i64(tmp, tmp, 16);
        tcg_gen_trunc_i64_i32(tmp32, tmp);
        tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
        set_cc_nz_u32(s, tmp32);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32);
        tcg_temp_free_i64(tmp);
        break;
    case 0x7: /* NILL R1,I2 [RI] */
    case 0xb: /* OILL R1,I2 [RI] */
        tmp = load_reg(r1);
        tmp32 = tcg_temp_new_i32();
        switch (op) {
        case 0x7:
            tmp2 = tcg_const_i64(i2 | 0xffffffffffff0000ULL);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0xb:
            tmp2 = tcg_const_i64(i2);
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        tcg_gen_trunc_i64_i32(tmp32, tmp);
        tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
        set_cc_nz_u32(s, tmp32); /* signedness should not matter here */
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32);
        tcg_temp_free_i64(tmp);
        break;
    case 0xc: /* LLIHH R1,I2 [RI] */
        /* load the immediate into the selected halfword, zero the rest */
        tmp = tcg_const_i64( ((uint64_t)i2) << 48 );
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xd: /* LLIHL R1,I2 [RI] */
        tmp = tcg_const_i64( ((uint64_t)i2) << 32 );
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xe: /* LLILH R1,I2 [RI] */
        tmp = tcg_const_i64( ((uint64_t)i2) << 16 );
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xf: /* LLILL R1,I2 [RI] */
        tmp = tcg_const_i64(i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    default:
        LOG_DISAS("illegal a5 operation 0x%x\n", op);
        gen_illegal_opcode(s);
        return;
    }
}
2239
/* Disassemble one A7-format (RI) instruction: test-under-mask,
   relative branches (BRC/BRAS/BRCT/BRCTG) and halfword immediates
   (LHI/LGHI/CHI/CGHI).  'i2' is the signed 16-bit immediate; branch
   targets are pc + i2 * 2 (halfword offsets).  */
static void disas_a7(CPUS390XState *env, DisasContext *s, int op, int r1,
                     int i2)
{
    TCGv_i64 tmp, tmp2;
    TCGv_i32 tmp32_1;
    int l1;

    LOG_DISAS("disas_a7: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
    switch (op) {
    case 0x0: /* TMLH or TMH R1,I2 [RI] */
    case 0x1: /* TMLL or TML R1,I2 [RI] */
    case 0x2: /* TMHH R1,I2 [RI] */
    case 0x3: /* TMHL R1,I2 [RI] */
        tmp = load_reg(r1);
        tmp2 = tcg_const_i64((uint16_t)i2);
        /* shift the tested halfword down to bits 0..15:
           TMLH=16, TMLL=0, TMHH=48, TMHL=32 */
        switch (op) {
        case 0x0:
            tcg_gen_shri_i64(tmp, tmp, 16);
            break;
        case 0x1:
            break;
        case 0x2:
            tcg_gen_shri_i64(tmp, tmp, 48);
            break;
        case 0x3:
            tcg_gen_shri_i64(tmp, tmp, 32);
            break;
        }
        tcg_gen_andi_i64(tmp, tmp, 0xffff);
        cmp_64(s, tmp, tmp2, CC_OP_TM_64);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x4: /* brc m1, i2 */
        gen_brc(r1, s, i2 * 2LL);
        return;
    case 0x5: /* BRAS R1,I2 [RI] */
        /* link register gets the address of the next (4-byte) insn */
        tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        gen_goto_tb(s, 0, s->pc + i2 * 2LL);
        s->is_jmp = DISAS_TB_JUMP;
        break;
    case 0x6: /* BRCT R1,I2 [RI] */
        /* decrement 32-bit counter; branch while it is non-zero */
        tmp32_1 = load_reg32(r1);
        tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
        store_reg32(r1, tmp32_1);
        gen_update_cc_op(s);
        l1 = gen_new_label();
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
        gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
        gen_set_label(l1);
        gen_goto_tb(s, 1, s->pc + 4);
        s->is_jmp = DISAS_TB_JUMP;
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x7: /* BRCTG R1,I2 [RI] */
        /* 64-bit variant of BRCT */
        tmp = load_reg(r1);
        tcg_gen_subi_i64(tmp, tmp, 1);
        store_reg(r1, tmp);
        gen_update_cc_op(s);
        l1 = gen_new_label();
        tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
        gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
        gen_set_label(l1);
        gen_goto_tb(s, 1, s->pc + 4);
        s->is_jmp = DISAS_TB_JUMP;
        tcg_temp_free_i64(tmp);
        break;
    case 0x8: /* lhi r1, i2 */
        tmp32_1 = tcg_const_i32(i2);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x9: /* lghi r1, i2 */
        tmp = tcg_const_i64(i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xe: /* CHI R1,I2 [RI] */
        tmp32_1 = load_reg32(r1);
        cmp_s32c(s, tmp32_1, i2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xf: /* CGHI R1,I2 [RI] */
        tmp = load_reg(r1);
        cmp_s64c(s, tmp, i2);
        tcg_temp_free_i64(tmp);
        break;
    default:
        LOG_DISAS("illegal a7 operation 0x%x\n", op);
        gen_illegal_opcode(s);
        return;
    }
}
2335
/*
 * Translate instructions with major opcode 0xb2.  Mostly RRE-format
 * (register-register) system and string operations; the S-format
 * (base+displacement) control instructions are only reachable from
 * system emulation and are compiled out for user mode.
 *
 * op is the second opcode byte; insn holds the remaining instruction
 * bits, from which r1/r2 (or b2/d2 via decode_rs) are extracted.
 */
static void disas_b2(CPUS390XState *env, DisasContext *s, int op,
                     uint32_t insn)
{
    TCGv_i64 tmp, tmp2, tmp3;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
    int r1, r2;
#ifndef CONFIG_USER_ONLY
    int r3, d2, b2;
#endif

    r1 = (insn >> 4) & 0xf;
    r2 = insn & 0xf;

    LOG_DISAS("disas_b2: op 0x%x r1 %d r2 %d\n", op, r1, r2);

    switch (op) {
    case 0x22: /* IPM R1 [RRE] */
        /* Insert Program Mask: needs the fully materialized cc.  */
        tmp32_1 = tcg_const_i32(r1);
        gen_op_calc_cc(s);
        gen_helper_ipm(cpu_env, cc_op, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x41: /* CKSM R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        potential_page_fault(s);
        gen_helper_cksm(cpu_env, tmp32_1, tmp32_2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        gen_op_movi_cc(s, 0);
        break;
    case 0x4e: /* SAR R1,R2 [RRE] */
        /* Set Access register from the low 32 bits of R2.  */
        tmp32_1 = load_reg32(r2);
        tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, aregs[r1]));
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x4f: /* EAR R1,R2 [RRE] */
        /* Extract Access register into the low 32 bits of R1.  */
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, aregs[r2]));
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x54: /* MVPG R1,R2 [RRE] */
        tmp = load_reg(0);
        tmp2 = load_reg(r1);
        tmp3 = load_reg(r2);
        potential_page_fault(s);
        gen_helper_mvpg(cpu_env, tmp, tmp2, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        /* XXX check CCO bit and set CC accordingly */
        gen_op_movi_cc(s, 0);
        break;
    case 0x55: /* MVST R1,R2 [RRE] */
        tmp32_1 = load_reg32(0);
        tmp32_2 = tcg_const_i32(r1);
        tmp32_3 = tcg_const_i32(r2);
        potential_page_fault(s);
        gen_helper_mvst(cpu_env, tmp32_1, tmp32_2, tmp32_3);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        gen_op_movi_cc(s, 1);
        break;
    case 0x5d: /* CLST R1,R2 [RRE] */
        tmp32_1 = load_reg32(0);
        tmp32_2 = tcg_const_i32(r1);
        tmp32_3 = tcg_const_i32(r2);
        potential_page_fault(s);
        gen_helper_clst(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x5e: /* SRST R1,R2 [RRE] */
        tmp32_1 = load_reg32(0);
        tmp32_2 = tcg_const_i32(r1);
        tmp32_3 = tcg_const_i32(r2);
        potential_page_fault(s);
        gen_helper_srst(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;

#ifndef CONFIG_USER_ONLY
    case 0x02: /* STIDP D2(B2) [S] */
        /* Store CPU ID */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stidp(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x04: /* SCK D2(B2) [S] */
        /* Set Clock */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_sck(cc_op, tmp);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        break;
    case 0x05: /* STCK D2(B2) [S] */
        /* Store Clock */
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stck(cc_op, cpu_env, tmp);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        break;
    case 0x06: /* SCKC D2(B2) [S] */
        /* Set Clock Comparator */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_sckc(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x07: /* STCKC D2(B2) [S] */
        /* Store Clock Comparator */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stckc(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x08: /* SPT D2(B2) [S] */
        /* Set CPU Timer */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_spt(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x09: /* STPT D2(B2) [S] */
        /* Store CPU Timer */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stpt(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x0a: /* SPKA D2(B2) [S] */
        /* Set PSW Key from Address: replace the key field of the PSW
           mask with bits 56-59 of the second-operand address.  */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_andi_i64(tmp2, psw_mask, ~PSW_MASK_KEY);
        tcg_gen_shli_i64(tmp, tmp, PSW_SHIFT_KEY - 4);
        tcg_gen_or_i64(psw_mask, tmp2, tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp);
        break;
    case 0x0d: /* PTLB [S] */
        /* Purge TLB */
        check_privileged(s);
        gen_helper_ptlb(cpu_env);
        break;
    case 0x10: /* SPX D2(B2) [S] */
        /* Set Prefix Register */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_spx(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x11: /* STPX D2(B2) [S] */
        /* Store Prefix */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_ld_i64(tmp2, cpu_env, offsetof(CPUS390XState, psa));
        tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x12: /* STAP D2(B2) [S] */
        /* Store CPU Address */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, cpu_num));
        tcg_gen_extu_i32_i64(tmp2, tmp32_1);
        tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x21: /* IPTE R1,R2 [RRE] */
        /* Invalidate PTE */
        check_privileged(s);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp = load_reg(r1);
        tmp2 = load_reg(r2);
        gen_helper_ipte(cpu_env, tmp, tmp2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x29: /* ISKE R1,R2 [RRE] */
        /* Insert Storage Key Extended */
        check_privileged(s);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp = load_reg(r2);
        tmp2 = tcg_temp_new_i64();
        gen_helper_iske(tmp2, cpu_env, tmp);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x2a: /* RRBE R1,R2 [RRE] */
        /* Reset Reference Bit Extended */
        check_privileged(s);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp32_1 = load_reg32(r1);
        tmp = load_reg(r2);
        gen_helper_rrbe(cc_op, cpu_env, tmp32_1, tmp);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x2b: /* SSKE R1,R2 [RRE] */
        /* Set Storage Key Extended */
        check_privileged(s);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp32_1 = load_reg32(r1);
        tmp = load_reg(r2);
        gen_helper_sske(cpu_env, tmp32_1, tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x34: /* STCH ? */
        /* Store Subchannel */
        check_privileged(s);
        gen_op_movi_cc(s, 3);
        break;
    case 0x46: /* STURA R1,R2 [RRE] */
        /* Store Using Real Address */
        check_privileged(s);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp32_1 = load_reg32(r1);
        tmp = load_reg(r2);
        potential_page_fault(s);
        gen_helper_stura(cpu_env, tmp, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x50: /* CSP R1,R2 [RRE] */
        /* Compare And Swap And Purge */
        check_privileged(s);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        gen_helper_csp(cc_op, cpu_env, tmp32_1, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x5f: /* CHSC ? */
        /* Channel Subsystem Call */
        check_privileged(s);
        gen_op_movi_cc(s, 3);
        break;
    case 0x78: /* STCKE D2(B2) [S] */
        /* Store Clock Extended */
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stcke(cc_op, cpu_env, tmp);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        break;
    case 0x79: /* SACF D2(B2) [S] */
        /* Set Address Space Control Fast */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_sacf(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        /* addressing mode has changed, so end the block */
        s->pc = s->next_pc;
        update_psw_addr(s);
        s->is_jmp = DISAS_JUMP;
        break;
    case 0x7d: /* STSI D2,(B2) [S] */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = load_reg32(0);
        tmp32_2 = load_reg32(1);
        potential_page_fault(s);
        gen_helper_stsi(cc_op, cpu_env, tmp, tmp32_1, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x9d: /* LFPC D2(B2) [S] */
        /* Load the 32-bit FP control register from memory.  */
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xb1: /* STFL D2(B2) [S] */
        /* Store Facility List (CPU features) at 200 */
        check_privileged(s);
        tmp2 = tcg_const_i64(0xc0000000);
        tmp = tcg_const_i64(200);
        tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp);
        break;
    case 0xb2: /* LPSWE D2(B2) [S] */
        /* Load PSW Extended */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
        tcg_gen_addi_i64(tmp, tmp, 8);
        tcg_gen_qemu_ld64(tmp3, tmp, get_mem_index(s));
        gen_helper_load_psw(cpu_env, tmp2, tmp3);
        /* we need to keep cc_op intact */
        s->is_jmp = DISAS_JUMP;
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x20: /* SERVC R1,R2 [RRE] */
        /* SCLP Service call (PV hypercall) */
        check_privileged(s);
        potential_page_fault(s);
        tmp32_1 = load_reg32(r2);
        tmp = load_reg(r1);
        gen_helper_servc(cc_op, cpu_env, tmp32_1, tmp);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
#endif
    default:
        LOG_DISAS("illegal b2 operation 0x%x\n", op);
        gen_illegal_opcode(s);
        break;
    }
}
2711
/*
 * Translate floating-point instructions with major opcode 0xb3
 * (RRE/RRF binary-floating-point operations).
 *
 * m3 is the rounding-mode / third-register field; for RRF-format
 * multiply-and-add insns the fields are remapped (see comment below).
 * Most cases delegate to a per-insn helper via the FP_HELPER macros;
 * FP_HELPER_CC is used when the helper also produces a condition code.
 */
static void disas_b3(CPUS390XState *env, DisasContext *s, int op, int m3,
                     int r1, int r2)
{
    TCGv_i64 tmp;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
    LOG_DISAS("disas_b3: op 0x%x m3 0x%x r1 %d r2 %d\n", op, m3, r1, r2);
#define FP_HELPER(i) \
    tmp32_1 = tcg_const_i32(r1); \
    tmp32_2 = tcg_const_i32(r2); \
    gen_helper_ ## i(cpu_env, tmp32_1, tmp32_2); \
    tcg_temp_free_i32(tmp32_1); \
    tcg_temp_free_i32(tmp32_2);

#define FP_HELPER_CC(i) \
    tmp32_1 = tcg_const_i32(r1); \
    tmp32_2 = tcg_const_i32(r2); \
    gen_helper_ ## i(cc_op, cpu_env, tmp32_1, tmp32_2); \
    set_cc_static(s); \
    tcg_temp_free_i32(tmp32_1); \
    tcg_temp_free_i32(tmp32_2);

    switch (op) {
    case 0x0: /* LPEBR R1,R2 [RRE] */
        FP_HELPER_CC(lpebr);
        break;
    case 0x2: /* LTEBR R1,R2 [RRE] */
        FP_HELPER_CC(ltebr);
        break;
    case 0x3: /* LCEBR R1,R2 [RRE] */
        FP_HELPER_CC(lcebr);
        break;
    case 0x4: /* LDEBR R1,R2 [RRE] */
        FP_HELPER(ldebr);
        break;
    case 0x5: /* LXDBR R1,R2 [RRE] */
        FP_HELPER(lxdbr);
        break;
    case 0x9: /* CEBR R1,R2 [RRE] */
        FP_HELPER_CC(cebr);
        break;
    case 0xa: /* AEBR R1,R2 [RRE] */
        FP_HELPER_CC(aebr);
        break;
    case 0xb: /* SEBR R1,R2 [RRE] */
        FP_HELPER_CC(sebr);
        break;
    case 0xd: /* DEBR R1,R2 [RRE] */
        FP_HELPER(debr);
        break;
    case 0x10: /* LPDBR R1,R2 [RRE] */
        FP_HELPER_CC(lpdbr);
        break;
    case 0x12: /* LTDBR R1,R2 [RRE] */
        FP_HELPER_CC(ltdbr);
        break;
    case 0x13: /* LCDBR R1,R2 [RRE] */
        FP_HELPER_CC(lcdbr);
        break;
    case 0x15: /* SQDBR R1,R2 [RRE] */
        FP_HELPER(sqdbr);
        break;
    case 0x17: /* MEEBR R1,R2 [RRE] */
        FP_HELPER(meebr);
        break;
    case 0x19: /* CDBR R1,R2 [RRE] */
        FP_HELPER_CC(cdbr);
        break;
    case 0x1a: /* ADBR R1,R2 [RRE] */
        FP_HELPER_CC(adbr);
        break;
    case 0x1b: /* SDBR R1,R2 [RRE] */
        FP_HELPER_CC(sdbr);
        break;
    case 0x1c: /* MDBR R1,R2 [RRE] */
        FP_HELPER(mdbr);
        break;
    case 0x1d: /* DDBR R1,R2 [RRE] */
        FP_HELPER(ddbr);
        break;
    case 0xe: /* MAEBR R1,R3,R2 [RRF] */
    case 0x1e: /* MADBR R1,R3,R2 [RRF] */
    case 0x1f: /* MSDBR R1,R3,R2 [RRF] */
        /* for RRF insns, m3 is R1, r1 is R3, and r2 is R2 */
        tmp32_1 = tcg_const_i32(m3);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(r1);
        switch (op) {
        case 0xe:
            gen_helper_maebr(cpu_env, tmp32_1, tmp32_3, tmp32_2);
            break;
        case 0x1e:
            gen_helper_madbr(cpu_env, tmp32_1, tmp32_3, tmp32_2);
            break;
        case 0x1f:
            gen_helper_msdbr(cpu_env, tmp32_1, tmp32_3, tmp32_2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x40: /* LPXBR R1,R2 [RRE] */
        FP_HELPER_CC(lpxbr);
        break;
    case 0x42: /* LTXBR R1,R2 [RRE] */
        FP_HELPER_CC(ltxbr);
        break;
    case 0x43: /* LCXBR R1,R2 [RRE] */
        FP_HELPER_CC(lcxbr);
        break;
    case 0x44: /* LEDBR R1,R2 [RRE] */
        FP_HELPER(ledbr);
        break;
    case 0x45: /* LDXBR R1,R2 [RRE] */
        FP_HELPER(ldxbr);
        break;
    case 0x46: /* LEXBR R1,R2 [RRE] */
        FP_HELPER(lexbr);
        break;
    case 0x49: /* CXBR R1,R2 [RRE] */
        FP_HELPER_CC(cxbr);
        break;
    case 0x4a: /* AXBR R1,R2 [RRE] */
        FP_HELPER_CC(axbr);
        break;
    case 0x4b: /* SXBR R1,R2 [RRE] */
        FP_HELPER_CC(sxbr);
        break;
    case 0x4c: /* MXBR R1,R2 [RRE] */
        FP_HELPER(mxbr);
        break;
    case 0x4d: /* DXBR R1,R2 [RRE] */
        FP_HELPER(dxbr);
        break;
    case 0x65: /* LXR R1,R2 [RRE] */
        /* 128-bit FP register move: copy both 64-bit halves of the
           extended-precision register pair.  */
        tmp = load_freg(r2);
        store_freg(r1, tmp);
        tcg_temp_free_i64(tmp);
        tmp = load_freg(r2 + 2);
        store_freg(r1 + 2, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x74: /* LZER R1 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        gen_helper_lzer(cpu_env, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x75: /* LZDR R1 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        gen_helper_lzdr(cpu_env, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x76: /* LZXR R1 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        gen_helper_lzxr(cpu_env, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x84: /* SFPC R1 [RRE] */
        tmp32_1 = load_reg32(r1);
        tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x8c: /* EFPC R1 [RRE] */
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x94: /* CEFBR R1,R2 [RRE] */
    case 0x95: /* CDFBR R1,R2 [RRE] */
    case 0x96: /* CXFBR R1,R2 [RRE] */
        /* Convert from 32-bit fixed to short/long/extended BFP.  */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = load_reg32(r2);
        switch (op) {
        case 0x94:
            gen_helper_cefbr(cpu_env, tmp32_1, tmp32_2);
            break;
        case 0x95:
            gen_helper_cdfbr(cpu_env, tmp32_1, tmp32_2);
            break;
        case 0x96:
            gen_helper_cxfbr(cpu_env, tmp32_1, tmp32_2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x98: /* CFEBR R1,R2 [RRE] */
    case 0x99: /* CFDBR R1,R2 [RRE] */
    case 0x9a: /* CFXBR R1,R2 [RRE] */
        /* Convert BFP to 32-bit fixed; m3 carries the rounding mode.  */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(m3);
        switch (op) {
        case 0x98:
            gen_helper_cfebr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
            break;
        case 0x99:
            gen_helper_cfdbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
            break;
        case 0x9a:
            gen_helper_cfxbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
            break;
        default:
            tcg_abort();
        }
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0xa4: /* CEGBR R1,R2 [RRE] */
    case 0xa5: /* CDGBR R1,R2 [RRE] */
        /* Convert from 64-bit fixed to short/long BFP.  */
        tmp32_1 = tcg_const_i32(r1);
        tmp = load_reg(r2);
        switch (op) {
        case 0xa4:
            gen_helper_cegbr(cpu_env, tmp32_1, tmp);
            break;
        case 0xa5:
            gen_helper_cdgbr(cpu_env, tmp32_1, tmp);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0xa6: /* CXGBR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp = load_reg(r2);
        gen_helper_cxgbr(cpu_env, tmp32_1, tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0xa8: /* CGEBR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(m3);
        gen_helper_cgebr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0xa9: /* CGDBR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(m3);
        gen_helper_cgdbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0xaa: /* CGXBR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(m3);
        gen_helper_cgxbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    default:
        LOG_DISAS("illegal b3 operation 0x%x\n", op);
        gen_illegal_opcode(s);
        break;
    }

#undef FP_HELPER_CC
#undef FP_HELPER
}
2990
/*
 * Translate RRE-format instructions with major opcode 0xb9:
 * 64-bit and 64<-32 loads with sign/complement handling, multiplies,
 * divides, logical ops, and add/subtract-with-carry/borrow.
 */
static void disas_b9(CPUS390XState *env, DisasContext *s, int op, int r1,
                     int r2)
{
    TCGv_i64 tmp, tmp2, tmp3;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;

    LOG_DISAS("disas_b9: op 0x%x r1 %d r2 %d\n", op, r1, r2);
    switch (op) {
    case 0x0: /* LPGR R1,R2 [RRE] */
    case 0x1: /* LNGR R1,R2 [RRE] */
    case 0x2: /* LTGR R1,R2 [RRE] */
    case 0x3: /* LCGR R1,R2 [RRE] */
    case 0x10: /* LPGFR R1,R2 [RRE] */
    case 0x11: /* LNGFR R1,R2 [RRE] */
    case 0x12: /* LTGFR R1,R2 [RRE] */
    case 0x13: /* LCGFR R1,R2 [RRE] */
        /* Bit 0x10 selects the 64<-32 (sign-extending) variants;
           the low nibble picks positive/negative/test/complement.  */
        if (op & 0x10) {
            tmp = load_reg32_i64(r2);
        } else {
            tmp = load_reg(r2);
        }
        switch (op & 0xf) {
        case 0x0: /* LP?GR */
            set_cc_abs64(s, tmp);
            gen_helper_abs_i64(tmp, tmp);
            store_reg(r1, tmp);
            break;
        case 0x1: /* LN?GR */
            set_cc_nabs64(s, tmp);
            gen_helper_nabs_i64(tmp, tmp);
            store_reg(r1, tmp);
            break;
        case 0x2: /* LT?GR */
            if (r1 != r2) {
                store_reg(r1, tmp);
            }
            set_cc_s64(s, tmp);
            break;
        case 0x3: /* LC?GR */
            tcg_gen_neg_i64(regs[r1], tmp);
            set_cc_comp64(s, regs[r1]);
            break;
        }
        tcg_temp_free_i64(tmp);
        break;
    case 0x4: /* LGR R1,R2 [RRE] */
        store_reg(r1, regs[r2]);
        break;
    case 0x6: /* LGBR R1,R2 [RRE] */
        tmp2 = load_reg(r2);
        tcg_gen_ext8s_i64(tmp2, tmp2);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0xd: /* DSGR R1,R2 [RRE] */
    case 0x1d: /* DSGFR R1,R2 [RRE] */
        /* Divide Single: dividend in R1+1, quotient to R1+1,
           remainder to R1.  DSGFR sign-extends a 32-bit divisor.  */
        tmp = load_reg(r1 + 1);
        if (op == 0xd) {
            tmp2 = load_reg(r2);
        } else {
            tmp32_1 = load_reg32(r2);
            tmp2 = tcg_temp_new_i64();
            tcg_gen_ext_i32_i64(tmp2, tmp32_1);
            tcg_temp_free_i32(tmp32_1);
        }
        tmp3 = tcg_temp_new_i64();
        tcg_gen_div_i64(tmp3, tmp, tmp2);
        store_reg(r1 + 1, tmp3);
        tcg_gen_rem_i64(tmp3, tmp, tmp2);
        store_reg(r1, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x14: /* LGFR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tmp = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(tmp, tmp32_1);
        store_reg(r1, tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x16: /* LLGFR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tmp = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(tmp, tmp32_1);
        store_reg(r1, tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x17: /* LLGTR R1,R2 [RRE] */
        /* Load 31-bit value: clear bit 32 before zero-extending.  */
        tmp32_1 = load_reg32(r2);
        tmp = tcg_temp_new_i64();
        tcg_gen_andi_i32(tmp32_1, tmp32_1, 0x7fffffffUL);
        tcg_gen_extu_i32_i64(tmp, tmp32_1);
        store_reg(r1, tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x0f: /* LRVGR R1,R2 [RRE] */
        tcg_gen_bswap64_i64(regs[r1], regs[r2]);
        break;
    case 0x1f: /* LRVR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x20: /* CGR R1,R2 [RRE] */
    case 0x30: /* CGFR R1,R2 [RRE] */
        tmp2 = load_reg(r2);
        if (op == 0x30) {
            tcg_gen_ext32s_i64(tmp2, tmp2);
        }
        tmp = load_reg(r1);
        cmp_s64(s, tmp, tmp2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x21: /* CLGR R1,R2 [RRE] */
    case 0x31: /* CLGFR R1,R2 [RRE] */
        tmp2 = load_reg(r2);
        if (op == 0x31) {
            tcg_gen_ext32u_i64(tmp2, tmp2);
        }
        tmp = load_reg(r1);
        cmp_u64(s, tmp, tmp2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x26: /* LBR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tcg_gen_ext8s_i32(tmp32_1, tmp32_1);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x27: /* LHR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tcg_gen_ext16s_i32(tmp32_1, tmp32_1);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x80: /* NGR R1,R2 [RRE] */
    case 0x81: /* OGR R1,R2 [RRE] */
    case 0x82: /* XGR R1,R2 [RRE] */
        tmp = load_reg(r1);
        tmp2 = load_reg(r2);
        switch (op) {
        case 0x80:
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0x81:
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        case 0x82:
            tcg_gen_xor_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        set_cc_nz_u64(s, tmp);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x83: /* FLOGR R1,R2 [RRE] */
        tmp = load_reg(r2);
        tmp32_1 = tcg_const_i32(r1);
        gen_helper_flogr(cc_op, cpu_env, tmp32_1, tmp);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x84: /* LLGCR R1,R2 [RRE] */
        tmp = load_reg(r2);
        tcg_gen_andi_i64(tmp, tmp, 0xff);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x85: /* LLGHR R1,R2 [RRE] */
        tmp = load_reg(r2);
        tcg_gen_andi_i64(tmp, tmp, 0xffff);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x87: /* DLGR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp = load_reg(r2);
        gen_helper_dlg(cpu_env, tmp32_1, tmp);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x88: /* ALCGR R1,R2 [RRE] */
        /* Add with carry: carry is bit 1 of the materialized cc.  */
        tmp = load_reg(r1);
        tmp2 = load_reg(r2);
        tmp3 = tcg_temp_new_i64();
        gen_op_calc_cc(s);
        tcg_gen_extu_i32_i64(tmp3, cc_op);
        tcg_gen_shri_i64(tmp3, tmp3, 1);
        tcg_gen_andi_i64(tmp3, tmp3, 1);
        tcg_gen_add_i64(tmp3, tmp2, tmp3);
        tcg_gen_add_i64(tmp3, tmp, tmp3);
        store_reg(r1, tmp3);
        set_cc_addu64(s, tmp, tmp2, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x89: /* SLBGR R1,R2 [RRE] */
        tmp = load_reg(r1);
        tmp2 = load_reg(r2);
        tmp32_1 = tcg_const_i32(r1);
        gen_op_calc_cc(s);
        gen_helper_slbg(cc_op, cpu_env, cc_op, tmp32_1, tmp, tmp2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x94: /* LLCR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xff);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x95: /* LLHR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xffff);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x96: /* MLR R1,R2 [RRE] */
        /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
        tmp2 = load_reg(r2);
        tmp3 = load_reg((r1 + 1) & 15);
        tcg_gen_ext32u_i64(tmp2, tmp2);
        tcg_gen_ext32u_i64(tmp3, tmp3);
        tcg_gen_mul_i64(tmp2, tmp2, tmp3);
        store_reg32_i64((r1 + 1) & 15, tmp2);
        tcg_gen_shri_i64(tmp2, tmp2, 32);
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x97: /* DLR R1,R2 [RRE] */
        /* reg(r1) = reg(r1, r1+1) % reg(r2) */
        /* reg(r1+1) = reg(r1, r1+1) / reg(r2) */
        tmp = load_reg(r1);
        tmp2 = load_reg(r2);
        tmp3 = load_reg((r1 + 1) & 15);
        tcg_gen_ext32u_i64(tmp2, tmp2);
        tcg_gen_ext32u_i64(tmp3, tmp3);
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(tmp, tmp, tmp3);

        tcg_gen_rem_i64(tmp3, tmp, tmp2);
        tcg_gen_div_i64(tmp, tmp, tmp2);
        store_reg32_i64((r1 + 1) & 15, tmp);
        store_reg32_i64(r1, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x98: /* ALCR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = load_reg32(r2);
        tmp32_3 = tcg_temp_new_i32();
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
        set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
        store_reg32(r1, tmp32_3);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x99: /* SLBR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tmp32_2 = tcg_const_i32(r1);
        gen_op_calc_cc(s);
        gen_helper_slb(cc_op, cpu_env, cc_op, tmp32_2, tmp32_1);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    default:
        LOG_DISAS("illegal b9 operation 0x%x\n", op);
        gen_illegal_opcode(s);
        break;
    }
}
3282
/*
 * Translate RIL-format instructions with major opcode 0xc0:
 * PC-relative loads/branches and 32-bit immediate logical ops.
 *
 * i2 is the signed 32-bit immediate; for relative insns it counts
 * halfwords, so the branch target is pc + i2 * 2.
 */
static void disas_c0(CPUS390XState *env, DisasContext *s, int op, int r1, int i2)
{
    TCGv_i64 tmp;
    TCGv_i32 tmp32_1, tmp32_2;
    uint64_t target = s->pc + i2 * 2LL;
    int l1;

    LOG_DISAS("disas_c0: op 0x%x r1 %d i2 %d\n", op, r1, i2);

    switch (op) {
    case 0: /* larl r1, i2 */
        tmp = tcg_const_i64(target);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x1: /* LGFI R1,I2 [RIL] */
        tmp = tcg_const_i64((int64_t)i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x4: /* BRCL M1,I2 [RIL] */
        if (r1 == 15) { /* m1 == r1 */
            /* Mask 0xf matches every cc: unconditional branch.  */
            gen_goto_tb(s, 0, target);
            s->is_jmp = DISAS_TB_JUMP;
            break;
        }
        /* m1 & (1 << (3 - cc)) */
        tmp32_1 = tcg_const_i32(3);
        tmp32_2 = tcg_const_i32(1);
        gen_op_calc_cc(s);
        tcg_gen_sub_i32(tmp32_1, tmp32_1, cc_op);
        tcg_gen_shl_i32(tmp32_2, tmp32_2, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tmp32_1 = tcg_const_i32(r1); /* m1 == r1 */
        tcg_gen_and_i32(tmp32_1, tmp32_1, tmp32_2);
        l1 = gen_new_label();
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
        gen_goto_tb(s, 0, target);
        gen_set_label(l1);
        gen_goto_tb(s, 1, s->pc + 6);
        s->is_jmp = DISAS_TB_JUMP;
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x5: /* brasl r1, i2 */
        /* Save the link info for the 6-byte RIL insn, then branch.  */
        tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 6));
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        gen_goto_tb(s, 0, target);
        s->is_jmp = DISAS_TB_JUMP;
        break;
    case 0x7: /* XILF R1,I2 [RIL] */
    case 0xb: /* NILF R1,I2 [RIL] */
    case 0xd: /* OILF R1,I2 [RIL] */
        tmp32_1 = load_reg32(r1);
        switch (op) {
        case 0x7:
            tcg_gen_xori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
            break;
        case 0xb:
            tcg_gen_andi_i32(tmp32_1, tmp32_1, (uint32_t)i2);
            break;
        case 0xd:
            tcg_gen_ori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
            break;
        default:
            tcg_abort();
        }
        store_reg32(r1, tmp32_1);
        set_cc_nz_u32(s, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x9: /* IILF R1,I2 [RIL] */
        tmp32_1 = tcg_const_i32((uint32_t)i2);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xa: /* NIHF R1,I2 [RIL] */
        /* AND the high 32 bits only; low half is kept by the mask.
           cc is set from the resulting high word.  */
        tmp = load_reg(r1);
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_andi_i64(tmp, tmp, (((uint64_t)((uint32_t)i2)) << 32)
                                   | 0xffffffffULL);
        store_reg(r1, tmp);
        tcg_gen_shri_i64(tmp, tmp, 32);
        tcg_gen_trunc_i64_i32(tmp32_1, tmp);
        set_cc_nz_u32(s, tmp32_1);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xe: /* LLIHF R1,I2 [RIL] */
        tmp = tcg_const_i64(((uint64_t)(uint32_t)i2) << 32);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xf: /* LLILF R1,I2 [RIL] */
        tmp = tcg_const_i64((uint32_t)i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    default:
        LOG_DISAS("illegal c0 operation 0x%x\n", op);
        gen_illegal_opcode(s);
        break;
    }
}
3388
3389 static void disas_c2(CPUS390XState *env, DisasContext *s, int op, int r1,
3390 int i2)
3391 {
3392 TCGv_i64 tmp;
3393 TCGv_i32 tmp32_1;
3394
3395 switch (op) {
3396 case 0xc: /* CGFI R1,I2 [RIL] */
3397 tmp = load_reg(r1);
3398 cmp_s64c(s, tmp, (int64_t)i2);
3399 tcg_temp_free_i64(tmp);
3400 break;
3401 case 0xe: /* CLGFI R1,I2 [RIL] */
3402 tmp = load_reg(r1);
3403 cmp_u64c(s, tmp, (uint64_t)(uint32_t)i2);
3404 tcg_temp_free_i64(tmp);
3405 break;
3406 case 0xd: /* CFI R1,I2 [RIL] */
3407 tmp32_1 = load_reg32(r1);
3408 cmp_s32c(s, tmp32_1, i2);
3409 tcg_temp_free_i32(tmp32_1);
3410 break;
3411 case 0xf: /* CLFI R1,I2 [RIL] */
3412 tmp32_1 = load_reg32(r1);
3413 cmp_u32c(s, tmp32_1, i2);
3414 tcg_temp_free_i32(tmp32_1);
3415 break;
3416 default:
3417 LOG_DISAS("illegal c2 operation 0x%x\n", op);
3418 gen_illegal_opcode(s);
3419 break;
3420 }
3421 }
3422
3423 static void gen_and_or_xor_i32(int opc, TCGv_i32 tmp, TCGv_i32 tmp2)
3424 {
3425 switch (opc & 0xf) {
3426 case 0x4:
3427 tcg_gen_and_i32(tmp, tmp, tmp2);
3428 break;
3429 case 0x6:
3430 tcg_gen_or_i32(tmp, tmp, tmp2);
3431 break;
3432 case 0x7:
3433 tcg_gen_xor_i32(tmp, tmp, tmp2);
3434 break;
3435 default:
3436 tcg_abort();
3437 }
3438 }
3439
3440 static void disas_s390_insn(CPUS390XState *env, DisasContext *s)
3441 {
3442 TCGv_i64 tmp, tmp2, tmp3, tmp4;
3443 TCGv_i32 tmp32_1, tmp32_2, tmp32_3, tmp32_4;
3444 unsigned char opc;
3445 uint64_t insn;
3446 int op, r1, r2, r3, d1, d2, x2, b1, b2, i, i2, r1b;
3447 TCGv_i32 vl;
3448 int l1;
3449
3450 opc = cpu_ldub_code(env, s->pc);
3451 LOG_DISAS("opc 0x%x\n", opc);
3452
3453 switch (opc) {
3454 #ifndef CONFIG_USER_ONLY
3455 case 0x01: /* SAM */
3456 insn = ld_code2(env, s->pc);
3457 /* set addressing mode, but we only do 64bit anyways */
3458 break;
3459 #endif
3460 case 0x6: /* BCTR R1,R2 [RR] */
3461 insn = ld_code2(env, s->pc);
3462 decode_rr(s, insn, &r1, &r2);
3463 tmp32_1 = load_reg32(r1);
3464 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
3465 store_reg32(r1, tmp32_1);
3466
3467 if (r2) {
3468 gen_update_cc_op(s);
3469 l1 = gen_new_label();
3470 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
3471
3472 /* not taking the branch, jump to after the instruction */
3473 gen_goto_tb(s, 0, s->pc + 2);
3474 gen_set_label(l1);
3475
3476 /* take the branch, move R2 into psw.addr */
3477 tmp32_1 = load_reg32(r2);
3478 tmp = tcg_temp_new_i64();
3479 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3480 tcg_gen_mov_i64(psw_addr, tmp);
3481 s->is_jmp = DISAS_JUMP;
3482 tcg_temp_free_i32(tmp32_1);
3483 tcg_temp_free_i64(tmp);
3484 }
3485 break;
3486 case 0x7: /* BCR M1,R2 [RR] */
3487 insn = ld_code2(env, s->pc);
3488 decode_rr(s, insn, &r1, &r2);
3489 if (r2) {
3490 tmp = load_reg(r2);
3491 gen_bcr(s, r1, tmp, s->pc);
3492 tcg_temp_free_i64(tmp);
3493 s->is_jmp = DISAS_TB_JUMP;
3494 } else {
3495 /* XXX: "serialization and checkpoint-synchronization function"? */
3496 }
3497 break;
3498 case 0xa: /* SVC I [RR] */
3499 insn = ld_code2(env, s->pc);
3500 debug_insn(insn);
3501 i = insn & 0xff;
3502 update_psw_addr(s);
3503 gen_op_calc_cc(s);
3504 tmp32_1 = tcg_const_i32(i);
3505 tmp32_2 = tcg_const_i32(s->next_pc - s->pc);
3506 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, int_svc_code));
3507 tcg_gen_st_i32(tmp32_2, cpu_env, offsetof(CPUS390XState, int_svc_ilen));
3508 gen_exception(EXCP_SVC);
3509 s->is_jmp = DISAS_EXCP;
3510 tcg_temp_free_i32(tmp32_1);
3511 tcg_temp_free_i32(tmp32_2);
3512 break;
3513 case 0xd: /* BASR R1,R2 [RR] */
3514 insn = ld_code2(env, s->pc);
3515 decode_rr(s, insn, &r1, &r2);
3516 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 2));
3517 store_reg(r1, tmp);
3518 if (r2) {
3519 tmp2 = load_reg(r2);
3520 tcg_gen_mov_i64(psw_addr, tmp2);
3521 tcg_temp_free_i64(tmp2);
3522 s->is_jmp = DISAS_JUMP;
3523 }
3524 tcg_temp_free_i64(tmp);
3525 break;
3526 case 0xe: /* MVCL R1,R2 [RR] */
3527 insn = ld_code2(env, s->pc);
3528 decode_rr(s, insn, &r1, &r2);
3529 tmp32_1 = tcg_const_i32(r1);
3530 tmp32_2 = tcg_const_i32(r2);
3531 potential_page_fault(s);
3532 gen_helper_mvcl(cc_op, cpu_env, tmp32_1, tmp32_2);
3533 set_cc_static(s);
3534 tcg_temp_free_i32(tmp32_1);
3535 tcg_temp_free_i32(tmp32_2);
3536 break;
3537 case 0x10: /* LPR R1,R2 [RR] */
3538 insn = ld_code2(env, s->pc);
3539 decode_rr(s, insn, &r1, &r2);
3540 tmp32_1 = load_reg32(r2);
3541 set_cc_abs32(s, tmp32_1);
3542 gen_helper_abs_i32(tmp32_1, tmp32_1);
3543 store_reg32(r1, tmp32_1);
3544 tcg_temp_free_i32(tmp32_1);
3545 break;
3546 case 0x11: /* LNR R1,R2 [RR] */
3547 insn = ld_code2(env, s->pc);
3548 decode_rr(s, insn, &r1, &r2);
3549 tmp32_1 = load_reg32(r2);
3550 set_cc_nabs32(s, tmp32_1);
3551 gen_helper_nabs_i32(tmp32_1, tmp32_1);
3552 store_reg32(r1, tmp32_1);
3553 tcg_temp_free_i32(tmp32_1);
3554 break;
3555 case 0x12: /* LTR R1,R2 [RR] */
3556 insn = ld_code2(env, s->pc);
3557 decode_rr(s, insn, &r1, &r2);
3558 tmp32_1 = load_reg32(r2);
3559 if (r1 != r2) {
3560 store_reg32(r1, tmp32_1);
3561 }
3562 set_cc_s32(s, tmp32_1);
3563 tcg_temp_free_i32(tmp32_1);
3564 break;
3565 case 0x13: /* LCR R1,R2 [RR] */
3566 insn = ld_code2(env, s->pc);
3567 decode_rr(s, insn, &r1, &r2);
3568 tmp32_1 = load_reg32(r2);
3569 tcg_gen_neg_i32(tmp32_1, tmp32_1);
3570 store_reg32(r1, tmp32_1);
3571 set_cc_comp32(s, tmp32_1);
3572 tcg_temp_free_i32(tmp32_1);
3573 break;
3574 case 0x14: /* NR R1,R2 [RR] */
3575 case 0x16: /* OR R1,R2 [RR] */
3576 case 0x17: /* XR R1,R2 [RR] */
3577 insn = ld_code2(env, s->pc);
3578 decode_rr(s, insn, &r1, &r2);
3579 tmp32_2 = load_reg32(r2);
3580 tmp32_1 = load_reg32(r1);
3581 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
3582 store_reg32(r1, tmp32_1);
3583 set_cc_nz_u32(s, tmp32_1);
3584 tcg_temp_free_i32(tmp32_1);
3585 tcg_temp_free_i32(tmp32_2);
3586 break;
3587 case 0x18: /* LR R1,R2 [RR] */
3588 insn = ld_code2(env, s->pc);
3589 decode_rr(s, insn, &r1, &r2);
3590 tmp32_1 = load_reg32(r2);
3591 store_reg32(r1, tmp32_1);
3592 tcg_temp_free_i32(tmp32_1);
3593 break;
3594 case 0x15: /* CLR R1,R2 [RR] */
3595 case 0x19: /* CR R1,R2 [RR] */
3596 insn = ld_code2(env, s->pc);
3597 decode_rr(s, insn, &r1, &r2);
3598 tmp32_1 = load_reg32(r1);
3599 tmp32_2 = load_reg32(r2);
3600 if (opc == 0x15) {
3601 cmp_u32(s, tmp32_1, tmp32_2);
3602 } else {
3603 cmp_s32(s, tmp32_1, tmp32_2);
3604 }
3605 tcg_temp_free_i32(tmp32_1);
3606 tcg_temp_free_i32(tmp32_2);
3607 break;
3608 case 0x1c: /* MR R1,R2 [RR] */
3609 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3610 insn = ld_code2(env, s->pc);
3611 decode_rr(s, insn, &r1, &r2);
3612 tmp2 = load_reg(r2);
3613 tmp3 = load_reg((r1 + 1) & 15);
3614 tcg_gen_ext32s_i64(tmp2, tmp2);
3615 tcg_gen_ext32s_i64(tmp3, tmp3);
3616 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
3617 store_reg32_i64((r1 + 1) & 15, tmp2);
3618 tcg_gen_shri_i64(tmp2, tmp2, 32);
3619 store_reg32_i64(r1, tmp2);
3620 tcg_temp_free_i64(tmp2);
3621 tcg_temp_free_i64(tmp3);
3622 break;
3623 case 0x1d: /* DR R1,R2 [RR] */
3624 insn = ld_code2(env, s->pc);
3625 decode_rr(s, insn, &r1, &r2);
3626 tmp32_1 = load_reg32(r1);
3627 tmp32_2 = load_reg32(r1 + 1);
3628 tmp32_3 = load_reg32(r2);
3629
3630 tmp = tcg_temp_new_i64(); /* dividend */
3631 tmp2 = tcg_temp_new_i64(); /* divisor */
3632 tmp3 = tcg_temp_new_i64();
3633
3634 /* dividend is r(r1 << 32) | r(r1 + 1) */
3635 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3636 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
3637 tcg_gen_shli_i64(tmp, tmp, 32);
3638 tcg_gen_or_i64(tmp, tmp, tmp2);
3639
3640 /* divisor is r(r2) */
3641 tcg_gen_ext_i32_i64(tmp2, tmp32_3);
3642
3643 tcg_gen_div_i64(tmp3, tmp, tmp2);
3644 tcg_gen_rem_i64(tmp, tmp, tmp2);
3645
3646 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
3647 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
3648
3649 store_reg32(r1, tmp32_1); /* remainder */
3650 store_reg32(r1 + 1, tmp32_2); /* quotient */
3651 tcg_temp_free_i32(tmp32_1);
3652 tcg_temp_free_i32(tmp32_2);
3653 tcg_temp_free_i32(tmp32_3);
3654 tcg_temp_free_i64(tmp);
3655 tcg_temp_free_i64(tmp2);
3656 tcg_temp_free_i64(tmp3);
3657 break;
3658 case 0x28: /* LDR R1,R2 [RR] */
3659 insn = ld_code2(env, s->pc);
3660 decode_rr(s, insn, &r1, &r2);
3661 tmp = load_freg(r2);
3662 store_freg(r1, tmp);
3663 tcg_temp_free_i64(tmp);
3664 break;
3665 case 0x38: /* LER R1,R2 [RR] */
3666 insn = ld_code2(env, s->pc);
3667 decode_rr(s, insn, &r1, &r2);
3668 tmp32_1 = load_freg32(r2);
3669 store_freg32(r1, tmp32_1);
3670 tcg_temp_free_i32(tmp32_1);
3671 break;
3672 case 0x40: /* STH R1,D2(X2,B2) [RX] */
3673 insn = ld_code4(env, s->pc);
3674 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3675 tmp2 = load_reg(r1);
3676 tcg_gen_qemu_st16(tmp2, tmp, get_mem_index(s));
3677 tcg_temp_free_i64(tmp);
3678 tcg_temp_free_i64(tmp2);
3679 break;
3680 case 0x41: /* la */
3681 insn = ld_code4(env, s->pc);
3682 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3683 store_reg(r1, tmp); /* FIXME: 31/24-bit addressing */
3684 tcg_temp_free_i64(tmp);
3685 break;
3686 case 0x42: /* STC R1,D2(X2,B2) [RX] */
3687 insn = ld_code4(env, s->pc);
3688 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3689 tmp2 = load_reg(r1);
3690 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
3691 tcg_temp_free_i64(tmp);
3692 tcg_temp_free_i64(tmp2);
3693 break;
3694 case 0x43: /* IC R1,D2(X2,B2) [RX] */
3695 insn = ld_code4(env, s->pc);
3696 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3697 tmp2 = tcg_temp_new_i64();
3698 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
3699 store_reg8(r1, tmp2);
3700 tcg_temp_free_i64(tmp);
3701 tcg_temp_free_i64(tmp2);
3702 break;
3703 case 0x44: /* EX R1,D2(X2,B2) [RX] */
3704 insn = ld_code4(env, s->pc);
3705 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3706 tmp2 = load_reg(r1);
3707 tmp3 = tcg_const_i64(s->pc + 4);
3708 update_psw_addr(s);
3709 gen_op_calc_cc(s);
3710 gen_helper_ex(cc_op, cpu_env, cc_op, tmp2, tmp, tmp3);
3711 set_cc_static(s);
3712 tcg_temp_free_i64(tmp);
3713 tcg_temp_free_i64(tmp2);
3714 tcg_temp_free_i64(tmp3);
3715 break;
3716 case 0x46: /* BCT R1,D2(X2,B2) [RX] */
3717 insn = ld_code4(env, s->pc);
3718 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3719 tcg_temp_free_i64(tmp);
3720
3721 tmp32_1 = load_reg32(r1);
3722 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
3723 store_reg32(r1, tmp32_1);
3724
3725 gen_update_cc_op(s);
3726 l1 = gen_new_label();
3727 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
3728
3729 /* not taking the branch, jump to after the instruction */
3730 gen_goto_tb(s, 0, s->pc + 4);
3731 gen_set_label(l1);
3732
3733 /* take the branch, move R2 into psw.addr */
3734 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3735 tcg_gen_mov_i64(psw_addr, tmp);
3736 s->is_jmp = DISAS_JUMP;
3737 tcg_temp_free_i32(tmp32_1);
3738 tcg_temp_free_i64(tmp);
3739 break;
3740 case 0x47: /* BC M1,D2(X2,B2) [RX] */
3741 insn = ld_code4(env, s->pc);
3742 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3743 gen_bcr(s, r1, tmp, s->pc + 4);
3744 tcg_temp_free_i64(tmp);
3745 s->is_jmp = DISAS_TB_JUMP;
3746 break;
3747 case 0x48: /* LH R1,D2(X2,B2) [RX] */
3748 insn = ld_code4(env, s->pc);
3749 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3750 tmp2 = tcg_temp_new_i64();
3751 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
3752 store_reg32_i64(r1, tmp2);
3753 tcg_temp_free_i64(tmp);
3754 tcg_temp_free_i64(tmp2);
3755 break;
3756 case 0x49: /* CH R1,D2(X2,B2) [RX] */
3757 insn = ld_code4(env, s->pc);
3758 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3759 tmp32_1 = load_reg32(r1);
3760 tmp32_2 = tcg_temp_new_i32();
3761 tmp2 = tcg_temp_new_i64();
3762 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
3763 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
3764 cmp_s32(s, tmp32_1, tmp32_2);
3765 tcg_temp_free_i32(tmp32_1);
3766 tcg_temp_free_i32(tmp32_2);
3767 tcg_temp_free_i64(tmp);
3768 tcg_temp_free_i64(tmp2);
3769 break;
3770 case 0x4d: /* BAS R1,D2(X2,B2) [RX] */
3771 insn = ld_code4(env, s->pc);
3772 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3773 tmp2 = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
3774 store_reg(r1, tmp2);
3775 tcg_gen_mov_i64(psw_addr, tmp);
3776 tcg_temp_free_i64(tmp);
3777 tcg_temp_free_i64(tmp2);
3778 s->is_jmp = DISAS_JUMP;
3779 break;
3780 case 0x4e: /* CVD R1,D2(X2,B2) [RX] */
3781 insn = ld_code4(env, s->pc);
3782 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3783 tmp2 = tcg_temp_new_i64();
3784 tmp32_1 = tcg_temp_new_i32();
3785 tcg_gen_trunc_i64_i32(tmp32_1, regs[r1]);
3786 gen_helper_cvd(tmp2, tmp32_1);
3787 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
3788 tcg_temp_free_i64(tmp);
3789 tcg_temp_free_i64(tmp2);
3790 tcg_temp_free_i32(tmp32_1);
3791 break;
3792 case 0x50: /* st r1, d2(x2, b2) */
3793 insn = ld_code4(env, s->pc);
3794 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3795 tmp2 = load_reg(r1);
3796 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
3797 tcg_temp_free_i64(tmp);
3798 tcg_temp_free_i64(tmp2);
3799 break;
3800 case 0x55: /* CL R1,D2(X2,B2) [RX] */
3801 insn = ld_code4(env, s->pc);
3802 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3803 tmp2 = tcg_temp_new_i64();
3804 tmp32_1 = tcg_temp_new_i32();
3805 tmp32_2 = load_reg32(r1);
3806 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
3807 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
3808 cmp_u32(s, tmp32_2, tmp32_1);
3809 tcg_temp_free_i64(tmp);
3810 tcg_temp_free_i64(tmp2);
3811 tcg_temp_free_i32(tmp32_1);
3812 tcg_temp_free_i32(tmp32_2);
3813 break;
3814 case 0x54: /* N R1,D2(X2,B2) [RX] */
3815 case 0x56: /* O R1,D2(X2,B2) [RX] */
3816 case 0x57: /* X R1,D2(X2,B2) [RX] */
3817 insn = ld_code4(env, s->pc);
3818 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3819 tmp2 = tcg_temp_new_i64();
3820 tmp32_1 = load_reg32(r1);
3821 tmp32_2 = tcg_temp_new_i32();
3822 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
3823 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
3824 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
3825 store_reg32(r1, tmp32_1);
3826 set_cc_nz_u32(s, tmp32_1);
3827 tcg_temp_free_i64(tmp);
3828 tcg_temp_free_i64(tmp2);
3829 tcg_temp_free_i32(tmp32_1);
3830 tcg_temp_free_i32(tmp32_2);
3831 break;
3832 case 0x58: /* l r1, d2(x2, b2) */
3833 insn = ld_code4(env, s->pc);
3834 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3835 tmp2 = tcg_temp_new_i64();
3836 tmp32_1 = tcg_temp_new_i32();
3837 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
3838 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
3839 store_reg32(r1, tmp32_1);
3840 tcg_temp_free_i64(tmp);
3841 tcg_temp_free_i64(tmp2);
3842 tcg_temp_free_i32(tmp32_1);
3843 break;
3844 case 0x59: /* C R1,D2(X2,B2) [RX] */
3845 insn = ld_code4(env, s->pc);
3846 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3847 tmp2 = tcg_temp_new_i64();
3848 tmp32_1 = tcg_temp_new_i32();
3849 tmp32_2 = load_reg32(r1);
3850 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
3851 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
3852 cmp_s32(s, tmp32_2, tmp32_1);
3853 tcg_temp_free_i64(tmp);
3854 tcg_temp_free_i64(tmp2);
3855 tcg_temp_free_i32(tmp32_1);
3856 tcg_temp_free_i32(tmp32_2);
3857 break;
3858 case 0x5c: /* M R1,D2(X2,B2) [RX] */
3859 /* reg(r1, r1+1) = reg(r1+1) * *(s32*)addr */
3860 insn = ld_code4(env, s->pc);
3861 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3862 tmp2 = tcg_temp_new_i64();
3863 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
3864 tmp3 = load_reg((r1 + 1) & 15);
3865 tcg_gen_ext32s_i64(tmp2, tmp2);
3866 tcg_gen_ext32s_i64(tmp3, tmp3);
3867 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
3868 store_reg32_i64((r1 + 1) & 15, tmp2);
3869 tcg_gen_shri_i64(tmp2, tmp2, 32);
3870 store_reg32_i64(r1, tmp2);
3871 tcg_temp_free_i64(tmp);
3872 tcg_temp_free_i64(tmp2);
3873 tcg_temp_free_i64(tmp3);
3874 break;
3875 case 0x5d: /* D R1,D2(X2,B2) [RX] */
3876 insn = ld_code4(env, s->pc);
3877 tmp3 = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3878 tmp32_1 = load_reg32(r1);
3879 tmp32_2 = load_reg32(r1 + 1);
3880
3881 tmp = tcg_temp_new_i64();
3882 tmp2 = tcg_temp_new_i64();
3883
3884 /* dividend is r(r1 << 32) | r(r1 + 1) */
3885 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3886 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
3887 tcg_gen_shli_i64(tmp, tmp, 32);
3888 tcg_gen_or_i64(tmp, tmp, tmp2);
3889
3890 /* divisor is in memory */
3891 tcg_gen_qemu_ld32s(tmp2, tmp3, get_mem_index(s));
3892
3893 /* XXX divisor == 0 -> FixP divide exception */
3894
3895 tcg_gen_div_i64(tmp3, tmp, tmp2);
3896 tcg_gen_rem_i64(tmp, tmp, tmp2);
3897
3898 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
3899 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
3900
3901 store_reg32(r1, tmp32_1); /* remainder */
3902 store_reg32(r1 + 1, tmp32_2); /* quotient */
3903 tcg_temp_free_i32(tmp32_1);
3904 tcg_temp_free_i32(tmp32_2);
3905 tcg_temp_free_i64(tmp);
3906 tcg_temp_free_i64(tmp2);
3907 tcg_temp_free_i64(tmp3);
3908 break;
3909 case 0x60: /* STD R1,D2(X2,B2) [RX] */
3910 insn = ld_code4(env, s->pc);
3911 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3912 tmp2 = load_freg(r1);
3913 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
3914 tcg_temp_free_i64(tmp);
3915 tcg_temp_free_i64(tmp2);
3916 break;
3917 case 0x68: /* LD R1,D2(X2,B2) [RX] */
3918 insn = ld_code4(env, s->pc);
3919 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3920 tmp2 = tcg_temp_new_i64();
3921 tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
3922 store_freg(r1, tmp2);
3923 tcg_temp_free_i64(tmp);
3924 tcg_temp_free_i64(tmp2);
3925 break;
3926 case 0x70: /* STE R1,D2(X2,B2) [RX] */
3927 insn = ld_code4(env, s->pc);
3928 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3929 tmp2 = tcg_temp_new_i64();
3930 tmp32_1 = load_freg32(r1);
3931 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
3932 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
3933 tcg_temp_free_i64(tmp);
3934 tcg_temp_free_i64(tmp2);
3935 tcg_temp_free_i32(tmp32_1);
3936 break;
3937 case 0x78: /* LE R1,D2(X2,B2) [RX] */
3938 insn = ld_code4(env, s->pc);
3939 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3940 tmp2 = tcg_temp_new_i64();
3941 tmp32_1 = tcg_temp_new_i32();
3942 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
3943 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
3944 store_freg32(r1, tmp32_1);
3945 tcg_temp_free_i64(tmp);
3946 tcg_temp_free_i64(tmp2);
3947 tcg_temp_free_i32(tmp32_1);
3948 break;
3949 #ifndef CONFIG_USER_ONLY
3950 case 0x80: /* SSM D2(B2) [S] */
3951 /* Set System Mask */
3952 check_privileged(s);
3953 insn = ld_code4(env, s->pc);
3954 decode_rs(s, insn, &r1, &r3, &b2, &d2);
3955 tmp = get_address(s, 0, b2, d2);
3956 tmp2 = tcg_temp_new_i64();
3957 tmp3 = tcg_temp_new_i64();
3958 tcg_gen_andi_i64(tmp3, psw_mask, ~0xff00000000000000ULL);
3959 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
3960 tcg_gen_shli_i64(tmp2, tmp2, 56);
3961 tcg_gen_or_i64(psw_mask, tmp3, tmp2);
3962 tcg_temp_free_i64(tmp);
3963 tcg_temp_free_i64(tmp2);
3964 tcg_temp_free_i64(tmp3);
3965 break;
3966 case 0x82: /* LPSW D2(B2) [S] */
3967 /* Load PSW */
3968 check_privileged(s);
3969 insn = ld_code4(env, s->pc);
3970 decode_rs(s, insn, &r1, &r3, &b2, &d2);
3971 tmp = get_address(s, 0, b2, d2);
3972 tmp2 = tcg_temp_new_i64();
3973 tmp3 = tcg_temp_new_i64();
3974 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
3975 tcg_gen_addi_i64(tmp, tmp, 4);
3976 tcg_gen_qemu_ld32u(tmp3, tmp, get_mem_index(s));
3977 /* Convert the 32-bit PSW_MASK into the 64-bit PSW_MASK. */
3978 tcg_gen_shli_i64(tmp2, tmp2, 32);
3979 gen_helper_load_psw(cpu_env, tmp2, tmp3);
3980 tcg_temp_free_i64(tmp);
3981 tcg_temp_free_i64(tmp2);
3982 tcg_temp_free_i64(tmp3);
3983 /* we need to keep cc_op intact */
3984 s->is_jmp = DISAS_JUMP;
3985 break;
3986 case 0x83: /* DIAG R1,R3,D2 [RS] */
3987 /* Diagnose call (KVM hypercall) */
3988 check_privileged(s);
3989 potential_page_fault(s);
3990 insn = ld_code4(env, s->pc);
3991 decode_rs(s, insn, &r1, &r3, &b2, &d2);
3992 tmp32_1 = tcg_const_i32(insn & 0xfff);
3993 tmp2 = load_reg(2);
3994 tmp3 = load_reg(1);
3995 gen_helper_diag(tmp2, cpu_env, tmp32_1, tmp2, tmp3);
3996 store_reg(2, tmp2);
3997 tcg_temp_free_i32(tmp32_1);
3998 tcg_temp_free_i64(tmp2);
3999 tcg_temp_free_i64(tmp3);
4000 break;
4001 #endif
4002 case 0x88: /* SRL R1,D2(B2) [RS] */
4003 case 0x89: /* SLL R1,D2(B2) [RS] */
4004 case 0x8a: /* SRA R1,D2(B2) [RS] */
4005 insn = ld_code4(env, s->pc);
4006 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4007 tmp = get_address(s, 0, b2, d2);
4008 tmp32_1 = load_reg32(r1);
4009 tmp32_2 = tcg_temp_new_i32();
4010 tcg_gen_trunc_i64_i32(tmp32_2, tmp);
4011 tcg_gen_andi_i32(tmp32_2, tmp32_2, 0x3f);
4012 switch (opc) {
4013 case 0x88:
4014 tcg_gen_shr_i32(tmp32_1, tmp32_1, tmp32_2);
4015 break;
4016 case 0x89:
4017 tcg_gen_shl_i32(tmp32_1, tmp32_1, tmp32_2);
4018 break;
4019 case 0x8a:
4020 tcg_gen_sar_i32(tmp32_1, tmp32_1, tmp32_2);
4021 set_cc_s32(s, tmp32_1);
4022 break;
4023 default:
4024 tcg_abort();
4025 }
4026 store_reg32(r1, tmp32_1);
4027 tcg_temp_free_i64(tmp);
4028 tcg_temp_free_i32(tmp32_1);
4029 tcg_temp_free_i32(tmp32_2);
4030 break;
4031 case 0x8c: /* SRDL R1,D2(B2) [RS] */
4032 case 0x8d: /* SLDL R1,D2(B2) [RS] */
4033 case 0x8e: /* SRDA R1,D2(B2) [RS] */
4034 insn = ld_code4(env, s->pc);
4035 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4036 tmp = get_address(s, 0, b2, d2); /* shift */
4037 tmp2 = tcg_temp_new_i64();
4038 tmp32_1 = load_reg32(r1);
4039 tmp32_2 = load_reg32(r1 + 1);
4040 tcg_gen_concat_i32_i64(tmp2, tmp32_2, tmp32_1); /* operand */
4041 switch (opc) {
4042 case 0x8c:
4043 tcg_gen_shr_i64(tmp2, tmp2, tmp);
4044 break;
4045 case 0x8d:
4046 tcg_gen_shl_i64(tmp2, tmp2, tmp);
4047 break;
4048 case 0x8e:
4049 tcg_gen_sar_i64(tmp2, tmp2, tmp);
4050 set_cc_s64(s, tmp2);
4051 break;
4052 }
4053 tcg_gen_shri_i64(tmp, tmp2, 32);
4054 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4055 store_reg32(r1, tmp32_1);
4056 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4057 store_reg32(r1 + 1, tmp32_2);
4058 tcg_temp_free_i64(tmp);
4059 tcg_temp_free_i64(tmp2);
4060 break;
4061 case 0x98: /* LM R1,R3,D2(B2) [RS] */
4062 case 0x90: /* STM R1,R3,D2(B2) [RS] */
4063 insn = ld_code4(env, s->pc);
4064 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4065
4066 tmp = get_address(s, 0, b2, d2);
4067 tmp2 = tcg_temp_new_i64();
4068 tmp3 = tcg_const_i64(4);
4069 tmp4 = tcg_const_i64(0xffffffff00000000ULL);
4070 for (i = r1;; i = (i + 1) % 16) {
4071 if (opc == 0x98) {
4072 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4073 tcg_gen_and_i64(regs[i], regs[i], tmp4);
4074 tcg_gen_or_i64(regs[i], regs[i], tmp2);
4075 } else {
4076 tcg_gen_qemu_st32(regs[i], tmp, get_mem_index(s));
4077 }
4078 if (i == r3) {
4079 break;
4080 }
4081 tcg_gen_add_i64(tmp, tmp, tmp3);
4082 }
4083 tcg_temp_free_i64(tmp);
4084 tcg_temp_free_i64(tmp2);
4085 tcg_temp_free_i64(tmp3);
4086 tcg_temp_free_i64(tmp4);
4087 break;
4088 case 0x91: /* TM D1(B1),I2 [SI] */
4089 insn = ld_code4(env, s->pc);
4090 tmp = decode_si(s, insn, &i2, &b1, &d1);
4091 tmp2 = tcg_const_i64(i2);
4092 tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
4093 cmp_64(s, tmp, tmp2, CC_OP_TM_32);
4094 tcg_temp_free_i64(tmp);
4095 tcg_temp_free_i64(tmp2);
4096 break;
4097 case 0x92: /* MVI D1(B1),I2 [SI] */
4098 insn = ld_code4(env, s->pc);
4099 tmp = decode_si(s, insn, &i2, &b1, &d1);
4100 tmp2 = tcg_const_i64(i2);
4101 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4102 tcg_temp_free_i64(tmp);
4103 tcg_temp_free_i64(tmp2);
4104 break;
4105 case 0x94: /* NI D1(B1),I2 [SI] */
4106 case 0x96: /* OI D1(B1),I2 [SI] */
4107 case 0x97: /* XI D1(B1),I2 [SI] */
4108 insn = ld_code4(env, s->pc);
4109 tmp = decode_si(s, insn, &i2, &b1, &d1);
4110 tmp2 = tcg_temp_new_i64();
4111 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4112 switch (opc) {
4113 case 0x94:
4114 tcg_gen_andi_i64(tmp2, tmp2, i2);
4115 break;
4116 case 0x96:
4117 tcg_gen_ori_i64(tmp2, tmp2, i2);
4118 break;
4119 case 0x97:
4120 tcg_gen_xori_i64(tmp2, tmp2, i2);
4121 break;
4122 default:
4123 tcg_abort();
4124 }
4125 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4126 set_cc_nz_u64(s, tmp2);
4127 tcg_temp_free_i64(tmp);
4128 tcg_temp_free_i64(tmp2);
4129 break;
4130 case 0x95: /* CLI D1(B1),I2 [SI] */
4131 insn = ld_code4(env, s->pc);
4132 tmp = decode_si(s, insn, &i2, &b1, &d1);
4133 tmp2 = tcg_temp_new_i64();
4134 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4135 cmp_u64c(s, tmp2, i2);
4136 tcg_temp_free_i64(tmp);
4137 tcg_temp_free_i64(tmp2);
4138 break;
4139 case 0x9a: /* LAM R1,R3,D2(B2) [RS] */
4140 insn = ld_code4(env, s->pc);
4141 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4142 tmp = get_address(s, 0, b2, d2);
4143 tmp32_1 = tcg_const_i32(r1);
4144 tmp32_2 = tcg_const_i32(r3);
4145 potential_page_fault(s);
4146 gen_helper_lam(cpu_env, tmp32_1, tmp, tmp32_2);
4147 tcg_temp_free_i64(tmp);
4148 tcg_temp_free_i32(tmp32_1);
4149 tcg_temp_free_i32(tmp32_2);
4150 break;
4151 case 0x9b: /* STAM R1,R3,D2(B2) [RS] */
4152 insn = ld_code4(env, s->pc);
4153 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4154 tmp = get_address(s, 0, b2, d2);
4155 tmp32_1 = tcg_const_i32(r1);
4156 tmp32_2 = tcg_const_i32(r3);
4157 potential_page_fault(s);
4158 gen_helper_stam(cpu_env, tmp32_1, tmp, tmp32_2);
4159 tcg_temp_free_i64(tmp);
4160 tcg_temp_free_i32(tmp32_1);
4161 tcg_temp_free_i32(tmp32_2);
4162 break;
4163 case 0xa5:
4164 insn = ld_code4(env, s->pc);
4165 r1 = (insn >> 20) & 0xf;
4166 op = (insn >> 16) & 0xf;
4167 i2 = insn & 0xffff;
4168 disas_a5(env, s, op, r1, i2);
4169 break;
4170 case 0xa7:
4171 insn = ld_code4(env, s->pc);
4172 r1 = (insn >> 20) & 0xf;
4173 op = (insn >> 16) & 0xf;
4174 i2 = (short)insn;
4175 disas_a7(env, s, op, r1, i2);
4176 break;
4177 case 0xa8: /* MVCLE R1,R3,D2(B2) [RS] */
4178 insn = ld_code4(env, s->pc);
4179 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4180 tmp = get_address(s, 0, b2, d2);
4181 tmp32_1 = tcg_const_i32(r1);
4182 tmp32_2 = tcg_const_i32(r3);
4183 potential_page_fault(s);
4184 gen_helper_mvcle(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
4185 set_cc_static(s);
4186 tcg_temp_free_i64(tmp);
4187 tcg_temp_free_i32(tmp32_1);
4188 tcg_temp_free_i32(tmp32_2);
4189 break;
4190 case 0xa9: /* CLCLE R1,R3,D2(B2) [RS] */
4191 insn = ld_code4(env, s->pc);
4192 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4193 tmp = get_address(s, 0, b2, d2);
4194 tmp32_1 = tcg_const_i32(r1);
4195 tmp32_2 = tcg_const_i32(r3);
4196 potential_page_fault(s);
4197 gen_helper_clcle(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
4198 set_cc_static(s);
4199 tcg_temp_free_i64(tmp);
4200 tcg_temp_free_i32(tmp32_1);
4201 tcg_temp_free_i32(tmp32_2);
4202 break;
4203 #ifndef CONFIG_USER_ONLY
4204 case 0xac: /* STNSM D1(B1),I2 [SI] */
4205 case 0xad: /* STOSM D1(B1),I2 [SI] */
4206 check_privileged(s);
4207 insn = ld_code4(env, s->pc);
4208 tmp = decode_si(s, insn, &i2, &b1, &d1);
4209 tmp2 = tcg_temp_new_i64();
4210 tcg_gen_shri_i64(tmp2, psw_mask, 56);
4211 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4212 if (opc == 0xac) {
4213 tcg_gen_andi_i64(psw_mask, psw_mask,
4214 ((uint64_t)i2 << 56) | 0x00ffffffffffffffULL);
4215 } else {
4216 tcg_gen_ori_i64(psw_mask, psw_mask, (uint64_t)i2 << 56);
4217 }
4218 tcg_temp_free_i64(tmp);
4219 tcg_temp_free_i64(tmp2);
4220 break;
4221 case 0xae: /* SIGP R1,R3,D2(B2) [RS] */
4222 check_privileged(s);
4223 insn = ld_code4(env, s->pc);
4224 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4225 tmp = get_address(s, 0, b2, d2);
4226 tmp2 = load_reg(r3);
4227 tmp32_1 = tcg_const_i32(r1);
4228 potential_page_fault(s);
4229 gen_helper_sigp(cc_op, cpu_env, tmp, tmp32_1, tmp2);
4230 set_cc_static(s);
4231 tcg_temp_free_i64(tmp);
4232 tcg_temp_free_i64(tmp2);
4233 tcg_temp_free_i32(tmp32_1);
4234 break;
4235 case 0xb1: /* LRA R1,D2(X2, B2) [RX] */
4236 check_privileged(s);
4237 insn = ld_code4(env, s->pc);
4238 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4239 tmp32_1 = tcg_const_i32(r1);
4240 potential_page_fault(s);
4241 gen_helper_lra(cc_op, cpu_env, tmp, tmp32_1);
4242 set_cc_static(s);
4243 tcg_temp_free_i64(tmp);
4244 tcg_temp_free_i32(tmp32_1);
4245 break;
4246 #endif
4247 case 0xb2:
4248 insn = ld_code4(env, s->pc);
4249 op = (insn >> 16) & 0xff;
4250 switch (op) {
4251 case 0x9c: /* STFPC D2(B2) [S] */
4252 d2 = insn & 0xfff;
4253 b2 = (insn >> 12) & 0xf;
4254 tmp32_1 = tcg_temp_new_i32();
4255 tmp = tcg_temp_new_i64();
4256 tmp2 = get_address(s, 0, b2, d2);
4257 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
4258 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4259 tcg_gen_qemu_st32(tmp, tmp2, get_mem_index(s));
4260 tcg_temp_free_i32(tmp32_1);
4261 tcg_temp_free_i64(tmp);
4262 tcg_temp_free_i64(tmp2);
4263 break;
4264 default:
4265 disas_b2(env, s, op, insn);
4266 break;
4267 }
4268 break;
4269 case 0xb3:
4270 insn = ld_code4(env, s->pc);
4271 op = (insn >> 16) & 0xff;
4272 r3 = (insn >> 12) & 0xf; /* aka m3 */
4273 r1 = (insn >> 4) & 0xf;
4274 r2 = insn & 0xf;
4275 disas_b3(env, s, op, r3, r1, r2);
4276 break;
4277 #ifndef CONFIG_USER_ONLY
4278 case 0xb6: /* STCTL R1,R3,D2(B2) [RS] */
4279 /* Store Control */
4280 check_privileged(s);
4281 insn = ld_code4(env, s->pc);
4282 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4283 tmp = get_address(s, 0, b2, d2);
4284 tmp32_1 = tcg_const_i32(r1);
4285 tmp32_2 = tcg_const_i32(r3);
4286 potential_page_fault(s);
4287 gen_helper_stctl(cpu_env, tmp32_1, tmp, tmp32_2);
4288 tcg_temp_free_i64(tmp);
4289 tcg_temp_free_i32(tmp32_1);
4290 tcg_temp_free_i32(tmp32_2);
4291 break;
4292 case 0xb7: /* LCTL R1,R3,D2(B2) [RS] */
4293 /* Load Control */
4294 check_privileged(s);
4295 insn = ld_code4(env, s->pc);
4296 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4297 tmp = get_address(s, 0, b2, d2);
4298 tmp32_1 = tcg_const_i32(r1);
4299 tmp32_2 = tcg_const_i32(r3);
4300 potential_page_fault(s);
4301 gen_helper_lctl(cpu_env, tmp32_1, tmp, tmp32_2);
4302 tcg_temp_free_i64(tmp);
4303 tcg_temp_free_i32(tmp32_1);
4304 tcg_temp_free_i32(tmp32_2);
4305 break;
4306 #endif
4307 case 0xb9:
4308 insn = ld_code4(env, s->pc);
4309 r1 = (insn >> 4) & 0xf;
4310 r2 = insn & 0xf;
4311 op = (insn >> 16) & 0xff;
4312 disas_b9(env, s, op, r1, r2);
4313 break;
4314 case 0xba: /* CS R1,R3,D2(B2) [RS] */
4315 insn = ld_code4(env, s->pc);
4316 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4317 tmp = get_address(s, 0, b2, d2);
4318 tmp32_1 = tcg_const_i32(r1);
4319 tmp32_2 = tcg_const_i32(r3);
4320 potential_page_fault(s);
4321 gen_helper_cs(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
4322 set_cc_static(s);
4323 tcg_temp_free_i64(tmp);
4324 tcg_temp_free_i32(tmp32_1);
4325 tcg_temp_free_i32(tmp32_2);
4326 break;
4327 case 0xbd: /* CLM R1,M3,D2(B2) [RS] */
4328 insn = ld_code4(env, s->pc);
4329 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4330 tmp = get_address(s, 0, b2, d2);
4331 tmp32_1 = load_reg32(r1);
4332 tmp32_2 = tcg_const_i32(r3);
4333 potential_page_fault(s);
4334 gen_helper_clm(cc_op, cpu_env, tmp32_1, tmp32_2, tmp);
4335 set_cc_static(s);
4336 tcg_temp_free_i64(tmp);
4337 tcg_temp_free_i32(tmp32_1);
4338 tcg_temp_free_i32(tmp32_2);
4339 break;
4340 case 0xbe: /* STCM R1,M3,D2(B2) [RS] */
4341 insn = ld_code4(env, s->pc);
4342 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4343 tmp = get_address(s, 0, b2, d2);
4344 tmp32_1 = load_reg32(r1);
4345 tmp32_2 = tcg_const_i32(r3);
4346 potential_page_fault(s);
4347 gen_helper_stcm(cpu_env, tmp32_1, tmp32_2, tmp);
4348 tcg_temp_free_i64(tmp);
4349 tcg_temp_free_i32(tmp32_1);
4350 tcg_temp_free_i32(tmp32_2);
4351 break;
4352 case 0xbf: /* ICM R1,M3,D2(B2) [RS] */
4353 insn = ld_code4(env, s->pc);
4354 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4355 if (r3 == 15) {
4356 /* effectively a 32-bit load */
4357 tmp = get_address(s, 0, b2, d2);
4358 tmp32_1 = tcg_temp_new_i32();
4359 tmp32_2 = tcg_const_i32(r3);
4360 tcg_gen_qemu_ld32u(tmp, tmp, get_mem_index(s));
4361 store_reg32_i64(r1, tmp);
4362 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4363 set_cc_icm(s, tmp32_2, tmp32_1);
4364 tcg_temp_free_i64(tmp);
4365 tcg_temp_free_i32(tmp32_1);
4366 tcg_temp_free_i32(tmp32_2);
4367 } else if (r3) {
4368 uint32_t mask = 0x00ffffffUL;
4369 uint32_t shift = 24;
4370 int m3 = r3;
4371 tmp = get_address(s, 0, b2, d2);
4372 tmp2 = tcg_temp_new_i64();
4373 tmp32_1 = load_reg32(r1);
4374 tmp32_2 = tcg_temp_new_i32();
4375 tmp32_3 = tcg_const_i32(r3);
4376 tmp32_4 = tcg_const_i32(0);
4377 while (m3) {
4378 if (m3 & 8) {
4379 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4380 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4381 if (shift) {
4382 tcg_gen_shli_i32(tmp32_2, tmp32_2, shift);
4383 }
4384 tcg_gen_andi_i32(tmp32_1, tmp32_1, mask);
4385 tcg_gen_or_i32(tmp32_1, tmp32_1, tmp32_2);
4386 tcg_gen_or_i32(tmp32_4, tmp32_4, tmp32_2);
4387 tcg_gen_addi_i64(tmp, tmp, 1);
4388 }
4389 m3 = (m3 << 1) & 0xf;
4390 mask = (mask >> 8) | 0xff000000UL;
4391 shift -= 8;
4392 }
4393 store_reg32(r1, tmp32_1);
4394 set_cc_icm(s, tmp32_3, tmp32_4);
4395 tcg_temp_free_i64(tmp);
4396 tcg_temp_free_i64(tmp2);
4397 tcg_temp_free_i32(tmp32_1);
4398 tcg_temp_free_i32(tmp32_2);
4399 tcg_temp_free_i32(tmp32_3);
4400 tcg_temp_free_i32(tmp32_4);
4401 } else {
4402 /* i.e. env->cc = 0 */
4403 gen_op_movi_cc(s, 0);
4404 }
4405 break;
4406 case 0xc0:
4407 case 0xc2:
4408 insn = ld_code6(env, s->pc);
4409 r1 = (insn >> 36) & 0xf;
4410 op = (insn >> 32) & 0xf;
4411 i2 = (int)insn;
4412 switch (opc) {
4413 case 0xc0:
4414 disas_c0(env, s, op, r1, i2);
4415 break;
4416 case 0xc2:
4417 disas_c2(env, s, op, r1, i2);
4418 break;
4419 default:
4420 tcg_abort();
4421 }
4422 break;
4423 case 0xd2: /* MVC D1(L,B1),D2(B2) [SS] */
4424 case 0xd4: /* NC D1(L,B1),D2(B2) [SS] */
4425 case 0xd5: /* CLC D1(L,B1),D2(B2) [SS] */
4426 case 0xd6: /* OC D1(L,B1),D2(B2) [SS] */
4427 case 0xd7: /* XC D1(L,B1),D2(B2) [SS] */
4428 case 0xdc: /* TR D1(L,B1),D2(B2) [SS] */
4429 case 0xf3: /* UNPK D1(L1,B1),D2(L2,B2) [SS] */
4430 insn = ld_code6(env, s->pc);
4431 vl = tcg_const_i32((insn >> 32) & 0xff);
4432 b1 = (insn >> 28) & 0xf;
4433 b2 = (insn >> 12) & 0xf;
4434 d1 = (insn >> 16) & 0xfff;
4435 d2 = insn & 0xfff;
4436 tmp = get_address(s, 0, b1, d1);
4437 tmp2 = get_address(s, 0, b2, d2);
4438 switch (opc) {
4439 case 0xd2:
4440 gen_op_mvc(s, (insn >> 32) & 0xff, tmp, tmp2);
4441 break;
4442 case 0xd4:
4443 potential_page_fault(s);
4444 gen_helper_nc(cc_op, cpu_env, vl, tmp, tmp2);
4445 set_cc_static(s);
4446 break;
4447 case 0xd5:
4448 gen_op_clc(s, (insn >> 32) & 0xff, tmp, tmp2);
4449 break;
4450 case 0xd6:
4451 potential_page_fault(s);
4452 gen_helper_oc(cc_op, cpu_env, vl, tmp, tmp2);
4453 set_cc_static(s);
4454 break;
4455 case 0xd7:
4456 potential_page_fault(s);
4457 gen_helper_xc(cc_op, cpu_env, vl, tmp, tmp2);
4458 set_cc_static(s);
4459 break;
4460 case 0xdc:
4461 potential_page_fault(s);
4462 gen_helper_tr(cpu_env, vl, tmp, tmp2);
4463 set_cc_static(s);
4464 break;
4465 case 0xf3:
4466 potential_page_fault(s);
4467 gen_helper_unpk(cpu_env, vl, tmp, tmp2);
4468 break;
4469 default:
4470 tcg_abort();
4471 }
4472 tcg_temp_free_i64(tmp);
4473 tcg_temp_free_i64(tmp2);
4474 break;
4475 #ifndef CONFIG_USER_ONLY
4476 case 0xda: /* MVCP D1(R1,B1),D2(B2),R3 [SS] */
4477 case 0xdb: /* MVCS D1(R1,B1),D2(B2),R3 [SS] */
4478 check_privileged(s);
4479 potential_page_fault(s);
4480 insn = ld_code6(env, s->pc);
4481 r1 = (insn >> 36) & 0xf;
4482 r3 = (insn >> 32) & 0xf;
4483 b1 = (insn >> 28) & 0xf;
4484 d1 = (insn >> 16) & 0xfff;
4485 b2 = (insn >> 12) & 0xf;
4486 d2 = insn & 0xfff;
4487 tmp = load_reg(r1);
4488 /* XXX key in r3 */
4489 tmp2 = get_address(s, 0, b1, d1);
4490 tmp3 = get_address(s, 0, b2, d2);
4491 if (opc == 0xda) {
4492 gen_helper_mvcp(cc_op, cpu_env, tmp, tmp2, tmp3);
4493 } else {
4494 gen_helper_mvcs(cc_op, cpu_env, tmp, tmp2, tmp3);
4495 }
4496 set_cc_static(s);
4497 tcg_temp_free_i64(tmp);
4498 tcg_temp_free_i64(tmp2);
4499 tcg_temp_free_i64(tmp3);
4500 break;
4501 #endif
4502 case 0xe3:
4503 insn = ld_code6(env, s->pc);
4504 debug_insn(insn);
4505 op = insn & 0xff;
4506 r1 = (insn >> 36) & 0xf;
4507 x2 = (insn >> 32) & 0xf;
4508 b2 = (insn >> 28) & 0xf;
4509 d2 = ((int)((((insn >> 16) & 0xfff)
4510 | ((insn << 4) & 0xff000)) << 12)) >> 12;
4511 disas_e3(env, s, op, r1, x2, b2, d2 );
4512 break;
4513 #ifndef CONFIG_USER_ONLY
4514 case 0xe5:
4515 /* Test Protection */
4516 check_privileged(s);
4517 insn = ld_code6(env, s->pc);
4518 debug_insn(insn);
4519 disas_e5(env, s, insn);
4520 break;
4521 #endif
4522 case 0xeb:
4523 insn = ld_code6(env, s->pc);
4524 debug_insn(insn);
4525 op = insn & 0xff;
4526 r1 = (insn >> 36) & 0xf;
4527 r3 = (insn >> 32) & 0xf;
4528 b2 = (insn >> 28) & 0xf;
4529 d2 = ((int)((((insn >> 16) & 0xfff)
4530 | ((insn << 4) & 0xff000)) << 12)) >> 12;
4531 disas_eb(env, s, op, r1, r3, b2, d2);
4532 break;
4533 case 0xed:
4534 insn = ld_code6(env, s->pc);
4535 debug_insn(insn);
4536 op = insn & 0xff;
4537 r1 = (insn >> 36) & 0xf;
4538 x2 = (insn >> 32) & 0xf;
4539 b2 = (insn >> 28) & 0xf;
4540 d2 = (short)((insn >> 16) & 0xfff);
4541 r1b = (insn >> 12) & 0xf;
4542 disas_ed(env, s, op, r1, x2, b2, d2, r1b);
4543 break;
4544 default:
4545 qemu_log_mask(LOG_UNIMP, "unimplemented opcode 0x%x\n", opc);
4546 gen_illegal_opcode(s);
4547 break;
4548 }
4549 }
4550
/* ====================================================================== */
/* Define the insn format enumeration.  Each F<n> macro discards the
   operand-field descriptors and keeps only a FMT_<name> enumerator;
   the field layouts are re-expanded into format_info[] further below.  */
#define F0(N) FMT_##N,
#define F1(N, X1) F0(N)
#define F2(N, X1, X2) F0(N)
#define F3(N, X1, X2, X3) F0(N)
#define F4(N, X1, X2, X3, X4) F0(N)
#define F5(N, X1, X2, X3, X4, X5) F0(N)

typedef enum {
#include "insn-format.def"
} DisasFormat;

#undef F0
#undef F1
#undef F2
#undef F3
#undef F4
#undef F5
4570
/* Define a structure to hold the decoded fields.  We'll store each inside
   an array indexed by an enum.  In order to conserve memory, we'll arrange
   for fields that do not exist at the same time to overlap, thus the "C"
   for compact.  For checking purposes there is an "O" for original index
   as well that will be applied to availability bitmaps.  */

/* "Original" field index: one enumerator per distinct field name, used
   as a bit number in the presentO availability bitmap.  */
enum DisasFieldIndexO {
    FLD_O_r1,
    FLD_O_r2,
    FLD_O_r3,
    FLD_O_m1,
    FLD_O_m3,
    FLD_O_m4,
    FLD_O_b1,
    FLD_O_b2,
    FLD_O_b4,
    FLD_O_d1,
    FLD_O_d2,
    FLD_O_d4,
    FLD_O_x2,
    FLD_O_l1,
    FLD_O_l2,
    FLD_O_i1,
    FLD_O_i2,
    FLD_O_i3,
    FLD_O_i4,
    FLD_O_i5
};

/* "Compact" field index: fields that never appear in the same insn
   format share a storage slot, so only NUM_C_FIELD values are needed.  */
enum DisasFieldIndexC {
    FLD_C_r1 = 0,
    FLD_C_m1 = 0,
    FLD_C_b1 = 0,
    FLD_C_i1 = 0,

    FLD_C_r2 = 1,
    FLD_C_b2 = 1,
    FLD_C_i2 = 1,

    FLD_C_r3 = 2,
    FLD_C_m3 = 2,
    FLD_C_i3 = 2,

    FLD_C_m4 = 3,
    FLD_C_b4 = 3,
    FLD_C_i4 = 3,
    FLD_C_l1 = 3,

    FLD_C_i5 = 4,
    FLD_C_d1 = 4,

    FLD_C_d2 = 5,

    FLD_C_d4 = 6,
    FLD_C_x2 = 6,
    FLD_C_l2 = 6,

    NUM_C_FIELD = 7
};

/* Decoded fields for one instruction.  op/op2 are the primary and
   secondary opcode bytes; presentC/presentO are bitmaps (indexed by the
   C and O enums above) of which fields were extracted; c[] holds the
   field values, compact-indexed.  */
struct DisasFields {
    unsigned op:8;
    unsigned op2:8;
    unsigned presentC:16;
    unsigned int presentO;
    int c[NUM_C_FIELD];
};

/* This is the way fields are to be accessed out of DisasFields: the O
   index checks availability, the C index locates the stored value.  */
#define have_field(S, F)  have_field1((S), FLD_O_##F)
#define get_field(S, F)   get_field1((S), FLD_O_##F, FLD_C_##F)
4642
4643 static bool have_field1(const DisasFields *f, enum DisasFieldIndexO c)
4644 {
4645 return (f->presentO >> c) & 1;
4646 }
4647
/* Fetch the value of a decoded field.  The field must be present
   (asserted via the original-index bitmap) before its compact slot
   is read.  */
static int get_field1(const DisasFields *f, enum DisasFieldIndexO o,
                      enum DisasFieldIndexC c)
{
    assert(have_field1(f, o));
    return f->c[c];
}
4654
/* Describe the layout of each field in each format.  */
typedef struct DisasField {
    unsigned int beg:8;     /* first bit of the field within the insn */
    unsigned int size:8;    /* width in bits; 0 means "no field here" */
    unsigned int type:2;    /* 0 = unsigned, 1 = signed, 2 = dl+dh split */
    unsigned int indexC:6;  /* compact storage slot in DisasFields.c[] */
    enum DisasFieldIndexO indexO:8;  /* bit number for presentO */
} DisasField;

/* Per-format operand layout: one DisasField per compact slot.  */
typedef struct DisasFormatInfo {
    DisasField op[NUM_C_FIELD];
} DisasFormatInfo;

/* Field-layout helpers: R register, M mask, BD base+displacement,
   BXD base+index+displacement, BDL/BXDL with a 20-bit signed (dl+dh
   split) displacement, I immediate, L length.  */
#define R(N, B)       {  B,  4, 0, FLD_C_r##N, FLD_O_r##N }
#define M(N, B)       {  B,  4, 0, FLD_C_m##N, FLD_O_m##N }
#define BD(N, BB, BD) { BB,  4, 0, FLD_C_b##N, FLD_O_b##N }, \
                      { BD, 12, 0, FLD_C_d##N, FLD_O_d##N }
#define BXD(N)        { 16,  4, 0, FLD_C_b##N, FLD_O_b##N }, \
                      { 12,  4, 0, FLD_C_x##N, FLD_O_x##N }, \
                      { 20, 12, 0, FLD_C_d##N, FLD_O_d##N }
#define BDL(N)        { 16,  4, 0, FLD_C_b##N, FLD_O_b##N }, \
                      { 20, 20, 2, FLD_C_d##N, FLD_O_d##N }
#define BXDL(N)       { 16,  4, 0, FLD_C_b##N, FLD_O_b##N }, \
                      { 12,  4, 0, FLD_C_x##N, FLD_O_x##N }, \
                      { 20, 20, 2, FLD_C_d##N, FLD_O_d##N }
#define I(N, B, S)    {  B,  S, 1, FLD_C_i##N, FLD_O_i##N }
#define L(N, B, S)    {  B,  S, 0, FLD_C_l##N, FLD_O_l##N }

#define F0(N)                     { { } },
#define F1(N, X1)                 { { X1 } },
#define F2(N, X1, X2)             { { X1, X2 } },
#define F3(N, X1, X2, X3)         { { X1, X2, X3 } },
#define F4(N, X1, X2, X3, X4)     { { X1, X2, X3, X4 } },
#define F5(N, X1, X2, X3, X4, X5) { { X1, X2, X3, X4, X5 } },

static const DisasFormatInfo format_info[] = {
#include "insn-format.def"
};

#undef F0
#undef F1
#undef F2
#undef F3
#undef F4
#undef F5
#undef R
#undef M
#undef BD
#undef BXD
#undef BDL
#undef BXDL
#undef I
#undef L
4708
/* Generally, we'll extract operands into these structures, operate upon
   them, and store them back.  See the "in1", "in2", "prep", "wout" sets
   of routines below for more details.  */
typedef struct {
    /* The g_* flags mark the corresponding TCGv as a global (a cpu
       register), which translate_one must not free.  */
    bool g_out, g_out2, g_in1, g_in2;
    TCGv_i64 out, out2, in1, in2;
    TCGv_i64 addr1;   /* effective address of the first memory operand */
} DisasOps;

/* Return values from translate_one, indicating the state of the TB.  */
typedef enum {
    /* Continue the TB.  */
    NO_EXIT,
    /* We have emitted one or more goto_tb.  No fixup required.  */
    EXIT_GOTO_TB,
    /* We are not using a goto_tb (for whatever reason), but have updated
       the PC (for whatever reason), so there's no need to do it again on
       exiting the TB.  */
    EXIT_PC_UPDATED,
    /* We are exiting the TB, but have neither emitted a goto_tb, nor
       updated the PC for the next instruction to be executed.  */
    EXIT_PC_STALE,
    /* We are ending the TB with a noreturn function call, e.g. longjmp.
       No following code will be executed.  */
    EXIT_NORETURN,
} ExitStatus;
4735
/* Architectural facility an insn belongs to; used to gate availability.  */
typedef enum DisasFacility {
    FAC_Z,                  /* zarch (default) */
    FAC_CASS,               /* compare and swap and store */
    FAC_CASS2,              /* compare and swap and store 2 */
    FAC_DFP,                /* decimal floating point */
    FAC_DFPR,               /* decimal floating point rounding */
    FAC_DO,                 /* distinct operands */
    FAC_EE,                 /* execute extensions */
    FAC_EI,                 /* extended immediate */
    FAC_FPE,                /* floating point extension */
    FAC_FPSSH,              /* floating point support sign handling */
    FAC_FPRGR,              /* FPR-GR transfer */
    FAC_GIE,                /* general instructions extension */
    FAC_HFP_MA,             /* HFP multiply-and-add/subtract */
    FAC_HW,                 /* high-word */
    FAC_IEEEE_SIM,          /* IEEE exception simulation */
    FAC_LOC,                /* load/store on condition */
    FAC_LD,                 /* long displacement */
    FAC_PC,                 /* population count */
    FAC_SCF,                /* store clock fast */
    FAC_SFLE,               /* store facility list extended */
} DisasFacility;

/* One entry of the new-style decode table: opcode, format, required
   facility, and the helper callbacks that translate_one invokes in
   order (in1, in2, prep, op, wout, cout).  */
struct DisasInsn {
    unsigned opc:16;
    DisasFormat fmt:6;
    DisasFacility fac:6;

    const char *name;

    void (*help_in1)(DisasContext *, DisasFields *, DisasOps *);
    void (*help_in2)(DisasContext *, DisasFields *, DisasOps *);
    void (*help_prep)(DisasContext *, DisasFields *, DisasOps *);
    void (*help_wout)(DisasContext *, DisasFields *, DisasOps *);
    void (*help_cout)(DisasContext *, DisasOps *);
    ExitStatus (*help_op)(DisasContext *, DisasOps *);

    uint64_t data;          /* per-insn private data for the callbacks */
};
4775
/* ====================================================================== */
/* The operations.  These perform the bulk of the work for any insn,
   usually after the operands have been loaded and output initialized.  */

/* out = in1 + in2; any condition code is set by the cout callback.  */
static ExitStatus op_add(DisasContext *s, DisasOps *o)
{
    tcg_gen_add_i64(o->out, o->in1, o->in2);
    return NO_EXIT;
}

/* out = in1 * in2 (low 64 bits of the product).  */
static ExitStatus op_mul(DisasContext *s, DisasOps *o)
{
    tcg_gen_mul_i64(o->out, o->in1, o->in2);
    return NO_EXIT;
}

/* out = in1 - in2; any condition code is set by the cout callback.  */
static ExitStatus op_sub(DisasContext *s, DisasOps *o)
{
    tcg_gen_sub_i64(o->out, o->in1, o->in2);
    return NO_EXIT;
}
4797
/* ====================================================================== */
/* The "Cc OUTput" generators.  Given the generated output (and in some cases
   the original inputs), update the various cc data structures in order to
   be able to compute the new condition code.  */

/* cc from signed 32-bit addition.  */
static void cout_adds32(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_ADD_32, o->in1, o->in2, o->out);
}

/* cc from signed 64-bit addition.  */
static void cout_adds64(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_ADD_64, o->in1, o->in2, o->out);
}

/* cc from unsigned 32-bit addition.  */
static void cout_addu32(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_ADDU_32, o->in1, o->in2, o->out);
}

/* cc from unsigned 64-bit addition.  */
static void cout_addu64(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_ADDU_64, o->in1, o->in2, o->out);
}

/* cc from signed 32-bit subtraction.  */
static void cout_subs32(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_SUB_32, o->in1, o->in2, o->out);
}

/* cc from signed 64-bit subtraction.  */
static void cout_subs64(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_SUB_64, o->in1, o->in2, o->out);
}

/* cc from unsigned 32-bit subtraction.  */
static void cout_subu32(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_SUBU_32, o->in1, o->in2, o->out);
}

/* cc from unsigned 64-bit subtraction.  */
static void cout_subu64(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_SUBU_64, o->in1, o->in2, o->out);
}
4842
/* ====================================================================== */
/* The "PREPeration" generators.  These initialize the DisasOps.OUT fields
   with the TCG register to which we will write.  Used in combination with
   the "wout" generators, in some cases we need a new temporary, and in
   some cases we can write to a TCG global.  */

/* Allocate a fresh temporary for the result.  */
static void prep_new(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->out = tcg_temp_new_i64();
}

/* Write directly into the r1 register global; g_out prevents freeing.  */
static void prep_r1(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->out = regs[get_field(f, r1)];
    o->g_out = true;
}
4859
/* ====================================================================== */
/* The "Write OUTput" generators.  These generally perform some non-trivial
   copy of data to TCG globals, or to main memory.  The trivial cases are
   generally handled by having a "prep" generator install the TCG global
   as the destination of the operation.  */

/* Store the low 32 bits of the result into register r1.  */
static void wout_r1_32(DisasContext *s, DisasFields *f, DisasOps *o)
{
    store_reg32_i64(get_field(f, r1), o->out);
}

/* Store the low 32 bits of the result to the first-operand address.  */
static void wout_m1_32(DisasContext *s, DisasFields *f, DisasOps *o)
{
    tcg_gen_qemu_st32(o->out, o->addr1, get_mem_index(s));
}

/* Store all 64 bits of the result to the first-operand address.  */
static void wout_m1_64(DisasContext *s, DisasFields *f, DisasOps *o)
{
    tcg_gen_qemu_st64(o->out, o->addr1, get_mem_index(s));
}
4880
/* ====================================================================== */
/* The "INput 1" generators.  These load the first operand to an insn.  */

/* in1 = value of register r1.  */
static void in1_r1(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in1 = load_reg(get_field(f, r1));
}

/* in1 = the r1 register global itself; g_in1 prevents freeing.  */
static void in1_r1_o(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in1 = regs[get_field(f, r1)];
    o->g_in1 = true;
}

/* in1 = value of register r2.  */
static void in1_r2(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in1 = load_reg(get_field(f, r2));
}

/* in1 = value of register r3.  */
static void in1_r3(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in1 = load_reg(get_field(f, r3));
}

/* Compute the b1+d1 effective address into addr1 (no load).  */
static void in1_la1(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->addr1 = get_address(s, 0, get_field(f, b1), get_field(f, d1));
}

/* in1 = sign-extended 32-bit load from the first-operand address.  */
static void in1_m1_32s(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in1_la1(s, f, o);
    o->in1 = tcg_temp_new_i64();
    tcg_gen_qemu_ld32s(o->in1, o->addr1, get_mem_index(s));
}

/* in1 = zero-extended 32-bit load from the first-operand address.  */
static void in1_m1_32u(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in1_la1(s, f, o);
    o->in1 = tcg_temp_new_i64();
    tcg_gen_qemu_ld32u(o->in1, o->addr1, get_mem_index(s));
}

/* in1 = 64-bit load from the first-operand address.  */
static void in1_m1_64(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in1_la1(s, f, o);
    o->in1 = tcg_temp_new_i64();
    tcg_gen_qemu_ld64(o->in1, o->addr1, get_mem_index(s));
}
4930
/* ====================================================================== */
/* The "INput 2" generators.  These load the second operand to an insn.  */

/* in2 = value of register r2.  */
static void in2_r2(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = load_reg(get_field(f, r2));
}

/* in2 = the r2 register global itself; g_in2 prevents freeing.  */
static void in2_r2_o(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = regs[get_field(f, r2)];
    o->g_in2 = true;
}

/* in2 = value of register r3.  */
static void in2_r3(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = load_reg(get_field(f, r3));
}

/* in2 = low 32 bits of r2, sign-extended to 64 bits.  */
static void in2_r2_32s(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = tcg_temp_new_i64();
    tcg_gen_ext32s_i64(o->in2, regs[get_field(f, r2)]);
}

/* in2 = low 32 bits of r2, zero-extended to 64 bits.  */
static void in2_r2_32u(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = tcg_temp_new_i64();
    tcg_gen_ext32u_i64(o->in2, regs[get_field(f, r2)]);
}

/* in2 = the b2(+x2)+d2 effective address; x2 only if the format has it.  */
static void in2_a2(DisasContext *s, DisasFields *f, DisasOps *o)
{
    int x2 = have_field(f, x2) ? get_field(f, x2) : 0;
    o->in2 = get_address(s, x2, get_field(f, b2), get_field(f, d2));
}

/* The in2_m2_* helpers reuse in2 first as the address, then overwrite
   it in place with the loaded value.  */

/* in2 = sign-extended 16-bit load from the second-operand address.  */
static void in2_m2_16s(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in2_a2(s, f, o);
    tcg_gen_qemu_ld16s(o->in2, o->in2, get_mem_index(s));
}

/* in2 = sign-extended 32-bit load from the second-operand address.  */
static void in2_m2_32s(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in2_a2(s, f, o);
    tcg_gen_qemu_ld32s(o->in2, o->in2, get_mem_index(s));
}

/* in2 = zero-extended 32-bit load from the second-operand address.  */
static void in2_m2_32u(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in2_a2(s, f, o);
    tcg_gen_qemu_ld32u(o->in2, o->in2, get_mem_index(s));
}

/* in2 = 64-bit load from the second-operand address.  */
static void in2_m2_64(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in2_a2(s, f, o);
    tcg_gen_qemu_ld64(o->in2, o->in2, get_mem_index(s));
}

/* in2 = the i2 immediate, as extracted (sign per the field descriptor).  */
static void in2_i2(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = tcg_const_i64(get_field(f, i2));
}

/* in2 = the i2 immediate, zero-extended from 32 bits.  */
static void in2_i2_32u(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = tcg_const_i64((uint32_t)get_field(f, i2));
}
5001
/* ====================================================================== */

/* Find opc within the table of insns.  This is formulated as a switch
   statement so that (1) we get compile-time notice of cut-paste errors
   for duplicated opcodes, and (2) the compiler generates the binary
   search tree, rather than us having to post-process the table.  */

/* C() is a table entry with no private data; it forwards to D() with 0.  */
#define C(OPC, NM, FT, FC, I1, I2, P, W, OP, CC) \
    D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, 0)

/* First expansion of insn-data.def: an enum of insn_<NAME> indices.  */
#define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) insn_ ## NM,

enum DisasInsnEnum {
#include "insn-data.def"
};

/* Second expansion: the DisasInsn descriptor for each entry.  */
#undef D
#define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) { \
    .opc = OPC, \
    .fmt = FMT_##FT, \
    .fac = FAC_##FC, \
    .name = #NM, \
    .help_in1 = in1_##I1, \
    .help_in2 = in2_##I2, \
    .help_prep = prep_##P, \
    .help_wout = wout_##W, \
    .help_cout = cout_##CC, \
    .help_op = op_##OP, \
    .data = D \
},

/* Allow 0 to be used for NULL in the table below.  */
#define in1_0 NULL
#define in2_0 NULL
#define prep_0 NULL
#define wout_0 NULL
#define cout_0 NULL
#define op_0 NULL

static const DisasInsn insn_info[] = {
#include "insn-data.def"
};

/* Third expansion: map a 16-bit (op << 8 | op2) key to its descriptor.  */
#undef D
#define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) \
    case OPC: return &insn_info[insn_ ## NM];

static const DisasInsn *lookup_opc(uint16_t opc)
{
    switch (opc) {
#include "insn-data.def"
    default:
        return NULL;
    }
}

#undef D
#undef C
5060
/* Extract a field from the insn.  The INSN should be left-aligned in
   the uint64_t so that we can more easily utilize the big-bit-endian
   definitions we extract from the Principles of Operation.  */

static void extract_field(DisasFields *o, const DisasField *f, uint64_t insn)
{
    uint32_t r, m;

    /* A zero-sized descriptor means "no field in this slot".  */
    if (f->size == 0) {
        return;
    }

    /* Zero extract the field from the insn.  */
    r = (insn << f->beg) >> (64 - f->size);

    /* Sign-extend, or un-swap the field as necessary.  */
    switch (f->type) {
    case 0: /* unsigned */
        break;
    case 1: /* signed */
        assert(f->size <= 32);
        /* Sign-extend via the xor/subtract trick on the top bit.  */
        m = 1u << (f->size - 1);
        r = (r ^ m) - m;
        break;
    case 2: /* dl+dh split, signed 20 bit. */
        /* The 20 extracted bits are DL (high 12) followed by DH (low 8);
           reassemble the signed displacement as (DH << 12) | DL.  */
        r = ((int8_t)r << 12) | (r >> 8);
        break;
    default:
        abort();
    }

    /* Validate that the "compressed" encoding we selected above is valid.
       I.e. we haven't made two different original fields overlap.  */
    assert(((o->presentC >> f->indexC) & 1) == 0);
    o->presentC |= 1 << f->indexC;
    o->presentO |= 1 << f->indexO;

    o->c[f->indexC] = r;
}
5100
/* Lookup the insn at the current PC, extracting the operands into O and
   returning the info struct for the insn.  Returns NULL for invalid insn.  */

static const DisasInsn *extract_insn(CPUS390XState *env, DisasContext *s,
                                     DisasFields *f)
{
    uint64_t insn, pc = s->pc;
    int op, op2, ilen;
    const DisasInsn *info;

    /* The first opcode byte determines the insn length.  */
    insn = ld_code2(env, pc);
    op = (insn >> 8) & 0xff;
    ilen = get_ilen(op);
    s->next_pc = s->pc + ilen;

    /* Re-read the full insn, left-aligned in the 64-bit word so that
       big-bit-endian field positions can be used directly.  */
    switch (ilen) {
    case 2:
        insn = insn << 48;
        break;
    case 4:
        insn = ld_code4(env, pc) << 32;
        break;
    case 6:
        insn = (insn << 48) | (ld_code4(env, pc + 2) << 16);
        break;
    default:
        abort();
    }

    /* We can't actually determine the insn format until we've looked up
       the full insn opcode.  Which we can't do without locating the
       secondary opcode.  Assume by default that OP2 is at bit 40; for
       those smaller insns that don't actually have a secondary opcode
       this will correctly result in OP2 = 0.  */
    switch (op) {
    case 0x01: /* E */
    case 0x80: /* S */
    case 0x82: /* S */
    case 0x93: /* S */
    case 0xb2: /* S, RRF, RRE */
    case 0xb3: /* RRE, RRD, RRF */
    case 0xb9: /* RRE, RRF */
    case 0xe5: /* SSE, SIL */
        /* Secondary opcode in the second byte.  */
        op2 = (insn << 8) >> 56;
        break;
    case 0xa5: /* RI */
    case 0xa7: /* RI */
    case 0xc0: /* RIL */
    case 0xc2: /* RIL */
    case 0xc4: /* RIL */
    case 0xc6: /* RIL */
    case 0xc8: /* SSF */
    case 0xcc: /* RIL */
        /* Secondary opcode in the low nibble of the second byte.  */
        op2 = (insn << 12) >> 60;
        break;
    case 0xd0 ... 0xdf: /* SS */
    case 0xe1: /* SS */
    case 0xe2: /* SS */
    case 0xe8: /* SS */
    case 0xe9: /* SS */
    case 0xea: /* SS */
    case 0xee ... 0xf3: /* SS */
    case 0xf8 ... 0xfd: /* SS */
        /* No secondary opcode at all.  */
        op2 = 0;
        break;
    default:
        op2 = (insn << 40) >> 56;
        break;
    }

    memset(f, 0, sizeof(*f));
    f->op = op;
    f->op2 = op2;

    /* Lookup the instruction.  */
    info = lookup_opc(op << 8 | op2);

    /* If we found it, extract the operands.  */
    if (info != NULL) {
        DisasFormat fmt = info->fmt;
        int i;

        for (i = 0; i < NUM_C_FIELD; ++i) {
            extract_field(f, &format_info[fmt].op[i], insn);
        }
    }
    return info;
}
5189
/* Translate the single instruction at s->pc.  Instructions present in
   the new-style decode table are handled through the DisasInsn helper
   pipeline; everything else falls back to the old interpreter.  Advances
   s->pc and returns the resulting TB exit status.  */
static ExitStatus translate_one(CPUS390XState *env, DisasContext *s)
{
    const DisasInsn *insn;
    ExitStatus ret = NO_EXIT;
    DisasFields f;
    DisasOps o;

    insn = extract_insn(env, s, &f);

    /* If not found, try the old interpreter.  This includes ILLOPC.  */
    if (insn == NULL) {
        disas_s390_insn(env, s);
        /* Map the old-style DISAS_* result onto ExitStatus.  */
        switch (s->is_jmp) {
        case DISAS_NEXT:
            ret = NO_EXIT;
            break;
        case DISAS_TB_JUMP:
            ret = EXIT_GOTO_TB;
            break;
        case DISAS_JUMP:
            ret = EXIT_PC_UPDATED;
            break;
        case DISAS_EXCP:
            ret = EXIT_NORETURN;
            break;
        default:
            abort();
        }

        s->pc = s->next_pc;
        return ret;
    }

    /* Set up the structures we use to communicate with the helpers.  */
    s->insn = insn;
    s->fields = &f;
    o.g_out = o.g_out2 = o.g_in1 = o.g_in2 = false;
    TCGV_UNUSED_I64(o.out);
    TCGV_UNUSED_I64(o.out2);
    TCGV_UNUSED_I64(o.in1);
    TCGV_UNUSED_I64(o.in2);
    TCGV_UNUSED_I64(o.addr1);

    /* Implement the instruction: load inputs, prepare the output,
       perform the operation, write back, then update cc state.  */
    if (insn->help_in1) {
        insn->help_in1(s, &f, &o);
    }
    if (insn->help_in2) {
        insn->help_in2(s, &f, &o);
    }
    if (insn->help_prep) {
        insn->help_prep(s, &f, &o);
    }
    if (insn->help_op) {
        ret = insn->help_op(s, &o);
    }
    if (insn->help_wout) {
        insn->help_wout(s, &f, &o);
    }
    if (insn->help_cout) {
        insn->help_cout(s, &o);
    }

    /* Free any temporaries created by the helpers; the g_* flags mark
       TCG globals that must not be freed.  */
    if (!TCGV_IS_UNUSED_I64(o.out) && !o.g_out) {
        tcg_temp_free_i64(o.out);
    }
    if (!TCGV_IS_UNUSED_I64(o.out2) && !o.g_out2) {
        tcg_temp_free_i64(o.out2);
    }
    if (!TCGV_IS_UNUSED_I64(o.in1) && !o.g_in1) {
        tcg_temp_free_i64(o.in1);
    }
    if (!TCGV_IS_UNUSED_I64(o.in2) && !o.g_in2) {
        tcg_temp_free_i64(o.in2);
    }
    if (!TCGV_IS_UNUSED_I64(o.addr1)) {
        tcg_temp_free_i64(o.addr1);
    }

    /* Advance to the next instruction.  */
    s->pc = s->next_pc;
    return ret;
}
5274
/* Translate a block of guest instructions into TCG ops, stopping at a
   breakpoint, a control-flow change, a page boundary, single-stepping,
   or op-buffer/insn-count exhaustion.  With SEARCH_PC set, per-op
   PC/cc-op bookkeeping is recorded (consumed by restore_state_to_opc).  */
static inline void gen_intermediate_code_internal(CPUS390XState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext dc;
    target_ulong pc_start;
    uint64_t next_page_start;
    uint16_t *gen_opc_end;
    int j, lj = -1;
    int num_insns, max_insns;
    CPUBreakpoint *bp;
    ExitStatus status;
    bool do_debug;

    pc_start = tb->pc;

    /* 31-bit mode */
    if (!(tb->flags & FLAG_MASK_64)) {
        pc_start &= 0x7fffffff;
    }

    dc.tb = tb;
    dc.pc = pc_start;
    dc.cc_op = CC_OP_DYNAMIC;
    do_debug = dc.singlestep_enabled = env->singlestep_enabled;
    dc.is_jmp = DISAS_NEXT;

    gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;

    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }

    gen_icount_start();

    do {
        if (search_pc) {
            /* Record the guest PC and cc-op state for this op index,
               zero-filling any gap since the previous insn.  */
            j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j) {
                    tcg_ctx.gen_opc_instr_start[lj++] = 0;
                }
            }
            tcg_ctx.gen_opc_pc[lj] = dc.pc;
            gen_opc_cc_op[lj] = dc.cc_op;
            tcg_ctx.gen_opc_instr_start[lj] = 1;
            tcg_ctx.gen_opc_icount[lj] = num_insns;
        }
        if (++num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
            tcg_gen_debug_insn_start(dc.pc);
        }

        /* Stop before a breakpointed insn and fall into the debug exit.  */
        status = NO_EXIT;
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc.pc) {
                    status = EXIT_PC_STALE;
                    do_debug = true;
                    break;
                }
            }
        }
        if (status == NO_EXIT) {
            status = translate_one(env, &dc);
        }

        /* If we reach a page boundary, are single stepping,
           or exhaust instruction count, stop generation.  */
        if (status == NO_EXIT
            && (dc.pc >= next_page_start
                || tcg_ctx.gen_opc_ptr >= gen_opc_end
                || num_insns >= max_insns
                || singlestep
                || env->singlestep_enabled)) {
            status = EXIT_PC_STALE;
        }
    } while (status == NO_EXIT);

    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }

    switch (status) {
    case EXIT_GOTO_TB:
    case EXIT_NORETURN:
        break;
    case EXIT_PC_STALE:
        update_psw_addr(&dc);
        /* FALLTHRU */
    case EXIT_PC_UPDATED:
        if (singlestep && dc.cc_op != CC_OP_DYNAMIC) {
            gen_op_calc_cc(&dc);
        } else {
            /* Next TB starts off with CC_OP_DYNAMIC,
               so make sure the cc op type is in env */
            gen_op_set_cc_op(&dc);
        }
        if (do_debug) {
            gen_exception(EXCP_DEBUG);
        } else {
            /* Generate the return instruction */
            tcg_gen_exit_tb(0);
        }
        break;
    default:
        abort();
    }

    gen_icount_end(tb, num_insns);
    *tcg_ctx.gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        /* Zero-fill the bookkeeping arrays up to the final op index.  */
        j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
        lj++;
        while (lj <= j) {
            tcg_ctx.gen_opc_instr_start[lj++] = 0;
        }
    } else {
        tb->size = dc.pc - pc_start;
        tb->icount = num_insns;
    }

#if defined(S390X_DEBUG_DISAS)
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(env, pc_start, dc.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
5413
/* Translate a TB without the PC-search bookkeeping.  */
void gen_intermediate_code (CPUS390XState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
5418
/* Translate a TB with PC-search bookkeeping (for restore_state_to_opc).  */
void gen_intermediate_code_pc (CPUS390XState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
5423
5424 void restore_state_to_opc(CPUS390XState *env, TranslationBlock *tb, int pc_pos)
5425 {
5426 int cc_op;
5427 env->psw.addr = tcg_ctx.gen_opc_pc[pc_pos];
5428 cc_op = gen_opc_cc_op[pc_pos];
5429 if ((cc_op != CC_OP_DYNAMIC) && (cc_op != CC_OP_STATIC)) {
5430 env->cc_op = cc_op;
5431 }
5432 }