]> git.proxmox.com Git - qemu.git/blob - target-s390x/translate.c
target-s390: Implement ADD LOGICAL WITH SIGNED IMMEDIATE
[qemu.git] / target-s390x / translate.c
1 /*
2 * S/390 translation
3 *
4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
6 *
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
11 *
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19 */
20
21 /* #define DEBUG_INLINE_BRANCHES */
22 #define S390X_DEBUG_DISAS
23 /* #define S390X_DEBUG_DISAS_VERBOSE */
24
25 #ifdef S390X_DEBUG_DISAS_VERBOSE
26 # define LOG_DISAS(...) qemu_log(__VA_ARGS__)
27 #else
28 # define LOG_DISAS(...) do { } while (0)
29 #endif
30
31 #include "cpu.h"
32 #include "disas/disas.h"
33 #include "tcg-op.h"
34 #include "qemu/log.h"
35
36 /* global register indexes */
37 static TCGv_ptr cpu_env;
38
39 #include "exec/gen-icount.h"
40 #include "helper.h"
41 #define GEN_HELPER 1
42 #include "helper.h"
43
44
45 /* Information that (most) every instruction needs to manipulate. */
46 typedef struct DisasContext DisasContext;
47 typedef struct DisasInsn DisasInsn;
48 typedef struct DisasFields DisasFields;
49
50 struct DisasContext {
51 struct TranslationBlock *tb;
52 const DisasInsn *insn;
53 DisasFields *fields;
54 uint64_t pc, next_pc;
55 enum cc_op cc_op;
56 bool singlestep_enabled;
57 int is_jmp;
58 };
59
60 /* Information carried about a condition to be evaluated. */
61 typedef struct {
62 TCGCond cond:8;
63 bool is_64;
64 bool g1;
65 bool g2;
66 union {
67 struct { TCGv_i64 a, b; } s64;
68 struct { TCGv_i32 a, b; } s32;
69 } u;
70 } DisasCompare;
71
72 #define DISAS_EXCP 4
73
74 static void gen_op_calc_cc(DisasContext *s);
75
76 #ifdef DEBUG_INLINE_BRANCHES
77 static uint64_t inline_branch_hit[CC_OP_MAX];
78 static uint64_t inline_branch_miss[CC_OP_MAX];
79 #endif
80
/* Dump the raw instruction word; compiles to a no-op unless verbose
   disassembly logging (S390X_DEBUG_DISAS_VERBOSE) is enabled.  */
static inline void debug_insn(uint64_t insn)
{
    LOG_DISAS("insn: 0x%" PRIx64 "\n", insn);
}
85
/* Compute the link-register value for PC: in 31-bit addressing mode
   (FLAG_MASK_64 clear, FLAG_MASK_32 set) the top bit of the 32-bit
   address is forced on; otherwise the PC is returned unchanged.  */
static inline uint64_t pc_to_link_info(DisasContext *s, uint64_t pc)
{
    if (!(s->tb->flags & FLAG_MASK_64)) {
        if (s->tb->flags & FLAG_MASK_32) {
            return pc | 0x80000000;
        }
    }
    return pc;
}
95
96 void cpu_dump_state(CPUS390XState *env, FILE *f, fprintf_function cpu_fprintf,
97 int flags)
98 {
99 int i;
100
101 if (env->cc_op > 3) {
102 cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %15s\n",
103 env->psw.mask, env->psw.addr, cc_name(env->cc_op));
104 } else {
105 cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %02x\n",
106 env->psw.mask, env->psw.addr, env->cc_op);
107 }
108
109 for (i = 0; i < 16; i++) {
110 cpu_fprintf(f, "R%02d=%016" PRIx64, i, env->regs[i]);
111 if ((i % 4) == 3) {
112 cpu_fprintf(f, "\n");
113 } else {
114 cpu_fprintf(f, " ");
115 }
116 }
117
118 for (i = 0; i < 16; i++) {
119 cpu_fprintf(f, "F%02d=%016" PRIx64, i, env->fregs[i].ll);
120 if ((i % 4) == 3) {
121 cpu_fprintf(f, "\n");
122 } else {
123 cpu_fprintf(f, " ");
124 }
125 }
126
127 #ifndef CONFIG_USER_ONLY
128 for (i = 0; i < 16; i++) {
129 cpu_fprintf(f, "C%02d=%016" PRIx64, i, env->cregs[i]);
130 if ((i % 4) == 3) {
131 cpu_fprintf(f, "\n");
132 } else {
133 cpu_fprintf(f, " ");
134 }
135 }
136 #endif
137
138 #ifdef DEBUG_INLINE_BRANCHES
139 for (i = 0; i < CC_OP_MAX; i++) {
140 cpu_fprintf(f, " %15s = %10ld\t%10ld\n", cc_name(i),
141 inline_branch_miss[i], inline_branch_hit[i]);
142 }
143 #endif
144
145 cpu_fprintf(f, "\n");
146 }
147
148 static TCGv_i64 psw_addr;
149 static TCGv_i64 psw_mask;
150
151 static TCGv_i32 cc_op;
152 static TCGv_i64 cc_src;
153 static TCGv_i64 cc_dst;
154 static TCGv_i64 cc_vr;
155
156 static char cpu_reg_names[32][4];
157 static TCGv_i64 regs[16];
158 static TCGv_i64 fregs[16];
159
160 static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];
161
/* One-time TCG initialization: create the global TCG values that alias
   the architectural state inside CPUS390XState (PSW address/mask, the
   lazy condition-code operands cc_op/cc_src/cc_dst/cc_vr, and the 16
   general plus 16 floating-point registers), then register the helper
   functions via the GEN_HELPER=2 expansion of helper.h.  */
void s390x_translate_init(void)
{
    int i;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    psw_addr = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUS390XState, psw.addr),
                                      "psw_addr");
    psw_mask = tcg_global_mem_new_i64(TCG_AREG0,
                                      offsetof(CPUS390XState, psw.mask),
                                      "psw_mask");

    cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUS390XState, cc_op),
                                   "cc_op");
    cc_src = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_src),
                                    "cc_src");
    cc_dst = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_dst),
                                    "cc_dst");
    cc_vr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUS390XState, cc_vr),
                                   "cc_vr");

    /* Names live in cpu_reg_names: slots 0-15 are "r0".."r15", ...  */
    for (i = 0; i < 16; i++) {
        snprintf(cpu_reg_names[i], sizeof(cpu_reg_names[0]), "r%d", i);
        regs[i] = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUS390XState, regs[i]),
                                     cpu_reg_names[i]);
    }

    /* ... and slots 16-31 are "f0".."f15".  */
    for (i = 0; i < 16; i++) {
        snprintf(cpu_reg_names[i + 16], sizeof(cpu_reg_names[0]), "f%d", i);
        fregs[i] = tcg_global_mem_new(TCG_AREG0,
                                      offsetof(CPUS390XState, fregs[i].d),
                                      cpu_reg_names[i + 16]);
    }

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"
}
201
/* Return a fresh i64 temp holding the value of general register REG.
   Caller owns (and frees) the returned temp.  */
static inline TCGv_i64 load_reg(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_mov_i64(r, regs[reg]);
    return r;
}

/* Return a fresh i64 temp holding the value of FP register REG.  */
static inline TCGv_i64 load_freg(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_mov_i64(r, fregs[reg]);
    return r;
}

/* Return a fresh i32 temp with the high 32 bits (the short-float part)
   of FP register REG.  On 32-bit hosts the high half of the i64 global
   is copied directly; on 64-bit hosts the shift is performed through an
   i64 view of the i32 temp.  */
static inline TCGv_i32 load_freg32(int reg)
{
    TCGv_i32 r = tcg_temp_new_i32();
#if HOST_LONG_BITS == 32
    tcg_gen_mov_i32(r, TCGV_HIGH(fregs[reg]));
#else
    tcg_gen_shri_i64(MAKE_TCGV_I64(GET_TCGV_I32(r)), fregs[reg], 32);
#endif
    return r;
}

/* Return a fresh i32 temp with the low 32 bits of register REG.  */
static inline TCGv_i32 load_reg32(int reg)
{
    TCGv_i32 r = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(r, regs[reg]);
    return r;
}

/* Return a fresh i64 temp with the low 32 bits of register REG,
   sign-extended to 64 bits.  */
static inline TCGv_i64 load_reg32_i64(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_ext32s_i64(r, regs[reg]);
    return r;
}
240
/* Store V into all 64 bits of general register REG.  */
static inline void store_reg(int reg, TCGv_i64 v)
{
    tcg_gen_mov_i64(regs[reg], v);
}

/* Store V into all 64 bits of FP register REG.  */
static inline void store_freg(int reg, TCGv_i64 v)
{
    tcg_gen_mov_i64(fregs[reg], v);
}

/* Store the 32-bit value V into the low half of register REG.  */
static inline void store_reg32(int reg, TCGv_i32 v)
{
    /* 32 bit register writes keep the upper half */
#if HOST_LONG_BITS == 32
    tcg_gen_mov_i32(TCGV_LOW(regs[reg]), v);
#else
    tcg_gen_deposit_i64(regs[reg], regs[reg],
                        MAKE_TCGV_I64(GET_TCGV_I32(v)), 0, 32);
#endif
}

/* Store the low 32 bits of the i64 value V into register REG.  */
static inline void store_reg32_i64(int reg, TCGv_i64 v)
{
    /* 32 bit register writes keep the upper half */
    tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 32);
}

/* Store the low 16 bits of V into register REG.  */
static inline void store_reg16(int reg, TCGv_i32 v)
{
    /* 16 bit register writes keep the upper bytes */
#if HOST_LONG_BITS == 32
    tcg_gen_deposit_i32(TCGV_LOW(regs[reg]), TCGV_LOW(regs[reg]), v, 0, 16);
#else
    tcg_gen_deposit_i64(regs[reg], regs[reg],
                        MAKE_TCGV_I64(GET_TCGV_I32(v)), 0, 16);
#endif
}

/* Store the low 8 bits of V into register REG.  */
static inline void store_reg8(int reg, TCGv_i64 v)
{
    /* 8 bit register writes keep the upper bytes */
    tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 8);
}

/* Store the 32-bit value V into the high half (short-float slot) of FP
   register REG, preserving the low half.  */
static inline void store_freg32(int reg, TCGv_i32 v)
{
    /* 32 bit register writes keep the lower half */
#if HOST_LONG_BITS == 32
    tcg_gen_mov_i32(TCGV_HIGH(fregs[reg]), v);
#else
    tcg_gen_deposit_i64(fregs[reg], fregs[reg],
                        MAKE_TCGV_I64(GET_TCGV_I32(v)), 32, 32);
#endif
}
295
/* Flush the translator's current PC into the architectural psw.addr.  */
static inline void update_psw_addr(DisasContext *s)
{
    /* psw.addr */
    tcg_gen_movi_i64(psw_addr, s->pc);
}

/* Called before an operation that may take a page fault in system mode:
   bring psw.addr and the condition code up to date so the fault handler
   observes consistent state.  A no-op in user-only builds.  */
static inline void potential_page_fault(DisasContext *s)
{
#ifndef CONFIG_USER_ONLY
    update_psw_addr(s);
    gen_op_calc_cc(s);
#endif
}
309
/* Fetch 2 bytes of instruction text at guest address PC, zero-extended.  */
static inline uint64_t ld_code2(CPUS390XState *env, uint64_t pc)
{
    return (uint64_t)cpu_lduw_code(env, pc);
}

/* Fetch 4 bytes of instruction text at guest address PC, zero-extended.  */
static inline uint64_t ld_code4(CPUS390XState *env, uint64_t pc)
{
    return (uint64_t)(uint32_t)cpu_ldl_code(env, pc);
}

/* Fetch a 6-byte opcode as one 48-bit value: 2-byte fetch in bits 32-47,
   4-byte fetch in the low 32 bits.  */
static inline uint64_t ld_code6(CPUS390XState *env, uint64_t pc)
{
    return (ld_code2(env, pc) << 32) | ld_code4(env, pc + 2);
}
324
/* Map the PSW address-space-control bits (cached in tb->flags) to the
   softmmu memory index: 0 = primary, 1 = secondary, 2 = home space.
   Any other encoding aborts translation.  */
static inline int get_mem_index(DisasContext *s)
{
    switch (s->tb->flags & FLAG_MASK_ASC) {
    case PSW_ASC_PRIMARY >> 32:
        return 0;
    case PSW_ASC_SECONDARY >> 32:
        return 1;
    case PSW_ASC_HOME >> 32:
        return 2;
    default:
        tcg_abort();
        break;
    }
}
339
340 static void gen_exception(int excp)
341 {
342 TCGv_i32 tmp = tcg_const_i32(excp);
343 gen_helper_exception(cpu_env, tmp);
344 tcg_temp_free_i32(tmp);
345 }
346
/* Raise program exception CODE for the current instruction: record the
   code and instruction length in env, advance the PSW past the
   instruction, materialize the condition code, call the exception
   helper, and mark the TB as finished.  */
static void gen_program_exception(DisasContext *s, int code)
{
    TCGv_i32 tmp;

    /* Remember what pgm exeption this was. */
    tmp = tcg_const_i32(code);
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUS390XState, int_pgm_code));
    tcg_temp_free_i32(tmp);

    /* Instruction length in bytes, derived from the decode cursor.  */
    tmp = tcg_const_i32(s->next_pc - s->pc);
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUS390XState, int_pgm_ilen));
    tcg_temp_free_i32(tmp);

    /* Advance past instruction. */
    s->pc = s->next_pc;
    update_psw_addr(s);

    /* Save off cc. */
    gen_op_calc_cc(s);

    /* Trigger exception. */
    gen_exception(EXCP_PGM);

    /* End TB here. */
    s->is_jmp = DISAS_EXCP;
}
373
/* Report an illegal/undefined opcode.  NOTE(review): this raises
   PGM_SPECIFICATION; confirm whether an operation exception
   (PGM_OPERATION) is the architecturally correct code here.  */
static inline void gen_illegal_opcode(DisasContext *s)
{
    gen_program_exception(s, PGM_SPECIFICATION);
}

/* Raise a privileged-operation exception when running in problem state
   (PSW P bit set in the cached tb->flags).  */
static inline void check_privileged(DisasContext *s)
{
    if (s->tb->flags & (PSW_MASK_PSTATE >> 32)) {
        gen_program_exception(s, PGM_PRIVILEGED);
    }
}
385
/* Compute the effective address index(X2) + base(B2) + displacement(D2)
   into a freshly allocated temp (caller frees).  Register numbers of 0
   mean "no index/base".  In 31-bit mode the displacement is masked up
   front and any register contribution is masked after the sum.  */
static TCGv_i64 get_address(DisasContext *s, int x2, int b2, int d2)
{
    TCGv_i64 tmp;

    /* 31-bitify the immediate part; register contents are dealt with below */
    if (!(s->tb->flags & FLAG_MASK_64)) {
        d2 &= 0x7fffffffUL;
    }

    if (x2) {
        if (d2) {
            tmp = tcg_const_i64(d2);
            tcg_gen_add_i64(tmp, tmp, regs[x2]);
        } else {
            tmp = load_reg(x2);
        }
        if (b2) {
            tcg_gen_add_i64(tmp, tmp, regs[b2]);
        }
    } else if (b2) {
        if (d2) {
            tmp = tcg_const_i64(d2);
            tcg_gen_add_i64(tmp, tmp, regs[b2]);
        } else {
            tmp = load_reg(b2);
        }
    } else {
        tmp = tcg_const_i64(d2);
    }

    /* 31-bit mode mask if there are values loaded from registers */
    if (!(s->tb->flags & FLAG_MASK_64) && (x2 || b2)) {
        tcg_gen_andi_i64(tmp, tmp, 0x7fffffffUL);
    }

    return tmp;
}
423
/* Set the condition code to the constant VAL (0-3) purely in the
   translator's state; no TCG code is emitted.  */
static void gen_op_movi_cc(DisasContext *s, uint32_t val)
{
    s->cc_op = CC_OP_CONST0 + val;
}

/* The gen_op_update{1,2,3}_cc_i{32,64} family records the operands of a
   cc-producing operation in the cc_src/cc_dst/cc_vr globals so the cc
   can be computed lazily by gen_op_calc_cc later.  Unused slots are
   discarded; the i32 variants zero-extend into the 64-bit globals.  */

/* One-operand form: only cc_dst is live.  */
static void gen_op_update1_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 dst)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

static void gen_op_update1_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 dst)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* Two-operand form: cc_src and cc_dst are live.  */
static void gen_op_update2_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
                                  TCGv_i64 dst)
{
    tcg_gen_mov_i64(cc_src, src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

static void gen_op_update2_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
                                  TCGv_i32 dst)
{
    tcg_gen_extu_i32_i64(cc_src, src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* Three-operand form: cc_src, cc_dst, and the result cc_vr are live.  */
static void gen_op_update3_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
                                  TCGv_i64 dst, TCGv_i64 vr)
{
    tcg_gen_mov_i64(cc_src, src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_mov_i64(cc_vr, vr);
    s->cc_op = op;
}

static void gen_op_update3_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
                                  TCGv_i32 dst, TCGv_i32 vr)
{
    tcg_gen_extu_i32_i64(cc_src, src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_extu_i32_i64(cc_vr, vr);
    s->cc_op = op;
}

/* Record a "result zero / not zero" cc for a 32-bit value.  */
static inline void set_cc_nz_u32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ, val);
}

/* Record a "result zero / not zero" cc for a 64-bit value.  */
static inline void set_cc_nz_u64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_NZ, val);
}
490
/* Record a pending 32-bit comparison of V1 against V2 under cc op COND.  */
static inline void cmp_32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          enum cc_op cond)
{
    gen_op_update2_cc_i32(s, cond, v1, v2);
}

/* Record a pending 64-bit comparison of V1 against V2 under cc op COND.  */
static inline void cmp_64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          enum cc_op cond)
{
    gen_op_update2_cc_i64(s, cond, v1, v2);
}

/* Signed 32-bit compare.  */
static inline void cmp_s32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTGT_32);
}

/* Unsigned 32-bit compare.  */
static inline void cmp_u32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTUGTU_32);
}

/* Signed 32-bit compare against the constant V2 (wrapped in a temp).  */
static inline void cmp_s32c(DisasContext *s, TCGv_i32 v1, int32_t v2)
{
    /* XXX optimize for the constant? put it in s? */
    TCGv_i32 tmp = tcg_const_i32(v2);
    cmp_32(s, v1, tmp, CC_OP_LTGT_32);
    tcg_temp_free_i32(tmp);
}

/* Unsigned 32-bit compare against the constant V2.  */
static inline void cmp_u32c(DisasContext *s, TCGv_i32 v1, uint32_t v2)
{
    TCGv_i32 tmp = tcg_const_i32(v2);
    cmp_32(s, v1, tmp, CC_OP_LTUGTU_32);
    tcg_temp_free_i32(tmp);
}

/* Signed 64-bit compare.  */
static inline void cmp_s64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTGT_64);
}

/* Unsigned 64-bit compare.  */
static inline void cmp_u64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTUGTU_64);
}

/* Signed 64-bit compare against the constant V2.  */
static inline void cmp_s64c(DisasContext *s, TCGv_i64 v1, int64_t v2)
{
    TCGv_i64 tmp = tcg_const_i64(v2);
    cmp_s64(s, v1, tmp);
    tcg_temp_free_i64(tmp);
}

/* Unsigned 64-bit compare against the constant V2.  */
static inline void cmp_u64c(DisasContext *s, TCGv_i64 v1, uint64_t v2)
{
    TCGv_i64 tmp = tcg_const_i64(v2);
    cmp_u64(s, v1, tmp);
    tcg_temp_free_i64(tmp);
}
551
/* The set_cc_* helpers below record the pending cc operation together
   with its operands; each is a thin wrapper selecting the cc op enum.  */

/* Signed compare of a 32-bit value against zero.  */
static inline void set_cc_s32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_LTGT0_32, val);
}

/* Signed compare of a 64-bit value against zero.  */
static inline void set_cc_s64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_LTGT0_64, val);
}

/* cc for a 64-bit unsigned (logical) add: operands V1, V2, result VR.  */
static void set_cc_addu64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_ADDU_64, v1, v2, vr);
}

/* cc for 64-bit LOAD POSITIVE (absolute value).  */
static void set_cc_abs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_ABS_64, v1);
}

/* cc for 64-bit LOAD NEGATIVE.  */
static void set_cc_nabs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_NABS_64, v1);
}

/* cc for a 32-bit signed add.  */
static void set_cc_add32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_ADD_32, v1, v2, vr);
}

/* cc for a 32-bit unsigned (logical) add.  */
static void set_cc_addu32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_ADDU_32, v1, v2, vr);
}

/* cc for a 32-bit signed subtract.  */
static void set_cc_sub32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_SUB_32, v1, v2, vr);
}

/* cc for 32-bit LOAD POSITIVE.  */
static void set_cc_abs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_ABS_32, v1);
}

/* cc for 32-bit LOAD NEGATIVE.  */
static void set_cc_nabs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NABS_32, v1);
}

/* cc for 32-bit LOAD COMPLEMENT.  */
static void set_cc_comp32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_COMP_32, v1);
}

/* cc for 64-bit LOAD COMPLEMENT.  */
static void set_cc_comp64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_COMP_64, v1);
}

/* cc for INSERT CHARACTERS UNDER MASK.  */
static void set_cc_icm(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    gen_op_update2_cc_i32(s, CC_OP_ICM, v1, v2);
}

/* cc for a short-float vs 64-bit compare: stash the i32 operand
   zero-extended in cc_src and the i64 operand in cc_dst.  */
static void set_cc_cmp_f32_i64(DisasContext *s, TCGv_i32 v1, TCGv_i64 v2)
{
    tcg_gen_extu_i32_i64(cc_src, v1);
    tcg_gen_mov_i64(cc_dst, v2);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_LTGT_F32;
}

/* cc for a short-float zero/non-zero test.  */
static void gen_set_cc_nz_f32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ_F32, v1);
}
631
/* CC value is in env->cc_op */
/* Mark the cc as fully materialized: the numeric value lives in the
   cc_op global, and the operand globals are dead.  */
static inline void set_cc_static(DisasContext *s)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_discard_i64(cc_dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_STATIC;
}

/* Write the translator's symbolic cc op into the cc_op global so a
   later helper can evaluate it; skipped when the value is dynamic or
   already materialized.  */
static inline void gen_op_set_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC && s->cc_op != CC_OP_STATIC) {
        tcg_gen_movi_i32(cc_op, s->cc_op);
    }
}

/* Synchronize cc state before leaving the TB.  */
static inline void gen_update_cc_op(DisasContext *s)
{
    gen_op_set_cc_op(s);
}
652
/* calculates cc into cc_op */
/* Materialize the lazily-tracked condition code into the cc_op global.
   The calc_cc helper always takes three operand slots; depending on how
   many the pending cc op actually uses, the unused slots are filled
   with a dummy zero.  Constant and static cc ops need no helper call.
   On return the cc is in static form (see set_cc_static).  */
static void gen_op_calc_cc(DisasContext *s)
{
    TCGv_i32 local_cc_op = tcg_const_i32(s->cc_op);
    TCGv_i64 dummy = tcg_const_i64(0);

    switch (s->cc_op) {
    case CC_OP_CONST0:
    case CC_OP_CONST1:
    case CC_OP_CONST2:
    case CC_OP_CONST3:
        /* s->cc_op is the cc value */
        tcg_gen_movi_i32(cc_op, s->cc_op - CC_OP_CONST0);
        break;
    case CC_OP_STATIC:
        /* env->cc_op already is the cc value */
        break;
    case CC_OP_NZ:
    case CC_OP_ABS_64:
    case CC_OP_NABS_64:
    case CC_OP_ABS_32:
    case CC_OP_NABS_32:
    case CC_OP_LTGT0_32:
    case CC_OP_LTGT0_64:
    case CC_OP_COMP_32:
    case CC_OP_COMP_64:
    case CC_OP_NZ_F32:
    case CC_OP_NZ_F64:
        /* 1 argument */
        gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, dummy, cc_dst, dummy);
        break;
    case CC_OP_ICM:
    case CC_OP_LTGT_32:
    case CC_OP_LTGT_64:
    case CC_OP_LTUGTU_32:
    case CC_OP_LTUGTU_64:
    case CC_OP_TM_32:
    case CC_OP_TM_64:
    case CC_OP_LTGT_F32:
    case CC_OP_LTGT_F64:
    case CC_OP_SLAG:
        /* 2 arguments */
        gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, cc_src, cc_dst, dummy);
        break;
    case CC_OP_ADD_64:
    case CC_OP_ADDU_64:
    case CC_OP_SUB_64:
    case CC_OP_SUBU_64:
    case CC_OP_ADD_32:
    case CC_OP_ADDU_32:
    case CC_OP_SUB_32:
    case CC_OP_SUBU_32:
        /* 3 arguments */
        gen_helper_calc_cc(cc_op, cpu_env, local_cc_op, cc_src, cc_dst, cc_vr);
        break;
    case CC_OP_DYNAMIC:
        /* unknown operation - assume 3 arguments and cc_op in env */
        gen_helper_calc_cc(cc_op, cpu_env, cc_op, cc_src, cc_dst, cc_vr);
        break;
    default:
        tcg_abort();
    }

    tcg_temp_free_i32(local_cc_op);
    tcg_temp_free_i64(dummy);

    /* We now have cc in cc_op as constant */
    set_cc_static(s);
}
722
/* Extract the register fields of an RR-format instruction.  */
static inline void decode_rr(DisasContext *s, uint64_t insn, int *r1, int *r2)
{
    debug_insn(insn);

    *r1 = (insn >> 4) & 0xf;
    *r2 = insn & 0xf;
}

/* Extract the fields of an RX-format instruction and return the
   computed effective address (caller frees the temp).  */
static inline TCGv_i64 decode_rx(DisasContext *s, uint64_t insn, int *r1,
                                 int *x2, int *b2, int *d2)
{
    debug_insn(insn);

    *r1 = (insn >> 20) & 0xf;
    *x2 = (insn >> 16) & 0xf;
    *b2 = (insn >> 12) & 0xf;
    *d2 = insn & 0xfff;

    return get_address(s, *x2, *b2, *d2);
}

/* Extract the fields of an RS-format instruction; R3 doubles as the
   mask field M3 for some opcodes.  No address is formed here.  */
static inline void decode_rs(DisasContext *s, uint64_t insn, int *r1, int *r3,
                             int *b2, int *d2)
{
    debug_insn(insn);

    *r1 = (insn >> 20) & 0xf;
    /* aka m3 */
    *r3 = (insn >> 16) & 0xf;
    *b2 = (insn >> 12) & 0xf;
    *d2 = insn & 0xfff;
}

/* Extract the fields of an SI-format instruction and return the
   computed effective address (no index register; caller frees).  */
static inline TCGv_i64 decode_si(DisasContext *s, uint64_t insn, int *i2,
                                 int *b1, int *d1)
{
    debug_insn(insn);

    *i2 = (insn >> 16) & 0xff;
    *b1 = (insn >> 12) & 0xf;
    *d1 = insn & 0xfff;

    return get_address(s, 0, *b1, *d1);
}
767
/* End the TB with a jump to guest address PC.  When the target lies on
   the same page as the TB start (or the current instruction, for TBs
   spanning two pages), emit a chainable direct jump (goto_tb slot
   TB_NUM); otherwise exit to the main loop for a lookup.  */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong pc)
{
    TranslationBlock *tb;

    gen_update_cc_op(s);

    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_i64(psw_addr, pc);
        tcg_gen_exit_tb((tcg_target_long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_i64(psw_addr, pc);
        tcg_gen_exit_tb(0);
    }
}
788
/* Count a branch that had to fall back to the generic (helper-based)
   cc evaluation; compiled to a no-op unless DEBUG_INLINE_BRANCHES.  */
static inline void account_noninline_branch(DisasContext *s, int cc_op)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_miss[cc_op]++;
#endif
}

/* Count a branch whose condition could be emitted inline.  */
static inline void account_inline_branch(DisasContext *s, int cc_op)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_hit[cc_op]++;
#endif
}
802
/* Table of mask values to comparison codes, given a comparison as input.
   For a true comparison CC=3 will never be set, but we treat this
   conservatively for possible use when CC=3 indicates overflow.
   Indexed by the 4-bit instruction mask (bit 3 = CC0/EQ, bit 2 = CC1/LT,
   bit 1 = CC2/GT, bit 0 = CC3); TCG_COND_NEVER entries force the caller
   onto the generic path.  */
static const TCGCond ltgt_cond[16] = {
    TCG_COND_NEVER,  TCG_COND_NEVER,     /*    |    |    | x */
    TCG_COND_GT,     TCG_COND_NEVER,     /*    |    | GT | x */
    TCG_COND_LT,     TCG_COND_NEVER,     /*    | LT |    | x */
    TCG_COND_NE,     TCG_COND_NEVER,     /*    | LT | GT | x */
    TCG_COND_EQ,     TCG_COND_NEVER,     /* EQ |    |    | x */
    TCG_COND_GE,     TCG_COND_NEVER,     /* EQ |    | GT | x */
    TCG_COND_LE,     TCG_COND_NEVER,     /* EQ | LT |    | x */
    TCG_COND_ALWAYS, TCG_COND_ALWAYS,    /* EQ | LT | GT | x */
};

/* Table of mask values to comparison codes, given a logic op as input.
   For such, only CC=0 and CC=1 should be possible, so only the top two
   mask bits select the condition.  */
static const TCGCond nz_cond[16] = {
    /*    |    | x | x */
    TCG_COND_NEVER, TCG_COND_NEVER, TCG_COND_NEVER, TCG_COND_NEVER,
    /*    | NE | x | x */
    TCG_COND_NE, TCG_COND_NE, TCG_COND_NE, TCG_COND_NE,
    /* EQ |    | x | x */
    TCG_COND_EQ, TCG_COND_EQ, TCG_COND_EQ, TCG_COND_EQ,
    /* EQ | NE | x | x */
    TCG_COND_ALWAYS, TCG_COND_ALWAYS, TCG_COND_ALWAYS, TCG_COND_ALWAYS,
};
829
/* Interpret MASK in terms of S->CC_OP, and fill in C with all the
   details required to generate a TCG comparison.
   Two phases: first pick a TCG condition for (cc_op, mask), falling
   back to materializing the cc (do_dynamic) when the mask has no inline
   equivalent; then load the comparison operands, flagging with g1/g2
   any that alias TCG globals so free_compare() will not free them.  */
static void disas_jcc(DisasContext *s, DisasCompare *c, uint32_t mask)
{
    TCGCond cond;
    enum cc_op old_cc_op = s->cc_op;

    /* Mask 15 = always, mask 0 = never: no operands needed; point both
       operands at the cc_op global so free_compare has nothing to do.  */
    if (mask == 15 || mask == 0) {
        c->cond = (mask ? TCG_COND_ALWAYS : TCG_COND_NEVER);
        c->u.s32.a = cc_op;
        c->u.s32.b = cc_op;
        c->g1 = c->g2 = true;
        c->is_64 = false;
        return;
    }

    /* Find the TCG condition for the mask + cc op. */
    switch (old_cc_op) {
    case CC_OP_LTGT0_32:
    case CC_OP_LTGT0_64:
    case CC_OP_LTGT_32:
    case CC_OP_LTGT_64:
        cond = ltgt_cond[mask];
        if (cond == TCG_COND_NEVER) {
            goto do_dynamic;
        }
        account_inline_branch(s, old_cc_op);
        break;

    case CC_OP_LTUGTU_32:
    case CC_OP_LTUGTU_64:
        cond = tcg_unsigned_cond(ltgt_cond[mask]);
        if (cond == TCG_COND_NEVER) {
            goto do_dynamic;
        }
        account_inline_branch(s, old_cc_op);
        break;

    case CC_OP_NZ:
        cond = nz_cond[mask];
        if (cond == TCG_COND_NEVER) {
            goto do_dynamic;
        }
        account_inline_branch(s, old_cc_op);
        break;

    case CC_OP_TM_32:
    case CC_OP_TM_64:
        /* TEST UNDER MASK: only "all zero" and "not all zero" are
           expressible inline.  */
        switch (mask) {
        case 8:
            cond = TCG_COND_EQ;
            break;
        case 4 | 2 | 1:
            cond = TCG_COND_NE;
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s, old_cc_op);
        break;

    case CC_OP_ICM:
        switch (mask) {
        case 8:
            cond = TCG_COND_EQ;
            break;
        case 4 | 2 | 1:
        case 4 | 2:
            cond = TCG_COND_NE;
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s, old_cc_op);
        break;

    default:
    do_dynamic:
        /* Calculate cc value. */
        gen_op_calc_cc(s);
        /* FALLTHRU */

    case CC_OP_STATIC:
        /* Jump based on CC.  We'll load up the real cond below;
           the assignment here merely avoids a compiler warning. */
        account_noninline_branch(s, old_cc_op);
        old_cc_op = CC_OP_STATIC;
        cond = TCG_COND_NEVER;
        break;
    }

    /* Load up the arguments of the comparison. */
    c->is_64 = true;
    c->g1 = c->g2 = false;
    switch (old_cc_op) {
    case CC_OP_LTGT0_32:
        c->is_64 = false;
        c->u.s32.a = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(c->u.s32.a, cc_dst);
        c->u.s32.b = tcg_const_i32(0);
        break;
    case CC_OP_LTGT_32:
    case CC_OP_LTUGTU_32:
        c->is_64 = false;
        c->u.s32.a = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(c->u.s32.a, cc_src);
        c->u.s32.b = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(c->u.s32.b, cc_dst);
        break;

    case CC_OP_LTGT0_64:
    case CC_OP_NZ:
    case CC_OP_ICM:
        /* cc_dst is a global; mark g1 so it is not freed.  */
        c->u.s64.a = cc_dst;
        c->u.s64.b = tcg_const_i64(0);
        c->g1 = true;
        break;
    case CC_OP_LTGT_64:
    case CC_OP_LTUGTU_64:
        c->u.s64.a = cc_src;
        c->u.s64.b = cc_dst;
        c->g1 = c->g2 = true;
        break;

    case CC_OP_TM_32:
    case CC_OP_TM_64:
        /* Compare (value & mask) against zero.  */
        c->u.s64.a = tcg_temp_new_i64();
        c->u.s64.b = tcg_const_i64(0);
        tcg_gen_and_i64(c->u.s64.a, cc_src, cc_dst);
        break;

    case CC_OP_STATIC:
        /* The cc value is in the cc_op global (0-3); pick the cheapest
           test of it that realizes MASK.  */
        c->is_64 = false;
        c->u.s32.a = cc_op;
        c->g1 = true;
        switch (mask) {
        case 0x8 | 0x4 | 0x2: /* cc != 3 */
            cond = TCG_COND_NE;
            c->u.s32.b = tcg_const_i32(3);
            break;
        case 0x8 | 0x4 | 0x1: /* cc != 2 */
            cond = TCG_COND_NE;
            c->u.s32.b = tcg_const_i32(2);
            break;
        case 0x8 | 0x2 | 0x1: /* cc != 1 */
            cond = TCG_COND_NE;
            c->u.s32.b = tcg_const_i32(1);
            break;
        case 0x8 | 0x2: /* cc == 0 || cc == 2 => (cc & 1) == 0 */
            cond = TCG_COND_EQ;
            c->g1 = false;
            c->u.s32.a = tcg_temp_new_i32();
            c->u.s32.b = tcg_const_i32(0);
            tcg_gen_andi_i32(c->u.s32.a, cc_op, 1);
            break;
        case 0x8 | 0x4: /* cc < 2 */
            cond = TCG_COND_LTU;
            c->u.s32.b = tcg_const_i32(2);
            break;
        case 0x8: /* cc == 0 */
            cond = TCG_COND_EQ;
            c->u.s32.b = tcg_const_i32(0);
            break;
        case 0x4 | 0x2 | 0x1: /* cc != 0 */
            cond = TCG_COND_NE;
            c->u.s32.b = tcg_const_i32(0);
            break;
        case 0x4 | 0x1: /* cc == 1 || cc == 3 => (cc & 1) != 0 */
            cond = TCG_COND_NE;
            c->g1 = false;
            c->u.s32.a = tcg_temp_new_i32();
            c->u.s32.b = tcg_const_i32(0);
            tcg_gen_andi_i32(c->u.s32.a, cc_op, 1);
            break;
        case 0x4: /* cc == 1 */
            cond = TCG_COND_EQ;
            c->u.s32.b = tcg_const_i32(1);
            break;
        case 0x2 | 0x1: /* cc > 1 */
            cond = TCG_COND_GTU;
            c->u.s32.b = tcg_const_i32(1);
            break;
        case 0x2: /* cc == 2 */
            cond = TCG_COND_EQ;
            c->u.s32.b = tcg_const_i32(2);
            break;
        case 0x1: /* cc == 3 */
            cond = TCG_COND_EQ;
            c->u.s32.b = tcg_const_i32(3);
            break;
        default:
            /* CC is masked by something else: (8 >> cc) & mask. */
            cond = TCG_COND_NE;
            c->g1 = false;
            c->u.s32.a = tcg_const_i32(8);
            c->u.s32.b = tcg_const_i32(0);
            tcg_gen_shr_i32(c->u.s32.a, c->u.s32.a, cc_op);
            tcg_gen_andi_i32(c->u.s32.a, c->u.s32.a, mask);
            break;
        }
        break;

    default:
        abort();
    }
    c->cond = cond;
}
1037
1038 static void free_compare(DisasCompare *c)
1039 {
1040 if (!c->g1) {
1041 if (c->is_64) {
1042 tcg_temp_free_i64(c->u.s64.a);
1043 } else {
1044 tcg_temp_free_i32(c->u.s32.a);
1045 }
1046 }
1047 if (!c->g2) {
1048 if (c->is_64) {
1049 tcg_temp_free_i64(c->u.s64.b);
1050 } else {
1051 tcg_temp_free_i32(c->u.s32.b);
1052 }
1053 }
1054 }
1055
/* Emit a branch to label SKIP taken when the condition selected by MASK
   is FALSE (the condition is inverted so the fall-through path is the
   cc-true path).  */
static void gen_jcc(DisasContext *s, uint32_t mask, int skip)
{
    DisasCompare c;
    TCGCond cond;

    disas_jcc(s, &c, mask);
    cond = tcg_invert_cond(c.cond);

    if (c.is_64) {
        tcg_gen_brcond_i64(cond, c.u.s64.a, c.u.s64.b, skip);
    } else {
        tcg_gen_brcond_i32(cond, c.u.s32.a, c.u.s32.b, skip);
    }

    free_compare(&c);
}
1072
1073 static void gen_bcr(DisasContext *s, uint32_t mask, TCGv_i64 target,
1074 uint64_t offset)
1075 {
1076 int skip;
1077
1078 if (mask == 0xf) {
1079 /* unconditional */
1080 gen_update_cc_op(s);
1081 tcg_gen_mov_i64(psw_addr, target);
1082 tcg_gen_exit_tb(0);
1083 } else if (mask == 0) {
1084 /* ignore cc and never match */
1085 gen_goto_tb(s, 0, offset + 2);
1086 } else {
1087 TCGv_i64 new_addr = tcg_temp_local_new_i64();
1088
1089 tcg_gen_mov_i64(new_addr, target);
1090 skip = gen_new_label();
1091 gen_jcc(s, mask, skip);
1092 gen_update_cc_op(s);
1093 tcg_gen_mov_i64(psw_addr, new_addr);
1094 tcg_temp_free_i64(new_addr);
1095 tcg_gen_exit_tb(0);
1096 gen_set_label(skip);
1097 tcg_temp_free_i64(new_addr);
1098 gen_goto_tb(s, 1, offset + 2);
1099 }
1100 }
1101
/* Generate a relative branch-on-condition: if the cc matches MASK, jump
   to s->pc + OFFSET, else fall through to s->pc + 4.  Always ends the
   TB.  */
static void gen_brc(uint32_t mask, DisasContext *s, int32_t offset)
{
    int skip;

    if (mask == 0xf) {
        /* unconditional */
        gen_goto_tb(s, 0, s->pc + offset);
    } else if (mask == 0) {
        /* ignore cc and never match */
        gen_goto_tb(s, 0, s->pc + 4);
    } else {
        skip = gen_new_label();
        gen_jcc(s, mask, skip);
        gen_goto_tb(s, 0, s->pc + offset);
        gen_set_label(skip);
        gen_goto_tb(s, 1, s->pc + 4);
    }
    s->is_jmp = DISAS_TB_JUMP;
}
1121
1122 static void gen_op_mvc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
1123 {
1124 TCGv_i64 tmp, tmp2;
1125 int i;
1126 int l_memset = gen_new_label();
1127 int l_out = gen_new_label();
1128 TCGv_i64 dest = tcg_temp_local_new_i64();
1129 TCGv_i64 src = tcg_temp_local_new_i64();
1130 TCGv_i32 vl;
1131
1132 /* Find out if we should use the inline version of mvc */
1133 switch (l) {
1134 case 0:
1135 case 1:
1136 case 2:
1137 case 3:
1138 case 4:
1139 case 5:
1140 case 6:
1141 case 7:
1142 case 11:
1143 case 15:
1144 /* use inline */
1145 break;
1146 default:
1147 /* Fall back to helper */
1148 vl = tcg_const_i32(l);
1149 potential_page_fault(s);
1150 gen_helper_mvc(cpu_env, vl, s1, s2);
1151 tcg_temp_free_i32(vl);
1152 return;
1153 }
1154
1155 tcg_gen_mov_i64(dest, s1);
1156 tcg_gen_mov_i64(src, s2);
1157
1158 if (!(s->tb->flags & FLAG_MASK_64)) {
1159 /* XXX what if we overflow while moving? */
1160 tcg_gen_andi_i64(dest, dest, 0x7fffffffUL);
1161 tcg_gen_andi_i64(src, src, 0x7fffffffUL);
1162 }
1163
1164 tmp = tcg_temp_new_i64();
1165 tcg_gen_addi_i64(tmp, src, 1);
1166 tcg_gen_brcond_i64(TCG_COND_EQ, dest, tmp, l_memset);
1167 tcg_temp_free_i64(tmp);
1168
1169 switch (l) {
1170 case 0:
1171 tmp = tcg_temp_new_i64();
1172
1173 tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
1174 tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
1175
1176 tcg_temp_free_i64(tmp);
1177 break;
1178 case 1:
1179 tmp = tcg_temp_new_i64();
1180
1181 tcg_gen_qemu_ld16u(tmp, src, get_mem_index(s));
1182 tcg_gen_qemu_st16(tmp, dest, get_mem_index(s));
1183
1184 tcg_temp_free_i64(tmp);
1185 break;
1186 case 3:
1187 tmp = tcg_temp_new_i64();
1188
1189 tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
1190 tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));
1191
1192 tcg_temp_free_i64(tmp);
1193 break;
1194 case 4:
1195 tmp = tcg_temp_new_i64();
1196 tmp2 = tcg_temp_new_i64();
1197
1198 tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
1199 tcg_gen_addi_i64(src, src, 4);
1200 tcg_gen_qemu_ld8u(tmp2, src, get_mem_index(s));
1201 tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));
1202 tcg_gen_addi_i64(dest, dest, 4);
1203 tcg_gen_qemu_st8(tmp2, dest, get_mem_index(s));
1204
1205 tcg_temp_free_i64(tmp);
1206 tcg_temp_free_i64(tmp2);
1207 break;
1208 case 7:
1209 tmp = tcg_temp_new_i64();
1210
1211 tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
1212 tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
1213
1214 tcg_temp_free_i64(tmp);
1215 break;
1216 default:
1217 /* The inline version can become too big for too uneven numbers, only
1218 use it on known good lengths */
1219 tmp = tcg_temp_new_i64();
1220 tmp2 = tcg_const_i64(8);
1221 for (i = 0; (i + 7) <= l; i += 8) {
1222 tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
1223 tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
1224
1225 tcg_gen_add_i64(src, src, tmp2);
1226 tcg_gen_add_i64(dest, dest, tmp2);
1227 }
1228
1229 tcg_temp_free_i64(tmp2);
1230 tmp2 = tcg_const_i64(1);
1231
1232 for (; i <= l; i++) {
1233 tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
1234 tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
1235
1236 tcg_gen_add_i64(src, src, tmp2);
1237 tcg_gen_add_i64(dest, dest, tmp2);
1238 }
1239
1240 tcg_temp_free_i64(tmp2);
1241 tcg_temp_free_i64(tmp);
1242 break;
1243 }
1244
1245 tcg_gen_br(l_out);
1246
1247 gen_set_label(l_memset);
1248 /* memset case (dest == (src + 1)) */
1249
1250 tmp = tcg_temp_new_i64();
1251 tmp2 = tcg_temp_new_i64();
1252 /* fill tmp with the byte */
1253 tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
1254 tcg_gen_shli_i64(tmp2, tmp, 8);
1255 tcg_gen_or_i64(tmp, tmp, tmp2);
1256 tcg_gen_shli_i64(tmp2, tmp, 16);
1257 tcg_gen_or_i64(tmp, tmp, tmp2);
1258 tcg_gen_shli_i64(tmp2, tmp, 32);
1259 tcg_gen_or_i64(tmp, tmp, tmp2);
1260 tcg_temp_free_i64(tmp2);
1261
1262 tmp2 = tcg_const_i64(8);
1263
1264 for (i = 0; (i + 7) <= l; i += 8) {
1265 tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
1266 tcg_gen_addi_i64(dest, dest, 8);
1267 }
1268
1269 tcg_temp_free_i64(tmp2);
1270 tmp2 = tcg_const_i64(1);
1271
1272 for (; i <= l; i++) {
1273 tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
1274 tcg_gen_addi_i64(dest, dest, 1);
1275 }
1276
1277 tcg_temp_free_i64(tmp2);
1278 tcg_temp_free_i64(tmp);
1279
1280 gen_set_label(l_out);
1281
1282 tcg_temp_free(dest);
1283 tcg_temp_free(src);
1284 }
1285
1286 static void gen_op_clc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
1287 {
1288 TCGv_i64 tmp;
1289 TCGv_i64 tmp2;
1290 TCGv_i32 vl;
1291
1292 /* check for simple 32bit or 64bit match */
1293 switch (l) {
1294 case 0:
1295 tmp = tcg_temp_new_i64();
1296 tmp2 = tcg_temp_new_i64();
1297
1298 tcg_gen_qemu_ld8u(tmp, s1, get_mem_index(s));
1299 tcg_gen_qemu_ld8u(tmp2, s2, get_mem_index(s));
1300 cmp_u64(s, tmp, tmp2);
1301
1302 tcg_temp_free_i64(tmp);
1303 tcg_temp_free_i64(tmp2);
1304 return;
1305 case 1:
1306 tmp = tcg_temp_new_i64();
1307 tmp2 = tcg_temp_new_i64();
1308
1309 tcg_gen_qemu_ld16u(tmp, s1, get_mem_index(s));
1310 tcg_gen_qemu_ld16u(tmp2, s2, get_mem_index(s));
1311 cmp_u64(s, tmp, tmp2);
1312
1313 tcg_temp_free_i64(tmp);
1314 tcg_temp_free_i64(tmp2);
1315 return;
1316 case 3:
1317 tmp = tcg_temp_new_i64();
1318 tmp2 = tcg_temp_new_i64();
1319
1320 tcg_gen_qemu_ld32u(tmp, s1, get_mem_index(s));
1321 tcg_gen_qemu_ld32u(tmp2, s2, get_mem_index(s));
1322 cmp_u64(s, tmp, tmp2);
1323
1324 tcg_temp_free_i64(tmp);
1325 tcg_temp_free_i64(tmp2);
1326 return;
1327 case 7:
1328 tmp = tcg_temp_new_i64();
1329 tmp2 = tcg_temp_new_i64();
1330
1331 tcg_gen_qemu_ld64(tmp, s1, get_mem_index(s));
1332 tcg_gen_qemu_ld64(tmp2, s2, get_mem_index(s));
1333 cmp_u64(s, tmp, tmp2);
1334
1335 tcg_temp_free_i64(tmp);
1336 tcg_temp_free_i64(tmp2);
1337 return;
1338 }
1339
1340 potential_page_fault(s);
1341 vl = tcg_const_i32(l);
1342 gen_helper_clc(cc_op, cpu_env, vl, s1, s2);
1343 tcg_temp_free_i32(vl);
1344 set_cc_static(s);
1345 }
1346
1347 static void disas_e3(CPUS390XState *env, DisasContext* s, int op, int r1,
1348 int x2, int b2, int d2)
1349 {
1350 TCGv_i64 addr, tmp, tmp2, tmp3, tmp4;
1351 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
1352
1353 LOG_DISAS("disas_e3: op 0x%x r1 %d x2 %d b2 %d d2 %d\n",
1354 op, r1, x2, b2, d2);
1355 addr = get_address(s, x2, b2, d2);
1356 switch (op) {
1357 case 0x2: /* LTG R1,D2(X2,B2) [RXY] */
1358 case 0x4: /* lg r1,d2(x2,b2) */
1359 tcg_gen_qemu_ld64(regs[r1], addr, get_mem_index(s));
1360 if (op == 0x2) {
1361 set_cc_s64(s, regs[r1]);
1362 }
1363 break;
1364 case 0x12: /* LT R1,D2(X2,B2) [RXY] */
1365 tmp2 = tcg_temp_new_i64();
1366 tmp32_1 = tcg_temp_new_i32();
1367 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1368 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
1369 store_reg32(r1, tmp32_1);
1370 set_cc_s32(s, tmp32_1);
1371 tcg_temp_free_i64(tmp2);
1372 tcg_temp_free_i32(tmp32_1);
1373 break;
1374 case 0xc: /* MSG R1,D2(X2,B2) [RXY] */
1375 case 0x1c: /* MSGF R1,D2(X2,B2) [RXY] */
1376 tmp2 = tcg_temp_new_i64();
1377 if (op == 0xc) {
1378 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1379 } else {
1380 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1381 }
1382 tcg_gen_mul_i64(regs[r1], regs[r1], tmp2);
1383 tcg_temp_free_i64(tmp2);
1384 break;
1385 case 0xd: /* DSG R1,D2(X2,B2) [RXY] */
1386 case 0x1d: /* DSGF R1,D2(X2,B2) [RXY] */
1387 tmp2 = tcg_temp_new_i64();
1388 if (op == 0x1d) {
1389 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1390 } else {
1391 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1392 }
1393 tmp4 = load_reg(r1 + 1);
1394 tmp3 = tcg_temp_new_i64();
1395 tcg_gen_div_i64(tmp3, tmp4, tmp2);
1396 store_reg(r1 + 1, tmp3);
1397 tcg_gen_rem_i64(tmp3, tmp4, tmp2);
1398 store_reg(r1, tmp3);
1399 tcg_temp_free_i64(tmp2);
1400 tcg_temp_free_i64(tmp3);
1401 tcg_temp_free_i64(tmp4);
1402 break;
1403 case 0xf: /* LRVG R1,D2(X2,B2) [RXE] */
1404 tmp2 = tcg_temp_new_i64();
1405 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1406 tcg_gen_bswap64_i64(tmp2, tmp2);
1407 store_reg(r1, tmp2);
1408 tcg_temp_free_i64(tmp2);
1409 break;
1410 case 0x14: /* LGF R1,D2(X2,B2) [RXY] */
1411 case 0x16: /* LLGF R1,D2(X2,B2) [RXY] */
1412 tmp2 = tcg_temp_new_i64();
1413 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1414 if (op == 0x14) {
1415 tcg_gen_ext32s_i64(tmp2, tmp2);
1416 }
1417 store_reg(r1, tmp2);
1418 tcg_temp_free_i64(tmp2);
1419 break;
1420 case 0x15: /* LGH R1,D2(X2,B2) [RXY] */
1421 tmp2 = tcg_temp_new_i64();
1422 tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
1423 store_reg(r1, tmp2);
1424 tcg_temp_free_i64(tmp2);
1425 break;
1426 case 0x17: /* LLGT R1,D2(X2,B2) [RXY] */
1427 tmp2 = tcg_temp_new_i64();
1428 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1429 tcg_gen_andi_i64(tmp2, tmp2, 0x7fffffffULL);
1430 store_reg(r1, tmp2);
1431 tcg_temp_free_i64(tmp2);
1432 break;
1433 case 0x1e: /* LRV R1,D2(X2,B2) [RXY] */
1434 tmp2 = tcg_temp_new_i64();
1435 tmp32_1 = tcg_temp_new_i32();
1436 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1437 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
1438 tcg_temp_free_i64(tmp2);
1439 tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
1440 store_reg32(r1, tmp32_1);
1441 tcg_temp_free_i32(tmp32_1);
1442 break;
1443 case 0x1f: /* LRVH R1,D2(X2,B2) [RXY] */
1444 tmp2 = tcg_temp_new_i64();
1445 tmp32_1 = tcg_temp_new_i32();
1446 tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
1447 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
1448 tcg_temp_free_i64(tmp2);
1449 tcg_gen_bswap16_i32(tmp32_1, tmp32_1);
1450 store_reg16(r1, tmp32_1);
1451 tcg_temp_free_i32(tmp32_1);
1452 break;
1453 case 0x20: /* CG R1,D2(X2,B2) [RXY] */
1454 case 0x21: /* CLG R1,D2(X2,B2) */
1455 case 0x30: /* CGF R1,D2(X2,B2) [RXY] */
1456 case 0x31: /* CLGF R1,D2(X2,B2) [RXY] */
1457 tmp2 = tcg_temp_new_i64();
1458 switch (op) {
1459 case 0x20:
1460 case 0x21:
1461 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1462 break;
1463 case 0x30:
1464 tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
1465 break;
1466 case 0x31:
1467 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1468 break;
1469 default:
1470 tcg_abort();
1471 }
1472 switch (op) {
1473 case 0x20:
1474 case 0x30:
1475 cmp_s64(s, regs[r1], tmp2);
1476 break;
1477 case 0x21:
1478 case 0x31:
1479 cmp_u64(s, regs[r1], tmp2);
1480 break;
1481 default:
1482 tcg_abort();
1483 }
1484 tcg_temp_free_i64(tmp2);
1485 break;
1486 case 0x24: /* stg r1, d2(x2,b2) */
1487 tcg_gen_qemu_st64(regs[r1], addr, get_mem_index(s));
1488 break;
1489 case 0x3e: /* STRV R1,D2(X2,B2) [RXY] */
1490 tmp32_1 = load_reg32(r1);
1491 tmp2 = tcg_temp_new_i64();
1492 tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
1493 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
1494 tcg_temp_free_i32(tmp32_1);
1495 tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
1496 tcg_temp_free_i64(tmp2);
1497 break;
1498 case 0x50: /* STY R1,D2(X2,B2) [RXY] */
1499 tmp32_1 = load_reg32(r1);
1500 tmp2 = tcg_temp_new_i64();
1501 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
1502 tcg_temp_free_i32(tmp32_1);
1503 tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
1504 tcg_temp_free_i64(tmp2);
1505 break;
1506 case 0x57: /* XY R1,D2(X2,B2) [RXY] */
1507 tmp32_1 = load_reg32(r1);
1508 tmp32_2 = tcg_temp_new_i32();
1509 tmp2 = tcg_temp_new_i64();
1510 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1511 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
1512 tcg_temp_free_i64(tmp2);
1513 tcg_gen_xor_i32(tmp32_2, tmp32_1, tmp32_2);
1514 store_reg32(r1, tmp32_2);
1515 set_cc_nz_u32(s, tmp32_2);
1516 tcg_temp_free_i32(tmp32_1);
1517 tcg_temp_free_i32(tmp32_2);
1518 break;
1519 case 0x58: /* LY R1,D2(X2,B2) [RXY] */
1520 tmp3 = tcg_temp_new_i64();
1521 tcg_gen_qemu_ld32u(tmp3, addr, get_mem_index(s));
1522 store_reg32_i64(r1, tmp3);
1523 tcg_temp_free_i64(tmp3);
1524 break;
1525 case 0x71: /* LAY R1,D2(X2,B2) [RXY] */
1526 store_reg(r1, addr);
1527 break;
1528 case 0x72: /* STCY R1,D2(X2,B2) [RXY] */
1529 tmp32_1 = load_reg32(r1);
1530 tmp2 = tcg_temp_new_i64();
1531 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
1532 tcg_gen_qemu_st8(tmp2, addr, get_mem_index(s));
1533 tcg_temp_free_i32(tmp32_1);
1534 tcg_temp_free_i64(tmp2);
1535 break;
1536 case 0x73: /* ICY R1,D2(X2,B2) [RXY] */
1537 tmp3 = tcg_temp_new_i64();
1538 tcg_gen_qemu_ld8u(tmp3, addr, get_mem_index(s));
1539 store_reg8(r1, tmp3);
1540 tcg_temp_free_i64(tmp3);
1541 break;
1542 case 0x76: /* LB R1,D2(X2,B2) [RXY] */
1543 case 0x77: /* LGB R1,D2(X2,B2) [RXY] */
1544 tmp2 = tcg_temp_new_i64();
1545 tcg_gen_qemu_ld8s(tmp2, addr, get_mem_index(s));
1546 switch (op) {
1547 case 0x76:
1548 tcg_gen_ext8s_i64(tmp2, tmp2);
1549 store_reg32_i64(r1, tmp2);
1550 break;
1551 case 0x77:
1552 tcg_gen_ext8s_i64(tmp2, tmp2);
1553 store_reg(r1, tmp2);
1554 break;
1555 default:
1556 tcg_abort();
1557 }
1558 tcg_temp_free_i64(tmp2);
1559 break;
1560 case 0x78: /* LHY R1,D2(X2,B2) [RXY] */
1561 tmp2 = tcg_temp_new_i64();
1562 tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
1563 store_reg32_i64(r1, tmp2);
1564 tcg_temp_free_i64(tmp2);
1565 break;
1566 case 0x80: /* NG R1,D2(X2,B2) [RXY] */
1567 case 0x81: /* OG R1,D2(X2,B2) [RXY] */
1568 case 0x82: /* XG R1,D2(X2,B2) [RXY] */
1569 tmp3 = tcg_temp_new_i64();
1570 tcg_gen_qemu_ld64(tmp3, addr, get_mem_index(s));
1571 switch (op) {
1572 case 0x80:
1573 tcg_gen_and_i64(regs[r1], regs[r1], tmp3);
1574 break;
1575 case 0x81:
1576 tcg_gen_or_i64(regs[r1], regs[r1], tmp3);
1577 break;
1578 case 0x82:
1579 tcg_gen_xor_i64(regs[r1], regs[r1], tmp3);
1580 break;
1581 default:
1582 tcg_abort();
1583 }
1584 set_cc_nz_u64(s, regs[r1]);
1585 tcg_temp_free_i64(tmp3);
1586 break;
1587 case 0x86: /* MLG R1,D2(X2,B2) [RXY] */
1588 tmp2 = tcg_temp_new_i64();
1589 tmp32_1 = tcg_const_i32(r1);
1590 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1591 gen_helper_mlg(cpu_env, tmp32_1, tmp2);
1592 tcg_temp_free_i64(tmp2);
1593 tcg_temp_free_i32(tmp32_1);
1594 break;
1595 case 0x87: /* DLG R1,D2(X2,B2) [RXY] */
1596 tmp2 = tcg_temp_new_i64();
1597 tmp32_1 = tcg_const_i32(r1);
1598 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1599 gen_helper_dlg(cpu_env, tmp32_1, tmp2);
1600 tcg_temp_free_i64(tmp2);
1601 tcg_temp_free_i32(tmp32_1);
1602 break;
1603 case 0x88: /* ALCG R1,D2(X2,B2) [RXY] */
1604 tmp2 = tcg_temp_new_i64();
1605 tmp3 = tcg_temp_new_i64();
1606 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1607 /* XXX possible optimization point */
1608 gen_op_calc_cc(s);
1609 tcg_gen_extu_i32_i64(tmp3, cc_op);
1610 tcg_gen_shri_i64(tmp3, tmp3, 1);
1611 tcg_gen_andi_i64(tmp3, tmp3, 1);
1612 tcg_gen_add_i64(tmp3, tmp2, tmp3);
1613 tcg_gen_add_i64(tmp3, regs[r1], tmp3);
1614 store_reg(r1, tmp3);
1615 set_cc_addu64(s, regs[r1], tmp2, tmp3);
1616 tcg_temp_free_i64(tmp2);
1617 tcg_temp_free_i64(tmp3);
1618 break;
1619 case 0x89: /* SLBG R1,D2(X2,B2) [RXY] */
1620 tmp2 = tcg_temp_new_i64();
1621 tmp32_1 = tcg_const_i32(r1);
1622 tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
1623 /* XXX possible optimization point */
1624 gen_op_calc_cc(s);
1625 gen_helper_slbg(cc_op, cpu_env, cc_op, tmp32_1, regs[r1], tmp2);
1626 set_cc_static(s);
1627 tcg_temp_free_i64(tmp2);
1628 tcg_temp_free_i32(tmp32_1);
1629 break;
1630 case 0x90: /* LLGC R1,D2(X2,B2) [RXY] */
1631 tcg_gen_qemu_ld8u(regs[r1], addr, get_mem_index(s));
1632 break;
1633 case 0x91: /* LLGH R1,D2(X2,B2) [RXY] */
1634 tcg_gen_qemu_ld16u(regs[r1], addr, get_mem_index(s));
1635 break;
1636 case 0x94: /* LLC R1,D2(X2,B2) [RXY] */
1637 tmp2 = tcg_temp_new_i64();
1638 tcg_gen_qemu_ld8u(tmp2, addr, get_mem_index(s));
1639 store_reg32_i64(r1, tmp2);
1640 tcg_temp_free_i64(tmp2);
1641 break;
1642 case 0x95: /* LLH R1,D2(X2,B2) [RXY] */
1643 tmp2 = tcg_temp_new_i64();
1644 tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
1645 store_reg32_i64(r1, tmp2);
1646 tcg_temp_free_i64(tmp2);
1647 break;
1648 case 0x96: /* ML R1,D2(X2,B2) [RXY] */
1649 tmp2 = tcg_temp_new_i64();
1650 tmp3 = load_reg((r1 + 1) & 15);
1651 tcg_gen_ext32u_i64(tmp3, tmp3);
1652 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1653 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
1654 store_reg32_i64((r1 + 1) & 15, tmp2);
1655 tcg_gen_shri_i64(tmp2, tmp2, 32);
1656 store_reg32_i64(r1, tmp2);
1657 tcg_temp_free_i64(tmp2);
1658 tcg_temp_free_i64(tmp3);
1659 break;
1660 case 0x97: /* DL R1,D2(X2,B2) [RXY] */
1661 /* reg(r1) = reg(r1, r1+1) % ld32(addr) */
1662 /* reg(r1+1) = reg(r1, r1+1) / ld32(addr) */
1663 tmp = load_reg(r1);
1664 tmp2 = tcg_temp_new_i64();
1665 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1666 tmp3 = load_reg((r1 + 1) & 15);
1667 tcg_gen_ext32u_i64(tmp2, tmp2);
1668 tcg_gen_ext32u_i64(tmp3, tmp3);
1669 tcg_gen_shli_i64(tmp, tmp, 32);
1670 tcg_gen_or_i64(tmp, tmp, tmp3);
1671
1672 tcg_gen_rem_i64(tmp3, tmp, tmp2);
1673 tcg_gen_div_i64(tmp, tmp, tmp2);
1674 store_reg32_i64((r1 + 1) & 15, tmp);
1675 store_reg32_i64(r1, tmp3);
1676 tcg_temp_free_i64(tmp);
1677 tcg_temp_free_i64(tmp2);
1678 tcg_temp_free_i64(tmp3);
1679 break;
1680 case 0x98: /* ALC R1,D2(X2,B2) [RXY] */
1681 tmp2 = tcg_temp_new_i64();
1682 tmp32_1 = load_reg32(r1);
1683 tmp32_2 = tcg_temp_new_i32();
1684 tmp32_3 = tcg_temp_new_i32();
1685 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1686 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
1687 /* XXX possible optimization point */
1688 gen_op_calc_cc(s);
1689 gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
1690 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
1691 store_reg32(r1, tmp32_3);
1692 tcg_temp_free_i64(tmp2);
1693 tcg_temp_free_i32(tmp32_1);
1694 tcg_temp_free_i32(tmp32_2);
1695 tcg_temp_free_i32(tmp32_3);
1696 break;
1697 case 0x99: /* SLB R1,D2(X2,B2) [RXY] */
1698 tmp2 = tcg_temp_new_i64();
1699 tmp32_1 = tcg_const_i32(r1);
1700 tmp32_2 = tcg_temp_new_i32();
1701 tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
1702 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
1703 /* XXX possible optimization point */
1704 gen_op_calc_cc(s);
1705 gen_helper_slb(cc_op, cpu_env, cc_op, tmp32_1, tmp32_2);
1706 set_cc_static(s);
1707 tcg_temp_free_i64(tmp2);
1708 tcg_temp_free_i32(tmp32_1);
1709 tcg_temp_free_i32(tmp32_2);
1710 break;
1711 default:
1712 LOG_DISAS("illegal e3 operation 0x%x\n", op);
1713 gen_illegal_opcode(s);
1714 break;
1715 }
1716 tcg_temp_free_i64(addr);
1717 }
1718
1719 #ifndef CONFIG_USER_ONLY
1720 static void disas_e5(CPUS390XState *env, DisasContext* s, uint64_t insn)
1721 {
1722 TCGv_i64 tmp, tmp2;
1723 int op = (insn >> 32) & 0xff;
1724
1725 tmp = get_address(s, 0, (insn >> 28) & 0xf, (insn >> 16) & 0xfff);
1726 tmp2 = get_address(s, 0, (insn >> 12) & 0xf, insn & 0xfff);
1727
1728 LOG_DISAS("disas_e5: insn %" PRIx64 "\n", insn);
1729 switch (op) {
1730 case 0x01: /* TPROT D1(B1),D2(B2) [SSE] */
1731 /* Test Protection */
1732 potential_page_fault(s);
1733 gen_helper_tprot(cc_op, tmp, tmp2);
1734 set_cc_static(s);
1735 break;
1736 default:
1737 LOG_DISAS("illegal e5 operation 0x%x\n", op);
1738 gen_illegal_opcode(s);
1739 break;
1740 }
1741
1742 tcg_temp_free_i64(tmp);
1743 tcg_temp_free_i64(tmp2);
1744 }
1745 #endif
1746
1747 static void disas_eb(CPUS390XState *env, DisasContext *s, int op, int r1,
1748 int r3, int b2, int d2)
1749 {
1750 TCGv_i64 tmp, tmp2, tmp3, tmp4;
1751 TCGv_i32 tmp32_1, tmp32_2;
1752 int i, stm_len;
1753
1754 LOG_DISAS("disas_eb: op 0x%x r1 %d r3 %d b2 %d d2 0x%x\n",
1755 op, r1, r3, b2, d2);
1756 switch (op) {
1757 case 0xc: /* SRLG R1,R3,D2(B2) [RSY] */
1758 case 0xd: /* SLLG R1,R3,D2(B2) [RSY] */
1759 case 0xa: /* SRAG R1,R3,D2(B2) [RSY] */
1760 case 0xb: /* SLAG R1,R3,D2(B2) [RSY] */
1761 case 0x1c: /* RLLG R1,R3,D2(B2) [RSY] */
1762 if (b2) {
1763 tmp = get_address(s, 0, b2, d2);
1764 tcg_gen_andi_i64(tmp, tmp, 0x3f);
1765 } else {
1766 tmp = tcg_const_i64(d2 & 0x3f);
1767 }
1768 switch (op) {
1769 case 0xc:
1770 tcg_gen_shr_i64(regs[r1], regs[r3], tmp);
1771 break;
1772 case 0xd:
1773 tcg_gen_shl_i64(regs[r1], regs[r3], tmp);
1774 break;
1775 case 0xa:
1776 tcg_gen_sar_i64(regs[r1], regs[r3], tmp);
1777 break;
1778 case 0xb:
1779 tmp2 = tcg_temp_new_i64();
1780 tmp3 = tcg_temp_new_i64();
1781 gen_op_update2_cc_i64(s, CC_OP_SLAG, regs[r3], tmp);
1782 tcg_gen_shl_i64(tmp2, regs[r3], tmp);
1783 /* override sign bit with source sign */
1784 tcg_gen_andi_i64(tmp2, tmp2, ~0x8000000000000000ULL);
1785 tcg_gen_andi_i64(tmp3, regs[r3], 0x8000000000000000ULL);
1786 tcg_gen_or_i64(regs[r1], tmp2, tmp3);
1787 tcg_temp_free_i64(tmp2);
1788 tcg_temp_free_i64(tmp3);
1789 break;
1790 case 0x1c:
1791 tcg_gen_rotl_i64(regs[r1], regs[r3], tmp);
1792 break;
1793 default:
1794 tcg_abort();
1795 break;
1796 }
1797 if (op == 0xa) {
1798 set_cc_s64(s, regs[r1]);
1799 }
1800 tcg_temp_free_i64(tmp);
1801 break;
1802 case 0x1d: /* RLL R1,R3,D2(B2) [RSY] */
1803 if (b2) {
1804 tmp = get_address(s, 0, b2, d2);
1805 tcg_gen_andi_i64(tmp, tmp, 0x3f);
1806 } else {
1807 tmp = tcg_const_i64(d2 & 0x3f);
1808 }
1809 tmp32_1 = tcg_temp_new_i32();
1810 tmp32_2 = load_reg32(r3);
1811 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
1812 switch (op) {
1813 case 0x1d:
1814 tcg_gen_rotl_i32(tmp32_1, tmp32_2, tmp32_1);
1815 break;
1816 default:
1817 tcg_abort();
1818 break;
1819 }
1820 store_reg32(r1, tmp32_1);
1821 tcg_temp_free_i64(tmp);
1822 tcg_temp_free_i32(tmp32_1);
1823 tcg_temp_free_i32(tmp32_2);
1824 break;
1825 case 0x4: /* LMG R1,R3,D2(B2) [RSE] */
1826 case 0x24: /* STMG R1,R3,D2(B2) [RSE] */
1827 stm_len = 8;
1828 goto do_mh;
1829 case 0x26: /* STMH R1,R3,D2(B2) [RSE] */
1830 case 0x96: /* LMH R1,R3,D2(B2) [RSE] */
1831 stm_len = 4;
1832 do_mh:
1833 /* Apparently, unrolling lmg/stmg of any size gains performance -
1834 even for very long ones... */
1835 tmp = get_address(s, 0, b2, d2);
1836 tmp3 = tcg_const_i64(stm_len);
1837 tmp4 = tcg_const_i64(op == 0x26 ? 32 : 4);
1838 for (i = r1;; i = (i + 1) % 16) {
1839 switch (op) {
1840 case 0x4:
1841 tcg_gen_qemu_ld64(regs[i], tmp, get_mem_index(s));
1842 break;
1843 case 0x96:
1844 tmp2 = tcg_temp_new_i64();
1845 #if HOST_LONG_BITS == 32
1846 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
1847 tcg_gen_trunc_i64_i32(TCGV_HIGH(regs[i]), tmp2);
1848 #else
1849 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
1850 tcg_gen_shl_i64(tmp2, tmp2, tmp4);
1851 tcg_gen_ext32u_i64(regs[i], regs[i]);
1852 tcg_gen_or_i64(regs[i], regs[i], tmp2);
1853 #endif
1854 tcg_temp_free_i64(tmp2);
1855 break;
1856 case 0x24:
1857 tcg_gen_qemu_st64(regs[i], tmp, get_mem_index(s));
1858 break;
1859 case 0x26:
1860 tmp2 = tcg_temp_new_i64();
1861 tcg_gen_shr_i64(tmp2, regs[i], tmp4);
1862 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
1863 tcg_temp_free_i64(tmp2);
1864 break;
1865 default:
1866 tcg_abort();
1867 }
1868 if (i == r3) {
1869 break;
1870 }
1871 tcg_gen_add_i64(tmp, tmp, tmp3);
1872 }
1873 tcg_temp_free_i64(tmp);
1874 tcg_temp_free_i64(tmp3);
1875 tcg_temp_free_i64(tmp4);
1876 break;
1877 case 0x2c: /* STCMH R1,M3,D2(B2) [RSY] */
1878 tmp = get_address(s, 0, b2, d2);
1879 tmp32_1 = tcg_const_i32(r1);
1880 tmp32_2 = tcg_const_i32(r3);
1881 potential_page_fault(s);
1882 gen_helper_stcmh(cpu_env, tmp32_1, tmp, tmp32_2);
1883 tcg_temp_free_i64(tmp);
1884 tcg_temp_free_i32(tmp32_1);
1885 tcg_temp_free_i32(tmp32_2);
1886 break;
1887 #ifndef CONFIG_USER_ONLY
1888 case 0x2f: /* LCTLG R1,R3,D2(B2) [RSE] */
1889 /* Load Control */
1890 check_privileged(s);
1891 tmp = get_address(s, 0, b2, d2);
1892 tmp32_1 = tcg_const_i32(r1);
1893 tmp32_2 = tcg_const_i32(r3);
1894 potential_page_fault(s);
1895 gen_helper_lctlg(cpu_env, tmp32_1, tmp, tmp32_2);
1896 tcg_temp_free_i64(tmp);
1897 tcg_temp_free_i32(tmp32_1);
1898 tcg_temp_free_i32(tmp32_2);
1899 break;
1900 case 0x25: /* STCTG R1,R3,D2(B2) [RSE] */
1901 /* Store Control */
1902 check_privileged(s);
1903 tmp = get_address(s, 0, b2, d2);
1904 tmp32_1 = tcg_const_i32(r1);
1905 tmp32_2 = tcg_const_i32(r3);
1906 potential_page_fault(s);
1907 gen_helper_stctg(cpu_env, tmp32_1, tmp, tmp32_2);
1908 tcg_temp_free_i64(tmp);
1909 tcg_temp_free_i32(tmp32_1);
1910 tcg_temp_free_i32(tmp32_2);
1911 break;
1912 #endif
1913 case 0x30: /* CSG R1,R3,D2(B2) [RSY] */
1914 tmp = get_address(s, 0, b2, d2);
1915 tmp32_1 = tcg_const_i32(r1);
1916 tmp32_2 = tcg_const_i32(r3);
1917 potential_page_fault(s);
1918 /* XXX rewrite in tcg */
1919 gen_helper_csg(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
1920 set_cc_static(s);
1921 tcg_temp_free_i64(tmp);
1922 tcg_temp_free_i32(tmp32_1);
1923 tcg_temp_free_i32(tmp32_2);
1924 break;
1925 case 0x3e: /* CDSG R1,R3,D2(B2) [RSY] */
1926 tmp = get_address(s, 0, b2, d2);
1927 tmp32_1 = tcg_const_i32(r1);
1928 tmp32_2 = tcg_const_i32(r3);
1929 potential_page_fault(s);
1930 /* XXX rewrite in tcg */
1931 gen_helper_cdsg(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
1932 set_cc_static(s);
1933 tcg_temp_free_i64(tmp);
1934 tcg_temp_free_i32(tmp32_1);
1935 tcg_temp_free_i32(tmp32_2);
1936 break;
1937 case 0x51: /* TMY D1(B1),I2 [SIY] */
1938 tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
1939 tmp2 = tcg_const_i64((r1 << 4) | r3);
1940 tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
1941 /* yes, this is a 32 bit operation with 64 bit tcg registers, because
1942 that incurs less conversions */
1943 cmp_64(s, tmp, tmp2, CC_OP_TM_32);
1944 tcg_temp_free_i64(tmp);
1945 tcg_temp_free_i64(tmp2);
1946 break;
1947 case 0x52: /* MVIY D1(B1),I2 [SIY] */
1948 tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
1949 tmp2 = tcg_const_i64((r1 << 4) | r3);
1950 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
1951 tcg_temp_free_i64(tmp);
1952 tcg_temp_free_i64(tmp2);
1953 break;
1954 case 0x55: /* CLIY D1(B1),I2 [SIY] */
1955 tmp3 = get_address(s, 0, b2, d2); /* SIY -> this is the 1st operand */
1956 tmp = tcg_temp_new_i64();
1957 tmp32_1 = tcg_temp_new_i32();
1958 tcg_gen_qemu_ld8u(tmp, tmp3, get_mem_index(s));
1959 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
1960 cmp_u32c(s, tmp32_1, (r1 << 4) | r3);
1961 tcg_temp_free_i64(tmp);
1962 tcg_temp_free_i64(tmp3);
1963 tcg_temp_free_i32(tmp32_1);
1964 break;
1965 case 0x80: /* ICMH R1,M3,D2(B2) [RSY] */
1966 tmp = get_address(s, 0, b2, d2);
1967 tmp32_1 = tcg_const_i32(r1);
1968 tmp32_2 = tcg_const_i32(r3);
1969 potential_page_fault(s);
1970 /* XXX split CC calculation out */
1971 gen_helper_icmh(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
1972 set_cc_static(s);
1973 tcg_temp_free_i64(tmp);
1974 tcg_temp_free_i32(tmp32_1);
1975 tcg_temp_free_i32(tmp32_2);
1976 break;
1977 default:
1978 LOG_DISAS("illegal eb operation 0x%x\n", op);
1979 gen_illegal_opcode(s);
1980 break;
1981 }
1982 }
1983
1984 static void disas_ed(CPUS390XState *env, DisasContext *s, int op, int r1,
1985 int x2, int b2, int d2, int r1b)
1986 {
1987 TCGv_i32 tmp_r1, tmp32;
1988 TCGv_i64 addr, tmp;
1989 addr = get_address(s, x2, b2, d2);
1990 tmp_r1 = tcg_const_i32(r1);
1991 switch (op) {
1992 case 0x4: /* LDEB R1,D2(X2,B2) [RXE] */
1993 potential_page_fault(s);
1994 gen_helper_ldeb(cpu_env, tmp_r1, addr);
1995 break;
1996 case 0x5: /* LXDB R1,D2(X2,B2) [RXE] */
1997 potential_page_fault(s);
1998 gen_helper_lxdb(cpu_env, tmp_r1, addr);
1999 break;
2000 case 0x9: /* CEB R1,D2(X2,B2) [RXE] */
2001 tmp = tcg_temp_new_i64();
2002 tmp32 = load_freg32(r1);
2003 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2004 set_cc_cmp_f32_i64(s, tmp32, tmp);
2005 tcg_temp_free_i64(tmp);
2006 tcg_temp_free_i32(tmp32);
2007 break;
2008 case 0xa: /* AEB R1,D2(X2,B2) [RXE] */
2009 tmp = tcg_temp_new_i64();
2010 tmp32 = tcg_temp_new_i32();
2011 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2012 tcg_gen_trunc_i64_i32(tmp32, tmp);
2013 gen_helper_aeb(cpu_env, tmp_r1, tmp32);
2014 tcg_temp_free_i64(tmp);
2015 tcg_temp_free_i32(tmp32);
2016
2017 tmp32 = load_freg32(r1);
2018 gen_set_cc_nz_f32(s, tmp32);
2019 tcg_temp_free_i32(tmp32);
2020 break;
2021 case 0xb: /* SEB R1,D2(X2,B2) [RXE] */
2022 tmp = tcg_temp_new_i64();
2023 tmp32 = tcg_temp_new_i32();
2024 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2025 tcg_gen_trunc_i64_i32(tmp32, tmp);
2026 gen_helper_seb(cpu_env, tmp_r1, tmp32);
2027 tcg_temp_free_i64(tmp);
2028 tcg_temp_free_i32(tmp32);
2029
2030 tmp32 = load_freg32(r1);
2031 gen_set_cc_nz_f32(s, tmp32);
2032 tcg_temp_free_i32(tmp32);
2033 break;
2034 case 0xd: /* DEB R1,D2(X2,B2) [RXE] */
2035 tmp = tcg_temp_new_i64();
2036 tmp32 = tcg_temp_new_i32();
2037 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2038 tcg_gen_trunc_i64_i32(tmp32, tmp);
2039 gen_helper_deb(cpu_env, tmp_r1, tmp32);
2040 tcg_temp_free_i64(tmp);
2041 tcg_temp_free_i32(tmp32);
2042 break;
2043 case 0x10: /* TCEB R1,D2(X2,B2) [RXE] */
2044 potential_page_fault(s);
2045 gen_helper_tceb(cc_op, cpu_env, tmp_r1, addr);
2046 set_cc_static(s);
2047 break;
2048 case 0x11: /* TCDB R1,D2(X2,B2) [RXE] */
2049 potential_page_fault(s);
2050 gen_helper_tcdb(cc_op, cpu_env, tmp_r1, addr);
2051 set_cc_static(s);
2052 break;
2053 case 0x12: /* TCXB R1,D2(X2,B2) [RXE] */
2054 potential_page_fault(s);
2055 gen_helper_tcxb(cc_op, cpu_env, tmp_r1, addr);
2056 set_cc_static(s);
2057 break;
2058 case 0x17: /* MEEB R1,D2(X2,B2) [RXE] */
2059 tmp = tcg_temp_new_i64();
2060 tmp32 = tcg_temp_new_i32();
2061 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2062 tcg_gen_trunc_i64_i32(tmp32, tmp);
2063 gen_helper_meeb(cpu_env, tmp_r1, tmp32);
2064 tcg_temp_free_i64(tmp);
2065 tcg_temp_free_i32(tmp32);
2066 break;
2067 case 0x19: /* CDB R1,D2(X2,B2) [RXE] */
2068 potential_page_fault(s);
2069 gen_helper_cdb(cc_op, cpu_env, tmp_r1, addr);
2070 set_cc_static(s);
2071 break;
2072 case 0x1a: /* ADB R1,D2(X2,B2) [RXE] */
2073 potential_page_fault(s);
2074 gen_helper_adb(cc_op, cpu_env, tmp_r1, addr);
2075 set_cc_static(s);
2076 break;
2077 case 0x1b: /* SDB R1,D2(X2,B2) [RXE] */
2078 potential_page_fault(s);
2079 gen_helper_sdb(cc_op, cpu_env, tmp_r1, addr);
2080 set_cc_static(s);
2081 break;
2082 case 0x1c: /* MDB R1,D2(X2,B2) [RXE] */
2083 potential_page_fault(s);
2084 gen_helper_mdb(cpu_env, tmp_r1, addr);
2085 break;
2086 case 0x1d: /* DDB R1,D2(X2,B2) [RXE] */
2087 potential_page_fault(s);
2088 gen_helper_ddb(cpu_env, tmp_r1, addr);
2089 break;
2090 case 0x1e: /* MADB R1,R3,D2(X2,B2) [RXF] */
2091 /* for RXF insns, r1 is R3 and r1b is R1 */
2092 tmp32 = tcg_const_i32(r1b);
2093 potential_page_fault(s);
2094 gen_helper_madb(cpu_env, tmp32, addr, tmp_r1);
2095 tcg_temp_free_i32(tmp32);
2096 break;
2097 default:
2098 LOG_DISAS("illegal ed operation 0x%x\n", op);
2099 gen_illegal_opcode(s);
2100 return;
2101 }
2102 tcg_temp_free_i32(tmp_r1);
2103 tcg_temp_free_i64(addr);
2104 }
2105
/*
 * Decode and translate the 0xa5 (RI-format) opcode group: insert (II*),
 * AND (NI*), OR (OI*) and load-logical (LLI*) of a 16-bit immediate into
 * one of the four halfwords of register r1.  The HH/HL/LH/LL suffix
 * selects register bits 0-15 / 16-31 / 32-47 / 48-63, i.e. value shifts
 * of 48 / 32 / 16 / 0 in the little-endian 64-bit register value.
 *
 * op - minor opcode nibble; r1 - target register; i2 - 16-bit immediate.
 */
static void disas_a5(CPUS390XState *env, DisasContext *s, int op, int r1,
                     int i2)
{
    TCGv_i64 tmp, tmp2;
    TCGv_i32 tmp32;
    LOG_DISAS("disas_a5: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
    switch (op) {
    case 0x0: /* IIHH R1,I2 [RI] */
        /* Insert the immediate into bits 0-15 without touching the rest. */
        tmp = tcg_const_i64(i2);
        tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 48, 16);
        tcg_temp_free_i64(tmp);
        break;
    case 0x1: /* IIHL R1,I2 [RI] */
        tmp = tcg_const_i64(i2);
        tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 32, 16);
        tcg_temp_free_i64(tmp);
        break;
    case 0x2: /* IILH R1,I2 [RI] */
        tmp = tcg_const_i64(i2);
        tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 16, 16);
        tcg_temp_free_i64(tmp);
        break;
    case 0x3: /* IILL R1,I2 [RI] */
        tmp = tcg_const_i64(i2);
        tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 0, 16);
        tcg_temp_free_i64(tmp);
        break;
    case 0x4: /* NIHH R1,I2 [RI] */
    case 0x8: /* OIHH R1,I2 [RI] */
        /* AND/OR the immediate into bits 0-15; for AND, the other bits of
           the mask are ones so the rest of the register is preserved. */
        tmp = load_reg(r1);
        tmp32 = tcg_temp_new_i32();
        switch (op) {
        case 0x4:
            tmp2 = tcg_const_i64((((uint64_t)i2) << 48)
                                 | 0x0000ffffffffffffULL);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0x8:
            tmp2 = tcg_const_i64(((uint64_t)i2) << 48);
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        /* CC is set from the 16-bit field that was operated on; the
           shift by 48 already leaves only those bits. */
        tcg_gen_shri_i64(tmp2, tmp, 48);
        tcg_gen_trunc_i64_i32(tmp32, tmp2);
        set_cc_nz_u32(s, tmp32);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32);
        tcg_temp_free_i64(tmp);
        break;
    case 0x5: /* NIHL R1,I2 [RI] */
    case 0x9: /* OIHL R1,I2 [RI] */
        tmp = load_reg(r1);
        tmp32 = tcg_temp_new_i32();
        switch (op) {
        case 0x5:
            tmp2 = tcg_const_i64((((uint64_t)i2) << 32)
                                 | 0xffff0000ffffffffULL);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0x9:
            tmp2 = tcg_const_i64(((uint64_t)i2) << 32);
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        /* Isolate bits 16-31 of the result for the CC. */
        tcg_gen_shri_i64(tmp2, tmp, 32);
        tcg_gen_trunc_i64_i32(tmp32, tmp2);
        tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
        set_cc_nz_u32(s, tmp32);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32);
        tcg_temp_free_i64(tmp);
        break;
    case 0x6: /* NILH R1,I2 [RI] */
    case 0xa: /* OILH R1,I2 [RI] */
        tmp = load_reg(r1);
        tmp32 = tcg_temp_new_i32();
        switch (op) {
        case 0x6:
            tmp2 = tcg_const_i64((((uint64_t)i2) << 16)
                                 | 0xffffffff0000ffffULL);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0xa:
            tmp2 = tcg_const_i64(((uint64_t)i2) << 16);
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        /* Isolate bits 32-47 for the CC (tmp was already stored, so it
           can be clobbered here). */
        tcg_gen_shri_i64(tmp, tmp, 16);
        tcg_gen_trunc_i64_i32(tmp32, tmp);
        tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
        set_cc_nz_u32(s, tmp32);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32);
        tcg_temp_free_i64(tmp);
        break;
    case 0x7: /* NILL R1,I2 [RI] */
    case 0xb: /* OILL R1,I2 [RI] */
        tmp = load_reg(r1);
        tmp32 = tcg_temp_new_i32();
        switch (op) {
        case 0x7:
            tmp2 = tcg_const_i64(i2 | 0xffffffffffff0000ULL);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0xb:
            tmp2 = tcg_const_i64(i2);
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        tcg_gen_trunc_i64_i32(tmp32, tmp);
        tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
        set_cc_nz_u32(s, tmp32); /* signedness should not matter here */
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32);
        tcg_temp_free_i64(tmp);
        break;
    case 0xc: /* LLIHH R1,I2 [RI] */
        /* Load-logical: the whole register becomes the shifted immediate. */
        tmp = tcg_const_i64( ((uint64_t)i2) << 48 );
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xd: /* LLIHL R1,I2 [RI] */
        tmp = tcg_const_i64( ((uint64_t)i2) << 32 );
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xe: /* LLILH R1,I2 [RI] */
        tmp = tcg_const_i64( ((uint64_t)i2) << 16 );
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xf: /* LLILL R1,I2 [RI] */
        tmp = tcg_const_i64(i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    default:
        LOG_DISAS("illegal a5 operation 0x%x\n", op);
        gen_illegal_opcode(s);
        return;
    }
}
2260
/* Disassemble one 0xa7-prefixed RI-format instruction: test under mask,
 * relative branches, and the halfword-immediate load/multiply/compare
 * family.  op is the low opcode nibble, r1 the first register field and
 * i2 the (sign-extended) 16-bit immediate.  */
static void disas_a7(CPUS390XState *env, DisasContext *s, int op, int r1,
                     int i2)
{
    TCGv_i64 tmp, tmp2;
    TCGv_i32 tmp32_1;
    int l1;

    LOG_DISAS("disas_a7: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
    switch (op) {
    case 0x0: /* TMLH or TMH R1,I2 [RI] */
    case 0x1: /* TMLL or TML R1,I2 [RI] */
    case 0x2: /* TMHH R1,I2 [RI] */
    case 0x3: /* TMHL R1,I2 [RI] */
        /* Test under mask: shift the addressed 16-bit field of r1 down
           to bits 0-15, then evaluate it against the immediate mask via
           the CC_OP_TM_64 condition-code computation.  */
        tmp = load_reg(r1);
        tmp2 = tcg_const_i64((uint16_t)i2);
        switch (op) {
        case 0x0:
            tcg_gen_shri_i64(tmp, tmp, 16);
            break;
        case 0x1:
            /* low halfword, no shift needed */
            break;
        case 0x2:
            tcg_gen_shri_i64(tmp, tmp, 48);
            break;
        case 0x3:
            tcg_gen_shri_i64(tmp, tmp, 32);
            break;
        }
        tcg_gen_andi_i64(tmp, tmp, 0xffff);
        cmp_64(s, tmp, tmp2, CC_OP_TM_64);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x4: /* brc m1, i2 */
        /* i2 is a halfword offset, hence the multiplication by 2.  */
        gen_brc(r1, s, i2 * 2LL);
        return;
    case 0x5: /* BRAS R1,I2 [RI] */
        /* Branch relative and save: the link info points past this
           4-byte instruction.  */
        tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        gen_goto_tb(s, 0, s->pc + i2 * 2LL);
        s->is_jmp = DISAS_TB_JUMP;
        break;
    case 0x6: /* BRCT R1,I2 [RI] */
        /* Branch relative on count (32 bit): decrement r1 and branch
           if the result is non-zero; fall through otherwise.  */
        tmp32_1 = load_reg32(r1);
        tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
        store_reg32(r1, tmp32_1);
        gen_update_cc_op(s);
        l1 = gen_new_label();
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
        gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
        gen_set_label(l1);
        gen_goto_tb(s, 1, s->pc + 4);
        s->is_jmp = DISAS_TB_JUMP;
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x7: /* BRCTG R1,I2 [RI] */
        /* As BRCT, but decrementing the full 64-bit register.  */
        tmp = load_reg(r1);
        tcg_gen_subi_i64(tmp, tmp, 1);
        store_reg(r1, tmp);
        gen_update_cc_op(s);
        l1 = gen_new_label();
        tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
        gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
        gen_set_label(l1);
        gen_goto_tb(s, 1, s->pc + 4);
        s->is_jmp = DISAS_TB_JUMP;
        tcg_temp_free_i64(tmp);
        break;
    case 0x8: /* lhi r1, i2 */
        tmp32_1 = tcg_const_i32(i2);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x9: /* lghi r1, i2 */
        tmp = tcg_const_i64(i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xc: /* MHI R1,I2 [RI] */
        tmp32_1 = load_reg32(r1);
        tcg_gen_muli_i32(tmp32_1, tmp32_1, i2);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xd: /* MGHI R1,I2 [RI] */
        tmp = load_reg(r1);
        tcg_gen_muli_i64(tmp, tmp, i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xe: /* CHI R1,I2 [RI] */
        tmp32_1 = load_reg32(r1);
        cmp_s32c(s, tmp32_1, i2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xf: /* CGHI R1,I2 [RI] */
        tmp = load_reg(r1);
        cmp_s64c(s, tmp, i2);
        tcg_temp_free_i64(tmp);
        break;
    default:
        LOG_DISAS("illegal a7 operation 0x%x\n", op);
        gen_illegal_opcode(s);
        return;
    }
}
2368
/* Disassemble one 0xb2-prefixed instruction.  This opcode space mixes
 * RRE-format register-register operations with S-format (base plus
 * displacement) control instructions; for the S-format cases the operand
 * fields are re-decoded from the raw instruction word via decode_rs().
 * Privileged operations are only built for the system emulator and are
 * guarded with check_privileged().  */
static void disas_b2(CPUS390XState *env, DisasContext *s, int op,
                     uint32_t insn)
{
    TCGv_i64 tmp, tmp2, tmp3;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
    int r1, r2;
#ifndef CONFIG_USER_ONLY
    int r3, d2, b2;
#endif

    /* RRE register fields; S-format cases re-decode below.  */
    r1 = (insn >> 4) & 0xf;
    r2 = insn & 0xf;

    LOG_DISAS("disas_b2: op 0x%x r1 %d r2 %d\n", op, r1, r2);

    switch (op) {
    case 0x22: /* IPM R1 [RRE] */
        /* Insert Program Mask: cc must be materialized before the
           helper copies it into r1.  */
        tmp32_1 = tcg_const_i32(r1);
        gen_op_calc_cc(s);
        gen_helper_ipm(cpu_env, cc_op, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x41: /* CKSM R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        potential_page_fault(s);
        gen_helper_cksm(cpu_env, tmp32_1, tmp32_2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        gen_op_movi_cc(s, 0);
        break;
    case 0x4e: /* SAR R1,R2 [RRE] */
        /* Set Access register r1 from general register r2.  */
        tmp32_1 = load_reg32(r2);
        tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, aregs[r1]));
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x4f: /* EAR R1,R2 [RRE] */
        /* Extract Access register r2 into general register r1.  */
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, aregs[r2]));
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x52: /* MSR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = load_reg32(r2);
        tcg_gen_mul_i32(tmp32_1, tmp32_1, tmp32_2);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x54: /* MVPG R1,R2 [RRE] */
        /* Move Page; r0 carries the function control bits.  */
        tmp = load_reg(0);
        tmp2 = load_reg(r1);
        tmp3 = load_reg(r2);
        potential_page_fault(s);
        gen_helper_mvpg(cpu_env, tmp, tmp2, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        /* XXX check CCO bit and set CC accordingly */
        gen_op_movi_cc(s, 0);
        break;
    case 0x55: /* MVST R1,R2 [RRE] */
        /* Move String; r0 holds the terminating character.  */
        tmp32_1 = load_reg32(0);
        tmp32_2 = tcg_const_i32(r1);
        tmp32_3 = tcg_const_i32(r2);
        potential_page_fault(s);
        gen_helper_mvst(cpu_env, tmp32_1, tmp32_2, tmp32_3);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        gen_op_movi_cc(s, 1);
        break;
    case 0x5d: /* CLST R1,R2 [RRE] */
        tmp32_1 = load_reg32(0);
        tmp32_2 = tcg_const_i32(r1);
        tmp32_3 = tcg_const_i32(r2);
        potential_page_fault(s);
        gen_helper_clst(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x5e: /* SRST R1,R2 [RRE] */
        tmp32_1 = load_reg32(0);
        tmp32_2 = tcg_const_i32(r1);
        tmp32_3 = tcg_const_i32(r2);
        potential_page_fault(s);
        gen_helper_srst(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;

#ifndef CONFIG_USER_ONLY
    case 0x02: /* STIDP D2(B2) [S] */
        /* Store CPU ID */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stidp(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x04: /* SCK D2(B2) [S] */
        /* Set Clock */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_sck(cc_op, tmp);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        break;
    case 0x05: /* STCK D2(B2) [S] */
        /* Store Clock */
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stck(cc_op, cpu_env, tmp);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        break;
    case 0x06: /* SCKC D2(B2) [S] */
        /* Set Clock Comparator */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_sckc(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x07: /* STCKC D2(B2) [S] */
        /* Store Clock Comparator */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stckc(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x08: /* SPT D2(B2) [S] */
        /* Set CPU Timer */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_spt(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x09: /* STPT D2(B2) [S] */
        /* Store CPU Timer */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stpt(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x0a: /* SPKA D2(B2) [S] */
        /* Set PSW Key from Address: the key comes from address bits,
           merged into the PSW mask in place of the current key field.  */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_andi_i64(tmp2, psw_mask, ~PSW_MASK_KEY);
        tcg_gen_shli_i64(tmp, tmp, PSW_SHIFT_KEY - 4);
        tcg_gen_or_i64(psw_mask, tmp2, tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp);
        break;
    case 0x0d: /* PTLB [S] */
        /* Purge TLB */
        check_privileged(s);
        gen_helper_ptlb(cpu_env);
        break;
    case 0x10: /* SPX D2(B2) [S] */
        /* Set Prefix Register */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_spx(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x11: /* STPX D2(B2) [S] */
        /* Store Prefix */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_ld_i64(tmp2, cpu_env, offsetof(CPUS390XState, psa));
        tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x12: /* STAP D2(B2) [S] */
        /* Store CPU Address */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, cpu_num));
        tcg_gen_extu_i32_i64(tmp2, tmp32_1);
        tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x21: /* IPTE R1,R2 [RRE] */
        /* Invalidate PTE */
        check_privileged(s);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp = load_reg(r1);
        tmp2 = load_reg(r2);
        gen_helper_ipte(cpu_env, tmp, tmp2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x29: /* ISKE R1,R2 [RRE] */
        /* Insert Storage Key Extended */
        check_privileged(s);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp = load_reg(r2);
        tmp2 = tcg_temp_new_i64();
        gen_helper_iske(tmp2, cpu_env, tmp);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x2a: /* RRBE R1,R2 [RRE] */
        /* Reset Reference Bit Extended */
        check_privileged(s);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp32_1 = load_reg32(r1);
        tmp = load_reg(r2);
        gen_helper_rrbe(cc_op, cpu_env, tmp32_1, tmp);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x2b: /* SSKE R1,R2 [RRE] */
        /* Set Storage Key Extended */
        check_privileged(s);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp32_1 = load_reg32(r1);
        tmp = load_reg(r2);
        gen_helper_sske(cpu_env, tmp32_1, tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x34: /* STCH ? */
        /* Store Subchannel: not implemented, report cc 3.  */
        check_privileged(s);
        gen_op_movi_cc(s, 3);
        break;
    case 0x46: /* STURA R1,R2 [RRE] */
        /* Store Using Real Address */
        check_privileged(s);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp32_1 = load_reg32(r1);
        tmp = load_reg(r2);
        potential_page_fault(s);
        gen_helper_stura(cpu_env, tmp, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x50: /* CSP R1,R2 [RRE] */
        /* Compare And Swap And Purge */
        check_privileged(s);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        gen_helper_csp(cc_op, cpu_env, tmp32_1, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x5f: /* CHSC ? */
        /* Channel Subsystem Call: not implemented, report cc 3.  */
        check_privileged(s);
        gen_op_movi_cc(s, 3);
        break;
    case 0x78: /* STCKE D2(B2) [S] */
        /* Store Clock Extended */
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stcke(cc_op, cpu_env, tmp);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        break;
    case 0x79: /* SACF D2(B2) [S] */
        /* Set Address Space Control Fast */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_sacf(cpu_env, tmp);
        tcg_temp_free_i64(tmp);
        /* addressing mode has changed, so end the block */
        s->pc = s->next_pc;
        update_psw_addr(s);
        s->is_jmp = DISAS_JUMP;
        break;
    case 0x7d: /* STSI D2,(B2) [S] */
        /* Store System Information; function code in r0, selectors in r1.  */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = load_reg32(0);
        tmp32_2 = load_reg32(1);
        potential_page_fault(s);
        gen_helper_stsi(cc_op, cpu_env, tmp, tmp32_1, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x9d: /* LFPC D2(B2) [S] */
        /* Load the floating-point control register from storage.  */
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xb1: /* STFL D2(B2) [S] */
        /* Store Facility List (CPU features) at 200; only a fixed
           feature word is advertised.  */
        check_privileged(s);
        tmp2 = tcg_const_i64(0xc0000000);
        tmp = tcg_const_i64(200);
        tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp);
        break;
    case 0xb2: /* LPSWE D2(B2) [S] */
        /* Load PSW Extended: 16 bytes, mask then address.  */
        check_privileged(s);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
        tcg_gen_addi_i64(tmp, tmp, 8);
        tcg_gen_qemu_ld64(tmp3, tmp, get_mem_index(s));
        gen_helper_load_psw(cpu_env, tmp2, tmp3);
        /* we need to keep cc_op intact */
        s->is_jmp = DISAS_JUMP;
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x20: /* SERVC R1,R2 [RRE] */
        /* SCLP Service call (PV hypercall) */
        check_privileged(s);
        potential_page_fault(s);
        tmp32_1 = load_reg32(r2);
        tmp = load_reg(r1);
        gen_helper_servc(cc_op, cpu_env, tmp32_1, tmp);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
#endif
    default:
        LOG_DISAS("illegal b2 operation 0x%x\n", op);
        gen_illegal_opcode(s);
        break;
    }
}
2752
/* Disassemble one 0xb3-prefixed instruction: the binary floating point
 * (BFP) register-register operations.  Most cases delegate to a helper;
 * FP_HELPER passes (r1, r2), FP_HELPER_CC additionally routes the
 * helper's condition code into cc_op.  For RRF-format cases the field
 * mapping is noted inline.  */
static void disas_b3(CPUS390XState *env, DisasContext *s, int op, int m3,
                     int r1, int r2)
{
    TCGv_i64 tmp;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
    LOG_DISAS("disas_b3: op 0x%x m3 0x%x r1 %d r2 %d\n", op, m3, r1, r2);
#define FP_HELPER(i) \
    tmp32_1 = tcg_const_i32(r1); \
    tmp32_2 = tcg_const_i32(r2); \
    gen_helper_ ## i(cpu_env, tmp32_1, tmp32_2); \
    tcg_temp_free_i32(tmp32_1); \
    tcg_temp_free_i32(tmp32_2);

#define FP_HELPER_CC(i) \
    tmp32_1 = tcg_const_i32(r1); \
    tmp32_2 = tcg_const_i32(r2); \
    gen_helper_ ## i(cc_op, cpu_env, tmp32_1, tmp32_2); \
    set_cc_static(s); \
    tcg_temp_free_i32(tmp32_1); \
    tcg_temp_free_i32(tmp32_2);

    switch (op) {
    case 0x0: /* LPEBR R1,R2 [RRE] */
        FP_HELPER_CC(lpebr);
        break;
    case 0x2: /* LTEBR R1,R2 [RRE] */
        FP_HELPER_CC(ltebr);
        break;
    case 0x3: /* LCEBR R1,R2 [RRE] */
        FP_HELPER_CC(lcebr);
        break;
    case 0x4: /* LDEBR R1,R2 [RRE] */
        FP_HELPER(ldebr);
        break;
    case 0x5: /* LXDBR R1,R2 [RRE] */
        FP_HELPER(lxdbr);
        break;
    case 0x9: /* CEBR R1,R2 [RRE] */
        FP_HELPER_CC(cebr);
        break;
    case 0xa: /* AEBR R1,R2 [RRE] */
        FP_HELPER_CC(aebr);
        break;
    case 0xb: /* SEBR R1,R2 [RRE] */
        FP_HELPER_CC(sebr);
        break;
    case 0xd: /* DEBR R1,R2 [RRE] */
        FP_HELPER(debr);
        break;
    case 0x10: /* LPDBR R1,R2 [RRE] */
        FP_HELPER_CC(lpdbr);
        break;
    case 0x12: /* LTDBR R1,R2 [RRE] */
        FP_HELPER_CC(ltdbr);
        break;
    case 0x13: /* LCDBR R1,R2 [RRE] */
        FP_HELPER_CC(lcdbr);
        break;
    case 0x15: /* SQDBR R1,R2 [RRE] */
        FP_HELPER(sqdbr);
        break;
    case 0x17: /* MEEBR R1,R2 [RRE] */
        FP_HELPER(meebr);
        break;
    case 0x19: /* CDBR R1,R2 [RRE] */
        FP_HELPER_CC(cdbr);
        break;
    case 0x1a: /* ADBR R1,R2 [RRE] */
        FP_HELPER_CC(adbr);
        break;
    case 0x1b: /* SDBR R1,R2 [RRE] */
        FP_HELPER_CC(sdbr);
        break;
    case 0x1c: /* MDBR R1,R2 [RRE] */
        FP_HELPER(mdbr);
        break;
    case 0x1d: /* DDBR R1,R2 [RRE] */
        FP_HELPER(ddbr);
        break;
    case 0xe: /* MAEBR R1,R3,R2 [RRF] */
    case 0x1e: /* MADBR R1,R3,R2 [RRF] */
    case 0x1f: /* MSDBR R1,R3,R2 [RRF] */
        /* for RRF insns, m3 is R1, r1 is R3, and r2 is R2 */
        tmp32_1 = tcg_const_i32(m3);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(r1);
        switch (op) {
        case 0xe:
            gen_helper_maebr(cpu_env, tmp32_1, tmp32_3, tmp32_2);
            break;
        case 0x1e:
            gen_helper_madbr(cpu_env, tmp32_1, tmp32_3, tmp32_2);
            break;
        case 0x1f:
            gen_helper_msdbr(cpu_env, tmp32_1, tmp32_3, tmp32_2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x40: /* LPXBR R1,R2 [RRE] */
        FP_HELPER_CC(lpxbr);
        break;
    case 0x42: /* LTXBR R1,R2 [RRE] */
        FP_HELPER_CC(ltxbr);
        break;
    case 0x43: /* LCXBR R1,R2 [RRE] */
        FP_HELPER_CC(lcxbr);
        break;
    case 0x44: /* LEDBR R1,R2 [RRE] */
        FP_HELPER(ledbr);
        break;
    case 0x45: /* LDXBR R1,R2 [RRE] */
        FP_HELPER(ldxbr);
        break;
    case 0x46: /* LEXBR R1,R2 [RRE] */
        FP_HELPER(lexbr);
        break;
    case 0x49: /* CXBR R1,R2 [RRE] */
        FP_HELPER_CC(cxbr);
        break;
    case 0x4a: /* AXBR R1,R2 [RRE] */
        FP_HELPER_CC(axbr);
        break;
    case 0x4b: /* SXBR R1,R2 [RRE] */
        FP_HELPER_CC(sxbr);
        break;
    case 0x4c: /* MXBR R1,R2 [RRE] */
        FP_HELPER(mxbr);
        break;
    case 0x4d: /* DXBR R1,R2 [RRE] */
        FP_HELPER(dxbr);
        break;
    case 0x65: /* LXR R1,R2 [RRE] */
        /* 128-bit move: copy both halves of the extended FP pair.  */
        tmp = load_freg(r2);
        store_freg(r1, tmp);
        tcg_temp_free_i64(tmp);
        tmp = load_freg(r2 + 2);
        store_freg(r1 + 2, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x74: /* LZER R1 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        gen_helper_lzer(cpu_env, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x75: /* LZDR R1 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        gen_helper_lzdr(cpu_env, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x76: /* LZXR R1 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        gen_helper_lzxr(cpu_env, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x84: /* SFPC R1 [RRE] */
        /* Set the floating-point control register from r1.  */
        tmp32_1 = load_reg32(r1);
        tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x8c: /* EFPC R1 [RRE] */
        /* Extract the floating-point control register into r1.  */
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x94: /* CEFBR R1,R2 [RRE] */
    case 0x95: /* CDFBR R1,R2 [RRE] */
    case 0x96: /* CXFBR R1,R2 [RRE] */
        /* Convert from 32-bit fixed to short/long/extended BFP.  */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = load_reg32(r2);
        switch (op) {
        case 0x94:
            gen_helper_cefbr(cpu_env, tmp32_1, tmp32_2);
            break;
        case 0x95:
            gen_helper_cdfbr(cpu_env, tmp32_1, tmp32_2);
            break;
        case 0x96:
            gen_helper_cxfbr(cpu_env, tmp32_1, tmp32_2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x98: /* CFEBR R1,R2 [RRE] */
    case 0x99: /* CFDBR R1,R2 [RRE] */
    case 0x9a: /* CFXBR R1,R2 [RRE] */
        /* Convert to 32-bit fixed; m3 carries the rounding mode.  */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(m3);
        switch (op) {
        case 0x98:
            gen_helper_cfebr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
            break;
        case 0x99:
            gen_helper_cfdbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
            break;
        case 0x9a:
            gen_helper_cfxbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
            break;
        default:
            tcg_abort();
        }
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0xa4: /* CEGBR R1,R2 [RRE] */
    case 0xa5: /* CDGBR R1,R2 [RRE] */
        /* Convert from 64-bit fixed to short/long BFP.  */
        tmp32_1 = tcg_const_i32(r1);
        tmp = load_reg(r2);
        switch (op) {
        case 0xa4:
            gen_helper_cegbr(cpu_env, tmp32_1, tmp);
            break;
        case 0xa5:
            gen_helper_cdgbr(cpu_env, tmp32_1, tmp);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0xa6: /* CXGBR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp = load_reg(r2);
        gen_helper_cxgbr(cpu_env, tmp32_1, tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0xa8: /* CGEBR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(m3);
        gen_helper_cgebr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0xa9: /* CGDBR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(m3);
        gen_helper_cgdbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0xaa: /* CGXBR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(m3);
        gen_helper_cgxbr(cc_op, cpu_env, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    default:
        LOG_DISAS("illegal b3 operation 0x%x\n", op);
        gen_illegal_opcode(s);
        break;
    }

#undef FP_HELPER_CC
#undef FP_HELPER
}
3031
/* Disassemble one 0xb9-prefixed RRE-format instruction: 64-bit and
 * sign/zero-extending loads, multiply/divide, logical ops, byte-swaps
 * and add/subtract-with-carry on the general registers.  */
static void disas_b9(CPUS390XState *env, DisasContext *s, int op, int r1,
                     int r2)
{
    TCGv_i64 tmp, tmp2, tmp3;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;

    LOG_DISAS("disas_b9: op 0x%x r1 %d r2 %d\n", op, r1, r2);
    switch (op) {
    case 0x0: /* LPGR R1,R2 [RRE] */
    case 0x1: /* LNGR R1,R2 [RRE] */
    case 0x2: /* LTGR R1,R2 [RRE] */
    case 0x3: /* LCGR R1,R2 [RRE] */
    case 0x10: /* LPGFR R1,R2 [RRE] */
    case 0x11: /* LNGFR R1,R2 [RRE] */
    case 0x12: /* LTGFR R1,R2 [RRE] */
    case 0x13: /* LCGFR R1,R2 [RRE] */
        /* Bit 4 of the op selects the ...GFR variants, which take a
           sign-extended 32-bit source; the low nibble picks
           positive / negative / test / complement.  */
        if (op & 0x10) {
            tmp = load_reg32_i64(r2);
        } else {
            tmp = load_reg(r2);
        }
        switch (op & 0xf) {
        case 0x0: /* LP?GR */
            set_cc_abs64(s, tmp);
            gen_helper_abs_i64(tmp, tmp);
            store_reg(r1, tmp);
            break;
        case 0x1: /* LN?GR */
            set_cc_nabs64(s, tmp);
            gen_helper_nabs_i64(tmp, tmp);
            store_reg(r1, tmp);
            break;
        case 0x2: /* LT?GR */
            if (r1 != r2) {
                store_reg(r1, tmp);
            }
            set_cc_s64(s, tmp);
            break;
        case 0x3: /* LC?GR */
            /* Negate directly into the destination register.  */
            tcg_gen_neg_i64(regs[r1], tmp);
            set_cc_comp64(s, regs[r1]);
            break;
        }
        tcg_temp_free_i64(tmp);
        break;
    case 0x4: /* LGR R1,R2 [RRE] */
        store_reg(r1, regs[r2]);
        break;
    case 0x6: /* LGBR R1,R2 [RRE] */
        tmp2 = load_reg(r2);
        tcg_gen_ext8s_i64(tmp2, tmp2);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0xc: /* MSGR R1,R2 [RRE] */
    case 0x1c: /* MSGFR R1,R2 [RRE] */
        tmp = load_reg(r1);
        tmp2 = load_reg(r2);
        if (op == 0x1c) {
            /* MSGFR multiplies by the sign-extended low 32 bits.  */
            tcg_gen_ext32s_i64(tmp2, tmp2);
        }
        tcg_gen_mul_i64(tmp, tmp, tmp2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0xd: /* DSGR R1,R2 [RRE] */
    case 0x1d: /* DSGFR R1,R2 [RRE] */
        /* Dividend is in r1+1; quotient goes to r1+1, remainder to r1.  */
        tmp = load_reg(r1 + 1);
        if (op == 0xd) {
            tmp2 = load_reg(r2);
        } else {
            tmp32_1 = load_reg32(r2);
            tmp2 = tcg_temp_new_i64();
            tcg_gen_ext_i32_i64(tmp2, tmp32_1);
            tcg_temp_free_i32(tmp32_1);
        }
        tmp3 = tcg_temp_new_i64();
        tcg_gen_div_i64(tmp3, tmp, tmp2);
        store_reg(r1 + 1, tmp3);
        tcg_gen_rem_i64(tmp3, tmp, tmp2);
        store_reg(r1, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x14: /* LGFR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tmp = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(tmp, tmp32_1);
        store_reg(r1, tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x16: /* LLGFR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tmp = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(tmp, tmp32_1);
        store_reg(r1, tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x17: /* LLGTR R1,R2 [RRE] */
        /* Load 31 bits: mask off the top bit before zero-extending.  */
        tmp32_1 = load_reg32(r2);
        tmp = tcg_temp_new_i64();
        tcg_gen_andi_i32(tmp32_1, tmp32_1, 0x7fffffffUL);
        tcg_gen_extu_i32_i64(tmp, tmp32_1);
        store_reg(r1, tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x0f: /* LRVGR R1,R2 [RRE] */
        tcg_gen_bswap64_i64(regs[r1], regs[r2]);
        break;
    case 0x1f: /* LRVR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x20: /* CGR R1,R2 [RRE] */
    case 0x30: /* CGFR R1,R2 [RRE] */
        tmp2 = load_reg(r2);
        if (op == 0x30) {
            /* CGFR compares against the sign-extended low 32 bits.  */
            tcg_gen_ext32s_i64(tmp2, tmp2);
        }
        tmp = load_reg(r1);
        cmp_s64(s, tmp, tmp2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x21: /* CLGR R1,R2 [RRE] */
    case 0x31: /* CLGFR R1,R2 [RRE] */
        tmp2 = load_reg(r2);
        if (op == 0x31) {
            /* CLGFR compares against the zero-extended low 32 bits.  */
            tcg_gen_ext32u_i64(tmp2, tmp2);
        }
        tmp = load_reg(r1);
        cmp_u64(s, tmp, tmp2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x26: /* LBR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tcg_gen_ext8s_i32(tmp32_1, tmp32_1);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x27: /* LHR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tcg_gen_ext16s_i32(tmp32_1, tmp32_1);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x80: /* NGR R1,R2 [RRE] */
    case 0x81: /* OGR R1,R2 [RRE] */
    case 0x82: /* XGR R1,R2 [RRE] */
        tmp = load_reg(r1);
        tmp2 = load_reg(r2);
        switch (op) {
        case 0x80:
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0x81:
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        case 0x82:
            tcg_gen_xor_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        set_cc_nz_u64(s, tmp);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x83: /* FLOGR R1,R2 [RRE] */
        tmp = load_reg(r2);
        tmp32_1 = tcg_const_i32(r1);
        gen_helper_flogr(cc_op, cpu_env, tmp32_1, tmp);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x84: /* LLGCR R1,R2 [RRE] */
        tmp = load_reg(r2);
        tcg_gen_andi_i64(tmp, tmp, 0xff);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x85: /* LLGHR R1,R2 [RRE] */
        tmp = load_reg(r2);
        tcg_gen_andi_i64(tmp, tmp, 0xffff);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x87: /* DLGR R1,R2 [RRE] */
        /* 128/64 unsigned divide, done in a helper.  */
        tmp32_1 = tcg_const_i32(r1);
        tmp = load_reg(r2);
        gen_helper_dlg(cpu_env, tmp32_1, tmp);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x88: /* ALCGR R1,R2 [RRE] */
        /* Add logical with carry: carry is bit 1 of the current cc.  */
        tmp = load_reg(r1);
        tmp2 = load_reg(r2);
        tmp3 = tcg_temp_new_i64();
        gen_op_calc_cc(s);
        tcg_gen_extu_i32_i64(tmp3, cc_op);
        tcg_gen_shri_i64(tmp3, tmp3, 1);
        tcg_gen_andi_i64(tmp3, tmp3, 1);
        tcg_gen_add_i64(tmp3, tmp2, tmp3);
        tcg_gen_add_i64(tmp3, tmp, tmp3);
        store_reg(r1, tmp3);
        set_cc_addu64(s, tmp, tmp2, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x89: /* SLBGR R1,R2 [RRE] */
        tmp = load_reg(r1);
        tmp2 = load_reg(r2);
        tmp32_1 = tcg_const_i32(r1);
        gen_op_calc_cc(s);
        gen_helper_slbg(cc_op, cpu_env, cc_op, tmp32_1, tmp, tmp2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x94: /* LLCR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xff);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x95: /* LLHR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xffff);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x96: /* MLR R1,R2 [RRE] */
        /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
        /* 32x32 -> 64 unsigned multiply; high word to r1, low to r1+1.  */
        tmp2 = load_reg(r2);
        tmp3 = load_reg((r1 + 1) & 15);
        tcg_gen_ext32u_i64(tmp2, tmp2);
        tcg_gen_ext32u_i64(tmp3, tmp3);
        tcg_gen_mul_i64(tmp2, tmp2, tmp3);
        store_reg32_i64((r1 + 1) & 15, tmp2);
        tcg_gen_shri_i64(tmp2, tmp2, 32);
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x97: /* DLR R1,R2 [RRE] */
        /* reg(r1) = reg(r1, r1+1) % reg(r2) */
        /* reg(r1+1) = reg(r1, r1+1) / reg(r2) */
        tmp = load_reg(r1);
        tmp2 = load_reg(r2);
        tmp3 = load_reg((r1 + 1) & 15);
        tcg_gen_ext32u_i64(tmp2, tmp2);
        tcg_gen_ext32u_i64(tmp3, tmp3);
        /* Assemble the 64-bit dividend from the r1/r1+1 pair.  */
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(tmp, tmp, tmp3);

        tcg_gen_rem_i64(tmp3, tmp, tmp2);
        tcg_gen_div_i64(tmp, tmp, tmp2);
        store_reg32_i64((r1 + 1) & 15, tmp);
        store_reg32_i64(r1, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x98: /* ALCR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = load_reg32(r2);
        tmp32_3 = tcg_temp_new_i32();
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
        set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
        store_reg32(r1, tmp32_3);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x99: /* SLBR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tmp32_2 = tcg_const_i32(r1);
        gen_op_calc_cc(s);
        gen_helper_slb(cc_op, cpu_env, cc_op, tmp32_2, tmp32_1);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    default:
        LOG_DISAS("illegal b9 operation 0x%x\n", op);
        gen_illegal_opcode(s);
        break;
    }
}
3335
/* Translate one C0xx (RIL-format) instruction: a 4-bit opcode extension
 * in OP, a register/mask field R1 and a signed 32-bit immediate I2.
 * Emits TCG ops into the current translation block; may end the TB
 * (branches set s->is_jmp).  Illegal extensions raise an operation
 * exception via gen_illegal_opcode().  */
static void disas_c0(CPUS390XState *env, DisasContext *s, int op, int r1, int i2)
{
    TCGv_i64 tmp;
    TCGv_i32 tmp32_1, tmp32_2;
    /* For the relative ops (LARL/BRCL/BRASL) i2 counts halfwords, so the
       absolute target is pc + 2*i2 (sign carried by the multiplication). */
    uint64_t target = s->pc + i2 * 2LL;
    int l1;

    LOG_DISAS("disas_c0: op 0x%x r1 %d i2 %d\n", op, r1, i2);

    switch (op) {
    case 0: /* larl r1, i2 */
        /* Load Address Relative Long: r1 = pc-relative target address.  */
        tmp = tcg_const_i64(target);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x1: /* LGFI R1,I2 [RIL] */
        /* Load sign-extended 32-bit immediate into the full 64-bit reg.  */
        tmp = tcg_const_i64((int64_t)i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x4: /* BRCL M1,I2 [RIL] */
        /* Branch Relative on Condition Long; r1 holds the mask m1.  */
        if (r1 == 15) { /* m1 == r1 */
            /* Mask 15 matches every condition: unconditional branch.  */
            gen_goto_tb(s, 0, target);
            s->is_jmp = DISAS_TB_JUMP;
            break;
        }
        /* m1 & (1 << (3 - cc)) */
        tmp32_1 = tcg_const_i32(3);
        tmp32_2 = tcg_const_i32(1);
        /* Materialize the condition code into cc_op before testing it.  */
        gen_op_calc_cc(s);
        tcg_gen_sub_i32(tmp32_1, tmp32_1, cc_op);
        tcg_gen_shl_i32(tmp32_2, tmp32_2, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tmp32_1 = tcg_const_i32(r1); /* m1 == r1 */
        tcg_gen_and_i32(tmp32_1, tmp32_1, tmp32_2);
        l1 = gen_new_label();
        /* Mask bit clear -> condition not selected -> skip the branch.  */
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
        gen_goto_tb(s, 0, target);
        gen_set_label(l1);
        /* Fall-through path: continue after this 6-byte instruction.  */
        gen_goto_tb(s, 1, s->pc + 6);
        s->is_jmp = DISAS_TB_JUMP;
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x5: /* brasl r1, i2 */
        /* Branch Relative and Save Long: link info (return address) goes
           into r1, then branch to the relative target.  */
        tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 6));
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        gen_goto_tb(s, 0, target);
        s->is_jmp = DISAS_TB_JUMP;
        break;
    case 0x7: /* XILF R1,I2 [RIL] */
    case 0xb: /* NILF R1,I2 [RIL] */
    case 0xd: /* OILF R1,I2 [RIL] */
        /* XOR/AND/OR Immediate on the Low Fullword of r1; the high half
           of the 64-bit register is untouched (store_reg32).  */
        tmp32_1 = load_reg32(r1);
        switch (op) {
        case 0x7:
            tcg_gen_xori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
            break;
        case 0xb:
            tcg_gen_andi_i32(tmp32_1, tmp32_1, (uint32_t)i2);
            break;
        case 0xd:
            tcg_gen_ori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
            break;
        default:
            tcg_abort();
        }
        store_reg32(r1, tmp32_1);
        /* CC reflects zero/non-zero of the 32-bit result.  */
        set_cc_nz_u32(s, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x9: /* IILF R1,I2 [RIL] */
        /* Insert Immediate into the Low Fullword; high half preserved.  */
        tmp32_1 = tcg_const_i32((uint32_t)i2);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xa: /* NIHF R1,I2 [RIL] */
        /* AND Immediate on the High Fullword: mask is i2 in bits 63..32
           with all-ones below, so the low half passes through.  */
        tmp = load_reg(r1);
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_andi_i64(tmp, tmp, (((uint64_t)((uint32_t)i2)) << 32)
                                   | 0xffffffffULL);
        store_reg(r1, tmp);
        /* CC is set from the high 32 bits of the result.  */
        tcg_gen_shri_i64(tmp, tmp, 32);
        tcg_gen_trunc_i64_i32(tmp32_1, tmp);
        set_cc_nz_u32(s, tmp32_1);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xe: /* LLIHF R1,I2 [RIL] */
        /* Load Logical Immediate into the High fullword, low half zeroed. */
        tmp = tcg_const_i64(((uint64_t)(uint32_t)i2) << 32);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xf: /* LLILF R1,I2 [RIL] */
        /* Load Logical Immediate into the Low fullword, high half zeroed. */
        tmp = tcg_const_i64((uint32_t)i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    default:
        LOG_DISAS("illegal c0 operation 0x%x\n", op);
        gen_illegal_opcode(s);
        break;
    }
}
3441
3442 static void disas_c2(CPUS390XState *env, DisasContext *s, int op, int r1,
3443 int i2)
3444 {
3445 TCGv_i64 tmp;
3446 TCGv_i32 tmp32_1;
3447
3448 switch (op) {
3449 case 0xc: /* CGFI R1,I2 [RIL] */
3450 tmp = load_reg(r1);
3451 cmp_s64c(s, tmp, (int64_t)i2);
3452 tcg_temp_free_i64(tmp);
3453 break;
3454 case 0xe: /* CLGFI R1,I2 [RIL] */
3455 tmp = load_reg(r1);
3456 cmp_u64c(s, tmp, (uint64_t)(uint32_t)i2);
3457 tcg_temp_free_i64(tmp);
3458 break;
3459 case 0xd: /* CFI R1,I2 [RIL] */
3460 tmp32_1 = load_reg32(r1);
3461 cmp_s32c(s, tmp32_1, i2);
3462 tcg_temp_free_i32(tmp32_1);
3463 break;
3464 case 0xf: /* CLFI R1,I2 [RIL] */
3465 tmp32_1 = load_reg32(r1);
3466 cmp_u32c(s, tmp32_1, i2);
3467 tcg_temp_free_i32(tmp32_1);
3468 break;
3469 default:
3470 LOG_DISAS("illegal c2 operation 0x%x\n", op);
3471 gen_illegal_opcode(s);
3472 break;
3473 }
3474 }
3475
3476 static void gen_and_or_xor_i32(int opc, TCGv_i32 tmp, TCGv_i32 tmp2)
3477 {
3478 switch (opc & 0xf) {
3479 case 0x4:
3480 tcg_gen_and_i32(tmp, tmp, tmp2);
3481 break;
3482 case 0x6:
3483 tcg_gen_or_i32(tmp, tmp, tmp2);
3484 break;
3485 case 0x7:
3486 tcg_gen_xor_i32(tmp, tmp, tmp2);
3487 break;
3488 default:
3489 tcg_abort();
3490 }
3491 }
3492
3493 static void disas_s390_insn(CPUS390XState *env, DisasContext *s)
3494 {
3495 TCGv_i64 tmp, tmp2, tmp3, tmp4;
3496 TCGv_i32 tmp32_1, tmp32_2, tmp32_3, tmp32_4;
3497 unsigned char opc;
3498 uint64_t insn;
3499 int op, r1, r2, r3, d1, d2, x2, b1, b2, i, i2, r1b;
3500 TCGv_i32 vl;
3501 int l1;
3502
3503 opc = cpu_ldub_code(env, s->pc);
3504 LOG_DISAS("opc 0x%x\n", opc);
3505
3506 switch (opc) {
3507 #ifndef CONFIG_USER_ONLY
3508 case 0x01: /* SAM */
3509 insn = ld_code2(env, s->pc);
3510 /* set addressing mode, but we only do 64bit anyways */
3511 break;
3512 #endif
3513 case 0x6: /* BCTR R1,R2 [RR] */
3514 insn = ld_code2(env, s->pc);
3515 decode_rr(s, insn, &r1, &r2);
3516 tmp32_1 = load_reg32(r1);
3517 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
3518 store_reg32(r1, tmp32_1);
3519
3520 if (r2) {
3521 gen_update_cc_op(s);
3522 l1 = gen_new_label();
3523 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
3524
3525 /* not taking the branch, jump to after the instruction */
3526 gen_goto_tb(s, 0, s->pc + 2);
3527 gen_set_label(l1);
3528
3529 /* take the branch, move R2 into psw.addr */
3530 tmp32_1 = load_reg32(r2);
3531 tmp = tcg_temp_new_i64();
3532 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3533 tcg_gen_mov_i64(psw_addr, tmp);
3534 s->is_jmp = DISAS_JUMP;
3535 tcg_temp_free_i32(tmp32_1);
3536 tcg_temp_free_i64(tmp);
3537 }
3538 break;
3539 case 0x7: /* BCR M1,R2 [RR] */
3540 insn = ld_code2(env, s->pc);
3541 decode_rr(s, insn, &r1, &r2);
3542 if (r2) {
3543 tmp = load_reg(r2);
3544 gen_bcr(s, r1, tmp, s->pc);
3545 tcg_temp_free_i64(tmp);
3546 s->is_jmp = DISAS_TB_JUMP;
3547 } else {
3548 /* XXX: "serialization and checkpoint-synchronization function"? */
3549 }
3550 break;
3551 case 0xa: /* SVC I [RR] */
3552 insn = ld_code2(env, s->pc);
3553 debug_insn(insn);
3554 i = insn & 0xff;
3555 update_psw_addr(s);
3556 gen_op_calc_cc(s);
3557 tmp32_1 = tcg_const_i32(i);
3558 tmp32_2 = tcg_const_i32(s->next_pc - s->pc);
3559 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, int_svc_code));
3560 tcg_gen_st_i32(tmp32_2, cpu_env, offsetof(CPUS390XState, int_svc_ilen));
3561 gen_exception(EXCP_SVC);
3562 s->is_jmp = DISAS_EXCP;
3563 tcg_temp_free_i32(tmp32_1);
3564 tcg_temp_free_i32(tmp32_2);
3565 break;
3566 case 0xd: /* BASR R1,R2 [RR] */
3567 insn = ld_code2(env, s->pc);
3568 decode_rr(s, insn, &r1, &r2);
3569 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 2));
3570 store_reg(r1, tmp);
3571 if (r2) {
3572 tmp2 = load_reg(r2);
3573 tcg_gen_mov_i64(psw_addr, tmp2);
3574 tcg_temp_free_i64(tmp2);
3575 s->is_jmp = DISAS_JUMP;
3576 }
3577 tcg_temp_free_i64(tmp);
3578 break;
3579 case 0xe: /* MVCL R1,R2 [RR] */
3580 insn = ld_code2(env, s->pc);
3581 decode_rr(s, insn, &r1, &r2);
3582 tmp32_1 = tcg_const_i32(r1);
3583 tmp32_2 = tcg_const_i32(r2);
3584 potential_page_fault(s);
3585 gen_helper_mvcl(cc_op, cpu_env, tmp32_1, tmp32_2);
3586 set_cc_static(s);
3587 tcg_temp_free_i32(tmp32_1);
3588 tcg_temp_free_i32(tmp32_2);
3589 break;
3590 case 0x10: /* LPR R1,R2 [RR] */
3591 insn = ld_code2(env, s->pc);
3592 decode_rr(s, insn, &r1, &r2);
3593 tmp32_1 = load_reg32(r2);
3594 set_cc_abs32(s, tmp32_1);
3595 gen_helper_abs_i32(tmp32_1, tmp32_1);
3596 store_reg32(r1, tmp32_1);
3597 tcg_temp_free_i32(tmp32_1);
3598 break;
3599 case 0x11: /* LNR R1,R2 [RR] */
3600 insn = ld_code2(env, s->pc);
3601 decode_rr(s, insn, &r1, &r2);
3602 tmp32_1 = load_reg32(r2);
3603 set_cc_nabs32(s, tmp32_1);
3604 gen_helper_nabs_i32(tmp32_1, tmp32_1);
3605 store_reg32(r1, tmp32_1);
3606 tcg_temp_free_i32(tmp32_1);
3607 break;
3608 case 0x12: /* LTR R1,R2 [RR] */
3609 insn = ld_code2(env, s->pc);
3610 decode_rr(s, insn, &r1, &r2);
3611 tmp32_1 = load_reg32(r2);
3612 if (r1 != r2) {
3613 store_reg32(r1, tmp32_1);
3614 }
3615 set_cc_s32(s, tmp32_1);
3616 tcg_temp_free_i32(tmp32_1);
3617 break;
3618 case 0x13: /* LCR R1,R2 [RR] */
3619 insn = ld_code2(env, s->pc);
3620 decode_rr(s, insn, &r1, &r2);
3621 tmp32_1 = load_reg32(r2);
3622 tcg_gen_neg_i32(tmp32_1, tmp32_1);
3623 store_reg32(r1, tmp32_1);
3624 set_cc_comp32(s, tmp32_1);
3625 tcg_temp_free_i32(tmp32_1);
3626 break;
3627 case 0x14: /* NR R1,R2 [RR] */
3628 case 0x16: /* OR R1,R2 [RR] */
3629 case 0x17: /* XR R1,R2 [RR] */
3630 insn = ld_code2(env, s->pc);
3631 decode_rr(s, insn, &r1, &r2);
3632 tmp32_2 = load_reg32(r2);
3633 tmp32_1 = load_reg32(r1);
3634 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
3635 store_reg32(r1, tmp32_1);
3636 set_cc_nz_u32(s, tmp32_1);
3637 tcg_temp_free_i32(tmp32_1);
3638 tcg_temp_free_i32(tmp32_2);
3639 break;
3640 case 0x18: /* LR R1,R2 [RR] */
3641 insn = ld_code2(env, s->pc);
3642 decode_rr(s, insn, &r1, &r2);
3643 tmp32_1 = load_reg32(r2);
3644 store_reg32(r1, tmp32_1);
3645 tcg_temp_free_i32(tmp32_1);
3646 break;
3647 case 0x15: /* CLR R1,R2 [RR] */
3648 case 0x19: /* CR R1,R2 [RR] */
3649 insn = ld_code2(env, s->pc);
3650 decode_rr(s, insn, &r1, &r2);
3651 tmp32_1 = load_reg32(r1);
3652 tmp32_2 = load_reg32(r2);
3653 if (opc == 0x15) {
3654 cmp_u32(s, tmp32_1, tmp32_2);
3655 } else {
3656 cmp_s32(s, tmp32_1, tmp32_2);
3657 }
3658 tcg_temp_free_i32(tmp32_1);
3659 tcg_temp_free_i32(tmp32_2);
3660 break;
3661 case 0x1c: /* MR R1,R2 [RR] */
3662 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3663 insn = ld_code2(env, s->pc);
3664 decode_rr(s, insn, &r1, &r2);
3665 tmp2 = load_reg(r2);
3666 tmp3 = load_reg((r1 + 1) & 15);
3667 tcg_gen_ext32s_i64(tmp2, tmp2);
3668 tcg_gen_ext32s_i64(tmp3, tmp3);
3669 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
3670 store_reg32_i64((r1 + 1) & 15, tmp2);
3671 tcg_gen_shri_i64(tmp2, tmp2, 32);
3672 store_reg32_i64(r1, tmp2);
3673 tcg_temp_free_i64(tmp2);
3674 tcg_temp_free_i64(tmp3);
3675 break;
3676 case 0x1d: /* DR R1,R2 [RR] */
3677 insn = ld_code2(env, s->pc);
3678 decode_rr(s, insn, &r1, &r2);
3679 tmp32_1 = load_reg32(r1);
3680 tmp32_2 = load_reg32(r1 + 1);
3681 tmp32_3 = load_reg32(r2);
3682
3683 tmp = tcg_temp_new_i64(); /* dividend */
3684 tmp2 = tcg_temp_new_i64(); /* divisor */
3685 tmp3 = tcg_temp_new_i64();
3686
3687 /* dividend is r(r1 << 32) | r(r1 + 1) */
3688 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3689 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
3690 tcg_gen_shli_i64(tmp, tmp, 32);
3691 tcg_gen_or_i64(tmp, tmp, tmp2);
3692
3693 /* divisor is r(r2) */
3694 tcg_gen_ext_i32_i64(tmp2, tmp32_3);
3695
3696 tcg_gen_div_i64(tmp3, tmp, tmp2);
3697 tcg_gen_rem_i64(tmp, tmp, tmp2);
3698
3699 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
3700 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
3701
3702 store_reg32(r1, tmp32_1); /* remainder */
3703 store_reg32(r1 + 1, tmp32_2); /* quotient */
3704 tcg_temp_free_i32(tmp32_1);
3705 tcg_temp_free_i32(tmp32_2);
3706 tcg_temp_free_i32(tmp32_3);
3707 tcg_temp_free_i64(tmp);
3708 tcg_temp_free_i64(tmp2);
3709 tcg_temp_free_i64(tmp3);
3710 break;
3711 case 0x28: /* LDR R1,R2 [RR] */
3712 insn = ld_code2(env, s->pc);
3713 decode_rr(s, insn, &r1, &r2);
3714 tmp = load_freg(r2);
3715 store_freg(r1, tmp);
3716 tcg_temp_free_i64(tmp);
3717 break;
3718 case 0x38: /* LER R1,R2 [RR] */
3719 insn = ld_code2(env, s->pc);
3720 decode_rr(s, insn, &r1, &r2);
3721 tmp32_1 = load_freg32(r2);
3722 store_freg32(r1, tmp32_1);
3723 tcg_temp_free_i32(tmp32_1);
3724 break;
3725 case 0x40: /* STH R1,D2(X2,B2) [RX] */
3726 insn = ld_code4(env, s->pc);
3727 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3728 tmp2 = load_reg(r1);
3729 tcg_gen_qemu_st16(tmp2, tmp, get_mem_index(s));
3730 tcg_temp_free_i64(tmp);
3731 tcg_temp_free_i64(tmp2);
3732 break;
3733 case 0x41: /* la */
3734 insn = ld_code4(env, s->pc);
3735 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3736 store_reg(r1, tmp); /* FIXME: 31/24-bit addressing */
3737 tcg_temp_free_i64(tmp);
3738 break;
3739 case 0x42: /* STC R1,D2(X2,B2) [RX] */
3740 insn = ld_code4(env, s->pc);
3741 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3742 tmp2 = load_reg(r1);
3743 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
3744 tcg_temp_free_i64(tmp);
3745 tcg_temp_free_i64(tmp2);
3746 break;
3747 case 0x43: /* IC R1,D2(X2,B2) [RX] */
3748 insn = ld_code4(env, s->pc);
3749 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3750 tmp2 = tcg_temp_new_i64();
3751 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
3752 store_reg8(r1, tmp2);
3753 tcg_temp_free_i64(tmp);
3754 tcg_temp_free_i64(tmp2);
3755 break;
3756 case 0x44: /* EX R1,D2(X2,B2) [RX] */
3757 insn = ld_code4(env, s->pc);
3758 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3759 tmp2 = load_reg(r1);
3760 tmp3 = tcg_const_i64(s->pc + 4);
3761 update_psw_addr(s);
3762 gen_op_calc_cc(s);
3763 gen_helper_ex(cc_op, cpu_env, cc_op, tmp2, tmp, tmp3);
3764 set_cc_static(s);
3765 tcg_temp_free_i64(tmp);
3766 tcg_temp_free_i64(tmp2);
3767 tcg_temp_free_i64(tmp3);
3768 break;
3769 case 0x46: /* BCT R1,D2(X2,B2) [RX] */
3770 insn = ld_code4(env, s->pc);
3771 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3772 tcg_temp_free_i64(tmp);
3773
3774 tmp32_1 = load_reg32(r1);
3775 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
3776 store_reg32(r1, tmp32_1);
3777
3778 gen_update_cc_op(s);
3779 l1 = gen_new_label();
3780 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
3781
3782 /* not taking the branch, jump to after the instruction */
3783 gen_goto_tb(s, 0, s->pc + 4);
3784 gen_set_label(l1);
3785
3786 /* take the branch, move R2 into psw.addr */
3787 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3788 tcg_gen_mov_i64(psw_addr, tmp);
3789 s->is_jmp = DISAS_JUMP;
3790 tcg_temp_free_i32(tmp32_1);
3791 tcg_temp_free_i64(tmp);
3792 break;
3793 case 0x47: /* BC M1,D2(X2,B2) [RX] */
3794 insn = ld_code4(env, s->pc);
3795 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3796 gen_bcr(s, r1, tmp, s->pc + 4);
3797 tcg_temp_free_i64(tmp);
3798 s->is_jmp = DISAS_TB_JUMP;
3799 break;
3800 case 0x48: /* LH R1,D2(X2,B2) [RX] */
3801 insn = ld_code4(env, s->pc);
3802 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3803 tmp2 = tcg_temp_new_i64();
3804 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
3805 store_reg32_i64(r1, tmp2);
3806 tcg_temp_free_i64(tmp);
3807 tcg_temp_free_i64(tmp2);
3808 break;
3809 case 0x49: /* CH R1,D2(X2,B2) [RX] */
3810 insn = ld_code4(env, s->pc);
3811 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3812 tmp32_1 = load_reg32(r1);
3813 tmp32_2 = tcg_temp_new_i32();
3814 tmp2 = tcg_temp_new_i64();
3815 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
3816 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
3817 cmp_s32(s, tmp32_1, tmp32_2);
3818 tcg_temp_free_i32(tmp32_1);
3819 tcg_temp_free_i32(tmp32_2);
3820 tcg_temp_free_i64(tmp);
3821 tcg_temp_free_i64(tmp2);
3822 break;
3823 case 0x4a: /* AH R1,D2(X2,B2) [RX] */
3824 case 0x4b: /* SH R1,D2(X2,B2) [RX] */
3825 case 0x4c: /* MH R1,D2(X2,B2) [RX] */
3826 insn = ld_code4(env, s->pc);
3827 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3828 tmp2 = tcg_temp_new_i64();
3829 tmp32_1 = load_reg32(r1);
3830 tmp32_2 = tcg_temp_new_i32();
3831 tmp32_3 = tcg_temp_new_i32();
3832
3833 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
3834 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
3835 switch (opc) {
3836 case 0x4a:
3837 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
3838 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
3839 break;
3840 case 0x4b:
3841 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
3842 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
3843 break;
3844 case 0x4c:
3845 tcg_gen_mul_i32(tmp32_3, tmp32_1, tmp32_2);
3846 break;
3847 default:
3848 tcg_abort();
3849 }
3850 store_reg32(r1, tmp32_3);
3851
3852 tcg_temp_free_i32(tmp32_1);
3853 tcg_temp_free_i32(tmp32_2);
3854 tcg_temp_free_i32(tmp32_3);
3855 tcg_temp_free_i64(tmp);
3856 tcg_temp_free_i64(tmp2);
3857 break;
3858 case 0x4d: /* BAS R1,D2(X2,B2) [RX] */
3859 insn = ld_code4(env, s->pc);
3860 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3861 tmp2 = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
3862 store_reg(r1, tmp2);
3863 tcg_gen_mov_i64(psw_addr, tmp);
3864 tcg_temp_free_i64(tmp);
3865 tcg_temp_free_i64(tmp2);
3866 s->is_jmp = DISAS_JUMP;
3867 break;
3868 case 0x4e: /* CVD R1,D2(X2,B2) [RX] */
3869 insn = ld_code4(env, s->pc);
3870 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3871 tmp2 = tcg_temp_new_i64();
3872 tmp32_1 = tcg_temp_new_i32();
3873 tcg_gen_trunc_i64_i32(tmp32_1, regs[r1]);
3874 gen_helper_cvd(tmp2, tmp32_1);
3875 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
3876 tcg_temp_free_i64(tmp);
3877 tcg_temp_free_i64(tmp2);
3878 tcg_temp_free_i32(tmp32_1);
3879 break;
3880 case 0x50: /* st r1, d2(x2, b2) */
3881 insn = ld_code4(env, s->pc);
3882 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3883 tmp2 = load_reg(r1);
3884 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
3885 tcg_temp_free_i64(tmp);
3886 tcg_temp_free_i64(tmp2);
3887 break;
3888 case 0x55: /* CL R1,D2(X2,B2) [RX] */
3889 insn = ld_code4(env, s->pc);
3890 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3891 tmp2 = tcg_temp_new_i64();
3892 tmp32_1 = tcg_temp_new_i32();
3893 tmp32_2 = load_reg32(r1);
3894 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
3895 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
3896 cmp_u32(s, tmp32_2, tmp32_1);
3897 tcg_temp_free_i64(tmp);
3898 tcg_temp_free_i64(tmp2);
3899 tcg_temp_free_i32(tmp32_1);
3900 tcg_temp_free_i32(tmp32_2);
3901 break;
3902 case 0x54: /* N R1,D2(X2,B2) [RX] */
3903 case 0x56: /* O R1,D2(X2,B2) [RX] */
3904 case 0x57: /* X R1,D2(X2,B2) [RX] */
3905 insn = ld_code4(env, s->pc);
3906 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3907 tmp2 = tcg_temp_new_i64();
3908 tmp32_1 = load_reg32(r1);
3909 tmp32_2 = tcg_temp_new_i32();
3910 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
3911 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
3912 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
3913 store_reg32(r1, tmp32_1);
3914 set_cc_nz_u32(s, tmp32_1);
3915 tcg_temp_free_i64(tmp);
3916 tcg_temp_free_i64(tmp2);
3917 tcg_temp_free_i32(tmp32_1);
3918 tcg_temp_free_i32(tmp32_2);
3919 break;
3920 case 0x58: /* l r1, d2(x2, b2) */
3921 insn = ld_code4(env, s->pc);
3922 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3923 tmp2 = tcg_temp_new_i64();
3924 tmp32_1 = tcg_temp_new_i32();
3925 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
3926 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
3927 store_reg32(r1, tmp32_1);
3928 tcg_temp_free_i64(tmp);
3929 tcg_temp_free_i64(tmp2);
3930 tcg_temp_free_i32(tmp32_1);
3931 break;
3932 case 0x59: /* C R1,D2(X2,B2) [RX] */
3933 insn = ld_code4(env, s->pc);
3934 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3935 tmp2 = tcg_temp_new_i64();
3936 tmp32_1 = tcg_temp_new_i32();
3937 tmp32_2 = load_reg32(r1);
3938 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
3939 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
3940 cmp_s32(s, tmp32_2, tmp32_1);
3941 tcg_temp_free_i64(tmp);
3942 tcg_temp_free_i64(tmp2);
3943 tcg_temp_free_i32(tmp32_1);
3944 tcg_temp_free_i32(tmp32_2);
3945 break;
3946 case 0x5c: /* M R1,D2(X2,B2) [RX] */
3947 /* reg(r1, r1+1) = reg(r1+1) * *(s32*)addr */
3948 insn = ld_code4(env, s->pc);
3949 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3950 tmp2 = tcg_temp_new_i64();
3951 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
3952 tmp3 = load_reg((r1 + 1) & 15);
3953 tcg_gen_ext32s_i64(tmp2, tmp2);
3954 tcg_gen_ext32s_i64(tmp3, tmp3);
3955 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
3956 store_reg32_i64((r1 + 1) & 15, tmp2);
3957 tcg_gen_shri_i64(tmp2, tmp2, 32);
3958 store_reg32_i64(r1, tmp2);
3959 tcg_temp_free_i64(tmp);
3960 tcg_temp_free_i64(tmp2);
3961 tcg_temp_free_i64(tmp3);
3962 break;
3963 case 0x5d: /* D R1,D2(X2,B2) [RX] */
3964 insn = ld_code4(env, s->pc);
3965 tmp3 = decode_rx(s, insn, &r1, &x2, &b2, &d2);
3966 tmp32_1 = load_reg32(r1);
3967 tmp32_2 = load_reg32(r1 + 1);
3968
3969 tmp = tcg_temp_new_i64();
3970 tmp2 = tcg_temp_new_i64();
3971
3972 /* dividend is r(r1 << 32) | r(r1 + 1) */
3973 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3974 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
3975 tcg_gen_shli_i64(tmp, tmp, 32);
3976 tcg_gen_or_i64(tmp, tmp, tmp2);
3977
3978 /* divisor is in memory */
3979 tcg_gen_qemu_ld32s(tmp2, tmp3, get_mem_index(s));
3980
3981 /* XXX divisor == 0 -> FixP divide exception */
3982
3983 tcg_gen_div_i64(tmp3, tmp, tmp2);
3984 tcg_gen_rem_i64(tmp, tmp, tmp2);
3985
3986 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
3987 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
3988
3989 store_reg32(r1, tmp32_1); /* remainder */
3990 store_reg32(r1 + 1, tmp32_2); /* quotient */
3991 tcg_temp_free_i32(tmp32_1);
3992 tcg_temp_free_i32(tmp32_2);
3993 tcg_temp_free_i64(tmp);
3994 tcg_temp_free_i64(tmp2);
3995 tcg_temp_free_i64(tmp3);
3996 break;
3997 case 0x60: /* STD R1,D2(X2,B2) [RX] */
3998 insn = ld_code4(env, s->pc);
3999 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4000 tmp2 = load_freg(r1);
4001 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
4002 tcg_temp_free_i64(tmp);
4003 tcg_temp_free_i64(tmp2);
4004 break;
4005 case 0x68: /* LD R1,D2(X2,B2) [RX] */
4006 insn = ld_code4(env, s->pc);
4007 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4008 tmp2 = tcg_temp_new_i64();
4009 tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
4010 store_freg(r1, tmp2);
4011 tcg_temp_free_i64(tmp);
4012 tcg_temp_free_i64(tmp2);
4013 break;
4014 case 0x70: /* STE R1,D2(X2,B2) [RX] */
4015 insn = ld_code4(env, s->pc);
4016 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4017 tmp2 = tcg_temp_new_i64();
4018 tmp32_1 = load_freg32(r1);
4019 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
4020 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
4021 tcg_temp_free_i64(tmp);
4022 tcg_temp_free_i64(tmp2);
4023 tcg_temp_free_i32(tmp32_1);
4024 break;
4025 case 0x71: /* MS R1,D2(X2,B2) [RX] */
4026 insn = ld_code4(env, s->pc);
4027 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4028 tmp2 = tcg_temp_new_i64();
4029 tmp32_1 = load_reg32(r1);
4030 tmp32_2 = tcg_temp_new_i32();
4031 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4032 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4033 tcg_gen_mul_i32(tmp32_1, tmp32_1, tmp32_2);
4034 store_reg32(r1, tmp32_1);
4035 tcg_temp_free_i64(tmp);
4036 tcg_temp_free_i64(tmp2);
4037 tcg_temp_free_i32(tmp32_1);
4038 tcg_temp_free_i32(tmp32_2);
4039 break;
4040 case 0x78: /* LE R1,D2(X2,B2) [RX] */
4041 insn = ld_code4(env, s->pc);
4042 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4043 tmp2 = tcg_temp_new_i64();
4044 tmp32_1 = tcg_temp_new_i32();
4045 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4046 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4047 store_freg32(r1, tmp32_1);
4048 tcg_temp_free_i64(tmp);
4049 tcg_temp_free_i64(tmp2);
4050 tcg_temp_free_i32(tmp32_1);
4051 break;
4052 #ifndef CONFIG_USER_ONLY
4053 case 0x80: /* SSM D2(B2) [S] */
4054 /* Set System Mask */
4055 check_privileged(s);
4056 insn = ld_code4(env, s->pc);
4057 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4058 tmp = get_address(s, 0, b2, d2);
4059 tmp2 = tcg_temp_new_i64();
4060 tmp3 = tcg_temp_new_i64();
4061 tcg_gen_andi_i64(tmp3, psw_mask, ~0xff00000000000000ULL);
4062 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4063 tcg_gen_shli_i64(tmp2, tmp2, 56);
4064 tcg_gen_or_i64(psw_mask, tmp3, tmp2);
4065 tcg_temp_free_i64(tmp);
4066 tcg_temp_free_i64(tmp2);
4067 tcg_temp_free_i64(tmp3);
4068 break;
4069 case 0x82: /* LPSW D2(B2) [S] */
4070 /* Load PSW */
4071 check_privileged(s);
4072 insn = ld_code4(env, s->pc);
4073 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4074 tmp = get_address(s, 0, b2, d2);
4075 tmp2 = tcg_temp_new_i64();
4076 tmp3 = tcg_temp_new_i64();
4077 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4078 tcg_gen_addi_i64(tmp, tmp, 4);
4079 tcg_gen_qemu_ld32u(tmp3, tmp, get_mem_index(s));
4080 /* Convert the 32-bit PSW_MASK into the 64-bit PSW_MASK. */
4081 tcg_gen_shli_i64(tmp2, tmp2, 32);
4082 gen_helper_load_psw(cpu_env, tmp2, tmp3);
4083 tcg_temp_free_i64(tmp);
4084 tcg_temp_free_i64(tmp2);
4085 tcg_temp_free_i64(tmp3);
4086 /* we need to keep cc_op intact */
4087 s->is_jmp = DISAS_JUMP;
4088 break;
4089 case 0x83: /* DIAG R1,R3,D2 [RS] */
4090 /* Diagnose call (KVM hypercall) */
4091 check_privileged(s);
4092 potential_page_fault(s);
4093 insn = ld_code4(env, s->pc);
4094 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4095 tmp32_1 = tcg_const_i32(insn & 0xfff);
4096 tmp2 = load_reg(2);
4097 tmp3 = load_reg(1);
4098 gen_helper_diag(tmp2, cpu_env, tmp32_1, tmp2, tmp3);
4099 store_reg(2, tmp2);
4100 tcg_temp_free_i32(tmp32_1);
4101 tcg_temp_free_i64(tmp2);
4102 tcg_temp_free_i64(tmp3);
4103 break;
4104 #endif
4105 case 0x88: /* SRL R1,D2(B2) [RS] */
4106 case 0x89: /* SLL R1,D2(B2) [RS] */
4107 case 0x8a: /* SRA R1,D2(B2) [RS] */
4108 insn = ld_code4(env, s->pc);
4109 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4110 tmp = get_address(s, 0, b2, d2);
4111 tmp32_1 = load_reg32(r1);
4112 tmp32_2 = tcg_temp_new_i32();
4113 tcg_gen_trunc_i64_i32(tmp32_2, tmp);
4114 tcg_gen_andi_i32(tmp32_2, tmp32_2, 0x3f);
4115 switch (opc) {
4116 case 0x88:
4117 tcg_gen_shr_i32(tmp32_1, tmp32_1, tmp32_2);
4118 break;
4119 case 0x89:
4120 tcg_gen_shl_i32(tmp32_1, tmp32_1, tmp32_2);
4121 break;
4122 case 0x8a:
4123 tcg_gen_sar_i32(tmp32_1, tmp32_1, tmp32_2);
4124 set_cc_s32(s, tmp32_1);
4125 break;
4126 default:
4127 tcg_abort();
4128 }
4129 store_reg32(r1, tmp32_1);
4130 tcg_temp_free_i64(tmp);
4131 tcg_temp_free_i32(tmp32_1);
4132 tcg_temp_free_i32(tmp32_2);
4133 break;
4134 case 0x8c: /* SRDL R1,D2(B2) [RS] */
4135 case 0x8d: /* SLDL R1,D2(B2) [RS] */
4136 case 0x8e: /* SRDA R1,D2(B2) [RS] */
4137 insn = ld_code4(env, s->pc);
4138 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4139 tmp = get_address(s, 0, b2, d2); /* shift */
4140 tmp2 = tcg_temp_new_i64();
4141 tmp32_1 = load_reg32(r1);
4142 tmp32_2 = load_reg32(r1 + 1);
4143 tcg_gen_concat_i32_i64(tmp2, tmp32_2, tmp32_1); /* operand */
4144 switch (opc) {
4145 case 0x8c:
4146 tcg_gen_shr_i64(tmp2, tmp2, tmp);
4147 break;
4148 case 0x8d:
4149 tcg_gen_shl_i64(tmp2, tmp2, tmp);
4150 break;
4151 case 0x8e:
4152 tcg_gen_sar_i64(tmp2, tmp2, tmp);
4153 set_cc_s64(s, tmp2);
4154 break;
4155 }
4156 tcg_gen_shri_i64(tmp, tmp2, 32);
4157 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4158 store_reg32(r1, tmp32_1);
4159 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4160 store_reg32(r1 + 1, tmp32_2);
4161 tcg_temp_free_i64(tmp);
4162 tcg_temp_free_i64(tmp2);
4163 break;
4164 case 0x98: /* LM R1,R3,D2(B2) [RS] */
4165 case 0x90: /* STM R1,R3,D2(B2) [RS] */
4166 insn = ld_code4(env, s->pc);
4167 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4168
4169 tmp = get_address(s, 0, b2, d2);
4170 tmp2 = tcg_temp_new_i64();
4171 tmp3 = tcg_const_i64(4);
4172 tmp4 = tcg_const_i64(0xffffffff00000000ULL);
4173 for (i = r1;; i = (i + 1) % 16) {
4174 if (opc == 0x98) {
4175 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4176 tcg_gen_and_i64(regs[i], regs[i], tmp4);
4177 tcg_gen_or_i64(regs[i], regs[i], tmp2);
4178 } else {
4179 tcg_gen_qemu_st32(regs[i], tmp, get_mem_index(s));
4180 }
4181 if (i == r3) {
4182 break;
4183 }
4184 tcg_gen_add_i64(tmp, tmp, tmp3);
4185 }
4186 tcg_temp_free_i64(tmp);
4187 tcg_temp_free_i64(tmp2);
4188 tcg_temp_free_i64(tmp3);
4189 tcg_temp_free_i64(tmp4);
4190 break;
4191 case 0x91: /* TM D1(B1),I2 [SI] */
4192 insn = ld_code4(env, s->pc);
4193 tmp = decode_si(s, insn, &i2, &b1, &d1);
4194 tmp2 = tcg_const_i64(i2);
4195 tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
4196 cmp_64(s, tmp, tmp2, CC_OP_TM_32);
4197 tcg_temp_free_i64(tmp);
4198 tcg_temp_free_i64(tmp2);
4199 break;
4200 case 0x92: /* MVI D1(B1),I2 [SI] */
4201 insn = ld_code4(env, s->pc);
4202 tmp = decode_si(s, insn, &i2, &b1, &d1);
4203 tmp2 = tcg_const_i64(i2);
4204 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4205 tcg_temp_free_i64(tmp);
4206 tcg_temp_free_i64(tmp2);
4207 break;
4208 case 0x94: /* NI D1(B1),I2 [SI] */
4209 case 0x96: /* OI D1(B1),I2 [SI] */
4210 case 0x97: /* XI D1(B1),I2 [SI] */
4211 insn = ld_code4(env, s->pc);
4212 tmp = decode_si(s, insn, &i2, &b1, &d1);
4213 tmp2 = tcg_temp_new_i64();
4214 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4215 switch (opc) {
4216 case 0x94:
4217 tcg_gen_andi_i64(tmp2, tmp2, i2);
4218 break;
4219 case 0x96:
4220 tcg_gen_ori_i64(tmp2, tmp2, i2);
4221 break;
4222 case 0x97:
4223 tcg_gen_xori_i64(tmp2, tmp2, i2);
4224 break;
4225 default:
4226 tcg_abort();
4227 }
4228 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4229 set_cc_nz_u64(s, tmp2);
4230 tcg_temp_free_i64(tmp);
4231 tcg_temp_free_i64(tmp2);
4232 break;
4233 case 0x95: /* CLI D1(B1),I2 [SI] */
4234 insn = ld_code4(env, s->pc);
4235 tmp = decode_si(s, insn, &i2, &b1, &d1);
4236 tmp2 = tcg_temp_new_i64();
4237 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4238 cmp_u64c(s, tmp2, i2);
4239 tcg_temp_free_i64(tmp);
4240 tcg_temp_free_i64(tmp2);
4241 break;
4242 case 0x9a: /* LAM R1,R3,D2(B2) [RS] */
4243 insn = ld_code4(env, s->pc);
4244 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4245 tmp = get_address(s, 0, b2, d2);
4246 tmp32_1 = tcg_const_i32(r1);
4247 tmp32_2 = tcg_const_i32(r3);
4248 potential_page_fault(s);
4249 gen_helper_lam(cpu_env, tmp32_1, tmp, tmp32_2);
4250 tcg_temp_free_i64(tmp);
4251 tcg_temp_free_i32(tmp32_1);
4252 tcg_temp_free_i32(tmp32_2);
4253 break;
4254 case 0x9b: /* STAM R1,R3,D2(B2) [RS] */
4255 insn = ld_code4(env, s->pc);
4256 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4257 tmp = get_address(s, 0, b2, d2);
4258 tmp32_1 = tcg_const_i32(r1);
4259 tmp32_2 = tcg_const_i32(r3);
4260 potential_page_fault(s);
4261 gen_helper_stam(cpu_env, tmp32_1, tmp, tmp32_2);
4262 tcg_temp_free_i64(tmp);
4263 tcg_temp_free_i32(tmp32_1);
4264 tcg_temp_free_i32(tmp32_2);
4265 break;
4266 case 0xa5:
4267 insn = ld_code4(env, s->pc);
4268 r1 = (insn >> 20) & 0xf;
4269 op = (insn >> 16) & 0xf;
4270 i2 = insn & 0xffff;
4271 disas_a5(env, s, op, r1, i2);
4272 break;
4273 case 0xa7:
4274 insn = ld_code4(env, s->pc);
4275 r1 = (insn >> 20) & 0xf;
4276 op = (insn >> 16) & 0xf;
4277 i2 = (short)insn;
4278 disas_a7(env, s, op, r1, i2);
4279 break;
4280 case 0xa8: /* MVCLE R1,R3,D2(B2) [RS] */
4281 insn = ld_code4(env, s->pc);
4282 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4283 tmp = get_address(s, 0, b2, d2);
4284 tmp32_1 = tcg_const_i32(r1);
4285 tmp32_2 = tcg_const_i32(r3);
4286 potential_page_fault(s);
4287 gen_helper_mvcle(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
4288 set_cc_static(s);
4289 tcg_temp_free_i64(tmp);
4290 tcg_temp_free_i32(tmp32_1);
4291 tcg_temp_free_i32(tmp32_2);
4292 break;
4293 case 0xa9: /* CLCLE R1,R3,D2(B2) [RS] */
4294 insn = ld_code4(env, s->pc);
4295 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4296 tmp = get_address(s, 0, b2, d2);
4297 tmp32_1 = tcg_const_i32(r1);
4298 tmp32_2 = tcg_const_i32(r3);
4299 potential_page_fault(s);
4300 gen_helper_clcle(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
4301 set_cc_static(s);
4302 tcg_temp_free_i64(tmp);
4303 tcg_temp_free_i32(tmp32_1);
4304 tcg_temp_free_i32(tmp32_2);
4305 break;
4306 #ifndef CONFIG_USER_ONLY
4307 case 0xac: /* STNSM D1(B1),I2 [SI] */
4308 case 0xad: /* STOSM D1(B1),I2 [SI] */
4309 check_privileged(s);
4310 insn = ld_code4(env, s->pc);
4311 tmp = decode_si(s, insn, &i2, &b1, &d1);
4312 tmp2 = tcg_temp_new_i64();
4313 tcg_gen_shri_i64(tmp2, psw_mask, 56);
4314 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4315 if (opc == 0xac) {
4316 tcg_gen_andi_i64(psw_mask, psw_mask,
4317 ((uint64_t)i2 << 56) | 0x00ffffffffffffffULL);
4318 } else {
4319 tcg_gen_ori_i64(psw_mask, psw_mask, (uint64_t)i2 << 56);
4320 }
4321 tcg_temp_free_i64(tmp);
4322 tcg_temp_free_i64(tmp2);
4323 break;
4324 case 0xae: /* SIGP R1,R3,D2(B2) [RS] */
4325 check_privileged(s);
4326 insn = ld_code4(env, s->pc);
4327 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4328 tmp = get_address(s, 0, b2, d2);
4329 tmp2 = load_reg(r3);
4330 tmp32_1 = tcg_const_i32(r1);
4331 potential_page_fault(s);
4332 gen_helper_sigp(cc_op, cpu_env, tmp, tmp32_1, tmp2);
4333 set_cc_static(s);
4334 tcg_temp_free_i64(tmp);
4335 tcg_temp_free_i64(tmp2);
4336 tcg_temp_free_i32(tmp32_1);
4337 break;
4338 case 0xb1: /* LRA R1,D2(X2, B2) [RX] */
4339 check_privileged(s);
4340 insn = ld_code4(env, s->pc);
4341 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4342 tmp32_1 = tcg_const_i32(r1);
4343 potential_page_fault(s);
4344 gen_helper_lra(cc_op, cpu_env, tmp, tmp32_1);
4345 set_cc_static(s);
4346 tcg_temp_free_i64(tmp);
4347 tcg_temp_free_i32(tmp32_1);
4348 break;
4349 #endif
4350 case 0xb2:
4351 insn = ld_code4(env, s->pc);
4352 op = (insn >> 16) & 0xff;
4353 switch (op) {
4354 case 0x9c: /* STFPC D2(B2) [S] */
4355 d2 = insn & 0xfff;
4356 b2 = (insn >> 12) & 0xf;
4357 tmp32_1 = tcg_temp_new_i32();
4358 tmp = tcg_temp_new_i64();
4359 tmp2 = get_address(s, 0, b2, d2);
4360 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUS390XState, fpc));
4361 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4362 tcg_gen_qemu_st32(tmp, tmp2, get_mem_index(s));
4363 tcg_temp_free_i32(tmp32_1);
4364 tcg_temp_free_i64(tmp);
4365 tcg_temp_free_i64(tmp2);
4366 break;
4367 default:
4368 disas_b2(env, s, op, insn);
4369 break;
4370 }
4371 break;
4372 case 0xb3:
4373 insn = ld_code4(env, s->pc);
4374 op = (insn >> 16) & 0xff;
4375 r3 = (insn >> 12) & 0xf; /* aka m3 */
4376 r1 = (insn >> 4) & 0xf;
4377 r2 = insn & 0xf;
4378 disas_b3(env, s, op, r3, r1, r2);
4379 break;
4380 #ifndef CONFIG_USER_ONLY
4381 case 0xb6: /* STCTL R1,R3,D2(B2) [RS] */
4382 /* Store Control */
4383 check_privileged(s);
4384 insn = ld_code4(env, s->pc);
4385 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4386 tmp = get_address(s, 0, b2, d2);
4387 tmp32_1 = tcg_const_i32(r1);
4388 tmp32_2 = tcg_const_i32(r3);
4389 potential_page_fault(s);
4390 gen_helper_stctl(cpu_env, tmp32_1, tmp, tmp32_2);
4391 tcg_temp_free_i64(tmp);
4392 tcg_temp_free_i32(tmp32_1);
4393 tcg_temp_free_i32(tmp32_2);
4394 break;
4395 case 0xb7: /* LCTL R1,R3,D2(B2) [RS] */
4396 /* Load Control */
4397 check_privileged(s);
4398 insn = ld_code4(env, s->pc);
4399 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4400 tmp = get_address(s, 0, b2, d2);
4401 tmp32_1 = tcg_const_i32(r1);
4402 tmp32_2 = tcg_const_i32(r3);
4403 potential_page_fault(s);
4404 gen_helper_lctl(cpu_env, tmp32_1, tmp, tmp32_2);
4405 tcg_temp_free_i64(tmp);
4406 tcg_temp_free_i32(tmp32_1);
4407 tcg_temp_free_i32(tmp32_2);
4408 break;
4409 #endif
4410 case 0xb9:
4411 insn = ld_code4(env, s->pc);
4412 r1 = (insn >> 4) & 0xf;
4413 r2 = insn & 0xf;
4414 op = (insn >> 16) & 0xff;
4415 disas_b9(env, s, op, r1, r2);
4416 break;
4417 case 0xba: /* CS R1,R3,D2(B2) [RS] */
4418 insn = ld_code4(env, s->pc);
4419 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4420 tmp = get_address(s, 0, b2, d2);
4421 tmp32_1 = tcg_const_i32(r1);
4422 tmp32_2 = tcg_const_i32(r3);
4423 potential_page_fault(s);
4424 gen_helper_cs(cc_op, cpu_env, tmp32_1, tmp, tmp32_2);
4425 set_cc_static(s);
4426 tcg_temp_free_i64(tmp);
4427 tcg_temp_free_i32(tmp32_1);
4428 tcg_temp_free_i32(tmp32_2);
4429 break;
4430 case 0xbd: /* CLM R1,M3,D2(B2) [RS] */
4431 insn = ld_code4(env, s->pc);
4432 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4433 tmp = get_address(s, 0, b2, d2);
4434 tmp32_1 = load_reg32(r1);
4435 tmp32_2 = tcg_const_i32(r3);
4436 potential_page_fault(s);
4437 gen_helper_clm(cc_op, cpu_env, tmp32_1, tmp32_2, tmp);
4438 set_cc_static(s);
4439 tcg_temp_free_i64(tmp);
4440 tcg_temp_free_i32(tmp32_1);
4441 tcg_temp_free_i32(tmp32_2);
4442 break;
4443 case 0xbe: /* STCM R1,M3,D2(B2) [RS] */
4444 insn = ld_code4(env, s->pc);
4445 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4446 tmp = get_address(s, 0, b2, d2);
4447 tmp32_1 = load_reg32(r1);
4448 tmp32_2 = tcg_const_i32(r3);
4449 potential_page_fault(s);
4450 gen_helper_stcm(cpu_env, tmp32_1, tmp32_2, tmp);
4451 tcg_temp_free_i64(tmp);
4452 tcg_temp_free_i32(tmp32_1);
4453 tcg_temp_free_i32(tmp32_2);
4454 break;
4455 case 0xbf: /* ICM R1,M3,D2(B2) [RS] */
4456 insn = ld_code4(env, s->pc);
4457 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4458 if (r3 == 15) {
4459 /* effectively a 32-bit load */
4460 tmp = get_address(s, 0, b2, d2);
4461 tmp32_1 = tcg_temp_new_i32();
4462 tmp32_2 = tcg_const_i32(r3);
4463 tcg_gen_qemu_ld32u(tmp, tmp, get_mem_index(s));
4464 store_reg32_i64(r1, tmp);
4465 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4466 set_cc_icm(s, tmp32_2, tmp32_1);
4467 tcg_temp_free_i64(tmp);
4468 tcg_temp_free_i32(tmp32_1);
4469 tcg_temp_free_i32(tmp32_2);
4470 } else if (r3) {
4471 uint32_t mask = 0x00ffffffUL;
4472 uint32_t shift = 24;
4473 int m3 = r3;
4474 tmp = get_address(s, 0, b2, d2);
4475 tmp2 = tcg_temp_new_i64();
4476 tmp32_1 = load_reg32(r1);
4477 tmp32_2 = tcg_temp_new_i32();
4478 tmp32_3 = tcg_const_i32(r3);
4479 tmp32_4 = tcg_const_i32(0);
4480 while (m3) {
4481 if (m3 & 8) {
4482 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4483 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4484 if (shift) {
4485 tcg_gen_shli_i32(tmp32_2, tmp32_2, shift);
4486 }
4487 tcg_gen_andi_i32(tmp32_1, tmp32_1, mask);
4488 tcg_gen_or_i32(tmp32_1, tmp32_1, tmp32_2);
4489 tcg_gen_or_i32(tmp32_4, tmp32_4, tmp32_2);
4490 tcg_gen_addi_i64(tmp, tmp, 1);
4491 }
4492 m3 = (m3 << 1) & 0xf;
4493 mask = (mask >> 8) | 0xff000000UL;
4494 shift -= 8;
4495 }
4496 store_reg32(r1, tmp32_1);
4497 set_cc_icm(s, tmp32_3, tmp32_4);
4498 tcg_temp_free_i64(tmp);
4499 tcg_temp_free_i64(tmp2);
4500 tcg_temp_free_i32(tmp32_1);
4501 tcg_temp_free_i32(tmp32_2);
4502 tcg_temp_free_i32(tmp32_3);
4503 tcg_temp_free_i32(tmp32_4);
4504 } else {
4505 /* i.e. env->cc = 0 */
4506 gen_op_movi_cc(s, 0);
4507 }
4508 break;
4509 case 0xc0:
4510 case 0xc2:
4511 insn = ld_code6(env, s->pc);
4512 r1 = (insn >> 36) & 0xf;
4513 op = (insn >> 32) & 0xf;
4514 i2 = (int)insn;
4515 switch (opc) {
4516 case 0xc0:
4517 disas_c0(env, s, op, r1, i2);
4518 break;
4519 case 0xc2:
4520 disas_c2(env, s, op, r1, i2);
4521 break;
4522 default:
4523 tcg_abort();
4524 }
4525 break;
4526 case 0xd2: /* MVC D1(L,B1),D2(B2) [SS] */
4527 case 0xd4: /* NC D1(L,B1),D2(B2) [SS] */
4528 case 0xd5: /* CLC D1(L,B1),D2(B2) [SS] */
4529 case 0xd6: /* OC D1(L,B1),D2(B2) [SS] */
4530 case 0xd7: /* XC D1(L,B1),D2(B2) [SS] */
4531 case 0xdc: /* TR D1(L,B1),D2(B2) [SS] */
4532 case 0xf3: /* UNPK D1(L1,B1),D2(L2,B2) [SS] */
4533 insn = ld_code6(env, s->pc);
4534 vl = tcg_const_i32((insn >> 32) & 0xff);
4535 b1 = (insn >> 28) & 0xf;
4536 b2 = (insn >> 12) & 0xf;
4537 d1 = (insn >> 16) & 0xfff;
4538 d2 = insn & 0xfff;
4539 tmp = get_address(s, 0, b1, d1);
4540 tmp2 = get_address(s, 0, b2, d2);
4541 switch (opc) {
4542 case 0xd2:
4543 gen_op_mvc(s, (insn >> 32) & 0xff, tmp, tmp2);
4544 break;
4545 case 0xd4:
4546 potential_page_fault(s);
4547 gen_helper_nc(cc_op, cpu_env, vl, tmp, tmp2);
4548 set_cc_static(s);
4549 break;
4550 case 0xd5:
4551 gen_op_clc(s, (insn >> 32) & 0xff, tmp, tmp2);
4552 break;
4553 case 0xd6:
4554 potential_page_fault(s);
4555 gen_helper_oc(cc_op, cpu_env, vl, tmp, tmp2);
4556 set_cc_static(s);
4557 break;
4558 case 0xd7:
4559 potential_page_fault(s);
4560 gen_helper_xc(cc_op, cpu_env, vl, tmp, tmp2);
4561 set_cc_static(s);
4562 break;
4563 case 0xdc:
4564 potential_page_fault(s);
4565 gen_helper_tr(cpu_env, vl, tmp, tmp2);
4566 set_cc_static(s);
4567 break;
4568 case 0xf3:
4569 potential_page_fault(s);
4570 gen_helper_unpk(cpu_env, vl, tmp, tmp2);
4571 break;
4572 default:
4573 tcg_abort();
4574 }
4575 tcg_temp_free_i64(tmp);
4576 tcg_temp_free_i64(tmp2);
4577 break;
4578 #ifndef CONFIG_USER_ONLY
4579 case 0xda: /* MVCP D1(R1,B1),D2(B2),R3 [SS] */
4580 case 0xdb: /* MVCS D1(R1,B1),D2(B2),R3 [SS] */
4581 check_privileged(s);
4582 potential_page_fault(s);
4583 insn = ld_code6(env, s->pc);
4584 r1 = (insn >> 36) & 0xf;
4585 r3 = (insn >> 32) & 0xf;
4586 b1 = (insn >> 28) & 0xf;
4587 d1 = (insn >> 16) & 0xfff;
4588 b2 = (insn >> 12) & 0xf;
4589 d2 = insn & 0xfff;
4590 tmp = load_reg(r1);
4591 /* XXX key in r3 */
4592 tmp2 = get_address(s, 0, b1, d1);
4593 tmp3 = get_address(s, 0, b2, d2);
4594 if (opc == 0xda) {
4595 gen_helper_mvcp(cc_op, cpu_env, tmp, tmp2, tmp3);
4596 } else {
4597 gen_helper_mvcs(cc_op, cpu_env, tmp, tmp2, tmp3);
4598 }
4599 set_cc_static(s);
4600 tcg_temp_free_i64(tmp);
4601 tcg_temp_free_i64(tmp2);
4602 tcg_temp_free_i64(tmp3);
4603 break;
4604 #endif
4605 case 0xe3:
4606 insn = ld_code6(env, s->pc);
4607 debug_insn(insn);
4608 op = insn & 0xff;
4609 r1 = (insn >> 36) & 0xf;
4610 x2 = (insn >> 32) & 0xf;
4611 b2 = (insn >> 28) & 0xf;
4612 d2 = ((int)((((insn >> 16) & 0xfff)
4613 | ((insn << 4) & 0xff000)) << 12)) >> 12;
4614 disas_e3(env, s, op, r1, x2, b2, d2 );
4615 break;
4616 #ifndef CONFIG_USER_ONLY
4617 case 0xe5:
4618 /* Test Protection */
4619 check_privileged(s);
4620 insn = ld_code6(env, s->pc);
4621 debug_insn(insn);
4622 disas_e5(env, s, insn);
4623 break;
4624 #endif
4625 case 0xeb:
4626 insn = ld_code6(env, s->pc);
4627 debug_insn(insn);
4628 op = insn & 0xff;
4629 r1 = (insn >> 36) & 0xf;
4630 r3 = (insn >> 32) & 0xf;
4631 b2 = (insn >> 28) & 0xf;
4632 d2 = ((int)((((insn >> 16) & 0xfff)
4633 | ((insn << 4) & 0xff000)) << 12)) >> 12;
4634 disas_eb(env, s, op, r1, r3, b2, d2);
4635 break;
4636 case 0xed:
4637 insn = ld_code6(env, s->pc);
4638 debug_insn(insn);
4639 op = insn & 0xff;
4640 r1 = (insn >> 36) & 0xf;
4641 x2 = (insn >> 32) & 0xf;
4642 b2 = (insn >> 28) & 0xf;
4643 d2 = (short)((insn >> 16) & 0xfff);
4644 r1b = (insn >> 12) & 0xf;
4645 disas_ed(env, s, op, r1, x2, b2, d2, r1b);
4646 break;
4647 default:
4648 qemu_log_mask(LOG_UNIMP, "unimplemented opcode 0x%x\n", opc);
4649 gen_illegal_opcode(s);
4650 break;
4651 }
4652 }
4653
/* ====================================================================== */
/* Define the insn format enumeration.  The F0..F5 macros are "X-macros":
   each entry in insn-format.def expands to one FMT_* enumerator, with the
   macro arity matching the number of fields in that format.  The field
   arguments are ignored here; they are used again below to build
   format_info.  */
#define F0(N)                     FMT_##N,
#define F1(N, X1)                 F0(N)
#define F2(N, X1, X2)             F0(N)
#define F3(N, X1, X2, X3)         F0(N)
#define F4(N, X1, X2, X3, X4)     F0(N)
#define F5(N, X1, X2, X3, X4, X5) F0(N)

typedef enum {
#include "insn-format.def"
} DisasFormat;

#undef F0
#undef F1
#undef F2
#undef F3
#undef F4
#undef F5
4673
4674 /* Define a structure to hold the decoded fields. We'll store each inside
4675 an array indexed by an enum. In order to conserve memory, we'll arrange
4676 for fields that do not exist at the same time to overlap, thus the "C"
4677 for compact. For checking purposes there is an "O" for original index
4678 as well that will be applied to availability bitmaps. */
4679
/* "Original" field indexes, one per distinct field name.  Each decoded
   field sets bit (1 << FLD_O_x) in DisasFields.presentO, which is what
   have_field() tests.  */
enum DisasFieldIndexO {
    FLD_O_r1,
    FLD_O_r2,
    FLD_O_r3,
    FLD_O_m1,
    FLD_O_m3,
    FLD_O_m4,
    FLD_O_b1,
    FLD_O_b2,
    FLD_O_b4,
    FLD_O_d1,
    FLD_O_d2,
    FLD_O_d4,
    FLD_O_x2,
    FLD_O_l1,
    FLD_O_l2,
    FLD_O_i1,
    FLD_O_i2,
    FLD_O_i3,
    FLD_O_i4,
    FLD_O_i5
};
4702
/* "Compact" field indexes: the slot in DisasFields.c[] where a field's
   value is stored.  Fields that share a slot (e.g. r1/m1/b1/i1) never
   occur together in any one instruction format; extract_field asserts
   this via presentC.  */
enum DisasFieldIndexC {
    FLD_C_r1 = 0,
    FLD_C_m1 = 0,
    FLD_C_b1 = 0,
    FLD_C_i1 = 0,

    FLD_C_r2 = 1,
    FLD_C_b2 = 1,
    FLD_C_i2 = 1,

    FLD_C_r3 = 2,
    FLD_C_m3 = 2,
    FLD_C_i3 = 2,

    FLD_C_m4 = 3,
    FLD_C_b4 = 3,
    FLD_C_i4 = 3,
    FLD_C_l1 = 3,

    FLD_C_i5 = 4,
    FLD_C_d1 = 4,

    FLD_C_d2 = 5,

    FLD_C_d4 = 6,
    FLD_C_x2 = 6,
    FLD_C_l2 = 6,

    NUM_C_FIELD = 7
};
4733
/* The decoded operand fields of one instruction.  */
struct DisasFields {
    unsigned op:8;          /* primary opcode byte */
    unsigned op2:8;         /* secondary opcode (0 if none) */
    unsigned presentC:16;   /* bitmap of compact slots in use (sanity check) */
    unsigned int presentO;  /* bitmap over enum DisasFieldIndexO */
    int c[NUM_C_FIELD];     /* field values, indexed by compact slot */
};
4741
/* This is the way fields are to be accessed out of DisasFields.  The
   macros map a bare field name (e.g. r1) onto both its original and
   compact indexes.  */
#define have_field(S, F)  have_field1((S), FLD_O_##F)
#define get_field(S, F)   get_field1((S), FLD_O_##F, FLD_C_##F)
4745
4746 static bool have_field1(const DisasFields *f, enum DisasFieldIndexO c)
4747 {
4748 return (f->presentO >> c) & 1;
4749 }
4750
/* Fetch the value of a decoded field.  O is the original index used to
   assert the field is actually present; C is the compact storage slot.  */
static int get_field1(const DisasFields *f, enum DisasFieldIndexO o,
                      enum DisasFieldIndexC c)
{
    assert(have_field1(f, o));
    return f->c[c];
}
4757
/* Describe the layout of each field in each format.  */
typedef struct DisasField {
    unsigned int beg:8;     /* bit offset from the MSB of the left-aligned insn */
    unsigned int size:8;    /* field width in bits; 0 means slot unused */
    unsigned int type:2;    /* 0 unsigned, 1 signed, 2 split 20-bit disp */
    unsigned int indexC:6;  /* compact storage slot (enum DisasFieldIndexC) */
    enum DisasFieldIndexO indexO:8; /* original field index */
} DisasField;

/* Per-format array of field descriptors, one row per compact slot.  */
typedef struct DisasFormatInfo {
    DisasField op[NUM_C_FIELD];
} DisasFormatInfo;
4770
/* Field-layout helper macros.  Each expands to one or more DisasField
   initializers: R/M are 4-bit register/mask fields, BD/BXD are
   base(+index)+12-bit-displacement addresses, BDL/BXDL use the split
   20-bit signed displacement (type 2), I is a signed immediate and L an
   unsigned length field.  */
#define R(N, B)       {  B,  4, 0, FLD_C_r##N, FLD_O_r##N }
#define M(N, B)       {  B,  4, 0, FLD_C_m##N, FLD_O_m##N }
#define BD(N, BB, BD) { BB,  4, 0, FLD_C_b##N, FLD_O_b##N }, \
                      { BD, 12, 0, FLD_C_d##N, FLD_O_d##N }
#define BXD(N)        { 16,  4, 0, FLD_C_b##N, FLD_O_b##N }, \
                      { 12,  4, 0, FLD_C_x##N, FLD_O_x##N }, \
                      { 20, 12, 0, FLD_C_d##N, FLD_O_d##N }
#define BDL(N)        { 16,  4, 0, FLD_C_b##N, FLD_O_b##N }, \
                      { 20, 20, 2, FLD_C_d##N, FLD_O_d##N }
#define BXDL(N)       { 16,  4, 0, FLD_C_b##N, FLD_O_b##N }, \
                      { 12,  4, 0, FLD_C_x##N, FLD_O_x##N }, \
                      { 20, 20, 2, FLD_C_d##N, FLD_O_d##N }
#define I(N, B, S)    {  B,  S, 1, FLD_C_i##N, FLD_O_i##N }
#define L(N, B, S)    {  B,  S, 0, FLD_C_l##N, FLD_O_l##N }

/* Re-expand insn-format.def, this time into the field-layout table
   indexed by DisasFormat.  */
#define F0(N)                     { { } },
#define F1(N, X1)                 { { X1 } },
#define F2(N, X1, X2)             { { X1, X2 } },
#define F3(N, X1, X2, X3)         { { X1, X2, X3 } },
#define F4(N, X1, X2, X3, X4)     { { X1, X2, X3, X4 } },
#define F5(N, X1, X2, X3, X4, X5) { { X1, X2, X3, X4, X5 } },

static const DisasFormatInfo format_info[] = {
#include "insn-format.def"
};

#undef F0
#undef F1
#undef F2
#undef F3
#undef F4
#undef F5
#undef R
#undef M
#undef BD
#undef BXD
#undef BDL
#undef BXDL
#undef I
#undef L
4811
/* Generally, we'll extract operands into this structures, operate upon
   them, and store them back.  See the "in1", "in2", "prep", "wout" sets
   of routines below for more details.  */
typedef struct {
    /* g_* mark the corresponding TCGv as a global (e.g. a cpu register),
       which must not be freed by translate_one.  */
    bool g_out, g_out2, g_in1, g_in2;
    TCGv_i64 out, out2, in1, in2;  /* operand values */
    TCGv_i64 addr1;                /* effective address of a memory operand */
} DisasOps;
4820
/* Return values from translate_one, indicating the state of the TB.  */
typedef enum {
    /* Continue the TB. */
    NO_EXIT,
    /* We have emitted one or more goto_tb.  No fixup required.  */
    EXIT_GOTO_TB,
    /* We are not using a goto_tb (for whatever reason), but have updated
       the PC (for whatever reason), so there's no need to do it again on
       exiting the TB.  */
    EXIT_PC_UPDATED,
    /* We are exiting the TB, but have neither emitted a goto_tb, nor
       updated the PC for the next instruction to be executed.  */
    EXIT_PC_STALE,
    /* We are ending the TB with a noreturn function call, e.g. longjmp.
       No following code will be executed.  */
    EXIT_NORETURN,
} ExitStatus;
4838
/* The architectural facility an instruction belongs to; used to gate
   availability of instructions in the insn-data table.  */
typedef enum DisasFacility {
    FAC_Z,                  /* zarch (default) */
    FAC_CASS,               /* compare and swap and store */
    FAC_CASS2,              /* compare and swap and store 2 */
    FAC_DFP,                /* decimal floating point */
    FAC_DFPR,               /* decimal floating point rounding */
    FAC_DO,                 /* distinct operands */
    FAC_EE,                 /* execute extensions */
    FAC_EI,                 /* extended immediate */
    FAC_FPE,                /* floating point extension */
    FAC_FPSSH,              /* floating point support sign handling */
    FAC_FPRGR,              /* FPR-GR transfer */
    FAC_GIE,                /* general instructions extension */
    FAC_HFP_MA,             /* HFP multiply-and-add/subtract */
    FAC_HW,                 /* high-word */
    FAC_IEEEE_SIM,          /* IEEE exception simulation */
    FAC_LOC,                /* load/store on condition */
    FAC_LD,                 /* long displacement */
    FAC_PC,                 /* population count */
    FAC_SCF,                /* store clock fast */
    FAC_SFLE,               /* store facility list extended */
} DisasFacility;
4861
/* One entry of the instruction table: opcode, format, required facility,
   and the set of helper callbacks that translate_one invokes in order
   (in1, in2, prep, op, wout, cout).  Any callback may be NULL.  */
struct DisasInsn {
    unsigned opc:16;        /* primary << 8 | secondary opcode */
    DisasFormat fmt:6;
    DisasFacility fac:6;

    const char *name;

    void (*help_in1)(DisasContext *, DisasFields *, DisasOps *);
    void (*help_in2)(DisasContext *, DisasFields *, DisasOps *);
    void (*help_prep)(DisasContext *, DisasFields *, DisasOps *);
    void (*help_wout)(DisasContext *, DisasFields *, DisasOps *);
    void (*help_cout)(DisasContext *, DisasOps *);
    ExitStatus (*help_op)(DisasContext *, DisasOps *);

    uint64_t data;          /* insn-specific constant passed via s->insn */
};
4878
4879 /* ====================================================================== */
4880 /* The operations. These perform the bulk of the work for any insn,
4881 usually after the operands have been loaded and output initialized. */
4882
/* 64-bit addition: out = in1 + in2.  Condition-code handling, if any,
   is done by the separate cout callback.  */
static ExitStatus op_add(DisasContext *s, DisasOps *o)
{
    tcg_gen_add_i64(o->out, o->in1, o->in2);
    return NO_EXIT;
}
4888
/* 64-bit subtraction: out = in1 - in2.  */
static ExitStatus op_sub(DisasContext *s, DisasOps *o)
{
    tcg_gen_sub_i64(o->out, o->in1, o->in2);
    return NO_EXIT;
}
4894
/* ====================================================================== */
/* The "Cc OUTput" generators.  Given the generated output (and in some cases
   the original inputs), update the various cc data structures in order to
   be able to compute the new condition code.  Each records inputs and
   result together with a CC_OP_* tag for lazy cc evaluation; the s32/s64
   vs u32/u64 suffix selects the signed/unsigned 32/64-bit variant.  */

static void cout_adds32(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_ADD_32, o->in1, o->in2, o->out);
}

static void cout_adds64(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_ADD_64, o->in1, o->in2, o->out);
}

static void cout_addu32(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_ADDU_32, o->in1, o->in2, o->out);
}

static void cout_addu64(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_ADDU_64, o->in1, o->in2, o->out);
}

static void cout_subs32(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_SUB_32, o->in1, o->in2, o->out);
}

static void cout_subs64(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_SUB_64, o->in1, o->in2, o->out);
}

static void cout_subu32(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_SUBU_32, o->in1, o->in2, o->out);
}

static void cout_subu64(DisasContext *s, DisasOps *o)
{
    gen_op_update3_cc_i64(s, CC_OP_SUBU_64, o->in1, o->in2, o->out);
}
4939
/* ====================================================================== */
/* The "PREPeration" generators.  These initialize the DisasOps.OUT fields
   with the TCG register to which we will write.  Used in combination with
   the "wout" generators, in some cases we need a new temporary, and in
   some cases we can write to a TCG global.  */

/* Result goes into a fresh temporary (freed by translate_one).  */
static void prep_new(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->out = tcg_temp_new_i64();
}

/* Result written directly into the r1 register global; g_out prevents
   translate_one from freeing it.  */
static void prep_r1(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->out = regs[get_field(f, r1)];
    o->g_out = true;
}
4956
/* ====================================================================== */
/* The "Write OUTput" generators.  These generally perform some non-trivial
   copy of data to TCG globals, or to main memory.  The trivial cases are
   generally handled by having a "prep" generator install the TCG global
   as the destination of the operation.  */

/* Store the low 32 bits of the result into register r1.  */
static void wout_r1_32(DisasContext *s, DisasFields *f, DisasOps *o)
{
    store_reg32_i64(get_field(f, r1), o->out);
}

/* Store the low 32 bits of the result to memory at addr1.  */
static void wout_m1_32(DisasContext *s, DisasFields *f, DisasOps *o)
{
    tcg_gen_qemu_st32(o->out, o->addr1, get_mem_index(s));
}

/* Store the full 64-bit result to memory at addr1.  */
static void wout_m1_64(DisasContext *s, DisasFields *f, DisasOps *o)
{
    tcg_gen_qemu_st64(o->out, o->addr1, get_mem_index(s));
}
4977
/* ====================================================================== */
/* The "INput 1" generators.  These load the first operand to an insn. */

/* First operand from register r1.  */
static void in1_r1(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in1 = load_reg(get_field(f, r1));
}

/* First operand from register r2.  */
static void in1_r2(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in1 = load_reg(get_field(f, r2));
}

/* First operand from register r3.  */
static void in1_r3(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in1 = load_reg(get_field(f, r3));
}

/* Compute the b1+d1 effective address into addr1; no memory access.  */
static void in1_la1(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->addr1 = get_address(s, 0, get_field(f, b1), get_field(f, d1));
}

/* First operand loaded from memory at addr1, 32-bit sign-extended.  */
static void in1_m1_32s(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in1_la1(s, f, o);
    o->in1 = tcg_temp_new_i64();
    tcg_gen_qemu_ld32s(o->in1, o->addr1, get_mem_index(s));
}

/* First operand loaded from memory at addr1, 32-bit zero-extended.  */
static void in1_m1_32u(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in1_la1(s, f, o);
    o->in1 = tcg_temp_new_i64();
    tcg_gen_qemu_ld32u(o->in1, o->addr1, get_mem_index(s));
}

/* First operand loaded from memory at addr1 as a full 64-bit value.  */
static void in1_m1_64(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in1_la1(s, f, o);
    o->in1 = tcg_temp_new_i64();
    tcg_gen_qemu_ld64(o->in1, o->addr1, get_mem_index(s));
}
5021
/* ====================================================================== */
/* The "INput 2" generators.  These load the second operand to an insn. */

/* Second operand from register r2.  */
static void in2_r2(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = load_reg(get_field(f, r2));
}

/* Second operand from register r3.  */
static void in2_r3(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = load_reg(get_field(f, r3));
}

/* Second operand is the low 32 bits of r2, sign-extended to 64.  */
static void in2_r2_32s(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = tcg_temp_new_i64();
    tcg_gen_ext32s_i64(o->in2, regs[get_field(f, r2)]);
}

/* Second operand is the low 32 bits of r2, zero-extended to 64.  */
static void in2_r2_32u(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = tcg_temp_new_i64();
    tcg_gen_ext32u_i64(o->in2, regs[get_field(f, r2)]);
}

/* Second operand is the x2+b2+d2 effective address; x2 is optional
   depending on the format.  */
static void in2_a2(DisasContext *s, DisasFields *f, DisasOps *o)
{
    int x2 = have_field(f, x2) ? get_field(f, x2) : 0;
    o->in2 = get_address(s, x2, get_field(f, b2), get_field(f, d2));
}

/* The in2_m2_* variants load from that address, reusing the address
   temporary as the destination of the load.  */

static void in2_m2_16s(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in2_a2(s, f, o);
    tcg_gen_qemu_ld16s(o->in2, o->in2, get_mem_index(s));
}

static void in2_m2_32s(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in2_a2(s, f, o);
    tcg_gen_qemu_ld32s(o->in2, o->in2, get_mem_index(s));
}

static void in2_m2_32u(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in2_a2(s, f, o);
    tcg_gen_qemu_ld32u(o->in2, o->in2, get_mem_index(s));
}

static void in2_m2_64(DisasContext *s, DisasFields *f, DisasOps *o)
{
    in2_a2(s, f, o);
    tcg_gen_qemu_ld64(o->in2, o->in2, get_mem_index(s));
}

/* Second operand is the immediate i2 (sign-extended, since fields of
   type "signed" are stored sign-extended in DisasFields.c[]).  */
static void in2_i2(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = tcg_const_i64(get_field(f, i2));
}

/* Second operand is the immediate i2, zero-extended from 32 bits.  */
static void in2_i2_32u(DisasContext *s, DisasFields *f, DisasOps *o)
{
    o->in2 = tcg_const_i64((uint32_t)get_field(f, i2));
}
5086
5087 /* ====================================================================== */
5088
/* Find opc within the table of insns.  This is formulated as a switch
   statement so that (1) we get compile-time notice of cut-paste errors
   for duplicated opcodes, and (2) the compiler generates the binary
   search tree, rather than us having to post-process the table.

   insn-data.def is expanded three times with different definitions of
   the D macro: first into the enum of table indexes, then into the
   DisasInsn table itself, and finally into the lookup switch.  The C
   macro is the common case of an insn with no extra data word.  */

#define C(OPC, NM, FT, FC, I1, I2, P, W, OP, CC) \
    D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, 0)

#define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) insn_ ## NM,

enum DisasInsnEnum {
#include "insn-data.def"
};

#undef D
#define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) {   \
    .opc = OPC,                                         \
    .fmt = FMT_##FT,                                    \
    .fac = FAC_##FC,                                    \
    .name = #NM,                                        \
    .help_in1 = in1_##I1,                               \
    .help_in2 = in2_##I2,                               \
    .help_prep = prep_##P,                              \
    .help_wout = wout_##W,                              \
    .help_cout = cout_##CC,                             \
    .help_op = op_##OP,                                 \
    .data = D                                           \
},

/* Allow 0 to be used for NULL in the table below. */
#define in1_0  NULL
#define in2_0  NULL
#define prep_0  NULL
#define wout_0  NULL
#define cout_0  NULL
#define op_0  NULL

static const DisasInsn insn_info[] = {
#include "insn-data.def"
};

#undef D
#define D(OPC, NM, FT, FC, I1, I2, P, W, OP, CC, D) \
    case OPC: return &insn_info[insn_ ## NM];

/* Map (op << 8 | op2) to its table entry, or NULL if unimplemented.  */
static const DisasInsn *lookup_opc(uint16_t opc)
{
    switch (opc) {
#include "insn-data.def"
    default:
        return NULL;
    }
}

#undef D
#undef C
5145
/* Extract a field from the insn.  The INSN should be left-aligned in
   the uint64_t so that we can more easily utilize the big-bit-endian
   definitions we extract from the Principles of Operation.  */
5149
5150 static void extract_field(DisasFields *o, const DisasField *f, uint64_t insn)
5151 {
5152 uint32_t r, m;
5153
5154 if (f->size == 0) {
5155 return;
5156 }
5157
5158 /* Zero extract the field from the insn. */
5159 r = (insn << f->beg) >> (64 - f->size);
5160
5161 /* Sign-extend, or un-swap the field as necessary. */
5162 switch (f->type) {
5163 case 0: /* unsigned */
5164 break;
5165 case 1: /* signed */
5166 assert(f->size <= 32);
5167 m = 1u << (f->size - 1);
5168 r = (r ^ m) - m;
5169 break;
5170 case 2: /* dl+dh split, signed 20 bit. */
5171 r = ((int8_t)r << 12) | (r >> 8);
5172 break;
5173 default:
5174 abort();
5175 }
5176
5177 /* Validate that the "compressed" encoding we selected above is valid.
5178 I.e. we havn't make two different original fields overlap. */
5179 assert(((o->presentC >> f->indexC) & 1) == 0);
5180 o->presentC |= 1 << f->indexC;
5181 o->presentO |= 1 << f->indexO;
5182
5183 o->c[f->indexC] = r;
5184 }
5185
5186 /* Lookup the insn at the current PC, extracting the operands into O and
5187 returning the info struct for the insn. Returns NULL for invalid insn. */
5188
/* Lookup the insn at the current PC, extracting the operands into O and
   returning the info struct for the insn.  Returns NULL for invalid insn.
   Also sets s->next_pc from the instruction length.  */
static const DisasInsn *extract_insn(CPUS390XState *env, DisasContext *s,
                                     DisasFields *f)
{
    uint64_t insn, pc = s->pc;
    int op, op2, ilen;
    const DisasInsn *info;

    /* The first two bytes always contain the primary opcode, from which
       the total instruction length (2, 4 or 6 bytes) is derived.  */
    insn = ld_code2(env, pc);
    op = (insn >> 8) & 0xff;
    ilen = get_ilen(op);
    s->next_pc = s->pc + ilen;

    /* Left-align the full instruction in the uint64_t so that field
       offsets can be counted from the MSB (see extract_field).  */
    switch (ilen) {
    case 2:
        insn = insn << 48;
        break;
    case 4:
        insn = ld_code4(env, pc) << 32;
        break;
    case 6:
        insn = (insn << 48) | (ld_code4(env, pc + 2) << 16);
        break;
    default:
        abort();
    }

    /* We can't actually determine the insn format until we've looked up
       the full insn opcode.  Which we can't do without locating the
       secondary opcode.  Assume by default that OP2 is at bit 40; for
       those smaller insns that don't actually have a secondary opcode
       this will correctly result in OP2 = 0.  */
    switch (op) {
    case 0x01: /* E */
    case 0x80: /* S */
    case 0x82: /* S */
    case 0x93: /* S */
    case 0xb2: /* S, RRF, RRE */
    case 0xb3: /* RRE, RRD, RRF */
    case 0xb9: /* RRE, RRF */
    case 0xe5: /* SSE, SIL */
        /* Secondary opcode is the second byte.  */
        op2 = (insn << 8) >> 56;
        break;
    case 0xa5: /* RI */
    case 0xa7: /* RI */
    case 0xc0: /* RIL */
    case 0xc2: /* RIL */
    case 0xc4: /* RIL */
    case 0xc6: /* RIL */
    case 0xc8: /* SSF */
    case 0xcc: /* RIL */
        /* Secondary opcode is the low nibble of the second byte.  */
        op2 = (insn << 12) >> 60;
        break;
    case 0xd0 ... 0xdf: /* SS */
    case 0xe1: /* SS */
    case 0xe2: /* SS */
    case 0xe8: /* SS */
    case 0xe9: /* SS */
    case 0xea: /* SS */
    case 0xee ... 0xf3: /* SS */
    case 0xf8 ... 0xfd: /* SS */
        /* SS-format insns have no secondary opcode.  */
        op2 = 0;
        break;
    default:
        op2 = (insn << 40) >> 56;
        break;
    }

    memset(f, 0, sizeof(*f));
    f->op = op;
    f->op2 = op2;

    /* Lookup the instruction. */
    info = lookup_opc(op << 8 | op2);

    /* If we found it, extract the operands. */
    if (info != NULL) {
        DisasFormat fmt = info->fmt;
        int i;

        for (i = 0; i < NUM_C_FIELD; ++i) {
            extract_field(f, &format_info[fmt].op[i], insn);
        }
    }
    return info;
}
5274
/* Translate the single instruction at s->pc, advancing s->pc past it.
   Falls back to the legacy interpreter (disas_s390_insn) for opcodes
   not yet in the new table; otherwise drives the DisasInsn callback
   pipeline: in1 -> in2 -> prep -> op -> wout -> cout.  */
static ExitStatus translate_one(CPUS390XState *env, DisasContext *s)
{
    const DisasInsn *insn;
    ExitStatus ret = NO_EXIT;
    DisasFields f;
    DisasOps o;

    insn = extract_insn(env, s, &f);

    /* If not found, try the old interpreter.  This includes ILLOPC. */
    if (insn == NULL) {
        disas_s390_insn(env, s);
        /* Map the legacy DISAS_* result onto the new ExitStatus.  */
        switch (s->is_jmp) {
        case DISAS_NEXT:
            ret = NO_EXIT;
            break;
        case DISAS_TB_JUMP:
            ret = EXIT_GOTO_TB;
            break;
        case DISAS_JUMP:
            ret = EXIT_PC_UPDATED;
            break;
        case DISAS_EXCP:
            ret = EXIT_NORETURN;
            break;
        default:
            abort();
        }

        s->pc = s->next_pc;
        return ret;
    }

    /* Set up the structures we use to communicate with the helpers. */
    s->insn = insn;
    s->fields = &f;
    o.g_out = o.g_out2 = o.g_in1 = o.g_in2 = false;
    TCGV_UNUSED_I64(o.out);
    TCGV_UNUSED_I64(o.out2);
    TCGV_UNUSED_I64(o.in1);
    TCGV_UNUSED_I64(o.in2);
    TCGV_UNUSED_I64(o.addr1);

    /* Implement the instruction. */
    if (insn->help_in1) {
        insn->help_in1(s, &f, &o);
    }
    if (insn->help_in2) {
        insn->help_in2(s, &f, &o);
    }
    if (insn->help_prep) {
        insn->help_prep(s, &f, &o);
    }
    if (insn->help_op) {
        ret = insn->help_op(s, &o);
    }
    if (insn->help_wout) {
        insn->help_wout(s, &f, &o);
    }
    if (insn->help_cout) {
        insn->help_cout(s, &o);
    }

    /* Free any temporaries created by the helpers, skipping those
       flagged as TCG globals.  */
    if (!TCGV_IS_UNUSED_I64(o.out) && !o.g_out) {
        tcg_temp_free_i64(o.out);
    }
    if (!TCGV_IS_UNUSED_I64(o.out2) && !o.g_out2) {
        tcg_temp_free_i64(o.out2);
    }
    if (!TCGV_IS_UNUSED_I64(o.in1) && !o.g_in1) {
        tcg_temp_free_i64(o.in1);
    }
    if (!TCGV_IS_UNUSED_I64(o.in2) && !o.g_in2) {
        tcg_temp_free_i64(o.in2);
    }
    if (!TCGV_IS_UNUSED_I64(o.addr1)) {
        tcg_temp_free_i64(o.addr1);
    }

    /* Advance to the next instruction. */
    s->pc = s->next_pc;
    return ret;
}
5359
/* Translate a basic block starting at tb->pc into TCG ops.  With
   SEARCH_PC set, additionally record per-op pc/cc_op/icount metadata
   used to restore CPU state at a faulting instruction.  */
static inline void gen_intermediate_code_internal(CPUS390XState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext dc;
    target_ulong pc_start;
    uint64_t next_page_start;
    uint16_t *gen_opc_end;
    int j, lj = -1;
    int num_insns, max_insns;
    CPUBreakpoint *bp;
    ExitStatus status;
    bool do_debug;

    pc_start = tb->pc;

    /* 31-bit mode */
    if (!(tb->flags & FLAG_MASK_64)) {
        pc_start &= 0x7fffffff;
    }

    dc.tb = tb;
    dc.pc = pc_start;
    dc.cc_op = CC_OP_DYNAMIC;
    do_debug = dc.singlestep_enabled = env->singlestep_enabled;
    dc.is_jmp = DISAS_NEXT;

    gen_opc_end = tcg_ctx.gen_opc_buf + OPC_MAX_SIZE;

    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }

    gen_icount_start();

    do {
        if (search_pc) {
            /* Record the mapping from TCG op index to guest pc, cc_op
               and instruction count, zero-filling any gap.  */
            j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j) {
                    tcg_ctx.gen_opc_instr_start[lj++] = 0;
                }
            }
            tcg_ctx.gen_opc_pc[lj] = dc.pc;
            gen_opc_cc_op[lj] = dc.cc_op;
            tcg_ctx.gen_opc_instr_start[lj] = 1;
            tcg_ctx.gen_opc_icount[lj] = num_insns;
        }
        if (++num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP | CPU_LOG_TB_OP_OPT))) {
            tcg_gen_debug_insn_start(dc.pc);
        }

        /* A breakpoint at this pc ends the TB before translating the
           insn, so the debug exception is raised with a correct PSW.  */
        status = NO_EXIT;
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc.pc) {
                    status = EXIT_PC_STALE;
                    do_debug = true;
                    break;
                }
            }
        }
        if (status == NO_EXIT) {
            status = translate_one(env, &dc);
        }

        /* If we reach a page boundary, are single stepping,
           or exhaust instruction count, stop generation.  */
        if (status == NO_EXIT
            && (dc.pc >= next_page_start
                || tcg_ctx.gen_opc_ptr >= gen_opc_end
                || num_insns >= max_insns
                || singlestep
                || env->singlestep_enabled)) {
            status = EXIT_PC_STALE;
        }
    } while (status == NO_EXIT);

    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }

    /* Emit the TB epilogue appropriate to how translation ended.  */
    switch (status) {
    case EXIT_GOTO_TB:
    case EXIT_NORETURN:
        break;
    case EXIT_PC_STALE:
        update_psw_addr(&dc);
        /* FALLTHRU */
    case EXIT_PC_UPDATED:
        if (singlestep && dc.cc_op != CC_OP_DYNAMIC) {
            gen_op_calc_cc(&dc);
        } else {
            /* Next TB starts off with CC_OP_DYNAMIC,
               so make sure the cc op type is in env */
            gen_op_set_cc_op(&dc);
        }
        if (do_debug) {
            gen_exception(EXCP_DEBUG);
        } else {
            /* Generate the return instruction */
            tcg_gen_exit_tb(0);
        }
        break;
    default:
        abort();
    }

    gen_icount_end(tb, num_insns);
    *tcg_ctx.gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        /* Zero-fill the metadata arrays past the last recorded op.  */
        j = tcg_ctx.gen_opc_ptr - tcg_ctx.gen_opc_buf;
        lj++;
        while (lj <= j) {
            tcg_ctx.gen_opc_instr_start[lj++] = 0;
        }
    } else {
        tb->size = dc.pc - pc_start;
        tb->icount = num_insns;
    }

#if defined(S390X_DEBUG_DISAS)
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(env, pc_start, dc.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
5498
/* Public entry point: translate a TB without pc-search metadata.  */
void gen_intermediate_code (CPUS390XState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

/* Public entry point: translate a TB recording pc-search metadata,
   used when restoring state after a fault.  */
void gen_intermediate_code_pc (CPUS390XState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
5508
/* Restore the CPU state to the instruction at op index PC_POS, using the
   metadata recorded during a search_pc translation.  The cc_op is only
   restored when it carries real information (not DYNAMIC/STATIC).  */
void restore_state_to_opc(CPUS390XState *env, TranslationBlock *tb, int pc_pos)
{
    int cc_op;
    env->psw.addr = tcg_ctx.gen_opc_pc[pc_pos];
    cc_op = gen_opc_cc_op[pc_pos];
    if ((cc_op != CC_OP_DYNAMIC) && (cc_op != CC_OP_STATIC)) {
        env->cc_op = cc_op;
    }
}