/*
 * target-s390x/translate.c (from qemu.git, commit
 * "target-s390x: Add missing tcg_temp_free_i32()")
 */
1 /*
2 * S/390 translation
3 *
4 * Copyright (c) 2009 Ulrich Hecht
5 * Copyright (c) 2010 Alexander Graf
6 *
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
11 *
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19 */
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25
26 /* #define DEBUG_ILLEGAL_INSTRUCTIONS */
27 /* #define DEBUG_INLINE_BRANCHES */
28 #define S390X_DEBUG_DISAS
29 /* #define S390X_DEBUG_DISAS_VERBOSE */
30
31 #ifdef S390X_DEBUG_DISAS_VERBOSE
32 # define LOG_DISAS(...) qemu_log(__VA_ARGS__)
33 #else
34 # define LOG_DISAS(...) do { } while (0)
35 #endif
36
37 #include "cpu.h"
38 #include "exec-all.h"
39 #include "disas.h"
40 #include "tcg-op.h"
41 #include "qemu-log.h"
42
43 /* global register indexes */
44 static TCGv_ptr cpu_env;
45
46 #include "gen-icount.h"
47 #include "helpers.h"
48 #define GEN_HELPER 1
49 #include "helpers.h"
50
/* Per-translation-block decoder state. */
typedef struct DisasContext DisasContext;
struct DisasContext {
    uint64_t pc;                 /* guest address of the insn being translated */
    int is_jmp;                  /* DISAS_* disposition of the current TB */
    enum cc_op cc_op;            /* how the condition code is currently encoded */
    struct TranslationBlock *tb; /* TB under translation (flags, start pc) */
};

/* is_jmp value: TB ends because an exception was raised. */
#define DISAS_EXCP 4
60
61 static void gen_op_calc_cc(DisasContext *s);
62
63 #ifdef DEBUG_INLINE_BRANCHES
64 static uint64_t inline_branch_hit[CC_OP_MAX];
65 static uint64_t inline_branch_miss[CC_OP_MAX];
66 #endif
67
/* Trace the raw instruction word when verbose disassembly is enabled. */
static inline void debug_insn(uint64_t insn)
{
    LOG_DISAS("insn: 0x%" PRIx64 "\n", insn);
}
72
73 static inline uint64_t pc_to_link_info(DisasContext *s, uint64_t pc)
74 {
75 if (!(s->tb->flags & FLAG_MASK_64)) {
76 if (s->tb->flags & FLAG_MASK_32) {
77 return pc | 0x80000000;
78 }
79 }
80 return pc;
81 }
82
83 void cpu_dump_state(CPUState *env, FILE *f, fprintf_function cpu_fprintf,
84 int flags)
85 {
86 int i;
87
88 for (i = 0; i < 16; i++) {
89 cpu_fprintf(f, "R%02d=%016" PRIx64, i, env->regs[i]);
90 if ((i % 4) == 3) {
91 cpu_fprintf(f, "\n");
92 } else {
93 cpu_fprintf(f, " ");
94 }
95 }
96
97 for (i = 0; i < 16; i++) {
98 cpu_fprintf(f, "F%02d=%016" PRIx64, i, *(uint64_t *)&env->fregs[i]);
99 if ((i % 4) == 3) {
100 cpu_fprintf(f, "\n");
101 } else {
102 cpu_fprintf(f, " ");
103 }
104 }
105
106 cpu_fprintf(f, "\n");
107
108 #ifndef CONFIG_USER_ONLY
109 for (i = 0; i < 16; i++) {
110 cpu_fprintf(f, "C%02d=%016" PRIx64, i, env->cregs[i]);
111 if ((i % 4) == 3) {
112 cpu_fprintf(f, "\n");
113 } else {
114 cpu_fprintf(f, " ");
115 }
116 }
117 #endif
118
119 cpu_fprintf(f, "\n");
120
121 if (env->cc_op > 3) {
122 cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %15s\n",
123 env->psw.mask, env->psw.addr, cc_name(env->cc_op));
124 } else {
125 cpu_fprintf(f, "PSW=mask %016" PRIx64 " addr %016" PRIx64 " cc %02x\n",
126 env->psw.mask, env->psw.addr, env->cc_op);
127 }
128
129 #ifdef DEBUG_INLINE_BRANCHES
130 for (i = 0; i < CC_OP_MAX; i++) {
131 cpu_fprintf(f, " %15s = %10ld\t%10ld\n", cc_name(i),
132 inline_branch_miss[i], inline_branch_hit[i]);
133 }
134 #endif
135 }
136
/* TCG globals mirroring the architectural PSW in CPUState. */
static TCGv_i64 psw_addr;
static TCGv_i64 psw_mask;

/* Lazily-evaluated condition code state (see gen_op_calc_cc()). */
static TCGv_i32 cc_op;
static TCGv_i64 cc_src;
static TCGv_i64 cc_dst;
static TCGv_i64 cc_vr;

/* "r0".."r9" take 3 bytes each, "r10".."r15" take 4 (incl. NUL). */
static char cpu_reg_names[10*3 + 6*4];
static TCGv_i64 regs[16];

/* cc_op recorded per generated op, for state restore on TB retranslation. */
static uint8_t gen_opc_cc_op[OPC_BUF_SIZE];
149
/* Register the TCG globals declared above; called once at CPU creation. */
void s390x_translate_init(void)
{
    int i;
    size_t cpu_reg_names_size = sizeof(cpu_reg_names);
    char *p;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    psw_addr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, psw.addr),
                                      "psw_addr");
    psw_mask = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, psw.mask),
                                      "psw_mask");

    cc_op = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUState, cc_op),
                                   "cc_op");
    cc_src = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, cc_src),
                                    "cc_src");
    cc_dst = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, cc_dst),
                                    "cc_dst");
    cc_vr = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, cc_vr),
                                   "cc_vr");

    /* Carve each register's name out of the static buffer; the TCG core
       keeps the pointer, so the storage must stay live. */
    p = cpu_reg_names;
    for (i = 0; i < 16; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        regs[i] = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, regs[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
    }
}
180
/* Return a fresh temporary holding the full 64-bit value of GPR @reg. */
static inline TCGv_i64 load_reg(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_mov_i64(r, regs[reg]);
    return r;
}

/* Return a fresh temporary holding the 64-bit value of FPR @reg. */
static inline TCGv_i64 load_freg(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_ld_i64(r, cpu_env, offsetof(CPUState, fregs[reg].d));
    return r;
}

/* Return a fresh temporary holding the short (32-bit) part of FPR @reg. */
static inline TCGv_i32 load_freg32(int reg)
{
    TCGv_i32 r = tcg_temp_new_i32();
    tcg_gen_ld_i32(r, cpu_env, offsetof(CPUState, fregs[reg].l.upper));
    return r;
}

/* Return a fresh temporary holding the low 32 bits of GPR @reg. */
static inline TCGv_i32 load_reg32(int reg)
{
    TCGv_i32 r = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(r, regs[reg]);
    return r;
}

/* Return a fresh i64 temporary holding the low 32 bits of GPR @reg,
   sign-extended to 64 bits. */
static inline TCGv_i64 load_reg32_i64(int reg)
{
    TCGv_i64 r = tcg_temp_new_i64();
    tcg_gen_ext32s_i64(r, regs[reg]);
    return r;
}

/* Store @v into GPR @reg (all 64 bits). */
static inline void store_reg(int reg, TCGv_i64 v)
{
    tcg_gen_mov_i64(regs[reg], v);
}

/* Store @v into FPR @reg. */
static inline void store_freg(int reg, TCGv_i64 v)
{
    tcg_gen_st_i64(v, cpu_env, offsetof(CPUState, fregs[reg].d));
}

/* Store the 32-bit value @v into the low half of GPR @reg. */
static inline void store_reg32(int reg, TCGv_i32 v)
{
#if HOST_LONG_BITS == 32
    tcg_gen_mov_i32(TCGV_LOW(regs[reg]), v);
#else
    TCGv_i64 tmp = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(tmp, v);
    /* 32 bit register writes keep the upper half */
    tcg_gen_deposit_i64(regs[reg], regs[reg], tmp, 0, 32);
    tcg_temp_free_i64(tmp);
#endif
}

/* Store the low 32 bits of the i64 @v into the low half of GPR @reg. */
static inline void store_reg32_i64(int reg, TCGv_i64 v)
{
    /* 32 bit register writes keep the upper half */
#if HOST_LONG_BITS == 32
    tcg_gen_mov_i32(TCGV_LOW(regs[reg]), TCGV_LOW(v));
#else
    tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 32);
#endif
}

/* Store the 16-bit value @v into the low 16 bits of GPR @reg. */
static inline void store_reg16(int reg, TCGv_i32 v)
{
    TCGv_i64 tmp = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(tmp, v);
    /* 16 bit register writes keep the upper bytes */
    tcg_gen_deposit_i64(regs[reg], regs[reg], tmp, 0, 16);
    tcg_temp_free_i64(tmp);
}

/* Store the low 8 bits of @v into the low byte of GPR @reg. */
static inline void store_reg8(int reg, TCGv_i64 v)
{
    /* 8 bit register writes keep the upper bytes */
    tcg_gen_deposit_i64(regs[reg], regs[reg], v, 0, 8);
}

/* Store the 32-bit value @v into the short part of FPR @reg. */
static inline void store_freg32(int reg, TCGv_i32 v)
{
    tcg_gen_st_i32(v, cpu_env, offsetof(CPUState, fregs[reg].l.upper));
}
268
/* Synchronize psw.addr with the translation-time PC. */
static inline void update_psw_addr(DisasContext *s)
{
    /* psw.addr */
    tcg_gen_movi_i64(psw_addr, s->pc);
}

/* Called before an operation that may fault (softmmu only): make sure
   psw.addr and the cc state are committed so the exception handler sees
   a consistent CPU state. */
static inline void potential_page_fault(DisasContext *s)
{
#ifndef CONFIG_USER_ONLY
    update_psw_addr(s);
    gen_op_calc_cc(s);
#endif
}
282
/* Fetch 2 bytes of instruction text at guest address @pc. */
static inline uint64_t ld_code2(uint64_t pc)
{
    return (uint64_t)lduw_code(pc);
}

/* Fetch 4 bytes of instruction text at guest address @pc. */
static inline uint64_t ld_code4(uint64_t pc)
{
    return (uint64_t)ldl_code(pc);
}

/* Fetch 6 bytes of instruction text at guest address @pc; the result
   occupies bits 47..0 (high halfword first). */
static inline uint64_t ld_code6(uint64_t pc)
{
    uint64_t opc;
    opc = (uint64_t)lduw_code(pc) << 32;
    opc |= (uint64_t)(uint32_t)ldl_code(pc+2);
    return opc;
}
300
301 static inline int get_mem_index(DisasContext *s)
302 {
303 switch (s->tb->flags & FLAG_MASK_ASC) {
304 case PSW_ASC_PRIMARY >> 32:
305 return 0;
306 case PSW_ASC_SECONDARY >> 32:
307 return 1;
308 case PSW_ASC_HOME >> 32:
309 return 2;
310 default:
311 tcg_abort();
312 break;
313 }
314 }
315
316 static inline void gen_debug(DisasContext *s)
317 {
318 TCGv_i32 tmp = tcg_const_i32(EXCP_DEBUG);
319 update_psw_addr(s);
320 gen_op_calc_cc(s);
321 gen_helper_exception(tmp);
322 tcg_temp_free_i32(tmp);
323 s->is_jmp = DISAS_EXCP;
324 }
325
326 #ifdef CONFIG_USER_ONLY
327
328 static void gen_illegal_opcode(DisasContext *s, int ilc)
329 {
330 TCGv_i32 tmp = tcg_const_i32(EXCP_SPEC);
331 update_psw_addr(s);
332 gen_op_calc_cc(s);
333 gen_helper_exception(tmp);
334 tcg_temp_free_i32(tmp);
335 s->is_jmp = DISAS_EXCP;
336 }
337
338 #else /* CONFIG_USER_ONLY */
339
/* With DEBUG_ILLEGAL_INSTRUCTIONS enabled, re-fetch the offending
   instruction (its size given by length code @ilc) and print it to
   stderr; otherwise a no-op. */
static void debug_print_inst(DisasContext *s, int ilc)
{
#ifdef DEBUG_ILLEGAL_INSTRUCTIONS
    uint64_t inst = 0;

    switch (ilc & 3) {
    case 1:
        inst = ld_code2(s->pc);
        break;
    case 2:
        inst = ld_code4(s->pc);
        break;
    case 3:
        inst = ld_code6(s->pc);
        break;
    }

    fprintf(stderr, "Illegal instruction [%d at %016" PRIx64 "]: 0x%016"
            PRIx64 "\n", ilc, s->pc, inst);
#endif
}
361
/* Raise program exception @code for an instruction with length code
   @ilc: record code/ilc for the handler, advance the PSW past the
   instruction, commit the cc and trigger EXCP_PGM.  Ends the TB. */
static void gen_program_exception(DisasContext *s, int ilc, int code)
{
    TCGv_i32 tmp;

    debug_print_inst(s, ilc);

    /* remember what pgm exception this was */
    tmp = tcg_const_i32(code);
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, int_pgm_code));
    tcg_temp_free_i32(tmp);

    tmp = tcg_const_i32(ilc);
    tcg_gen_st_i32(tmp, cpu_env, offsetof(CPUState, int_pgm_ilc));
    tcg_temp_free_i32(tmp);

    /* advance past instruction (ilc counts halfwords) */
    s->pc += (ilc * 2);
    update_psw_addr(s);

    /* save off cc */
    gen_op_calc_cc(s);

    /* trigger exception */
    tmp = tcg_const_i32(EXCP_PGM);
    gen_helper_exception(tmp);
    tcg_temp_free_i32(tmp);

    /* end TB here */
    s->is_jmp = DISAS_EXCP;
}
392
393
/* System mode: illegal opcodes become specification exceptions. */
static void gen_illegal_opcode(DisasContext *s, int ilc)
{
    gen_program_exception(s, ilc, PGM_SPECIFICATION);
}

/* Raise a privileged-operation program exception. */
static void gen_privileged_exception(DisasContext *s, int ilc)
{
    gen_program_exception(s, ilc, PGM_PRIVILEGED);
}

/* Emit a privileged-operation exception when the TB was translated in
   problem state (PSW P bit set). */
static void check_privileged(DisasContext *s, int ilc)
{
    if (s->tb->flags & (PSW_MASK_PSTATE >> 32)) {
        gen_privileged_exception(s, ilc);
    }
}
410
411 #endif /* CONFIG_USER_ONLY */
412
413 static TCGv_i64 get_address(DisasContext *s, int x2, int b2, int d2)
414 {
415 TCGv_i64 tmp;
416
417 /* 31-bitify the immediate part; register contents are dealt with below */
418 if (!(s->tb->flags & FLAG_MASK_64)) {
419 d2 &= 0x7fffffffUL;
420 }
421
422 if (x2) {
423 if (d2) {
424 tmp = tcg_const_i64(d2);
425 tcg_gen_add_i64(tmp, tmp, regs[x2]);
426 } else {
427 tmp = load_reg(x2);
428 }
429 if (b2) {
430 tcg_gen_add_i64(tmp, tmp, regs[b2]);
431 }
432 } else if (b2) {
433 if (d2) {
434 tmp = tcg_const_i64(d2);
435 tcg_gen_add_i64(tmp, tmp, regs[b2]);
436 } else {
437 tmp = load_reg(b2);
438 }
439 } else {
440 tmp = tcg_const_i64(d2);
441 }
442
443 /* 31-bit mode mask if there are values loaded from registers */
444 if (!(s->tb->flags & FLAG_MASK_64) && (x2 || b2)) {
445 tcg_gen_andi_i64(tmp, tmp, 0x7fffffffUL);
446 }
447
448 return tmp;
449 }
450
/* Set the cc statically to the constant @val (0..3). */
static void gen_op_movi_cc(DisasContext *s, uint32_t val)
{
    s->cc_op = CC_OP_CONST0 + val;
}

/* Record a lazy cc computation with one 64-bit operand (in cc_dst). */
static void gen_op_update1_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 dst)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* Record a lazy cc computation with one 32-bit operand (zero-extended). */
static void gen_op_update1_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 dst)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* Record a lazy cc computation with two 64-bit operands. */
static void gen_op_update2_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
                                  TCGv_i64 dst)
{
    tcg_gen_mov_i64(cc_src, src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* Record a lazy cc computation with two 32-bit operands. */
static void gen_op_update2_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
                                  TCGv_i32 dst)
{
    tcg_gen_extu_i32_i64(cc_src, src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = op;
}

/* Record a lazy cc computation with three 64-bit operands. */
static void gen_op_update3_cc_i64(DisasContext *s, enum cc_op op, TCGv_i64 src,
                                  TCGv_i64 dst, TCGv_i64 vr)
{
    tcg_gen_mov_i64(cc_src, src);
    tcg_gen_mov_i64(cc_dst, dst);
    tcg_gen_mov_i64(cc_vr, vr);
    s->cc_op = op;
}

/* Record a lazy cc computation with three 32-bit operands. */
static void gen_op_update3_cc_i32(DisasContext *s, enum cc_op op, TCGv_i32 src,
                                  TCGv_i32 dst, TCGv_i32 vr)
{
    tcg_gen_extu_i32_i64(cc_src, src);
    tcg_gen_extu_i32_i64(cc_dst, dst);
    tcg_gen_extu_i32_i64(cc_vr, vr);
    s->cc_op = op;
}

/* cc = (val != 0), 32-bit operand. */
static inline void set_cc_nz_u32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ, val);
}

/* cc = (val != 0), 64-bit operand. */
static inline void set_cc_nz_u64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_NZ, val);
}
517
/* Record a 32-bit comparison of @v1 and @v2 for lazy cc evaluation. */
static inline void cmp_32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          enum cc_op cond)
{
    gen_op_update2_cc_i32(s, cond, v1, v2);
}

/* Record a 64-bit comparison of @v1 and @v2 for lazy cc evaluation. */
static inline void cmp_64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          enum cc_op cond)
{
    gen_op_update2_cc_i64(s, cond, v1, v2);
}

/* Signed 32-bit compare. */
static inline void cmp_s32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTGT_32);
}

/* Unsigned 32-bit compare. */
static inline void cmp_u32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    cmp_32(s, v1, v2, CC_OP_LTUGTU_32);
}

/* Signed 32-bit compare against a constant. */
static inline void cmp_s32c(DisasContext *s, TCGv_i32 v1, int32_t v2)
{
    /* XXX optimize for the constant? put it in s? */
    TCGv_i32 tmp = tcg_const_i32(v2);
    cmp_32(s, v1, tmp, CC_OP_LTGT_32);
    tcg_temp_free_i32(tmp);
}

/* Unsigned 32-bit compare against a constant. */
static inline void cmp_u32c(DisasContext *s, TCGv_i32 v1, uint32_t v2)
{
    TCGv_i32 tmp = tcg_const_i32(v2);
    cmp_32(s, v1, tmp, CC_OP_LTUGTU_32);
    tcg_temp_free_i32(tmp);
}

/* Signed 64-bit compare. */
static inline void cmp_s64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTGT_64);
}

/* Unsigned 64-bit compare. */
static inline void cmp_u64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2)
{
    cmp_64(s, v1, v2, CC_OP_LTUGTU_64);
}

/* Signed 64-bit compare against a constant. */
static inline void cmp_s64c(DisasContext *s, TCGv_i64 v1, int64_t v2)
{
    TCGv_i64 tmp = tcg_const_i64(v2);
    cmp_s64(s, v1, tmp);
    tcg_temp_free_i64(tmp);
}

/* Unsigned 64-bit compare against a constant. */
static inline void cmp_u64c(DisasContext *s, TCGv_i64 v1, uint64_t v2)
{
    TCGv_i64 tmp = tcg_const_i64(v2);
    cmp_u64(s, v1, tmp);
    tcg_temp_free_i64(tmp);
}

/* cc from the sign of a 32-bit value (compare against zero). */
static inline void set_cc_s32(DisasContext *s, TCGv_i32 val)
{
    gen_op_update1_cc_i32(s, CC_OP_LTGT0_32, val);
}

/* cc from the sign of a 64-bit value (compare against zero). */
static inline void set_cc_s64(DisasContext *s, TCGv_i64 val)
{
    gen_op_update1_cc_i64(s, CC_OP_LTGT0_64, val);
}
588
/* cc for signed 64-bit add: v1 + v2 = vr. */
static void set_cc_add64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2, TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_ADD_64, v1, v2, vr);
}

/* cc for unsigned 64-bit add (carry). */
static void set_cc_addu64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_ADDU_64, v1, v2, vr);
}

/* cc for signed 64-bit subtract: v1 - v2 = vr. */
static void set_cc_sub64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2, TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_SUB_64, v1, v2, vr);
}

/* cc for unsigned 64-bit subtract (borrow). */
static void set_cc_subu64(DisasContext *s, TCGv_i64 v1, TCGv_i64 v2,
                          TCGv_i64 vr)
{
    gen_op_update3_cc_i64(s, CC_OP_SUBU_64, v1, v2, vr);
}

/* cc for 64-bit load positive (absolute value). */
static void set_cc_abs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_ABS_64, v1);
}

/* cc for 64-bit load negative. */
static void set_cc_nabs64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_NABS_64, v1);
}

/* cc for signed 32-bit add: v1 + v2 = vr. */
static void set_cc_add32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_ADD_32, v1, v2, vr);
}

/* cc for unsigned 32-bit add (carry). */
static void set_cc_addu32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_ADDU_32, v1, v2, vr);
}

/* cc for signed 32-bit subtract: v1 - v2 = vr. */
static void set_cc_sub32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2, TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_SUB_32, v1, v2, vr);
}

/* cc for unsigned 32-bit subtract (borrow). */
static void set_cc_subu32(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2,
                          TCGv_i32 vr)
{
    gen_op_update3_cc_i32(s, CC_OP_SUBU_32, v1, v2, vr);
}

/* cc for 32-bit load positive (absolute value). */
static void set_cc_abs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_ABS_32, v1);
}

/* cc for 32-bit load negative. */
static void set_cc_nabs32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NABS_32, v1);
}

/* cc for 32-bit load complement. */
static void set_cc_comp32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_COMP_32, v1);
}

/* cc for 64-bit load complement. */
static void set_cc_comp64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_COMP_64, v1);
}

/* cc for insert-characters-under-mask. */
static void set_cc_icm(DisasContext *s, TCGv_i32 v1, TCGv_i32 v2)
{
    gen_op_update2_cc_i32(s, CC_OP_ICM, v1, v2);
}

/* cc for a 32-bit float compare whose operands are mixed-width. */
static void set_cc_cmp_f32_i64(DisasContext *s, TCGv_i32 v1, TCGv_i64 v2)
{
    tcg_gen_extu_i32_i64(cc_src, v1);
    tcg_gen_mov_i64(cc_dst, v2);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_LTGT_F32;
}

/* cc = (32-bit float != 0). */
static void set_cc_nz_f32(DisasContext *s, TCGv_i32 v1)
{
    gen_op_update1_cc_i32(s, CC_OP_NZ_F32, v1);
}

/* cc = (64-bit float != 0). */
static inline void set_cc_nz_f64(DisasContext *s, TCGv_i64 v1)
{
    gen_op_update1_cc_i64(s, CC_OP_NZ_F64, v1);
}
685
/* CC value is in env->cc_op; the lazy operands are no longer needed. */
static inline void set_cc_static(DisasContext *s)
{
    tcg_gen_discard_i64(cc_src);
    tcg_gen_discard_i64(cc_dst);
    tcg_gen_discard_i64(cc_vr);
    s->cc_op = CC_OP_STATIC;
}

/* Flush the compile-time cc_op into the cc_op global when it is a
   known (non-dynamic, non-static) value. */
static inline void gen_op_set_cc_op(DisasContext *s)
{
    if (s->cc_op != CC_OP_DYNAMIC && s->cc_op != CC_OP_STATIC) {
        tcg_gen_movi_i32(cc_op, s->cc_op);
    }
}

/* Alias kept for symmetry with other targets' translators. */
static inline void gen_update_cc_op(DisasContext *s)
{
    gen_op_set_cc_op(s);
}
706
707 /* calculates cc into cc_op */
708 static void gen_op_calc_cc(DisasContext *s)
709 {
710 TCGv_i32 local_cc_op = tcg_const_i32(s->cc_op);
711 TCGv_i64 dummy = tcg_const_i64(0);
712
713 switch (s->cc_op) {
714 case CC_OP_CONST0:
715 case CC_OP_CONST1:
716 case CC_OP_CONST2:
717 case CC_OP_CONST3:
718 /* s->cc_op is the cc value */
719 tcg_gen_movi_i32(cc_op, s->cc_op - CC_OP_CONST0);
720 break;
721 case CC_OP_STATIC:
722 /* env->cc_op already is the cc value */
723 break;
724 case CC_OP_NZ:
725 case CC_OP_ABS_64:
726 case CC_OP_NABS_64:
727 case CC_OP_ABS_32:
728 case CC_OP_NABS_32:
729 case CC_OP_LTGT0_32:
730 case CC_OP_LTGT0_64:
731 case CC_OP_COMP_32:
732 case CC_OP_COMP_64:
733 case CC_OP_NZ_F32:
734 case CC_OP_NZ_F64:
735 /* 1 argument */
736 gen_helper_calc_cc(cc_op, local_cc_op, dummy, cc_dst, dummy);
737 break;
738 case CC_OP_ICM:
739 case CC_OP_LTGT_32:
740 case CC_OP_LTGT_64:
741 case CC_OP_LTUGTU_32:
742 case CC_OP_LTUGTU_64:
743 case CC_OP_TM_32:
744 case CC_OP_TM_64:
745 case CC_OP_LTGT_F32:
746 case CC_OP_LTGT_F64:
747 case CC_OP_SLAG:
748 /* 2 arguments */
749 gen_helper_calc_cc(cc_op, local_cc_op, cc_src, cc_dst, dummy);
750 break;
751 case CC_OP_ADD_64:
752 case CC_OP_ADDU_64:
753 case CC_OP_SUB_64:
754 case CC_OP_SUBU_64:
755 case CC_OP_ADD_32:
756 case CC_OP_ADDU_32:
757 case CC_OP_SUB_32:
758 case CC_OP_SUBU_32:
759 /* 3 arguments */
760 gen_helper_calc_cc(cc_op, local_cc_op, cc_src, cc_dst, cc_vr);
761 break;
762 case CC_OP_DYNAMIC:
763 /* unknown operation - assume 3 arguments and cc_op in env */
764 gen_helper_calc_cc(cc_op, cc_op, cc_src, cc_dst, cc_vr);
765 break;
766 default:
767 tcg_abort();
768 }
769
770 tcg_temp_free_i32(local_cc_op);
771
772 /* We now have cc in cc_op as constant */
773 set_cc_static(s);
774 }
775
/* Decode an RR-format instruction: two 4-bit register fields. */
static inline void decode_rr(DisasContext *s, uint64_t insn, int *r1, int *r2)
{
    debug_insn(insn);

    *r1 = (insn >> 4) & 0xf;
    *r2 = insn & 0xf;
}

/* Decode an RX-format instruction and return the effective
   base+index+displacement address (caller frees the temp). */
static inline TCGv_i64 decode_rx(DisasContext *s, uint64_t insn, int *r1,
                                 int *x2, int *b2, int *d2)
{
    debug_insn(insn);

    *r1 = (insn >> 20) & 0xf;
    *x2 = (insn >> 16) & 0xf;
    *b2 = (insn >> 12) & 0xf;
    *d2 = insn & 0xfff;

    return get_address(s, *x2, *b2, *d2);
}

/* Decode an RS-format instruction; r3 doubles as m3 for some opcodes. */
static inline void decode_rs(DisasContext *s, uint64_t insn, int *r1, int *r3,
                             int *b2, int *d2)
{
    debug_insn(insn);

    *r1 = (insn >> 20) & 0xf;
    /* aka m3 */
    *r3 = (insn >> 16) & 0xf;
    *b2 = (insn >> 12) & 0xf;
    *d2 = insn & 0xfff;
}

/* Decode an SI-format instruction and return the operand address
   (caller frees the temp). */
static inline TCGv_i64 decode_si(DisasContext *s, uint64_t insn, int *i2,
                                 int *b1, int *d1)
{
    debug_insn(insn);

    *i2 = (insn >> 16) & 0xff;
    *b1 = (insn >> 12) & 0xf;
    *d1 = insn & 0xfff;

    return get_address(s, 0, *b1, *d1);
}
820
/* End the TB with a jump to @pc; chain directly (slot @tb_num) when the
   target lies on one of the pages the current TB already spans. */
static inline void gen_goto_tb(DisasContext *s, int tb_num, target_ulong pc)
{
    TranslationBlock *tb;

    gen_update_cc_op(s);

    tb = s->tb;
    /* NOTE: we handle the case where the TB spans two pages here */
    if ((pc & TARGET_PAGE_MASK) == (tb->pc & TARGET_PAGE_MASK) ||
        (pc & TARGET_PAGE_MASK) == ((s->pc - 1) & TARGET_PAGE_MASK)) {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_i64(psw_addr, pc);
        tcg_gen_exit_tb((long)tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_i64(psw_addr, pc);
        tcg_gen_exit_tb(0);
    }
}
841
/* Statistics (DEBUG_INLINE_BRANCHES only): branch could not be emitted
   inline for this cc_op. */
static inline void account_noninline_branch(DisasContext *s, int cc_op)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_miss[cc_op]++;
#endif
}

/* Statistics (DEBUG_INLINE_BRANCHES only): branch was emitted inline. */
static inline void account_inline_branch(DisasContext *s)
{
#ifdef DEBUG_INLINE_BRANCHES
    inline_branch_hit[s->cc_op]++;
#endif
}
855
/*
 * Branch to TCG label @skip when the s390 branch condition encoded in
 * @mask (bit 0x8 selects cc==0 ... bit 0x1 selects cc==3) is NOT
 * satisfied; fall through when it is.  For well-known lazy cc_op values
 * the test is emitted inline on cc_src/cc_dst (note the inverted TCG
 * condition, since we branch on the *not-taken* case); anything else
 * falls back to computing the real cc and testing the cc_op global.
 */
static void gen_jcc(DisasContext *s, uint32_t mask, int skip)
{
    TCGv_i32 tmp, tmp2, r;
    TCGv_i64 tmp64;
    int old_cc_op;

    switch (s->cc_op) {
    case CC_OP_LTGT0_32:
        /* sign of a 32-bit result */
        tmp = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(tmp, cc_dst);
        switch (mask) {
        case 0x8 | 0x4: /* dst <= 0 */
            tcg_gen_brcondi_i32(TCG_COND_GT, tmp, 0, skip);
            break;
        case 0x8 | 0x2: /* dst >= 0 */
            tcg_gen_brcondi_i32(TCG_COND_LT, tmp, 0, skip);
            break;
        case 0x8: /* dst == 0 */
            tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
            break;
        case 0x7: /* dst != 0 */
        case 0x6: /* dst != 0 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
            break;
        case 0x4: /* dst < 0 */
            tcg_gen_brcondi_i32(TCG_COND_GE, tmp, 0, skip);
            break;
        case 0x2: /* dst > 0 */
            tcg_gen_brcondi_i32(TCG_COND_LE, tmp, 0, skip);
            break;
        default:
            tcg_temp_free_i32(tmp);
            goto do_dynamic;
        }
        account_inline_branch(s);
        tcg_temp_free_i32(tmp);
        break;
    case CC_OP_LTGT0_64:
        /* sign of a 64-bit result */
        switch (mask) {
        case 0x8 | 0x4: /* dst <= 0 */
            tcg_gen_brcondi_i64(TCG_COND_GT, cc_dst, 0, skip);
            break;
        case 0x8 | 0x2: /* dst >= 0 */
            tcg_gen_brcondi_i64(TCG_COND_LT, cc_dst, 0, skip);
            break;
        case 0x8: /* dst == 0 */
            tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
            break;
        case 0x7: /* dst != 0 */
        case 0x6: /* dst != 0 */
            tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
            break;
        case 0x4: /* dst < 0 */
            tcg_gen_brcondi_i64(TCG_COND_GE, cc_dst, 0, skip);
            break;
        case 0x2: /* dst > 0 */
            tcg_gen_brcondi_i64(TCG_COND_LE, cc_dst, 0, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_LTGT_32:
        /* signed 32-bit compare of src against dst */
        tmp = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(tmp, cc_src);
        tcg_gen_trunc_i64_i32(tmp2, cc_dst);
        switch (mask) {
        case 0x8 | 0x4: /* src <= dst */
            tcg_gen_brcond_i32(TCG_COND_GT, tmp, tmp2, skip);
            break;
        case 0x8 | 0x2: /* src >= dst */
            tcg_gen_brcond_i32(TCG_COND_LT, tmp, tmp2, skip);
            break;
        case 0x8: /* src == dst */
            tcg_gen_brcond_i32(TCG_COND_NE, tmp, tmp2, skip);
            break;
        case 0x7: /* src != dst */
        case 0x6: /* src != dst */
            tcg_gen_brcond_i32(TCG_COND_EQ, tmp, tmp2, skip);
            break;
        case 0x4: /* src < dst */
            tcg_gen_brcond_i32(TCG_COND_GE, tmp, tmp2, skip);
            break;
        case 0x2: /* src > dst */
            tcg_gen_brcond_i32(TCG_COND_LE, tmp, tmp2, skip);
            break;
        default:
            tcg_temp_free_i32(tmp);
            tcg_temp_free_i32(tmp2);
            goto do_dynamic;
        }
        account_inline_branch(s);
        tcg_temp_free_i32(tmp);
        tcg_temp_free_i32(tmp2);
        break;
    case CC_OP_LTGT_64:
        /* signed 64-bit compare of src against dst */
        switch (mask) {
        case 0x8 | 0x4: /* src <= dst */
            tcg_gen_brcond_i64(TCG_COND_GT, cc_src, cc_dst, skip);
            break;
        case 0x8 | 0x2: /* src >= dst */
            tcg_gen_brcond_i64(TCG_COND_LT, cc_src, cc_dst, skip);
            break;
        case 0x8: /* src == dst */
            tcg_gen_brcond_i64(TCG_COND_NE, cc_src, cc_dst, skip);
            break;
        case 0x7: /* src != dst */
        case 0x6: /* src != dst */
            tcg_gen_brcond_i64(TCG_COND_EQ, cc_src, cc_dst, skip);
            break;
        case 0x4: /* src < dst */
            tcg_gen_brcond_i64(TCG_COND_GE, cc_src, cc_dst, skip);
            break;
        case 0x2: /* src > dst */
            tcg_gen_brcond_i64(TCG_COND_LE, cc_src, cc_dst, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_LTUGTU_32:
        /* unsigned 32-bit compare of src against dst */
        tmp = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(tmp, cc_src);
        tcg_gen_trunc_i64_i32(tmp2, cc_dst);
        switch (mask) {
        case 0x8 | 0x4: /* src <= dst */
            tcg_gen_brcond_i32(TCG_COND_GTU, tmp, tmp2, skip);
            break;
        case 0x8 | 0x2: /* src >= dst */
            tcg_gen_brcond_i32(TCG_COND_LTU, tmp, tmp2, skip);
            break;
        case 0x8: /* src == dst */
            tcg_gen_brcond_i32(TCG_COND_NE, tmp, tmp2, skip);
            break;
        case 0x7: /* src != dst */
        case 0x6: /* src != dst */
            tcg_gen_brcond_i32(TCG_COND_EQ, tmp, tmp2, skip);
            break;
        case 0x4: /* src < dst */
            tcg_gen_brcond_i32(TCG_COND_GEU, tmp, tmp2, skip);
            break;
        case 0x2: /* src > dst */
            tcg_gen_brcond_i32(TCG_COND_LEU, tmp, tmp2, skip);
            break;
        default:
            tcg_temp_free_i32(tmp);
            tcg_temp_free_i32(tmp2);
            goto do_dynamic;
        }
        account_inline_branch(s);
        tcg_temp_free_i32(tmp);
        tcg_temp_free_i32(tmp2);
        break;
    case CC_OP_LTUGTU_64:
        /* unsigned 64-bit compare of src against dst */
        switch (mask) {
        case 0x8 | 0x4: /* src <= dst */
            tcg_gen_brcond_i64(TCG_COND_GTU, cc_src, cc_dst, skip);
            break;
        case 0x8 | 0x2: /* src >= dst */
            tcg_gen_brcond_i64(TCG_COND_LTU, cc_src, cc_dst, skip);
            break;
        case 0x8: /* src == dst */
            tcg_gen_brcond_i64(TCG_COND_NE, cc_src, cc_dst, skip);
            break;
        case 0x7: /* src != dst */
        case 0x6: /* src != dst */
            tcg_gen_brcond_i64(TCG_COND_EQ, cc_src, cc_dst, skip);
            break;
        case 0x4: /* src < dst */
            tcg_gen_brcond_i64(TCG_COND_GEU, cc_src, cc_dst, skip);
            break;
        case 0x2: /* src > dst */
            tcg_gen_brcond_i64(TCG_COND_LEU, cc_src, cc_dst, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_NZ:
        /* cc is 0 (zero) or 1 (non-zero): only dst matters */
        switch (mask) {
        /* dst == 0 || dst != 0 */
        case 0x8 | 0x4:
        case 0x8 | 0x4 | 0x2:
        case 0x8 | 0x4 | 0x2 | 0x1:
        case 0x8 | 0x4 | 0x1:
            break;
        /* dst == 0 */
        case 0x8:
        case 0x8 | 0x2:
        case 0x8 | 0x2 | 0x1:
        case 0x8 | 0x1:
            tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
            break;
        /* dst != 0 */
        case 0x4:
        case 0x4 | 0x2:
        case 0x4 | 0x2 | 0x1:
        case 0x4 | 0x1:
            tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_TM_32:
        /* test-under-mask, 32-bit operands */
        tmp = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i32();

        tcg_gen_trunc_i64_i32(tmp, cc_src);
        tcg_gen_trunc_i64_i32(tmp2, cc_dst);
        tcg_gen_and_i32(tmp, tmp, tmp2);
        switch (mask) {
        case 0x8: /* val & mask == 0 */
            tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
            break;
        case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
            break;
        default:
            tcg_temp_free_i32(tmp);
            tcg_temp_free_i32(tmp2);
            goto do_dynamic;
        }
        tcg_temp_free_i32(tmp);
        tcg_temp_free_i32(tmp2);
        account_inline_branch(s);
        break;
    case CC_OP_TM_64:
        /* test-under-mask, 64-bit operands */
        tmp64 = tcg_temp_new_i64();

        tcg_gen_and_i64(tmp64, cc_src, cc_dst);
        switch (mask) {
        case 0x8: /* val & mask == 0 */
            tcg_gen_brcondi_i64(TCG_COND_NE, tmp64, 0, skip);
            break;
        case 0x4 | 0x2 | 0x1: /* val & mask != 0 */
            tcg_gen_brcondi_i64(TCG_COND_EQ, tmp64, 0, skip);
            break;
        default:
            tcg_temp_free_i64(tmp64);
            goto do_dynamic;
        }
        tcg_temp_free_i64(tmp64);
        account_inline_branch(s);
        break;
    case CC_OP_ICM:
        /* insert-characters-under-mask: only zero/non-zero is inlineable */
        switch (mask) {
        case 0x8: /* val == 0 */
            tcg_gen_brcondi_i64(TCG_COND_NE, cc_dst, 0, skip);
            break;
        case 0x4 | 0x2 | 0x1: /* val != 0 */
        case 0x4 | 0x2: /* val != 0 */
            tcg_gen_brcondi_i64(TCG_COND_EQ, cc_dst, 0, skip);
            break;
        default:
            goto do_dynamic;
        }
        account_inline_branch(s);
        break;
    case CC_OP_STATIC:
        /* cc_op already holds the real cc; skip the recalculation */
        old_cc_op = s->cc_op;
        goto do_dynamic_nocccalc;
    case CC_OP_DYNAMIC:
    default:
    do_dynamic:
        old_cc_op = s->cc_op;
        /* calculate cc value */
        gen_op_calc_cc(s);

    do_dynamic_nocccalc:
        /* jump based on cc */
        account_noninline_branch(s, old_cc_op);

        switch (mask) {
        case 0x8 | 0x4 | 0x2 | 0x1:
            /* always true */
            break;
        case 0x8 | 0x4 | 0x2: /* cc != 3 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 3, skip);
            break;
        case 0x8 | 0x4 | 0x1: /* cc != 2 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 2, skip);
            break;
        case 0x8 | 0x2 | 0x1: /* cc != 1 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 1, skip);
            break;
        case 0x8 | 0x2: /* cc == 0 || cc == 2 */
            tmp = tcg_temp_new_i32();
            tcg_gen_andi_i32(tmp, cc_op, 1);
            tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, skip);
            tcg_temp_free_i32(tmp);
            break;
        case 0x8 | 0x4: /* cc < 2 */
            tcg_gen_brcondi_i32(TCG_COND_GEU, cc_op, 2, skip);
            break;
        case 0x8: /* cc == 0 */
            tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 0, skip);
            break;
        case 0x4 | 0x2 | 0x1: /* cc != 0 */
            tcg_gen_brcondi_i32(TCG_COND_EQ, cc_op, 0, skip);
            break;
        case 0x4 | 0x1: /* cc == 1 || cc == 3 */
            tmp = tcg_temp_new_i32();
            tcg_gen_andi_i32(tmp, cc_op, 1);
            tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, skip);
            tcg_temp_free_i32(tmp);
            break;
        case 0x4: /* cc == 1 */
            tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 1, skip);
            break;
        case 0x2 | 0x1: /* cc > 1 */
            tcg_gen_brcondi_i32(TCG_COND_LEU, cc_op, 1, skip);
            break;
        case 0x2: /* cc == 2 */
            tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 2, skip);
            break;
        case 0x1: /* cc == 3 */
            tcg_gen_brcondi_i32(TCG_COND_NE, cc_op, 3, skip);
            break;
        default: /* cc is masked by something else */
            tmp = tcg_const_i32(3);
            /* 3 - cc */
            tcg_gen_sub_i32(tmp, tmp, cc_op);
            tmp2 = tcg_const_i32(1);
            /* 1 << (3 - cc) */
            tcg_gen_shl_i32(tmp2, tmp2, tmp);
            r = tcg_const_i32(mask);
            /* mask & (1 << (3 - cc)) */
            tcg_gen_and_i32(r, r, tmp2);
            tcg_temp_free_i32(tmp);
            tcg_temp_free_i32(tmp2);

            tcg_gen_brcondi_i32(TCG_COND_EQ, r, 0, skip);
            tcg_temp_free_i32(r);
            break;
        }
        break;
    }
}
1201
1202 static void gen_bcr(DisasContext *s, uint32_t mask, TCGv_i64 target,
1203 uint64_t offset)
1204 {
1205 int skip;
1206
1207 if (mask == 0xf) {
1208 /* unconditional */
1209 tcg_gen_mov_i64(psw_addr, target);
1210 tcg_gen_exit_tb(0);
1211 } else if (mask == 0) {
1212 /* ignore cc and never match */
1213 gen_goto_tb(s, 0, offset + 2);
1214 } else {
1215 TCGv_i64 new_addr = tcg_temp_local_new_i64();
1216
1217 tcg_gen_mov_i64(new_addr, target);
1218 skip = gen_new_label();
1219 gen_jcc(s, mask, skip);
1220 tcg_gen_mov_i64(psw_addr, new_addr);
1221 tcg_temp_free_i64(new_addr);
1222 tcg_gen_exit_tb(0);
1223 gen_set_label(skip);
1224 tcg_temp_free_i64(new_addr);
1225 gen_goto_tb(s, 1, offset + 2);
1226 }
1227 }
1228
1229 static void gen_brc(uint32_t mask, DisasContext *s, int32_t offset)
1230 {
1231 int skip;
1232
1233 if (mask == 0xf) {
1234 /* unconditional */
1235 gen_goto_tb(s, 0, s->pc + offset);
1236 } else if (mask == 0) {
1237 /* ignore cc and never match */
1238 gen_goto_tb(s, 0, s->pc + 4);
1239 } else {
1240 skip = gen_new_label();
1241 gen_jcc(s, mask, skip);
1242 gen_goto_tb(s, 0, s->pc + offset);
1243 gen_set_label(skip);
1244 gen_goto_tb(s, 1, s->pc + 4);
1245 }
1246 s->is_jmp = DISAS_TB_JUMP;
1247 }
1248
/* MVC - move characters.  Copies l + 1 bytes from *s2 to *s1 (the
   length field encodes length minus one).  A handful of known-good
   lengths are emitted inline; all other lengths fall back to the mvc
   helper.  When dest == src + 1 the copy degenerates into a memset
   of the byte at *src, handled on a separate code path below. */
static void gen_op_mvc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
{
    TCGv_i64 tmp, tmp2;
    int i;
    int l_memset = gen_new_label();
    int l_out = gen_new_label();
    /* local temps: their values must survive the brcond below */
    TCGv_i64 dest = tcg_temp_local_new_i64();
    TCGv_i64 src = tcg_temp_local_new_i64();
    TCGv_i32 vl;

    /* Find out if we should use the inline version of mvc */
    switch (l) {
    case 0:
    case 1:
    case 2:
    case 3:
    case 4:
    case 5:
    case 6:
    case 7:
    case 11:
    case 15:
        /* use inline */
        break;
    default:
        /* Fall back to helper */
        vl = tcg_const_i32(l);
        potential_page_fault(s);
        gen_helper_mvc(vl, s1, s2);
        tcg_temp_free_i32(vl);
        return;
    }

    tcg_gen_mov_i64(dest, s1);
    tcg_gen_mov_i64(src, s2);

    if (!(s->tb->flags & FLAG_MASK_64)) {
        /* 31-bit mode: strip the high address bits */
        /* XXX what if we overflow while moving? */
        tcg_gen_andi_i64(dest, dest, 0x7fffffffUL);
        tcg_gen_andi_i64(src, src, 0x7fffffffUL);
    }

    /* dest == src + 1 is the architected memset idiom */
    tmp = tcg_temp_new_i64();
    tcg_gen_addi_i64(tmp, src, 1);
    tcg_gen_brcond_i64(TCG_COND_EQ, dest, tmp, l_memset);
    tcg_temp_free_i64(tmp);

    switch (l) {
    case 0:
        /* 1 byte */
        tmp = tcg_temp_new_i64();

        tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
        tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        break;
    case 1:
        /* 2 bytes */
        tmp = tcg_temp_new_i64();

        tcg_gen_qemu_ld16u(tmp, src, get_mem_index(s));
        tcg_gen_qemu_st16(tmp, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        break;
    case 3:
        /* 4 bytes */
        tmp = tcg_temp_new_i64();

        tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
        tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        break;
    case 4:
        /* 5 bytes: one 4-byte and one 1-byte access */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_temp_new_i64();

        tcg_gen_qemu_ld32u(tmp, src, get_mem_index(s));
        tcg_gen_addi_i64(src, src, 4);
        tcg_gen_qemu_ld8u(tmp2, src, get_mem_index(s));
        tcg_gen_qemu_st32(tmp, dest, get_mem_index(s));
        tcg_gen_addi_i64(dest, dest, 4);
        tcg_gen_qemu_st8(tmp2, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 7:
        /* 8 bytes */
        tmp = tcg_temp_new_i64();

        tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
        tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));

        tcg_temp_free_i64(tmp);
        break;
    default:
        /* The inline version can become too big for too uneven numbers, only
           use it on known good lengths */
        /* 8-byte chunks first ... */
        tmp = tcg_temp_new_i64();
        tmp2 = tcg_const_i64(8);
        for (i = 0; (i + 7) <= l; i += 8) {
            tcg_gen_qemu_ld64(tmp, src, get_mem_index(s));
            tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));

            tcg_gen_add_i64(src, src, tmp2);
            tcg_gen_add_i64(dest, dest, tmp2);
        }

        tcg_temp_free_i64(tmp2);
        tmp2 = tcg_const_i64(1);

        /* ... then the remaining bytes one at a time */
        for (; i <= l; i++) {
            tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
            tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));

            tcg_gen_add_i64(src, src, tmp2);
            tcg_gen_add_i64(dest, dest, tmp2);
        }

        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp);
        break;
    }

    tcg_gen_br(l_out);

    gen_set_label(l_memset);
    /* memset case (dest == (src + 1)) */

    tmp = tcg_temp_new_i64();
    tmp2 = tcg_temp_new_i64();
    /* fill tmp with the byte */
    tcg_gen_qemu_ld8u(tmp, src, get_mem_index(s));
    tcg_gen_shli_i64(tmp2, tmp, 8);
    tcg_gen_or_i64(tmp, tmp, tmp2);
    tcg_gen_shli_i64(tmp2, tmp, 16);
    tcg_gen_or_i64(tmp, tmp, tmp2);
    tcg_gen_shli_i64(tmp2, tmp, 32);
    tcg_gen_or_i64(tmp, tmp, tmp2);
    tcg_temp_free_i64(tmp2);

    /* NOTE(review): this constant is never read below (the store loops
       advance dest with tcg_gen_addi_i64); it looks dead — confirm */
    tmp2 = tcg_const_i64(8);

    for (i = 0; (i + 7) <= l; i += 8) {
        tcg_gen_qemu_st64(tmp, dest, get_mem_index(s));
        tcg_gen_addi_i64(dest, dest, 8);
    }

    tcg_temp_free_i64(tmp2);
    tmp2 = tcg_const_i64(1);

    for (; i <= l; i++) {
        tcg_gen_qemu_st8(tmp, dest, get_mem_index(s));
        tcg_gen_addi_i64(dest, dest, 1);
    }

    tcg_temp_free_i64(tmp2);
    tcg_temp_free_i64(tmp);

    gen_set_label(l_out);

    tcg_temp_free(dest);
    tcg_temp_free(src);
}
1412
1413 static void gen_op_clc(DisasContext *s, int l, TCGv_i64 s1, TCGv_i64 s2)
1414 {
1415 TCGv_i64 tmp;
1416 TCGv_i64 tmp2;
1417 TCGv_i32 vl;
1418
1419 /* check for simple 32bit or 64bit match */
1420 switch (l) {
1421 case 0:
1422 tmp = tcg_temp_new_i64();
1423 tmp2 = tcg_temp_new_i64();
1424
1425 tcg_gen_qemu_ld8u(tmp, s1, get_mem_index(s));
1426 tcg_gen_qemu_ld8u(tmp2, s2, get_mem_index(s));
1427 cmp_u64(s, tmp, tmp2);
1428
1429 tcg_temp_free_i64(tmp);
1430 tcg_temp_free_i64(tmp2);
1431 return;
1432 case 1:
1433 tmp = tcg_temp_new_i64();
1434 tmp2 = tcg_temp_new_i64();
1435
1436 tcg_gen_qemu_ld16u(tmp, s1, get_mem_index(s));
1437 tcg_gen_qemu_ld16u(tmp2, s2, get_mem_index(s));
1438 cmp_u64(s, tmp, tmp2);
1439
1440 tcg_temp_free_i64(tmp);
1441 tcg_temp_free_i64(tmp2);
1442 return;
1443 case 3:
1444 tmp = tcg_temp_new_i64();
1445 tmp2 = tcg_temp_new_i64();
1446
1447 tcg_gen_qemu_ld32u(tmp, s1, get_mem_index(s));
1448 tcg_gen_qemu_ld32u(tmp2, s2, get_mem_index(s));
1449 cmp_u64(s, tmp, tmp2);
1450
1451 tcg_temp_free_i64(tmp);
1452 tcg_temp_free_i64(tmp2);
1453 return;
1454 case 7:
1455 tmp = tcg_temp_new_i64();
1456 tmp2 = tcg_temp_new_i64();
1457
1458 tcg_gen_qemu_ld64(tmp, s1, get_mem_index(s));
1459 tcg_gen_qemu_ld64(tmp2, s2, get_mem_index(s));
1460 cmp_u64(s, tmp, tmp2);
1461
1462 tcg_temp_free_i64(tmp);
1463 tcg_temp_free_i64(tmp2);
1464 return;
1465 }
1466
1467 potential_page_fault(s);
1468 vl = tcg_const_i32(l);
1469 gen_helper_clc(cc_op, vl, s1, s2);
1470 tcg_temp_free_i32(vl);
1471 set_cc_static(s);
1472 }
1473
/* Disassemble one E3-format (RXY: register plus indexed storage
   operand with 20-bit displacement) instruction.  op is the low
   opcode byte, r1 the register field, and x2/b2/d2 form the storage
   operand address. */
static void disas_e3(DisasContext* s, int op, int r1, int x2, int b2, int d2)
{
    TCGv_i64 addr, tmp, tmp2, tmp3, tmp4;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;

    LOG_DISAS("disas_e3: op 0x%x r1 %d x2 %d b2 %d d2 %d\n",
              op, r1, x2, b2, d2);
    addr = get_address(s, x2, b2, d2);
    switch (op) {
    case 0x2: /* LTG R1,D2(X2,B2) [RXY] */
    case 0x4: /* lg r1,d2(x2,b2) */
        tcg_gen_qemu_ld64(regs[r1], addr, get_mem_index(s));
        if (op == 0x2) {
            /* LTG additionally sets the condition code from the value */
            set_cc_s64(s, regs[r1]);
        }
        break;
    case 0x12: /* LT R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        store_reg32(r1, tmp32_1);
        set_cc_s32(s, tmp32_1);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xc: /* MSG R1,D2(X2,B2) [RXY] */
    case 0x1c: /* MSGF R1,D2(X2,B2) [RXY] */
        /* 64-bit multiply; MSGF uses a sign-extended 32-bit operand */
        tmp2 = tcg_temp_new_i64();
        if (op == 0xc) {
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        } else {
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        }
        tcg_gen_mul_i64(regs[r1], regs[r1], tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0xd: /* DSG R1,D2(X2,B2) [RXY] */
    case 0x1d: /* DSGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        if (op == 0x1d) {
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        } else {
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        }
        /* dividend comes from r1 + 1; quotient -> r1 + 1, remainder -> r1 */
        tmp4 = load_reg(r1 + 1);
        tmp3 = tcg_temp_new_i64();
        tcg_gen_div_i64(tmp3, tmp4, tmp2);
        store_reg(r1 + 1, tmp3);
        tcg_gen_rem_i64(tmp3, tmp4, tmp2);
        store_reg(r1, tmp3);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0x8: /* AG R1,D2(X2,B2) [RXY] */
    case 0xa: /* ALG R1,D2(X2,B2) [RXY] */
    case 0x18: /* AGF R1,D2(X2,B2) [RXY] */
    case 0x1a: /* ALGF R1,D2(X2,B2) [RXY] */
        /* operand width/extension depends on the variant */
        if (op == 0x1a) {
            tmp2 = tcg_temp_new_i64();
            tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        } else if (op == 0x18) {
            tmp2 = tcg_temp_new_i64();
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        } else {
            tmp2 = tcg_temp_new_i64();
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        }
        tmp4 = load_reg(r1);
        tmp3 = tcg_temp_new_i64();
        tcg_gen_add_i64(tmp3, tmp4, tmp2);
        store_reg(r1, tmp3);
        switch (op) {
        case 0x8:
        case 0x18:
            /* signed add cc */
            set_cc_add64(s, tmp4, tmp2, tmp3);
            break;
        case 0xa:
        case 0x1a:
            /* logical add cc */
            set_cc_addu64(s, tmp4, tmp2, tmp3);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0x9: /* SG R1,D2(X2,B2) [RXY] */
    case 0xb: /* SLG R1,D2(X2,B2) [RXY] */
    case 0x19: /* SGF R1,D2(X2,B2) [RXY] */
    case 0x1b: /* SLGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        if (op == 0x19) {
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        } else if (op == 0x1b) {
            tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        } else {
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        }
        tmp4 = load_reg(r1);
        tmp3 = tcg_temp_new_i64();
        tcg_gen_sub_i64(tmp3, tmp4, tmp2);
        store_reg(r1, tmp3);
        switch (op) {
        case 0x9:
        case 0x19:
            /* signed subtract cc */
            set_cc_sub64(s, tmp4, tmp2, tmp3);
            break;
        case 0xb:
        case 0x1b:
            /* logical subtract cc */
            set_cc_subu64(s, tmp4, tmp2, tmp3);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0xf: /* LRVG R1,D2(X2,B2) [RXE] */
        /* load 64 bits byte-reversed */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        tcg_gen_bswap64_i64(tmp2, tmp2);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x14: /* LGF R1,D2(X2,B2) [RXY] */
    case 0x16: /* LLGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        if (op == 0x14) {
            /* LGF sign-extends, LLGF zero-extends */
            tcg_gen_ext32s_i64(tmp2, tmp2);
        }
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x15: /* LGH R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x17: /* LLGT R1,D2(X2,B2) [RXY] */
        /* load only the low 31 bits of the word */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_andi_i64(tmp2, tmp2, 0x7fffffffULL);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x1e: /* LRV R1,D2(X2,B2) [RXY] */
        /* load 32 bits byte-reversed */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x1f: /* LRVH R1,D2(X2,B2) [RXY] */
        /* load 16 bits byte-reversed */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_gen_bswap16_i32(tmp32_1, tmp32_1);
        store_reg16(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x20: /* CG R1,D2(X2,B2) [RXY] */
    case 0x21: /* CLG R1,D2(X2,B2) */
    case 0x30: /* CGF R1,D2(X2,B2) [RXY] */
    case 0x31: /* CLGF R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        /* first switch selects the operand load ... */
        switch (op) {
        case 0x20:
        case 0x21:
            tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
            break;
        case 0x30:
            tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
            break;
        case 0x31:
            tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
            break;
        default:
            tcg_abort();
        }
        /* ... second one the signedness of the compare */
        switch (op) {
        case 0x20:
        case 0x30:
            cmp_s64(s, regs[r1], tmp2);
            break;
        case 0x21:
        case 0x31:
            cmp_u64(s, regs[r1], tmp2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        break;
    case 0x24: /* stg r1, d2(x2,b2) */
        tcg_gen_qemu_st64(regs[r1], addr, get_mem_index(s));
        break;
    case 0x3e: /* STRV R1,D2(X2,B2) [RXY] */
        /* store 32 bits byte-reversed */
        tmp32_1 = load_reg32(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
        tcg_gen_extu_i32_i64(tmp2, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
        tcg_temp_free_i64(tmp2);
        break;
    case 0x50: /* STY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(tmp2, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_gen_qemu_st32(tmp2, addr, get_mem_index(s));
        tcg_temp_free_i64(tmp2);
        break;
    case 0x57: /* XY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_gen_xor_i32(tmp32_2, tmp32_1, tmp32_2);
        store_reg32(r1, tmp32_2);
        set_cc_nz_u32(s, tmp32_2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x58: /* LY R1,D2(X2,B2) [RXY] */
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp3, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp3);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x5a: /* AY R1,D2(X2,B2) [RXY] */
    case 0x5b: /* SY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp32_3 = tcg_temp_new_i32();
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32s(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        tcg_temp_free_i64(tmp2);
        switch (op) {
        case 0x5a:
            tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
            break;
        case 0x5b:
            tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
            break;
        default:
            tcg_abort();
        }
        store_reg32(r1, tmp32_3);
        switch (op) {
        case 0x5a:
            set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
            break;
        case 0x5b:
            set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x71: /* LAY R1,D2(X2,B2) [RXY] */
        /* load the computed effective address itself */
        store_reg(r1, addr);
        break;
    case 0x72: /* STCY R1,D2(X2,B2) [RXY] */
        tmp32_1 = load_reg32(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(tmp2, tmp32_1);
        tcg_gen_qemu_st8(tmp2, addr, get_mem_index(s));
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x73: /* ICY R1,D2(X2,B2) [RXY] */
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld8u(tmp3, addr, get_mem_index(s));
        store_reg8(r1, tmp3);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x76: /* LB R1,D2(X2,B2) [RXY] */
    case 0x77: /* LGB R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld8s(tmp2, addr, get_mem_index(s));
        switch (op) {
        case 0x76:
            tcg_gen_ext8s_i64(tmp2, tmp2);
            store_reg32_i64(r1, tmp2);
            break;
        case 0x77:
            tcg_gen_ext8s_i64(tmp2, tmp2);
            store_reg(r1, tmp2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i64(tmp2);
        break;
    case 0x78: /* LHY R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld16s(tmp2, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x80: /* NG R1,D2(X2,B2) [RXY] */
    case 0x81: /* OG R1,D2(X2,B2) [RXY] */
    case 0x82: /* XG R1,D2(X2,B2) [RXY] */
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp3, addr, get_mem_index(s));
        switch (op) {
        case 0x80:
            tcg_gen_and_i64(regs[r1], regs[r1], tmp3);
            break;
        case 0x81:
            tcg_gen_or_i64(regs[r1], regs[r1], tmp3);
            break;
        case 0x82:
            tcg_gen_xor_i64(regs[r1], regs[r1], tmp3);
            break;
        default:
            tcg_abort();
        }
        set_cc_nz_u64(s, regs[r1]);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x86: /* MLG R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        gen_helper_mlg(tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x87: /* DLG R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        gen_helper_dlg(tmp32_1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x88: /* ALCG R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        /* extract the carry bit (bit 1) from the condition code */
        tcg_gen_extu_i32_i64(tmp3, cc_op);
        tcg_gen_shri_i64(tmp3, tmp3, 1);
        tcg_gen_andi_i64(tmp3, tmp3, 1);
        tcg_gen_add_i64(tmp3, tmp2, tmp3);
        tcg_gen_add_i64(tmp3, regs[r1], tmp3);
        store_reg(r1, tmp3);
        /* NOTE(review): store_reg() above presumably already rewrote
           regs[r1], so this cc computation would see the result rather
           than the original first addend — confirm intended behavior */
        set_cc_addu64(s, regs[r1], tmp2, tmp3);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x89: /* SLBG R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tcg_gen_qemu_ld64(tmp2, addr, get_mem_index(s));
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_slbg(cc_op, cc_op, tmp32_1, regs[r1], tmp2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x90: /* LLGC R1,D2(X2,B2) [RXY] */
        tcg_gen_qemu_ld8u(regs[r1], addr, get_mem_index(s));
        break;
    case 0x91: /* LLGH R1,D2(X2,B2) [RXY] */
        tcg_gen_qemu_ld16u(regs[r1], addr, get_mem_index(s));
        break;
    case 0x94: /* LLC R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld8u(tmp2, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x95: /* LLH R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld16u(tmp2, addr, get_mem_index(s));
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x96: /* ML R1,D2(X2,B2) [RXY] */
        /* 32 x 32 -> 64 multiply into the r1/r1+1 register pair */
        tmp2 = tcg_temp_new_i64();
        tmp3 = load_reg((r1 + 1) & 15);
        tcg_gen_ext32u_i64(tmp3, tmp3);
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_mul_i64(tmp2, tmp2, tmp3);
        store_reg32_i64((r1 + 1) & 15, tmp2);
        tcg_gen_shri_i64(tmp2, tmp2, 32);
        store_reg32_i64(r1, tmp2);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x97: /* DL R1,D2(X2,B2) [RXY] */
        /* reg(r1) = reg(r1, r1+1) % ld32(addr) */
        /* reg(r1+1) = reg(r1, r1+1) / ld32(addr) */
        tmp = load_reg(r1);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tmp3 = load_reg((r1 + 1) & 15);
        tcg_gen_ext32u_i64(tmp2, tmp2);
        tcg_gen_ext32u_i64(tmp3, tmp3);
        /* assemble the 64-bit dividend from the register pair */
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(tmp, tmp, tmp3);

        tcg_gen_rem_i64(tmp3, tmp, tmp2);
        tcg_gen_div_i64(tmp, tmp, tmp2);
        store_reg32_i64((r1 + 1) & 15, tmp);
        store_reg32_i64(r1, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x98: /* ALC R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp32_3 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
        set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
        store_reg32(r1, tmp32_3);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x99: /* SLB R1,D2(X2,B2) [RXY] */
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, addr, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
        /* XXX possible optimization point */
        gen_op_calc_cc(s);
        gen_helper_slb(cc_op, cc_op, tmp32_1, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    default:
        LOG_DISAS("illegal e3 operation 0x%x\n", op);
        gen_illegal_opcode(s, 3);
        break;
    }
    tcg_temp_free_i64(addr);
}
1944
1945 #ifndef CONFIG_USER_ONLY
1946 static void disas_e5(DisasContext* s, uint64_t insn)
1947 {
1948 TCGv_i64 tmp, tmp2;
1949 int op = (insn >> 32) & 0xff;
1950
1951 tmp = get_address(s, 0, (insn >> 28) & 0xf, (insn >> 16) & 0xfff);
1952 tmp2 = get_address(s, 0, (insn >> 12) & 0xf, insn & 0xfff);
1953
1954 LOG_DISAS("disas_e5: insn %" PRIx64 "\n", insn);
1955 switch (op) {
1956 case 0x01: /* TPROT D1(B1),D2(B2) [SSE] */
1957 /* Test Protection */
1958 potential_page_fault(s);
1959 gen_helper_tprot(cc_op, tmp, tmp2);
1960 set_cc_static(s);
1961 break;
1962 default:
1963 LOG_DISAS("illegal e5 operation 0x%x\n", op);
1964 gen_illegal_opcode(s, 3);
1965 break;
1966 }
1967
1968 tcg_temp_free_i64(tmp);
1969 tcg_temp_free_i64(tmp2);
1970 }
1971 #endif
1972
/* Disassemble one EB-format (RSY/SIY) instruction: 64-bit shifts and
   rotates, load/store multiple, compare-and-swap and SIY immediate
   operations.  For SIY opcodes the I2 immediate arrives split across
   the r1 and r3 fields. */
static void disas_eb(DisasContext *s, int op, int r1, int r3, int b2, int d2)
{
    TCGv_i64 tmp, tmp2, tmp3, tmp4;
    TCGv_i32 tmp32_1, tmp32_2;
    int i, stm_len;
    int ilc = 3;    /* instruction length code passed to exception helpers */

    LOG_DISAS("disas_eb: op 0x%x r1 %d r3 %d b2 %d d2 0x%x\n",
              op, r1, r3, b2, d2);
    switch (op) {
    case 0xc: /* SRLG R1,R3,D2(B2) [RSY] */
    case 0xd: /* SLLG R1,R3,D2(B2) [RSY] */
    case 0xa: /* SRAG R1,R3,D2(B2) [RSY] */
    case 0xb: /* SLAG R1,R3,D2(B2) [RSY] */
    case 0x1c: /* RLLG R1,R3,D2(B2) [RSY] */
        /* the shift amount is the low 6 bits of the address (or of d2) */
        if (b2) {
            tmp = get_address(s, 0, b2, d2);
            tcg_gen_andi_i64(tmp, tmp, 0x3f);
        } else {
            tmp = tcg_const_i64(d2 & 0x3f);
        }
        switch (op) {
        case 0xc:
            tcg_gen_shr_i64(regs[r1], regs[r3], tmp);
            break;
        case 0xd:
            tcg_gen_shl_i64(regs[r1], regs[r3], tmp);
            break;
        case 0xa:
            tcg_gen_sar_i64(regs[r1], regs[r3], tmp);
            break;
        case 0xb:
            /* arithmetic left shift: result keeps the source's sign bit */
            tmp2 = tcg_temp_new_i64();
            tmp3 = tcg_temp_new_i64();
            gen_op_update2_cc_i64(s, CC_OP_SLAG, regs[r3], tmp);
            tcg_gen_shl_i64(tmp2, regs[r3], tmp);
            /* override sign bit with source sign */
            tcg_gen_andi_i64(tmp2, tmp2, ~0x8000000000000000ULL);
            tcg_gen_andi_i64(tmp3, regs[r3], 0x8000000000000000ULL);
            tcg_gen_or_i64(regs[r1], tmp2, tmp3);
            tcg_temp_free_i64(tmp2);
            tcg_temp_free_i64(tmp3);
            break;
        case 0x1c:
            tcg_gen_rotl_i64(regs[r1], regs[r3], tmp);
            break;
        default:
            tcg_abort();
            break;
        }
        if (op == 0xa) {
            /* SRAG sets the condition code from the result */
            set_cc_s64(s, regs[r1]);
        }
        tcg_temp_free_i64(tmp);
        break;
    case 0x1d: /* RLL R1,R3,D2(B2) [RSY] */
        if (b2) {
            tmp = get_address(s, 0, b2, d2);
            tcg_gen_andi_i64(tmp, tmp, 0x3f);
        } else {
            tmp = tcg_const_i64(d2 & 0x3f);
        }
        tmp32_1 = tcg_temp_new_i32();
        tmp32_2 = load_reg32(r3);
        tcg_gen_trunc_i64_i32(tmp32_1, tmp);
        switch (op) {
        case 0x1d:
            tcg_gen_rotl_i32(tmp32_1, tmp32_2, tmp32_1);
            break;
        default:
            tcg_abort();
            break;
        }
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x4: /* LMG R1,R3,D2(B2) [RSE] */
    case 0x24: /* STMG R1,R3,D2(B2) [RSE] */
        stm_len = 8;
        goto do_mh;
    case 0x26: /* STMH R1,R3,D2(B2) [RSE] */
    case 0x96: /* LMH R1,R3,D2(B2) [RSE] */
        stm_len = 4;
    do_mh:
        /* Apparently, unrolling lmg/stmg of any size gains performance -
           even for very long ones... */
        tmp = get_address(s, 0, b2, d2);
        tmp3 = tcg_const_i64(stm_len);
        /* NOTE(review): tmp4 is the shift count used below to address
           the register's high word; STMH (0x26) gets 32, but LMH (0x96)
           gets 4 — looks suspicious, confirm against LMH semantics */
        tmp4 = tcg_const_i64(op == 0x26 ? 32 : 4);
        for (i = r1;; i = (i + 1) % 16) {
            switch (op) {
            case 0x4:
                tcg_gen_qemu_ld64(regs[i], tmp, get_mem_index(s));
                break;
            case 0x96:
                tmp2 = tcg_temp_new_i64();
#if HOST_LONG_BITS == 32
                tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
                tcg_gen_trunc_i64_i32(TCGV_HIGH(regs[i]), tmp2);
#else
                tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
                tcg_gen_shl_i64(tmp2, tmp2, tmp4);
                tcg_gen_ext32u_i64(regs[i], regs[i]);
                tcg_gen_or_i64(regs[i], regs[i], tmp2);
#endif
                tcg_temp_free_i64(tmp2);
                break;
            case 0x24:
                tcg_gen_qemu_st64(regs[i], tmp, get_mem_index(s));
                break;
            case 0x26:
                tmp2 = tcg_temp_new_i64();
                tcg_gen_shr_i64(tmp2, regs[i], tmp4);
                tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
                tcg_temp_free_i64(tmp2);
                break;
            default:
                tcg_abort();
            }
            if (i == r3) {
                break;
            }
            tcg_gen_add_i64(tmp, tmp, tmp3);
        }
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i64(tmp4);
        break;
    case 0x2c: /* STCMH R1,M3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        gen_helper_stcmh(tmp32_1, tmp, tmp32_2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
#ifndef CONFIG_USER_ONLY
    case 0x2f: /* LCTLG R1,R3,D2(B2) [RSE] */
        /* Load Control */
        check_privileged(s, ilc);
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        gen_helper_lctlg(tmp32_1, tmp, tmp32_2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x25: /* STCTG R1,R3,D2(B2) [RSE] */
        /* Store Control */
        check_privileged(s, ilc);
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        gen_helper_stctg(tmp32_1, tmp, tmp32_2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
#endif
    case 0x30: /* CSG R1,R3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        /* XXX rewrite in tcg */
        gen_helper_csg(cc_op, tmp32_1, tmp, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x3e: /* CDSG R1,R3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        /* XXX rewrite in tcg */
        gen_helper_cdsg(cc_op, tmp32_1, tmp, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x51: /* TMY D1(B1),I2 [SIY] */
        tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
        tmp2 = tcg_const_i64((r1 << 4) | r3);
        tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
        /* yes, this is a 32 bit operation with 64 bit tcg registers, because
           that incurs less conversions */
        cmp_64(s, tmp, tmp2, CC_OP_TM_32);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x52: /* MVIY D1(B1),I2 [SIY] */
        tmp = get_address(s, 0, b2, d2); /* SIY -> this is the destination */
        tmp2 = tcg_const_i64((r1 << 4) | r3);
        tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x55: /* CLIY D1(B1),I2 [SIY] */
        tmp3 = get_address(s, 0, b2, d2); /* SIY -> this is the 1st operand */
        tmp = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld8u(tmp, tmp3, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp);
        cmp_u32c(s, tmp32_1, (r1 << 4) | r3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp3);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x80: /* ICMH R1,M3,D2(B2) [RSY] */
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r3);
        potential_page_fault(s);
        /* XXX split CC calculation out */
        gen_helper_icmh(cc_op, tmp32_1, tmp, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    default:
        LOG_DISAS("illegal eb operation 0x%x\n", op);
        gen_illegal_opcode(s, ilc);
        break;
    }
}
2209
2210 static void disas_ed(DisasContext *s, int op, int r1, int x2, int b2, int d2,
2211 int r1b)
2212 {
2213 TCGv_i32 tmp_r1, tmp32;
2214 TCGv_i64 addr, tmp;
2215 addr = get_address(s, x2, b2, d2);
2216 tmp_r1 = tcg_const_i32(r1);
2217 switch (op) {
2218 case 0x5: /* LXDB R1,D2(X2,B2) [RXE] */
2219 potential_page_fault(s);
2220 gen_helper_lxdb(tmp_r1, addr);
2221 break;
2222 case 0x9: /* CEB R1,D2(X2,B2) [RXE] */
2223 tmp = tcg_temp_new_i64();
2224 tmp32 = load_freg32(r1);
2225 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2226 set_cc_cmp_f32_i64(s, tmp32, tmp);
2227 tcg_temp_free_i64(tmp);
2228 tcg_temp_free_i32(tmp32);
2229 break;
2230 case 0xa: /* AEB R1,D2(X2,B2) [RXE] */
2231 tmp = tcg_temp_new_i64();
2232 tmp32 = tcg_temp_new_i32();
2233 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2234 tcg_gen_trunc_i64_i32(tmp32, tmp);
2235 gen_helper_aeb(tmp_r1, tmp32);
2236 tcg_temp_free_i64(tmp);
2237 tcg_temp_free_i32(tmp32);
2238
2239 tmp32 = load_freg32(r1);
2240 set_cc_nz_f32(s, tmp32);
2241 tcg_temp_free_i32(tmp32);
2242 break;
2243 case 0xb: /* SEB R1,D2(X2,B2) [RXE] */
2244 tmp = tcg_temp_new_i64();
2245 tmp32 = tcg_temp_new_i32();
2246 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2247 tcg_gen_trunc_i64_i32(tmp32, tmp);
2248 gen_helper_seb(tmp_r1, tmp32);
2249 tcg_temp_free_i64(tmp);
2250 tcg_temp_free_i32(tmp32);
2251
2252 tmp32 = load_freg32(r1);
2253 set_cc_nz_f32(s, tmp32);
2254 tcg_temp_free_i32(tmp32);
2255 break;
2256 case 0xd: /* DEB R1,D2(X2,B2) [RXE] */
2257 tmp = tcg_temp_new_i64();
2258 tmp32 = tcg_temp_new_i32();
2259 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2260 tcg_gen_trunc_i64_i32(tmp32, tmp);
2261 gen_helper_deb(tmp_r1, tmp32);
2262 tcg_temp_free_i64(tmp);
2263 tcg_temp_free_i32(tmp32);
2264 break;
2265 case 0x10: /* TCEB R1,D2(X2,B2) [RXE] */
2266 potential_page_fault(s);
2267 gen_helper_tceb(cc_op, tmp_r1, addr);
2268 set_cc_static(s);
2269 break;
2270 case 0x11: /* TCDB R1,D2(X2,B2) [RXE] */
2271 potential_page_fault(s);
2272 gen_helper_tcdb(cc_op, tmp_r1, addr);
2273 set_cc_static(s);
2274 break;
2275 case 0x12: /* TCXB R1,D2(X2,B2) [RXE] */
2276 potential_page_fault(s);
2277 gen_helper_tcxb(cc_op, tmp_r1, addr);
2278 set_cc_static(s);
2279 break;
2280 case 0x17: /* MEEB R1,D2(X2,B2) [RXE] */
2281 tmp = tcg_temp_new_i64();
2282 tmp32 = tcg_temp_new_i32();
2283 tcg_gen_qemu_ld32u(tmp, addr, get_mem_index(s));
2284 tcg_gen_trunc_i64_i32(tmp32, tmp);
2285 gen_helper_meeb(tmp_r1, tmp32);
2286 tcg_temp_free_i64(tmp);
2287 tcg_temp_free_i32(tmp32);
2288 break;
2289 case 0x19: /* CDB R1,D2(X2,B2) [RXE] */
2290 potential_page_fault(s);
2291 gen_helper_cdb(cc_op, tmp_r1, addr);
2292 set_cc_static(s);
2293 break;
2294 case 0x1a: /* ADB R1,D2(X2,B2) [RXE] */
2295 potential_page_fault(s);
2296 gen_helper_adb(cc_op, tmp_r1, addr);
2297 set_cc_static(s);
2298 break;
2299 case 0x1b: /* SDB R1,D2(X2,B2) [RXE] */
2300 potential_page_fault(s);
2301 gen_helper_sdb(cc_op, tmp_r1, addr);
2302 set_cc_static(s);
2303 break;
2304 case 0x1c: /* MDB R1,D2(X2,B2) [RXE] */
2305 potential_page_fault(s);
2306 gen_helper_mdb(tmp_r1, addr);
2307 break;
2308 case 0x1d: /* DDB R1,D2(X2,B2) [RXE] */
2309 potential_page_fault(s);
2310 gen_helper_ddb(tmp_r1, addr);
2311 break;
2312 case 0x1e: /* MADB R1,R3,D2(X2,B2) [RXF] */
2313 /* for RXF insns, r1 is R3 and r1b is R1 */
2314 tmp32 = tcg_const_i32(r1b);
2315 potential_page_fault(s);
2316 gen_helper_madb(tmp32, addr, tmp_r1);
2317 tcg_temp_free_i32(tmp32);
2318 break;
2319 default:
2320 LOG_DISAS("illegal ed operation 0x%x\n", op);
2321 gen_illegal_opcode(s, 3);
2322 return;
2323 }
2324 tcg_temp_free_i32(tmp_r1);
2325 tcg_temp_free_i64(addr);
2326 }
2327
/* Disassemble one A5-prefixed instruction (RI format, 16-bit immediate
 * operating on one quarter of a 64-bit register): Insert Immediate (IIxx),
 * And Immediate (NIxx), Or Immediate (OIxx) and Load Logical Immediate
 * (LLIxx).  op selects the variant, r1 the register, i2 the immediate.
 * The AND/OR variants additionally set the condition code from the 16-bit
 * halfword that was just modified. */
static void disas_a5(DisasContext *s, int op, int r1, int i2)
{
    TCGv_i64 tmp, tmp2;
    TCGv_i32 tmp32;
    LOG_DISAS("disas_a5: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
    switch (op) {
    case 0x0: /* IIHH R1,I2 [RI] */
        /* insert the immediate into bits 0-15 (high halfword of high word) */
        tmp = tcg_const_i64(i2);
        tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 48, 16);
        break;
    case 0x1: /* IIHL R1,I2 [RI] */
        tmp = tcg_const_i64(i2);
        tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 32, 16);
        break;
    case 0x2: /* IILH R1,I2 [RI] */
        tmp = tcg_const_i64(i2);
        tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 16, 16);
        break;
    case 0x3: /* IILL R1,I2 [RI] */
        tmp = tcg_const_i64(i2);
        tcg_gen_deposit_i64(regs[r1], regs[r1], tmp, 0, 16);
        break;
    case 0x4: /* NIHH R1,I2 [RI] */
    case 0x8: /* OIHH R1,I2 [RI] */
        tmp = load_reg(r1);
        tmp32 = tcg_temp_new_i32();
        switch (op) {
        case 0x4:
            /* AND against the immediate in bits 48-63, all other bits kept */
            tmp2 = tcg_const_i64((((uint64_t)i2) << 48)
                                 | 0x0000ffffffffffffULL);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0x8:
            tmp2 = tcg_const_i64(((uint64_t)i2) << 48);
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        /* cc is derived from the affected halfword only; tmp2 is reused
           as scratch for the shifted-down result */
        tcg_gen_shri_i64(tmp2, tmp, 48);
        tcg_gen_trunc_i64_i32(tmp32, tmp2);
        set_cc_nz_u32(s, tmp32);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32);
        break;
    case 0x5: /* NIHL R1,I2 [RI] */
    case 0x9: /* OIHL R1,I2 [RI] */
        tmp = load_reg(r1);
        tmp32 = tcg_temp_new_i32();
        switch (op) {
        case 0x5:
            tmp2 = tcg_const_i64((((uint64_t)i2) << 32)
                                 | 0xffff0000ffffffffULL);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0x9:
            tmp2 = tcg_const_i64(((uint64_t)i2) << 32);
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        tcg_gen_shri_i64(tmp2, tmp, 32);
        tcg_gen_trunc_i64_i32(tmp32, tmp2);
        /* mask down to the 16 bits the insn actually touched */
        tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
        set_cc_nz_u32(s, tmp32);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32);
        break;
    case 0x6: /* NILH R1,I2 [RI] */
    case 0xa: /* OILH R1,I2 [RI] */
        tmp = load_reg(r1);
        tmp32 = tcg_temp_new_i32();
        switch (op) {
        case 0x6:
            tmp2 = tcg_const_i64((((uint64_t)i2) << 16)
                                 | 0xffffffff0000ffffULL);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0xa:
            tmp2 = tcg_const_i64(((uint64_t)i2) << 16);
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        /* here tmp itself is used as the shift scratch (already stored) */
        tcg_gen_shri_i64(tmp, tmp, 16);
        tcg_gen_trunc_i64_i32(tmp32, tmp);
        tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
        set_cc_nz_u32(s, tmp32);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32);
        break;
    case 0x7: /* NILL R1,I2 [RI] */
    case 0xb: /* OILL R1,I2 [RI] */
        tmp = load_reg(r1);
        tmp32 = tcg_temp_new_i32();
        switch (op) {
        case 0x7:
            tmp2 = tcg_const_i64(i2 | 0xffffffffffff0000ULL);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            break;
        case 0xb:
            tmp2 = tcg_const_i64(i2);
            tcg_gen_or_i64(tmp, tmp, tmp2);
            break;
        default:
            tcg_abort();
        }
        store_reg(r1, tmp);
        tcg_gen_trunc_i64_i32(tmp32, tmp);
        tcg_gen_andi_i32(tmp32, tmp32, 0xffff);
        set_cc_nz_u32(s, tmp32); /* signedness should not matter here */
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32);
        break;
    case 0xc: /* LLIHH R1,I2 [RI] */
        /* load the zero-extended immediate into the selected halfword,
           clearing the rest of the register */
        tmp = tcg_const_i64( ((uint64_t)i2) << 48 );
        store_reg(r1, tmp);
        break;
    case 0xd: /* LLIHL R1,I2 [RI] */
        tmp = tcg_const_i64( ((uint64_t)i2) << 32 );
        store_reg(r1, tmp);
        break;
    case 0xe: /* LLILH R1,I2 [RI] */
        tmp = tcg_const_i64( ((uint64_t)i2) << 16 );
        store_reg(r1, tmp);
        break;
    case 0xf: /* LLILL R1,I2 [RI] */
        tmp = tcg_const_i64(i2);
        store_reg(r1, tmp);
        break;
    default:
        LOG_DISAS("illegal a5 operation 0x%x\n", op);
        gen_illegal_opcode(s, 2);
        /* must return here: tmp was never allocated on this path */
        return;
    }
    tcg_temp_free_i64(tmp);
}
2470
/* Disassemble one A7-prefixed instruction (RI format): test under mask,
 * relative branches (BRC/BRAS/BRCT/BRCTG) and immediate arithmetic /
 * compare (LHI/AHI/MHI/CHI and their 64-bit G variants).  i2 is the
 * signed 16-bit immediate; for branches it is a halfword offset. */
static void disas_a7(DisasContext *s, int op, int r1, int i2)
{
    TCGv_i64 tmp, tmp2;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
    int l1;

    LOG_DISAS("disas_a7: op 0x%x r1 %d i2 0x%x\n", op, r1, i2);
    switch (op) {
    case 0x0: /* TMLH or TMH R1,I2 [RI] */
    case 0x1: /* TMLL or TML R1,I2 [RI] */
    case 0x2: /* TMHH R1,I2 [RI] */
    case 0x3: /* TMHL R1,I2 [RI] */
        /* shift the tested halfword down to bits 0-15, then compare the
           masked value against the (zero-extended) mask itself */
        tmp = load_reg(r1);
        tmp2 = tcg_const_i64((uint16_t)i2);
        switch (op) {
        case 0x0:
            tcg_gen_shri_i64(tmp, tmp, 16);
            break;
        case 0x1:
            break;
        case 0x2:
            tcg_gen_shri_i64(tmp, tmp, 48);
            break;
        case 0x3:
            tcg_gen_shri_i64(tmp, tmp, 32);
            break;
        }
        tcg_gen_andi_i64(tmp, tmp, 0xffff);
        cmp_64(s, tmp, tmp2, CC_OP_TM_64);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x4: /* brc m1, i2 */
        /* branch offset is in halfwords */
        gen_brc(r1, s, i2 * 2LL);
        return;
    case 0x5: /* BRAS R1,I2 [RI] */
        /* save the link information (address of next insn) first */
        tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        gen_goto_tb(s, 0, s->pc + i2 * 2LL);
        s->is_jmp = DISAS_TB_JUMP;
        break;
    case 0x6: /* BRCT R1,I2 [RI] */
        /* decrement 32-bit counter, branch while it is non-zero */
        tmp32_1 = load_reg32(r1);
        tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
        store_reg32(r1, tmp32_1);
        gen_update_cc_op(s);
        l1 = gen_new_label();
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
        gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
        gen_set_label(l1);
        gen_goto_tb(s, 1, s->pc + 4);
        s->is_jmp = DISAS_TB_JUMP;
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x7: /* BRCTG R1,I2 [RI] */
        /* same as BRCT but on the full 64-bit register */
        tmp = load_reg(r1);
        tcg_gen_subi_i64(tmp, tmp, 1);
        store_reg(r1, tmp);
        gen_update_cc_op(s);
        l1 = gen_new_label();
        tcg_gen_brcondi_i64(TCG_COND_EQ, tmp, 0, l1);
        gen_goto_tb(s, 0, s->pc + (i2 * 2LL));
        gen_set_label(l1);
        gen_goto_tb(s, 1, s->pc + 4);
        s->is_jmp = DISAS_TB_JUMP;
        tcg_temp_free_i64(tmp);
        break;
    case 0x8: /* lhi r1, i2 */
        tmp32_1 = tcg_const_i32(i2);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x9: /* lghi r1, i2 */
        tmp = tcg_const_i64(i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xa: /* AHI R1,I2 [RI] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = tcg_temp_new_i32();
        tmp32_3 = tcg_const_i32(i2);

        /* negative immediates are emitted as a subtraction of -i2;
           i2 is a 16-bit value so -i2 cannot overflow */
        if (i2 < 0) {
            tcg_gen_subi_i32(tmp32_2, tmp32_1, -i2);
        } else {
            tcg_gen_add_i32(tmp32_2, tmp32_1, tmp32_3);
        }

        store_reg32(r1, tmp32_2);
        set_cc_add32(s, tmp32_1, tmp32_3, tmp32_2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0xb: /* aghi r1, i2 */
        tmp = load_reg(r1);
        tmp2 = tcg_const_i64(i2);

        if (i2 < 0) {
            tcg_gen_subi_i64(regs[r1], tmp, -i2);
        } else {
            tcg_gen_add_i64(regs[r1], tmp, tmp2);
        }
        set_cc_add64(s, tmp, tmp2, regs[r1]);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0xc: /* MHI R1,I2 [RI] */
        tmp32_1 = load_reg32(r1);
        tcg_gen_muli_i32(tmp32_1, tmp32_1, i2);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xd: /* MGHI R1,I2 [RI] */
        tmp = load_reg(r1);
        tcg_gen_muli_i64(tmp, tmp, i2);
        store_reg(r1, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0xe: /* CHI R1,I2 [RI] */
        tmp32_1 = load_reg32(r1);
        cmp_s32c(s, tmp32_1, i2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xf: /* CGHI R1,I2 [RI] */
        tmp = load_reg(r1);
        cmp_s64c(s, tmp, i2);
        tcg_temp_free_i64(tmp);
        break;
    default:
        LOG_DISAS("illegal a7 operation 0x%x\n", op);
        gen_illegal_opcode(s, 2);
        return;
    }
}
2607
/* Disassemble one B2-prefixed instruction: mostly RRE-format register
 * operations plus (in system emulation) a number of privileged S-format
 * control instructions.  insn carries the remaining instruction bits;
 * r1/r2 are pre-decoded from them, S-format cases re-decode b2/d2 via
 * decode_rs().  Privileged cases are guarded by check_privileged(). */
static void disas_b2(DisasContext *s, int op, uint32_t insn)
{
    TCGv_i64 tmp, tmp2, tmp3;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
    int r1, r2;
    int ilc = 2;        /* instruction length code: all B2xx insns are 4 bytes */
#ifndef CONFIG_USER_ONLY
    int r3, d2, b2;
#endif

    r1 = (insn >> 4) & 0xf;
    r2 = insn & 0xf;

    LOG_DISAS("disas_b2: op 0x%x r1 %d r2 %d\n", op, r1, r2);

    switch (op) {
    case 0x22: /* IPM R1 [RRE] */
        /* cc must be materialized before the helper can insert it */
        tmp32_1 = tcg_const_i32(r1);
        gen_op_calc_cc(s);
        gen_helper_ipm(cc_op, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x41: /* CKSM R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        potential_page_fault(s);
        gen_helper_cksm(tmp32_1, tmp32_2);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        gen_op_movi_cc(s, 0);
        break;
    case 0x4e: /* SAR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r2);
        tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUState, aregs[r1]));
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x4f: /* EAR R1,R2 [RRE] */
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUState, aregs[r2]));
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x52: /* MSR R1,R2 [RRE] */
        tmp32_1 = load_reg32(r1);
        tmp32_2 = load_reg32(r2);
        tcg_gen_mul_i32(tmp32_1, tmp32_1, tmp32_2);
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x54: /* MVPG R1,R2 [RRE] */
        tmp = load_reg(0);
        tmp2 = load_reg(r1);
        tmp3 = load_reg(r2);
        potential_page_fault(s);
        gen_helper_mvpg(tmp, tmp2, tmp3);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        /* XXX check CCO bit and set CC accordingly */
        gen_op_movi_cc(s, 0);
        break;
    case 0x55: /* MVST R1,R2 [RRE] */
        tmp32_1 = load_reg32(0);
        tmp32_2 = tcg_const_i32(r1);
        tmp32_3 = tcg_const_i32(r2);
        potential_page_fault(s);
        gen_helper_mvst(tmp32_1, tmp32_2, tmp32_3);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        gen_op_movi_cc(s, 1);
        break;
    case 0x5d: /* CLST R1,R2 [RRE] */
        tmp32_1 = load_reg32(0);
        tmp32_2 = tcg_const_i32(r1);
        tmp32_3 = tcg_const_i32(r2);
        potential_page_fault(s);
        gen_helper_clst(cc_op, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x5e: /* SRST R1,R2 [RRE] */
        tmp32_1 = load_reg32(0);
        tmp32_2 = tcg_const_i32(r1);
        tmp32_3 = tcg_const_i32(r2);
        potential_page_fault(s);
        gen_helper_srst(cc_op, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;

#ifndef CONFIG_USER_ONLY
    case 0x02: /* STIDP D2(B2) [S] */
        /* Store CPU ID */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stidp(tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x04: /* SCK D2(B2) [S] */
        /* Set Clock */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_sck(cc_op, tmp);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        break;
    case 0x05: /* STCK D2(B2) [S] */
        /* Store Clock - not privileged */
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stck(cc_op, tmp);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        break;
    case 0x06: /* SCKC D2(B2) [S] */
        /* Set Clock Comparator */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_sckc(tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x07: /* STCKC D2(B2) [S] */
        /* Store Clock Comparator */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stckc(tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x08: /* SPT D2(B2) [S] */
        /* Set CPU Timer */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_spt(tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x09: /* STPT D2(B2) [S] */
        /* Store CPU Timer */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stpt(tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x0a: /* SPKA D2(B2) [S] */
        /* Set PSW Key from Address: replace the key field of the PSW mask
           with bits of the second-operand address */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_andi_i64(tmp2, psw_mask, ~PSW_MASK_KEY);
        tcg_gen_shli_i64(tmp, tmp, PSW_SHIFT_KEY - 4);
        tcg_gen_or_i64(psw_mask, tmp2, tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp);
        break;
    case 0x0d: /* PTLB [S] */
        /* Purge TLB */
        check_privileged(s, ilc);
        gen_helper_ptlb();
        break;
    case 0x10: /* SPX D2(B2) [S] */
        /* Set Prefix Register */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_spx(tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x11: /* STPX D2(B2) [S] */
        /* Store Prefix */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tcg_gen_ld_i64(tmp2, cpu_env, offsetof(CPUState, psa));
        tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x12: /* STAP D2(B2) [S] */
        /* Store CPU Address */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUState, cpu_num));
        tcg_gen_extu_i32_i64(tmp2, tmp32_1);
        tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x21: /* IPTE R1,R2 [RRE] */
        /* Invalidate PTE */
        check_privileged(s, ilc);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp = load_reg(r1);
        tmp2 = load_reg(r2);
        gen_helper_ipte(tmp, tmp2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x29: /* ISKE R1,R2 [RRE] */
        /* Insert Storage Key Extended */
        check_privileged(s, ilc);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp = load_reg(r2);
        tmp2 = tcg_temp_new_i64();
        gen_helper_iske(tmp2, tmp);
        store_reg(r1, tmp2);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        break;
    case 0x2a: /* RRBE R1,R2 [RRE] */
        /* Reset Reference Bit Extended */
        check_privileged(s, ilc);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp32_1 = load_reg32(r1);
        tmp = load_reg(r2);
        gen_helper_rrbe(cc_op, tmp32_1, tmp);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x2b: /* SSKE R1,R2 [RRE] */
        /* Set Storage Key Extended */
        check_privileged(s, ilc);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp32_1 = load_reg32(r1);
        tmp = load_reg(r2);
        gen_helper_sske(tmp32_1, tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x34: /* STCH ? */
        /* Store Subchannel - not implemented, report "not operational" */
        check_privileged(s, ilc);
        gen_op_movi_cc(s, 3);
        break;
    case 0x46: /* STURA R1,R2 [RRE] */
        /* Store Using Real Address */
        check_privileged(s, ilc);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp32_1 = load_reg32(r1);
        tmp = load_reg(r2);
        potential_page_fault(s);
        gen_helper_stura(tmp, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0x50: /* CSP R1,R2 [RRE] */
        /* Compare And Swap And Purge */
        check_privileged(s, ilc);
        r1 = (insn >> 4) & 0xf;
        r2 = insn & 0xf;
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        gen_helper_csp(cc_op, tmp32_1, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x5f: /* CHSC ? */
        /* Channel Subsystem Call - not implemented, report "not operational" */
        check_privileged(s, ilc);
        gen_op_movi_cc(s, 3);
        break;
    case 0x78: /* STCKE D2(B2) [S] */
        /* Store Clock Extended - not privileged */
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_stcke(cc_op, tmp);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        break;
    case 0x79: /* SACF D2(B2) [S] */
        /* Set Address Space Control Fast */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        potential_page_fault(s);
        gen_helper_sacf(tmp);
        tcg_temp_free_i64(tmp);
        /* addressing mode has changed, so end the block */
        s->pc += ilc * 2;
        update_psw_addr(s);
        s->is_jmp = DISAS_EXCP;
        break;
    case 0x7d: /* STSI D2,(B2) [S] */
        /* Store System Information: function code in r0, selectors in r1 */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp32_1 = load_reg32(0);
        tmp32_2 = load_reg32(1);
        potential_page_fault(s);
        gen_helper_stsi(cc_op, tmp, tmp32_1, tmp32_2);
        set_cc_static(s);
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x9d: /* LFPC D2(B2) [S] */
        /* Load FP Control from storage */
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
        tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
        tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUState, fpc));
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0xb1: /* STFL D2(B2) [S] */
        /* Store Facility List (CPU features) at 200 */
        check_privileged(s, ilc);
        tmp2 = tcg_const_i64(0xc0000000);
        tmp = tcg_const_i64(200);
        tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp);
        break;
    case 0xb2: /* LPSWE D2(B2) [S] */
        /* Load PSW Extended: 16-byte operand, mask then address */
        check_privileged(s, ilc);
        decode_rs(s, insn, &r1, &r3, &b2, &d2);
        tmp = get_address(s, 0, b2, d2);
        tmp2 = tcg_temp_new_i64();
        tmp3 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
        tcg_gen_addi_i64(tmp, tmp, 8);
        tcg_gen_qemu_ld64(tmp3, tmp, get_mem_index(s));
        gen_helper_load_psw(tmp2, tmp3);
        /* we need to keep cc_op intact */
        s->is_jmp = DISAS_JUMP;
        tcg_temp_free_i64(tmp);
        tcg_temp_free_i64(tmp2);
        tcg_temp_free_i64(tmp3);
        break;
    case 0x20: /* SERVC R1,R2 [RRE] */
        /* SCLP Service call (PV hypercall) */
        check_privileged(s, ilc);
        potential_page_fault(s);
        tmp32_1 = load_reg32(r2);
        tmp = load_reg(r1);
        gen_helper_servc(cc_op, tmp32_1, tmp);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
#endif
    default:
        LOG_DISAS("illegal b2 operation 0x%x\n", op);
        gen_illegal_opcode(s, ilc);
        break;
    }
}
2991
/* Disassemble one B3-prefixed (RRE/RRF format binary floating point)
 * instruction.  Most cases just forward the register numbers to a helper;
 * FP_HELPER wraps the plain case, FP_HELPER_CC additionally latches the
 * condition code returned by the helper. */
static void disas_b3(DisasContext *s, int op, int m3, int r1, int r2)
{
    TCGv_i64 tmp;
    TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
    LOG_DISAS("disas_b3: op 0x%x m3 0x%x r1 %d r2 %d\n", op, m3, r1, r2);
#define FP_HELPER(i) \
    tmp32_1 = tcg_const_i32(r1); \
    tmp32_2 = tcg_const_i32(r2); \
    gen_helper_ ## i (tmp32_1, tmp32_2); \
    tcg_temp_free_i32(tmp32_1); \
    tcg_temp_free_i32(tmp32_2);

#define FP_HELPER_CC(i) \
    tmp32_1 = tcg_const_i32(r1); \
    tmp32_2 = tcg_const_i32(r2); \
    gen_helper_ ## i (cc_op, tmp32_1, tmp32_2); \
    set_cc_static(s); \
    tcg_temp_free_i32(tmp32_1); \
    tcg_temp_free_i32(tmp32_2);

    switch (op) {
    case 0x0: /* LPEBR R1,R2 [RRE] */
        FP_HELPER_CC(lpebr);
        break;
    case 0x2: /* LTEBR R1,R2 [RRE] */
        FP_HELPER_CC(ltebr);
        break;
    case 0x3: /* LCEBR R1,R2 [RRE] */
        FP_HELPER_CC(lcebr);
        break;
    case 0x4: /* LDEBR R1,R2 [RRE] */
        FP_HELPER(ldebr);
        break;
    case 0x5: /* LXDBR R1,R2 [RRE] */
        FP_HELPER(lxdbr);
        break;
    case 0x9: /* CEBR R1,R2 [RRE] */
        FP_HELPER_CC(cebr);
        break;
    case 0xa: /* AEBR R1,R2 [RRE] */
        FP_HELPER_CC(aebr);
        break;
    case 0xb: /* SEBR R1,R2 [RRE] */
        FP_HELPER_CC(sebr);
        break;
    case 0xd: /* DEBR R1,R2 [RRE] */
        FP_HELPER(debr);
        break;
    case 0x10: /* LPDBR R1,R2 [RRE] */
        FP_HELPER_CC(lpdbr);
        break;
    case 0x12: /* LTDBR R1,R2 [RRE] */
        FP_HELPER_CC(ltdbr);
        break;
    case 0x13: /* LCDBR R1,R2 [RRE] */
        FP_HELPER_CC(lcdbr);
        break;
    case 0x15: /* SQDBR R1,R2 [RRE] */
        FP_HELPER(sqdbr);
        break;
    case 0x17: /* MEEBR R1,R2 [RRE] */
        FP_HELPER(meebr);
        break;
    case 0x19: /* CDBR R1,R2 [RRE] */
        FP_HELPER_CC(cdbr);
        break;
    case 0x1a: /* ADBR R1,R2 [RRE] */
        FP_HELPER_CC(adbr);
        break;
    case 0x1b: /* SDBR R1,R2 [RRE] */
        FP_HELPER_CC(sdbr);
        break;
    case 0x1c: /* MDBR R1,R2 [RRE] */
        FP_HELPER(mdbr);
        break;
    case 0x1d: /* DDBR R1,R2 [RRE] */
        FP_HELPER(ddbr);
        break;
    case 0xe: /* MAEBR R1,R3,R2 [RRF] */
    case 0x1e: /* MADBR R1,R3,R2 [RRF] */
    case 0x1f: /* MSDBR R1,R3,R2 [RRF] */
        /* for RRF insns, m3 is R1, r1 is R3, and r2 is R2 */
        tmp32_1 = tcg_const_i32(m3);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(r1);
        switch (op) {
        case 0xe:
            gen_helper_maebr(tmp32_1, tmp32_3, tmp32_2);
            break;
        case 0x1e:
            gen_helper_madbr(tmp32_1, tmp32_3, tmp32_2);
            break;
        case 0x1f:
            gen_helper_msdbr(tmp32_1, tmp32_3, tmp32_2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0x40: /* LPXBR R1,R2 [RRE] */
        FP_HELPER_CC(lpxbr);
        break;
    case 0x42: /* LTXBR R1,R2 [RRE] */
        FP_HELPER_CC(ltxbr);
        break;
    case 0x43: /* LCXBR R1,R2 [RRE] */
        FP_HELPER_CC(lcxbr);
        break;
    case 0x44: /* LEDBR R1,R2 [RRE] */
        FP_HELPER(ledbr);
        break;
    case 0x45: /* LDXBR R1,R2 [RRE] */
        FP_HELPER(ldxbr);
        break;
    case 0x46: /* LEXBR R1,R2 [RRE] */
        FP_HELPER(lexbr);
        break;
    case 0x49: /* CXBR R1,R2 [RRE] */
        FP_HELPER_CC(cxbr);
        break;
    case 0x4a: /* AXBR R1,R2 [RRE] */
        FP_HELPER_CC(axbr);
        break;
    case 0x4b: /* SXBR R1,R2 [RRE] */
        FP_HELPER_CC(sxbr);
        break;
    case 0x4c: /* MXBR R1,R2 [RRE] */
        FP_HELPER(mxbr);
        break;
    case 0x4d: /* DXBR R1,R2 [RRE] */
        FP_HELPER(dxbr);
        break;
    case 0x65: /* LXR R1,R2 [RRE] */
        /* 128-bit value lives in the fp register pair r2/r2+2 */
        tmp = load_freg(r2);
        store_freg(r1, tmp);
        tcg_temp_free_i64(tmp);
        tmp = load_freg(r2 + 2);
        store_freg(r1 + 2, tmp);
        tcg_temp_free_i64(tmp);
        break;
    case 0x74: /* LZER R1 [RRE] */
        FP_HELPER(lzer) /* wrong arity for macro */;
        break;
    case 0x75: /* LZDR R1 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        gen_helper_lzdr(tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x76: /* LZXR R1 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        gen_helper_lzxr(tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x84: /* SFPC R1 [RRE] */
        tmp32_1 = load_reg32(r1);
        tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUState, fpc));
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x8c: /* EFPC R1 [RRE] */
        tmp32_1 = tcg_temp_new_i32();
        tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUState, fpc));
        store_reg32(r1, tmp32_1);
        tcg_temp_free_i32(tmp32_1);
        break;
    case 0x94: /* CEFBR R1,R2 [RRE] */
    case 0x95: /* CDFBR R1,R2 [RRE] */
    case 0x96: /* CXFBR R1,R2 [RRE] */
        /* convert from 32-bit fixed */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = load_reg32(r2);
        switch (op) {
        case 0x94:
            gen_helper_cefbr(tmp32_1, tmp32_2);
            break;
        case 0x95:
            gen_helper_cdfbr(tmp32_1, tmp32_2);
            break;
        case 0x96:
            gen_helper_cxfbr(tmp32_1, tmp32_2);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        break;
    case 0x98: /* CFEBR R1,R2 [RRE] */
    case 0x99: /* CFDBR R1,R2 [RRE] */
    case 0x9a: /* CFXBR R1,R2 [RRE] */
        /* convert to 32-bit fixed; m3 is the rounding mode */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(m3);
        switch (op) {
        case 0x98:
            gen_helper_cfebr(cc_op, tmp32_1, tmp32_2, tmp32_3);
            break;
        case 0x99:
            gen_helper_cfdbr(cc_op, tmp32_1, tmp32_2, tmp32_3);
            break;
        case 0x9a:
            gen_helper_cfxbr(cc_op, tmp32_1, tmp32_2, tmp32_3);
            break;
        default:
            tcg_abort();
        }
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0xa4: /* CEGBR R1,R2 [RRE] */
    case 0xa5: /* CDGBR R1,R2 [RRE] */
        /* convert from 64-bit fixed */
        tmp32_1 = tcg_const_i32(r1);
        tmp = load_reg(r2);
        switch (op) {
        case 0xa4:
            gen_helper_cegbr(tmp32_1, tmp);
            break;
        case 0xa5:
            gen_helper_cdgbr(tmp32_1, tmp);
            break;
        default:
            tcg_abort();
        }
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0xa6: /* CXGBR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp = load_reg(r2);
        gen_helper_cxgbr(tmp32_1, tmp);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i64(tmp);
        break;
    case 0xa8: /* CGEBR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(m3);
        gen_helper_cgebr(cc_op, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0xa9: /* CGDBR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(m3);
        gen_helper_cgdbr(cc_op, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    case 0xaa: /* CGXBR R1,R2 [RRE] */
        tmp32_1 = tcg_const_i32(r1);
        tmp32_2 = tcg_const_i32(r2);
        tmp32_3 = tcg_const_i32(m3);
        gen_helper_cgxbr(cc_op, tmp32_1, tmp32_2, tmp32_3);
        set_cc_static(s);
        tcg_temp_free_i32(tmp32_1);
        tcg_temp_free_i32(tmp32_2);
        tcg_temp_free_i32(tmp32_3);
        break;
    default:
        LOG_DISAS("illegal b3 operation 0x%x\n", op);
        gen_illegal_opcode(s, 2);
        break;
    }

#undef FP_HELPER_CC
#undef FP_HELPER
}
3269
3270 static void disas_b9(DisasContext *s, int op, int r1, int r2)
3271 {
3272 TCGv_i64 tmp, tmp2, tmp3;
3273 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
3274
3275 LOG_DISAS("disas_b9: op 0x%x r1 %d r2 %d\n", op, r1, r2);
3276 switch (op) {
3277 case 0x0: /* LPGR R1,R2 [RRE] */
3278 case 0x1: /* LNGR R1,R2 [RRE] */
3279 case 0x2: /* LTGR R1,R2 [RRE] */
3280 case 0x3: /* LCGR R1,R2 [RRE] */
3281 case 0x10: /* LPGFR R1,R2 [RRE] */
3282 case 0x11: /* LNFGR R1,R2 [RRE] */
3283 case 0x12: /* LTGFR R1,R2 [RRE] */
3284 case 0x13: /* LCGFR R1,R2 [RRE] */
3285 if (op & 0x10) {
3286 tmp = load_reg32_i64(r2);
3287 } else {
3288 tmp = load_reg(r2);
3289 }
3290 switch (op & 0xf) {
3291 case 0x0: /* LP?GR */
3292 set_cc_abs64(s, tmp);
3293 gen_helper_abs_i64(tmp, tmp);
3294 store_reg(r1, tmp);
3295 break;
3296 case 0x1: /* LN?GR */
3297 set_cc_nabs64(s, tmp);
3298 gen_helper_nabs_i64(tmp, tmp);
3299 store_reg(r1, tmp);
3300 break;
3301 case 0x2: /* LT?GR */
3302 if (r1 != r2) {
3303 store_reg(r1, tmp);
3304 }
3305 set_cc_s64(s, tmp);
3306 break;
3307 case 0x3: /* LC?GR */
3308 tcg_gen_neg_i64(regs[r1], tmp);
3309 set_cc_comp64(s, regs[r1]);
3310 break;
3311 }
3312 tcg_temp_free_i64(tmp);
3313 break;
3314 case 0x4: /* LGR R1,R2 [RRE] */
3315 store_reg(r1, regs[r2]);
3316 break;
3317 case 0x6: /* LGBR R1,R2 [RRE] */
3318 tmp2 = load_reg(r2);
3319 tcg_gen_ext8s_i64(tmp2, tmp2);
3320 store_reg(r1, tmp2);
3321 tcg_temp_free_i64(tmp2);
3322 break;
3323 case 0x8: /* AGR R1,R2 [RRE] */
3324 case 0xa: /* ALGR R1,R2 [RRE] */
3325 tmp = load_reg(r1);
3326 tmp2 = load_reg(r2);
3327 tmp3 = tcg_temp_new_i64();
3328 tcg_gen_add_i64(tmp3, tmp, tmp2);
3329 store_reg(r1, tmp3);
3330 switch (op) {
3331 case 0x8:
3332 set_cc_add64(s, tmp, tmp2, tmp3);
3333 break;
3334 case 0xa:
3335 set_cc_addu64(s, tmp, tmp2, tmp3);
3336 break;
3337 default:
3338 tcg_abort();
3339 }
3340 tcg_temp_free_i64(tmp);
3341 tcg_temp_free_i64(tmp2);
3342 tcg_temp_free_i64(tmp3);
3343 break;
3344 case 0x9: /* SGR R1,R2 [RRE] */
3345 case 0xb: /* SLGR R1,R2 [RRE] */
3346 case 0x1b: /* SLGFR R1,R2 [RRE] */
3347 case 0x19: /* SGFR R1,R2 [RRE] */
3348 tmp = load_reg(r1);
3349 switch (op) {
3350 case 0x1b:
3351 tmp32_1 = load_reg32(r2);
3352 tmp2 = tcg_temp_new_i64();
3353 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
3354 tcg_temp_free_i32(tmp32_1);
3355 break;
3356 case 0x19:
3357 tmp32_1 = load_reg32(r2);
3358 tmp2 = tcg_temp_new_i64();
3359 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3360 tcg_temp_free_i32(tmp32_1);
3361 break;
3362 default:
3363 tmp2 = load_reg(r2);
3364 break;
3365 }
3366 tmp3 = tcg_temp_new_i64();
3367 tcg_gen_sub_i64(tmp3, tmp, tmp2);
3368 store_reg(r1, tmp3);
3369 switch (op) {
3370 case 0x9:
3371 case 0x19:
3372 set_cc_sub64(s, tmp, tmp2, tmp3);
3373 break;
3374 case 0xb:
3375 case 0x1b:
3376 set_cc_subu64(s, tmp, tmp2, tmp3);
3377 break;
3378 default:
3379 tcg_abort();
3380 }
3381 tcg_temp_free_i64(tmp);
3382 tcg_temp_free_i64(tmp2);
3383 tcg_temp_free_i64(tmp3);
3384 break;
3385 case 0xc: /* MSGR R1,R2 [RRE] */
3386 case 0x1c: /* MSGFR R1,R2 [RRE] */
3387 tmp = load_reg(r1);
3388 tmp2 = load_reg(r2);
3389 if (op == 0x1c) {
3390 tcg_gen_ext32s_i64(tmp2, tmp2);
3391 }
3392 tcg_gen_mul_i64(tmp, tmp, tmp2);
3393 store_reg(r1, tmp);
3394 tcg_temp_free_i64(tmp);
3395 tcg_temp_free_i64(tmp2);
3396 break;
3397 case 0xd: /* DSGR R1,R2 [RRE] */
3398 case 0x1d: /* DSGFR R1,R2 [RRE] */
3399 tmp = load_reg(r1 + 1);
3400 if (op == 0xd) {
3401 tmp2 = load_reg(r2);
3402 } else {
3403 tmp32_1 = load_reg32(r2);
3404 tmp2 = tcg_temp_new_i64();
3405 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3406 tcg_temp_free_i32(tmp32_1);
3407 }
3408 tmp3 = tcg_temp_new_i64();
3409 tcg_gen_div_i64(tmp3, tmp, tmp2);
3410 store_reg(r1 + 1, tmp3);
3411 tcg_gen_rem_i64(tmp3, tmp, tmp2);
3412 store_reg(r1, tmp3);
3413 tcg_temp_free_i64(tmp);
3414 tcg_temp_free_i64(tmp2);
3415 tcg_temp_free_i64(tmp3);
3416 break;
3417 case 0x14: /* LGFR R1,R2 [RRE] */
3418 tmp32_1 = load_reg32(r2);
3419 tmp = tcg_temp_new_i64();
3420 tcg_gen_ext_i32_i64(tmp, tmp32_1);
3421 store_reg(r1, tmp);
3422 tcg_temp_free_i32(tmp32_1);
3423 tcg_temp_free_i64(tmp);
3424 break;
3425 case 0x16: /* LLGFR R1,R2 [RRE] */
3426 tmp32_1 = load_reg32(r2);
3427 tmp = tcg_temp_new_i64();
3428 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3429 store_reg(r1, tmp);
3430 tcg_temp_free_i32(tmp32_1);
3431 tcg_temp_free_i64(tmp);
3432 break;
3433 case 0x17: /* LLGTR R1,R2 [RRE] */
3434 tmp32_1 = load_reg32(r2);
3435 tmp = tcg_temp_new_i64();
3436 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0x7fffffffUL);
3437 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3438 store_reg(r1, tmp);
3439 tcg_temp_free_i32(tmp32_1);
3440 tcg_temp_free_i64(tmp);
3441 break;
3442 case 0x18: /* AGFR R1,R2 [RRE] */
3443 case 0x1a: /* ALGFR R1,R2 [RRE] */
3444 tmp32_1 = load_reg32(r2);
3445 tmp2 = tcg_temp_new_i64();
3446 if (op == 0x18) {
3447 tcg_gen_ext_i32_i64(tmp2, tmp32_1);
3448 } else {
3449 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
3450 }
3451 tcg_temp_free_i32(tmp32_1);
3452 tmp = load_reg(r1);
3453 tmp3 = tcg_temp_new_i64();
3454 tcg_gen_add_i64(tmp3, tmp, tmp2);
3455 store_reg(r1, tmp3);
3456 if (op == 0x18) {
3457 set_cc_add64(s, tmp, tmp2, tmp3);
3458 } else {
3459 set_cc_addu64(s, tmp, tmp2, tmp3);
3460 }
3461 tcg_temp_free_i64(tmp);
3462 tcg_temp_free_i64(tmp2);
3463 tcg_temp_free_i64(tmp3);
3464 break;
3465 case 0x1f: /* LRVR R1,R2 [RRE] */
3466 tmp32_1 = load_reg32(r2);
3467 tcg_gen_bswap32_i32(tmp32_1, tmp32_1);
3468 store_reg32(r1, tmp32_1);
3469 tcg_temp_free_i32(tmp32_1);
3470 break;
3471 case 0x20: /* CGR R1,R2 [RRE] */
3472 case 0x30: /* CGFR R1,R2 [RRE] */
3473 tmp2 = load_reg(r2);
3474 if (op == 0x30) {
3475 tcg_gen_ext32s_i64(tmp2, tmp2);
3476 }
3477 tmp = load_reg(r1);
3478 cmp_s64(s, tmp, tmp2);
3479 tcg_temp_free_i64(tmp);
3480 tcg_temp_free_i64(tmp2);
3481 break;
3482 case 0x21: /* CLGR R1,R2 [RRE] */
3483 case 0x31: /* CLGFR R1,R2 [RRE] */
3484 tmp2 = load_reg(r2);
3485 if (op == 0x31) {
3486 tcg_gen_ext32u_i64(tmp2, tmp2);
3487 }
3488 tmp = load_reg(r1);
3489 cmp_u64(s, tmp, tmp2);
3490 tcg_temp_free_i64(tmp);
3491 tcg_temp_free_i64(tmp2);
3492 break;
3493 case 0x26: /* LBR R1,R2 [RRE] */
3494 tmp32_1 = load_reg32(r2);
3495 tcg_gen_ext8s_i32(tmp32_1, tmp32_1);
3496 store_reg32(r1, tmp32_1);
3497 tcg_temp_free_i32(tmp32_1);
3498 break;
3499 case 0x27: /* LHR R1,R2 [RRE] */
3500 tmp32_1 = load_reg32(r2);
3501 tcg_gen_ext16s_i32(tmp32_1, tmp32_1);
3502 store_reg32(r1, tmp32_1);
3503 tcg_temp_free_i32(tmp32_1);
3504 break;
3505 case 0x80: /* NGR R1,R2 [RRE] */
3506 case 0x81: /* OGR R1,R2 [RRE] */
3507 case 0x82: /* XGR R1,R2 [RRE] */
3508 tmp = load_reg(r1);
3509 tmp2 = load_reg(r2);
3510 switch (op) {
3511 case 0x80:
3512 tcg_gen_and_i64(tmp, tmp, tmp2);
3513 break;
3514 case 0x81:
3515 tcg_gen_or_i64(tmp, tmp, tmp2);
3516 break;
3517 case 0x82:
3518 tcg_gen_xor_i64(tmp, tmp, tmp2);
3519 break;
3520 default:
3521 tcg_abort();
3522 }
3523 store_reg(r1, tmp);
3524 set_cc_nz_u64(s, tmp);
3525 tcg_temp_free_i64(tmp);
3526 tcg_temp_free_i64(tmp2);
3527 break;
3528 case 0x83: /* FLOGR R1,R2 [RRE] */
3529 tmp = load_reg(r2);
3530 tmp32_1 = tcg_const_i32(r1);
3531 gen_helper_flogr(cc_op, tmp32_1, tmp);
3532 set_cc_static(s);
3533 tcg_temp_free_i64(tmp);
3534 tcg_temp_free_i32(tmp32_1);
3535 break;
3536 case 0x84: /* LLGCR R1,R2 [RRE] */
3537 tmp = load_reg(r2);
3538 tcg_gen_andi_i64(tmp, tmp, 0xff);
3539 store_reg(r1, tmp);
3540 tcg_temp_free_i64(tmp);
3541 break;
3542 case 0x85: /* LLGHR R1,R2 [RRE] */
3543 tmp = load_reg(r2);
3544 tcg_gen_andi_i64(tmp, tmp, 0xffff);
3545 store_reg(r1, tmp);
3546 tcg_temp_free_i64(tmp);
3547 break;
3548 case 0x87: /* DLGR R1,R2 [RRE] */
3549 tmp32_1 = tcg_const_i32(r1);
3550 tmp = load_reg(r2);
3551 gen_helper_dlg(tmp32_1, tmp);
3552 tcg_temp_free_i64(tmp);
3553 tcg_temp_free_i32(tmp32_1);
3554 break;
3555 case 0x88: /* ALCGR R1,R2 [RRE] */
3556 tmp = load_reg(r1);
3557 tmp2 = load_reg(r2);
3558 tmp3 = tcg_temp_new_i64();
3559 gen_op_calc_cc(s);
3560 tcg_gen_extu_i32_i64(tmp3, cc_op);
3561 tcg_gen_shri_i64(tmp3, tmp3, 1);
3562 tcg_gen_andi_i64(tmp3, tmp3, 1);
3563 tcg_gen_add_i64(tmp3, tmp2, tmp3);
3564 tcg_gen_add_i64(tmp3, tmp, tmp3);
3565 store_reg(r1, tmp3);
3566 set_cc_addu64(s, tmp, tmp2, tmp3);
3567 tcg_temp_free_i64(tmp);
3568 tcg_temp_free_i64(tmp2);
3569 tcg_temp_free_i64(tmp3);
3570 break;
3571 case 0x89: /* SLBGR R1,R2 [RRE] */
3572 tmp = load_reg(r1);
3573 tmp2 = load_reg(r2);
3574 tmp32_1 = tcg_const_i32(r1);
3575 gen_op_calc_cc(s);
3576 gen_helper_slbg(cc_op, cc_op, tmp32_1, tmp, tmp2);
3577 set_cc_static(s);
3578 tcg_temp_free_i64(tmp);
3579 tcg_temp_free_i64(tmp2);
3580 tcg_temp_free_i32(tmp32_1);
3581 break;
3582 case 0x94: /* LLCR R1,R2 [RRE] */
3583 tmp32_1 = load_reg32(r2);
3584 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xff);
3585 store_reg32(r1, tmp32_1);
3586 tcg_temp_free_i32(tmp32_1);
3587 break;
3588 case 0x95: /* LLHR R1,R2 [RRE] */
3589 tmp32_1 = load_reg32(r2);
3590 tcg_gen_andi_i32(tmp32_1, tmp32_1, 0xffff);
3591 store_reg32(r1, tmp32_1);
3592 tcg_temp_free_i32(tmp32_1);
3593 break;
3594 case 0x96: /* MLR R1,R2 [RRE] */
3595 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
3596 tmp2 = load_reg(r2);
3597 tmp3 = load_reg((r1 + 1) & 15);
3598 tcg_gen_ext32u_i64(tmp2, tmp2);
3599 tcg_gen_ext32u_i64(tmp3, tmp3);
3600 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
3601 store_reg32_i64((r1 + 1) & 15, tmp2);
3602 tcg_gen_shri_i64(tmp2, tmp2, 32);
3603 store_reg32_i64(r1, tmp2);
3604 tcg_temp_free_i64(tmp2);
3605 tcg_temp_free_i64(tmp3);
3606 break;
3607 case 0x97: /* DLR R1,R2 [RRE] */
3608 /* reg(r1) = reg(r1, r1+1) % reg(r2) */
3609 /* reg(r1+1) = reg(r1, r1+1) / reg(r2) */
3610 tmp = load_reg(r1);
3611 tmp2 = load_reg(r2);
3612 tmp3 = load_reg((r1 + 1) & 15);
3613 tcg_gen_ext32u_i64(tmp2, tmp2);
3614 tcg_gen_ext32u_i64(tmp3, tmp3);
3615 tcg_gen_shli_i64(tmp, tmp, 32);
3616 tcg_gen_or_i64(tmp, tmp, tmp3);
3617
3618 tcg_gen_rem_i64(tmp3, tmp, tmp2);
3619 tcg_gen_div_i64(tmp, tmp, tmp2);
3620 store_reg32_i64((r1 + 1) & 15, tmp);
3621 store_reg32_i64(r1, tmp3);
3622 tcg_temp_free_i64(tmp);
3623 tcg_temp_free_i64(tmp2);
3624 tcg_temp_free_i64(tmp3);
3625 break;
3626 case 0x98: /* ALCR R1,R2 [RRE] */
3627 tmp32_1 = load_reg32(r1);
3628 tmp32_2 = load_reg32(r2);
3629 tmp32_3 = tcg_temp_new_i32();
3630 /* XXX possible optimization point */
3631 gen_op_calc_cc(s);
3632 gen_helper_addc_u32(tmp32_3, cc_op, tmp32_1, tmp32_2);
3633 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
3634 store_reg32(r1, tmp32_3);
3635 tcg_temp_free_i32(tmp32_1);
3636 tcg_temp_free_i32(tmp32_2);
3637 tcg_temp_free_i32(tmp32_3);
3638 break;
3639 case 0x99: /* SLBR R1,R2 [RRE] */
3640 tmp32_1 = load_reg32(r2);
3641 tmp32_2 = tcg_const_i32(r1);
3642 gen_op_calc_cc(s);
3643 gen_helper_slb(cc_op, cc_op, tmp32_2, tmp32_1);
3644 set_cc_static(s);
3645 tcg_temp_free_i32(tmp32_1);
3646 tcg_temp_free_i32(tmp32_2);
3647 break;
3648 default:
3649 LOG_DISAS("illegal b9 operation 0x%x\n", op);
3650 gen_illegal_opcode(s, 2);
3651 break;
3652 }
3653 }
3654
3655 static void disas_c0(DisasContext *s, int op, int r1, int i2)
3656 {
3657 TCGv_i64 tmp;
3658 TCGv_i32 tmp32_1, tmp32_2;
3659 uint64_t target = s->pc + i2 * 2LL;
3660 int l1;
3661
3662 LOG_DISAS("disas_c0: op 0x%x r1 %d i2 %d\n", op, r1, i2);
3663
3664 switch (op) {
3665 case 0: /* larl r1, i2 */
3666 tmp = tcg_const_i64(target);
3667 store_reg(r1, tmp);
3668 tcg_temp_free_i64(tmp);
3669 break;
3670 case 0x1: /* LGFI R1,I2 [RIL] */
3671 tmp = tcg_const_i64((int64_t)i2);
3672 store_reg(r1, tmp);
3673 tcg_temp_free_i64(tmp);
3674 break;
3675 case 0x4: /* BRCL M1,I2 [RIL] */
3676 /* m1 & (1 << (3 - cc)) */
3677 tmp32_1 = tcg_const_i32(3);
3678 tmp32_2 = tcg_const_i32(1);
3679 gen_op_calc_cc(s);
3680 tcg_gen_sub_i32(tmp32_1, tmp32_1, cc_op);
3681 tcg_gen_shl_i32(tmp32_2, tmp32_2, tmp32_1);
3682 tcg_temp_free_i32(tmp32_1);
3683 tmp32_1 = tcg_const_i32(r1); /* m1 == r1 */
3684 tcg_gen_and_i32(tmp32_1, tmp32_1, tmp32_2);
3685 l1 = gen_new_label();
3686 tcg_gen_brcondi_i32(TCG_COND_EQ, tmp32_1, 0, l1);
3687 gen_goto_tb(s, 0, target);
3688 gen_set_label(l1);
3689 gen_goto_tb(s, 1, s->pc + 6);
3690 s->is_jmp = DISAS_TB_JUMP;
3691 tcg_temp_free_i32(tmp32_1);
3692 tcg_temp_free_i32(tmp32_2);
3693 break;
3694 case 0x5: /* brasl r1, i2 */
3695 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 6));
3696 store_reg(r1, tmp);
3697 tcg_temp_free_i64(tmp);
3698 gen_goto_tb(s, 0, target);
3699 s->is_jmp = DISAS_TB_JUMP;
3700 break;
3701 case 0x7: /* XILF R1,I2 [RIL] */
3702 case 0xb: /* NILF R1,I2 [RIL] */
3703 case 0xd: /* OILF R1,I2 [RIL] */
3704 tmp32_1 = load_reg32(r1);
3705 switch (op) {
3706 case 0x7:
3707 tcg_gen_xori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
3708 break;
3709 case 0xb:
3710 tcg_gen_andi_i32(tmp32_1, tmp32_1, (uint32_t)i2);
3711 break;
3712 case 0xd:
3713 tcg_gen_ori_i32(tmp32_1, tmp32_1, (uint32_t)i2);
3714 break;
3715 default:
3716 tcg_abort();
3717 }
3718 store_reg32(r1, tmp32_1);
3719 set_cc_nz_u32(s, tmp32_1);
3720 tcg_temp_free_i32(tmp32_1);
3721 break;
3722 case 0x9: /* IILF R1,I2 [RIL] */
3723 tmp32_1 = tcg_const_i32((uint32_t)i2);
3724 store_reg32(r1, tmp32_1);
3725 tcg_temp_free_i32(tmp32_1);
3726 break;
3727 case 0xa: /* NIHF R1,I2 [RIL] */
3728 tmp = load_reg(r1);
3729 tmp32_1 = tcg_temp_new_i32();
3730 tcg_gen_andi_i64(tmp, tmp, (((uint64_t)((uint32_t)i2)) << 32)
3731 | 0xffffffffULL);
3732 store_reg(r1, tmp);
3733 tcg_gen_shri_i64(tmp, tmp, 32);
3734 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
3735 set_cc_nz_u32(s, tmp32_1);
3736 tcg_temp_free_i64(tmp);
3737 tcg_temp_free_i32(tmp32_1);
3738 break;
3739 case 0xe: /* LLIHF R1,I2 [RIL] */
3740 tmp = tcg_const_i64(((uint64_t)(uint32_t)i2) << 32);
3741 store_reg(r1, tmp);
3742 tcg_temp_free_i64(tmp);
3743 break;
3744 case 0xf: /* LLILF R1,I2 [RIL] */
3745 tmp = tcg_const_i64((uint32_t)i2);
3746 store_reg(r1, tmp);
3747 tcg_temp_free_i64(tmp);
3748 break;
3749 default:
3750 LOG_DISAS("illegal c0 operation 0x%x\n", op);
3751 gen_illegal_opcode(s, 3);
3752 break;
3753 }
3754 }
3755
3756 static void disas_c2(DisasContext *s, int op, int r1, int i2)
3757 {
3758 TCGv_i64 tmp, tmp2, tmp3;
3759 TCGv_i32 tmp32_1, tmp32_2, tmp32_3;
3760
3761 switch (op) {
3762 case 0x4: /* SLGFI R1,I2 [RIL] */
3763 case 0xa: /* ALGFI R1,I2 [RIL] */
3764 tmp = load_reg(r1);
3765 tmp2 = tcg_const_i64((uint64_t)(uint32_t)i2);
3766 tmp3 = tcg_temp_new_i64();
3767 switch (op) {
3768 case 0x4:
3769 tcg_gen_sub_i64(tmp3, tmp, tmp2);
3770 set_cc_subu64(s, tmp, tmp2, tmp3);
3771 break;
3772 case 0xa:
3773 tcg_gen_add_i64(tmp3, tmp, tmp2);
3774 set_cc_addu64(s, tmp, tmp2, tmp3);
3775 break;
3776 default:
3777 tcg_abort();
3778 }
3779 store_reg(r1, tmp3);
3780 tcg_temp_free_i64(tmp);
3781 tcg_temp_free_i64(tmp2);
3782 tcg_temp_free_i64(tmp3);
3783 break;
3784 case 0x5: /* SLFI R1,I2 [RIL] */
3785 case 0xb: /* ALFI R1,I2 [RIL] */
3786 tmp32_1 = load_reg32(r1);
3787 tmp32_2 = tcg_const_i32(i2);
3788 tmp32_3 = tcg_temp_new_i32();
3789 switch (op) {
3790 case 0x5:
3791 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
3792 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
3793 break;
3794 case 0xb:
3795 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
3796 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
3797 break;
3798 default:
3799 tcg_abort();
3800 }
3801 store_reg32(r1, tmp32_3);
3802 tcg_temp_free_i32(tmp32_1);
3803 tcg_temp_free_i32(tmp32_2);
3804 tcg_temp_free_i32(tmp32_3);
3805 break;
3806 case 0xc: /* CGFI R1,I2 [RIL] */
3807 tmp = load_reg(r1);
3808 cmp_s64c(s, tmp, (int64_t)i2);
3809 tcg_temp_free_i64(tmp);
3810 break;
3811 case 0xe: /* CLGFI R1,I2 [RIL] */
3812 tmp = load_reg(r1);
3813 cmp_u64c(s, tmp, (uint64_t)(uint32_t)i2);
3814 tcg_temp_free_i64(tmp);
3815 break;
3816 case 0xd: /* CFI R1,I2 [RIL] */
3817 tmp32_1 = load_reg32(r1);
3818 cmp_s32c(s, tmp32_1, i2);
3819 tcg_temp_free_i32(tmp32_1);
3820 break;
3821 case 0xf: /* CLFI R1,I2 [RIL] */
3822 tmp32_1 = load_reg32(r1);
3823 cmp_u32c(s, tmp32_1, i2);
3824 tcg_temp_free_i32(tmp32_1);
3825 break;
3826 default:
3827 LOG_DISAS("illegal c2 operation 0x%x\n", op);
3828 gen_illegal_opcode(s, 3);
3829 break;
3830 }
3831 }
3832
3833 static void gen_and_or_xor_i32(int opc, TCGv_i32 tmp, TCGv_i32 tmp2)
3834 {
3835 switch (opc & 0xf) {
3836 case 0x4:
3837 tcg_gen_and_i32(tmp, tmp, tmp2);
3838 break;
3839 case 0x6:
3840 tcg_gen_or_i32(tmp, tmp, tmp2);
3841 break;
3842 case 0x7:
3843 tcg_gen_xor_i32(tmp, tmp, tmp2);
3844 break;
3845 default:
3846 tcg_abort();
3847 }
3848 }
3849
3850 static void disas_s390_insn(DisasContext *s)
3851 {
3852 TCGv_i64 tmp, tmp2, tmp3, tmp4;
3853 TCGv_i32 tmp32_1, tmp32_2, tmp32_3, tmp32_4;
3854 unsigned char opc;
3855 uint64_t insn;
3856 int op, r1, r2, r3, d1, d2, x2, b1, b2, i, i2, r1b;
3857 TCGv_i32 vl;
3858 int ilc;
3859 int l1;
3860
3861 opc = ldub_code(s->pc);
3862 LOG_DISAS("opc 0x%x\n", opc);
3863
3864 ilc = get_ilc(opc);
3865
3866 switch (opc) {
3867 #ifndef CONFIG_USER_ONLY
3868 case 0x01: /* SAM */
3869 insn = ld_code2(s->pc);
3870 /* set addressing mode, but we only do 64bit anyways */
3871 break;
3872 #endif
3873 case 0x6: /* BCTR R1,R2 [RR] */
3874 insn = ld_code2(s->pc);
3875 decode_rr(s, insn, &r1, &r2);
3876 tmp32_1 = load_reg32(r1);
3877 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
3878 store_reg32(r1, tmp32_1);
3879
3880 if (r2) {
3881 gen_update_cc_op(s);
3882 l1 = gen_new_label();
3883 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
3884
3885 /* not taking the branch, jump to after the instruction */
3886 gen_goto_tb(s, 0, s->pc + 2);
3887 gen_set_label(l1);
3888
3889 /* take the branch, move R2 into psw.addr */
3890 tmp32_1 = load_reg32(r2);
3891 tmp = tcg_temp_new_i64();
3892 tcg_gen_extu_i32_i64(tmp, tmp32_1);
3893 tcg_gen_mov_i64(psw_addr, tmp);
3894 s->is_jmp = DISAS_JUMP;
3895 tcg_temp_free_i32(tmp32_1);
3896 tcg_temp_free_i64(tmp);
3897 }
3898 break;
3899 case 0x7: /* BCR M1,R2 [RR] */
3900 insn = ld_code2(s->pc);
3901 decode_rr(s, insn, &r1, &r2);
3902 if (r2) {
3903 tmp = load_reg(r2);
3904 gen_bcr(s, r1, tmp, s->pc);
3905 tcg_temp_free_i64(tmp);
3906 s->is_jmp = DISAS_TB_JUMP;
3907 } else {
3908 /* XXX: "serialization and checkpoint-synchronization function"? */
3909 }
3910 break;
3911 case 0xa: /* SVC I [RR] */
3912 insn = ld_code2(s->pc);
3913 debug_insn(insn);
3914 i = insn & 0xff;
3915 update_psw_addr(s);
3916 gen_op_calc_cc(s);
3917 tmp32_1 = tcg_const_i32(i);
3918 tmp32_2 = tcg_const_i32(ilc * 2);
3919 tmp32_3 = tcg_const_i32(EXCP_SVC);
3920 tcg_gen_st_i32(tmp32_1, cpu_env, offsetof(CPUState, int_svc_code));
3921 tcg_gen_st_i32(tmp32_2, cpu_env, offsetof(CPUState, int_svc_ilc));
3922 gen_helper_exception(tmp32_3);
3923 s->is_jmp = DISAS_EXCP;
3924 tcg_temp_free_i32(tmp32_1);
3925 tcg_temp_free_i32(tmp32_2);
3926 tcg_temp_free_i32(tmp32_3);
3927 break;
3928 case 0xd: /* BASR R1,R2 [RR] */
3929 insn = ld_code2(s->pc);
3930 decode_rr(s, insn, &r1, &r2);
3931 tmp = tcg_const_i64(pc_to_link_info(s, s->pc + 2));
3932 store_reg(r1, tmp);
3933 if (r2) {
3934 tmp2 = load_reg(r2);
3935 tcg_gen_mov_i64(psw_addr, tmp2);
3936 tcg_temp_free_i64(tmp2);
3937 s->is_jmp = DISAS_JUMP;
3938 }
3939 tcg_temp_free_i64(tmp);
3940 break;
3941 case 0xe: /* MVCL R1,R2 [RR] */
3942 insn = ld_code2(s->pc);
3943 decode_rr(s, insn, &r1, &r2);
3944 tmp32_1 = tcg_const_i32(r1);
3945 tmp32_2 = tcg_const_i32(r2);
3946 potential_page_fault(s);
3947 gen_helper_mvcl(cc_op, tmp32_1, tmp32_2);
3948 set_cc_static(s);
3949 tcg_temp_free_i32(tmp32_1);
3950 tcg_temp_free_i32(tmp32_2);
3951 break;
3952 case 0x10: /* LPR R1,R2 [RR] */
3953 insn = ld_code2(s->pc);
3954 decode_rr(s, insn, &r1, &r2);
3955 tmp32_1 = load_reg32(r2);
3956 set_cc_abs32(s, tmp32_1);
3957 gen_helper_abs_i32(tmp32_1, tmp32_1);
3958 store_reg32(r1, tmp32_1);
3959 tcg_temp_free_i32(tmp32_1);
3960 break;
3961 case 0x11: /* LNR R1,R2 [RR] */
3962 insn = ld_code2(s->pc);
3963 decode_rr(s, insn, &r1, &r2);
3964 tmp32_1 = load_reg32(r2);
3965 set_cc_nabs32(s, tmp32_1);
3966 gen_helper_nabs_i32(tmp32_1, tmp32_1);
3967 store_reg32(r1, tmp32_1);
3968 tcg_temp_free_i32(tmp32_1);
3969 break;
3970 case 0x12: /* LTR R1,R2 [RR] */
3971 insn = ld_code2(s->pc);
3972 decode_rr(s, insn, &r1, &r2);
3973 tmp32_1 = load_reg32(r2);
3974 if (r1 != r2) {
3975 store_reg32(r1, tmp32_1);
3976 }
3977 set_cc_s32(s, tmp32_1);
3978 tcg_temp_free_i32(tmp32_1);
3979 break;
3980 case 0x13: /* LCR R1,R2 [RR] */
3981 insn = ld_code2(s->pc);
3982 decode_rr(s, insn, &r1, &r2);
3983 tmp32_1 = load_reg32(r2);
3984 tcg_gen_neg_i32(tmp32_1, tmp32_1);
3985 store_reg32(r1, tmp32_1);
3986 set_cc_comp32(s, tmp32_1);
3987 tcg_temp_free_i32(tmp32_1);
3988 break;
3989 case 0x14: /* NR R1,R2 [RR] */
3990 case 0x16: /* OR R1,R2 [RR] */
3991 case 0x17: /* XR R1,R2 [RR] */
3992 insn = ld_code2(s->pc);
3993 decode_rr(s, insn, &r1, &r2);
3994 tmp32_2 = load_reg32(r2);
3995 tmp32_1 = load_reg32(r1);
3996 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
3997 store_reg32(r1, tmp32_1);
3998 set_cc_nz_u32(s, tmp32_1);
3999 tcg_temp_free_i32(tmp32_1);
4000 tcg_temp_free_i32(tmp32_2);
4001 break;
4002 case 0x18: /* LR R1,R2 [RR] */
4003 insn = ld_code2(s->pc);
4004 decode_rr(s, insn, &r1, &r2);
4005 tmp32_1 = load_reg32(r2);
4006 store_reg32(r1, tmp32_1);
4007 tcg_temp_free_i32(tmp32_1);
4008 break;
4009 case 0x15: /* CLR R1,R2 [RR] */
4010 case 0x19: /* CR R1,R2 [RR] */
4011 insn = ld_code2(s->pc);
4012 decode_rr(s, insn, &r1, &r2);
4013 tmp32_1 = load_reg32(r1);
4014 tmp32_2 = load_reg32(r2);
4015 if (opc == 0x15) {
4016 cmp_u32(s, tmp32_1, tmp32_2);
4017 } else {
4018 cmp_s32(s, tmp32_1, tmp32_2);
4019 }
4020 tcg_temp_free_i32(tmp32_1);
4021 tcg_temp_free_i32(tmp32_2);
4022 break;
4023 case 0x1a: /* AR R1,R2 [RR] */
4024 case 0x1e: /* ALR R1,R2 [RR] */
4025 insn = ld_code2(s->pc);
4026 decode_rr(s, insn, &r1, &r2);
4027 tmp32_1 = load_reg32(r1);
4028 tmp32_2 = load_reg32(r2);
4029 tmp32_3 = tcg_temp_new_i32();
4030 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4031 store_reg32(r1, tmp32_3);
4032 if (opc == 0x1a) {
4033 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4034 } else {
4035 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
4036 }
4037 tcg_temp_free_i32(tmp32_1);
4038 tcg_temp_free_i32(tmp32_2);
4039 tcg_temp_free_i32(tmp32_3);
4040 break;
4041 case 0x1b: /* SR R1,R2 [RR] */
4042 case 0x1f: /* SLR R1,R2 [RR] */
4043 insn = ld_code2(s->pc);
4044 decode_rr(s, insn, &r1, &r2);
4045 tmp32_1 = load_reg32(r1);
4046 tmp32_2 = load_reg32(r2);
4047 tmp32_3 = tcg_temp_new_i32();
4048 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4049 store_reg32(r1, tmp32_3);
4050 if (opc == 0x1b) {
4051 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4052 } else {
4053 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
4054 }
4055 tcg_temp_free_i32(tmp32_1);
4056 tcg_temp_free_i32(tmp32_2);
4057 tcg_temp_free_i32(tmp32_3);
4058 break;
4059 case 0x1c: /* MR R1,R2 [RR] */
4060 /* reg(r1, r1+1) = reg(r1+1) * reg(r2) */
4061 insn = ld_code2(s->pc);
4062 decode_rr(s, insn, &r1, &r2);
4063 tmp2 = load_reg(r2);
4064 tmp3 = load_reg((r1 + 1) & 15);
4065 tcg_gen_ext32s_i64(tmp2, tmp2);
4066 tcg_gen_ext32s_i64(tmp3, tmp3);
4067 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
4068 store_reg32_i64((r1 + 1) & 15, tmp2);
4069 tcg_gen_shri_i64(tmp2, tmp2, 32);
4070 store_reg32_i64(r1, tmp2);
4071 tcg_temp_free_i64(tmp2);
4072 tcg_temp_free_i64(tmp3);
4073 break;
4074 case 0x1d: /* DR R1,R2 [RR] */
4075 insn = ld_code2(s->pc);
4076 decode_rr(s, insn, &r1, &r2);
4077 tmp32_1 = load_reg32(r1);
4078 tmp32_2 = load_reg32(r1 + 1);
4079 tmp32_3 = load_reg32(r2);
4080
4081 tmp = tcg_temp_new_i64(); /* dividend */
4082 tmp2 = tcg_temp_new_i64(); /* divisor */
4083 tmp3 = tcg_temp_new_i64();
4084
4085 /* dividend is r(r1 << 32) | r(r1 + 1) */
4086 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4087 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
4088 tcg_gen_shli_i64(tmp, tmp, 32);
4089 tcg_gen_or_i64(tmp, tmp, tmp2);
4090
4091 /* divisor is r(r2) */
4092 tcg_gen_ext_i32_i64(tmp2, tmp32_3);
4093
4094 tcg_gen_div_i64(tmp3, tmp, tmp2);
4095 tcg_gen_rem_i64(tmp, tmp, tmp2);
4096
4097 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4098 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
4099
4100 store_reg32(r1, tmp32_1); /* remainder */
4101 store_reg32(r1 + 1, tmp32_2); /* quotient */
4102 tcg_temp_free_i32(tmp32_1);
4103 tcg_temp_free_i32(tmp32_2);
4104 tcg_temp_free_i32(tmp32_3);
4105 tcg_temp_free_i64(tmp);
4106 tcg_temp_free_i64(tmp2);
4107 tcg_temp_free_i64(tmp3);
4108 break;
4109 case 0x28: /* LDR R1,R2 [RR] */
4110 insn = ld_code2(s->pc);
4111 decode_rr(s, insn, &r1, &r2);
4112 tmp = load_freg(r2);
4113 store_freg(r1, tmp);
4114 tcg_temp_free_i64(tmp);
4115 break;
4116 case 0x38: /* LER R1,R2 [RR] */
4117 insn = ld_code2(s->pc);
4118 decode_rr(s, insn, &r1, &r2);
4119 tmp32_1 = load_freg32(r2);
4120 store_freg32(r1, tmp32_1);
4121 tcg_temp_free_i32(tmp32_1);
4122 break;
4123 case 0x40: /* STH R1,D2(X2,B2) [RX] */
4124 insn = ld_code4(s->pc);
4125 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4126 tmp2 = load_reg(r1);
4127 tcg_gen_qemu_st16(tmp2, tmp, get_mem_index(s));
4128 tcg_temp_free_i64(tmp);
4129 tcg_temp_free_i64(tmp2);
4130 break;
4131 case 0x41: /* la */
4132 insn = ld_code4(s->pc);
4133 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4134 store_reg(r1, tmp); /* FIXME: 31/24-bit addressing */
4135 tcg_temp_free_i64(tmp);
4136 break;
4137 case 0x42: /* STC R1,D2(X2,B2) [RX] */
4138 insn = ld_code4(s->pc);
4139 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4140 tmp2 = load_reg(r1);
4141 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4142 tcg_temp_free_i64(tmp);
4143 tcg_temp_free_i64(tmp2);
4144 break;
4145 case 0x43: /* IC R1,D2(X2,B2) [RX] */
4146 insn = ld_code4(s->pc);
4147 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4148 tmp2 = tcg_temp_new_i64();
4149 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4150 store_reg8(r1, tmp2);
4151 tcg_temp_free_i64(tmp);
4152 tcg_temp_free_i64(tmp2);
4153 break;
4154 case 0x44: /* EX R1,D2(X2,B2) [RX] */
4155 insn = ld_code4(s->pc);
4156 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4157 tmp2 = load_reg(r1);
4158 tmp3 = tcg_const_i64(s->pc + 4);
4159 update_psw_addr(s);
4160 gen_op_calc_cc(s);
4161 gen_helper_ex(cc_op, cc_op, tmp2, tmp, tmp3);
4162 set_cc_static(s);
4163 tcg_temp_free_i64(tmp);
4164 tcg_temp_free_i64(tmp2);
4165 tcg_temp_free_i64(tmp3);
4166 break;
4167 case 0x46: /* BCT R1,D2(X2,B2) [RX] */
4168 insn = ld_code4(s->pc);
4169 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4170 tcg_temp_free_i64(tmp);
4171
4172 tmp32_1 = load_reg32(r1);
4173 tcg_gen_subi_i32(tmp32_1, tmp32_1, 1);
4174 store_reg32(r1, tmp32_1);
4175
4176 gen_update_cc_op(s);
4177 l1 = gen_new_label();
4178 tcg_gen_brcondi_i32(TCG_COND_NE, tmp32_1, 0, l1);
4179
4180 /* not taking the branch, jump to after the instruction */
4181 gen_goto_tb(s, 0, s->pc + 4);
4182 gen_set_label(l1);
4183
4184 /* take the branch, move R2 into psw.addr */
4185 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4186 tcg_gen_mov_i64(psw_addr, tmp);
4187 s->is_jmp = DISAS_JUMP;
4188 tcg_temp_free_i32(tmp32_1);
4189 tcg_temp_free_i64(tmp);
4190 break;
4191 case 0x47: /* BC M1,D2(X2,B2) [RX] */
4192 insn = ld_code4(s->pc);
4193 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4194 gen_bcr(s, r1, tmp, s->pc + 4);
4195 tcg_temp_free_i64(tmp);
4196 s->is_jmp = DISAS_TB_JUMP;
4197 break;
4198 case 0x48: /* LH R1,D2(X2,B2) [RX] */
4199 insn = ld_code4(s->pc);
4200 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4201 tmp2 = tcg_temp_new_i64();
4202 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4203 store_reg32_i64(r1, tmp2);
4204 tcg_temp_free_i64(tmp);
4205 tcg_temp_free_i64(tmp2);
4206 break;
4207 case 0x49: /* CH R1,D2(X2,B2) [RX] */
4208 insn = ld_code4(s->pc);
4209 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4210 tmp32_1 = load_reg32(r1);
4211 tmp32_2 = tcg_temp_new_i32();
4212 tmp2 = tcg_temp_new_i64();
4213 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4214 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4215 cmp_s32(s, tmp32_1, tmp32_2);
4216 tcg_temp_free_i32(tmp32_1);
4217 tcg_temp_free_i32(tmp32_2);
4218 tcg_temp_free_i64(tmp);
4219 tcg_temp_free_i64(tmp2);
4220 break;
4221 case 0x4a: /* AH R1,D2(X2,B2) [RX] */
4222 case 0x4b: /* SH R1,D2(X2,B2) [RX] */
4223 case 0x4c: /* MH R1,D2(X2,B2) [RX] */
4224 insn = ld_code4(s->pc);
4225 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4226 tmp2 = tcg_temp_new_i64();
4227 tmp32_1 = load_reg32(r1);
4228 tmp32_2 = tcg_temp_new_i32();
4229 tmp32_3 = tcg_temp_new_i32();
4230
4231 tcg_gen_qemu_ld16s(tmp2, tmp, get_mem_index(s));
4232 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4233 switch (opc) {
4234 case 0x4a:
4235 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4236 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4237 break;
4238 case 0x4b:
4239 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4240 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4241 break;
4242 case 0x4c:
4243 tcg_gen_mul_i32(tmp32_3, tmp32_1, tmp32_2);
4244 break;
4245 default:
4246 tcg_abort();
4247 }
4248 store_reg32(r1, tmp32_3);
4249
4250 tcg_temp_free_i32(tmp32_1);
4251 tcg_temp_free_i32(tmp32_2);
4252 tcg_temp_free_i32(tmp32_3);
4253 tcg_temp_free_i64(tmp);
4254 tcg_temp_free_i64(tmp2);
4255 break;
4256 case 0x4d: /* BAS R1,D2(X2,B2) [RX] */
4257 insn = ld_code4(s->pc);
4258 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4259 tmp2 = tcg_const_i64(pc_to_link_info(s, s->pc + 4));
4260 store_reg(r1, tmp2);
4261 tcg_gen_mov_i64(psw_addr, tmp);
4262 tcg_temp_free_i64(tmp);
4263 tcg_temp_free_i64(tmp2);
4264 s->is_jmp = DISAS_JUMP;
4265 break;
4266 case 0x4e: /* CVD R1,D2(X2,B2) [RX] */
4267 insn = ld_code4(s->pc);
4268 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4269 tmp2 = tcg_temp_new_i64();
4270 tmp32_1 = tcg_temp_new_i32();
4271 tcg_gen_trunc_i64_i32(tmp32_1, regs[r1]);
4272 gen_helper_cvd(tmp2, tmp32_1);
4273 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
4274 tcg_temp_free_i64(tmp);
4275 tcg_temp_free_i64(tmp2);
4276 tcg_temp_free_i32(tmp32_1);
4277 break;
4278 case 0x50: /* st r1, d2(x2, b2) */
4279 insn = ld_code4(s->pc);
4280 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4281 tmp2 = load_reg(r1);
4282 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
4283 tcg_temp_free_i64(tmp);
4284 tcg_temp_free_i64(tmp2);
4285 break;
4286 case 0x55: /* CL R1,D2(X2,B2) [RX] */
4287 insn = ld_code4(s->pc);
4288 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4289 tmp2 = tcg_temp_new_i64();
4290 tmp32_1 = tcg_temp_new_i32();
4291 tmp32_2 = load_reg32(r1);
4292 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4293 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4294 cmp_u32(s, tmp32_2, tmp32_1);
4295 tcg_temp_free_i64(tmp);
4296 tcg_temp_free_i64(tmp2);
4297 tcg_temp_free_i32(tmp32_1);
4298 tcg_temp_free_i32(tmp32_2);
4299 break;
4300 case 0x54: /* N R1,D2(X2,B2) [RX] */
4301 case 0x56: /* O R1,D2(X2,B2) [RX] */
4302 case 0x57: /* X R1,D2(X2,B2) [RX] */
4303 insn = ld_code4(s->pc);
4304 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4305 tmp2 = tcg_temp_new_i64();
4306 tmp32_1 = load_reg32(r1);
4307 tmp32_2 = tcg_temp_new_i32();
4308 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4309 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4310 gen_and_or_xor_i32(opc, tmp32_1, tmp32_2);
4311 store_reg32(r1, tmp32_1);
4312 set_cc_nz_u32(s, tmp32_1);
4313 tcg_temp_free_i64(tmp);
4314 tcg_temp_free_i64(tmp2);
4315 tcg_temp_free_i32(tmp32_1);
4316 tcg_temp_free_i32(tmp32_2);
4317 break;
4318 case 0x58: /* l r1, d2(x2, b2) */
4319 insn = ld_code4(s->pc);
4320 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4321 tmp2 = tcg_temp_new_i64();
4322 tmp32_1 = tcg_temp_new_i32();
4323 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4324 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4325 store_reg32(r1, tmp32_1);
4326 tcg_temp_free_i64(tmp);
4327 tcg_temp_free_i64(tmp2);
4328 tcg_temp_free_i32(tmp32_1);
4329 break;
4330 case 0x59: /* C R1,D2(X2,B2) [RX] */
4331 insn = ld_code4(s->pc);
4332 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4333 tmp2 = tcg_temp_new_i64();
4334 tmp32_1 = tcg_temp_new_i32();
4335 tmp32_2 = load_reg32(r1);
4336 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4337 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4338 cmp_s32(s, tmp32_2, tmp32_1);
4339 tcg_temp_free_i64(tmp);
4340 tcg_temp_free_i64(tmp2);
4341 tcg_temp_free_i32(tmp32_1);
4342 tcg_temp_free_i32(tmp32_2);
4343 break;
4344 case 0x5a: /* A R1,D2(X2,B2) [RX] */
4345 case 0x5b: /* S R1,D2(X2,B2) [RX] */
4346 case 0x5e: /* AL R1,D2(X2,B2) [RX] */
4347 case 0x5f: /* SL R1,D2(X2,B2) [RX] */
4348 insn = ld_code4(s->pc);
4349 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4350 tmp32_1 = load_reg32(r1);
4351 tmp32_2 = tcg_temp_new_i32();
4352 tmp32_3 = tcg_temp_new_i32();
4353 tcg_gen_qemu_ld32s(tmp, tmp, get_mem_index(s));
4354 tcg_gen_trunc_i64_i32(tmp32_2, tmp);
4355 switch (opc) {
4356 case 0x5a:
4357 case 0x5e:
4358 tcg_gen_add_i32(tmp32_3, tmp32_1, tmp32_2);
4359 break;
4360 case 0x5b:
4361 case 0x5f:
4362 tcg_gen_sub_i32(tmp32_3, tmp32_1, tmp32_2);
4363 break;
4364 default:
4365 tcg_abort();
4366 }
4367 store_reg32(r1, tmp32_3);
4368 switch (opc) {
4369 case 0x5a:
4370 set_cc_add32(s, tmp32_1, tmp32_2, tmp32_3);
4371 break;
4372 case 0x5e:
4373 set_cc_addu32(s, tmp32_1, tmp32_2, tmp32_3);
4374 break;
4375 case 0x5b:
4376 set_cc_sub32(s, tmp32_1, tmp32_2, tmp32_3);
4377 break;
4378 case 0x5f:
4379 set_cc_subu32(s, tmp32_1, tmp32_2, tmp32_3);
4380 break;
4381 default:
4382 tcg_abort();
4383 }
4384 tcg_temp_free_i64(tmp);
4385 tcg_temp_free_i32(tmp32_1);
4386 tcg_temp_free_i32(tmp32_2);
4387 tcg_temp_free_i32(tmp32_3);
4388 break;
4389 case 0x5c: /* M R1,D2(X2,B2) [RX] */
4390 /* reg(r1, r1+1) = reg(r1+1) * *(s32*)addr */
4391 insn = ld_code4(s->pc);
4392 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4393 tmp2 = tcg_temp_new_i64();
4394 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4395 tmp3 = load_reg((r1 + 1) & 15);
4396 tcg_gen_ext32s_i64(tmp2, tmp2);
4397 tcg_gen_ext32s_i64(tmp3, tmp3);
4398 tcg_gen_mul_i64(tmp2, tmp2, tmp3);
4399 store_reg32_i64((r1 + 1) & 15, tmp2);
4400 tcg_gen_shri_i64(tmp2, tmp2, 32);
4401 store_reg32_i64(r1, tmp2);
4402 tcg_temp_free_i64(tmp);
4403 tcg_temp_free_i64(tmp2);
4404 tcg_temp_free_i64(tmp3);
4405 break;
4406 case 0x5d: /* D R1,D2(X2,B2) [RX] */
4407 insn = ld_code4(s->pc);
4408 tmp3 = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4409 tmp32_1 = load_reg32(r1);
4410 tmp32_2 = load_reg32(r1 + 1);
4411
4412 tmp = tcg_temp_new_i64();
4413 tmp2 = tcg_temp_new_i64();
4414
4415 /* dividend is r(r1 << 32) | r(r1 + 1) */
4416 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4417 tcg_gen_extu_i32_i64(tmp2, tmp32_2);
4418 tcg_gen_shli_i64(tmp, tmp, 32);
4419 tcg_gen_or_i64(tmp, tmp, tmp2);
4420
4421 /* divisor is in memory */
4422 tcg_gen_qemu_ld32s(tmp2, tmp3, get_mem_index(s));
4423
4424 /* XXX divisor == 0 -> FixP divide exception */
4425
4426 tcg_gen_div_i64(tmp3, tmp, tmp2);
4427 tcg_gen_rem_i64(tmp, tmp, tmp2);
4428
4429 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4430 tcg_gen_trunc_i64_i32(tmp32_2, tmp3);
4431
4432 store_reg32(r1, tmp32_1); /* remainder */
4433 store_reg32(r1 + 1, tmp32_2); /* quotient */
4434 tcg_temp_free_i32(tmp32_1);
4435 tcg_temp_free_i32(tmp32_2);
4436 tcg_temp_free_i64(tmp);
4437 tcg_temp_free_i64(tmp2);
4438 tcg_temp_free_i64(tmp3);
4439 break;
4440 case 0x60: /* STD R1,D2(X2,B2) [RX] */
4441 insn = ld_code4(s->pc);
4442 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4443 tmp2 = load_freg(r1);
4444 tcg_gen_qemu_st64(tmp2, tmp, get_mem_index(s));
4445 tcg_temp_free_i64(tmp);
4446 tcg_temp_free_i64(tmp2);
4447 break;
4448 case 0x68: /* LD R1,D2(X2,B2) [RX] */
4449 insn = ld_code4(s->pc);
4450 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4451 tmp2 = tcg_temp_new_i64();
4452 tcg_gen_qemu_ld64(tmp2, tmp, get_mem_index(s));
4453 store_freg(r1, tmp2);
4454 tcg_temp_free_i64(tmp);
4455 tcg_temp_free_i64(tmp2);
4456 break;
4457 case 0x70: /* STE R1,D2(X2,B2) [RX] */
4458 insn = ld_code4(s->pc);
4459 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4460 tmp2 = tcg_temp_new_i64();
4461 tmp32_1 = load_freg32(r1);
4462 tcg_gen_extu_i32_i64(tmp2, tmp32_1);
4463 tcg_gen_qemu_st32(tmp2, tmp, get_mem_index(s));
4464 tcg_temp_free_i64(tmp);
4465 tcg_temp_free_i64(tmp2);
4466 tcg_temp_free_i32(tmp32_1);
4467 break;
4468 case 0x71: /* MS R1,D2(X2,B2) [RX] */
4469 insn = ld_code4(s->pc);
4470 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4471 tmp2 = tcg_temp_new_i64();
4472 tmp32_1 = load_reg32(r1);
4473 tmp32_2 = tcg_temp_new_i32();
4474 tcg_gen_qemu_ld32s(tmp2, tmp, get_mem_index(s));
4475 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4476 tcg_gen_mul_i32(tmp32_1, tmp32_1, tmp32_2);
4477 store_reg32(r1, tmp32_1);
4478 tcg_temp_free_i64(tmp);
4479 tcg_temp_free_i64(tmp2);
4480 tcg_temp_free_i32(tmp32_1);
4481 tcg_temp_free_i32(tmp32_2);
4482 break;
4483 case 0x78: /* LE R1,D2(X2,B2) [RX] */
4484 insn = ld_code4(s->pc);
4485 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4486 tmp2 = tcg_temp_new_i64();
4487 tmp32_1 = tcg_temp_new_i32();
4488 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4489 tcg_gen_trunc_i64_i32(tmp32_1, tmp2);
4490 store_freg32(r1, tmp32_1);
4491 tcg_temp_free_i64(tmp);
4492 tcg_temp_free_i64(tmp2);
4493 tcg_temp_free_i32(tmp32_1);
4494 break;
4495 #ifndef CONFIG_USER_ONLY
4496 case 0x80: /* SSM D2(B2) [S] */
4497 /* Set System Mask */
4498 check_privileged(s, ilc);
4499 insn = ld_code4(s->pc);
4500 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4501 tmp = get_address(s, 0, b2, d2);
4502 tmp2 = tcg_temp_new_i64();
4503 tmp3 = tcg_temp_new_i64();
4504 tcg_gen_andi_i64(tmp3, psw_mask, ~0xff00000000000000ULL);
4505 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4506 tcg_gen_shli_i64(tmp2, tmp2, 56);
4507 tcg_gen_or_i64(psw_mask, tmp3, tmp2);
4508 tcg_temp_free_i64(tmp);
4509 tcg_temp_free_i64(tmp2);
4510 tcg_temp_free_i64(tmp3);
4511 break;
4512 case 0x82: /* LPSW D2(B2) [S] */
4513 /* Load PSW */
4514 check_privileged(s, ilc);
4515 insn = ld_code4(s->pc);
4516 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4517 tmp = get_address(s, 0, b2, d2);
4518 tmp2 = tcg_temp_new_i64();
4519 tmp3 = tcg_temp_new_i64();
4520 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4521 tcg_gen_addi_i64(tmp, tmp, 4);
4522 tcg_gen_qemu_ld32u(tmp3, tmp, get_mem_index(s));
4523 gen_helper_load_psw(tmp2, tmp3);
4524 tcg_temp_free_i64(tmp);
4525 tcg_temp_free_i64(tmp2);
4526 tcg_temp_free_i64(tmp3);
4527 /* we need to keep cc_op intact */
4528 s->is_jmp = DISAS_JUMP;
4529 break;
4530 case 0x83: /* DIAG R1,R3,D2 [RS] */
4531 /* Diagnose call (KVM hypercall) */
4532 check_privileged(s, ilc);
4533 potential_page_fault(s);
4534 insn = ld_code4(s->pc);
4535 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4536 tmp32_1 = tcg_const_i32(insn & 0xfff);
4537 tmp2 = load_reg(2);
4538 tmp3 = load_reg(1);
4539 gen_helper_diag(tmp2, tmp32_1, tmp2, tmp3);
4540 store_reg(2, tmp2);
4541 tcg_temp_free_i32(tmp32_1);
4542 tcg_temp_free_i64(tmp2);
4543 tcg_temp_free_i64(tmp3);
4544 break;
4545 #endif
4546 case 0x88: /* SRL R1,D2(B2) [RS] */
4547 case 0x89: /* SLL R1,D2(B2) [RS] */
4548 case 0x8a: /* SRA R1,D2(B2) [RS] */
4549 insn = ld_code4(s->pc);
4550 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4551 tmp = get_address(s, 0, b2, d2);
4552 tmp32_1 = load_reg32(r1);
4553 tmp32_2 = tcg_temp_new_i32();
4554 tcg_gen_trunc_i64_i32(tmp32_2, tmp);
4555 tcg_gen_andi_i32(tmp32_2, tmp32_2, 0x3f);
4556 switch (opc) {
4557 case 0x88:
4558 tcg_gen_shr_i32(tmp32_1, tmp32_1, tmp32_2);
4559 break;
4560 case 0x89:
4561 tcg_gen_shl_i32(tmp32_1, tmp32_1, tmp32_2);
4562 break;
4563 case 0x8a:
4564 tcg_gen_sar_i32(tmp32_1, tmp32_1, tmp32_2);
4565 set_cc_s32(s, tmp32_1);
4566 break;
4567 default:
4568 tcg_abort();
4569 }
4570 store_reg32(r1, tmp32_1);
4571 tcg_temp_free_i64(tmp);
4572 tcg_temp_free_i32(tmp32_1);
4573 tcg_temp_free_i32(tmp32_2);
4574 break;
4575 case 0x8c: /* SRDL R1,D2(B2) [RS] */
4576 case 0x8d: /* SLDL R1,D2(B2) [RS] */
4577 case 0x8e: /* SRDA R1,D2(B2) [RS] */
4578 insn = ld_code4(s->pc);
4579 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4580 tmp = get_address(s, 0, b2, d2); /* shift */
4581 tmp2 = tcg_temp_new_i64();
4582 tmp32_1 = load_reg32(r1);
4583 tmp32_2 = load_reg32(r1 + 1);
4584 tcg_gen_concat_i32_i64(tmp2, tmp32_2, tmp32_1); /* operand */
4585 switch (opc) {
4586 case 0x8c:
4587 tcg_gen_shr_i64(tmp2, tmp2, tmp);
4588 break;
4589 case 0x8d:
4590 tcg_gen_shl_i64(tmp2, tmp2, tmp);
4591 break;
4592 case 0x8e:
4593 tcg_gen_sar_i64(tmp2, tmp2, tmp);
4594 set_cc_s64(s, tmp2);
4595 break;
4596 }
4597 tcg_gen_shri_i64(tmp, tmp2, 32);
4598 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4599 store_reg32(r1, tmp32_1);
4600 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4601 store_reg32(r1 + 1, tmp32_2);
4602 tcg_temp_free_i64(tmp);
4603 tcg_temp_free_i64(tmp2);
4604 break;
4605 case 0x98: /* LM R1,R3,D2(B2) [RS] */
4606 case 0x90: /* STM R1,R3,D2(B2) [RS] */
4607 insn = ld_code4(s->pc);
4608 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4609
4610 tmp = get_address(s, 0, b2, d2);
4611 tmp2 = tcg_temp_new_i64();
4612 tmp3 = tcg_const_i64(4);
4613 tmp4 = tcg_const_i64(0xffffffff00000000ULL);
4614 for (i = r1;; i = (i + 1) % 16) {
4615 if (opc == 0x98) {
4616 tcg_gen_qemu_ld32u(tmp2, tmp, get_mem_index(s));
4617 tcg_gen_and_i64(regs[i], regs[i], tmp4);
4618 tcg_gen_or_i64(regs[i], regs[i], tmp2);
4619 } else {
4620 tcg_gen_qemu_st32(regs[i], tmp, get_mem_index(s));
4621 }
4622 if (i == r3) {
4623 break;
4624 }
4625 tcg_gen_add_i64(tmp, tmp, tmp3);
4626 }
4627 tcg_temp_free_i64(tmp);
4628 tcg_temp_free_i64(tmp2);
4629 tcg_temp_free_i64(tmp3);
4630 tcg_temp_free_i64(tmp4);
4631 break;
4632 case 0x91: /* TM D1(B1),I2 [SI] */
4633 insn = ld_code4(s->pc);
4634 tmp = decode_si(s, insn, &i2, &b1, &d1);
4635 tmp2 = tcg_const_i64(i2);
4636 tcg_gen_qemu_ld8u(tmp, tmp, get_mem_index(s));
4637 cmp_64(s, tmp, tmp2, CC_OP_TM_32);
4638 tcg_temp_free_i64(tmp);
4639 tcg_temp_free_i64(tmp2);
4640 break;
4641 case 0x92: /* MVI D1(B1),I2 [SI] */
4642 insn = ld_code4(s->pc);
4643 tmp = decode_si(s, insn, &i2, &b1, &d1);
4644 tmp2 = tcg_const_i64(i2);
4645 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4646 tcg_temp_free_i64(tmp);
4647 tcg_temp_free_i64(tmp2);
4648 break;
4649 case 0x94: /* NI D1(B1),I2 [SI] */
4650 case 0x96: /* OI D1(B1),I2 [SI] */
4651 case 0x97: /* XI D1(B1),I2 [SI] */
4652 insn = ld_code4(s->pc);
4653 tmp = decode_si(s, insn, &i2, &b1, &d1);
4654 tmp2 = tcg_temp_new_i64();
4655 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4656 switch (opc) {
4657 case 0x94:
4658 tcg_gen_andi_i64(tmp2, tmp2, i2);
4659 break;
4660 case 0x96:
4661 tcg_gen_ori_i64(tmp2, tmp2, i2);
4662 break;
4663 case 0x97:
4664 tcg_gen_xori_i64(tmp2, tmp2, i2);
4665 break;
4666 default:
4667 tcg_abort();
4668 }
4669 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4670 set_cc_nz_u64(s, tmp2);
4671 tcg_temp_free_i64(tmp);
4672 tcg_temp_free_i64(tmp2);
4673 break;
4674 case 0x95: /* CLI D1(B1),I2 [SI] */
4675 insn = ld_code4(s->pc);
4676 tmp = decode_si(s, insn, &i2, &b1, &d1);
4677 tmp2 = tcg_temp_new_i64();
4678 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4679 cmp_u64c(s, tmp2, i2);
4680 tcg_temp_free_i64(tmp);
4681 tcg_temp_free_i64(tmp2);
4682 break;
4683 case 0x9a: /* LAM R1,R3,D2(B2) [RS] */
4684 insn = ld_code4(s->pc);
4685 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4686 tmp = get_address(s, 0, b2, d2);
4687 tmp32_1 = tcg_const_i32(r1);
4688 tmp32_2 = tcg_const_i32(r3);
4689 potential_page_fault(s);
4690 gen_helper_lam(tmp32_1, tmp, tmp32_2);
4691 tcg_temp_free_i64(tmp);
4692 tcg_temp_free_i32(tmp32_1);
4693 tcg_temp_free_i32(tmp32_2);
4694 break;
4695 case 0x9b: /* STAM R1,R3,D2(B2) [RS] */
4696 insn = ld_code4(s->pc);
4697 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4698 tmp = get_address(s, 0, b2, d2);
4699 tmp32_1 = tcg_const_i32(r1);
4700 tmp32_2 = tcg_const_i32(r3);
4701 potential_page_fault(s);
4702 gen_helper_stam(tmp32_1, tmp, tmp32_2);
4703 tcg_temp_free_i64(tmp);
4704 tcg_temp_free_i32(tmp32_1);
4705 tcg_temp_free_i32(tmp32_2);
4706 break;
4707 case 0xa5:
4708 insn = ld_code4(s->pc);
4709 r1 = (insn >> 20) & 0xf;
4710 op = (insn >> 16) & 0xf;
4711 i2 = insn & 0xffff;
4712 disas_a5(s, op, r1, i2);
4713 break;
4714 case 0xa7:
4715 insn = ld_code4(s->pc);
4716 r1 = (insn >> 20) & 0xf;
4717 op = (insn >> 16) & 0xf;
4718 i2 = (short)insn;
4719 disas_a7(s, op, r1, i2);
4720 break;
4721 case 0xa8: /* MVCLE R1,R3,D2(B2) [RS] */
4722 insn = ld_code4(s->pc);
4723 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4724 tmp = get_address(s, 0, b2, d2);
4725 tmp32_1 = tcg_const_i32(r1);
4726 tmp32_2 = tcg_const_i32(r3);
4727 potential_page_fault(s);
4728 gen_helper_mvcle(cc_op, tmp32_1, tmp, tmp32_2);
4729 set_cc_static(s);
4730 tcg_temp_free_i64(tmp);
4731 tcg_temp_free_i32(tmp32_1);
4732 tcg_temp_free_i32(tmp32_2);
4733 break;
4734 case 0xa9: /* CLCLE R1,R3,D2(B2) [RS] */
4735 insn = ld_code4(s->pc);
4736 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4737 tmp = get_address(s, 0, b2, d2);
4738 tmp32_1 = tcg_const_i32(r1);
4739 tmp32_2 = tcg_const_i32(r3);
4740 potential_page_fault(s);
4741 gen_helper_clcle(cc_op, tmp32_1, tmp, tmp32_2);
4742 set_cc_static(s);
4743 tcg_temp_free_i64(tmp);
4744 tcg_temp_free_i32(tmp32_1);
4745 tcg_temp_free_i32(tmp32_2);
4746 break;
4747 #ifndef CONFIG_USER_ONLY
4748 case 0xac: /* STNSM D1(B1),I2 [SI] */
4749 case 0xad: /* STOSM D1(B1),I2 [SI] */
4750 check_privileged(s, ilc);
4751 insn = ld_code4(s->pc);
4752 tmp = decode_si(s, insn, &i2, &b1, &d1);
4753 tmp2 = tcg_temp_new_i64();
4754 tcg_gen_shri_i64(tmp2, psw_mask, 56);
4755 tcg_gen_qemu_st8(tmp2, tmp, get_mem_index(s));
4756 if (opc == 0xac) {
4757 tcg_gen_andi_i64(psw_mask, psw_mask,
4758 ((uint64_t)i2 << 56) | 0x00ffffffffffffffULL);
4759 } else {
4760 tcg_gen_ori_i64(psw_mask, psw_mask, (uint64_t)i2 << 56);
4761 }
4762 tcg_temp_free_i64(tmp);
4763 tcg_temp_free_i64(tmp2);
4764 break;
4765 case 0xae: /* SIGP R1,R3,D2(B2) [RS] */
4766 check_privileged(s, ilc);
4767 insn = ld_code4(s->pc);
4768 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4769 tmp = get_address(s, 0, b2, d2);
4770 tmp2 = load_reg(r3);
4771 tmp32_1 = tcg_const_i32(r1);
4772 potential_page_fault(s);
4773 gen_helper_sigp(cc_op, tmp, tmp32_1, tmp2);
4774 set_cc_static(s);
4775 tcg_temp_free_i64(tmp);
4776 tcg_temp_free_i64(tmp2);
4777 tcg_temp_free_i32(tmp32_1);
4778 break;
4779 case 0xb1: /* LRA R1,D2(X2, B2) [RX] */
4780 check_privileged(s, ilc);
4781 insn = ld_code4(s->pc);
4782 tmp = decode_rx(s, insn, &r1, &x2, &b2, &d2);
4783 tmp32_1 = tcg_const_i32(r1);
4784 potential_page_fault(s);
4785 gen_helper_lra(cc_op, tmp, tmp32_1);
4786 set_cc_static(s);
4787 tcg_temp_free_i64(tmp);
4788 tcg_temp_free_i32(tmp32_1);
4789 break;
4790 #endif
4791 case 0xb2:
4792 insn = ld_code4(s->pc);
4793 op = (insn >> 16) & 0xff;
4794 switch (op) {
4795 case 0x9c: /* STFPC D2(B2) [S] */
4796 d2 = insn & 0xfff;
4797 b2 = (insn >> 12) & 0xf;
4798 tmp32_1 = tcg_temp_new_i32();
4799 tmp = tcg_temp_new_i64();
4800 tmp2 = get_address(s, 0, b2, d2);
4801 tcg_gen_ld_i32(tmp32_1, cpu_env, offsetof(CPUState, fpc));
4802 tcg_gen_extu_i32_i64(tmp, tmp32_1);
4803 tcg_gen_qemu_st32(tmp, tmp2, get_mem_index(s));
4804 tcg_temp_free_i32(tmp32_1);
4805 tcg_temp_free_i64(tmp);
4806 tcg_temp_free_i64(tmp2);
4807 break;
4808 default:
4809 disas_b2(s, op, insn);
4810 break;
4811 }
4812 break;
4813 case 0xb3:
4814 insn = ld_code4(s->pc);
4815 op = (insn >> 16) & 0xff;
4816 r3 = (insn >> 12) & 0xf; /* aka m3 */
4817 r1 = (insn >> 4) & 0xf;
4818 r2 = insn & 0xf;
4819 disas_b3(s, op, r3, r1, r2);
4820 break;
4821 #ifndef CONFIG_USER_ONLY
4822 case 0xb6: /* STCTL R1,R3,D2(B2) [RS] */
4823 /* Store Control */
4824 check_privileged(s, ilc);
4825 insn = ld_code4(s->pc);
4826 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4827 tmp = get_address(s, 0, b2, d2);
4828 tmp32_1 = tcg_const_i32(r1);
4829 tmp32_2 = tcg_const_i32(r3);
4830 potential_page_fault(s);
4831 gen_helper_stctl(tmp32_1, tmp, tmp32_2);
4832 tcg_temp_free_i64(tmp);
4833 tcg_temp_free_i32(tmp32_1);
4834 tcg_temp_free_i32(tmp32_2);
4835 break;
4836 case 0xb7: /* LCTL R1,R3,D2(B2) [RS] */
4837 /* Load Control */
4838 check_privileged(s, ilc);
4839 insn = ld_code4(s->pc);
4840 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4841 tmp = get_address(s, 0, b2, d2);
4842 tmp32_1 = tcg_const_i32(r1);
4843 tmp32_2 = tcg_const_i32(r3);
4844 potential_page_fault(s);
4845 gen_helper_lctl(tmp32_1, tmp, tmp32_2);
4846 tcg_temp_free_i64(tmp);
4847 tcg_temp_free_i32(tmp32_1);
4848 tcg_temp_free_i32(tmp32_2);
4849 break;
4850 #endif
4851 case 0xb9:
4852 insn = ld_code4(s->pc);
4853 r1 = (insn >> 4) & 0xf;
4854 r2 = insn & 0xf;
4855 op = (insn >> 16) & 0xff;
4856 disas_b9(s, op, r1, r2);
4857 break;
4858 case 0xba: /* CS R1,R3,D2(B2) [RS] */
4859 insn = ld_code4(s->pc);
4860 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4861 tmp = get_address(s, 0, b2, d2);
4862 tmp32_1 = tcg_const_i32(r1);
4863 tmp32_2 = tcg_const_i32(r3);
4864 potential_page_fault(s);
4865 gen_helper_cs(cc_op, tmp32_1, tmp, tmp32_2);
4866 set_cc_static(s);
4867 tcg_temp_free_i64(tmp);
4868 tcg_temp_free_i32(tmp32_1);
4869 tcg_temp_free_i32(tmp32_2);
4870 break;
4871 case 0xbd: /* CLM R1,M3,D2(B2) [RS] */
4872 insn = ld_code4(s->pc);
4873 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4874 tmp = get_address(s, 0, b2, d2);
4875 tmp32_1 = load_reg32(r1);
4876 tmp32_2 = tcg_const_i32(r3);
4877 potential_page_fault(s);
4878 gen_helper_clm(cc_op, tmp32_1, tmp32_2, tmp);
4879 set_cc_static(s);
4880 tcg_temp_free_i64(tmp);
4881 tcg_temp_free_i32(tmp32_1);
4882 tcg_temp_free_i32(tmp32_2);
4883 break;
4884 case 0xbe: /* STCM R1,M3,D2(B2) [RS] */
4885 insn = ld_code4(s->pc);
4886 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4887 tmp = get_address(s, 0, b2, d2);
4888 tmp32_1 = load_reg32(r1);
4889 tmp32_2 = tcg_const_i32(r3);
4890 potential_page_fault(s);
4891 gen_helper_stcm(tmp32_1, tmp32_2, tmp);
4892 tcg_temp_free_i64(tmp);
4893 tcg_temp_free_i32(tmp32_1);
4894 tcg_temp_free_i32(tmp32_2);
4895 break;
4896 case 0xbf: /* ICM R1,M3,D2(B2) [RS] */
4897 insn = ld_code4(s->pc);
4898 decode_rs(s, insn, &r1, &r3, &b2, &d2);
4899 if (r3 == 15) {
4900 /* effectively a 32-bit load */
4901 tmp = get_address(s, 0, b2, d2);
4902 tmp32_1 = tcg_temp_new_i32();
4903 tmp32_2 = tcg_const_i32(r3);
4904 tcg_gen_qemu_ld32u(tmp, tmp, get_mem_index(s));
4905 store_reg32_i64(r1, tmp);
4906 tcg_gen_trunc_i64_i32(tmp32_1, tmp);
4907 set_cc_icm(s, tmp32_2, tmp32_1);
4908 tcg_temp_free_i64(tmp);
4909 tcg_temp_free_i32(tmp32_1);
4910 tcg_temp_free_i32(tmp32_2);
4911 } else if (r3) {
4912 uint32_t mask = 0x00ffffffUL;
4913 uint32_t shift = 24;
4914 int m3 = r3;
4915 tmp = get_address(s, 0, b2, d2);
4916 tmp2 = tcg_temp_new_i64();
4917 tmp32_1 = load_reg32(r1);
4918 tmp32_2 = tcg_temp_new_i32();
4919 tmp32_3 = tcg_const_i32(r3);
4920 tmp32_4 = tcg_const_i32(0);
4921 while (m3) {
4922 if (m3 & 8) {
4923 tcg_gen_qemu_ld8u(tmp2, tmp, get_mem_index(s));
4924 tcg_gen_trunc_i64_i32(tmp32_2, tmp2);
4925 if (shift) {
4926 tcg_gen_shli_i32(tmp32_2, tmp32_2, shift);
4927 }
4928 tcg_gen_andi_i32(tmp32_1, tmp32_1, mask);
4929 tcg_gen_or_i32(tmp32_1, tmp32_1, tmp32_2);
4930 tcg_gen_or_i32(tmp32_4, tmp32_4, tmp32_2);
4931 tcg_gen_addi_i64(tmp, tmp, 1);
4932 }
4933 m3 = (m3 << 1) & 0xf;
4934 mask = (mask >> 8) | 0xff000000UL;
4935 shift -= 8;
4936 }
4937 store_reg32(r1, tmp32_1);
4938 set_cc_icm(s, tmp32_3, tmp32_4);
4939 tcg_temp_free_i64(tmp);
4940 tcg_temp_free_i64(tmp2);
4941 tcg_temp_free_i32(tmp32_1);
4942 tcg_temp_free_i32(tmp32_2);
4943 tcg_temp_free_i32(tmp32_3);
4944 tcg_temp_free_i32(tmp32_4);
4945 } else {
4946 /* i.e. env->cc = 0 */
4947 gen_op_movi_cc(s, 0);
4948 }
4949 break;
4950 case 0xc0:
4951 case 0xc2:
4952 insn = ld_code6(s->pc);
4953 r1 = (insn >> 36) & 0xf;
4954 op = (insn >> 32) & 0xf;
4955 i2 = (int)insn;
4956 switch (opc) {
4957 case 0xc0:
4958 disas_c0(s, op, r1, i2);
4959 break;
4960 case 0xc2:
4961 disas_c2(s, op, r1, i2);
4962 break;
4963 default:
4964 tcg_abort();
4965 }
4966 break;
4967 case 0xd2: /* MVC D1(L,B1),D2(B2) [SS] */
4968 case 0xd4: /* NC D1(L,B1),D2(B2) [SS] */
4969 case 0xd5: /* CLC D1(L,B1),D2(B2) [SS] */
4970 case 0xd6: /* OC D1(L,B1),D2(B2) [SS] */
4971 case 0xd7: /* XC D1(L,B1),D2(B2) [SS] */
4972 case 0xdc: /* TR D1(L,B1),D2(B2) [SS] */
4973 case 0xf3: /* UNPK D1(L1,B1),D2(L2,B2) [SS] */
4974 insn = ld_code6(s->pc);
4975 vl = tcg_const_i32((insn >> 32) & 0xff);
4976 b1 = (insn >> 28) & 0xf;
4977 b2 = (insn >> 12) & 0xf;
4978 d1 = (insn >> 16) & 0xfff;
4979 d2 = insn & 0xfff;
4980 tmp = get_address(s, 0, b1, d1);
4981 tmp2 = get_address(s, 0, b2, d2);
4982 switch (opc) {
4983 case 0xd2:
4984 gen_op_mvc(s, (insn >> 32) & 0xff, tmp, tmp2);
4985 break;
4986 case 0xd4:
4987 potential_page_fault(s);
4988 gen_helper_nc(cc_op, vl, tmp, tmp2);
4989 set_cc_static(s);
4990 break;
4991 case 0xd5:
4992 gen_op_clc(s, (insn >> 32) & 0xff, tmp, tmp2);
4993 break;
4994 case 0xd6:
4995 potential_page_fault(s);
4996 gen_helper_oc(cc_op, vl, tmp, tmp2);
4997 set_cc_static(s);
4998 break;
4999 case 0xd7:
5000 potential_page_fault(s);
5001 gen_helper_xc(cc_op, vl, tmp, tmp2);
5002 set_cc_static(s);
5003 break;
5004 case 0xdc:
5005 potential_page_fault(s);
5006 gen_helper_tr(vl, tmp, tmp2);
5007 set_cc_static(s);
5008 break;
5009 case 0xf3:
5010 potential_page_fault(s);
5011 gen_helper_unpk(vl, tmp, tmp2);
5012 break;
5013 default:
5014 tcg_abort();
5015 }
5016 tcg_temp_free_i64(tmp);
5017 tcg_temp_free_i64(tmp2);
5018 break;
5019 #ifndef CONFIG_USER_ONLY
5020 case 0xda: /* MVCP D1(R1,B1),D2(B2),R3 [SS] */
5021 case 0xdb: /* MVCS D1(R1,B1),D2(B2),R3 [SS] */
5022 check_privileged(s, ilc);
5023 potential_page_fault(s);
5024 insn = ld_code6(s->pc);
5025 r1 = (insn >> 36) & 0xf;
5026 r3 = (insn >> 32) & 0xf;
5027 b1 = (insn >> 28) & 0xf;
5028 d1 = (insn >> 16) & 0xfff;
5029 b2 = (insn >> 12) & 0xf;
5030 d2 = insn & 0xfff;
5031 tmp = load_reg(r1);
5032 /* XXX key in r3 */
5033 tmp2 = get_address(s, 0, b1, d1);
5034 tmp3 = get_address(s, 0, b2, d2);
5035 if (opc == 0xda) {
5036 gen_helper_mvcp(cc_op, tmp, tmp2, tmp3);
5037 } else {
5038 gen_helper_mvcs(cc_op, tmp, tmp2, tmp3);
5039 }
5040 set_cc_static(s);
5041 tcg_temp_free_i64(tmp);
5042 tcg_temp_free_i64(tmp2);
5043 tcg_temp_free_i64(tmp3);
5044 break;
5045 #endif
5046 case 0xe3:
5047 insn = ld_code6(s->pc);
5048 debug_insn(insn);
5049 op = insn & 0xff;
5050 r1 = (insn >> 36) & 0xf;
5051 x2 = (insn >> 32) & 0xf;
5052 b2 = (insn >> 28) & 0xf;
5053 d2 = ((int)((((insn >> 16) & 0xfff)
5054 | ((insn << 4) & 0xff000)) << 12)) >> 12;
5055 disas_e3(s, op, r1, x2, b2, d2 );
5056 break;
5057 #ifndef CONFIG_USER_ONLY
5058 case 0xe5:
5059 /* Test Protection */
5060 check_privileged(s, ilc);
5061 insn = ld_code6(s->pc);
5062 debug_insn(insn);
5063 disas_e5(s, insn);
5064 break;
5065 #endif
5066 case 0xeb:
5067 insn = ld_code6(s->pc);
5068 debug_insn(insn);
5069 op = insn & 0xff;
5070 r1 = (insn >> 36) & 0xf;
5071 r3 = (insn >> 32) & 0xf;
5072 b2 = (insn >> 28) & 0xf;
5073 d2 = ((int)((((insn >> 16) & 0xfff)
5074 | ((insn << 4) & 0xff000)) << 12)) >> 12;
5075 disas_eb(s, op, r1, r3, b2, d2);
5076 break;
5077 case 0xed:
5078 insn = ld_code6(s->pc);
5079 debug_insn(insn);
5080 op = insn & 0xff;
5081 r1 = (insn >> 36) & 0xf;
5082 x2 = (insn >> 32) & 0xf;
5083 b2 = (insn >> 28) & 0xf;
5084 d2 = (short)((insn >> 16) & 0xfff);
5085 r1b = (insn >> 12) & 0xf;
5086 disas_ed(s, op, r1, x2, b2, d2, r1b);
5087 break;
5088 default:
5089 LOG_DISAS("unimplemented opcode 0x%x\n", opc);
5090 gen_illegal_opcode(s, ilc);
5091 break;
5092 }
5093
5094 /* Instruction length is encoded in the opcode */
5095 s->pc += (ilc * 2);
5096 }
5097
/*
 * Translate one basic block starting at tb->pc into TCG ops.
 *
 * env:       CPU whose breakpoints/single-step state gate translation.
 * tb:        translation block being filled; size/icount are stored back
 *            into it unless search_pc is set.
 * search_pc: when non-zero, record per-op guest PC / cc_op / icount into
 *            the gen_opc_* side tables so a host PC can later be mapped
 *            back to guest state (see restore_state_to_opc).
 */
static inline void gen_intermediate_code_internal(CPUState *env,
                                                  TranslationBlock *tb,
                                                  int search_pc)
{
    DisasContext dc;
    target_ulong pc_start;
    uint64_t next_page_start;
    uint16_t *gen_opc_end;
    int j, lj = -1;              /* lj: last gen_opc_* slot written */
    int num_insns, max_insns;
    CPUBreakpoint *bp;

    pc_start = tb->pc;

    /* 31-bit mode */
    if (!(tb->flags & FLAG_MASK_64)) {
        pc_start &= 0x7fffffff;
    }

    dc.pc = pc_start;
    dc.is_jmp = DISAS_NEXT;
    dc.tb = tb;
    dc.cc_op = CC_OP_DYNAMIC;

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    /* Never translate across a guest page boundary.  */
    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;

    num_insns = 0;
    /* CF_COUNT_MASK == 0 means "no limit requested"; use the maximum.  */
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }

    gen_icount_start();

    do {
        /* Emit a debug exception for any breakpoint at the current PC.  */
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == dc.pc) {
                    gen_debug(&dc);
                    break;
                }
            }
        }
        if (search_pc) {
            /* Pad slots for any TCG ops emitted since the last insn,
               then record guest state for the op starting this insn.  */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j) {
                    gen_opc_instr_start[lj++] = 0;
                }
            }
            gen_opc_pc[lj] = dc.pc;
            gen_opc_cc_op[lj] = dc.cc_op;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        }
        /* Bracket a possibly-I/O-performing final insn for icount.  */
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }
#if defined(S390X_DEBUG_DISAS_VERBOSE)
        LOG_DISAS("pc " TARGET_FMT_lx "\n",
                  dc.pc);
#endif
        disas_s390_insn(&dc);

        num_insns++;
        if (env->singlestep_enabled) {
            gen_debug(&dc);
        }
    } while (!dc.is_jmp && gen_opc_ptr < gen_opc_end && dc.pc < next_page_start
             && num_insns < max_insns && !env->singlestep_enabled
             && !singlestep);

    /* Block fell through: make psw.addr point at the next insn.  */
    if (!dc.is_jmp) {
        update_psw_addr(&dc);
    }

    if (singlestep && dc.cc_op != CC_OP_DYNAMIC) {
        gen_op_calc_cc(&dc);
    } else {
        /* next TB starts off with CC_OP_DYNAMIC, so make sure the cc op type
           is in env */
        gen_op_set_cc_op(&dc);
    }

    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    /* Generate the return instruction */
    if (dc.is_jmp != DISAS_TB_JUMP) {
        tcg_gen_exit_tb(0);
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (search_pc) {
        /* Zero-fill the remaining side-table slots.  */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j) {
            gen_opc_instr_start[lj++] = 0;
        }
    } else {
        tb->size = dc.pc - pc_start;
        tb->icount = num_insns;
    }
#if defined(S390X_DEBUG_DISAS)
    log_cpu_state_mask(CPU_LOG_TB_CPU, env, 0);
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, dc.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}
5213
5214 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
5215 {
5216 gen_intermediate_code_internal(env, tb, 0);
5217 }
5218
5219 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
5220 {
5221 gen_intermediate_code_internal(env, tb, 1);
5222 }
5223
5224 void restore_state_to_opc(CPUState *env, TranslationBlock *tb, int pc_pos)
5225 {
5226 int cc_op;
5227 env->psw.addr = gen_opc_pc[pc_pos];
5228 cc_op = gen_opc_cc_op[pc_pos];
5229 if ((cc_op != CC_OP_DYNAMIC) && (cc_op != CC_OP_STATIC)) {
5230 env->cc_op = cc_op;
5231 }
5232 }