1 /*
2 * PowerPC emulation for qemu: main translation routines.
3 *
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
6 *
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2.1 of the License, or (at your option) any later version.
11 *
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19 */
20
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg/tcg-op.h"
27 #include "tcg/tcg-op-gvec.h"
28 #include "qemu/host-utils.h"
29 #include "qemu/main-loop.h"
30 #include "exec/cpu_ldst.h"
31
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
34
35 #include "trace-tcg.h"
36 #include "exec/translator.h"
37 #include "exec/log.h"
38 #include "qemu/atomic128.h"
39 #include "spr_tcg.h"
40
41 #include "qemu/qemu-print.h"
42 #include "qapi/error.h"
43
44 #define CPU_SINGLE_STEP 0x1
45 #define CPU_BRANCH_STEP 0x2
46 #define GDBSTUB_SINGLE_STEP 0x4
47
48 /* Include definitions for instruction classes and implementation flags */
49 /* #define PPC_DEBUG_DISAS */
50 /* #define DO_PPC_STATISTICS */
51
52 #ifdef PPC_DEBUG_DISAS
53 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
54 #else
55 # define LOG_DISAS(...) do { } while (0)
56 #endif
57 /*****************************************************************************/
58 /* Code translation helpers */
59
60 /* global register indexes */
61 static char cpu_reg_names[10 * 3 + 22 * 4 /* GPR */
62 + 10 * 4 + 22 * 5 /* SPE GPRh */
63 + 8 * 5 /* CRF */];
64 static TCGv cpu_gpr[32];
65 static TCGv cpu_gprh[32];
66 static TCGv_i32 cpu_crf[8];
67 static TCGv cpu_nip;
68 static TCGv cpu_msr;
69 static TCGv cpu_ctr;
70 static TCGv cpu_lr;
71 #if defined(TARGET_PPC64)
72 static TCGv cpu_cfar;
73 #endif
74 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
75 static TCGv cpu_reserve;
76 static TCGv cpu_reserve_val;
77 static TCGv cpu_fpscr;
78 static TCGv_i32 cpu_access_type;
79
80 #include "exec/gen-icount.h"
81
82 void ppc_translate_init(void)
83 {
84 int i;
85 char *p;
86 size_t cpu_reg_names_size;
87
88 p = cpu_reg_names;
89 cpu_reg_names_size = sizeof(cpu_reg_names);
90
91 for (i = 0; i < 8; i++) {
92 snprintf(p, cpu_reg_names_size, "crf%d", i);
93 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
94 offsetof(CPUPPCState, crf[i]), p);
95 p += 5;
96 cpu_reg_names_size -= 5;
97 }
98
99 for (i = 0; i < 32; i++) {
100 snprintf(p, cpu_reg_names_size, "r%d", i);
101 cpu_gpr[i] = tcg_global_mem_new(cpu_env,
102 offsetof(CPUPPCState, gpr[i]), p);
103 p += (i < 10) ? 3 : 4;
104 cpu_reg_names_size -= (i < 10) ? 3 : 4;
105 snprintf(p, cpu_reg_names_size, "r%dH", i);
106 cpu_gprh[i] = tcg_global_mem_new(cpu_env,
107 offsetof(CPUPPCState, gprh[i]), p);
108 p += (i < 10) ? 4 : 5;
109 cpu_reg_names_size -= (i < 10) ? 4 : 5;
110 }
111
112 cpu_nip = tcg_global_mem_new(cpu_env,
113 offsetof(CPUPPCState, nip), "nip");
114
115 cpu_msr = tcg_global_mem_new(cpu_env,
116 offsetof(CPUPPCState, msr), "msr");
117
118 cpu_ctr = tcg_global_mem_new(cpu_env,
119 offsetof(CPUPPCState, ctr), "ctr");
120
121 cpu_lr = tcg_global_mem_new(cpu_env,
122 offsetof(CPUPPCState, lr), "lr");
123
124 #if defined(TARGET_PPC64)
125 cpu_cfar = tcg_global_mem_new(cpu_env,
126 offsetof(CPUPPCState, cfar), "cfar");
127 #endif
128
129 cpu_xer = tcg_global_mem_new(cpu_env,
130 offsetof(CPUPPCState, xer), "xer");
131 cpu_so = tcg_global_mem_new(cpu_env,
132 offsetof(CPUPPCState, so), "SO");
133 cpu_ov = tcg_global_mem_new(cpu_env,
134 offsetof(CPUPPCState, ov), "OV");
135 cpu_ca = tcg_global_mem_new(cpu_env,
136 offsetof(CPUPPCState, ca), "CA");
137 cpu_ov32 = tcg_global_mem_new(cpu_env,
138 offsetof(CPUPPCState, ov32), "OV32");
139 cpu_ca32 = tcg_global_mem_new(cpu_env,
140 offsetof(CPUPPCState, ca32), "CA32");
141
142 cpu_reserve = tcg_global_mem_new(cpu_env,
143 offsetof(CPUPPCState, reserve_addr),
144 "reserve_addr");
145 cpu_reserve_val = tcg_global_mem_new(cpu_env,
146 offsetof(CPUPPCState, reserve_val),
147 "reserve_val");
148
149 cpu_fpscr = tcg_global_mem_new(cpu_env,
150 offsetof(CPUPPCState, fpscr), "fpscr");
151
152 cpu_access_type = tcg_global_mem_new_i32(cpu_env,
153 offsetof(CPUPPCState, access_type),
154 "access_type");
155 }
156
157 /* internal defines */
158 struct DisasContext {
159 DisasContextBase base;
160 target_ulong cia; /* current instruction address */
161 uint32_t opcode;
162 uint32_t exception;
163 /* MSR bits relevant to translation (problem state, HV, DR, endianness) */
164 bool pr, hv, dr, le_mode;
165 bool lazy_tlb_flush;
166 bool need_access_type;
167 int mem_idx;
168 int access_type;
169 /* Translation flags */
170 MemOp default_tcg_memop_mask;
171 #if defined(TARGET_PPC64)
172 bool sf_mode;
173 bool has_cfar;
174 #endif
175 bool fpu_enabled;
176 bool altivec_enabled;
177 bool vsx_enabled;
178 bool spe_enabled;
179 bool tm_enabled;
180 bool gtse;
181 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
182 int singlestep_enabled;
183 uint32_t flags;
184 uint64_t insns_flags;
185 uint64_t insns_flags2;
186 };
187
188 #define DISAS_EXIT DISAS_TARGET_0 /* exit to main loop, pc updated */
189 #define DISAS_EXIT_UPDATE DISAS_TARGET_1 /* exit to main loop, pc stale */
190 #define DISAS_CHAIN DISAS_TARGET_2 /* lookup next tb, pc updated */
191 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3 /* lookup next tb, pc stale */
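/*
 * The EXIT forms return to the main loop, while the CHAIN forms may
 * chain directly to the next TB (typically via a TB lookup).  The
 * *_UPDATE forms indicate that cpu_nip is stale and still has to be
 * synchronized (normally from base.pc_next) before the TB is left.
 */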
192
193 /* Return true iff byteswap is needed in a scalar memop */
194 static inline bool need_byteswap(const DisasContext *ctx)
195 {
196 #if defined(TARGET_WORDS_BIGENDIAN)
197 return ctx->le_mode;
198 #else
199 return !ctx->le_mode;
200 #endif
201 }
202
203 /* True when active word size < size of target_long. */
204 #ifdef TARGET_PPC64
205 # define NARROW_MODE(C) (!(C)->sf_mode)
206 #else
207 # define NARROW_MODE(C) 0
208 #endif
209
210 struct opc_handler_t {
211 /* invalid bits for instruction 1 (Rc(opcode) == 0) */
212 uint32_t inval1;
213 /* invalid bits for instruction 2 (Rc(opcode) == 1) */
214 uint32_t inval2;
215 /* instruction type */
216 uint64_t type;
217 /* extended instruction type */
218 uint64_t type2;
219 /* handler */
220 void (*handler)(DisasContext *ctx);
221 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
222 const char *oname;
223 #endif
224 #if defined(DO_PPC_STATISTICS)
225 uint64_t count;
226 #endif
227 };
228
229 /* SPR load/store helpers */
230 static inline void gen_load_spr(TCGv t, int reg)
231 {
232 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
233 }
234
235 static inline void gen_store_spr(int reg, TCGv t)
236 {
237 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
238 }
239
240 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
241 {
242 if (ctx->need_access_type && ctx->access_type != access_type) {
243 tcg_gen_movi_i32(cpu_access_type, access_type);
244 ctx->access_type = access_type;
245 }
246 }
247
248 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
249 {
250 if (NARROW_MODE(ctx)) {
251 nip = (uint32_t)nip;
252 }
253 tcg_gen_movi_tl(cpu_nip, nip);
254 }
255
256 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
257 {
258 TCGv_i32 t0, t1;
259
260 /*
261 * These are all synchronous exceptions, so we set the PC back to the
262 * faulting instruction.
263 */
264 if (ctx->exception == POWERPC_EXCP_NONE) {
265 gen_update_nip(ctx, ctx->cia);
266 }
267 t0 = tcg_const_i32(excp);
268 t1 = tcg_const_i32(error);
269 gen_helper_raise_exception_err(cpu_env, t0, t1);
270 tcg_temp_free_i32(t0);
271 tcg_temp_free_i32(t1);
272 ctx->exception = excp;
273 ctx->base.is_jmp = DISAS_NORETURN;
274 }
275
276 static void gen_exception(DisasContext *ctx, uint32_t excp)
277 {
278 TCGv_i32 t0;
279
280 /*
281 * These are all synchronous exceptions, so we set the PC back to the
282 * faulting instruction.
283 */
284 if (ctx->exception == POWERPC_EXCP_NONE) {
285 gen_update_nip(ctx, ctx->cia);
286 }
287 t0 = tcg_const_i32(excp);
288 gen_helper_raise_exception(cpu_env, t0);
289 tcg_temp_free_i32(t0);
290 ctx->exception = excp;
291 ctx->base.is_jmp = DISAS_NORETURN;
292 }
293
294 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
295 target_ulong nip)
296 {
297 TCGv_i32 t0;
298
299 gen_update_nip(ctx, nip);
300 t0 = tcg_const_i32(excp);
301 gen_helper_raise_exception(cpu_env, t0);
302 tcg_temp_free_i32(t0);
303 ctx->exception = excp;
304 ctx->base.is_jmp = DISAS_NORETURN;
305 }
306
307 /*
308 * Tells the caller which exception to generate and prepares the SPR
309 * registers for that exception.
310 *
311 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
312 * POWERPC_EXCP_DEBUG (on BookE).
313 */
314 static uint32_t gen_prep_dbgex(DisasContext *ctx)
315 {
316 if (ctx->flags & POWERPC_FLAG_DE) {
317 target_ulong dbsr = 0;
318 if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
319 dbsr = DBCR0_ICMP;
320 } else {
321 /* Must have been a branch */
322 dbsr = DBCR0_BRT;
323 }
324 TCGv t0 = tcg_temp_new();
325 gen_load_spr(t0, SPR_BOOKE_DBSR);
326 tcg_gen_ori_tl(t0, t0, dbsr);
327 gen_store_spr(SPR_BOOKE_DBSR, t0);
328 tcg_temp_free(t0);
329 return POWERPC_EXCP_DEBUG;
330 } else {
331 return POWERPC_EXCP_TRACE;
332 }
333 }
334
335 static void gen_debug_exception(DisasContext *ctx)
336 {
337 gen_helper_raise_exception(cpu_env, tcg_constant_i32(EXCP_DEBUG));
338 ctx->base.is_jmp = DISAS_NORETURN;
339 }
340
341 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
342 {
343 /* Will be converted to program check if needed */
344 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
345 }
346
347 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
348 {
349 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
350 }
351
352 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
353 {
354 /* Will be converted to program check if needed */
355 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
356 }
357
358 /* Stop translation */
359 static inline void gen_stop_exception(DisasContext *ctx)
360 {
361 gen_update_nip(ctx, ctx->base.pc_next);
362 ctx->exception = POWERPC_EXCP_STOP;
363 }
364
365 #ifndef CONFIG_USER_ONLY
366 /* No need to update nip here, as execution flow will change */
367 static inline void gen_sync_exception(DisasContext *ctx)
368 {
369 ctx->exception = POWERPC_EXCP_SYNC;
370 }
371 #endif
372
373 /*****************************************************************************/
374 /* SPR READ/WRITE CALLBACKS */
375
376 void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
377 {
378 #if 0
379 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
380 printf("ERROR: try to access SPR %d !\n", sprn);
381 #endif
382 }
383
384 /* #define PPC_DUMP_SPR_ACCESSES */
385
386 /*
387 * Generic callbacks:
388 * do nothing but store/retrieve spr value
389 */
390 static void spr_load_dump_spr(int sprn)
391 {
392 #ifdef PPC_DUMP_SPR_ACCESSES
393 TCGv_i32 t0 = tcg_const_i32(sprn);
394 gen_helper_load_dump_spr(cpu_env, t0);
395 tcg_temp_free_i32(t0);
396 #endif
397 }
398
399 void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
400 {
401 gen_load_spr(cpu_gpr[gprn], sprn);
402 spr_load_dump_spr(sprn);
403 }
404
405 static void spr_store_dump_spr(int sprn)
406 {
407 #ifdef PPC_DUMP_SPR_ACCESSES
408 TCGv_i32 t0 = tcg_const_i32(sprn);
409 gen_helper_store_dump_spr(cpu_env, t0);
410 tcg_temp_free_i32(t0);
411 #endif
412 }
413
414 void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
415 {
416 gen_store_spr(sprn, cpu_gpr[gprn]);
417 spr_store_dump_spr(sprn);
418 }
419
420 #if !defined(CONFIG_USER_ONLY)
421 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
422 {
423 #ifdef TARGET_PPC64
424 TCGv t0 = tcg_temp_new();
425 tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
426 gen_store_spr(sprn, t0);
427 tcg_temp_free(t0);
428 spr_store_dump_spr(sprn);
429 #else
430 spr_write_generic(ctx, sprn, gprn);
431 #endif
432 }
433
434 void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
435 {
436 TCGv t0 = tcg_temp_new();
437 TCGv t1 = tcg_temp_new();
438 gen_load_spr(t0, sprn);
439 tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
440 tcg_gen_and_tl(t0, t0, t1);
441 gen_store_spr(sprn, t0);
442 tcg_temp_free(t0);
443 tcg_temp_free(t1);
444 }
445
446 void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
447 {
448 }
449
450 #endif
451
452 /* SPR common to all PowerPC */
453 /* XER */
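/*
 * QEMU keeps the SO, OV and CA bits (and OV32/CA32 on ISA 3.00) in
 * dedicated fields rather than in the XER image itself, so reading XER
 * reassembles them into the architected layout and writing XER splits
 * them back out again.
 */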
454 void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
455 {
456 TCGv dst = cpu_gpr[gprn];
457 TCGv t0 = tcg_temp_new();
458 TCGv t1 = tcg_temp_new();
459 TCGv t2 = tcg_temp_new();
460 tcg_gen_mov_tl(dst, cpu_xer);
461 tcg_gen_shli_tl(t0, cpu_so, XER_SO);
462 tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
463 tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
464 tcg_gen_or_tl(t0, t0, t1);
465 tcg_gen_or_tl(dst, dst, t2);
466 tcg_gen_or_tl(dst, dst, t0);
467 if (is_isa300(ctx)) {
468 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
469 tcg_gen_or_tl(dst, dst, t0);
470 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
471 tcg_gen_or_tl(dst, dst, t0);
472 }
473 tcg_temp_free(t0);
474 tcg_temp_free(t1);
475 tcg_temp_free(t2);
476 }
477
478 void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
479 {
480 TCGv src = cpu_gpr[gprn];
481 /* Write all flags unconditionally; the isa300 check is done when reading back */
482 tcg_gen_andi_tl(cpu_xer, src,
483 ~((1u << XER_SO) |
484 (1u << XER_OV) | (1u << XER_OV32) |
485 (1u << XER_CA) | (1u << XER_CA32)));
486 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
487 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
488 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
489 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
490 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
491 }
492
493 /* LR */
494 void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
495 {
496 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
497 }
498
499 void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
500 {
501 tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
502 }
503
504 /* CFAR */
505 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
506 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
507 {
508 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
509 }
510
511 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
512 {
513 tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
514 }
515 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
516
517 /* CTR */
518 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
519 {
520 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
521 }
522
523 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
524 {
525 tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
526 }
527
528 /* User read access to SPR */
529 /* USPRx */
530 /* UMMCRx */
531 /* UPMCx */
532 /* USIA */
533 /* UDECR */
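/*
 * These user-mode SPR aliases sit 0x10 below their privileged
 * counterparts in the SPR numbering, hence the sprn + 0x10 redirection.
 */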
534 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
535 {
536 gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
537 }
538
539 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
540 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
541 {
542 gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
543 }
544 #endif
545
546 /* SPR common to all non-embedded PowerPC */
547 /* DECR */
548 #if !defined(CONFIG_USER_ONLY)
549 void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
550 {
551 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
552 gen_io_start();
553 }
554 gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
555 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
556 gen_stop_exception(ctx);
557 }
558 }
559
560 void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
561 {
562 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
563 gen_io_start();
564 }
565 gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
566 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
567 gen_stop_exception(ctx);
568 }
569 }
570 #endif
571
572 /* SPR common to all non-embedded PowerPC, except 601 */
573 /* Time base */
574 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
575 {
576 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
577 gen_io_start();
578 }
579 gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
580 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
581 gen_io_end();
582 gen_stop_exception(ctx);
583 }
584 }
585
586 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
587 {
588 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
589 gen_io_start();
590 }
591 gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
592 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
593 gen_io_end();
594 gen_stop_exception(ctx);
595 }
596 }
597
598 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
599 {
600 gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
601 }
602
603 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
604 {
605 gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
606 }
607
608 #if !defined(CONFIG_USER_ONLY)
609 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
610 {
611 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
612 gen_io_start();
613 }
614 gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
615 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
616 gen_io_end();
617 gen_stop_exception(ctx);
618 }
619 }
620
621 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
622 {
623 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
624 gen_io_start();
625 }
626 gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
627 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
628 gen_io_end();
629 gen_stop_exception(ctx);
630 }
631 }
632
633 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
634 {
635 gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
636 }
637
638 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
639 {
640 gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
641 }
642
643 #if defined(TARGET_PPC64)
644 void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
645 {
646 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
647 gen_io_start();
648 }
649 gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
650 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
651 gen_stop_exception(ctx);
652 }
653 }
654
655 void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
656 {
657 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
658 gen_io_start();
659 }
660 gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
661 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
662 gen_stop_exception(ctx);
663 }
664 }
665
666 /* HDECR */
667 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
668 {
669 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
670 gen_io_start();
671 }
672 gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
673 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
674 gen_io_end();
675 gen_stop_exception(ctx);
676 }
677 }
678
679 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
680 {
681 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
682 gen_io_start();
683 }
684 gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
685 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
686 gen_io_end();
687 gen_stop_exception(ctx);
688 }
689 }
690
691 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
692 {
693 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
694 gen_io_start();
695 }
696 gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
697 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
698 gen_stop_exception(ctx);
699 }
700 }
701
702 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
703 {
704 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
705 gen_io_start();
706 }
707 gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
708 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
709 gen_stop_exception(ctx);
710 }
711 }
712
713 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
714 {
715 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
716 gen_io_start();
717 }
718 gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
719 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
720 gen_stop_exception(ctx);
721 }
722 }
723
724 #endif
725 #endif
726
727 #if !defined(CONFIG_USER_ONLY)
728 /* IBAT0U...IBAT7U */
729 /* IBAT0L...IBAT7L */
730 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
731 {
732 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
733 offsetof(CPUPPCState,
734 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
735 }
736
737 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
738 {
739 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
740 offsetof(CPUPPCState,
741 IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
742 }
743
744 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
745 {
746 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
747 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
748 tcg_temp_free_i32(t0);
749 }
750
751 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
752 {
753 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
754 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
755 tcg_temp_free_i32(t0);
756 }
757
758 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
759 {
760 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
761 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
762 tcg_temp_free_i32(t0);
763 }
764
765 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
766 {
767 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
768 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
769 tcg_temp_free_i32(t0);
770 }
771
772 /* DBAT0U...DBAT7U */
773 /* DBAT0L...DBAT7L */
774 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
775 {
776 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
777 offsetof(CPUPPCState,
778 DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
779 }
780
781 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
782 {
783 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
784 offsetof(CPUPPCState,
785 DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
786 }
787
788 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
789 {
790 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
791 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
792 tcg_temp_free_i32(t0);
793 }
794
795 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
796 {
797 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
798 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
799 tcg_temp_free_i32(t0);
800 }
801
802 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
803 {
804 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
805 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
806 tcg_temp_free_i32(t0);
807 }
808
809 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
810 {
811 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
812 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
813 tcg_temp_free_i32(t0);
814 }
815
816 /* SDR1 */
817 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
818 {
819 gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
820 }
821
822 #if defined(TARGET_PPC64)
823 /* 64-bit PowerPC specific SPRs */
824 /* PIDR */
825 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
826 {
827 gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
828 }
829
830 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
831 {
832 gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
833 }
834
835 void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
836 {
837 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
838 }
839
840 void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
841 {
842 TCGv t0 = tcg_temp_new();
843 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
844 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
845 tcg_temp_free(t0);
846 }
847 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
848 {
849 gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
850 }
851
852 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
853 {
854 gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
855 }
856
857 /* DPDES */
858 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
859 {
860 gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
861 }
862
863 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
864 {
865 gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
866 }
867 #endif
868 #endif
869
870 /* PowerPC 601 specific registers */
871 /* RTC */
872 void spr_read_601_rtcl(DisasContext *ctx, int gprn, int sprn)
873 {
874 gen_helper_load_601_rtcl(cpu_gpr[gprn], cpu_env);
875 }
876
877 void spr_read_601_rtcu(DisasContext *ctx, int gprn, int sprn)
878 {
879 gen_helper_load_601_rtcu(cpu_gpr[gprn], cpu_env);
880 }
881
882 #if !defined(CONFIG_USER_ONLY)
883 void spr_write_601_rtcu(DisasContext *ctx, int sprn, int gprn)
884 {
885 gen_helper_store_601_rtcu(cpu_env, cpu_gpr[gprn]);
886 }
887
888 void spr_write_601_rtcl(DisasContext *ctx, int sprn, int gprn)
889 {
890 gen_helper_store_601_rtcl(cpu_env, cpu_gpr[gprn]);
891 }
892
893 void spr_write_hid0_601(DisasContext *ctx, int sprn, int gprn)
894 {
895 gen_helper_store_hid0_601(cpu_env, cpu_gpr[gprn]);
896 /* Must stop the translation as endianness may have changed */
897 gen_stop_exception(ctx);
898 }
899 #endif
900
901 /* Unified BATs */
902 #if !defined(CONFIG_USER_ONLY)
903 void spr_read_601_ubat(DisasContext *ctx, int gprn, int sprn)
904 {
905 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
906 offsetof(CPUPPCState,
907 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
908 }
909
910 void spr_write_601_ubatu(DisasContext *ctx, int sprn, int gprn)
911 {
912 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
913 gen_helper_store_601_batl(cpu_env, t0, cpu_gpr[gprn]);
914 tcg_temp_free_i32(t0);
915 }
916
917 void spr_write_601_ubatl(DisasContext *ctx, int sprn, int gprn)
918 {
919 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
920 gen_helper_store_601_batu(cpu_env, t0, cpu_gpr[gprn]);
921 tcg_temp_free_i32(t0);
922 }
923 #endif
924
925 /* PowerPC 40x specific registers */
926 #if !defined(CONFIG_USER_ONLY)
927 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
928 {
929 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
930 gen_io_start();
931 }
932 gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
933 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
934 gen_stop_exception(ctx);
935 }
936 }
937
938 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
939 {
940 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
941 gen_io_start();
942 }
943 gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
944 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
945 gen_stop_exception(ctx);
946 }
947 }
948
949 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
950 {
951 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
952 gen_io_start();
953 }
954 gen_store_spr(sprn, cpu_gpr[gprn]);
955 gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
956 /* We must stop translation as we may have rebooted */
957 gen_stop_exception(ctx);
958 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
959 gen_stop_exception(ctx);
960 }
961 }
962
963 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
964 {
965 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
966 gen_io_start();
967 }
968 gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
969 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
970 gen_stop_exception(ctx);
971 }
972 }
973
974 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
975 {
976 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
977 gen_io_start();
978 }
979 gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
980 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
981 gen_stop_exception(ctx);
982 }
983 }
984
985 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
986 {
987 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
988 gen_io_start();
989 }
990 gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
991 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
992 gen_stop_exception(ctx);
993 }
994 }
995 #endif
996
997 /* PowerPC 403 specific registers */
998 /* PBL1 / PBU1 / PBL2 / PBU2 */
999 #if !defined(CONFIG_USER_ONLY)
1000 void spr_read_403_pbr(DisasContext *ctx, int gprn, int sprn)
1001 {
1002 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
1003 offsetof(CPUPPCState, pb[sprn - SPR_403_PBL1]));
1004 }
1005
1006 void spr_write_403_pbr(DisasContext *ctx, int sprn, int gprn)
1007 {
1008 TCGv_i32 t0 = tcg_const_i32(sprn - SPR_403_PBL1);
1009 gen_helper_store_403_pbr(cpu_env, t0, cpu_gpr[gprn]);
1010 tcg_temp_free_i32(t0);
1011 }
1012
1013 void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
1014 {
1015 TCGv t0 = tcg_temp_new();
1016 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
1017 gen_store_spr(SPR_PIR, t0);
1018 tcg_temp_free(t0);
1019 }
1020 #endif
1021
1022 /* SPE specific registers */
1023 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
1024 {
1025 TCGv_i32 t0 = tcg_temp_new_i32();
1026 tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
1027 tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
1028 tcg_temp_free_i32(t0);
1029 }
1030
1031 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
1032 {
1033 TCGv_i32 t0 = tcg_temp_new_i32();
1034 tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
1035 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
1036 tcg_temp_free_i32(t0);
1037 }
1038
1039 #if !defined(CONFIG_USER_ONLY)
1040 /* Callback used to write the exception vector base */
1041 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
1042 {
1043 TCGv t0 = tcg_temp_new();
1044 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
1045 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
1046 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
1047 gen_store_spr(sprn, t0);
1048 tcg_temp_free(t0);
1049 }
1050
1051 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
1052 {
1053 int sprn_offs;
1054
1055 if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
1056 sprn_offs = sprn - SPR_BOOKE_IVOR0;
1057 } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
1058 sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
1059 } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
1060 sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
1061 } else {
1062 printf("Trying to write an unknown exception vector %d %03x\n",
1063 sprn, sprn);
1064 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
1065 return;
1066 }
1067
1068 TCGv t0 = tcg_temp_new();
1069 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
1070 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
1071 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
1072 gen_store_spr(sprn, t0);
1073 tcg_temp_free(t0);
1074 }
1075 #endif
1076
1077 #ifdef TARGET_PPC64
1078 #ifndef CONFIG_USER_ONLY
1079 void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
1080 {
1081 TCGv t0 = tcg_temp_new();
1082 TCGv t1 = tcg_temp_new();
1083 TCGv t2 = tcg_temp_new();
1084
1085 /*
1086 * Note, the HV=1 PR=0 case is handled earlier by simply using
1087 * spr_write_generic for HV mode in the SPR table
1088 */
1089
1090 /* Build insertion mask into t1 based on context */
1091 if (ctx->pr) {
1092 gen_load_spr(t1, SPR_UAMOR);
1093 } else {
1094 gen_load_spr(t1, SPR_AMOR);
1095 }
1096
1097 /* Mask new bits into t2 */
1098 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1099
1100 /* Load AMR and clear new bits in t0 */
1101 gen_load_spr(t0, SPR_AMR);
1102 tcg_gen_andc_tl(t0, t0, t1);
1103
1104 /* OR in the new bits and write it out */
1105 tcg_gen_or_tl(t0, t0, t2);
1106 gen_store_spr(SPR_AMR, t0);
1107 spr_store_dump_spr(SPR_AMR);
1108
1109 tcg_temp_free(t0);
1110 tcg_temp_free(t1);
1111 tcg_temp_free(t2);
1112 }
1113
1114 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
1115 {
1116 TCGv t0 = tcg_temp_new();
1117 TCGv t1 = tcg_temp_new();
1118 TCGv t2 = tcg_temp_new();
1119
1120 /*
1121 * Note, the HV=1 case is handled earlier by simply using
1122 * spr_write_generic for HV mode in the SPR table
1123 */
1124
1125 /* Build insertion mask into t1 based on context */
1126 gen_load_spr(t1, SPR_AMOR);
1127
1128 /* Mask new bits into t2 */
1129 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1130
1131 /* Load UAMOR and clear new bits in t0 */
1132 gen_load_spr(t0, SPR_UAMOR);
1133 tcg_gen_andc_tl(t0, t0, t1);
1134
1135 /* OR in the new bits and write it out */
1136 tcg_gen_or_tl(t0, t0, t2);
1137 gen_store_spr(SPR_UAMOR, t0);
1138 spr_store_dump_spr(SPR_UAMOR);
1139
1140 tcg_temp_free(t0);
1141 tcg_temp_free(t1);
1142 tcg_temp_free(t2);
1143 }
1144
1145 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
1146 {
1147 TCGv t0 = tcg_temp_new();
1148 TCGv t1 = tcg_temp_new();
1149 TCGv t2 = tcg_temp_new();
1150
1151 /*
1152 * Note, the HV=1 case is handled earlier by simply using
1153 * spr_write_generic for HV mode in the SPR table
1154 */
1155
1156 /* Build insertion mask into t1 based on context */
1157 gen_load_spr(t1, SPR_AMOR);
1158
1159 /* Mask new bits into t2 */
1160 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1161
1162 /* Load AMR and clear new bits in t0 */
1163 gen_load_spr(t0, SPR_IAMR);
1164 tcg_gen_andc_tl(t0, t0, t1);
1165
1166 /* OR in the new bits and write it out */
1167 tcg_gen_or_tl(t0, t0, t2);
1168 gen_store_spr(SPR_IAMR, t0);
1169 spr_store_dump_spr(SPR_IAMR);
1170
1171 tcg_temp_free(t0);
1172 tcg_temp_free(t1);
1173 tcg_temp_free(t2);
1174 }
1175 #endif
1176 #endif
1177
1178 #ifndef CONFIG_USER_ONLY
1179 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
1180 {
1181 gen_helper_fixup_thrm(cpu_env);
1182 gen_load_spr(cpu_gpr[gprn], sprn);
1183 spr_load_dump_spr(sprn);
1184 }
1185 #endif /* !CONFIG_USER_ONLY */
1186
1187 #if !defined(CONFIG_USER_ONLY)
1188 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1189 {
1190 TCGv t0 = tcg_temp_new();
1191
1192 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1193 gen_store_spr(sprn, t0);
1194 tcg_temp_free(t0);
1195 }
1196
1197 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1198 {
1199 TCGv t0 = tcg_temp_new();
1200
1201 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1202 gen_store_spr(sprn, t0);
1203 tcg_temp_free(t0);
1204 }
1205
1206 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1207 {
1208 TCGv t0 = tcg_temp_new();
1209
1210 tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1211 ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1212 gen_store_spr(sprn, t0);
1213 tcg_temp_free(t0);
1214 }
1215
1216 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
1217 {
1218 gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
1219 }
1220
1221 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1222 {
1223 TCGv_i32 t0 = tcg_const_i32(sprn);
1224 gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
1225 tcg_temp_free_i32(t0);
1226 }
1227 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
1228 {
1229 gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
1230 }
1231 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
1232 {
1233 gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
1234 }
1235
1236 #endif
1237
1238 #if !defined(CONFIG_USER_ONLY)
1239 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1240 {
1241 TCGv val = tcg_temp_new();
1242 tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1243 gen_store_spr(SPR_BOOKE_MAS3, val);
1244 tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1245 gen_store_spr(SPR_BOOKE_MAS7, val);
1246 tcg_temp_free(val);
1247 }
1248
1249 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1250 {
1251 TCGv mas7 = tcg_temp_new();
1252 TCGv mas3 = tcg_temp_new();
1253 gen_load_spr(mas7, SPR_BOOKE_MAS7);
1254 tcg_gen_shli_tl(mas7, mas7, 32);
1255 gen_load_spr(mas3, SPR_BOOKE_MAS3);
1256 tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1257 tcg_temp_free(mas3);
1258 tcg_temp_free(mas7);
1259 }
1260
1261 #endif
1262
1263 #ifdef TARGET_PPC64
1264 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1265 int bit, int sprn, int cause)
1266 {
1267 TCGv_i32 t1 = tcg_const_i32(bit);
1268 TCGv_i32 t2 = tcg_const_i32(sprn);
1269 TCGv_i32 t3 = tcg_const_i32(cause);
1270
1271 gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);
1272
1273 tcg_temp_free_i32(t3);
1274 tcg_temp_free_i32(t2);
1275 tcg_temp_free_i32(t1);
1276 }
1277
1278 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1279 int bit, int sprn, int cause)
1280 {
1281 TCGv_i32 t1 = tcg_const_i32(bit);
1282 TCGv_i32 t2 = tcg_const_i32(sprn);
1283 TCGv_i32 t3 = tcg_const_i32(cause);
1284
1285 gen_helper_msr_facility_check(cpu_env, t1, t2, t3);
1286
1287 tcg_temp_free_i32(t3);
1288 tcg_temp_free_i32(t2);
1289 tcg_temp_free_i32(t1);
1290 }
1291
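/*
 * Accessors for the "upper 32 bits" view of a 64-bit SPR: the companion
 * SPR number is one above the full register, so sprn - 1 is accessed and
 * only bits 32..63 are read or deposited.
 */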
1292 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1293 {
1294 TCGv spr_up = tcg_temp_new();
1295 TCGv spr = tcg_temp_new();
1296
1297 gen_load_spr(spr, sprn - 1);
1298 tcg_gen_shri_tl(spr_up, spr, 32);
1299 tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1300
1301 tcg_temp_free(spr);
1302 tcg_temp_free(spr_up);
1303 }
1304
1305 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1306 {
1307 TCGv spr = tcg_temp_new();
1308
1309 gen_load_spr(spr, sprn - 1);
1310 tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1311 gen_store_spr(sprn - 1, spr);
1312
1313 tcg_temp_free(spr);
1314 }
1315
1316 #if !defined(CONFIG_USER_ONLY)
1317 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1318 {
1319 TCGv hmer = tcg_temp_new();
1320
1321 gen_load_spr(hmer, sprn);
1322 tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1323 gen_store_spr(sprn, hmer);
1324 spr_store_dump_spr(sprn);
1325 tcg_temp_free(hmer);
1326 }
1327
1328 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1329 {
1330 gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
1331 }
1332 #endif /* !defined(CONFIG_USER_ONLY) */
1333
1334 void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1335 {
1336 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1337 spr_read_generic(ctx, gprn, sprn);
1338 }
1339
1340 void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1341 {
1342 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1343 spr_write_generic(ctx, sprn, gprn);
1344 }
1345
1346 void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1347 {
1348 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1349 spr_read_generic(ctx, gprn, sprn);
1350 }
1351
1352 void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1353 {
1354 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1355 spr_write_generic(ctx, sprn, gprn);
1356 }
1357
1358 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1359 {
1360 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1361 spr_read_prev_upper32(ctx, gprn, sprn);
1362 }
1363
1364 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1365 {
1366 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1367 spr_write_prev_upper32(ctx, sprn, gprn);
1368 }
1369
1370 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1371 {
1372 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1373 spr_read_generic(ctx, gprn, sprn);
1374 }
1375
1376 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1377 {
1378 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1379 spr_write_generic(ctx, sprn, gprn);
1380 }
1381
1382 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1383 {
1384 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1385 spr_read_prev_upper32(ctx, gprn, sprn);
1386 }
1387
1388 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1389 {
1390 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1391 spr_write_prev_upper32(ctx, sprn, gprn);
1392 }
1393 #endif
1394
1395 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
1396 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
1397
1398 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
1399 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
1400
1401 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
1402 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
1403
1404 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
1405 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
1406
1407 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \
1408 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
1409
1410 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
1411 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1412
1413 typedef struct opcode_t {
1414 unsigned char opc1, opc2, opc3, opc4;
1415 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1416 unsigned char pad[4];
1417 #endif
1418 opc_handler_t handler;
1419 const char *oname;
1420 } opcode_t;
1421
1422 /* Helpers for priv. check */
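/*
 * ctx->pr is the MSR problem-state (user mode) bit, ctx->hv the
 * hypervisor-state bit and ctx->dr the data-relocation bit.  CHK_SV
 * rejects user mode, CHK_HV additionally requires hypervisor state, and
 * CHK_HVRM requires hypervisor state with data relocation off
 * (hypervisor real mode).
 */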
1423 #define GEN_PRIV \
1424 do { \
1425 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
1426 } while (0)
1427
1428 #if defined(CONFIG_USER_ONLY)
1429 #define CHK_HV GEN_PRIV
1430 #define CHK_SV GEN_PRIV
1431 #define CHK_HVRM GEN_PRIV
1432 #else
1433 #define CHK_HV \
1434 do { \
1435 if (unlikely(ctx->pr || !ctx->hv)) { \
1436 GEN_PRIV; \
1437 } \
1438 } while (0)
1439 #define CHK_SV \
1440 do { \
1441 if (unlikely(ctx->pr)) { \
1442 GEN_PRIV; \
1443 } \
1444 } while (0)
1445 #define CHK_HVRM \
1446 do { \
1447 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
1448 GEN_PRIV; \
1449 } \
1450 } while (0)
1451 #endif
1452
1453 #define CHK_NONE
1454
1455 /*****************************************************************************/
1456 /* PowerPC instructions table */
1457
1458 #if defined(DO_PPC_STATISTICS)
1459 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
1460 { \
1461 .opc1 = op1, \
1462 .opc2 = op2, \
1463 .opc3 = op3, \
1464 .opc4 = 0xff, \
1465 .handler = { \
1466 .inval1 = invl, \
1467 .type = _typ, \
1468 .type2 = _typ2, \
1469 .handler = &gen_##name, \
1470 .oname = stringify(name), \
1471 }, \
1472 .oname = stringify(name), \
1473 }
1474 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
1475 { \
1476 .opc1 = op1, \
1477 .opc2 = op2, \
1478 .opc3 = op3, \
1479 .opc4 = 0xff, \
1480 .handler = { \
1481 .inval1 = invl1, \
1482 .inval2 = invl2, \
1483 .type = _typ, \
1484 .type2 = _typ2, \
1485 .handler = &gen_##name, \
1486 .oname = stringify(name), \
1487 }, \
1488 .oname = stringify(name), \
1489 }
1490 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
1491 { \
1492 .opc1 = op1, \
1493 .opc2 = op2, \
1494 .opc3 = op3, \
1495 .opc4 = 0xff, \
1496 .handler = { \
1497 .inval1 = invl, \
1498 .type = _typ, \
1499 .type2 = _typ2, \
1500 .handler = &gen_##name, \
1501 .oname = onam, \
1502 }, \
1503 .oname = onam, \
1504 }
1505 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
1506 { \
1507 .opc1 = op1, \
1508 .opc2 = op2, \
1509 .opc3 = op3, \
1510 .opc4 = op4, \
1511 .handler = { \
1512 .inval1 = invl, \
1513 .type = _typ, \
1514 .type2 = _typ2, \
1515 .handler = &gen_##name, \
1516 .oname = stringify(name), \
1517 }, \
1518 .oname = stringify(name), \
1519 }
1520 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \
1521 { \
1522 .opc1 = op1, \
1523 .opc2 = op2, \
1524 .opc3 = op3, \
1525 .opc4 = op4, \
1526 .handler = { \
1527 .inval1 = invl, \
1528 .type = _typ, \
1529 .type2 = _typ2, \
1530 .handler = &gen_##name, \
1531 .oname = onam, \
1532 }, \
1533 .oname = onam, \
1534 }
1535 #else
1536 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
1537 { \
1538 .opc1 = op1, \
1539 .opc2 = op2, \
1540 .opc3 = op3, \
1541 .opc4 = 0xff, \
1542 .handler = { \
1543 .inval1 = invl, \
1544 .type = _typ, \
1545 .type2 = _typ2, \
1546 .handler = &gen_##name, \
1547 }, \
1548 .oname = stringify(name), \
1549 }
1550 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
1551 { \
1552 .opc1 = op1, \
1553 .opc2 = op2, \
1554 .opc3 = op3, \
1555 .opc4 = 0xff, \
1556 .handler = { \
1557 .inval1 = invl1, \
1558 .inval2 = invl2, \
1559 .type = _typ, \
1560 .type2 = _typ2, \
1561 .handler = &gen_##name, \
1562 }, \
1563 .oname = stringify(name), \
1564 }
1565 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
1566 { \
1567 .opc1 = op1, \
1568 .opc2 = op2, \
1569 .opc3 = op3, \
1570 .opc4 = 0xff, \
1571 .handler = { \
1572 .inval1 = invl, \
1573 .type = _typ, \
1574 .type2 = _typ2, \
1575 .handler = &gen_##name, \
1576 }, \
1577 .oname = onam, \
1578 }
1579 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
1580 { \
1581 .opc1 = op1, \
1582 .opc2 = op2, \
1583 .opc3 = op3, \
1584 .opc4 = op4, \
1585 .handler = { \
1586 .inval1 = invl, \
1587 .type = _typ, \
1588 .type2 = _typ2, \
1589 .handler = &gen_##name, \
1590 }, \
1591 .oname = stringify(name), \
1592 }
1593 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \
1594 { \
1595 .opc1 = op1, \
1596 .opc2 = op2, \
1597 .opc3 = op3, \
1598 .opc4 = op4, \
1599 .handler = { \
1600 .inval1 = invl, \
1601 .type = _typ, \
1602 .type2 = _typ2, \
1603 .handler = &gen_##name, \
1604 }, \
1605 .oname = onam, \
1606 }
1607 #endif
1608
1609 /* Invalid instruction */
1610 static void gen_invalid(DisasContext *ctx)
1611 {
1612 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1613 }
1614
1615 static opc_handler_t invalid_handler = {
1616 .inval1 = 0xFFFFFFFF,
1617 .inval2 = 0xFFFFFFFF,
1618 .type = PPC_NONE,
1619 .type2 = PPC_NONE,
1620 .handler = gen_invalid,
1621 };
1622
1623 /*** Integer comparison ***/
1624
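/*
 * gen_op_cmp() builds the 4-bit CR field without branches: it starts
 * from CRF_EQ, overrides it with CRF_LT or CRF_GT via movcond as
 * appropriate, and finally ORs in the current SO bit.
 */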
1625 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
1626 {
1627 TCGv t0 = tcg_temp_new();
1628 TCGv t1 = tcg_temp_new();
1629 TCGv_i32 t = tcg_temp_new_i32();
1630
1631 tcg_gen_movi_tl(t0, CRF_EQ);
1632 tcg_gen_movi_tl(t1, CRF_LT);
1633 tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1634 t0, arg0, arg1, t1, t0);
1635 tcg_gen_movi_tl(t1, CRF_GT);
1636 tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1637 t0, arg0, arg1, t1, t0);
1638
1639 tcg_gen_trunc_tl_i32(t, t0);
1640 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1641 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1642
1643 tcg_temp_free(t0);
1644 tcg_temp_free(t1);
1645 tcg_temp_free_i32(t);
1646 }
1647
1648 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1649 {
1650 TCGv t0 = tcg_const_tl(arg1);
1651 gen_op_cmp(arg0, t0, s, crf);
1652 tcg_temp_free(t0);
1653 }
1654
1655 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1656 {
1657 TCGv t0, t1;
1658 t0 = tcg_temp_new();
1659 t1 = tcg_temp_new();
1660 if (s) {
1661 tcg_gen_ext32s_tl(t0, arg0);
1662 tcg_gen_ext32s_tl(t1, arg1);
1663 } else {
1664 tcg_gen_ext32u_tl(t0, arg0);
1665 tcg_gen_ext32u_tl(t1, arg1);
1666 }
1667 gen_op_cmp(t0, t1, s, crf);
1668 tcg_temp_free(t1);
1669 tcg_temp_free(t0);
1670 }
1671
1672 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1673 {
1674 TCGv t0 = tcg_const_tl(arg1);
1675 gen_op_cmp32(arg0, t0, s, crf);
1676 tcg_temp_free(t0);
1677 }
1678
1679 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1680 {
1681 if (NARROW_MODE(ctx)) {
1682 gen_op_cmpi32(reg, 0, 1, 0);
1683 } else {
1684 gen_op_cmpi(reg, 0, 1, 0);
1685 }
1686 }
1687
1688 /* cmp */
1689 static void gen_cmp(DisasContext *ctx)
1690 {
1691 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1692 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1693 1, crfD(ctx->opcode));
1694 } else {
1695 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1696 1, crfD(ctx->opcode));
1697 }
1698 }
1699
1700 /* cmpi */
1701 static void gen_cmpi(DisasContext *ctx)
1702 {
1703 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1704 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
1705 1, crfD(ctx->opcode));
1706 } else {
1707 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
1708 1, crfD(ctx->opcode));
1709 }
1710 }
1711
1712 /* cmpl */
1713 static void gen_cmpl(DisasContext *ctx)
1714 {
1715 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1716 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1717 0, crfD(ctx->opcode));
1718 } else {
1719 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1720 0, crfD(ctx->opcode));
1721 }
1722 }
1723
1724 /* cmpli */
1725 static void gen_cmpli(DisasContext *ctx)
1726 {
1727 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1728 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
1729 0, crfD(ctx->opcode));
1730 } else {
1731 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
1732 0, crfD(ctx->opcode));
1733 }
1734 }
1735
1736 /* cmprb - range comparison: isupper, isalpha, islower */
1737 static void gen_cmprb(DisasContext *ctx)
1738 {
1739 TCGv_i32 src1 = tcg_temp_new_i32();
1740 TCGv_i32 src2 = tcg_temp_new_i32();
1741 TCGv_i32 src2lo = tcg_temp_new_i32();
1742 TCGv_i32 src2hi = tcg_temp_new_i32();
1743 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];
1744
1745 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
1746 tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);
1747
1748 tcg_gen_andi_i32(src1, src1, 0xFF);
1749 tcg_gen_ext8u_i32(src2lo, src2);
1750 tcg_gen_shri_i32(src2, src2, 8);
1751 tcg_gen_ext8u_i32(src2hi, src2);
1752
1753 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1754 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1755 tcg_gen_and_i32(crf, src2lo, src2hi);
1756
1757 if (ctx->opcode & 0x00200000) {
1758 tcg_gen_shri_i32(src2, src2, 8);
1759 tcg_gen_ext8u_i32(src2lo, src2);
1760 tcg_gen_shri_i32(src2, src2, 8);
1761 tcg_gen_ext8u_i32(src2hi, src2);
1762 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1763 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1764 tcg_gen_and_i32(src2lo, src2lo, src2hi);
1765 tcg_gen_or_i32(crf, crf, src2lo);
1766 }
1767 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
1768 tcg_temp_free_i32(src1);
1769 tcg_temp_free_i32(src2);
1770 tcg_temp_free_i32(src2lo);
1771 tcg_temp_free_i32(src2hi);
1772 }
1773
1774 #if defined(TARGET_PPC64)
1775 /* cmpeqb */
1776 static void gen_cmpeqb(DisasContext *ctx)
1777 {
1778 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1779 cpu_gpr[rB(ctx->opcode)]);
1780 }
1781 #endif
1782
1783 /* isel (PowerPC 2.03 specification) */
1784 static void gen_isel(DisasContext *ctx)
1785 {
1786 uint32_t bi = rC(ctx->opcode);
1787 uint32_t mask = 0x08 >> (bi & 0x03);
1788 TCGv t0 = tcg_temp_new();
1789 TCGv zr;
1790
1791 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
1792 tcg_gen_andi_tl(t0, t0, mask);
1793
1794 zr = tcg_const_tl(0);
1795 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
1796 rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
1797 cpu_gpr[rB(ctx->opcode)]);
1798 tcg_temp_free(zr);
1799 tcg_temp_free(t0);
1800 }
1801
1802 /* cmpb: PowerPC 2.05 specification */
1803 static void gen_cmpb(DisasContext *ctx)
1804 {
1805 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
1806 cpu_gpr[rB(ctx->opcode)]);
1807 }
1808
1809 /*** Integer arithmetic ***/
1810
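/*
 * Signed overflow is computed from the sign bits alone.  For an addition
 * (sub == 0, arg0 == arg1 + arg2) it is set when both operands have the
 * same sign but the result does not, i.e. the top bit of
 * (arg0 ^ arg2) & ~(arg1 ^ arg2); the sub == 1 form uses
 * (arg0 ^ arg2) & (arg1 ^ arg2) instead, matching subf-style callers
 * that compute arg2 - arg1.
 */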
1811 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1812 TCGv arg1, TCGv arg2, int sub)
1813 {
1814 TCGv t0 = tcg_temp_new();
1815
1816 tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1817 tcg_gen_xor_tl(t0, arg1, arg2);
1818 if (sub) {
1819 tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1820 } else {
1821 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1822 }
1823 tcg_temp_free(t0);
1824 if (NARROW_MODE(ctx)) {
1825 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1826 if (is_isa300(ctx)) {
1827 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1828 }
1829 } else {
1830 if (is_isa300(ctx)) {
1831 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1832 }
1833 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1834 }
1835 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1836 }
1837
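/*
 * For an addition res = a + b (+ carry-in), the carry into bit 32 equals
 * bit 32 of (a ^ b ^ res).  A subtraction x - y is carried out as
 * x + ~y + 1, so the same identity holds with ~(x ^ y), hence the eqv
 * in the sub case.
 */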
1838 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1839 TCGv res, TCGv arg0, TCGv arg1,
1840 TCGv ca32, int sub)
1841 {
1842 TCGv t0;
1843
1844 if (!is_isa300(ctx)) {
1845 return;
1846 }
1847
1848 t0 = tcg_temp_new();
1849 if (sub) {
1850 tcg_gen_eqv_tl(t0, arg0, arg1);
1851 } else {
1852 tcg_gen_xor_tl(t0, arg0, arg1);
1853 }
1854 tcg_gen_xor_tl(t0, t0, res);
1855 tcg_gen_extract_tl(ca32, t0, 32, 1);
1856 tcg_temp_free(t0);
1857 }
1858
1859 /* Common add function */
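/*
 * In the non-narrow case the carry is obtained with tcg_gen_add2_tl():
 * adding double-words whose high parts are zero makes the carry-out of
 * the low word appear directly in the high-word result, i.e. in ca.
 */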
1860 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1861 TCGv arg2, TCGv ca, TCGv ca32,
1862 bool add_ca, bool compute_ca,
1863 bool compute_ov, bool compute_rc0)
1864 {
1865 TCGv t0 = ret;
1866
1867 if (compute_ca || compute_ov) {
1868 t0 = tcg_temp_new();
1869 }
1870
1871 if (compute_ca) {
1872 if (NARROW_MODE(ctx)) {
1873 /*
1874 * Caution: a non-obvious corner case of the spec is that
1875 * we must produce the *entire* 64-bit addition, but
1876 * produce the carry into bit 32.
1877 */
1878 TCGv t1 = tcg_temp_new();
1879 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */
1880 tcg_gen_add_tl(t0, arg1, arg2);
1881 if (add_ca) {
1882 tcg_gen_add_tl(t0, t0, ca);
1883 }
1884 tcg_gen_xor_tl(ca, t0, t1); /* bits changed w/ carry */
1885 tcg_temp_free(t1);
1886 tcg_gen_extract_tl(ca, ca, 32, 1);
1887 if (is_isa300(ctx)) {
1888 tcg_gen_mov_tl(ca32, ca);
1889 }
1890 } else {
1891 TCGv zero = tcg_const_tl(0);
1892 if (add_ca) {
1893 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1894 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1895 } else {
1896 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1897 }
1898 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1899 tcg_temp_free(zero);
1900 }
1901 } else {
1902 tcg_gen_add_tl(t0, arg1, arg2);
1903 if (add_ca) {
1904 tcg_gen_add_tl(t0, t0, ca);
1905 }
1906 }
1907
1908 if (compute_ov) {
1909 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1910 }
1911 if (unlikely(compute_rc0)) {
1912 gen_set_Rc0(ctx, t0);
1913 }
1914
1915 if (t0 != ret) {
1916 tcg_gen_mov_tl(ret, t0);
1917 tcg_temp_free(t0);
1918 }
1919 }
1920 /* Add functions with two operands */
1921 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \
1922 static void glue(gen_, name)(DisasContext *ctx) \
1923 { \
1924 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1925 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1926 ca, glue(ca, 32), \
1927 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1928 }
1929 /* Add functions with one operand and one immediate */
1930 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \
1931 add_ca, compute_ca, compute_ov) \
1932 static void glue(gen_, name)(DisasContext *ctx) \
1933 { \
1934 TCGv t0 = tcg_const_tl(const_val); \
1935 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1936 cpu_gpr[rA(ctx->opcode)], t0, \
1937 ca, glue(ca, 32), \
1938 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1939 tcg_temp_free(t0); \
1940 }
1941
1942 /* add add. addo addo. */
1943 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
1944 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
1945 /* addc addc. addco addco. */
1946 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
1947 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
1948 /* adde adde. addeo addeo. */
1949 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
1950 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
1951 /* addme addme. addmeo addmeo. */
1952 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
1953 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
1954 /* addex */
1955 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
1956 /* addze addze. addzeo addzeo. */
1957 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
1958 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
1959 /* addi */
1960 static void gen_addi(DisasContext *ctx)
1961 {
1962 target_long simm = SIMM(ctx->opcode);
1963
1964 if (rA(ctx->opcode) == 0) {
1965 /* li case */
1966 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
1967 } else {
1968 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
1969 cpu_gpr[rA(ctx->opcode)], simm);
1970 }
1971 }
1972 /* addic addic.*/
1973 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1974 {
1975 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1976 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1977 c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
1978 tcg_temp_free(c);
1979 }
1980
1981 static void gen_addic(DisasContext *ctx)
1982 {
1983 gen_op_addic(ctx, 0);
1984 }
1985
1986 static void gen_addic_(DisasContext *ctx)
1987 {
1988 gen_op_addic(ctx, 1);
1989 }
1990
1991 /* addis */
1992 static void gen_addis(DisasContext *ctx)
1993 {
1994 target_long simm = SIMM(ctx->opcode);
1995
1996 if (rA(ctx->opcode) == 0) {
1997 /* lis case */
1998 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
1999 } else {
2000 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
2001 cpu_gpr[rA(ctx->opcode)], simm << 16);
2002 }
2003 }
2004
2005 /* addpcis */
2006 static void gen_addpcis(DisasContext *ctx)
2007 {
2008 target_long d = DX(ctx->opcode);
2009
2010 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16));
2011 }
2012
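/*
 * Note on the divide helpers below (explanatory, not from the original
 * source): t2 is set to 1 whenever the architected result is undefined,
 * i.e. INT_MIN / -1 for the signed case, or any division by zero.  The
 * movcond then substitutes that 1 for the divisor, so the host-side TCG
 * division can never trap; t2 is also what feeds OV/OV32 when compute_ov
 * is requested.  The modulo helpers further down reuse the same guard.
 */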
2013 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
2014 TCGv arg2, int sign, int compute_ov)
2015 {
2016 TCGv_i32 t0 = tcg_temp_new_i32();
2017 TCGv_i32 t1 = tcg_temp_new_i32();
2018 TCGv_i32 t2 = tcg_temp_new_i32();
2019 TCGv_i32 t3 = tcg_temp_new_i32();
2020
2021 tcg_gen_trunc_tl_i32(t0, arg1);
2022 tcg_gen_trunc_tl_i32(t1, arg2);
2023 if (sign) {
2024 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
2025 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
2026 tcg_gen_and_i32(t2, t2, t3);
2027 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
2028 tcg_gen_or_i32(t2, t2, t3);
2029 tcg_gen_movi_i32(t3, 0);
2030 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
2031 tcg_gen_div_i32(t3, t0, t1);
2032 tcg_gen_extu_i32_tl(ret, t3);
2033 } else {
2034 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
2035 tcg_gen_movi_i32(t3, 0);
2036 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
2037 tcg_gen_divu_i32(t3, t0, t1);
2038 tcg_gen_extu_i32_tl(ret, t3);
2039 }
2040 if (compute_ov) {
2041 tcg_gen_extu_i32_tl(cpu_ov, t2);
2042 if (is_isa300(ctx)) {
2043 tcg_gen_extu_i32_tl(cpu_ov32, t2);
2044 }
2045 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2046 }
2047 tcg_temp_free_i32(t0);
2048 tcg_temp_free_i32(t1);
2049 tcg_temp_free_i32(t2);
2050 tcg_temp_free_i32(t3);
2051
2052 if (unlikely(Rc(ctx->opcode) != 0)) {
2053 gen_set_Rc0(ctx, ret);
2054 }
2055 }
2056 /* Div functions */
2057 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
2058 static void glue(gen_, name)(DisasContext *ctx) \
2059 { \
2060 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
2061 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2062 sign, compute_ov); \
2063 }
2064 /* divwu divwu. divwuo divwuo. */
2065 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
2066 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
2067 /* divw divw. divwo divwo. */
2068 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
2069 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
2070
2071 /* div[wd]eu[o][.] */
2072 #define GEN_DIVE(name, hlpr, compute_ov) \
2073 static void gen_##name(DisasContext *ctx) \
2074 { \
2075 TCGv_i32 t0 = tcg_const_i32(compute_ov); \
2076 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \
2077 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
2078 tcg_temp_free_i32(t0); \
2079 if (unlikely(Rc(ctx->opcode) != 0)) { \
2080 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
2081 } \
2082 }
2083
2084 GEN_DIVE(divweu, divweu, 0);
2085 GEN_DIVE(divweuo, divweu, 1);
2086 GEN_DIVE(divwe, divwe, 0);
2087 GEN_DIVE(divweo, divwe, 1);
2088
2089 #if defined(TARGET_PPC64)
2090 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
2091 TCGv arg2, int sign, int compute_ov)
2092 {
2093 TCGv_i64 t0 = tcg_temp_new_i64();
2094 TCGv_i64 t1 = tcg_temp_new_i64();
2095 TCGv_i64 t2 = tcg_temp_new_i64();
2096 TCGv_i64 t3 = tcg_temp_new_i64();
2097
2098 tcg_gen_mov_i64(t0, arg1);
2099 tcg_gen_mov_i64(t1, arg2);
2100 if (sign) {
2101 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
2102 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
2103 tcg_gen_and_i64(t2, t2, t3);
2104 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
2105 tcg_gen_or_i64(t2, t2, t3);
2106 tcg_gen_movi_i64(t3, 0);
2107 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
2108 tcg_gen_div_i64(ret, t0, t1);
2109 } else {
2110 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
2111 tcg_gen_movi_i64(t3, 0);
2112 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
2113 tcg_gen_divu_i64(ret, t0, t1);
2114 }
2115 if (compute_ov) {
2116 tcg_gen_mov_tl(cpu_ov, t2);
2117 if (is_isa300(ctx)) {
2118 tcg_gen_mov_tl(cpu_ov32, t2);
2119 }
2120 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2121 }
2122 tcg_temp_free_i64(t0);
2123 tcg_temp_free_i64(t1);
2124 tcg_temp_free_i64(t2);
2125 tcg_temp_free_i64(t3);
2126
2127 if (unlikely(Rc(ctx->opcode) != 0)) {
2128 gen_set_Rc0(ctx, ret);
2129 }
2130 }
2131
2132 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
2133 static void glue(gen_, name)(DisasContext *ctx) \
2134 { \
2135 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
2136 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2137 sign, compute_ov); \
2138 }
2139 /* divdu divdu. divduo divduo. */
2140 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
2141 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
2142 /* divd divd. divdo divdo. */
2143 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
2144 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
2145
2146 GEN_DIVE(divdeu, divdeu, 0);
2147 GEN_DIVE(divdeuo, divdeu, 1);
2148 GEN_DIVE(divde, divde, 0);
2149 GEN_DIVE(divdeo, divde, 1);
2150 #endif
2151
2152 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
2153 TCGv arg2, int sign)
2154 {
2155 TCGv_i32 t0 = tcg_temp_new_i32();
2156 TCGv_i32 t1 = tcg_temp_new_i32();
2157
2158 tcg_gen_trunc_tl_i32(t0, arg1);
2159 tcg_gen_trunc_tl_i32(t1, arg2);
2160 if (sign) {
2161 TCGv_i32 t2 = tcg_temp_new_i32();
2162 TCGv_i32 t3 = tcg_temp_new_i32();
2163 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
2164 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
2165 tcg_gen_and_i32(t2, t2, t3);
2166 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
2167 tcg_gen_or_i32(t2, t2, t3);
2168 tcg_gen_movi_i32(t3, 0);
2169 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
2170 tcg_gen_rem_i32(t3, t0, t1);
2171 tcg_gen_ext_i32_tl(ret, t3);
2172 tcg_temp_free_i32(t2);
2173 tcg_temp_free_i32(t3);
2174 } else {
2175 TCGv_i32 t2 = tcg_const_i32(1);
2176 TCGv_i32 t3 = tcg_const_i32(0);
2177 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
2178 tcg_gen_remu_i32(t3, t0, t1);
2179 tcg_gen_extu_i32_tl(ret, t3);
2180 tcg_temp_free_i32(t2);
2181 tcg_temp_free_i32(t3);
2182 }
2183 tcg_temp_free_i32(t0);
2184 tcg_temp_free_i32(t1);
2185 }
2186
2187 #define GEN_INT_ARITH_MODW(name, opc3, sign) \
2188 static void glue(gen_, name)(DisasContext *ctx) \
2189 { \
2190 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \
2191 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2192 sign); \
2193 }
2194
2195 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
2196 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
2197
2198 #if defined(TARGET_PPC64)
2199 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
2200 TCGv arg2, int sign)
2201 {
2202 TCGv_i64 t0 = tcg_temp_new_i64();
2203 TCGv_i64 t1 = tcg_temp_new_i64();
2204
2205 tcg_gen_mov_i64(t0, arg1);
2206 tcg_gen_mov_i64(t1, arg2);
2207 if (sign) {
2208 TCGv_i64 t2 = tcg_temp_new_i64();
2209 TCGv_i64 t3 = tcg_temp_new_i64();
2210 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
2211 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
2212 tcg_gen_and_i64(t2, t2, t3);
2213 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
2214 tcg_gen_or_i64(t2, t2, t3);
2215 tcg_gen_movi_i64(t3, 0);
2216 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
2217 tcg_gen_rem_i64(ret, t0, t1);
2218 tcg_temp_free_i64(t2);
2219 tcg_temp_free_i64(t3);
2220 } else {
2221 TCGv_i64 t2 = tcg_const_i64(1);
2222 TCGv_i64 t3 = tcg_const_i64(0);
2223 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
2224 tcg_gen_remu_i64(ret, t0, t1);
2225 tcg_temp_free_i64(t2);
2226 tcg_temp_free_i64(t3);
2227 }
2228 tcg_temp_free_i64(t0);
2229 tcg_temp_free_i64(t1);
2230 }
2231
2232 #define GEN_INT_ARITH_MODD(name, opc3, sign) \
2233 static void glue(gen_, name)(DisasContext *ctx) \
2234 { \
2235 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \
2236 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2237 sign); \
2238 }
2239
2240 GEN_INT_ARITH_MODD(modud, 0x08, 0);
2241 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
2242 #endif
2243
2244 /* mulhw mulhw. */
2245 static void gen_mulhw(DisasContext *ctx)
2246 {
2247 TCGv_i32 t0 = tcg_temp_new_i32();
2248 TCGv_i32 t1 = tcg_temp_new_i32();
2249
2250 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2251 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2252 tcg_gen_muls2_i32(t0, t1, t0, t1);
2253 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
2254 tcg_temp_free_i32(t0);
2255 tcg_temp_free_i32(t1);
2256 if (unlikely(Rc(ctx->opcode) != 0)) {
2257 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2258 }
2259 }
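/*
 * tcg_gen_muls2_i32 above returns the full 64-bit signed product as a
 * (low, high) pair in (t0, t1); mulhw keeps only the high word.  For
 * example 0x40000000 * 4 = 0x1_0000_0000, so rD receives 0x00000001.
 * gen_mulhwu below does the same with the unsigned widening multiply.
 */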
2260
2261 /* mulhwu mulhwu. */
2262 static void gen_mulhwu(DisasContext *ctx)
2263 {
2264 TCGv_i32 t0 = tcg_temp_new_i32();
2265 TCGv_i32 t1 = tcg_temp_new_i32();
2266
2267 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2268 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2269 tcg_gen_mulu2_i32(t0, t1, t0, t1);
2270 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
2271 tcg_temp_free_i32(t0);
2272 tcg_temp_free_i32(t1);
2273 if (unlikely(Rc(ctx->opcode) != 0)) {
2274 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2275 }
2276 }
2277
2278 /* mullw mullw. */
2279 static void gen_mullw(DisasContext *ctx)
2280 {
2281 #if defined(TARGET_PPC64)
2282 TCGv_i64 t0, t1;
2283 t0 = tcg_temp_new_i64();
2284 t1 = tcg_temp_new_i64();
2285 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
2286 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
2287 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2288 tcg_temp_free(t0);
2289 tcg_temp_free(t1);
2290 #else
2291 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2292 cpu_gpr[rB(ctx->opcode)]);
2293 #endif
2294 if (unlikely(Rc(ctx->opcode) != 0)) {
2295 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2296 }
2297 }
2298
2299 /* mullwo mullwo. */
2300 static void gen_mullwo(DisasContext *ctx)
2301 {
2302 TCGv_i32 t0 = tcg_temp_new_i32();
2303 TCGv_i32 t1 = tcg_temp_new_i32();
2304
2305 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2306 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2307 tcg_gen_muls2_i32(t0, t1, t0, t1);
2308 #if defined(TARGET_PPC64)
2309 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2310 #else
2311 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
2312 #endif
2313
2314 tcg_gen_sari_i32(t0, t0, 31);
2315 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
2316 tcg_gen_extu_i32_tl(cpu_ov, t0);
2317 if (is_isa300(ctx)) {
2318 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2319 }
2320 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2321
2322 tcg_temp_free_i32(t0);
2323 tcg_temp_free_i32(t1);
2324 if (unlikely(Rc(ctx->opcode) != 0)) {
2325 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2326 }
2327 }
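/*
 * Overflow check above (sketch): the 32x32->64 product fits in 32 bits iff
 * the high word equals the sign extension of the low word.  sari(t0, 31)
 * rebuilds that sign extension from the low word, so OV = (t0 != t1).
 * E.g. 0x10000 * 0x10000 = 0x1_0000_0000: low word 0, high word 1,
 * sign extension of the low word is 0, hence OV is set.
 */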
2328
2329 /* mulli */
2330 static void gen_mulli(DisasContext *ctx)
2331 {
2332 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2333 SIMM(ctx->opcode));
2334 }
2335
2336 #if defined(TARGET_PPC64)
2337 /* mulhd mulhd. */
2338 static void gen_mulhd(DisasContext *ctx)
2339 {
2340 TCGv lo = tcg_temp_new();
2341 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2342 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2343 tcg_temp_free(lo);
2344 if (unlikely(Rc(ctx->opcode) != 0)) {
2345 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2346 }
2347 }
2348
2349 /* mulhdu mulhdu. */
2350 static void gen_mulhdu(DisasContext *ctx)
2351 {
2352 TCGv lo = tcg_temp_new();
2353 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2354 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2355 tcg_temp_free(lo);
2356 if (unlikely(Rc(ctx->opcode) != 0)) {
2357 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2358 }
2359 }
2360
2361 /* mulld mulld. */
2362 static void gen_mulld(DisasContext *ctx)
2363 {
2364 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2365 cpu_gpr[rB(ctx->opcode)]);
2366 if (unlikely(Rc(ctx->opcode) != 0)) {
2367 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2368 }
2369 }
2370
2371 /* mulldo mulldo. */
2372 static void gen_mulldo(DisasContext *ctx)
2373 {
2374 TCGv_i64 t0 = tcg_temp_new_i64();
2375 TCGv_i64 t1 = tcg_temp_new_i64();
2376
2377 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
2378 cpu_gpr[rB(ctx->opcode)]);
2379 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
2380
2381 tcg_gen_sari_i64(t0, t0, 63);
2382 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
2383 if (is_isa300(ctx)) {
2384 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2385 }
2386 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2387
2388 tcg_temp_free_i64(t0);
2389 tcg_temp_free_i64(t1);
2390
2391 if (unlikely(Rc(ctx->opcode) != 0)) {
2392 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2393 }
2394 }
2395 #endif
2396
2397 /* Common subf function */
2398 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
2399 TCGv arg2, bool add_ca, bool compute_ca,
2400 bool compute_ov, bool compute_rc0)
2401 {
2402 TCGv t0 = ret;
2403
2404 if (compute_ca || compute_ov) {
2405 t0 = tcg_temp_new();
2406 }
2407
2408 if (compute_ca) {
2409 /* dest = ~arg1 + arg2 [+ ca]. */
2410 if (NARROW_MODE(ctx)) {
2411 /*
2412 * Caution: a non-obvious corner case of the spec is that
2413 * we must produce the *entire* 64-bit addition, but
2414 * produce the carry into bit 32.
2415 */
2416 TCGv inv1 = tcg_temp_new();
2417 TCGv t1 = tcg_temp_new();
2418 tcg_gen_not_tl(inv1, arg1);
2419 if (add_ca) {
2420 tcg_gen_add_tl(t0, arg2, cpu_ca);
2421 } else {
2422 tcg_gen_addi_tl(t0, arg2, 1);
2423 }
2424 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */
2425 tcg_gen_add_tl(t0, t0, inv1);
2426 tcg_temp_free(inv1);
2427 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
2428 tcg_temp_free(t1);
2429 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
2430 if (is_isa300(ctx)) {
2431 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2432 }
2433 } else if (add_ca) {
2434 TCGv zero, inv1 = tcg_temp_new();
2435 tcg_gen_not_tl(inv1, arg1);
2436 zero = tcg_const_tl(0);
2437 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
2438 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
2439 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
2440 tcg_temp_free(zero);
2441 tcg_temp_free(inv1);
2442 } else {
2443 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
2444 tcg_gen_sub_tl(t0, arg2, arg1);
2445 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
2446 }
2447 } else if (add_ca) {
2448 /*
2449 * Since we're ignoring carry-out, we can simplify the
2450 * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
2451 */
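/*
 * (Derivation sketch: modulo 2^N, ~arg1 == 2^N - 1 - arg1, hence
 *  ~arg1 + arg2 + ca == arg2 - arg1 + ca - 1, which is exactly what the
 *  three operations below compute.)
 */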
2452 tcg_gen_sub_tl(t0, arg2, arg1);
2453 tcg_gen_add_tl(t0, t0, cpu_ca);
2454 tcg_gen_subi_tl(t0, t0, 1);
2455 } else {
2456 tcg_gen_sub_tl(t0, arg2, arg1);
2457 }
2458
2459 if (compute_ov) {
2460 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
2461 }
2462 if (unlikely(compute_rc0)) {
2463 gen_set_Rc0(ctx, t0);
2464 }
2465
2466 if (t0 != ret) {
2467 tcg_gen_mov_tl(ret, t0);
2468 tcg_temp_free(t0);
2469 }
2470 }
2471 /* Sub functions with two operands */
2472 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
2473 static void glue(gen_, name)(DisasContext *ctx) \
2474 { \
2475 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
2476 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2477 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
2478 }
2479 /* Sub functions with one operand and one immediate */
2480 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
2481 add_ca, compute_ca, compute_ov) \
2482 static void glue(gen_, name)(DisasContext *ctx) \
2483 { \
2484 TCGv t0 = tcg_const_tl(const_val); \
2485 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
2486 cpu_gpr[rA(ctx->opcode)], t0, \
2487 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
2488 tcg_temp_free(t0); \
2489 }
2490 /* subf subf. subfo subfo. */
2491 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
2492 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
2493 /* subfc subfc. subfco subfco. */
2494 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
2495 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
2496 /* subfe subfe. subfeo subfeo. */
2497 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
2498 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
2499 /* subfme subfme. subfmeo subfmeo. */
2500 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
2501 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
2502 /* subfze subfze. subfzeo subfzeo. */
2503 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
2504 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2505
2506 /* subfic */
2507 static void gen_subfic(DisasContext *ctx)
2508 {
2509 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
2510 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2511 c, 0, 1, 0, 0);
2512 tcg_temp_free(c);
2513 }
2514
2515 /* neg neg. nego nego. */
2516 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2517 {
2518 TCGv zero = tcg_const_tl(0);
2519 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2520 zero, 0, 0, compute_ov, Rc(ctx->opcode));
2521 tcg_temp_free(zero);
2522 }
2523
2524 static void gen_neg(DisasContext *ctx)
2525 {
2526 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2527 if (unlikely(Rc(ctx->opcode))) {
2528 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2529 }
2530 }
2531
2532 static void gen_nego(DisasContext *ctx)
2533 {
2534 gen_op_arith_neg(ctx, 1);
2535 }
2536
2537 /*** Integer logical ***/
2538 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
2539 static void glue(gen_, name)(DisasContext *ctx) \
2540 { \
2541 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
2542 cpu_gpr[rB(ctx->opcode)]); \
2543 if (unlikely(Rc(ctx->opcode) != 0)) \
2544 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
2545 }
2546
2547 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
2548 static void glue(gen_, name)(DisasContext *ctx) \
2549 { \
2550 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
2551 if (unlikely(Rc(ctx->opcode) != 0)) \
2552 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
2553 }
2554
2555 /* and & and. */
2556 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2557 /* andc & andc. */
2558 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2559
2560 /* andi. */
2561 static void gen_andi_(DisasContext *ctx)
2562 {
2563 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2564 UIMM(ctx->opcode));
2565 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2566 }
2567
2568 /* andis. */
2569 static void gen_andis_(DisasContext *ctx)
2570 {
2571 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2572 UIMM(ctx->opcode) << 16);
2573 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2574 }
2575
2576 /* cntlzw */
2577 static void gen_cntlzw(DisasContext *ctx)
2578 {
2579 TCGv_i32 t = tcg_temp_new_i32();
2580
2581 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2582 tcg_gen_clzi_i32(t, t, 32);
2583 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2584 tcg_temp_free_i32(t);
2585
2586 if (unlikely(Rc(ctx->opcode) != 0)) {
2587 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2588 }
2589 }
2590
2591 /* cnttzw */
2592 static void gen_cnttzw(DisasContext *ctx)
2593 {
2594 TCGv_i32 t = tcg_temp_new_i32();
2595
2596 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2597 tcg_gen_ctzi_i32(t, t, 32);
2598 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2599 tcg_temp_free_i32(t);
2600
2601 if (unlikely(Rc(ctx->opcode) != 0)) {
2602 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2603 }
2604 }
2605
2606 /* eqv & eqv. */
2607 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
2608 /* extsb & extsb. */
2609 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
2610 /* extsh & extsh. */
2611 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
2612 /* nand & nand. */
2613 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
2614 /* nor & nor. */
2615 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2616
2617 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
2618 static void gen_pause(DisasContext *ctx)
2619 {
2620 TCGv_i32 t0 = tcg_const_i32(0);
2621 tcg_gen_st_i32(t0, cpu_env,
2622 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
2623 tcg_temp_free_i32(t0);
2624
2625 /* Stop translation; this gives other CPUs a chance to run */
2626 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
2627 }
2628 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2629
2630 /* or & or. */
2631 static void gen_or(DisasContext *ctx)
2632 {
2633 int rs, ra, rb;
2634
2635 rs = rS(ctx->opcode);
2636 ra = rA(ctx->opcode);
2637 rb = rB(ctx->opcode);
2638 /* Optimisation for mr. ri case */
2639 if (rs != ra || rs != rb) {
2640 if (rs != rb) {
2641 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
2642 } else {
2643 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
2644 }
2645 if (unlikely(Rc(ctx->opcode) != 0)) {
2646 gen_set_Rc0(ctx, cpu_gpr[ra]);
2647 }
2648 } else if (unlikely(Rc(ctx->opcode) != 0)) {
2649 gen_set_Rc0(ctx, cpu_gpr[rs]);
2650 #if defined(TARGET_PPC64)
2651 } else if (rs != 0) { /* 0 is nop */
2652 int prio = 0;
2653
2654 switch (rs) {
2655 case 1:
2656 /* Set process priority to low */
2657 prio = 2;
2658 break;
2659 case 6:
2660 /* Set process priority to medium-low */
2661 prio = 3;
2662 break;
2663 case 2:
2664 /* Set process priority to normal */
2665 prio = 4;
2666 break;
2667 #if !defined(CONFIG_USER_ONLY)
2668 case 31:
2669 if (!ctx->pr) {
2670 /* Set process priority to very low */
2671 prio = 1;
2672 }
2673 break;
2674 case 5:
2675 if (!ctx->pr) {
2676 /* Set process priority to medium-high */
2677 prio = 5;
2678 }
2679 break;
2680 case 3:
2681 if (!ctx->pr) {
2682 /* Set process priority to high */
2683 prio = 6;
2684 }
2685 break;
2686 case 7:
2687 if (ctx->hv && !ctx->pr) {
2688 /* Set process priority to very high */
2689 prio = 7;
2690 }
2691 break;
2692 #endif
2693 default:
2694 break;
2695 }
2696 if (prio) {
2697 TCGv t0 = tcg_temp_new();
2698 gen_load_spr(t0, SPR_PPR);
2699 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
2700 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
2701 gen_store_spr(SPR_PPR, t0);
2702 tcg_temp_free(t0);
2703 }
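/*
 * PPR layout note (sketch): the mask 0x001C000000000000 above covers the
 * 3-bit priority field at bits 52:50 (LSB-0 numbering), so e.g. prio = 4
 * ("normal") is stored as 4 << 50 = 0x0010000000000000.
 */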
2704 #if !defined(CONFIG_USER_ONLY)
2705 /*
2706 * Pause out of TCG otherwise spin loops with smt_low eat too
2707 * much CPU and the kernel hangs. This applies to all
2708 * encodings other than no-op, e.g., miso(rs=26), yield(27),
2709 * mdoio(29), mdoom(30), and all currently undefined.
2710 */
2711 gen_pause(ctx);
2712 #endif
2713 #endif
2714 }
2715 }
2716 /* orc & orc. */
2717 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2718
2719 /* xor & xor. */
2720 static void gen_xor(DisasContext *ctx)
2721 {
2722 /* Optimisation for "set to zero" case */
2723 if (rS(ctx->opcode) != rB(ctx->opcode)) {
2724 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2725 cpu_gpr[rB(ctx->opcode)]);
2726 } else {
2727 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2728 }
2729 if (unlikely(Rc(ctx->opcode) != 0)) {
2730 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2731 }
2732 }
2733
2734 /* ori */
2735 static void gen_ori(DisasContext *ctx)
2736 {
2737 target_ulong uimm = UIMM(ctx->opcode);
2738
2739 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2740 return;
2741 }
2742 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2743 }
2744
2745 /* oris */
2746 static void gen_oris(DisasContext *ctx)
2747 {
2748 target_ulong uimm = UIMM(ctx->opcode);
2749
2750 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2751 /* NOP */
2752 return;
2753 }
2754 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2755 uimm << 16);
2756 }
2757
2758 /* xori */
2759 static void gen_xori(DisasContext *ctx)
2760 {
2761 target_ulong uimm = UIMM(ctx->opcode);
2762
2763 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2764 /* NOP */
2765 return;
2766 }
2767 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2768 }
2769
2770 /* xoris */
2771 static void gen_xoris(DisasContext *ctx)
2772 {
2773 target_ulong uimm = UIMM(ctx->opcode);
2774
2775 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2776 /* NOP */
2777 return;
2778 }
2779 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2780 uimm << 16);
2781 }
2782
2783 /* popcntb : PowerPC 2.03 specification */
2784 static void gen_popcntb(DisasContext *ctx)
2785 {
2786 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2787 }
2788
2789 static void gen_popcntw(DisasContext *ctx)
2790 {
2791 #if defined(TARGET_PPC64)
2792 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2793 #else
2794 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2795 #endif
2796 }
2797
2798 #if defined(TARGET_PPC64)
2799 /* popcntd: PowerPC 2.06 specification */
2800 static void gen_popcntd(DisasContext *ctx)
2801 {
2802 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2803 }
2804 #endif
2805
2806 /* prtyw: PowerPC 2.05 specification */
2807 static void gen_prtyw(DisasContext *ctx)
2808 {
2809 TCGv ra = cpu_gpr[rA(ctx->opcode)];
2810 TCGv rs = cpu_gpr[rS(ctx->opcode)];
2811 TCGv t0 = tcg_temp_new();
2812 tcg_gen_shri_tl(t0, rs, 16);
2813 tcg_gen_xor_tl(ra, rs, t0);
2814 tcg_gen_shri_tl(t0, ra, 8);
2815 tcg_gen_xor_tl(ra, ra, t0);
2816 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
2817 tcg_temp_free(t0);
2818 }
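/*
 * prtyw folds the parity in two XOR steps: after the 16- and 8-bit shifts,
 * bit 0 of each 32-bit word is b0 ^ b8 ^ b16 ^ b24, i.e. the parity of the
 * low-order bits of the four bytes in that word; the final AND with
 * 0x100000001 keeps just those two result bits.  gen_prtyd below is the
 * 64-bit variant with one extra 32-bit fold.
 */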
2819
2820 #if defined(TARGET_PPC64)
2821 /* prtyd: PowerPC 2.05 specification */
2822 static void gen_prtyd(DisasContext *ctx)
2823 {
2824 TCGv ra = cpu_gpr[rA(ctx->opcode)];
2825 TCGv rs = cpu_gpr[rS(ctx->opcode)];
2826 TCGv t0 = tcg_temp_new();
2827 tcg_gen_shri_tl(t0, rs, 32);
2828 tcg_gen_xor_tl(ra, rs, t0);
2829 tcg_gen_shri_tl(t0, ra, 16);
2830 tcg_gen_xor_tl(ra, ra, t0);
2831 tcg_gen_shri_tl(t0, ra, 8);
2832 tcg_gen_xor_tl(ra, ra, t0);
2833 tcg_gen_andi_tl(ra, ra, 1);
2834 tcg_temp_free(t0);
2835 }
2836 #endif
2837
2838 #if defined(TARGET_PPC64)
2839 /* bpermd */
2840 static void gen_bpermd(DisasContext *ctx)
2841 {
2842 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
2843 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2844 }
2845 #endif
2846
2847 #if defined(TARGET_PPC64)
2848 /* extsw & extsw. */
2849 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2850
2851 /* cntlzd */
2852 static void gen_cntlzd(DisasContext *ctx)
2853 {
2854 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2855 if (unlikely(Rc(ctx->opcode) != 0)) {
2856 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2857 }
2858 }
2859
2860 /* cnttzd */
2861 static void gen_cnttzd(DisasContext *ctx)
2862 {
2863 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2864 if (unlikely(Rc(ctx->opcode) != 0)) {
2865 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2866 }
2867 }
2868
2869 /* darn */
2870 static void gen_darn(DisasContext *ctx)
2871 {
2872 int l = L(ctx->opcode);
2873
2874 if (l > 2) {
2875 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
2876 } else {
2877 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
2878 gen_io_start();
2879 }
2880 if (l == 0) {
2881 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
2882 } else {
2883 /* Return 64-bit random for both CRN and RRN */
2884 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
2885 }
2886 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
2887 gen_stop_exception(ctx);
2888 }
2889 }
2890 }
2891 #endif
2892
2893 /*** Integer rotate ***/
2894
2895 /* rlwimi & rlwimi. */
2896 static void gen_rlwimi(DisasContext *ctx)
2897 {
2898 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2899 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2900 uint32_t sh = SH(ctx->opcode);
2901 uint32_t mb = MB(ctx->opcode);
2902 uint32_t me = ME(ctx->opcode);
2903
2904 if (sh == (31 - me) && mb <= me) {
2905 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2906 } else {
2907 target_ulong mask;
2908 bool mask_in_32b = true;
2909 TCGv t1;
2910
2911 #if defined(TARGET_PPC64)
2912 mb += 32;
2913 me += 32;
2914 #endif
2915 mask = MASK(mb, me);
2916
2917 #if defined(TARGET_PPC64)
2918 if (mask > 0xffffffffu) {
2919 mask_in_32b = false;
2920 }
2921 #endif
2922 t1 = tcg_temp_new();
2923 if (mask_in_32b) {
2924 TCGv_i32 t0 = tcg_temp_new_i32();
2925 tcg_gen_trunc_tl_i32(t0, t_rs);
2926 tcg_gen_rotli_i32(t0, t0, sh);
2927 tcg_gen_extu_i32_tl(t1, t0);
2928 tcg_temp_free_i32(t0);
2929 } else {
2930 #if defined(TARGET_PPC64)
2931 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2932 tcg_gen_rotli_i64(t1, t1, sh);
2933 #else
2934 g_assert_not_reached();
2935 #endif
2936 }
2937
2938 tcg_gen_andi_tl(t1, t1, mask);
2939 tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2940 tcg_gen_or_tl(t_ra, t_ra, t1);
2941 tcg_temp_free(t1);
2942 }
2943 if (unlikely(Rc(ctx->opcode) != 0)) {
2944 gen_set_Rc0(ctx, t_ra);
2945 }
2946 }
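/*
 * When the rotate mask does not fit the fast deposit/extract paths, the
 * 32-bit rotate is emulated on 64-bit targets by first replicating the low
 * word into the high half (deposit at offset 32) and then doing a 64-bit
 * rotate, so bits that wrap past bit 31 reappear at bit 0 just as a true
 * 32-bit rotate would.  rlwinm and rlwnm below use the same trick.
 */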
2947
2948 /* rlwinm & rlwinm. */
2949 static void gen_rlwinm(DisasContext *ctx)
2950 {
2951 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2952 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2953 int sh = SH(ctx->opcode);
2954 int mb = MB(ctx->opcode);
2955 int me = ME(ctx->opcode);
2956 int len = me - mb + 1;
2957 int rsh = (32 - sh) & 31;
2958
2959 if (sh != 0 && len > 0 && me == (31 - sh)) {
2960 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2961 } else if (me == 31 && rsh + len <= 32) {
2962 tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2963 } else {
2964 target_ulong mask;
2965 bool mask_in_32b = true;
2966 #if defined(TARGET_PPC64)
2967 mb += 32;
2968 me += 32;
2969 #endif
2970 mask = MASK(mb, me);
2971 #if defined(TARGET_PPC64)
2972 if (mask > 0xffffffffu) {
2973 mask_in_32b = false;
2974 }
2975 #endif
2976 if (mask_in_32b) {
2977 if (sh == 0) {
2978 tcg_gen_andi_tl(t_ra, t_rs, mask);
2979 } else {
2980 TCGv_i32 t0 = tcg_temp_new_i32();
2981 tcg_gen_trunc_tl_i32(t0, t_rs);
2982 tcg_gen_rotli_i32(t0, t0, sh);
2983 tcg_gen_andi_i32(t0, t0, mask);
2984 tcg_gen_extu_i32_tl(t_ra, t0);
2985 tcg_temp_free_i32(t0);
2986 }
2987 } else {
2988 #if defined(TARGET_PPC64)
2989 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2990 tcg_gen_rotli_i64(t_ra, t_ra, sh);
2991 tcg_gen_andi_i64(t_ra, t_ra, mask);
2992 #else
2993 g_assert_not_reached();
2994 #endif
2995 }
2996 }
2997 if (unlikely(Rc(ctx->opcode) != 0)) {
2998 gen_set_Rc0(ctx, t_ra);
2999 }
3000 }
3001
3002 /* rlwnm & rlwnm. */
3003 static void gen_rlwnm(DisasContext *ctx)
3004 {
3005 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
3006 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
3007 TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
3008 uint32_t mb = MB(ctx->opcode);
3009 uint32_t me = ME(ctx->opcode);
3010 target_ulong mask;
3011 bool mask_in_32b = true;
3012
3013 #if defined(TARGET_PPC64)
3014 mb += 32;
3015 me += 32;
3016 #endif
3017 mask = MASK(mb, me);
3018
3019 #if defined(TARGET_PPC64)
3020 if (mask > 0xffffffffu) {
3021 mask_in_32b = false;
3022 }
3023 #endif
3024 if (mask_in_32b) {
3025 TCGv_i32 t0 = tcg_temp_new_i32();
3026 TCGv_i32 t1 = tcg_temp_new_i32();
3027 tcg_gen_trunc_tl_i32(t0, t_rb);
3028 tcg_gen_trunc_tl_i32(t1, t_rs);
3029 tcg_gen_andi_i32(t0, t0, 0x1f);
3030 tcg_gen_rotl_i32(t1, t1, t0);
3031 tcg_gen_extu_i32_tl(t_ra, t1);
3032 tcg_temp_free_i32(t0);
3033 tcg_temp_free_i32(t1);
3034 } else {
3035 #if defined(TARGET_PPC64)
3036 TCGv_i64 t0 = tcg_temp_new_i64();
3037 tcg_gen_andi_i64(t0, t_rb, 0x1f);
3038 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
3039 tcg_gen_rotl_i64(t_ra, t_ra, t0);
3040 tcg_temp_free_i64(t0);
3041 #else
3042 g_assert_not_reached();
3043 #endif
3044 }
3045
3046 tcg_gen_andi_tl(t_ra, t_ra, mask);
3047
3048 if (unlikely(Rc(ctx->opcode) != 0)) {
3049 gen_set_Rc0(ctx, t_ra);
3050 }
3051 }
3052
3053 #if defined(TARGET_PPC64)
3054 #define GEN_PPC64_R2(name, opc1, opc2) \
3055 static void glue(gen_, name##0)(DisasContext *ctx) \
3056 { \
3057 gen_##name(ctx, 0); \
3058 } \
3059 \
3060 static void glue(gen_, name##1)(DisasContext *ctx) \
3061 { \
3062 gen_##name(ctx, 1); \
3063 }
3064 #define GEN_PPC64_R4(name, opc1, opc2) \
3065 static void glue(gen_, name##0)(DisasContext *ctx) \
3066 { \
3067 gen_##name(ctx, 0, 0); \
3068 } \
3069 \
3070 static void glue(gen_, name##1)(DisasContext *ctx) \
3071 { \
3072 gen_##name(ctx, 0, 1); \
3073 } \
3074 \
3075 static void glue(gen_, name##2)(DisasContext *ctx) \
3076 { \
3077 gen_##name(ctx, 1, 0); \
3078 } \
3079 \
3080 static void glue(gen_, name##3)(DisasContext *ctx) \
3081 { \
3082 gen_##name(ctx, 1, 1); \
3083 }
3084
3085 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
3086 {
3087 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
3088 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
3089 int len = me - mb + 1;
3090 int rsh = (64 - sh) & 63;
3091
3092 if (sh != 0 && len > 0 && me == (63 - sh)) {
3093 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
3094 } else if (me == 63 && rsh + len <= 64) {
3095 tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
3096 } else {
3097 tcg_gen_rotli_tl(t_ra, t_rs, sh);
3098 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
3099 }
3100 if (unlikely(Rc(ctx->opcode) != 0)) {
3101 gen_set_Rc0(ctx, t_ra);
3102 }
3103 }
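/*
 * Example (sketch): "rldicl rA,rS,0,48" reaches gen_rldinm with sh = 0,
 * mb = 48, me = 63; len = 16 and rsh = 0, so the middle branch emits a
 * single extract of the low 16 bits -- the canonical clrldi rA,rS,48.
 */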
3104
3105 /* rldicl - rldicl. */
3106 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
3107 {
3108 uint32_t sh, mb;
3109
3110 sh = SH(ctx->opcode) | (shn << 5);
3111 mb = MB(ctx->opcode) | (mbn << 5);
3112 gen_rldinm(ctx, mb, 63, sh);
3113 }
3114 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
3115
3116 /* rldicr - rldicr. */
3117 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
3118 {
3119 uint32_t sh, me;
3120
3121 sh = SH(ctx->opcode) | (shn << 5);
3122 me = MB(ctx->opcode) | (men << 5);
3123 gen_rldinm(ctx, 0, me, sh);
3124 }
3125 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
3126
3127 /* rldic - rldic. */
3128 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
3129 {
3130 uint32_t sh, mb;
3131
3132 sh = SH(ctx->opcode) | (shn << 5);
3133 mb = MB(ctx->opcode) | (mbn << 5);
3134 gen_rldinm(ctx, mb, 63 - sh, sh);
3135 }
3136 GEN_PPC64_R4(rldic, 0x1E, 0x04);
3137
3138 static void gen_rldnm(DisasContext *ctx, int mb, int me)
3139 {
3140 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
3141 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
3142 TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
3143 TCGv t0;
3144
3145 t0 = tcg_temp_new();
3146 tcg_gen_andi_tl(t0, t_rb, 0x3f);
3147 tcg_gen_rotl_tl(t_ra, t_rs, t0);
3148 tcg_temp_free(t0);
3149
3150 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
3151 if (unlikely(Rc(ctx->opcode) != 0)) {
3152 gen_set_Rc0(ctx, t_ra);
3153 }
3154 }
3155
3156 /* rldcl - rldcl. */
3157 static inline void gen_rldcl(DisasContext *ctx, int mbn)
3158 {
3159 uint32_t mb;
3160
3161 mb = MB(ctx->opcode) | (mbn << 5);
3162 gen_rldnm(ctx, mb, 63);
3163 }
3164 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
3165
3166 /* rldcr - rldcr. */
3167 static inline void gen_rldcr(DisasContext *ctx, int men)
3168 {
3169 uint32_t me;
3170
3171 me = MB(ctx->opcode) | (men << 5);
3172 gen_rldnm(ctx, 0, me);
3173 }
3174 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
3175
3176 /* rldimi - rldimi. */
3177 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
3178 {
3179 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
3180 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
3181 uint32_t sh = SH(ctx->opcode) | (shn << 5);
3182 uint32_t mb = MB(ctx->opcode) | (mbn << 5);
3183 uint32_t me = 63 - sh;
3184
3185 if (mb <= me) {
3186 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
3187 } else {
3188 target_ulong mask = MASK(mb, me);
3189 TCGv t1 = tcg_temp_new();
3190
3191 tcg_gen_rotli_tl(t1, t_rs, sh);
3192 tcg_gen_andi_tl(t1, t1, mask);
3193 tcg_gen_andi_tl(t_ra, t_ra, ~mask);
3194 tcg_gen_or_tl(t_ra, t_ra, t1);
3195 tcg_temp_free(t1);
3196 }
3197 if (unlikely(Rc(ctx->opcode) != 0)) {
3198 gen_set_Rc0(ctx, t_ra);
3199 }
3200 }
3201 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
3202 #endif
3203
3204 /*** Integer shift ***/
3205
3206 /* slw & slw. */
3207 static void gen_slw(DisasContext *ctx)
3208 {
3209 TCGv t0, t1;
3210
3211 t0 = tcg_temp_new();
3212 /* AND rS with a mask that is 0 when rB >= 0x20 */
3213 #if defined(TARGET_PPC64)
3214 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
3215 tcg_gen_sari_tl(t0, t0, 0x3f);
3216 #else
3217 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
3218 tcg_gen_sari_tl(t0, t0, 0x1f);
3219 #endif
3220 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3221 t1 = tcg_temp_new();
3222 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
3223 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3224 tcg_temp_free(t1);
3225 tcg_temp_free(t0);
3226 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
3227 if (unlikely(Rc(ctx->opcode) != 0)) {
3228 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3229 }
3230 }
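/*
 * Shift-mask trick above (sketch): shifting rB left by 58 (26 on 32-bit
 * targets) puts bit 5 of rB into the sign position, and the arithmetic
 * right shift then yields all-ones whenever the shift amount is >= 0x20.
 * The andc therefore zeroes rS for those amounts, matching the architected
 * result of 0, while the low 5 bits drive the actual shift.  srw, sld and
 * srd below use the same construction.
 */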
3231
3232 /* sraw & sraw. */
3233 static void gen_sraw(DisasContext *ctx)
3234 {
3235 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
3236 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3237 if (unlikely(Rc(ctx->opcode) != 0)) {
3238 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3239 }
3240 }
3241
3242 /* srawi & srawi. */
3243 static void gen_srawi(DisasContext *ctx)
3244 {
3245 int sh = SH(ctx->opcode);
3246 TCGv dst = cpu_gpr[rA(ctx->opcode)];
3247 TCGv src = cpu_gpr[rS(ctx->opcode)];
3248 if (sh == 0) {
3249 tcg_gen_ext32s_tl(dst, src);
3250 tcg_gen_movi_tl(cpu_ca, 0);
3251 if (is_isa300(ctx)) {
3252 tcg_gen_movi_tl(cpu_ca32, 0);
3253 }
3254 } else {
3255 TCGv t0;
3256 tcg_gen_ext32s_tl(dst, src);
3257 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
3258 t0 = tcg_temp_new();
3259 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
3260 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3261 tcg_temp_free(t0);
3262 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3263 if (is_isa300(ctx)) {
3264 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3265 }
3266 tcg_gen_sari_tl(dst, dst, sh);
3267 }
3268 if (unlikely(Rc(ctx->opcode) != 0)) {
3269 gen_set_Rc0(ctx, dst);
3270 }
3271 }
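/*
 * CA computation above (sketch): CA must be 1 only when the source is
 * negative and at least one 1 bit was shifted out.  The andi keeps the sh
 * bits about to be discarded, the sign mask (sari by 31/63) keeps them only
 * for negative values, and setcond != 0 collapses that to a single bit.
 * This is what lets the usual "srawi; addze" sequence implement a signed
 * divide by 2^sh.
 */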
3272
3273 /* srw & srw. */
3274 static void gen_srw(DisasContext *ctx)
3275 {
3276 TCGv t0, t1;
3277
3278 t0 = tcg_temp_new();
3279 /* AND rS with a mask that is 0 when rB >= 0x20 */
3280 #if defined(TARGET_PPC64)
3281 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
3282 tcg_gen_sari_tl(t0, t0, 0x3f);
3283 #else
3284 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
3285 tcg_gen_sari_tl(t0, t0, 0x1f);
3286 #endif
3287 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3288 tcg_gen_ext32u_tl(t0, t0);
3289 t1 = tcg_temp_new();
3290 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
3291 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3292 tcg_temp_free(t1);
3293 tcg_temp_free(t0);
3294 if (unlikely(Rc(ctx->opcode) != 0)) {
3295 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3296 }
3297 }
3298
3299 #if defined(TARGET_PPC64)
3300 /* sld & sld. */
3301 static void gen_sld(DisasContext *ctx)
3302 {
3303 TCGv t0, t1;
3304
3305 t0 = tcg_temp_new();
3306 /* AND rS with a mask that is 0 when rB >= 0x40 */
3307 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3308 tcg_gen_sari_tl(t0, t0, 0x3f);
3309 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3310 t1 = tcg_temp_new();
3311 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3312 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3313 tcg_temp_free(t1);
3314 tcg_temp_free(t0);
3315 if (unlikely(Rc(ctx->opcode) != 0)) {
3316 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3317 }
3318 }
3319
3320 /* srad & srad. */
3321 static void gen_srad(DisasContext *ctx)
3322 {
3323 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
3324 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3325 if (unlikely(Rc(ctx->opcode) != 0)) {
3326 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3327 }
3328 }
3329 /* sradi & sradi. */
3330 static inline void gen_sradi(DisasContext *ctx, int n)
3331 {
3332 int sh = SH(ctx->opcode) + (n << 5);
3333 TCGv dst = cpu_gpr[rA(ctx->opcode)];
3334 TCGv src = cpu_gpr[rS(ctx->opcode)];
3335 if (sh == 0) {
3336 tcg_gen_mov_tl(dst, src);
3337 tcg_gen_movi_tl(cpu_ca, 0);
3338 if (is_isa300(ctx)) {
3339 tcg_gen_movi_tl(cpu_ca32, 0);
3340 }
3341 } else {
3342 TCGv t0;
3343 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
3344 t0 = tcg_temp_new();
3345 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
3346 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3347 tcg_temp_free(t0);
3348 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3349 if (is_isa300(ctx)) {
3350 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3351 }
3352 tcg_gen_sari_tl(dst, src, sh);
3353 }
3354 if (unlikely(Rc(ctx->opcode) != 0)) {
3355 gen_set_Rc0(ctx, dst);
3356 }
3357 }
3358
3359 static void gen_sradi0(DisasContext *ctx)
3360 {
3361 gen_sradi(ctx, 0);
3362 }
3363
3364 static void gen_sradi1(DisasContext *ctx)
3365 {
3366 gen_sradi(ctx, 1);
3367 }
3368
3369 /* extswsli & extswsli. */
3370 static inline void gen_extswsli(DisasContext *ctx, int n)
3371 {
3372 int sh = SH(ctx->opcode) + (n << 5);
3373 TCGv dst = cpu_gpr[rA(ctx->opcode)];
3374 TCGv src = cpu_gpr[rS(ctx->opcode)];
3375
3376 tcg_gen_ext32s_tl(dst, src);
3377 tcg_gen_shli_tl(dst, dst, sh);
3378 if (unlikely(Rc(ctx->opcode) != 0)) {
3379 gen_set_Rc0(ctx, dst);
3380 }
3381 }
3382
3383 static void gen_extswsli0(DisasContext *ctx)
3384 {
3385 gen_extswsli(ctx, 0);
3386 }
3387
3388 static void gen_extswsli1(DisasContext *ctx)
3389 {
3390 gen_extswsli(ctx, 1);
3391 }
3392
3393 /* srd & srd. */
3394 static void gen_srd(DisasContext *ctx)
3395 {
3396 TCGv t0, t1;
3397
3398 t0 = tcg_temp_new();
3399 /* AND rS with a mask that is 0 when rB >= 0x40 */
3400 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3401 tcg_gen_sari_tl(t0, t0, 0x3f);
3402 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3403 t1 = tcg_temp_new();
3404 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3405 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3406 tcg_temp_free(t1);
3407 tcg_temp_free(t0);
3408 if (unlikely(Rc(ctx->opcode) != 0)) {
3409 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3410 }
3411 }
3412 #endif
3413
3414 /*** Addressing modes ***/
3415 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
3416 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
3417 target_long maskl)
3418 {
3419 target_long simm = SIMM(ctx->opcode);
3420
3421 simm &= ~maskl;
3422 if (rA(ctx->opcode) == 0) {
3423 if (NARROW_MODE(ctx)) {
3424 simm = (uint32_t)simm;
3425 }
3426 tcg_gen_movi_tl(EA, simm);
3427 } else if (likely(simm != 0)) {
3428 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
3429 if (NARROW_MODE(ctx)) {
3430 tcg_gen_ext32u_tl(EA, EA);
3431 }
3432 } else {
3433 if (NARROW_MODE(ctx)) {
3434 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3435 } else {
3436 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3437 }
3438 }
3439 }
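/*
 * The maskl argument clears the low bits of the immediate before it is
 * added: plain D-form loads/stores pass 0, DS-form (ld/std, where the two
 * low displacement bits encode the opcode variant) pass 0x03, and the
 * DQ-form lq passes 0x0F, as seen in the callers below.
 */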
3440
3441 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3442 {
3443 if (rA(ctx->opcode) == 0) {
3444 if (NARROW_MODE(ctx)) {
3445 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3446 } else {
3447 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3448 }
3449 } else {
3450 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3451 if (NARROW_MODE(ctx)) {
3452 tcg_gen_ext32u_tl(EA, EA);
3453 }
3454 }
3455 }
3456
3457 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3458 {
3459 if (rA(ctx->opcode) == 0) {
3460 tcg_gen_movi_tl(EA, 0);
3461 } else if (NARROW_MODE(ctx)) {
3462 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3463 } else {
3464 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3465 }
3466 }
3467
3468 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
3469 target_long val)
3470 {
3471 tcg_gen_addi_tl(ret, arg1, val);
3472 if (NARROW_MODE(ctx)) {
3473 tcg_gen_ext32u_tl(ret, ret);
3474 }
3475 }
3476
3477 static inline void gen_align_no_le(DisasContext *ctx)
3478 {
3479 gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
3480 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
3481 }
3482
3483 /*** Integer load ***/
3484 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
3485 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
3486
3487 #define GEN_QEMU_LOAD_TL(ldop, op) \
3488 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \
3489 TCGv val, \
3490 TCGv addr) \
3491 { \
3492 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \
3493 }
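/*
 * For instance GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) just below expands
 * (roughly) to:
 *
 *   static void gen_qemu_ld16u(DisasContext *ctx, TCGv val, TCGv addr)
 *   {
 *       tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx,
 *                          MO_UW | ctx->default_tcg_memop_mask);
 *   }
 *
 * The BSWAP_MEMOP variants (ld16ur/ld32ur) instead XOR MO_BSWAP into the
 * default mask, giving the byte-reversed accesses used by lhbrx/lwbrx.
 */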
3494
3495 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB))
3496 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
3497 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
3498 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
3499 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
3500
3501 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
3502 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
3503
3504 #define GEN_QEMU_LOAD_64(ldop, op) \
3505 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \
3506 TCGv_i64 val, \
3507 TCGv addr) \
3508 { \
3509 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \
3510 }
3511
3512 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB))
3513 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
3514 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
3515 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
3516 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q))
3517
3518 #if defined(TARGET_PPC64)
3519 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q))
3520 #endif
3521
3522 #define GEN_QEMU_STORE_TL(stop, op) \
3523 static void glue(gen_qemu_, stop)(DisasContext *ctx, \
3524 TCGv val, \
3525 TCGv addr) \
3526 { \
3527 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \
3528 }
3529
3530 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB))
3531 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
3532 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
3533
3534 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
3535 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
3536
3537 #define GEN_QEMU_STORE_64(stop, op) \
3538 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \
3539 TCGv_i64 val, \
3540 TCGv addr) \
3541 { \
3542 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \
3543 }
3544
3545 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB))
3546 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
3547 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
3548 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q))
3549
3550 #if defined(TARGET_PPC64)
3551 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q))
3552 #endif
3553
3554 #define GEN_LD(name, ldop, opc, type) \
3555 static void glue(gen_, name)(DisasContext *ctx) \
3556 { \
3557 TCGv EA; \
3558 gen_set_access_type(ctx, ACCESS_INT); \
3559 EA = tcg_temp_new(); \
3560 gen_addr_imm_index(ctx, EA, 0); \
3561 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3562 tcg_temp_free(EA); \
3563 }
3564
3565 #define GEN_LDU(name, ldop, opc, type) \
3566 static void glue(gen_, name##u)(DisasContext *ctx) \
3567 { \
3568 TCGv EA; \
3569 if (unlikely(rA(ctx->opcode) == 0 || \
3570 rA(ctx->opcode) == rD(ctx->opcode))) { \
3571 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3572 return; \
3573 } \
3574 gen_set_access_type(ctx, ACCESS_INT); \
3575 EA = tcg_temp_new(); \
3576 if (type == PPC_64B) \
3577 gen_addr_imm_index(ctx, EA, 0x03); \
3578 else \
3579 gen_addr_imm_index(ctx, EA, 0); \
3580 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3581 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3582 tcg_temp_free(EA); \
3583 }
3584
3585 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
3586 static void glue(gen_, name##ux)(DisasContext *ctx) \
3587 { \
3588 TCGv EA; \
3589 if (unlikely(rA(ctx->opcode) == 0 || \
3590 rA(ctx->opcode) == rD(ctx->opcode))) { \
3591 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3592 return; \
3593 } \
3594 gen_set_access_type(ctx, ACCESS_INT); \
3595 EA = tcg_temp_new(); \
3596 gen_addr_reg_index(ctx, EA); \
3597 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3598 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3599 tcg_temp_free(EA); \
3600 }
3601
3602 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
3603 static void glue(gen_, name##x)(DisasContext *ctx) \
3604 { \
3605 TCGv EA; \
3606 chk; \
3607 gen_set_access_type(ctx, ACCESS_INT); \
3608 EA = tcg_temp_new(); \
3609 gen_addr_reg_index(ctx, EA); \
3610 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3611 tcg_temp_free(EA); \
3612 }
3613
3614 #define GEN_LDX(name, ldop, opc2, opc3, type) \
3615 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3616
3617 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \
3618 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3619
3620 #define GEN_LDS(name, ldop, op, type) \
3621 GEN_LD(name, ldop, op | 0x20, type); \
3622 GEN_LDU(name, ldop, op | 0x21, type); \
3623 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
3624 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
3625
3626 /* lbz lbzu lbzux lbzx */
3627 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
3628 /* lha lhau lhaux lhax */
3629 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
3630 /* lhz lhzu lhzux lhzx */
3631 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
3632 /* lwz lwzu lwzux lwzx */
3633 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
3634
3635 #define GEN_LDEPX(name, ldop, opc2, opc3) \
3636 static void glue(gen_, name##epx)(DisasContext *ctx) \
3637 { \
3638 TCGv EA; \
3639 CHK_SV; \
3640 gen_set_access_type(ctx, ACCESS_INT); \
3641 EA = tcg_temp_new(); \
3642 gen_addr_reg_index(ctx, EA); \
3643 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
3644 tcg_temp_free(EA); \
3645 }
3646
3647 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
3648 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
3649 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
3650 #if defined(TARGET_PPC64)
3651 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00)
3652 #endif
3653
3654 #if defined(TARGET_PPC64)
3655 /* lwaux */
3656 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
3657 /* lwax */
3658 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
3659 /* ldux */
3660 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B);
3661 /* ldx */
3662 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B);
3663
3664 /* CI load/store variants */
3665 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
3666 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
3667 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
3668 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
3669
3670 static void gen_ld(DisasContext *ctx)
3671 {
3672 TCGv EA;
3673 if (Rc(ctx->opcode)) {
3674 if (unlikely(rA(ctx->opcode) == 0 ||
3675 rA(ctx->opcode) == rD(ctx->opcode))) {
3676 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3677 return;
3678 }
3679 }
3680 gen_set_access_type(ctx, ACCESS_INT);
3681 EA = tcg_temp_new();
3682 gen_addr_imm_index(ctx, EA, 0x03);
3683 if (ctx->opcode & 0x02) {
3684 /* lwa (lwau is undefined) */
3685 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
3686 } else {
3687 /* ld - ldu */
3688 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
3689 }
3690 if (Rc(ctx->opcode)) {
3691 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
3692 }
3693 tcg_temp_free(EA);
3694 }
3695
3696 /* lq */
3697 static void gen_lq(DisasContext *ctx)
3698 {
3699 int ra, rd;
3700 TCGv EA, hi, lo;
3701
3702 /* lq is a legal user mode instruction starting in ISA 2.07 */
3703 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3704 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3705
3706 if (!legal_in_user_mode && ctx->pr) {
3707 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3708 return;
3709 }
3710
3711 if (!le_is_supported && ctx->le_mode) {
3712 gen_align_no_le(ctx);
3713 return;
3714 }
3715 ra = rA(ctx->opcode);
3716 rd = rD(ctx->opcode);
3717 if (unlikely((rd & 1) || rd == ra)) {
3718 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3719 return;
3720 }
3721
3722 gen_set_access_type(ctx, ACCESS_INT);
3723 EA = tcg_temp_new();
3724 gen_addr_imm_index(ctx, EA, 0x0F);
3725
3726 /* Note that the low part is always in RD+1, even in LE mode. */
3727 lo = cpu_gpr[rd + 1];
3728 hi = cpu_gpr[rd];
3729
3730 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3731 if (HAVE_ATOMIC128) {
3732 TCGv_i32 oi = tcg_temp_new_i32();
3733 if (ctx->le_mode) {
3734 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx));
3735 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
3736 } else {
3737 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx));
3738 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
3739 }
3740 tcg_temp_free_i32(oi);
3741 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
3742 } else {
3743 /* Restart with exclusive lock. */
3744 gen_helper_exit_atomic(cpu_env);
3745 ctx->base.is_jmp = DISAS_NORETURN;
3746 }
3747 } else if (ctx->le_mode) {
3748 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ);
3749 gen_addr_add(ctx, EA, EA, 8);
3750 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ);
3751 } else {
3752 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ);
3753 gen_addr_add(ctx, EA, EA, 8);
3754 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ);
3755 }
3756 tcg_temp_free(EA);
3757 }
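/*
 * Layout note for the quadword path above: architecturally the low
 * doubleword always lands in GPR rd+1 and the high doubleword in rd,
 * whatever the endianness; in LE mode the lower-addressed doubleword in
 * memory is the low half, which is why the two 8-byte loads are issued in
 * the opposite order from the BE case.  Under CF_PARALLEL the access must
 * be a single 16-byte atomic operation, handled either by the
 * lq_{le,be}_parallel helpers (when 128-bit atomics are available) or by
 * restarting the TB with the exclusive lock via exit_atomic.
 */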
3758 #endif
3759
3760 /*** Integer store ***/
3761 #define GEN_ST(name, stop, opc, type) \
3762 static void glue(gen_, name)(DisasContext *ctx) \
3763 { \
3764 TCGv EA; \
3765 gen_set_access_type(ctx, ACCESS_INT); \
3766 EA = tcg_temp_new(); \
3767 gen_addr_imm_index(ctx, EA, 0); \
3768 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3769 tcg_temp_free(EA); \
3770 }
3771
3772 #define GEN_STU(name, stop, opc, type) \
3773 static void glue(gen_, stop##u)(DisasContext *ctx) \
3774 { \
3775 TCGv EA; \
3776 if (unlikely(rA(ctx->opcode) == 0)) { \
3777 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3778 return; \
3779 } \
3780 gen_set_access_type(ctx, ACCESS_INT); \
3781 EA = tcg_temp_new(); \
3782 if (type == PPC_64B) \
3783 gen_addr_imm_index(ctx, EA, 0x03); \
3784 else \
3785 gen_addr_imm_index(ctx, EA, 0); \
3786 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3787 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3788 tcg_temp_free(EA); \
3789 }
3790
3791 #define GEN_STUX(name, stop, opc2, opc3, type) \
3792 static void glue(gen_, name##ux)(DisasContext *ctx) \
3793 { \
3794 TCGv EA; \
3795 if (unlikely(rA(ctx->opcode) == 0)) { \
3796 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3797 return; \
3798 } \
3799 gen_set_access_type(ctx, ACCESS_INT); \
3800 EA = tcg_temp_new(); \
3801 gen_addr_reg_index(ctx, EA); \
3802 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3803 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3804 tcg_temp_free(EA); \
3805 }
3806
3807 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
3808 static void glue(gen_, name##x)(DisasContext *ctx) \
3809 { \
3810 TCGv EA; \
3811 chk; \
3812 gen_set_access_type(ctx, ACCESS_INT); \
3813 EA = tcg_temp_new(); \
3814 gen_addr_reg_index(ctx, EA); \
3815 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3816 tcg_temp_free(EA); \
3817 }
3818 #define GEN_STX(name, stop, opc2, opc3, type) \
3819 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3820
3821 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \
3822 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3823
3824 #define GEN_STS(name, stop, op, type) \
3825 GEN_ST(name, stop, op | 0x20, type); \
3826 GEN_STU(name, stop, op | 0x21, type); \
3827 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
3828 GEN_STX(name, stop, 0x17, op | 0x00, type)
3829
3830 /* stb stbu stbux stbx */
3831 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
3832 /* sth sthu sthux sthx */
3833 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
3834 /* stw stwu stwux stwx */
3835 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
3836
3837 #define GEN_STEPX(name, stop, opc2, opc3) \
3838 static void glue(gen_, name##epx)(DisasContext *ctx) \
3839 { \
3840 TCGv EA; \
3841 CHK_SV; \
3842 gen_set_access_type(ctx, ACCESS_INT); \
3843 EA = tcg_temp_new(); \
3844 gen_addr_reg_index(ctx, EA); \
3845 tcg_gen_qemu_st_tl( \
3846 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \
3847 tcg_temp_free(EA); \
3848 }
3849
3850 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3851 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3852 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3853 #if defined(TARGET_PPC64)
3854 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04)
3855 #endif
3856
3857 #if defined(TARGET_PPC64)
3858 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B);
3859 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B);
3860 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
3861 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
3862 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
3863 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
3864
3865 static void gen_std(DisasContext *ctx)
3866 {
3867 int rs;
3868 TCGv EA;
3869
3870 rs = rS(ctx->opcode);
3871 if ((ctx->opcode & 0x3) == 0x2) { /* stq */
3872 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3873 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3874 TCGv hi, lo;
3875
3876 if (!(ctx->insns_flags & PPC_64BX)) {
3877 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3878 }
3879
3880 if (!legal_in_user_mode && ctx->pr) {
3881 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3882 return;
3883 }
3884
3885 if (!le_is_supported && ctx->le_mode) {
3886 gen_align_no_le(ctx);
3887 return;
3888 }
3889
3890 if (unlikely(rs & 1)) {
3891 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3892 return;
3893 }
3894 gen_set_access_type(ctx, ACCESS_INT);
3895 EA = tcg_temp_new();
3896 gen_addr_imm_index(ctx, EA, 0x03);
3897
3898 /* Note that the low part is always in RS+1, even in LE mode. */
3899 lo = cpu_gpr[rs + 1];
3900 hi = cpu_gpr[rs];
3901
3902 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3903 if (HAVE_ATOMIC128) {
3904 TCGv_i32 oi = tcg_temp_new_i32();
3905 if (ctx->le_mode) {
3906 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx));
3907 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi);
3908 } else {
3909 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx));
3910 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi);
3911 }
3912 tcg_temp_free_i32(oi);
3913 } else {
3914 /* Restart with exclusive lock. */
3915 gen_helper_exit_atomic(cpu_env);
3916 ctx->base.is_jmp = DISAS_NORETURN;
3917 }
3918 } else if (ctx->le_mode) {
3919 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ);
3920 gen_addr_add(ctx, EA, EA, 8);
3921 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ);
3922 } else {
3923 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ);
3924 gen_addr_add(ctx, EA, EA, 8);
3925 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ);
3926 }
3927 tcg_temp_free(EA);
3928 } else {
3929 /* std / stdu */
3930 if (Rc(ctx->opcode)) {
3931 if (unlikely(rA(ctx->opcode) == 0)) {
3932 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3933 return;
3934 }
3935 }
3936 gen_set_access_type(ctx, ACCESS_INT);
3937 EA = tcg_temp_new();
3938 gen_addr_imm_index(ctx, EA, 0x03);
3939 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
3940 if (Rc(ctx->opcode)) {
3941 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
3942 }
3943 tcg_temp_free(EA);
3944 }
3945 }
3946 #endif
3947 /*** Integer load and store with byte reverse ***/
3948
3949 /* lhbrx */
3950 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3951
3952 /* lwbrx */
3953 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3954
3955 #if defined(TARGET_PPC64)
3956 /* ldbrx */
3957 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
3958 /* stdbrx */
3959 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
3960 #endif /* TARGET_PPC64 */
3961
3962 /* sthbrx */
3963 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3964 /* stwbrx */
3965 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3966
3967 /*** Integer load and store multiple ***/
3968
3969 /* lmw */
3970 static void gen_lmw(DisasContext *ctx)
3971 {
3972 TCGv t0;
3973 TCGv_i32 t1;
3974
3975 if (ctx->le_mode) {
3976 gen_align_no_le(ctx);
3977 return;
3978 }
3979 gen_set_access_type(ctx, ACCESS_INT);
3980 t0 = tcg_temp_new();
3981 t1 = tcg_const_i32(rD(ctx->opcode));
3982 gen_addr_imm_index(ctx, t0, 0);
3983 gen_helper_lmw(cpu_env, t0, t1);
3984 tcg_temp_free(t0);
3985 tcg_temp_free_i32(t1);
3986 }
3987
3988 /* stmw */
3989 static void gen_stmw(DisasContext *ctx)
3990 {
3991 TCGv t0;
3992 TCGv_i32 t1;
3993
3994 if (ctx->le_mode) {
3995 gen_align_no_le(ctx);
3996 return;
3997 }
3998 gen_set_access_type(ctx, ACCESS_INT);
3999 t0 = tcg_temp_new();
4000 t1 = tcg_const_i32(rS(ctx->opcode));
4001 gen_addr_imm_index(ctx, t0, 0);
4002 gen_helper_stmw(cpu_env, t0, t1);
4003 tcg_temp_free(t0);
4004 tcg_temp_free_i32(t1);
4005 }
4006
4007 /*** Integer load and store strings ***/
4008
4009 /* lswi */
4010 /*
4011 * The PowerPC32 specification says we must generate an exception if rA
4012 * is in the range of registers to be loaded. On the other hand, IBM
4013 * says this is valid, but rA won't be loaded. For now, follow the
4014 * spec...
4015 */
4016 static void gen_lswi(DisasContext *ctx)
4017 {
4018 TCGv t0;
4019 TCGv_i32 t1, t2;
4020 int nb = NB(ctx->opcode);
4021 int start = rD(ctx->opcode);
4022 int ra = rA(ctx->opcode);
4023 int nr;
4024
4025 if (ctx->le_mode) {
4026 gen_align_no_le(ctx);
4027 return;
4028 }
4029 if (nb == 0) {
4030 nb = 32;
4031 }
4032 nr = DIV_ROUND_UP(nb, 4);
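    /* nr is the number of GPRs touched, e.g. nb = 9 bytes -> nr = 3. */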
4033 if (unlikely(lsw_reg_in_range(start, nr, ra))) {
4034 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
4035 return;
4036 }
4037 gen_set_access_type(ctx, ACCESS_INT);
4038 t0 = tcg_temp_new();
4039 gen_addr_register(ctx, t0);
4040 t1 = tcg_const_i32(nb);
4041 t2 = tcg_const_i32(start);
4042 gen_helper_lsw(cpu_env, t0, t1, t2);
4043 tcg_temp_free(t0);
4044 tcg_temp_free_i32(t1);
4045 tcg_temp_free_i32(t2);
4046 }
4047
4048 /* lswx */
4049 static void gen_lswx(DisasContext *ctx)
4050 {
4051 TCGv t0;
4052 TCGv_i32 t1, t2, t3;
4053
4054 if (ctx->le_mode) {
4055 gen_align_no_le(ctx);
4056 return;
4057 }
4058 gen_set_access_type(ctx, ACCESS_INT);
4059 t0 = tcg_temp_new();
4060 gen_addr_reg_index(ctx, t0);
4061 t1 = tcg_const_i32(rD(ctx->opcode));
4062 t2 = tcg_const_i32(rA(ctx->opcode));
4063 t3 = tcg_const_i32(rB(ctx->opcode));
4064 gen_helper_lswx(cpu_env, t0, t1, t2, t3);
4065 tcg_temp_free(t0);
4066 tcg_temp_free_i32(t1);
4067 tcg_temp_free_i32(t2);
4068 tcg_temp_free_i32(t3);
4069 }
4070
4071 /* stswi */
4072 static void gen_stswi(DisasContext *ctx)
4073 {
4074 TCGv t0;
4075 TCGv_i32 t1, t2;
4076 int nb = NB(ctx->opcode);
4077
4078 if (ctx->le_mode) {
4079 gen_align_no_le(ctx);
4080 return;
4081 }
4082 gen_set_access_type(ctx, ACCESS_INT);
4083 t0 = tcg_temp_new();
4084 gen_addr_register(ctx, t0);
4085 if (nb == 0) {
4086 nb = 32;
4087 }
4088 t1 = tcg_const_i32(nb);
4089 t2 = tcg_const_i32(rS(ctx->opcode));
4090 gen_helper_stsw(cpu_env, t0, t1, t2);
4091 tcg_temp_free(t0);
4092 tcg_temp_free_i32(t1);
4093 tcg_temp_free_i32(t2);
4094 }
4095
4096 /* stswx */
4097 static void gen_stswx(DisasContext *ctx)
4098 {
4099 TCGv t0;
4100 TCGv_i32 t1, t2;
4101
4102 if (ctx->le_mode) {
4103 gen_align_no_le(ctx);
4104 return;
4105 }
4106 gen_set_access_type(ctx, ACCESS_INT);
4107 t0 = tcg_temp_new();
4108 gen_addr_reg_index(ctx, t0);
4109 t1 = tcg_temp_new_i32();
4110 tcg_gen_trunc_tl_i32(t1, cpu_xer);
4111 tcg_gen_andi_i32(t1, t1, 0x7F);
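    /* The transfer byte count for stswx comes from the low 7 bits of XER. */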
4112 t2 = tcg_const_i32(rS(ctx->opcode));
4113 gen_helper_stsw(cpu_env, t0, t1, t2);
4114 tcg_temp_free(t0);
4115 tcg_temp_free_i32(t1);
4116 tcg_temp_free_i32(t2);
4117 }
4118
4119 /*** Memory synchronisation ***/
4120 /* eieio */
4121 static void gen_eieio(DisasContext *ctx)
4122 {
4123 TCGBar bar = TCG_MO_LD_ST;
4124
4125 /*
4126 * POWER9 has an eieio instruction variant using bit 6 as a hint to
4127 * tell the CPU it is a store-forwarding barrier.
4128 */
4129 if (ctx->opcode & 0x2000000) {
4130 /*
4131 * The ISA says that "Reserved fields in instructions are ignored
4132 * by the processor". So ignore bit 6 on non-POWER9 CPUs, but
4133 * since this is not an instruction software should be using,
4134 * complain to the user.
4135 */
4136 if (!(ctx->insns_flags2 & PPC2_ISA300)) {
4137 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
4138 TARGET_FMT_lx "\n", ctx->cia);
4139 } else {
4140 bar = TCG_MO_ST_LD;
4141 }
4142 }
4143
4144 tcg_gen_mb(bar | TCG_BAR_SC);
4145 }
4146
4147 #if !defined(CONFIG_USER_ONLY)
4148 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
4149 {
4150 TCGv_i32 t;
4151 TCGLabel *l;
4152
4153 if (!ctx->lazy_tlb_flush) {
4154 return;
4155 }
4156 l = gen_new_label();
4157 t = tcg_temp_new_i32();
4158 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
4159 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
4160 if (global) {
4161 gen_helper_check_tlb_flush_global(cpu_env);
4162 } else {
4163 gen_helper_check_tlb_flush_local(cpu_env);
4164 }
4165 gen_set_label(l);
4166 tcg_temp_free_i32(t);
4167 }
4168 #else
4169 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
4170 #endif
4171
4172 /* isync */
4173 static void gen_isync(DisasContext *ctx)
4174 {
4175 /*
4176 * We need to check for a pending TLB flush. This can only happen
4177 * in kernel mode, however, so check MSR_PR.
4178 */
4179 if (!ctx->pr) {
4180 gen_check_tlb_flush(ctx, false);
4181 }
4182 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
4183 gen_stop_exception(ctx);
4184 }
4185
4186 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE))
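/* Access size in bytes for a MemOp, e.g. MO_UB -> 1, MO_UW -> 2, MO_UL -> 4, MO_Q -> 8. */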
4187
4188 static void gen_load_locked(DisasContext *ctx, MemOp memop)
4189 {
4190 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
4191 TCGv t0 = tcg_temp_new();
4192
4193 gen_set_access_type(ctx, ACCESS_RES);
4194 gen_addr_reg_index(ctx, t0);
4195 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
4196 tcg_gen_mov_tl(cpu_reserve, t0);
4197 tcg_gen_mov_tl(cpu_reserve_val, gpr);
4198 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
4199 tcg_temp_free(t0);
4200 }
4201
4202 #define LARX(name, memop) \
4203 static void gen_##name(DisasContext *ctx) \
4204 { \
4205 gen_load_locked(ctx, memop); \
4206 }
4207
4208 /* lbarx lharx lwarx */
4209 LARX(lbarx, DEF_MEMOP(MO_UB))
4210 LARX(lharx, DEF_MEMOP(MO_UW))
4211 LARX(lwarx, DEF_MEMOP(MO_UL))
4212
4213 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
4214 TCGv EA, TCGCond cond, int addend)
4215 {
4216 TCGv t = tcg_temp_new();
4217 TCGv t2 = tcg_temp_new();
4218 TCGv u = tcg_temp_new();
4219
4220 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
4221 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
4222 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
4223 tcg_gen_addi_tl(u, t, addend);
4224
4225 /* E.g. for fetch and increment bounded... */
4226 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
4227 tcg_gen_movcond_tl(cond, u, t, t2, u, t);
4228 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
4229
4230 /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
4231 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
4232 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);
4233
4234 tcg_temp_free(t);
4235 tcg_temp_free(t2);
4236 tcg_temp_free(u);
4237 }
4238
4239 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
4240 {
4241 uint32_t gpr_FC = FC(ctx->opcode);
4242 TCGv EA = tcg_temp_new();
4243 int rt = rD(ctx->opcode);
4244 bool need_serial;
4245 TCGv src, dst;
4246
4247 gen_addr_register(ctx, EA);
4248 dst = cpu_gpr[rt];
4249 src = cpu_gpr[(rt + 1) & 31];
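    /*
     * For these atomics RT receives the old memory value and RT+1 supplies
     * the operand; the compare-and-swap-not-equal case below additionally
     * uses RT+2 as the replacement value.
     */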
4250
4251 need_serial = false;
4252 memop |= MO_ALIGN;
4253 switch (gpr_FC) {
4254 case 0: /* Fetch and add */
4255 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
4256 break;
4257 case 1: /* Fetch and xor */
4258 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
4259 break;
4260 case 2: /* Fetch and or */
4261 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
4262 break;
4263 case 3: /* Fetch and 'and' */
4264 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
4265 break;
4266 case 4: /* Fetch and max unsigned */
4267 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
4268 break;
4269 case 5: /* Fetch and max signed */
4270 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
4271 break;
4272 case 6: /* Fetch and min unsigned */
4273 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
4274 break;
4275 case 7: /* Fetch and min signed */
4276 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
4277 break;
4278 case 8: /* Swap */
4279 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
4280 break;
4281
4282 case 16: /* Compare and swap not equal */
4283 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4284 need_serial = true;
4285 } else {
4286 TCGv t0 = tcg_temp_new();
4287 TCGv t1 = tcg_temp_new();
4288
4289 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
4290 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
4291 tcg_gen_mov_tl(t1, src);
4292 } else {
4293 tcg_gen_ext32u_tl(t1, src);
4294 }
4295 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
4296 cpu_gpr[(rt + 2) & 31], t0);
4297 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
4298 tcg_gen_mov_tl(dst, t0);
4299
4300 tcg_temp_free(t0);
4301 tcg_temp_free(t1);
4302 }
4303 break;
4304
4305 case 24: /* Fetch and increment bounded */
4306 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4307 need_serial = true;
4308 } else {
4309 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
4310 }
4311 break;
4312 case 25: /* Fetch and increment equal */
4313 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4314 need_serial = true;
4315 } else {
4316 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
4317 }
4318 break;
4319 case 28: /* Fetch and decrement bounded */
4320 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4321 need_serial = true;
4322 } else {
4323 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
4324 }
4325 break;
4326
4327 default:
4328 /* invoke data storage error handler */
4329 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
4330 }
4331 tcg_temp_free(EA);
4332
4333 if (need_serial) {
4334 /* Restart with exclusive lock. */
4335 gen_helper_exit_atomic(cpu_env);
4336 ctx->base.is_jmp = DISAS_NORETURN;
4337 }
4338 }
4339
4340 static void gen_lwat(DisasContext *ctx)
4341 {
4342 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
4343 }
4344
4345 #ifdef TARGET_PPC64
4346 static void gen_ldat(DisasContext *ctx)
4347 {
4348 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q));
4349 }
4350 #endif
4351
4352 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
4353 {
4354 uint32_t gpr_FC = FC(ctx->opcode);
4355 TCGv EA = tcg_temp_new();
4356 TCGv src, discard;
4357
4358 gen_addr_register(ctx, EA);
4359 src = cpu_gpr[rD(ctx->opcode)];
4360 discard = tcg_temp_new();
4361
4362 memop |= MO_ALIGN;
4363 switch (gpr_FC) {
4364 case 0: /* add and Store */
4365 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4366 break;
4367 case 1: /* xor and Store */
4368 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4369 break;
4370 case 2: /* Or and Store */
4371 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4372 break;
4373 case 3: /* 'and' and Store */
4374 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4375 break;
4376 case 4: /* Store max unsigned */
4377 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4378 break;
4379 case 5: /* Store max signed */
4380 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4381 break;
4382 case 6: /* Store min unsigned */
4383 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4384 break;
4385 case 7: /* Store min signed */
4386 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4387 break;
4388 case 24: /* Store twin */
4389 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4390 /* Restart with exclusive lock. */
4391 gen_helper_exit_atomic(cpu_env);
4392 ctx->base.is_jmp = DISAS_NORETURN;
4393 } else {
4394 TCGv t = tcg_temp_new();
4395 TCGv t2 = tcg_temp_new();
4396 TCGv s = tcg_temp_new();
4397 TCGv s2 = tcg_temp_new();
4398 TCGv ea_plus_s = tcg_temp_new();
4399
4400 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
4401 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
4402 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
4403 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
4404 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
4405 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
4406 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
4407
4408 tcg_temp_free(ea_plus_s);
4409 tcg_temp_free(s2);
4410 tcg_temp_free(s);
4411 tcg_temp_free(t2);
4412 tcg_temp_free(t);
4413 }
4414 break;
4415 default:
4416 /* invoke data storage error handler */
4417 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
4418 }
4419 tcg_temp_free(discard);
4420 tcg_temp_free(EA);
4421 }
4422
4423 static void gen_stwat(DisasContext *ctx)
4424 {
4425 gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
4426 }
4427
4428 #ifdef TARGET_PPC64
4429 static void gen_stdat(DisasContext *ctx)
4430 {
4431 gen_st_atomic(ctx, DEF_MEMOP(MO_Q));
4432 }
4433 #endif
4434
4435 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
4436 {
4437 TCGLabel *l1 = gen_new_label();
4438 TCGLabel *l2 = gen_new_label();
4439 TCGv t0 = tcg_temp_new();
4440 int reg = rS(ctx->opcode);
4441
4442 gen_set_access_type(ctx, ACCESS_RES);
4443 gen_addr_reg_index(ctx, t0);
4444 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
4445 tcg_temp_free(t0);
4446
4447 t0 = tcg_temp_new();
4448 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
4449 cpu_gpr[reg], ctx->mem_idx,
4450 DEF_MEMOP(memop) | MO_ALIGN);
4451 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
4452 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
4453 tcg_gen_or_tl(t0, t0, cpu_so);
4454 tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
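    /* CR0 := LT:GT:EQ:SO = 0:0:(store succeeded):SO. */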
4455 tcg_temp_free(t0);
4456 tcg_gen_br(l2);
4457
4458 gen_set_label(l1);
4459
4460 /*
4461 * Address mismatch implies failure. But we still need to provide
4462 * the memory barrier semantics of the instruction.
4463 */
4464 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
4465 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4466
4467 gen_set_label(l2);
4468 tcg_gen_movi_tl(cpu_reserve, -1);
4469 }
4470
4471 #define STCX(name, memop) \
4472 static void gen_##name(DisasContext *ctx) \
4473 { \
4474 gen_conditional_store(ctx, memop); \
4475 }
4476
4477 STCX(stbcx_, DEF_MEMOP(MO_UB))
4478 STCX(sthcx_, DEF_MEMOP(MO_UW))
4479 STCX(stwcx_, DEF_MEMOP(MO_UL))
4480
4481 #if defined(TARGET_PPC64)
4482 /* ldarx */
4483 LARX(ldarx, DEF_MEMOP(MO_Q))
4484 /* stdcx. */
4485 STCX(stdcx_, DEF_MEMOP(MO_Q))
4486
4487 /* lqarx */
4488 static void gen_lqarx(DisasContext *ctx)
4489 {
4490 int rd = rD(ctx->opcode);
4491 TCGv EA, hi, lo;
4492
4493 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
4494 (rd == rB(ctx->opcode)))) {
4495 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4496 return;
4497 }
4498
4499 gen_set_access_type(ctx, ACCESS_RES);
4500 EA = tcg_temp_new();
4501 gen_addr_reg_index(ctx, EA);
4502
4503 /* Note that the low part is always in RD+1, even in LE mode. */
4504 lo = cpu_gpr[rd + 1];
4505 hi = cpu_gpr[rd];
4506
4507 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4508 if (HAVE_ATOMIC128) {
4509 TCGv_i32 oi = tcg_temp_new_i32();
4510 if (ctx->le_mode) {
4511 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16,
4512 ctx->mem_idx));
4513 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
4514 } else {
4515 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16,
4516 ctx->mem_idx));
4517 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
4518 }
4519 tcg_temp_free_i32(oi);
4520 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
4521 } else {
4522 /* Restart with exclusive lock. */
4523 gen_helper_exit_atomic(cpu_env);
4524 ctx->base.is_jmp = DISAS_NORETURN;
4525 tcg_temp_free(EA);
4526 return;
4527 }
4528 } else if (ctx->le_mode) {
4529 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16);
4530 tcg_gen_mov_tl(cpu_reserve, EA);
4531 gen_addr_add(ctx, EA, EA, 8);
4532 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ);
4533 } else {
4534 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16);
4535 tcg_gen_mov_tl(cpu_reserve, EA);
4536 gen_addr_add(ctx, EA, EA, 8);
4537 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ);
4538 }
4539 tcg_temp_free(EA);
4540
4541 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
4542 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
4543 }
4544
4545 /* stqcx. */
4546 static void gen_stqcx_(DisasContext *ctx)
4547 {
4548 int rs = rS(ctx->opcode);
4549 TCGv EA, hi, lo;
4550
4551 if (unlikely(rs & 1)) {
4552 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4553 return;
4554 }
4555
4556 gen_set_access_type(ctx, ACCESS_RES);
4557 EA = tcg_temp_new();
4558 gen_addr_reg_index(ctx, EA);
4559
4560 /* Note that the low part is always in RS+1, even in LE mode. */
4561 lo = cpu_gpr[rs + 1];
4562 hi = cpu_gpr[rs];
4563
4564 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4565 if (HAVE_CMPXCHG128) {
4566 TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16);
4567 if (ctx->le_mode) {
4568 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
4569 EA, lo, hi, oi);
4570 } else {
4571 gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
4572 EA, lo, hi, oi);
4573 }
4574 tcg_temp_free_i32(oi);
4575 } else {
4576 /* Restart with exclusive lock. */
4577 gen_helper_exit_atomic(cpu_env);
4578 ctx->base.is_jmp = DISAS_NORETURN;
4579 }
4580 tcg_temp_free(EA);
4581 } else {
4582 TCGLabel *lab_fail = gen_new_label();
4583 TCGLabel *lab_over = gen_new_label();
4584 TCGv_i64 t0 = tcg_temp_new_i64();
4585 TCGv_i64 t1 = tcg_temp_new_i64();
4586
4587 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
4588 tcg_temp_free(EA);
4589
4590 gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
4591 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
4592 ? offsetof(CPUPPCState, reserve_val2)
4593 : offsetof(CPUPPCState, reserve_val)));
4594 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);
4595
4596 tcg_gen_addi_i64(t0, cpu_reserve, 8);
4597 gen_qemu_ld64_i64(ctx, t0, t0);
4598 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
4599 ? offsetof(CPUPPCState, reserve_val)
4600 : offsetof(CPUPPCState, reserve_val2)));
4601 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);
4602
4603 /* Success */
4604 gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
4605 tcg_gen_addi_i64(t0, cpu_reserve, 8);
4606 gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);
4607
4608 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4609 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
4610 tcg_gen_br(lab_over);
4611
4612 gen_set_label(lab_fail);
4613 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4614
4615 gen_set_label(lab_over);
4616 tcg_gen_movi_tl(cpu_reserve, -1);
4617 tcg_temp_free_i64(t0);
4618 tcg_temp_free_i64(t1);
4619 }
4620 }
4621 #endif /* defined(TARGET_PPC64) */
4622
4623 /* sync */
4624 static void gen_sync(DisasContext *ctx)
4625 {
4626 uint32_t l = (ctx->opcode >> 21) & 3;
4627
4628 /*
4629 * We may need to check for a pending TLB flush.
4630 *
4631 * We do this on ptesync (l == 2) on ppc64 and on any sync on ppc32.
4632 *
4633 * Additionally, this can only happen in kernel mode, so check
4634 * MSR_PR as well.
4635 */
4636 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
4637 gen_check_tlb_flush(ctx, true);
4638 }
4639 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
4640 }
4641
4642 /* wait */
4643 static void gen_wait(DisasContext *ctx)
4644 {
4645 TCGv_i32 t0 = tcg_const_i32(1);
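    /*
     * cpu_env points at the CPUPPCState embedded in the PowerPCCPU, so the
     * negative offset walks back to the containing CPU object in order to
     * store into CPUState.halted.
     */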
4646 tcg_gen_st_i32(t0, cpu_env,
4647 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
4648 tcg_temp_free_i32(t0);
4649 /* Stop translation, as the CPU is supposed to sleep from now */
4650 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4651 }
4652
4653 #if defined(TARGET_PPC64)
4654 static void gen_doze(DisasContext *ctx)
4655 {
4656 #if defined(CONFIG_USER_ONLY)
4657 GEN_PRIV;
4658 #else
4659 TCGv_i32 t;
4660
4661 CHK_HV;
4662 t = tcg_const_i32(PPC_PM_DOZE);
4663 gen_helper_pminsn(cpu_env, t);
4664 tcg_temp_free_i32(t);
4665 /* Stop translation, as the CPU is supposed to sleep from now */
4666 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4667 #endif /* defined(CONFIG_USER_ONLY) */
4668 }
4669
4670 static void gen_nap(DisasContext *ctx)
4671 {
4672 #if defined(CONFIG_USER_ONLY)
4673 GEN_PRIV;
4674 #else
4675 TCGv_i32 t;
4676
4677 CHK_HV;
4678 t = tcg_const_i32(PPC_PM_NAP);
4679 gen_helper_pminsn(cpu_env, t);
4680 tcg_temp_free_i32(t);
4681 /* Stop translation, as the CPU is supposed to sleep from now */
4682 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4683 #endif /* defined(CONFIG_USER_ONLY) */
4684 }
4685
4686 static void gen_stop(DisasContext *ctx)
4687 {
4688 #if defined(CONFIG_USER_ONLY)
4689 GEN_PRIV;
4690 #else
4691 TCGv_i32 t;
4692
4693 CHK_HV;
4694 t = tcg_const_i32(PPC_PM_STOP);
4695 gen_helper_pminsn(cpu_env, t);
4696 tcg_temp_free_i32(t);
4697 /* Stop translation, as the CPU is supposed to sleep from now */
4698 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4699 #endif /* defined(CONFIG_USER_ONLY) */
4700 }
4701
4702 static void gen_sleep(DisasContext *ctx)
4703 {
4704 #if defined(CONFIG_USER_ONLY)
4705 GEN_PRIV;
4706 #else
4707 TCGv_i32 t;
4708
4709 CHK_HV;
4710 t = tcg_const_i32(PPC_PM_SLEEP);
4711 gen_helper_pminsn(cpu_env, t);
4712 tcg_temp_free_i32(t);
4713 /* Stop translation, as the CPU is supposed to sleep from now */
4714 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4715 #endif /* defined(CONFIG_USER_ONLY) */
4716 }
4717
4718 static void gen_rvwinkle(DisasContext *ctx)
4719 {
4720 #if defined(CONFIG_USER_ONLY)
4721 GEN_PRIV;
4722 #else
4723 TCGv_i32 t;
4724
4725 CHK_HV;
4726 t = tcg_const_i32(PPC_PM_RVWINKLE);
4727 gen_helper_pminsn(cpu_env, t);
4728 tcg_temp_free_i32(t);
4729 /* Stop translation, as the CPU is supposed to sleep from now */
4730 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4731 #endif /* defined(CONFIG_USER_ONLY) */
4732 }
4733 #endif /* #if defined(TARGET_PPC64) */
4734
4735 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
4736 {
4737 #if defined(TARGET_PPC64)
4738 if (ctx->has_cfar) {
4739 tcg_gen_movi_tl(cpu_cfar, nip);
4740 }
4741 #endif
4742 }
4743
4744 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
4745 {
4746 if (unlikely(ctx->singlestep_enabled)) {
4747 return false;
4748 }
4749
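    /* In system mode, direct TB chaining is only safe when the destination
       stays on the same guest page. */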
4750 #ifndef CONFIG_USER_ONLY
4751 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
4752 #else
4753 return true;
4754 #endif
4755 }
4756
4757 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4758 {
4759 int sse = ctx->singlestep_enabled;
4760 if (unlikely(sse)) {
4761 if (sse & GDBSTUB_SINGLE_STEP) {
4762 gen_debug_exception(ctx);
4763 } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) {
4764 uint32_t excp = gen_prep_dbgex(ctx);
4765 gen_exception(ctx, excp);
4766 }
4767 tcg_gen_exit_tb(NULL, 0);
4768 } else {
4769 tcg_gen_lookup_and_goto_ptr();
4770 }
4771 }
4772
4773 /*** Branch ***/
4774 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
4775 {
4776 if (NARROW_MODE(ctx)) {
4777 dest = (uint32_t) dest;
4778 }
4779 if (use_goto_tb(ctx, dest)) {
4780 tcg_gen_goto_tb(n);
4781 tcg_gen_movi_tl(cpu_nip, dest & ~3);
4782 tcg_gen_exit_tb(ctx->base.tb, n);
4783 } else {
4784 tcg_gen_movi_tl(cpu_nip, dest & ~3);
4785 gen_lookup_and_goto_ptr(ctx);
4786 }
4787 }
4788
4789 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4790 {
4791 if (NARROW_MODE(ctx)) {
4792 nip = (uint32_t)nip;
4793 }
4794 tcg_gen_movi_tl(cpu_lr, nip);
4795 }
4796
4797 /* b ba bl bla */
4798 static void gen_b(DisasContext *ctx)
4799 {
4800 target_ulong li, target;
4801
4802 ctx->exception = POWERPC_EXCP_BRANCH;
4803 /* sign extend LI */
4804 li = LI(ctx->opcode);
4805 li = (li ^ 0x02000000) - 0x02000000;
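    /*
     * The xor/subtract pair sign-extends the 26-bit LI field, e.g.
     * li = 0x02000004 becomes 0x4 after the xor and
     * 0x4 - 0x02000000 = -0x01fffffc after the subtraction.
     */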
4806 if (likely(AA(ctx->opcode) == 0)) {
4807 target = ctx->cia + li;
4808 } else {
4809 target = li;
4810 }
4811 if (LK(ctx->opcode)) {
4812 gen_setlr(ctx, ctx->base.pc_next);
4813 }
4814 gen_update_cfar(ctx, ctx->cia);
4815 gen_goto_tb(ctx, 0, target);
4816 }
4817
4818 #define BCOND_IM 0
4819 #define BCOND_LR 1
4820 #define BCOND_CTR 2
4821 #define BCOND_TAR 3
4822
4823 static void gen_bcond(DisasContext *ctx, int type)
4824 {
4825 uint32_t bo = BO(ctx->opcode);
4826 TCGLabel *l1;
4827 TCGv target;
4828 ctx->exception = POWERPC_EXCP_BRANCH;
4829
4830 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
4831 target = tcg_temp_local_new();
4832 if (type == BCOND_CTR) {
4833 tcg_gen_mov_tl(target, cpu_ctr);
4834 } else if (type == BCOND_TAR) {
4835 gen_load_spr(target, SPR_TAR);
4836 } else {
4837 tcg_gen_mov_tl(target, cpu_lr);
4838 }
4839 } else {
4840 target = NULL;
4841 }
4842 if (LK(ctx->opcode)) {
4843 gen_setlr(ctx, ctx->base.pc_next);
4844 }
4845 l1 = gen_new_label();
4846 if ((bo & 0x4) == 0) {
4847 /* Decrement and test CTR */
4848 TCGv temp = tcg_temp_new();
4849
4850 if (type == BCOND_CTR) {
4851 /*
4852 * All ISAs up to v3 describe this form of bcctr as invalid, but
4853 * some processors, i.e. 64-bit server processors compliant with
4854 * arch 2.x, implement a "test and decrement" logic instead, as
4855 * described in their respective UMs. This logic requires CTR to
4856 * act as both the branch target and a counter, which makes it
4857 * basically useless and thus never used in real code.
4858 *
4859 * This form was hence chosen to trigger the extra micro-architectural
4860 * side-effect on real HW needed for the Spectre v2 workaround.
4861 * It is up to guests that implement such a workaround, i.e. Linux,
4862 * to use this form in a way that just triggers the side-effect
4863 * without doing anything else harmful.
4864 */
4865 if (unlikely(!is_book3s_arch2x(ctx))) {
4866 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4867 tcg_temp_free(temp);
4868 tcg_temp_free(target);
4869 return;
4870 }
4871
4872 if (NARROW_MODE(ctx)) {
4873 tcg_gen_ext32u_tl(temp, cpu_ctr);
4874 } else {
4875 tcg_gen_mov_tl(temp, cpu_ctr);
4876 }
4877 if (bo & 0x2) {
4878 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4879 } else {
4880 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4881 }
4882 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4883 } else {
4884 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4885 if (NARROW_MODE(ctx)) {
4886 tcg_gen_ext32u_tl(temp, cpu_ctr);
4887 } else {
4888 tcg_gen_mov_tl(temp, cpu_ctr);
4889 }
4890 if (bo & 0x2) {
4891 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4892 } else {
4893 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4894 }
4895 }
4896 tcg_temp_free(temp);
4897 }
4898 if ((bo & 0x10) == 0) {
4899 /* Test CR */
4900 uint32_t bi = BI(ctx->opcode);
4901 uint32_t mask = 0x08 >> (bi & 0x03);
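        /* e.g. bi = 2 selects CR0[EQ], giving mask = 0x08 >> 2 = 0x02. */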
4902 TCGv_i32 temp = tcg_temp_new_i32();
4903
4904 if (bo & 0x8) {
4905 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4906 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
4907 } else {
4908 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4909 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
4910 }
4911 tcg_temp_free_i32(temp);
4912 }
4913 gen_update_cfar(ctx, ctx->cia);
4914 if (type == BCOND_IM) {
4915 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
4916 if (likely(AA(ctx->opcode) == 0)) {
4917 gen_goto_tb(ctx, 0, ctx->cia + li);
4918 } else {
4919 gen_goto_tb(ctx, 0, li);
4920 }
4921 } else {
4922 if (NARROW_MODE(ctx)) {
4923 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
4924 } else {
4925 tcg_gen_andi_tl(cpu_nip, target, ~3);
4926 }
4927 gen_lookup_and_goto_ptr(ctx);
4928 tcg_temp_free(target);
4929 }
4930 if ((bo & 0x14) != 0x14) {
4931 /* fallthrough case */
4932 gen_set_label(l1);
4933 gen_goto_tb(ctx, 1, ctx->base.pc_next);
4934 }
4935 }
4936
4937 static void gen_bc(DisasContext *ctx)
4938 {
4939 gen_bcond(ctx, BCOND_IM);
4940 }
4941
4942 static void gen_bcctr(DisasContext *ctx)
4943 {
4944 gen_bcond(ctx, BCOND_CTR);
4945 }
4946
4947 static void gen_bclr(DisasContext *ctx)
4948 {
4949 gen_bcond(ctx, BCOND_LR);
4950 }
4951
4952 static void gen_bctar(DisasContext *ctx)
4953 {
4954 gen_bcond(ctx, BCOND_TAR);
4955 }
4956
4957 /*** Condition register logical ***/
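/*
 * The CR logical ops below operate on individual CR bits.  Each source
 * field is shifted so that the source bit lines up with the destination
 * bit position before the logical op is applied, e.g. with crbD = 2
 * (CR0[EQ]) and crbA = 0 (CR0[LT]) the source field is shifted right by
 * 2 so that the LT bit lands in the EQ position.
 */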
4958 #define GEN_CRLOGIC(name, tcg_op, opc) \
4959 static void glue(gen_, name)(DisasContext *ctx) \
4960 { \
4961 uint8_t bitmask; \
4962 int sh; \
4963 TCGv_i32 t0, t1; \
4964 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
4965 t0 = tcg_temp_new_i32(); \
4966 if (sh > 0) \
4967 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
4968 else if (sh < 0) \
4969 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
4970 else \
4971 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
4972 t1 = tcg_temp_new_i32(); \
4973 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
4974 if (sh > 0) \
4975 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
4976 else if (sh < 0) \
4977 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
4978 else \
4979 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
4980 tcg_op(t0, t0, t1); \
4981 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \
4982 tcg_gen_andi_i32(t0, t0, bitmask); \
4983 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
4984 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
4985 tcg_temp_free_i32(t0); \
4986 tcg_temp_free_i32(t1); \
4987 }
4988
4989 /* crand */
4990 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
4991 /* crandc */
4992 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
4993 /* creqv */
4994 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
4995 /* crnand */
4996 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
4997 /* crnor */
4998 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
4999 /* cror */
5000 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
5001 /* crorc */
5002 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
5003 /* crxor */
5004 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
5005
5006 /* mcrf */
5007 static void gen_mcrf(DisasContext *ctx)
5008 {
5009 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
5010 }
5011
5012 /*** System linkage ***/
5013
5014 /* rfi (supervisor only) */
5015 static void gen_rfi(DisasContext *ctx)
5016 {
5017 #if defined(CONFIG_USER_ONLY)
5018 GEN_PRIV;
5019 #else
5020 /*
5021 * This instruction doesn't exist anymore on 64-bit server
5022 * processors compliant with arch 2.x
5023 */
5024 if (is_book3s_arch2x(ctx)) {
5025 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5026 return;
5027 }
5028 /* Restore CPU state */
5029 CHK_SV;
5030 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
5031 gen_io_start();
5032 }
5033 gen_update_cfar(ctx, ctx->cia);
5034 gen_helper_rfi(cpu_env);
5035 gen_sync_exception(ctx);
5036 #endif
5037 }
5038
5039 #if defined(TARGET_PPC64)
5040 static void gen_rfid(DisasContext *ctx)
5041 {
5042 #if defined(CONFIG_USER_ONLY)
5043 GEN_PRIV;
5044 #else
5045 /* Restore CPU state */
5046 CHK_SV;
5047 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
5048 gen_io_start();
5049 }
5050 gen_update_cfar(ctx, ctx->cia);
5051 gen_helper_rfid(cpu_env);
5052 gen_sync_exception(ctx);
5053 #endif
5054 }
5055
5056 #if !defined(CONFIG_USER_ONLY)
5057 static void gen_rfscv(DisasContext *ctx)
5058 {
5059 #if defined(CONFIG_USER_ONLY)
5060 GEN_PRIV;
5061 #else
5062 /* Restore CPU state */
5063 CHK_SV;
5064 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
5065 gen_io_start();
5066 }
5067 gen_update_cfar(ctx, ctx->cia);
5068 gen_helper_rfscv(cpu_env);
5069 gen_sync_exception(ctx);
5070 #endif
5071 }
5072 #endif
5073
5074 static void gen_hrfid(DisasContext *ctx)
5075 {
5076 #if defined(CONFIG_USER_ONLY)
5077 GEN_PRIV;
5078 #else
5079 /* Restore CPU state */
5080 CHK_HV;
5081 gen_helper_hrfid(cpu_env);
5082 gen_sync_exception(ctx);
5083 #endif
5084 }
5085 #endif
5086
5087 /* sc */
5088 #if defined(CONFIG_USER_ONLY)
5089 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
5090 #else
5091 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
5092 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
5093 #endif
5094 static void gen_sc(DisasContext *ctx)
5095 {
5096 uint32_t lev;
5097
5098 lev = (ctx->opcode >> 5) & 0x7F;
5099 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
5100 }
5101
5102 #if defined(TARGET_PPC64)
5103 #if !defined(CONFIG_USER_ONLY)
5104 static void gen_scv(DisasContext *ctx)
5105 {
5106 uint32_t lev = (ctx->opcode >> 5) & 0x7F;
5107
5108 /* Set the PC back to the faulting instruction. */
5109 if (ctx->exception == POWERPC_EXCP_NONE) {
5110 gen_update_nip(ctx, ctx->cia);
5111 }
5112 gen_helper_scv(cpu_env, tcg_constant_i32(lev));
5113
5114 /* This need not be exact, just not POWERPC_EXCP_NONE */
5115 ctx->exception = POWERPC_SYSCALL_VECTORED;
5116 }
5117 #endif
5118 #endif
5119
5120 /*** Trap ***/
5121
5122 /* Check for unconditional traps (always or never) */
5123 static bool check_unconditional_trap(DisasContext *ctx)
5124 {
5125 /* Trap never */
5126 if (TO(ctx->opcode) == 0) {
5127 return true;
5128 }
5129 /* Trap always */
5130 if (TO(ctx->opcode) == 31) {
5131 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
5132 return true;
5133 }
5134 return false;
5135 }
5136
5137 /* tw */
5138 static void gen_tw(DisasContext *ctx)
5139 {
5140 TCGv_i32 t0;
5141
5142 if (check_unconditional_trap(ctx)) {
5143 return;
5144 }
5145 t0 = tcg_const_i32(TO(ctx->opcode));
5146 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
5147 t0);
5148 tcg_temp_free_i32(t0);
5149 }
5150
5151 /* twi */
5152 static void gen_twi(DisasContext *ctx)
5153 {
5154 TCGv t0;
5155 TCGv_i32 t1;
5156
5157 if (check_unconditional_trap(ctx)) {
5158 return;
5159 }
5160 t0 = tcg_const_tl(SIMM(ctx->opcode));
5161 t1 = tcg_const_i32(TO(ctx->opcode));
5162 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
5163 tcg_temp_free(t0);
5164 tcg_temp_free_i32(t1);
5165 }
5166
5167 #if defined(TARGET_PPC64)
5168 /* td */
5169 static void gen_td(DisasContext *ctx)
5170 {
5171 TCGv_i32 t0;
5172
5173 if (check_unconditional_trap(ctx)) {
5174 return;
5175 }
5176 t0 = tcg_const_i32(TO(ctx->opcode));
5177 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
5178 t0);
5179 tcg_temp_free_i32(t0);
5180 }
5181
5182 /* tdi */
5183 static void gen_tdi(DisasContext *ctx)
5184 {
5185 TCGv t0;
5186 TCGv_i32 t1;
5187
5188 if (check_unconditional_trap(ctx)) {
5189 return;
5190 }
5191 t0 = tcg_const_tl(SIMM(ctx->opcode));
5192 t1 = tcg_const_i32(TO(ctx->opcode));
5193 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
5194 tcg_temp_free(t0);
5195 tcg_temp_free_i32(t1);
5196 }
5197 #endif
5198
5199 /*** Processor control ***/
5200
5201 /* mcrxr */
5202 static void gen_mcrxr(DisasContext *ctx)
5203 {
5204 TCGv_i32 t0 = tcg_temp_new_i32();
5205 TCGv_i32 t1 = tcg_temp_new_i32();
5206 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
5207
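    /* Pack XER SO/OV/CA into the CR field as bits 3, 2 and 1 respectively. */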
5208 tcg_gen_trunc_tl_i32(t0, cpu_so);
5209 tcg_gen_trunc_tl_i32(t1, cpu_ov);
5210 tcg_gen_trunc_tl_i32(dst, cpu_ca);
5211 tcg_gen_shli_i32(t0, t0, 3);
5212 tcg_gen_shli_i32(t1, t1, 2);
5213 tcg_gen_shli_i32(dst, dst, 1);
5214 tcg_gen_or_i32(dst, dst, t0);
5215 tcg_gen_or_i32(dst, dst, t1);
5216 tcg_temp_free_i32(t0);
5217 tcg_temp_free_i32(t1);
5218
5219 tcg_gen_movi_tl(cpu_so, 0);
5220 tcg_gen_movi_tl(cpu_ov, 0);
5221 tcg_gen_movi_tl(cpu_ca, 0);
5222 }
5223
5224 #ifdef TARGET_PPC64
5225 /* mcrxrx */
5226 static void gen_mcrxrx(DisasContext *ctx)
5227 {
5228 TCGv t0 = tcg_temp_new();
5229 TCGv t1 = tcg_temp_new();
5230 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
5231
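    /* Resulting CR field is OV:OV32:CA:CA32 from bit 3 down to bit 0. */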
5232 /* copy OV and OV32 */
5233 tcg_gen_shli_tl(t0, cpu_ov, 1);
5234 tcg_gen_or_tl(t0, t0, cpu_ov32);
5235 tcg_gen_shli_tl(t0, t0, 2);
5236 /* copy CA and CA32 */
5237 tcg_gen_shli_tl(t1, cpu_ca, 1);
5238 tcg_gen_or_tl(t1, t1, cpu_ca32);
5239 tcg_gen_or_tl(t0, t0, t1);
5240 tcg_gen_trunc_tl_i32(dst, t0);
5241 tcg_temp_free(t0);
5242 tcg_temp_free(t1);
5243 }
5244 #endif
5245
5246 /* mfcr mfocrf */
5247 static void gen_mfcr(DisasContext *ctx)
5248 {
5249 uint32_t crm, crn;
5250
5251 if (likely(ctx->opcode & 0x00100000)) {
5252 crm = CRM(ctx->opcode);
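        /*
         * With the mfocrf form, the move is only performed when exactly one
         * CRM bit is set, e.g. crm = 0x80 selects CR0 (crn = 7).
         */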
5253 if (likely(crm && ((crm & (crm - 1)) == 0))) {
5254 crn = ctz32(crm);
5255 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
5256 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
5257 cpu_gpr[rD(ctx->opcode)], crn * 4);
5258 }
5259 } else {
5260 TCGv_i32 t0 = tcg_temp_new_i32();
5261 tcg_gen_mov_i32(t0, cpu_crf[0]);
5262 tcg_gen_shli_i32(t0, t0, 4);
5263 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
5264 tcg_gen_shli_i32(t0, t0, 4);
5265 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
5266 tcg_gen_shli_i32(t0, t0, 4);
5267 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
5268 tcg_gen_shli_i32(t0, t0, 4);
5269 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
5270 tcg_gen_shli_i32(t0, t0, 4);
5271 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
5272 tcg_gen_shli_i32(t0, t0, 4);
5273 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
5274 tcg_gen_shli_i32(t0, t0, 4);
5275 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
5276 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
5277 tcg_temp_free_i32(t0);
5278 }
5279 }
5280
5281 /* mfmsr */
5282 static void gen_mfmsr(DisasContext *ctx)
5283 {
5284 CHK_SV;
5285 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
5286 }
5287
5288 /* mfspr */
5289 static inline void gen_op_mfspr(DisasContext *ctx)
5290 {
5291 void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
5292 uint32_t sprn = SPR(ctx->opcode);
5293
5294 #if defined(CONFIG_USER_ONLY)
5295 read_cb = ctx->spr_cb[sprn].uea_read;
5296 #else
5297 if (ctx->pr) {
5298 read_cb = ctx->spr_cb[sprn].uea_read;
5299 } else if (ctx->hv) {
5300 read_cb = ctx->spr_cb[sprn].hea_read;
5301 } else {
5302 read_cb = ctx->spr_cb[sprn].oea_read;
5303 }
5304 #endif
5305 if (likely(read_cb != NULL)) {
5306 if (likely(read_cb != SPR_NOACCESS)) {
5307 (*read_cb)(ctx, rD(ctx->opcode), sprn);
5308 } else {
5309 /* Privilege exception */
5310 /*
5311 * This is a hack to avoid warnings when running Linux:
5312 * this OS breaks the PowerPC virtualisation model,
5313 * allowing userland applications to read the PVR
5314 */
5315 if (sprn != SPR_PVR) {
5316 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
5317 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
5318 ctx->cia);
5319 }
5320 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5321 }
5322 } else {
5323 /* ISA 2.07 defines these as no-ops */
5324 if ((ctx->insns_flags2 & PPC2_ISA207S) &&
5325 (sprn >= 808 && sprn <= 811)) {
5326 /* This is a nop */
5327 return;
5328 }
5329 /* Not defined */
5330 qemu_log_mask(LOG_GUEST_ERROR,
5331 "Trying to read invalid spr %d (0x%03x) at "
5332 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
5333
5334 /*
5335 * The behaviour depends on MSR:PR and SPR# bit 0x10; it can
5336 * generate a priv exception, a hv emu assistance or a no-op.
5337 */
5338 if (sprn & 0x10) {
5339 if (ctx->pr) {
5340 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5341 }
5342 } else {
5343 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
5344 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5345 }
5346 }
5347 }
5348 }
5349
5350 static void gen_mfspr(DisasContext *ctx)
5351 {
5352 gen_op_mfspr(ctx);
5353 }
5354
5355 /* mftb */
5356 static void gen_mftb(DisasContext *ctx)
5357 {
5358 gen_op_mfspr(ctx);
5359 }
5360
5361 /* mtcrf mtocrf */
5362 static void gen_mtcrf(DisasContext *ctx)
5363 {
5364 uint32_t crm, crn;
5365
5366 crm = CRM(ctx->opcode);
5367 if (likely((ctx->opcode & 0x00100000))) {
5368 if (crm && ((crm & (crm - 1)) == 0)) {
5369 TCGv_i32 temp = tcg_temp_new_i32();
5370 crn = ctz32(crm);
5371 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
5372 tcg_gen_shri_i32(temp, temp, crn * 4);
5373 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
5374 tcg_temp_free_i32(temp);
5375 }
5376 } else {
5377 TCGv_i32 temp = tcg_temp_new_i32();
5378 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
5379 for (crn = 0 ; crn < 8 ; crn++) {
5380 if (crm & (1 << crn)) {
5381 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
5382 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
5383 }
5384 }
5385 tcg_temp_free_i32(temp);
5386 }
5387 }
5388
5389 /* mtmsr */
5390 #if defined(TARGET_PPC64)
5391 static void gen_mtmsrd(DisasContext *ctx)
5392 {
5393 CHK_SV;
5394
5395 #if !defined(CONFIG_USER_ONLY)
5396 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
5397 gen_io_start();
5398 }
5399 if (ctx->opcode & 0x00010000) {
5400 /* L=1 form only updates EE and RI */
5401 TCGv t0 = tcg_temp_new();
5402 TCGv t1 = tcg_temp_new();
5403 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)],
5404 (1 << MSR_RI) | (1 << MSR_EE));
5405 tcg_gen_andi_tl(t1, cpu_msr,
5406 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
5407 tcg_gen_or_tl(t1, t1, t0);
5408
5409 gen_helper_store_msr(cpu_env, t1);
5410 tcg_temp_free(t0);
5411 tcg_temp_free(t1);
5412
5413 } else {
5414 /*
5415 * XXX: we need to update nip before the store; if we enter
5416 * power saving mode, we will exit the loop directly from
5417 * ppc_store_msr.
5418 */
5419 gen_update_nip(ctx, ctx->base.pc_next);
5420 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
5421 }
5422 /* Must stop the translation as machine state (may have) changed */
5423 gen_stop_exception(ctx);
5424 #endif /* !defined(CONFIG_USER_ONLY) */
5425 }
5426 #endif /* defined(TARGET_PPC64) */
5427
5428 static void gen_mtmsr(DisasContext *ctx)
5429 {
5430 CHK_SV;
5431
5432 #if !defined(CONFIG_USER_ONLY)
5433 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
5434 gen_io_start();
5435 }
5436 if (ctx->opcode & 0x00010000) {
5437 /* L=1 form only updates EE and RI */
5438 TCGv t0 = tcg_temp_new();
5439 TCGv t1 = tcg_temp_new();
5440 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)],
5441 (1 << MSR_RI) | (1 << MSR_EE));
5442 tcg_gen_andi_tl(t1, cpu_msr,
5443 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
5444 tcg_gen_or_tl(t1, t1, t0);
5445
5446 gen_helper_store_msr(cpu_env, t1);
5447 tcg_temp_free(t0);
5448 tcg_temp_free(t1);
5449
5450 } else {
5451 TCGv msr = tcg_temp_new();
5452
5453 /*
5454 * XXX: we need to update nip before the store; if we enter
5455 * power saving mode, we will exit the loop directly from
5456 * ppc_store_msr.
5457 */
5458 gen_update_nip(ctx, ctx->base.pc_next);
5459 #if defined(TARGET_PPC64)
5460 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
5461 #else
5462 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
5463 #endif
5464 gen_helper_store_msr(cpu_env, msr);
5465 tcg_temp_free(msr);
5466 }
5467 /* Must stop the translation as machine state (may have) changed */
5468 gen_stop_exception(ctx);
5469 #endif
5470 }
5471
5472 /* mtspr */
5473 static void gen_mtspr(DisasContext *ctx)
5474 {
5475 void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
5476 uint32_t sprn = SPR(ctx->opcode);
5477
5478 #if defined(CONFIG_USER_ONLY)
5479 write_cb = ctx->spr_cb[sprn].uea_write;
5480 #else
5481 if (ctx->pr) {
5482 write_cb = ctx->spr_cb[sprn].uea_write;
5483 } else if (ctx->hv) {
5484 write_cb = ctx->spr_cb[sprn].hea_write;
5485 } else {
5486 write_cb = ctx->spr_cb[sprn].oea_write;
5487 }
5488 #endif
5489 if (likely(write_cb != NULL)) {
5490 if (likely(write_cb != SPR_NOACCESS)) {
5491 (*write_cb)(ctx, sprn, rS(ctx->opcode));
5492 } else {
5493 /* Privilege exception */
5494 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
5495 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
5496 ctx->cia);
5497 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5498 }
5499 } else {
5500 /* ISA 2.07 defines these as no-ops */
5501 if ((ctx->insns_flags2 & PPC2_ISA207S) &&
5502 (sprn >= 808 && sprn <= 811)) {
5503 /* This is a nop */
5504 return;
5505 }
5506
5507 /* Not defined */
5508 qemu_log_mask(LOG_GUEST_ERROR,
5509 "Trying to write invalid spr %d (0x%03x) at "
5510 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
5511
5512
5513 /*
5514 * The behaviour depends on MSR:PR and SPR# bit 0x10; it can
5515 * generate a priv exception, a hv emu assistance or a no-op.
5516 */
5517 if (sprn & 0x10) {
5518 if (ctx->pr) {
5519 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5520 }
5521 } else {
5522 if (ctx->pr || sprn == 0) {
5523 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5524 }
5525 }
5526 }
5527 }
5528
5529 #if defined(TARGET_PPC64)
5530 /* setb */
5531 static void gen_setb(DisasContext *ctx)
5532 {
5533 TCGv_i32 t0 = tcg_temp_new_i32();
5534 TCGv_i32 t8 = tcg_temp_new_i32();
5535 TCGv_i32 tm1 = tcg_temp_new_i32();
5536 int crf = crfS(ctx->opcode);
5537
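    /*
     * setb maps the CR field to a signed value: LT set (field >= 8)
     * gives -1, otherwise GT set (field >= 4) gives 1, else 0.
     */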
5538 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
5539 tcg_gen_movi_i32(t8, 8);
5540 tcg_gen_movi_i32(tm1, -1);
5541 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
5542 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
5543
5544 tcg_temp_free_i32(t0);
5545 tcg_temp_free_i32(t8);
5546 tcg_temp_free_i32(tm1);
5547 }
5548 #endif
5549
5550 /*** Cache management ***/
5551
5552 /* dcbf */
5553 static void gen_dcbf(DisasContext *ctx)
5554 {
5555 /* XXX: specification says this is treated as a load by the MMU */
5556 TCGv t0;
5557 gen_set_access_type(ctx, ACCESS_CACHE);
5558 t0 = tcg_temp_new();
5559 gen_addr_reg_index(ctx, t0);
5560 gen_qemu_ld8u(ctx, t0, t0);
5561 tcg_temp_free(t0);
5562 }
5563
5564 /* dcbfep (external PID dcbf) */
5565 static void gen_dcbfep(DisasContext *ctx)
5566 {
5567 /* XXX: specification says this is treated as a load by the MMU */
5568 TCGv t0;
5569 CHK_SV;
5570 gen_set_access_type(ctx, ACCESS_CACHE);
5571 t0 = tcg_temp_new();
5572 gen_addr_reg_index(ctx, t0);
5573 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5574 tcg_temp_free(t0);
5575 }
5576
5577 /* dcbi (Supervisor only) */
5578 static void gen_dcbi(DisasContext *ctx)
5579 {
5580 #if defined(CONFIG_USER_ONLY)
5581 GEN_PRIV;
5582 #else
5583 TCGv EA, val;
5584
5585 CHK_SV;
5586 EA = tcg_temp_new();
5587 gen_set_access_type(ctx, ACCESS_CACHE);
5588 gen_addr_reg_index(ctx, EA);
5589 val = tcg_temp_new();
5590 /* XXX: specification says this should be treated as a store by the MMU */
5591 gen_qemu_ld8u(ctx, val, EA);
5592 gen_qemu_st8(ctx, val, EA);
5593 tcg_temp_free(val);
5594 tcg_temp_free(EA);
5595 #endif /* defined(CONFIG_USER_ONLY) */
5596 }
5597
5598 /* dcbst */
5599 static void gen_dcbst(DisasContext *ctx)
5600 {
5601 /* XXX: specification says this is treated as a load by the MMU */
5602 TCGv t0;
5603 gen_set_access_type(ctx, ACCESS_CACHE);
5604 t0 = tcg_temp_new();
5605 gen_addr_reg_index(ctx, t0);
5606 gen_qemu_ld8u(ctx, t0, t0);
5607 tcg_temp_free(t0);
5608 }
5609
5610 /* dcbstep (dcbstep External PID version) */
5611 static void gen_dcbstep(DisasContext *ctx)
5612 {
5613 /* XXX: specification says this is treated as a load by the MMU */
5614 TCGv t0;
5615 gen_set_access_type(ctx, ACCESS_CACHE);
5616 t0 = tcg_temp_new();
5617 gen_addr_reg_index(ctx, t0);
5618 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5619 tcg_temp_free(t0);
5620 }
5621
5622 /* dcbt */
5623 static void gen_dcbt(DisasContext *ctx)
5624 {
5625 /*
5626 * interpreted as no-op
5627 * XXX: specification says this is treated as a load by the MMU but
5628 * does not generate any exception
5629 */
5630 }
5631
5632 /* dcbtep */
5633 static void gen_dcbtep(DisasContext *ctx)
5634 {
5635 /*
5636 * interpreted as no-op
5637 * XXX: specification says this is treated as a load by the MMU but
5638 * does not generate any exception
5639 */
5640 }
5641
5642 /* dcbtst */
5643 static void gen_dcbtst(DisasContext *ctx)
5644 {
5645 /*
5646 * interpreted as no-op
5647 * XXX: specification says this is treated as a load by the MMU but
5648 * does not generate any exception
5649 */
5650 }
5651
5652 /* dcbtstep */
5653 static void gen_dcbtstep(DisasContext *ctx)
5654 {
5655 /*
5656 * interpreted as no-op
5657 * XXX: specification says this is treated as a load by the MMU but
5658 * does not generate any exception
5659 */
5660 }
5661
5662 /* dcbtls */
5663 static void gen_dcbtls(DisasContext *ctx)
5664 {
5665 /* Always fails locking the cache */
5666 TCGv t0 = tcg_temp_new();
5667 gen_load_spr(t0, SPR_Exxx_L1CSR0);
5668 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
5669 gen_store_spr(SPR_Exxx_L1CSR0, t0);
5670 tcg_temp_free(t0);
5671 }
5672
5673 /* dcbz */
5674 static void gen_dcbz(DisasContext *ctx)
5675 {
5676 TCGv tcgv_addr;
5677 TCGv_i32 tcgv_op;
5678
5679 gen_set_access_type(ctx, ACCESS_CACHE);
5680 tcgv_addr = tcg_temp_new();
5681 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5682 gen_addr_reg_index(ctx, tcgv_addr);
5683 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5684 tcg_temp_free(tcgv_addr);
5685 tcg_temp_free_i32(tcgv_op);
5686 }
5687
5688 /* dcbzep */
5689 static void gen_dcbzep(DisasContext *ctx)
5690 {
5691 TCGv tcgv_addr;
5692 TCGv_i32 tcgv_op;
5693
5694 gen_set_access_type(ctx, ACCESS_CACHE);
5695 tcgv_addr = tcg_temp_new();
5696 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5697 gen_addr_reg_index(ctx, tcgv_addr);
5698 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5699 tcg_temp_free(tcgv_addr);
5700 tcg_temp_free_i32(tcgv_op);
5701 }
5702
5703 /* dst / dstt */
5704 static void gen_dst(DisasContext *ctx)
5705 {
5706 if (rA(ctx->opcode) == 0) {
5707 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5708 } else {
5709 /* interpreted as no-op */
5710 }
5711 }
5712
5713 /* dstst / dststt */
5714 static void gen_dstst(DisasContext *ctx)
5715 {
5716 if (rA(ctx->opcode) == 0) {
5717 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5718 } else {
5719 /* interpreted as no-op */
5720 }
5721
5722 }
5723
5724 /* dss / dssall */
5725 static void gen_dss(DisasContext *ctx)
5726 {
5727 /* interpreted as no-op */
5728 }
5729
5730 /* icbi */
5731 static void gen_icbi(DisasContext *ctx)
5732 {
5733 TCGv t0;
5734 gen_set_access_type(ctx, ACCESS_CACHE);
5735 t0 = tcg_temp_new();
5736 gen_addr_reg_index(ctx, t0);
5737 gen_helper_icbi(cpu_env, t0);
5738 tcg_temp_free(t0);
5739 }
5740
5741 /* icbiep */
5742 static void gen_icbiep(DisasContext *ctx)
5743 {
5744 TCGv t0;
5745 gen_set_access_type(ctx, ACCESS_CACHE);
5746 t0 = tcg_temp_new();
5747 gen_addr_reg_index(ctx, t0);
5748 gen_helper_icbiep(cpu_env, t0);
5749 tcg_temp_free(t0);
5750 }
5751
5752 /* Optional: */
5753 /* dcba */
5754 static void gen_dcba(DisasContext *ctx)
5755 {
5756 /*
5757 * interpreted as no-op
5758 * XXX: specification says this is treated as a store by the MMU
5759 * but does not generate any exception
5760 */
5761 }
5762
5763 /*** Segment register manipulation ***/
5764 /* Supervisor only: */
5765
5766 /* mfsr */
5767 static void gen_mfsr(DisasContext *ctx)
5768 {
5769 #if defined(CONFIG_USER_ONLY)
5770 GEN_PRIV;
5771 #else
5772 TCGv t0;
5773
5774 CHK_SV;
5775 t0 = tcg_const_tl(SR(ctx->opcode));
5776 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5777 tcg_temp_free(t0);
5778 #endif /* defined(CONFIG_USER_ONLY) */
5779 }
5780
5781 /* mfsrin */
5782 static void gen_mfsrin(DisasContext *ctx)
5783 {
5784 #if defined(CONFIG_USER_ONLY)
5785 GEN_PRIV;
5786 #else
5787 TCGv t0;
5788
5789 CHK_SV;
5790 t0 = tcg_temp_new();
5791 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5792 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5793 tcg_temp_free(t0);
5794 #endif /* defined(CONFIG_USER_ONLY) */
5795 }
5796
5797 /* mtsr */
5798 static void gen_mtsr(DisasContext *ctx)
5799 {
5800 #if defined(CONFIG_USER_ONLY)
5801 GEN_PRIV;
5802 #else
5803 TCGv t0;
5804
5805 CHK_SV;
5806 t0 = tcg_const_tl(SR(ctx->opcode));
5807 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5808 tcg_temp_free(t0);
5809 #endif /* defined(CONFIG_USER_ONLY) */
5810 }
5811
5812 /* mtsrin */
5813 static void gen_mtsrin(DisasContext *ctx)
5814 {
5815 #if defined(CONFIG_USER_ONLY)
5816 GEN_PRIV;
5817 #else
5818 TCGv t0;
5819 CHK_SV;
5820
5821 t0 = tcg_temp_new();
5822 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5823 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5824 tcg_temp_free(t0);
5825 #endif /* defined(CONFIG_USER_ONLY) */
5826 }
5827
5828 #if defined(TARGET_PPC64)
5829 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5830
5831 /* mfsr */
5832 static void gen_mfsr_64b(DisasContext *ctx)
5833 {
5834 #if defined(CONFIG_USER_ONLY)
5835 GEN_PRIV;
5836 #else
5837 TCGv t0;
5838
5839 CHK_SV;
5840 t0 = tcg_const_tl(SR(ctx->opcode));
5841 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5842 tcg_temp_free(t0);
5843 #endif /* defined(CONFIG_USER_ONLY) */
5844 }
5845
5846 /* mfsrin */
5847 static void gen_mfsrin_64b(DisasContext *ctx)
5848 {
5849 #if defined(CONFIG_USER_ONLY)
5850 GEN_PRIV;
5851 #else
5852 TCGv t0;
5853
5854 CHK_SV;
5855 t0 = tcg_temp_new();
5856 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5857 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5858 tcg_temp_free(t0);
5859 #endif /* defined(CONFIG_USER_ONLY) */
5860 }
5861
5862 /* mtsr */
5863 static void gen_mtsr_64b(DisasContext *ctx)
5864 {
5865 #if defined(CONFIG_USER_ONLY)
5866 GEN_PRIV;
5867 #else
5868 TCGv t0;
5869
5870 CHK_SV;
5871 t0 = tcg_const_tl(SR(ctx->opcode));
5872 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5873 tcg_temp_free(t0);
5874 #endif /* defined(CONFIG_USER_ONLY) */
5875 }
5876
5877 /* mtsrin */
5878 static void gen_mtsrin_64b(DisasContext *ctx)
5879 {
5880 #if defined(CONFIG_USER_ONLY)
5881 GEN_PRIV;
5882 #else
5883 TCGv t0;
5884
5885 CHK_SV;
5886 t0 = tcg_temp_new();
5887 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5888 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5889 tcg_temp_free(t0);
5890 #endif /* defined(CONFIG_USER_ONLY) */
5891 }
5892
5893 /* slbmte */
5894 static void gen_slbmte(DisasContext *ctx)
5895 {
5896 #if defined(CONFIG_USER_ONLY)
5897 GEN_PRIV;
5898 #else
5899 CHK_SV;
5900
5901 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
5902 cpu_gpr[rS(ctx->opcode)]);
5903 #endif /* defined(CONFIG_USER_ONLY) */
5904 }
5905
5906 static void gen_slbmfee(DisasContext *ctx)
5907 {
5908 #if defined(CONFIG_USER_ONLY)
5909 GEN_PRIV;
5910 #else
5911 CHK_SV;
5912
5913 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5914 cpu_gpr[rB(ctx->opcode)]);
5915 #endif /* defined(CONFIG_USER_ONLY) */
5916 }
5917
5918 static void gen_slbmfev(DisasContext *ctx)
5919 {
5920 #if defined(CONFIG_USER_ONLY)
5921 GEN_PRIV;
5922 #else
5923 CHK_SV;
5924
5925 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5926 cpu_gpr[rB(ctx->opcode)]);
5927 #endif /* defined(CONFIG_USER_ONLY) */
5928 }
5929
5930 static void gen_slbfee_(DisasContext *ctx)
5931 {
5932 #if defined(CONFIG_USER_ONLY)
5933 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5934 #else
5935 TCGLabel *l1, *l2;
5936
5937 if (unlikely(ctx->pr)) {
5938 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5939 return;
5940 }
5941 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5942 cpu_gpr[rB(ctx->opcode)]);
5943 l1 = gen_new_label();
5944 l2 = gen_new_label();
5945 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5946 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
5947 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
5948 tcg_gen_br(l2);
5949 gen_set_label(l1);
5950 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
5951 gen_set_label(l2);
5952 #endif
5953 }
5954 #endif /* defined(TARGET_PPC64) */
5955
5956 /*** Lookaside buffer management ***/
5957 /* Optional & supervisor only: */
5958
5959 /* tlbia */
5960 static void gen_tlbia(DisasContext *ctx)
5961 {
5962 #if defined(CONFIG_USER_ONLY)
5963 GEN_PRIV;
5964 #else
5965 CHK_HV;
5966
5967 gen_helper_tlbia(cpu_env);
5968 #endif /* defined(CONFIG_USER_ONLY) */
5969 }
5970
5971 /* tlbiel */
5972 static void gen_tlbiel(DisasContext *ctx)
5973 {
5974 #if defined(CONFIG_USER_ONLY)
5975 GEN_PRIV;
5976 #else
5977 CHK_SV;
5978
5979 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5980 #endif /* defined(CONFIG_USER_ONLY) */
5981 }
5982
5983 /* tlbie */
5984 static void gen_tlbie(DisasContext *ctx)
5985 {
5986 #if defined(CONFIG_USER_ONLY)
5987 GEN_PRIV;
5988 #else
5989 TCGv_i32 t1;
5990
5991 if (ctx->gtse) {
5992 CHK_SV; /* If gtse is set then tlbie is supervisor privileged */
5993 } else {
5994 CHK_HV; /* Else hypervisor privileged */
5995 }
5996
5997 if (NARROW_MODE(ctx)) {
5998 TCGv t0 = tcg_temp_new();
5999 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
6000 gen_helper_tlbie(cpu_env, t0);
6001 tcg_temp_free(t0);
6002 } else {
6003 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6004 }
6005 t1 = tcg_temp_new_i32();
6006 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
6007 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
6008 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
6009 tcg_temp_free_i32(t1);
6010 #endif /* defined(CONFIG_USER_ONLY) */
6011 }
6012
6013 /* tlbsync */
6014 static void gen_tlbsync(DisasContext *ctx)
6015 {
6016 #if defined(CONFIG_USER_ONLY)
6017 GEN_PRIV;
6018 #else
6019
6020 if (ctx->gtse) {
6021 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */
6022 } else {
6023 CHK_HV; /* Else hypervisor privileged */
6024 }
6025
6026     /* BookS software does both ptesync and tlbsync, so tlbsync can be a nop for server */
6027 if (ctx->insns_flags & PPC_BOOKE) {
6028 gen_check_tlb_flush(ctx, true);
6029 }
6030 #endif /* defined(CONFIG_USER_ONLY) */
6031 }
6032
6033 #if defined(TARGET_PPC64)
6034 /* slbia */
6035 static void gen_slbia(DisasContext *ctx)
6036 {
6037 #if defined(CONFIG_USER_ONLY)
6038 GEN_PRIV;
6039 #else
6040 uint32_t ih = (ctx->opcode >> 21) & 0x7;
6041 TCGv_i32 t0 = tcg_const_i32(ih);
6042
6043 CHK_SV;
6044
6045 gen_helper_slbia(cpu_env, t0);
6046 tcg_temp_free_i32(t0);
6047 #endif /* defined(CONFIG_USER_ONLY) */
6048 }
6049
6050 /* slbie */
6051 static void gen_slbie(DisasContext *ctx)
6052 {
6053 #if defined(CONFIG_USER_ONLY)
6054 GEN_PRIV;
6055 #else
6056 CHK_SV;
6057
6058 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6059 #endif /* defined(CONFIG_USER_ONLY) */
6060 }
6061
6062 /* slbieg */
6063 static void gen_slbieg(DisasContext *ctx)
6064 {
6065 #if defined(CONFIG_USER_ONLY)
6066 GEN_PRIV;
6067 #else
6068 CHK_SV;
6069
6070 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6071 #endif /* defined(CONFIG_USER_ONLY) */
6072 }
6073
6074 /* slbsync */
6075 static void gen_slbsync(DisasContext *ctx)
6076 {
6077 #if defined(CONFIG_USER_ONLY)
6078 GEN_PRIV;
6079 #else
6080 CHK_SV;
6081 gen_check_tlb_flush(ctx, true);
6082 #endif /* defined(CONFIG_USER_ONLY) */
6083 }
6084
6085 #endif /* defined(TARGET_PPC64) */
6086
6087 /*** External control ***/
6088 /* Optional: */
6089
6090 /* eciwx */
6091 static void gen_eciwx(DisasContext *ctx)
6092 {
6093 TCGv t0;
6094 /* Should check EAR[E] ! */
6095 gen_set_access_type(ctx, ACCESS_EXT);
6096 t0 = tcg_temp_new();
6097 gen_addr_reg_index(ctx, t0);
6098 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
6099 DEF_MEMOP(MO_UL | MO_ALIGN));
6100 tcg_temp_free(t0);
6101 }
6102
6103 /* ecowx */
6104 static void gen_ecowx(DisasContext *ctx)
6105 {
6106 TCGv t0;
6107 /* Should check EAR[E] ! */
6108 gen_set_access_type(ctx, ACCESS_EXT);
6109 t0 = tcg_temp_new();
6110 gen_addr_reg_index(ctx, t0);
6111 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
6112 DEF_MEMOP(MO_UL | MO_ALIGN));
6113 tcg_temp_free(t0);
6114 }
6115
6116 /* PowerPC 601 specific instructions */
6117
6118 /* abs - abs. */
6119 static void gen_abs(DisasContext *ctx)
6120 {
6121 TCGv d = cpu_gpr[rD(ctx->opcode)];
6122 TCGv a = cpu_gpr[rA(ctx->opcode)];
6123
6124 tcg_gen_abs_tl(d, a);
6125 if (unlikely(Rc(ctx->opcode) != 0)) {
6126 gen_set_Rc0(ctx, d);
6127 }
6128 }
6129
6130 /* abso - abso. */
6131 static void gen_abso(DisasContext *ctx)
6132 {
6133 TCGv d = cpu_gpr[rD(ctx->opcode)];
6134 TCGv a = cpu_gpr[rA(ctx->opcode)];
6135
6136 tcg_gen_setcondi_tl(TCG_COND_EQ, cpu_ov, a, 0x80000000);
6137 tcg_gen_abs_tl(d, a);
6138 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
6139 if (unlikely(Rc(ctx->opcode) != 0)) {
6140 gen_set_Rc0(ctx, d);
6141 }
6142 }
6143
6144 /* clcs */
6145 static void gen_clcs(DisasContext *ctx)
6146 {
6147 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
6148 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6149 tcg_temp_free_i32(t0);
6150 /* Rc=1 sets CR0 to an undefined state */
6151 }
6152
6153 /* div - div. */
6154 static void gen_div(DisasContext *ctx)
6155 {
6156 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
6157 cpu_gpr[rB(ctx->opcode)]);
6158 if (unlikely(Rc(ctx->opcode) != 0)) {
6159 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6160 }
6161 }
6162
6163 /* divo - divo. */
6164 static void gen_divo(DisasContext *ctx)
6165 {
6166 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
6167 cpu_gpr[rB(ctx->opcode)]);
6168 if (unlikely(Rc(ctx->opcode) != 0)) {
6169 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6170 }
6171 }
6172
6173 /* divs - divs. */
6174 static void gen_divs(DisasContext *ctx)
6175 {
6176 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
6177 cpu_gpr[rB(ctx->opcode)]);
6178 if (unlikely(Rc(ctx->opcode) != 0)) {
6179 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6180 }
6181 }
6182
6183 /* divso - divso. */
6184 static void gen_divso(DisasContext *ctx)
6185 {
6186 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
6187 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6188 if (unlikely(Rc(ctx->opcode) != 0)) {
6189 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6190 }
6191 }
6192
6193 /* doz - doz. */
6194 static void gen_doz(DisasContext *ctx)
6195 {
6196 TCGLabel *l1 = gen_new_label();
6197 TCGLabel *l2 = gen_new_label();
6198 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)],
6199 cpu_gpr[rA(ctx->opcode)], l1);
6200 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
6201 cpu_gpr[rA(ctx->opcode)]);
6202 tcg_gen_br(l2);
6203 gen_set_label(l1);
6204 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
6205 gen_set_label(l2);
6206 if (unlikely(Rc(ctx->opcode) != 0)) {
6207 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6208 }
6209 }
6210
6211 /* dozo - dozo. */
6212 static void gen_dozo(DisasContext *ctx)
6213 {
6214 TCGLabel *l1 = gen_new_label();
6215 TCGLabel *l2 = gen_new_label();
6216 TCGv t0 = tcg_temp_new();
6217 TCGv t1 = tcg_temp_new();
6218 TCGv t2 = tcg_temp_new();
6219 /* Start with XER OV disabled, the most likely case */
6220 tcg_gen_movi_tl(cpu_ov, 0);
6221 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)],
6222 cpu_gpr[rA(ctx->opcode)], l1);
6223 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
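    /*
     * Detect signed overflow of rB - rA: it occurs when rA and rB have
     * different signs and the result's sign differs from rB's, i.e. when
     * the sign bit of (rB ^ rA) & ~(rA ^ result) is set.
     */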
6224 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6225 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
6226 tcg_gen_andc_tl(t1, t1, t2);
6227 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
6228 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
6229 tcg_gen_movi_tl(cpu_ov, 1);
6230 tcg_gen_movi_tl(cpu_so, 1);
6231 tcg_gen_br(l2);
6232 gen_set_label(l1);
6233 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
6234 gen_set_label(l2);
6235 tcg_temp_free(t0);
6236 tcg_temp_free(t1);
6237 tcg_temp_free(t2);
6238 if (unlikely(Rc(ctx->opcode) != 0)) {
6239 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6240 }
6241 }
6242
6243 /* dozi */
6244 static void gen_dozi(DisasContext *ctx)
6245 {
6246 target_long simm = SIMM(ctx->opcode);
6247 TCGLabel *l1 = gen_new_label();
6248 TCGLabel *l2 = gen_new_label();
6249 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
6250 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
6251 tcg_gen_br(l2);
6252 gen_set_label(l1);
6253 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
6254 gen_set_label(l2);
6255 if (unlikely(Rc(ctx->opcode) != 0)) {
6256 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6257 }
6258 }
6259
6260 /* lscbx - lscbx. */
6261 static void gen_lscbx(DisasContext *ctx)
6262 {
6263 TCGv t0 = tcg_temp_new();
6264 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
6265 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
6266 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
6267
6268 gen_addr_reg_index(ctx, t0);
6269 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
6270 tcg_temp_free_i32(t1);
6271 tcg_temp_free_i32(t2);
6272 tcg_temp_free_i32(t3);
6273 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
6274 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
6275 if (unlikely(Rc(ctx->opcode) != 0)) {
6276 gen_set_Rc0(ctx, t0);
6277 }
6278 tcg_temp_free(t0);
6279 }
6280
6281 /* maskg - maskg. */
6282 static void gen_maskg(DisasContext *ctx)
6283 {
6284 TCGLabel *l1 = gen_new_label();
6285 TCGv t0 = tcg_temp_new();
6286 TCGv t1 = tcg_temp_new();
6287 TCGv t2 = tcg_temp_new();
6288 TCGv t3 = tcg_temp_new();
6289 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
6290 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6291 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
6292 tcg_gen_addi_tl(t2, t0, 1);
6293 tcg_gen_shr_tl(t2, t3, t2);
6294 tcg_gen_shr_tl(t3, t3, t1);
6295 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
6296 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
6297 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6298 gen_set_label(l1);
6299 tcg_temp_free(t0);
6300 tcg_temp_free(t1);
6301 tcg_temp_free(t2);
6302 tcg_temp_free(t3);
6303 if (unlikely(Rc(ctx->opcode) != 0)) {
6304 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6305 }
6306 }
6307
6308 /* maskir - maskir. */
6309 static void gen_maskir(DisasContext *ctx)
6310 {
6311 TCGv t0 = tcg_temp_new();
6312 TCGv t1 = tcg_temp_new();
6313 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6314 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6315 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6316 tcg_temp_free(t0);
6317 tcg_temp_free(t1);
6318 if (unlikely(Rc(ctx->opcode) != 0)) {
6319 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6320 }
6321 }
6322
6323 /* mul - mul. */
6324 static void gen_mul(DisasContext *ctx)
6325 {
6326 TCGv_i64 t0 = tcg_temp_new_i64();
6327 TCGv_i64 t1 = tcg_temp_new_i64();
6328 TCGv t2 = tcg_temp_new();
6329 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
6330 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
6331 tcg_gen_mul_i64(t0, t0, t1);
6332 tcg_gen_trunc_i64_tl(t2, t0);
6333 gen_store_spr(SPR_MQ, t2);
6334 tcg_gen_shri_i64(t1, t0, 32);
6335 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
6336 tcg_temp_free_i64(t0);
6337 tcg_temp_free_i64(t1);
6338 tcg_temp_free(t2);
6339 if (unlikely(Rc(ctx->opcode) != 0)) {
6340 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6341 }
6342 }
6343
6344 /* mulo - mulo. */
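/*
 * Like mul (high 32 bits of the product to rD, low 32 bits to MQ), but
 * XER[OV] and XER[SO] are set when the 64-bit product does not fit in
 * 32 bits, i.e. its upper half is not the sign extension of its lower half.
 */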
6345 static void gen_mulo(DisasContext *ctx)
6346 {
6347 TCGLabel *l1 = gen_new_label();
6348 TCGv_i64 t0 = tcg_temp_new_i64();
6349 TCGv_i64 t1 = tcg_temp_new_i64();
6350 TCGv t2 = tcg_temp_new();
6351 /* Start with XER OV disabled, the most likely case */
6352 tcg_gen_movi_tl(cpu_ov, 0);
6353 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
6354 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
6355 tcg_gen_mul_i64(t0, t0, t1);
6356 tcg_gen_trunc_i64_tl(t2, t0);
6357 gen_store_spr(SPR_MQ, t2);
6358 tcg_gen_shri_i64(t1, t0, 32);
6359 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
6360 tcg_gen_ext32s_i64(t1, t0);
6361 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
6362 tcg_gen_movi_tl(cpu_ov, 1);
6363 tcg_gen_movi_tl(cpu_so, 1);
6364 gen_set_label(l1);
6365 tcg_temp_free_i64(t0);
6366 tcg_temp_free_i64(t1);
6367 tcg_temp_free(t2);
6368 if (unlikely(Rc(ctx->opcode) != 0)) {
6369 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6370 }
6371 }
6372
6373 /* nabs - nabs. */
6374 static void gen_nabs(DisasContext *ctx)
6375 {
6376 TCGv d = cpu_gpr[rD(ctx->opcode)];
6377 TCGv a = cpu_gpr[rA(ctx->opcode)];
6378
6379 tcg_gen_abs_tl(d, a);
6380 tcg_gen_neg_tl(d, d);
6381 if (unlikely(Rc(ctx->opcode) != 0)) {
6382 gen_set_Rc0(ctx, d);
6383 }
6384 }
6385
6386 /* nabso - nabso. */
6387 static void gen_nabso(DisasContext *ctx)
6388 {
6389 TCGv d = cpu_gpr[rD(ctx->opcode)];
6390 TCGv a = cpu_gpr[rA(ctx->opcode)];
6391
6392 tcg_gen_abs_tl(d, a);
6393 tcg_gen_neg_tl(d, d);
6394 /* nabs never overflows */
6395 tcg_gen_movi_tl(cpu_ov, 0);
6396 if (unlikely(Rc(ctx->opcode) != 0)) {
6397 gen_set_Rc0(ctx, d);
6398 }
6399 }
6400
6401 /* rlmi - rlmi. */
6402 static void gen_rlmi(DisasContext *ctx)
6403 {
6404 uint32_t mb = MB(ctx->opcode);
6405 uint32_t me = ME(ctx->opcode);
6406 TCGv t0 = tcg_temp_new();
6407 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6408 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6409 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
6410 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
6411 ~MASK(mb, me));
6412 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
6413 tcg_temp_free(t0);
6414 if (unlikely(Rc(ctx->opcode) != 0)) {
6415 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6416 }
6417 }
6418
6419 /* rrib - rrib. */
6420 static void gen_rrib(DisasContext *ctx)
6421 {
6422 TCGv t0 = tcg_temp_new();
6423 TCGv t1 = tcg_temp_new();
6424 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6425 tcg_gen_movi_tl(t1, 0x80000000);
6426 tcg_gen_shr_tl(t1, t1, t0);
6427 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6428 tcg_gen_and_tl(t0, t0, t1);
6429 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
6430 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6431 tcg_temp_free(t0);
6432 tcg_temp_free(t1);
6433 if (unlikely(Rc(ctx->opcode) != 0)) {
6434 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6435 }
6436 }
6437
6438 /* sle - sle. */
6439 static void gen_sle(DisasContext *ctx)
6440 {
6441 TCGv t0 = tcg_temp_new();
6442 TCGv t1 = tcg_temp_new();
6443 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6444 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6445 tcg_gen_subfi_tl(t1, 32, t1);
6446 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6447 tcg_gen_or_tl(t1, t0, t1);
6448 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6449 gen_store_spr(SPR_MQ, t1);
6450 tcg_temp_free(t0);
6451 tcg_temp_free(t1);
6452 if (unlikely(Rc(ctx->opcode) != 0)) {
6453 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6454 }
6455 }
6456
6457 /* sleq - sleq. */
6458 static void gen_sleq(DisasContext *ctx)
6459 {
6460 TCGv t0 = tcg_temp_new();
6461 TCGv t1 = tcg_temp_new();
6462 TCGv t2 = tcg_temp_new();
6463 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6464 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
6465 tcg_gen_shl_tl(t2, t2, t0);
6466 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6467 gen_load_spr(t1, SPR_MQ);
6468 gen_store_spr(SPR_MQ, t0);
6469 tcg_gen_and_tl(t0, t0, t2);
6470 tcg_gen_andc_tl(t1, t1, t2);
6471 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6472 tcg_temp_free(t0);
6473 tcg_temp_free(t1);
6474 tcg_temp_free(t2);
6475 if (unlikely(Rc(ctx->opcode) != 0)) {
6476 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6477 }
6478 }
6479
6480 /* sliq - sliq. */
6481 static void gen_sliq(DisasContext *ctx)
6482 {
6483 int sh = SH(ctx->opcode);
6484 TCGv t0 = tcg_temp_new();
6485 TCGv t1 = tcg_temp_new();
6486 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6487 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6488 tcg_gen_or_tl(t1, t0, t1);
6489 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6490 gen_store_spr(SPR_MQ, t1);
6491 tcg_temp_free(t0);
6492 tcg_temp_free(t1);
6493 if (unlikely(Rc(ctx->opcode) != 0)) {
6494 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6495 }
6496 }
6497
6498 /* slliq - slliq. */
6499 static void gen_slliq(DisasContext *ctx)
6500 {
6501 int sh = SH(ctx->opcode);
6502 TCGv t0 = tcg_temp_new();
6503 TCGv t1 = tcg_temp_new();
6504 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6505 gen_load_spr(t1, SPR_MQ);
6506 gen_store_spr(SPR_MQ, t0);
6507 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
6508 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
6509 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6510 tcg_temp_free(t0);
6511 tcg_temp_free(t1);
6512 if (unlikely(Rc(ctx->opcode) != 0)) {
6513 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6514 }
6515 }
6516
6517 /* sllq - sllq. */
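/*
 * Shift left long with MQ: when bit 0x20 of rB is set (shift amount >= 32)
 * the result comes entirely from MQ under the shift mask; otherwise the
 * shifted rS is merged with the MQ bits that fall outside the mask.
 */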
6518 static void gen_sllq(DisasContext *ctx)
6519 {
6520 TCGLabel *l1 = gen_new_label();
6521 TCGLabel *l2 = gen_new_label();
6522 TCGv t0 = tcg_temp_local_new();
6523 TCGv t1 = tcg_temp_local_new();
6524 TCGv t2 = tcg_temp_local_new();
6525 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6526 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6527 tcg_gen_shl_tl(t1, t1, t2);
6528 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6529 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6530 gen_load_spr(t0, SPR_MQ);
6531 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6532 tcg_gen_br(l2);
6533 gen_set_label(l1);
6534 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6535 gen_load_spr(t2, SPR_MQ);
6536 tcg_gen_andc_tl(t1, t2, t1);
6537 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6538 gen_set_label(l2);
6539 tcg_temp_free(t0);
6540 tcg_temp_free(t1);
6541 tcg_temp_free(t2);
6542 if (unlikely(Rc(ctx->opcode) != 0)) {
6543 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6544 }
6545 }
6546
6547 /* slq - slq. */
6548 static void gen_slq(DisasContext *ctx)
6549 {
6550 TCGLabel *l1 = gen_new_label();
6551 TCGv t0 = tcg_temp_new();
6552 TCGv t1 = tcg_temp_new();
6553 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6554 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6555 tcg_gen_subfi_tl(t1, 32, t1);
6556 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6557 tcg_gen_or_tl(t1, t0, t1);
6558 gen_store_spr(SPR_MQ, t1);
6559 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
6560 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6561 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6562 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
6563 gen_set_label(l1);
6564 tcg_temp_free(t0);
6565 tcg_temp_free(t1);
6566 if (unlikely(Rc(ctx->opcode) != 0)) {
6567 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6568 }
6569 }
6570
6571 /* sraiq - sraiq. */
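/*
 * Shift right algebraic immediate with MQ: MQ receives the source rotated
 * right by sh, rA receives the arithmetic shift, and CA is set only when
 * the source is negative and non-zero bits were shifted out.
 */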
6572 static void gen_sraiq(DisasContext *ctx)
6573 {
6574 int sh = SH(ctx->opcode);
6575 TCGLabel *l1 = gen_new_label();
6576 TCGv t0 = tcg_temp_new();
6577 TCGv t1 = tcg_temp_new();
6578 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6579 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6580 tcg_gen_or_tl(t0, t0, t1);
6581 gen_store_spr(SPR_MQ, t0);
6582 tcg_gen_movi_tl(cpu_ca, 0);
6583 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6584 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
6585 tcg_gen_movi_tl(cpu_ca, 1);
6586 gen_set_label(l1);
6587 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
6588 tcg_temp_free(t0);
6589 tcg_temp_free(t1);
6590 if (unlikely(Rc(ctx->opcode) != 0)) {
6591 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6592 }
6593 }
6594
6595 /* sraq - sraq. */
6596 static void gen_sraq(DisasContext *ctx)
6597 {
6598 TCGLabel *l1 = gen_new_label();
6599 TCGLabel *l2 = gen_new_label();
6600 TCGv t0 = tcg_temp_new();
6601 TCGv t1 = tcg_temp_local_new();
6602 TCGv t2 = tcg_temp_local_new();
6603 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6604 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6605 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
6606 tcg_gen_subfi_tl(t2, 32, t2);
6607 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
6608 tcg_gen_or_tl(t0, t0, t2);
6609 gen_store_spr(SPR_MQ, t0);
6610 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6611     tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6612 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
6613 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
6614 gen_set_label(l1);
6615 tcg_temp_free(t0);
6616 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
6617 tcg_gen_movi_tl(cpu_ca, 0);
6618 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
6619 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
6620 tcg_gen_movi_tl(cpu_ca, 1);
6621 gen_set_label(l2);
6622 tcg_temp_free(t1);
6623 tcg_temp_free(t2);
6624 if (unlikely(Rc(ctx->opcode) != 0)) {
6625 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6626 }
6627 }
6628
6629 /* sre - sre. */
6630 static void gen_sre(DisasContext *ctx)
6631 {
6632 TCGv t0 = tcg_temp_new();
6633 TCGv t1 = tcg_temp_new();
6634 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6635 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6636 tcg_gen_subfi_tl(t1, 32, t1);
6637 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6638 tcg_gen_or_tl(t1, t0, t1);
6639 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6640 gen_store_spr(SPR_MQ, t1);
6641 tcg_temp_free(t0);
6642 tcg_temp_free(t1);
6643 if (unlikely(Rc(ctx->opcode) != 0)) {
6644 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6645 }
6646 }
6647
6648 /* srea - srea. */
6649 static void gen_srea(DisasContext *ctx)
6650 {
6651 TCGv t0 = tcg_temp_new();
6652 TCGv t1 = tcg_temp_new();
6653 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6654 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6655 gen_store_spr(SPR_MQ, t0);
6656 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
6657 tcg_temp_free(t0);
6658 tcg_temp_free(t1);
6659 if (unlikely(Rc(ctx->opcode) != 0)) {
6660 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6661 }
6662 }
6663
6664 /* sreq */
6665 static void gen_sreq(DisasContext *ctx)
6666 {
6667 TCGv t0 = tcg_temp_new();
6668 TCGv t1 = tcg_temp_new();
6669 TCGv t2 = tcg_temp_new();
6670 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6671 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6672 tcg_gen_shr_tl(t1, t1, t0);
6673 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6674 gen_load_spr(t2, SPR_MQ);
6675 gen_store_spr(SPR_MQ, t0);
6676 tcg_gen_and_tl(t0, t0, t1);
6677 tcg_gen_andc_tl(t2, t2, t1);
6678 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
6679 tcg_temp_free(t0);
6680 tcg_temp_free(t1);
6681 tcg_temp_free(t2);
6682 if (unlikely(Rc(ctx->opcode) != 0)) {
6683 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6684 }
6685 }
6686
6687 /* sriq */
6688 static void gen_sriq(DisasContext *ctx)
6689 {
6690 int sh = SH(ctx->opcode);
6691 TCGv t0 = tcg_temp_new();
6692 TCGv t1 = tcg_temp_new();
6693 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6694 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6695 tcg_gen_or_tl(t1, t0, t1);
6696 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6697 gen_store_spr(SPR_MQ, t1);
6698 tcg_temp_free(t0);
6699 tcg_temp_free(t1);
6700 if (unlikely(Rc(ctx->opcode) != 0)) {
6701 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6702 }
6703 }
6704
6705 /* srliq */
6706 static void gen_srliq(DisasContext *ctx)
6707 {
6708 int sh = SH(ctx->opcode);
6709 TCGv t0 = tcg_temp_new();
6710 TCGv t1 = tcg_temp_new();
6711 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6712 gen_load_spr(t1, SPR_MQ);
6713 gen_store_spr(SPR_MQ, t0);
6714 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
6715 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
6716 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6717 tcg_temp_free(t0);
6718 tcg_temp_free(t1);
6719 if (unlikely(Rc(ctx->opcode) != 0)) {
6720 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6721 }
6722 }
6723
6724 /* srlq */
6725 static void gen_srlq(DisasContext *ctx)
6726 {
6727 TCGLabel *l1 = gen_new_label();
6728 TCGLabel *l2 = gen_new_label();
6729 TCGv t0 = tcg_temp_local_new();
6730 TCGv t1 = tcg_temp_local_new();
6731 TCGv t2 = tcg_temp_local_new();
6732 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6733 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6734 tcg_gen_shr_tl(t2, t1, t2);
6735 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6736 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6737 gen_load_spr(t0, SPR_MQ);
6738 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
6739 tcg_gen_br(l2);
6740 gen_set_label(l1);
6741 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6742 tcg_gen_and_tl(t0, t0, t2);
6743 gen_load_spr(t1, SPR_MQ);
6744 tcg_gen_andc_tl(t1, t1, t2);
6745 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6746 gen_set_label(l2);
6747 tcg_temp_free(t0);
6748 tcg_temp_free(t1);
6749 tcg_temp_free(t2);
6750 if (unlikely(Rc(ctx->opcode) != 0)) {
6751 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6752 }
6753 }
6754
6755 /* srq */
6756 static void gen_srq(DisasContext *ctx)
6757 {
6758 TCGLabel *l1 = gen_new_label();
6759 TCGv t0 = tcg_temp_new();
6760 TCGv t1 = tcg_temp_new();
6761 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6762 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6763 tcg_gen_subfi_tl(t1, 32, t1);
6764 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6765 tcg_gen_or_tl(t1, t0, t1);
6766 gen_store_spr(SPR_MQ, t1);
6767 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
6768 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6769     tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6770 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
6771 gen_set_label(l1);
6772 tcg_temp_free(t0);
6773 tcg_temp_free(t1);
6774 if (unlikely(Rc(ctx->opcode) != 0)) {
6775 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6776 }
6777 }
6778
6779 /* PowerPC 602 specific instructions */
6780
6781 /* dsa */
6782 static void gen_dsa(DisasContext *ctx)
6783 {
6784 /* XXX: TODO */
6785 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6786 }
6787
6788 /* esa */
6789 static void gen_esa(DisasContext *ctx)
6790 {
6791 /* XXX: TODO */
6792 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6793 }
6794
6795 /* mfrom */
6796 static void gen_mfrom(DisasContext *ctx)
6797 {
6798 #if defined(CONFIG_USER_ONLY)
6799 GEN_PRIV;
6800 #else
6801 CHK_SV;
6802 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6803 #endif /* defined(CONFIG_USER_ONLY) */
6804 }
6805
6806 /* 602 - 603 - G2 TLB management */
6807
6808 /* tlbld */
6809 static void gen_tlbld_6xx(DisasContext *ctx)
6810 {
6811 #if defined(CONFIG_USER_ONLY)
6812 GEN_PRIV;
6813 #else
6814 CHK_SV;
6815 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6816 #endif /* defined(CONFIG_USER_ONLY) */
6817 }
6818
6819 /* tlbli */
6820 static void gen_tlbli_6xx(DisasContext *ctx)
6821 {
6822 #if defined(CONFIG_USER_ONLY)
6823 GEN_PRIV;
6824 #else
6825 CHK_SV;
6826 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6827 #endif /* defined(CONFIG_USER_ONLY) */
6828 }
6829
6830 /* 74xx TLB management */
6831
6832 /* tlbld */
6833 static void gen_tlbld_74xx(DisasContext *ctx)
6834 {
6835 #if defined(CONFIG_USER_ONLY)
6836 GEN_PRIV;
6837 #else
6838 CHK_SV;
6839 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6840 #endif /* defined(CONFIG_USER_ONLY) */
6841 }
6842
6843 /* tlbli */
6844 static void gen_tlbli_74xx(DisasContext *ctx)
6845 {
6846 #if defined(CONFIG_USER_ONLY)
6847 GEN_PRIV;
6848 #else
6849 CHK_SV;
6850 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6851 #endif /* defined(CONFIG_USER_ONLY) */
6852 }
6853
6854 /* POWER instructions not in PowerPC 601 */
6855
6856 /* clf */
6857 static void gen_clf(DisasContext *ctx)
6858 {
6859 /* Cache line flush: implemented as no-op */
6860 }
6861
6862 /* cli */
6863 static void gen_cli(DisasContext *ctx)
6864 {
6865 #if defined(CONFIG_USER_ONLY)
6866 GEN_PRIV;
6867 #else
6868 /* Cache line invalidate: privileged and treated as no-op */
6869 CHK_SV;
6870 #endif /* defined(CONFIG_USER_ONLY) */
6871 }
6872
6873 /* dclst */
6874 static void gen_dclst(DisasContext *ctx)
6875 {
6876 /* Data cache line store: treated as no-op */
6877 }
6878
6879 static void gen_mfsri(DisasContext *ctx)
6880 {
6881 #if defined(CONFIG_USER_ONLY)
6882 GEN_PRIV;
6883 #else
6884 int ra = rA(ctx->opcode);
6885 int rd = rD(ctx->opcode);
6886 TCGv t0;
6887
6888 CHK_SV;
6889 t0 = tcg_temp_new();
6890 gen_addr_reg_index(ctx, t0);
6891 tcg_gen_extract_tl(t0, t0, 28, 4);
6892 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
6893 tcg_temp_free(t0);
6894 if (ra != 0 && ra != rd) {
6895 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
6896 }
6897 #endif /* defined(CONFIG_USER_ONLY) */
6898 }
6899
6900 static void gen_rac(DisasContext *ctx)
6901 {
6902 #if defined(CONFIG_USER_ONLY)
6903 GEN_PRIV;
6904 #else
6905 TCGv t0;
6906
6907 CHK_SV;
6908 t0 = tcg_temp_new();
6909 gen_addr_reg_index(ctx, t0);
6910 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6911 tcg_temp_free(t0);
6912 #endif /* defined(CONFIG_USER_ONLY) */
6913 }
6914
6915 static void gen_rfsvc(DisasContext *ctx)
6916 {
6917 #if defined(CONFIG_USER_ONLY)
6918 GEN_PRIV;
6919 #else
6920 CHK_SV;
6921
6922 gen_helper_rfsvc(cpu_env);
6923 gen_sync_exception(ctx);
6924 #endif /* defined(CONFIG_USER_ONLY) */
6925 }
6926
6927 /* svc is not implemented for now */
6928
6929 /* BookE specific instructions */
6930
6931 /* XXX: not implemented on 440 ? */
6932 static void gen_mfapidi(DisasContext *ctx)
6933 {
6934 /* XXX: TODO */
6935 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6936 }
6937
6938 /* XXX: not implemented on 440 ? */
6939 static void gen_tlbiva(DisasContext *ctx)
6940 {
6941 #if defined(CONFIG_USER_ONLY)
6942 GEN_PRIV;
6943 #else
6944 TCGv t0;
6945
6946 CHK_SV;
6947 t0 = tcg_temp_new();
6948 gen_addr_reg_index(ctx, t0);
6949     gen_helper_tlbiva(cpu_env, t0);
6950 tcg_temp_free(t0);
6951 #endif /* defined(CONFIG_USER_ONLY) */
6952 }
6953
6954 /* All 405 MAC instructions are translated here */
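/*
 * opc2 selects the operation (0x08 multiply, 0x0C multiply-accumulate,
 * 0x0E negative multiply-accumulate). In opc3, bit 0x01 selects signed
 * halfword operands, bits 0x0C select which halves of rA/rB are used
 * (0x04: rA low / rB high, 0x00: both high, 0x0C: both low), bit 0x02
 * requests saturation and bit 0x10 requests an XER[OV] update.
 */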
6955 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
6956 int ra, int rb, int rt, int Rc)
6957 {
6958 TCGv t0, t1;
6959
6960 t0 = tcg_temp_local_new();
6961 t1 = tcg_temp_local_new();
6962
6963 switch (opc3 & 0x0D) {
6964 case 0x05:
6965 /* macchw - macchw. - macchwo - macchwo. */
6966 /* macchws - macchws. - macchwso - macchwso. */
6967 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
6968 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
6969 /* mulchw - mulchw. */
6970 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
6971 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
6972 tcg_gen_ext16s_tl(t1, t1);
6973 break;
6974 case 0x04:
6975 /* macchwu - macchwu. - macchwuo - macchwuo. */
6976 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
6977 /* mulchwu - mulchwu. */
6978 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
6979 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
6980 tcg_gen_ext16u_tl(t1, t1);
6981 break;
6982 case 0x01:
6983 /* machhw - machhw. - machhwo - machhwo. */
6984 /* machhws - machhws. - machhwso - machhwso. */
6985 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
6986 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
6987 /* mulhhw - mulhhw. */
6988 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
6989 tcg_gen_ext16s_tl(t0, t0);
6990 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
6991 tcg_gen_ext16s_tl(t1, t1);
6992 break;
6993 case 0x00:
6994 /* machhwu - machhwu. - machhwuo - machhwuo. */
6995 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
6996 /* mulhhwu - mulhhwu. */
6997 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
6998 tcg_gen_ext16u_tl(t0, t0);
6999 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
7000 tcg_gen_ext16u_tl(t1, t1);
7001 break;
7002 case 0x0D:
7003 /* maclhw - maclhw. - maclhwo - maclhwo. */
7004 /* maclhws - maclhws. - maclhwso - maclhwso. */
7005 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
7006 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
7007 /* mullhw - mullhw. */
7008 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
7009 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
7010 break;
7011 case 0x0C:
7012 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
7013 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
7014 /* mullhwu - mullhwu. */
7015 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
7016 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
7017 break;
7018 }
7019 if (opc2 & 0x04) {
7020 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
7021 tcg_gen_mul_tl(t1, t0, t1);
7022 if (opc2 & 0x02) {
7023 /* nmultiply-and-accumulate (0x0E) */
7024 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
7025 } else {
7026 /* multiply-and-accumulate (0x0C) */
7027 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
7028 }
7029
7030 if (opc3 & 0x12) {
7031 /* Check overflow and/or saturate */
7032 TCGLabel *l1 = gen_new_label();
7033
7034 if (opc3 & 0x10) {
7035 /* Start with XER OV disabled, the most likely case */
7036 tcg_gen_movi_tl(cpu_ov, 0);
7037 }
7038 if (opc3 & 0x01) {
7039 /* Signed */
7040 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
7041 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
7042 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
7043 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
7044 if (opc3 & 0x02) {
7045 /* Saturate */
7046 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
7047 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
7048 }
7049 } else {
7050 /* Unsigned */
7051 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
7052 if (opc3 & 0x02) {
7053 /* Saturate */
7054 tcg_gen_movi_tl(t0, UINT32_MAX);
7055 }
7056 }
7057 if (opc3 & 0x10) {
7058 /* Check overflow */
7059 tcg_gen_movi_tl(cpu_ov, 1);
7060 tcg_gen_movi_tl(cpu_so, 1);
7061 }
7062 gen_set_label(l1);
7063 tcg_gen_mov_tl(cpu_gpr[rt], t0);
7064 }
7065 } else {
7066 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
7067 }
7068 tcg_temp_free(t0);
7069 tcg_temp_free(t1);
7070     if (unlikely(Rc != 0)) {
7071 /* Update Rc0 */
7072 gen_set_Rc0(ctx, cpu_gpr[rt]);
7073 }
7074 }
7075
7076 #define GEN_MAC_HANDLER(name, opc2, opc3) \
7077 static void glue(gen_, name)(DisasContext *ctx) \
7078 { \
7079 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
7080 rD(ctx->opcode), Rc(ctx->opcode)); \
7081 }
7082
7083 /* macchw - macchw. */
7084 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
7085 /* macchwo - macchwo. */
7086 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
7087 /* macchws - macchws. */
7088 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
7089 /* macchwso - macchwso. */
7090 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
7091 /* macchwsu - macchwsu. */
7092 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
7093 /* macchwsuo - macchwsuo. */
7094 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
7095 /* macchwu - macchwu. */
7096 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
7097 /* macchwuo - macchwuo. */
7098 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
7099 /* machhw - machhw. */
7100 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
7101 /* machhwo - machhwo. */
7102 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
7103 /* machhws - machhws. */
7104 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
7105 /* machhwso - machhwso. */
7106 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
7107 /* machhwsu - machhwsu. */
7108 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
7109 /* machhwsuo - machhwsuo. */
7110 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
7111 /* machhwu - machhwu. */
7112 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
7113 /* machhwuo - machhwuo. */
7114 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
7115 /* maclhw - maclhw. */
7116 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
7117 /* maclhwo - maclhwo. */
7118 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
7119 /* maclhws - maclhws. */
7120 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
7121 /* maclhwso - maclhwso. */
7122 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
7123 /* maclhwu - maclhwu. */
7124 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
7125 /* maclhwuo - maclhwuo. */
7126 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
7127 /* maclhwsu - maclhwsu. */
7128 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
7129 /* maclhwsuo - maclhwsuo. */
7130 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
7131 /* nmacchw - nmacchw. */
7132 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
7133 /* nmacchwo - nmacchwo. */
7134 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
7135 /* nmacchws - nmacchws. */
7136 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
7137 /* nmacchwso - nmacchwso. */
7138 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
7139 /* nmachhw - nmachhw. */
7140 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
7141 /* nmachhwo - nmachhwo. */
7142 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
7143 /* nmachhws - nmachhws. */
7144 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
7145 /* nmachhwso - nmachhwso. */
7146 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
7147 /* nmaclhw - nmaclhw. */
7148 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
7149 /* nmaclhwo - nmaclhwo. */
7150 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
7151 /* nmaclhws - nmaclhws. */
7152 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
7153 /* nmaclhwso - nmaclhwso. */
7154 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
7155
7156 /* mulchw - mulchw. */
7157 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
7158 /* mulchwu - mulchwu. */
7159 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
7160 /* mulhhw - mulhhw. */
7161 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
7162 /* mulhhwu - mulhhwu. */
7163 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
7164 /* mullhw - mullhw. */
7165 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
7166 /* mullhwu - mullhwu. */
7167 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
7168
7169 /* mfdcr */
7170 static void gen_mfdcr(DisasContext *ctx)
7171 {
7172 #if defined(CONFIG_USER_ONLY)
7173 GEN_PRIV;
7174 #else
7175 TCGv dcrn;
7176
7177 CHK_SV;
7178 dcrn = tcg_const_tl(SPR(ctx->opcode));
7179 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
7180 tcg_temp_free(dcrn);
7181 #endif /* defined(CONFIG_USER_ONLY) */
7182 }
7183
7184 /* mtdcr */
7185 static void gen_mtdcr(DisasContext *ctx)
7186 {
7187 #if defined(CONFIG_USER_ONLY)
7188 GEN_PRIV;
7189 #else
7190 TCGv dcrn;
7191
7192 CHK_SV;
7193 dcrn = tcg_const_tl(SPR(ctx->opcode));
7194 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
7195 tcg_temp_free(dcrn);
7196 #endif /* defined(CONFIG_USER_ONLY) */
7197 }
7198
7199 /* mfdcrx */
7200 /* XXX: not implemented on 440 ? */
7201 static void gen_mfdcrx(DisasContext *ctx)
7202 {
7203 #if defined(CONFIG_USER_ONLY)
7204 GEN_PRIV;
7205 #else
7206 CHK_SV;
7207 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
7208 cpu_gpr[rA(ctx->opcode)]);
7209     /* Note: setting the Rc flag leaves Rc0 in an undefined state */
7210 #endif /* defined(CONFIG_USER_ONLY) */
7211 }
7212
7213 /* mtdcrx */
7214 /* XXX: not implemented on 440 ? */
7215 static void gen_mtdcrx(DisasContext *ctx)
7216 {
7217 #if defined(CONFIG_USER_ONLY)
7218 GEN_PRIV;
7219 #else
7220 CHK_SV;
7221 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
7222 cpu_gpr[rS(ctx->opcode)]);
7223     /* Note: setting the Rc flag leaves Rc0 in an undefined state */
7224 #endif /* defined(CONFIG_USER_ONLY) */
7225 }
7226
7227 /* mfdcrux (PPC 460) : user-mode access to DCR */
7228 static void gen_mfdcrux(DisasContext *ctx)
7229 {
7230 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
7231 cpu_gpr[rA(ctx->opcode)]);
7232     /* Note: setting the Rc flag leaves Rc0 in an undefined state */
7233 }
7234
7235 /* mtdcrux (PPC 460) : user-mode access to DCR */
7236 static void gen_mtdcrux(DisasContext *ctx)
7237 {
7238 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
7239 cpu_gpr[rS(ctx->opcode)]);
7240     /* Note: setting the Rc flag leaves Rc0 in an undefined state */
7241 }
7242
7243 /* dccci */
7244 static void gen_dccci(DisasContext *ctx)
7245 {
7246 CHK_SV;
7247 /* interpreted as no-op */
7248 }
7249
7250 /* dcread */
7251 static void gen_dcread(DisasContext *ctx)
7252 {
7253 #if defined(CONFIG_USER_ONLY)
7254 GEN_PRIV;
7255 #else
7256 TCGv EA, val;
7257
7258 CHK_SV;
7259 gen_set_access_type(ctx, ACCESS_CACHE);
7260 EA = tcg_temp_new();
7261 gen_addr_reg_index(ctx, EA);
7262 val = tcg_temp_new();
7263 gen_qemu_ld32u(ctx, val, EA);
7264 tcg_temp_free(val);
7265 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
7266 tcg_temp_free(EA);
7267 #endif /* defined(CONFIG_USER_ONLY) */
7268 }
7269
7270 /* icbt */
7271 static void gen_icbt_40x(DisasContext *ctx)
7272 {
7273 /*
7274 * interpreted as no-op
7275      * XXX: the specification says this is treated as a load by the MMU but
7276 * does not generate any exception
7277 */
7278 }
7279
7280 /* iccci */
7281 static void gen_iccci(DisasContext *ctx)
7282 {
7283 CHK_SV;
7284 /* interpreted as no-op */
7285 }
7286
7287 /* icread */
7288 static void gen_icread(DisasContext *ctx)
7289 {
7290 CHK_SV;
7291 /* interpreted as no-op */
7292 }
7293
7294 /* rfci (supervisor only) */
7295 static void gen_rfci_40x(DisasContext *ctx)
7296 {
7297 #if defined(CONFIG_USER_ONLY)
7298 GEN_PRIV;
7299 #else
7300 CHK_SV;
7301 /* Restore CPU state */
7302 gen_helper_40x_rfci(cpu_env);
7303 gen_sync_exception(ctx);
7304 #endif /* defined(CONFIG_USER_ONLY) */
7305 }
7306
7307 static void gen_rfci(DisasContext *ctx)
7308 {
7309 #if defined(CONFIG_USER_ONLY)
7310 GEN_PRIV;
7311 #else
7312 CHK_SV;
7313 /* Restore CPU state */
7314 gen_helper_rfci(cpu_env);
7315 gen_sync_exception(ctx);
7316 #endif /* defined(CONFIG_USER_ONLY) */
7317 }
7318
7319 /* BookE specific */
7320
7321 /* XXX: not implemented on 440 ? */
7322 static void gen_rfdi(DisasContext *ctx)
7323 {
7324 #if defined(CONFIG_USER_ONLY)
7325 GEN_PRIV;
7326 #else
7327 CHK_SV;
7328 /* Restore CPU state */
7329 gen_helper_rfdi(cpu_env);
7330 gen_sync_exception(ctx);
7331 #endif /* defined(CONFIG_USER_ONLY) */
7332 }
7333
7334 /* XXX: not implemented on 440 ? */
7335 static void gen_rfmci(DisasContext *ctx)
7336 {
7337 #if defined(CONFIG_USER_ONLY)
7338 GEN_PRIV;
7339 #else
7340 CHK_SV;
7341 /* Restore CPU state */
7342 gen_helper_rfmci(cpu_env);
7343 gen_sync_exception(ctx);
7344 #endif /* defined(CONFIG_USER_ONLY) */
7345 }
7346
7347 /* TLB management - PowerPC 405 implementation */
7348
7349 /* tlbre */
7350 static void gen_tlbre_40x(DisasContext *ctx)
7351 {
7352 #if defined(CONFIG_USER_ONLY)
7353 GEN_PRIV;
7354 #else
7355 CHK_SV;
7356 switch (rB(ctx->opcode)) {
7357 case 0:
7358 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
7359 cpu_gpr[rA(ctx->opcode)]);
7360 break;
7361 case 1:
7362 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
7363 cpu_gpr[rA(ctx->opcode)]);
7364 break;
7365 default:
7366 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7367 break;
7368 }
7369 #endif /* defined(CONFIG_USER_ONLY) */
7370 }
7371
7372 /* tlbsx - tlbsx. */
7373 static void gen_tlbsx_40x(DisasContext *ctx)
7374 {
7375 #if defined(CONFIG_USER_ONLY)
7376 GEN_PRIV;
7377 #else
7378 TCGv t0;
7379
7380 CHK_SV;
7381 t0 = tcg_temp_new();
7382 gen_addr_reg_index(ctx, t0);
7383 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
7384 tcg_temp_free(t0);
7385 if (Rc(ctx->opcode)) {
7386 TCGLabel *l1 = gen_new_label();
7387 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
7388 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
7389 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
7390 gen_set_label(l1);
7391 }
7392 #endif /* defined(CONFIG_USER_ONLY) */
7393 }
7394
7395 /* tlbwe */
7396 static void gen_tlbwe_40x(DisasContext *ctx)
7397 {
7398 #if defined(CONFIG_USER_ONLY)
7399 GEN_PRIV;
7400 #else
7401 CHK_SV;
7402
7403 switch (rB(ctx->opcode)) {
7404 case 0:
7405 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
7406 cpu_gpr[rS(ctx->opcode)]);
7407 break;
7408 case 1:
7409 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
7410 cpu_gpr[rS(ctx->opcode)]);
7411 break;
7412 default:
7413 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7414 break;
7415 }
7416 #endif /* defined(CONFIG_USER_ONLY) */
7417 }
7418
7419 /* TLB management - PowerPC 440 implementation */
7420
7421 /* tlbre */
7422 static void gen_tlbre_440(DisasContext *ctx)
7423 {
7424 #if defined(CONFIG_USER_ONLY)
7425 GEN_PRIV;
7426 #else
7427 CHK_SV;
7428
7429 switch (rB(ctx->opcode)) {
7430 case 0:
7431 case 1:
7432 case 2:
7433 {
7434 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
7435 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
7436 t0, cpu_gpr[rA(ctx->opcode)]);
7437 tcg_temp_free_i32(t0);
7438 }
7439 break;
7440 default:
7441 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7442 break;
7443 }
7444 #endif /* defined(CONFIG_USER_ONLY) */
7445 }
7446
7447 /* tlbsx - tlbsx. */
7448 static void gen_tlbsx_440(DisasContext *ctx)
7449 {
7450 #if defined(CONFIG_USER_ONLY)
7451 GEN_PRIV;
7452 #else
7453 TCGv t0;
7454
7455 CHK_SV;
7456 t0 = tcg_temp_new();
7457 gen_addr_reg_index(ctx, t0);
7458 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
7459 tcg_temp_free(t0);
7460 if (Rc(ctx->opcode)) {
7461 TCGLabel *l1 = gen_new_label();
7462 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
7463 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
7464 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
7465 gen_set_label(l1);
7466 }
7467 #endif /* defined(CONFIG_USER_ONLY) */
7468 }
7469
7470 /* tlbwe */
7471 static void gen_tlbwe_440(DisasContext *ctx)
7472 {
7473 #if defined(CONFIG_USER_ONLY)
7474 GEN_PRIV;
7475 #else
7476 CHK_SV;
7477 switch (rB(ctx->opcode)) {
7478 case 0:
7479 case 1:
7480 case 2:
7481 {
7482 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
7483 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
7484 cpu_gpr[rS(ctx->opcode)]);
7485 tcg_temp_free_i32(t0);
7486 }
7487 break;
7488 default:
7489 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7490 break;
7491 }
7492 #endif /* defined(CONFIG_USER_ONLY) */
7493 }
7494
7495 /* TLB management - PowerPC BookE 2.06 implementation */
7496
7497 /* tlbre */
7498 static void gen_tlbre_booke206(DisasContext *ctx)
7499 {
7500 #if defined(CONFIG_USER_ONLY)
7501 GEN_PRIV;
7502 #else
7503 CHK_SV;
7504 gen_helper_booke206_tlbre(cpu_env);
7505 #endif /* defined(CONFIG_USER_ONLY) */
7506 }
7507
7508 /* tlbsx - tlbsx. */
7509 static void gen_tlbsx_booke206(DisasContext *ctx)
7510 {
7511 #if defined(CONFIG_USER_ONLY)
7512 GEN_PRIV;
7513 #else
7514 TCGv t0;
7515
7516 CHK_SV;
7517 if (rA(ctx->opcode)) {
7518 t0 = tcg_temp_new();
7519 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
7520 } else {
7521 t0 = tcg_const_tl(0);
7522 }
7523
7524 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
7525 gen_helper_booke206_tlbsx(cpu_env, t0);
7526 tcg_temp_free(t0);
7527 #endif /* defined(CONFIG_USER_ONLY) */
7528 }
7529
7530 /* tlbwe */
7531 static void gen_tlbwe_booke206(DisasContext *ctx)
7532 {
7533 #if defined(CONFIG_USER_ONLY)
7534 GEN_PRIV;
7535 #else
7536 CHK_SV;
7537 gen_helper_booke206_tlbwe(cpu_env);
7538 #endif /* defined(CONFIG_USER_ONLY) */
7539 }
7540
7541 static void gen_tlbivax_booke206(DisasContext *ctx)
7542 {
7543 #if defined(CONFIG_USER_ONLY)
7544 GEN_PRIV;
7545 #else
7546 TCGv t0;
7547
7548 CHK_SV;
7549 t0 = tcg_temp_new();
7550 gen_addr_reg_index(ctx, t0);
7551 gen_helper_booke206_tlbivax(cpu_env, t0);
7552 tcg_temp_free(t0);
7553 #endif /* defined(CONFIG_USER_ONLY) */
7554 }
7555
7556 static void gen_tlbilx_booke206(DisasContext *ctx)
7557 {
7558 #if defined(CONFIG_USER_ONLY)
7559 GEN_PRIV;
7560 #else
7561 TCGv t0;
7562
7563 CHK_SV;
7564 t0 = tcg_temp_new();
7565 gen_addr_reg_index(ctx, t0);
7566
7567 switch ((ctx->opcode >> 21) & 0x3) {
7568 case 0:
7569 gen_helper_booke206_tlbilx0(cpu_env, t0);
7570 break;
7571 case 1:
7572 gen_helper_booke206_tlbilx1(cpu_env, t0);
7573 break;
7574 case 3:
7575 gen_helper_booke206_tlbilx3(cpu_env, t0);
7576 break;
7577 default:
7578 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7579 break;
7580 }
7581
7582 tcg_temp_free(t0);
7583 #endif /* defined(CONFIG_USER_ONLY) */
7584 }
7585
7586
7587 /* wrtee */
7588 static void gen_wrtee(DisasContext *ctx)
7589 {
7590 #if defined(CONFIG_USER_ONLY)
7591 GEN_PRIV;
7592 #else
7593 TCGv t0;
7594
7595 CHK_SV;
7596 t0 = tcg_temp_new();
7597 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
7598 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
7599 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
7600 tcg_temp_free(t0);
7601 /*
7602 * Stop translation to have a chance to raise an exception if we
7603 * just set msr_ee to 1
7604 */
7605 gen_stop_exception(ctx);
7606 #endif /* defined(CONFIG_USER_ONLY) */
7607 }
7608
7609 /* wrteei */
7610 static void gen_wrteei(DisasContext *ctx)
7611 {
7612 #if defined(CONFIG_USER_ONLY)
7613 GEN_PRIV;
7614 #else
7615 CHK_SV;
7616 if (ctx->opcode & 0x00008000) {
7617 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
7618 /* Stop translation to have a chance to raise an exception */
7619 gen_stop_exception(ctx);
7620 } else {
7621 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
7622 }
7623 #endif /* defined(CONFIG_USER_ONLY) */
7624 }
7625
7626 /* PowerPC 440 specific instructions */
7627
7628 /* dlmzb */
7629 static void gen_dlmzb(DisasContext *ctx)
7630 {
7631 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
7632 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
7633 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
7634 tcg_temp_free_i32(t0);
7635 }
7636
7637 /* mbar replaces eieio on 440 */
7638 static void gen_mbar(DisasContext *ctx)
7639 {
7640 /* interpreted as no-op */
7641 }
7642
7643 /* msync replaces sync on 440 */
7644 static void gen_msync_4xx(DisasContext *ctx)
7645 {
7646 /* Only e500 seems to treat reserved bits as invalid */
7647 if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
7648 (ctx->opcode & 0x03FFF801)) {
7649 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7650 }
7651 /* otherwise interpreted as no-op */
7652 }
7653
7654 /* icbt */
7655 static void gen_icbt_440(DisasContext *ctx)
7656 {
7657 /*
7658 * interpreted as no-op
7659      * XXX: the specification says this is treated as a load by the MMU but
7660 * does not generate any exception
7661 */
7662 }
7663
7664 /* Embedded.Processor Control */
7665
7666 static void gen_msgclr(DisasContext *ctx)
7667 {
7668 #if defined(CONFIG_USER_ONLY)
7669 GEN_PRIV;
7670 #else
7671 CHK_HV;
7672 if (is_book3s_arch2x(ctx)) {
7673 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7674 } else {
7675 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7676 }
7677 #endif /* defined(CONFIG_USER_ONLY) */
7678 }
7679
7680 static void gen_msgsnd(DisasContext *ctx)
7681 {
7682 #if defined(CONFIG_USER_ONLY)
7683 GEN_PRIV;
7684 #else
7685 CHK_HV;
7686 if (is_book3s_arch2x(ctx)) {
7687 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]);
7688 } else {
7689 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
7690 }
7691 #endif /* defined(CONFIG_USER_ONLY) */
7692 }
7693
7694 #if defined(TARGET_PPC64)
7695 static void gen_msgclrp(DisasContext *ctx)
7696 {
7697 #if defined(CONFIG_USER_ONLY)
7698 GEN_PRIV;
7699 #else
7700 CHK_SV;
7701 gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7702 #endif /* defined(CONFIG_USER_ONLY) */
7703 }
7704
7705 static void gen_msgsndp(DisasContext *ctx)
7706 {
7707 #if defined(CONFIG_USER_ONLY)
7708 GEN_PRIV;
7709 #else
7710 CHK_SV;
7711 gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7712 #endif /* defined(CONFIG_USER_ONLY) */
7713 }
7714 #endif
7715
7716 static void gen_msgsync(DisasContext *ctx)
7717 {
7718 #if defined(CONFIG_USER_ONLY)
7719 GEN_PRIV;
7720 #else
7721 CHK_HV;
7722 #endif /* defined(CONFIG_USER_ONLY) */
7723 /* interpreted as no-op */
7724 }
7725
7726 #if defined(TARGET_PPC64)
7727 static void gen_maddld(DisasContext *ctx)
7728 {
7729 TCGv_i64 t1 = tcg_temp_new_i64();
7730
7731 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7732 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
7733 tcg_temp_free_i64(t1);
7734 }
7735
7736 /* maddhd maddhdu */
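/*
 * rD = high 64 bits of (rA * rB) + rC. The low opcode bit (read here with
 * Rc()) selects the unsigned form (maddhdu, rC zero-extended to 128 bits)
 * or the signed form (maddhd, rC sign-extended); add2 propagates the carry
 * from the low half of the 128-bit sum into the high half written to rD.
 */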
7737 static void gen_maddhd_maddhdu(DisasContext *ctx)
7738 {
7739 TCGv_i64 lo = tcg_temp_new_i64();
7740 TCGv_i64 hi = tcg_temp_new_i64();
7741 TCGv_i64 t1 = tcg_temp_new_i64();
7742
7743 if (Rc(ctx->opcode)) {
7744 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
7745 cpu_gpr[rB(ctx->opcode)]);
7746 tcg_gen_movi_i64(t1, 0);
7747 } else {
7748 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
7749 cpu_gpr[rB(ctx->opcode)]);
7750 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
7751 }
7752 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
7753 cpu_gpr[rC(ctx->opcode)], t1);
7754 tcg_temp_free_i64(lo);
7755 tcg_temp_free_i64(hi);
7756 tcg_temp_free_i64(t1);
7757 }
7758 #endif /* defined(TARGET_PPC64) */
7759
7760 static void gen_tbegin(DisasContext *ctx)
7761 {
7762 if (unlikely(!ctx->tm_enabled)) {
7763 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
7764 return;
7765 }
7766 gen_helper_tbegin(cpu_env);
7767 }
7768
7769 #define GEN_TM_NOOP(name) \
7770 static inline void gen_##name(DisasContext *ctx) \
7771 { \
7772 if (unlikely(!ctx->tm_enabled)) { \
7773 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
7774 return; \
7775 } \
7776 /* \
7777 * Because tbegin always fails in QEMU, these user \
7778 * space instructions all have a simple implementation: \
7779 * \
7780 * CR[0] = 0b0 || MSR[TS] || 0b0 \
7781 * = 0b0 || 0b00 || 0b0 \
7782 */ \
7783 tcg_gen_movi_i32(cpu_crf[0], 0); \
7784 }
7785
7786 GEN_TM_NOOP(tend);
7787 GEN_TM_NOOP(tabort);
7788 GEN_TM_NOOP(tabortwc);
7789 GEN_TM_NOOP(tabortwci);
7790 GEN_TM_NOOP(tabortdc);
7791 GEN_TM_NOOP(tabortdci);
7792 GEN_TM_NOOP(tsr);
7793
7794 static inline void gen_cp_abort(DisasContext *ctx)
7795 {
7796 /* Do Nothing */
7797 }
7798
7799 #define GEN_CP_PASTE_NOOP(name) \
7800 static inline void gen_##name(DisasContext *ctx) \
7801 { \
7802 /* \
7803 * Generate invalid exception until we have an \
7804 * implementation of the copy paste facility \
7805 */ \
7806 gen_invalid(ctx); \
7807 }
7808
7809 GEN_CP_PASTE_NOOP(copy)
7810 GEN_CP_PASTE_NOOP(paste)
7811
7812 static void gen_tcheck(DisasContext *ctx)
7813 {
7814 if (unlikely(!ctx->tm_enabled)) {
7815 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
7816 return;
7817 }
7818 /*
7819 * Because tbegin always fails, the tcheck implementation is
7820 * simple:
7821 *
7822 * CR[CRF] = TDOOMED || MSR[TS] || 0b0
7823 * = 0b1 || 0b00 || 0b0
7824 */
7825 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
7826 }
7827
7828 #if defined(CONFIG_USER_ONLY)
7829 #define GEN_TM_PRIV_NOOP(name) \
7830 static inline void gen_##name(DisasContext *ctx) \
7831 { \
7832 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \
7833 }
7834
7835 #else
7836
7837 #define GEN_TM_PRIV_NOOP(name) \
7838 static inline void gen_##name(DisasContext *ctx) \
7839 { \
7840 CHK_SV; \
7841 if (unlikely(!ctx->tm_enabled)) { \
7842 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
7843 return; \
7844 } \
7845 /* \
7846 * Because tbegin always fails, the implementation is \
7847 * simple: \
7848 * \
7849 * CR[0] = 0b0 || MSR[TS] || 0b0 \
7850      *         = 0b0 || 0b00 || 0b0                            \
7851 */ \
7852 tcg_gen_movi_i32(cpu_crf[0], 0); \
7853 }
7854
7855 #endif
7856
7857 GEN_TM_PRIV_NOOP(treclaim);
7858 GEN_TM_PRIV_NOOP(trechkpt);
7859
7860 static inline void get_fpr(TCGv_i64 dst, int regno)
7861 {
7862 tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
7863 }
7864
7865 static inline void set_fpr(int regno, TCGv_i64 src)
7866 {
7867 tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
7868 }
7869
7870 static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
7871 {
7872 tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
7873 }
7874
7875 static inline void set_avr64(int regno, TCGv_i64 src, bool high)
7876 {
7877 tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
7878 }
7879
7880 #include "translate/fp-impl.c.inc"
7881
7882 #include "translate/vmx-impl.c.inc"
7883
7884 #include "translate/vsx-impl.c.inc"
7885
7886 #include "translate/dfp-impl.c.inc"
7887
7888 #include "translate/spe-impl.c.inc"
7889
7890 /* Handles lfdp, lxsd, lxssp */
7891 static void gen_dform39(DisasContext *ctx)
7892 {
7893 switch (ctx->opcode & 0x3) {
7894 case 0: /* lfdp */
7895 if (ctx->insns_flags2 & PPC2_ISA205) {
7896 return gen_lfdp(ctx);
7897 }
7898 break;
7899 case 2: /* lxsd */
7900 if (ctx->insns_flags2 & PPC2_ISA300) {
7901 return gen_lxsd(ctx);
7902 }
7903 break;
7904 case 3: /* lxssp */
7905 if (ctx->insns_flags2 & PPC2_ISA300) {
7906 return gen_lxssp(ctx);
7907 }
7908 break;
7909 }
7910 return gen_invalid(ctx);
7911 }
7912
7913 /* Handles stfdp, lxv, stxv, stxsd, stxssp */
7914 static void gen_dform3D(DisasContext *ctx)
7915 {
7916 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */
7917 switch (ctx->opcode & 0x7) {
7918 case 1: /* lxv */
7919 if (ctx->insns_flags2 & PPC2_ISA300) {
7920 return gen_lxv(ctx);
7921 }
7922 break;
7923 case 5: /* stxv */
7924 if (ctx->insns_flags2 & PPC2_ISA300) {
7925 return gen_stxv(ctx);
7926 }
7927 break;
7928 }
7929 } else { /* DS-FORM */
7930 switch (ctx->opcode & 0x3) {
7931 case 0: /* stfdp */
7932 if (ctx->insns_flags2 & PPC2_ISA205) {
7933 return gen_stfdp(ctx);
7934 }
7935 break;
7936 case 2: /* stxsd */
7937 if (ctx->insns_flags2 & PPC2_ISA300) {
7938 return gen_stxsd(ctx);
7939 }
7940 break;
7941 case 3: /* stxssp */
7942 if (ctx->insns_flags2 & PPC2_ISA300) {
7943 return gen_stxssp(ctx);
7944 }
7945 break;
7946 }
7947 }
7948 return gen_invalid(ctx);
7949 }
7950
7951 #if defined(TARGET_PPC64)
7952 /* brd */
7953 static void gen_brd(DisasContext *ctx)
7954 {
7955 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
7956 }
7957
7958 /* brw */
7959 static void gen_brw(DisasContext *ctx)
7960 {
7961 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
7962 tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
7964 }
7965
7966 /* brh */
7967 static void gen_brh(DisasContext *ctx)
7968 {
7969 TCGv_i64 t0 = tcg_temp_new_i64();
7970 TCGv_i64 t1 = tcg_temp_new_i64();
7971 TCGv_i64 t2 = tcg_temp_new_i64();
7972
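/*
 * Byte-reverse each halfword: the mask selects the low byte of every
 * halfword; shift the high bytes down, the low bytes up, and merge.
 */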
7973 tcg_gen_movi_i64(t0, 0x00ff00ff00ff00ffull);
7974 tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
7975 tcg_gen_and_i64(t2, t1, t0);
7976 tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], t0);
7977 tcg_gen_shli_i64(t1, t1, 8);
7978 tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
7979
7980 tcg_temp_free_i64(t0);
7981 tcg_temp_free_i64(t1);
7982 tcg_temp_free_i64(t2);
7983 }
7984 #endif
7985
7986 static opcode_t opcodes[] = {
7987 #if defined(TARGET_PPC64)
7988 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
7989 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
7990 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
7991 #endif
7992 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
7993 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
7994 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
7995 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER),
7996 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
7997 #if defined(TARGET_PPC64)
7998 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
7999 #endif
8000 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
8001 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
8002 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
8003 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8004 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8005 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8006 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8007 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
8008 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
8009 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
8010 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
8011 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
8012 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8013 #if defined(TARGET_PPC64)
8014 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
8015 #endif
8016 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
8017 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
8018 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8019 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8020 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8021 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
8022 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
8023 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
8024 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
8025 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
8026 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
8027 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
8028 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8029 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8030 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8031 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8032 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
8033 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
8034 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
8035 #if defined(TARGET_PPC64)
8036 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
8037 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
8038 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
8039 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
8040 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
8041 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
8042 #endif
8043 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8044 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8045 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8046 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
8047 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
8048 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
8049 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
8050 #if defined(TARGET_PPC64)
8051 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
8052 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
8053 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
8054 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
8055 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
8056 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
8057 PPC_NONE, PPC2_ISA300),
8058 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
8059 PPC_NONE, PPC2_ISA300),
8060 #endif
8061 #if defined(TARGET_PPC64)
8062 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
8063 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
8064 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
8065 #endif
8066 /* handles lfdp, lxsd, lxssp */
8067 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
8068 /* handles stfdp, lxv, stxsd, stxssp, stxv */
8069 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
8070 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8071 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
8072 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
8073 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
8074 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
8075 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
8076 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
8077 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
8078 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
8079 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
8080 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
8081 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
8082 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
8083 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
8084 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
8085 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
8086 #if defined(TARGET_PPC64)
8087 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
8088 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
8089 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
8090 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
8091 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
8092 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
8093 #endif
8094 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
8095 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
8096 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300),
8097 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8098 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8099 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
8100 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
8101 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
8102 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
8103 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
8104 #if defined(TARGET_PPC64)
8105 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
8106 #if !defined(CONFIG_USER_ONLY)
8107 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
8108 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
8109 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
8110 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
8111 #endif
8112 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
8113 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
8114 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
8115 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
8116 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
8117 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
8118 #endif
8119 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
8120 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
8121 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
8122 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
8123 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
8124 #if defined(TARGET_PPC64)
8125 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
8126 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
8127 #endif
8128 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
8129 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
8130 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
8131 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
8132 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
8133 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
8134 #if defined(TARGET_PPC64)
8135 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
8136 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
8137 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
8138 #endif
8139 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
8140 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
8141 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
8142 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
8143 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
8144 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
8145 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
8146 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
8147 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
8148 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
8149 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
8150 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
8151 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
8152 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
8153 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
8154 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
8155 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
8156 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
8157 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
8158 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
8159 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
8160 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
8161 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
8162 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
8163 #if defined(TARGET_PPC64)
8164 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
8165 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
8166 PPC_SEGMENT_64B),
8167 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
8168 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
8169 PPC_SEGMENT_64B),
8170 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
8171 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
8172 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
8173 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B),
8174 #endif
8175 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
8176 /*
8177 * XXX Those instructions will need to be handled differently for
8178 * different ISA versions
8179 */
8180 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE),
8181 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE),
8182 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300),
8183 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300),
8184 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
8185 #if defined(TARGET_PPC64)
8186 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI),
8187 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
8188 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300),
8189 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
8190 #endif
8191 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
8192 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
8193 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
8194 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
8195 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
8196 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
8197 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
8198 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
8199 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
8200 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
8201 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
8202 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8203 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
8204 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
8205 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
8206 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
8207 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
8208 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
8209 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
8210 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8211 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
8212 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8213 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8214 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8215 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8216 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8217 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8218 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8219 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8220 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8221 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8222 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8223 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8224 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8225 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8226 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8227 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8228 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8229 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8230 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8231 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8232 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8233 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8234 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8235 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8236 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8237 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8238 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8239 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8240 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8241 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8242 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8243 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8244 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8245 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8246 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8247 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8248 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8249 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8250 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8251 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8252 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8253 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8254 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8255 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8256 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8257 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8258 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8259 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8260 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8261 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8262 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
8263 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8264 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8265 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8266 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8267 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8268 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8269 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8270 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8271 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
8272 PPC_NONE, PPC2_BOOKE206),
8273 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
8274 PPC_NONE, PPC2_BOOKE206),
8275 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
8276 PPC_NONE, PPC2_BOOKE206),
8277 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
8278 PPC_NONE, PPC2_BOOKE206),
8279 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
8280 PPC_NONE, PPC2_BOOKE206),
8281 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
8282 PPC_NONE, PPC2_PRCNTL),
8283 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
8284 PPC_NONE, PPC2_PRCNTL),
8285 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000,
8286 PPC_NONE, PPC2_PRCNTL),
8287 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8288 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8289 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8290 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
8291 PPC_BOOKE, PPC2_BOOKE206),
8292 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
8293 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
8294 PPC_BOOKE, PPC2_BOOKE206),
8295 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
8296 PPC_440_SPEC),
8297 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8298 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8299 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8300 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8301 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8302 #if defined(TARGET_PPC64)
8303 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
8304 PPC2_ISA300),
8305 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
8306 GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001,
8307 PPC_NONE, PPC2_ISA207S),
8308 GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001,
8309 PPC_NONE, PPC2_ISA207S),
8310 #endif
8311
8312 #undef GEN_INT_ARITH_ADD
8313 #undef GEN_INT_ARITH_ADD_CONST
8314 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
8315 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
8316 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
8317 add_ca, compute_ca, compute_ov) \
8318 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8319 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8320 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8321 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8322 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8323 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8324 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8325 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8326 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8327 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
8328 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8329 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
8330
8331 #undef GEN_INT_ARITH_DIVW
8332 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
8333 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8334 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8335 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8336 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8337 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8338 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8339 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8340 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8341 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8342 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
8343 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
8344
8345 #if defined(TARGET_PPC64)
8346 #undef GEN_INT_ARITH_DIVD
8347 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
8348 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8349 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8350 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8351 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8352 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8353
8354 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8355 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8356 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8357 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8358 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
8359 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
8360
8361 #undef GEN_INT_ARITH_MUL_HELPER
8362 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
8363 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8364 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8365 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
8366 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
8367 #endif
8368
8369 #undef GEN_INT_ARITH_SUBF
8370 #undef GEN_INT_ARITH_SUBF_CONST
8371 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
8372 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
8373 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
8374 add_ca, compute_ca, compute_ov) \
8375 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8376 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8377 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8378 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8379 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8380 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8381 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8382 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8383 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8384 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8385 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8386
8387 #undef GEN_LOGICAL1
8388 #undef GEN_LOGICAL2
8389 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
8390 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
8391 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
8392 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8393 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8394 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8395 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8396 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8397 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8398 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8399 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8400 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8401 #if defined(TARGET_PPC64)
8402 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
8403 #endif
8404
8405 #if defined(TARGET_PPC64)
8406 #undef GEN_PPC64_R2
8407 #undef GEN_PPC64_R4
8408 #define GEN_PPC64_R2(name, opc1, opc2) \
8409 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8410 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8411 PPC_64B)
8412 #define GEN_PPC64_R4(name, opc1, opc2) \
8413 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8414 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
8415 PPC_64B), \
8416 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8417 PPC_64B), \
8418 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
8419 PPC_64B)
8420 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
8421 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
8422 GEN_PPC64_R4(rldic, 0x1E, 0x04),
8423 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
8424 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
8425 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
8426 #endif
8427
8428 #undef GEN_LD
8429 #undef GEN_LDU
8430 #undef GEN_LDUX
8431 #undef GEN_LDX_E
8432 #undef GEN_LDS
8433 #define GEN_LD(name, ldop, opc, type) \
8434 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8435 #define GEN_LDU(name, ldop, opc, type) \
8436 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8437 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
8438 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8439 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
8440 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
8441 #define GEN_LDS(name, ldop, op, type) \
8442 GEN_LD(name, ldop, op | 0x20, type) \
8443 GEN_LDU(name, ldop, op | 0x21, type) \
8444 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
8445 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
8446
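/*
 * For instance, GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) below registers the
 * lbz, lbzu, lbzux and lbzx encodings in one go.
 */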
8447 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
8448 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
8449 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
8450 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
8451 #if defined(TARGET_PPC64)
8452 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
8453 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
8454 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B)
8455 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B)
8456 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
8457
8458 /* HV/P7 and later only */
8459 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
8460 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
8461 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
8462 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
8463 #endif
8464 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
8465 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
8466
8467 /* External PID based load */
8468 #undef GEN_LDEPX
8469 #define GEN_LDEPX(name, ldop, opc2, opc3) \
8470 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \
8471 0x00000001, PPC_NONE, PPC2_BOOKE206),
8472
8473 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
8474 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
8475 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
8476 #if defined(TARGET_PPC64)
8477 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00)
8478 #endif
8479
8480 #undef GEN_ST
8481 #undef GEN_STU
8482 #undef GEN_STUX
8483 #undef GEN_STX_E
8484 #undef GEN_STS
8485 #define GEN_ST(name, stop, opc, type) \
8486 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8487 #define GEN_STU(name, stop, opc, type) \
8488 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
8489 #define GEN_STUX(name, stop, opc2, opc3, type) \
8490 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8491 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
8492 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
8493 #define GEN_STS(name, stop, op, type) \
8494 GEN_ST(name, stop, op | 0x20, type) \
8495 GEN_STU(name, stop, op | 0x21, type) \
8496 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
8497 GEN_STX(name, stop, 0x17, op | 0x00, type)
8498
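/*
 * Likewise, GEN_STS(stb, st8, 0x06, PPC_INTEGER) below covers the stb,
 * stbu, stbux and stbx encodings.
 */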
8499 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
8500 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
8501 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
8502 #if defined(TARGET_PPC64)
8503 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B)
8504 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B)
8505 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
8506 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
8507 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
8508 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
8509 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
8510 #endif
8511 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
8512 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
8513
8514 #undef GEN_STEPX
8515 #define GEN_STEPX(name, ldop, opc2, opc3) \
8516 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \
8517 0x00000001, PPC_NONE, PPC2_BOOKE206),
8518
8519 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
8520 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
8521 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
8522 #if defined(TARGET_PPC64)
8523 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04)
8524 #endif
8525
8526 #undef GEN_CRLOGIC
8527 #define GEN_CRLOGIC(name, tcg_op, opc) \
8528 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
8529 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
8530 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
8531 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
8532 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
8533 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
8534 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
8535 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
8536 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
8537
8538 #undef GEN_MAC_HANDLER
8539 #define GEN_MAC_HANDLER(name, opc2, opc3) \
8540 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
8541 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
8542 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
8543 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
8544 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
8545 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
8546 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
8547 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
8548 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
8549 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
8550 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
8551 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
8552 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
8553 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
8554 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
8555 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
8556 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
8557 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
8558 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
8559 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
8560 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
8561 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
8562 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
8563 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
8564 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
8565 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
8566 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
8567 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
8568 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
8569 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
8570 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
8571 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
8572 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
8573 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
8574 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
8575 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
8576 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
8577 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
8578 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
8579 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
8580 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
8581 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
8582 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
8583
8584 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
8585 PPC_NONE, PPC2_TM),
8586 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \
8587 PPC_NONE, PPC2_TM),
8588 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
8589 PPC_NONE, PPC2_TM),
8590 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
8591 PPC_NONE, PPC2_TM),
8592 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
8593 PPC_NONE, PPC2_TM),
8594 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
8595 PPC_NONE, PPC2_TM),
8596 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
8597 PPC_NONE, PPC2_TM),
8598 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
8599 PPC_NONE, PPC2_TM),
8600 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
8601 PPC_NONE, PPC2_TM),
8602 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
8603 PPC_NONE, PPC2_TM),
8604 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
8605 PPC_NONE, PPC2_TM),
8606
8607 #include "translate/fp-ops.c.inc"
8608
8609 #include "translate/vmx-ops.c.inc"
8610
8611 #include "translate/vsx-ops.c.inc"
8612
8613 #include "translate/dfp-ops.c.inc"
8614
8615 #include "translate/spe-ops.c.inc"
8616 };
8617
8618 /*****************************************************************************/
8619 /* Opcode types */
8620 enum {
8621 PPC_DIRECT = 0, /* Opcode routine */
8622 PPC_INDIRECT = 1, /* Indirect opcode table */
8623 };
8624
8625 #define PPC_OPCODE_MASK 0x3
8626
8627 static inline int is_indirect_opcode(void *handler)
8628 {
8629 return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
8630 }
8631
8632 static inline opc_handler_t **ind_table(void *handler)
8633 {
8634 return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
8635 }
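
/*
 * Table entries use their two low bits as a tag (allocation alignment keeps
 * them zero otherwise): a plain pointer is a direct handler, while a pointer
 * with PPC_INDIRECT or'ed in refers to a sub-table.  For illustration, a
 * sub-table allocated at a (hypothetical) address 0x...5000 would be stored
 * as 0x...5001, and ind_table() masks the tag off again before use.
 */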
8636
8637 /* Opcode table creation */
8639 static void fill_new_table(opc_handler_t **table, int len)
8640 {
8641 int i;
8642
8643 for (i = 0; i < len; i++) {
8644 table[i] = &invalid_handler;
8645 }
8646 }
8647
8648 static int create_new_table(opc_handler_t **table, unsigned char idx)
8649 {
8650 opc_handler_t **tmp;
8651
8652 tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
8653 fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
8654 table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
8655
8656 return 0;
8657 }
8658
8659 static int insert_in_table(opc_handler_t **table, unsigned char idx,
8660 opc_handler_t *handler)
8661 {
8662 if (table[idx] != &invalid_handler) {
8663 return -1;
8664 }
8665 table[idx] = handler;
8666
8667 return 0;
8668 }
8669
8670 static int register_direct_insn(opc_handler_t **ppc_opcodes,
8671 unsigned char idx, opc_handler_t *handler)
8672 {
8673 if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
8674 printf("*** ERROR: opcode %02x already assigned in main "
8675 "opcode table\n", idx);
8676 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
8677 printf(" Registered handler '%s' - new handler '%s'\n",
8678 ppc_opcodes[idx]->oname, handler->oname);
8679 #endif
8680 return -1;
8681 }
8682
8683 return 0;
8684 }
8685
8686 static int register_ind_in_table(opc_handler_t **table,
8687 unsigned char idx1, unsigned char idx2,
8688 opc_handler_t *handler)
8689 {
8690 if (table[idx1] == &invalid_handler) {
8691 if (create_new_table(table, idx1) < 0) {
8692 printf("*** ERROR: unable to create indirect table "
8693 "idx=%02x\n", idx1);
8694 return -1;
8695 }
8696 } else {
8697 if (!is_indirect_opcode(table[idx1])) {
8698 printf("*** ERROR: idx %02x already assigned to a direct "
8699 "opcode\n", idx1);
8700 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
8701 printf(" Registered handler '%s' - new handler '%s'\n",
8702 ind_table(table[idx1])[idx2]->oname, handler->oname);
8703 #endif
8704 return -1;
8705 }
8706 }
8707 if (handler != NULL &&
8708 insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
8709 printf("*** ERROR: opcode %02x already assigned in "
8710 "opcode table %02x\n", idx2, idx1);
8711 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
8712 printf(" Registered handler '%s' - new handler '%s'\n",
8713 ind_table(table[idx1])[idx2]->oname, handler->oname);
8714 #endif
8715 return -1;
8716 }
8717
8718 return 0;
8719 }
8720
8721 static int register_ind_insn(opc_handler_t **ppc_opcodes,
8722 unsigned char idx1, unsigned char idx2,
8723 opc_handler_t *handler)
8724 {
8725 return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
8726 }
8727
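/*
 * Register a handler two levels deep: make sure the opc1 slot holds an
 * indirect table, then create/extend the opc2 sub-table inside it and
 * install the handler there at index opc3.
 */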
8728 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
8729 unsigned char idx1, unsigned char idx2,
8730 unsigned char idx3, opc_handler_t *handler)
8731 {
8732 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
8733 printf("*** ERROR: unable to join indirect table idx "
8734 "[%02x-%02x]\n", idx1, idx2);
8735 return -1;
8736 }
8737 if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
8738 handler) < 0) {
8739 printf("*** ERROR: unable to insert opcode "
8740 "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
8741 return -1;
8742 }
8743
8744 return 0;
8745 }
8746
8747 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
8748 unsigned char idx1, unsigned char idx2,
8749 unsigned char idx3, unsigned char idx4,
8750 opc_handler_t *handler)
8751 {
8752 opc_handler_t **table;
8753
8754 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
8755 printf("*** ERROR: unable to join indirect table idx "
8756 "[%02x-%02x]\n", idx1, idx2);
8757 return -1;
8758 }
8759 table = ind_table(ppc_opcodes[idx1]);
8760 if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
8761 printf("*** ERROR: unable to join 2nd-level indirect table idx "
8762 "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
8763 return -1;
8764 }
8765 table = ind_table(table[idx2]);
8766 if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
8767 printf("*** ERROR: unable to insert opcode "
8768 "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
8769 return -1;
8770 }
8771 return 0;
8772 }
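
/*
 * Dispatch a registration on how many opcode fields are in use: a field
 * left at 0xFF is unused, so e.g. stwcx. above (opc1/opc2/opc3 =
 * 0x1F/0x16/0x04, no opc4) ends up two levels deep under major opcode 0x1F
 * via register_dblind_insn().
 */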
8773 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
8774 {
8775 if (insn->opc2 != 0xFF) {
8776 if (insn->opc3 != 0xFF) {
8777 if (insn->opc4 != 0xFF) {
8778 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
8779 insn->opc3, insn->opc4,
8780 &insn->handler) < 0) {
8781 return -1;
8782 }
8783 } else {
8784 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
8785 insn->opc3, &insn->handler) < 0) {
8786 return -1;
8787 }
8788 }
8789 } else {
8790 if (register_ind_insn(ppc_opcodes, insn->opc1,
8791 insn->opc2, &insn->handler) < 0) {
8792 return -1;
8793 }
8794 }
8795 } else {
8796 if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
8797 return -1;
8798 }
8799 }
8800
8801 return 0;
8802 }
8803
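/*
 * Recursively count the handlers installed in a (sub)table; indirect
 * sub-tables that turn out to be empty are freed and replaced by
 * invalid_handler on the way.
 */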
8804 static int test_opcode_table(opc_handler_t **table, int len)
8805 {
8806 int i, count, tmp;
8807
8808 for (i = 0, count = 0; i < len; i++) {
8809 /* Consistency fixup */
8810 if (table[i] == NULL) {
8811 table[i] = &invalid_handler;
8812 }
8813 if (table[i] != &invalid_handler) {
8814 if (is_indirect_opcode(table[i])) {
8815 tmp = test_opcode_table(ind_table(table[i]),
8816 PPC_CPU_INDIRECT_OPCODES_LEN);
8817 if (tmp == 0) {
8818 free(table[i]);
8819 table[i] = &invalid_handler;
8820 } else {
8821 count++;
8822 }
8823 } else {
8824 count++;
8825 }
8826 }
8827 }
8828
8829 return count;
8830 }
8831
8832 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
8833 {
8834 if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
8835 printf("*** WARNING: no opcode defined !\n");
8836 }
8837 }
8838
8839 /*****************************************************************************/
8840 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
8841 {
8842 PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
8843 opcode_t *opc;
8844
8845 fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
8846 for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
8847 if (((opc->handler.type & pcc->insns_flags) != 0) ||
8848 ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
8849 if (register_insn(cpu->opcodes, opc) < 0) {
8850 error_setg(errp, "ERROR initializing PowerPC instruction "
8851 "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
8852 opc->opc3);
8853 return;
8854 }
8855 }
8856 }
8857 fix_opcode_tables(cpu->opcodes);
8858 fflush(stdout);
8859 fflush(stderr);
8860 }
8861
8862 void destroy_ppc_opcodes(PowerPCCPU *cpu)
8863 {
8864 opc_handler_t **table, **table_2;
8865 int i, j, k;
8866
8867 for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
8868 if (cpu->opcodes[i] == &invalid_handler) {
8869 continue;
8870 }
8871 if (is_indirect_opcode(cpu->opcodes[i])) {
8872 table = ind_table(cpu->opcodes[i]);
8873 for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
8874 if (table[j] == &invalid_handler) {
8875 continue;
8876 }
8877 if (is_indirect_opcode(table[j])) {
8878 table_2 = ind_table(table[j]);
8879 for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
8880 if (table_2[k] != &invalid_handler &&
8881 is_indirect_opcode(table_2[k])) {
8882 g_free((opc_handler_t *)((uintptr_t)table_2[k] &
8883 ~PPC_INDIRECT));
8884 }
8885 }
8886 g_free((opc_handler_t *)((uintptr_t)table[j] &
8887 ~PPC_INDIRECT));
8888 }
8889 }
8890 g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
8891 ~PPC_INDIRECT));
8892 }
8893 }
8894 }
8895
8896 #if defined(PPC_DUMP_CPU)
8897 static void dump_ppc_insns(CPUPPCState *env)
8898 {
8899 opc_handler_t **table, *handler;
8900 const char *p, *q;
8901 uint8_t opc1, opc2, opc3, opc4;
8902
8903 printf("Instructions set:\n");
8904 /* opc1 is 6 bits long */
8905 for (opc1 = 0x00; opc1 < PPC_CPU_OPCODES_LEN; opc1++) {
8906 table = env->opcodes;
8907 handler = table[opc1];
8908 if (is_indirect_opcode(handler)) {
8909 /* opc2 is 5 bits long */
8910 for (opc2 = 0; opc2 < PPC_CPU_INDIRECT_OPCODES_LEN; opc2++) {
8911 table = env->opcodes;
8912 handler = env->opcodes[opc1];
8913 table = ind_table(handler);
8914 handler = table[opc2];
8915 if (is_indirect_opcode(handler)) {
8916 table = ind_table(handler);
8917 /* opc3 is 5 bits long */
8918 for (opc3 = 0; opc3 < PPC_CPU_INDIRECT_OPCODES_LEN;
8919 opc3++) {
8920 handler = table[opc3];
8921 if (is_indirect_opcode(handler)) {
8922 table = ind_table(handler);
8923 /* opc4 is 5 bits long */
8924 for (opc4 = 0; opc4 < PPC_CPU_INDIRECT_OPCODES_LEN;
8925 opc4++) {
8926 handler = table[opc4];
8927 if (handler->handler != &gen_invalid) {
8928 printf("INSN: %02x %02x %02x %02x -- "
8929 "(%02d %04d %02d) : %s\n",
8930 opc1, opc2, opc3, opc4,
8931 opc1, (opc3 << 5) | opc2, opc4,
8932 handler->oname);
8933 }
8934 }
8935 } else {
8936 if (handler->handler != &gen_invalid) {
8937 /* Special hack to properly dump SPE insns */
8938 p = strchr(handler->oname, '_');
8939 if (p == NULL) {
8940 printf("INSN: %02x %02x %02x (%02d %04d) : "
8941 "%s\n",
8942 opc1, opc2, opc3, opc1,
8943 (opc3 << 5) | opc2,
8944 handler->oname);
8945 } else {
8946 q = "speundef";
8947 if ((p - handler->oname) != strlen(q)
8948 || (memcmp(handler->oname, q, strlen(q))
8949 != 0)) {
8950 /* First instruction */
8951 printf("INSN: %02x %02x %02x"
8952 "(%02d %04d) : %.*s\n",
8953 opc1, opc2 << 1, opc3, opc1,
8954 (opc3 << 6) | (opc2 << 1),
8955 (int)(p - handler->oname),
8956 handler->oname);
8957 }
8958 if (strcmp(p + 1, q) != 0) {
8959 /* Second instruction */
8960 printf("INSN: %02x %02x %02x "
8961 "(%02d %04d) : %s\n", opc1,
8962 (opc2 << 1) | 1, opc3, opc1,
8963 (opc3 << 6) | (opc2 << 1) | 1,
8964 p + 1);
8965 }
8966 }
8967 }
8968 }
8969 }
8970 } else {
8971 if (handler->handler != &gen_invalid) {
8972 printf("INSN: %02x %02x -- (%02d %04d) : %s\n",
8973 opc1, opc2, opc1, opc2, handler->oname);
8974 }
8975 }
8976 }
8977 } else {
8978 if (handler->handler != &gen_invalid) {
8979 printf("INSN: %02x -- -- (%02d ----) : %s\n",
8980 opc1, opc1, handler->oname);
8981 }
8982 }
8983 }
8984 }
8985 #endif

8986 int ppc_fixup_cpu(PowerPCCPU *cpu)
8987 {
8988 CPUPPCState *env = &cpu->env;
8989
8990 /*
8991 * TCG doesn't (yet) emulate some groups of instructions that are
8992 * implemented on some otherwise supported CPUs (e.g. VSX and
8993 * decimal floating point instructions on POWER7). We remove
8994 * unsupported instruction groups from the cpu state's instruction
8995 * masks and hope the guest can cope. For at least the pseries
8996 * machine, the unavailability of these instructions can be
8997 * advertised to the guest via the device tree.
8998 */
8999 if ((env->insns_flags & ~PPC_TCG_INSNS)
9000 || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
9001 warn_report("Disabling some instructions which are not "
9002 "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
9003 env->insns_flags & ~PPC_TCG_INSNS,
9004 env->insns_flags2 & ~PPC_TCG_INSNS2);
9005 }
9006 env->insns_flags &= PPC_TCG_INSNS;
9007 env->insns_flags2 &= PPC_TCG_INSNS2;
9008 return 0;
9009 }
9010
9012 void ppc_cpu_dump_statistics(CPUState *cs, int flags)
9013 {
9014 #if defined(DO_PPC_STATISTICS)
9015 PowerPCCPU *cpu = POWERPC_CPU(cs);
9016 opc_handler_t **t1, **t2, **t3, *handler;
9017 int op1, op2, op3;
9018
9019 t1 = cpu->env.opcodes;
9020 for (op1 = 0; op1 < 64; op1++) {
9021 handler = t1[op1];
9022 if (is_indirect_opcode(handler)) {
9023 t2 = ind_table(handler);
9024 for (op2 = 0; op2 < 32; op2++) {
9025 handler = t2[op2];
9026 if (is_indirect_opcode(handler)) {
9027 t3 = ind_table(handler);
9028 for (op3 = 0; op3 < 32; op3++) {
9029 handler = t3[op3];
9030 if (handler->count == 0) {
9031 continue;
9032 }
9033 qemu_printf("%02x %02x %02x (%02x %04d) %16s: "
9034 "%016" PRIx64 " %" PRId64 "\n",
9035 op1, op2, op3, op1, (op3 << 5) | op2,
9036 handler->oname,
9037 handler->count, handler->count);
9038 }
9039 } else {
9040 if (handler->count == 0) {
9041 continue;
9042 }
9043 qemu_printf("%02x %02x (%02x %04d) %16s: "
9044 "%016" PRIx64 " %" PRId64 "\n",
9045 op1, op2, op1, op2, handler->oname,
9046 handler->count, handler->count);
9047 }
9048 }
9049 } else {
9050 if (handler->count == 0) {
9051 continue;
9052 }
9053 qemu_printf("%02x (%02x ) %16s: %016" PRIx64
9054 " %" PRId64 "\n",
9055 op1, op1, handler->oname,
9056 handler->count, handler->count);
9057 }
9058 }
9059 #endif
9060 }
9061
9062 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
9063 {
9064 opc_handler_t **table, *handler;
9065 uint32_t inval;
9066
9067 ctx->opcode = insn;
9068
9069 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
9070 insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
9071 ctx->le_mode ? "little" : "big");
9072
9073 table = cpu->opcodes;
9074 handler = table[opc1(insn)];
9075 if (is_indirect_opcode(handler)) {
9076 table = ind_table(handler);
9077 handler = table[opc2(insn)];
9078 if (is_indirect_opcode(handler)) {
9079 table = ind_table(handler);
9080 handler = table[opc3(insn)];
9081 if (is_indirect_opcode(handler)) {
9082 table = ind_table(handler);
9083 handler = table[opc4(insn)];
9084 }
9085 }
9086 }
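/*
 * For example, a stwcx. insn (opc1/opc2/opc3 = 0x1F/0x16/0x04) walks
 * opcodes[0x1F], follows that indirect table to slot 0x16, and picks up
 * its handler at slot 0x04; opc4 only matters for triple-indirect opcodes.
 */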
9087
9088 /* Is opcode *REALLY* valid? */
9089 if (unlikely(handler->handler == &gen_invalid)) {
9090 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
9091 "%02x - %02x - %02x - %02x (%08x) "
9092 TARGET_FMT_lx "\n",
9093 opc1(insn), opc2(insn), opc3(insn), opc4(insn),
9094 insn, ctx->cia);
9095 return false;
9096 }
9097
9098 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
9099 && Rc(insn))) {
9100 inval = handler->inval2;
9101 } else {
9102 inval = handler->inval1;
9103 }
9104
9105 if (unlikely((insn & inval) != 0)) {
9106 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
9107 "%02x - %02x - %02x - %02x (%08x) "
9108 TARGET_FMT_lx "\n", insn & inval,
9109 opc1(insn), opc2(insn), opc3(insn), opc4(insn),
9110 insn, ctx->cia);
9111 return false;
9112 }
9113
9114 handler->handler(ctx);
9115 return true;
9116 }
9117
9118 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
9119 {
9120 DisasContext *ctx = container_of(dcbase, DisasContext, base);
9121 CPUPPCState *env = cs->env_ptr;
9122 uint32_t hflags = ctx->base.tb->flags;
9123 int bound;
9124
9125 ctx->exception = POWERPC_EXCP_NONE;
9126 ctx->spr_cb = env->spr_cb;
9127 ctx->pr = (hflags >> HFLAGS_PR) & 1;
9128 ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
9129 ctx->dr = (hflags >> HFLAGS_DR) & 1;
9130 ctx->hv = (hflags >> HFLAGS_HV) & 1;
9131 ctx->insns_flags = env->insns_flags;
9132 ctx->insns_flags2 = env->insns_flags2;
9133 ctx->access_type = -1;
9134 ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
9135 ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
9136 ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
9137 ctx->flags = env->flags;
9138 #if defined(TARGET_PPC64)
9139 ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
9140 ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
9141 #endif
9142 ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
9143 || env->mmu_model == POWERPC_MMU_601
9144 || env->mmu_model & POWERPC_MMU_64;
9145
9146 ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
9147 ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
9148 ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
9149 ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
9150 ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
9151 ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
9152
9153 ctx->singlestep_enabled = 0;
9154 if ((hflags >> HFLAGS_SE) & 1) {
9155 ctx->singlestep_enabled |= CPU_SINGLE_STEP;
9156 }
9157 if ((hflags >> HFLAGS_BE) & 1) {
9158 ctx->singlestep_enabled |= CPU_BRANCH_STEP;
9159 }
9160 if (unlikely(ctx->base.singlestep_enabled)) {
9161 ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP;
9162 }
9163
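/*
 * Never translate past the end of the current guest page: bound the insn
 * count by the bytes remaining on the page (4 bytes per insn).
 */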
9164 bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
9165 ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
9166 }
9167
9168 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
9169 {
9170 }
9171
9172 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
9173 {
9174 tcg_gen_insn_start(dcbase->pc_next);
9175 }
9176
9177 static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
9178 const CPUBreakpoint *bp)
9179 {
9180 DisasContext *ctx = container_of(dcbase, DisasContext, base);
9181
9182 gen_update_nip(ctx, ctx->base.pc_next);
9183 gen_debug_exception(ctx);
9184 /*
9185 * The address covered by the breakpoint must be included in
9186 * [tb->pc, tb->pc + tb->size) in order to for it to be properly
9187 * cleared -- thus we increment the PC here so that the logic
9188 * setting tb->size below does the right thing.
9189 */
9190 ctx->base.pc_next += 4;
9191 return true;
9192 }
9193
9194 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
9195 {
9196 DisasContext *ctx = container_of(dcbase, DisasContext, base);
9197 PowerPCCPU *cpu = POWERPC_CPU(cs);
9198 CPUPPCState *env = cs->env_ptr;
9199 uint32_t insn;
9200 bool ok;
9201
9202 LOG_DISAS("----------------\n");
9203 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
9204 ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
9205
9206 ctx->cia = ctx->base.pc_next;
9207 insn = translator_ldl_swap(env, ctx->base.pc_next, need_byteswap(ctx));
9208 ctx->base.pc_next += 4;
9209
9210 ok = decode_legacy(cpu, ctx, insn);
9211 if (!ok) {
9212 gen_invalid(ctx);
9213 }
9214
9215 #if defined(DO_PPC_STATISTICS)
9216 handler->count++;
9217 #endif
9218
9219 /* Check trace mode exceptions */
9220 if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP &&
9221 (ctx->base.pc_next <= 0x100 || ctx->base.pc_next > 0xF00) &&
9222 ctx->exception != POWERPC_EXCP_BRANCH &&
9223 ctx->base.is_jmp != DISAS_NORETURN)) {
9224 uint32_t excp = gen_prep_dbgex(ctx);
9225 gen_exception_nip(ctx, excp, ctx->base.pc_next);
9226 }
9227
9228 if (tcg_check_temp_count()) {
9229 qemu_log("Opcode %02x %02x %02x %02x (%08x) leaked "
9230 "temporaries\n", opc1(ctx->opcode), opc2(ctx->opcode),
9231 opc3(ctx->opcode), opc4(ctx->opcode), ctx->opcode);
9232 }
9233
9234 if (ctx->base.is_jmp == DISAS_NEXT) {
9235 switch (ctx->exception) {
9236 case POWERPC_EXCP_NONE:
9237 break;
9238 case POWERPC_EXCP_BRANCH:
9239 ctx->base.is_jmp = DISAS_NORETURN;
9240 break;
9241 case POWERPC_EXCP_SYNC:
9242 case POWERPC_EXCP_STOP:
9243 ctx->base.is_jmp = DISAS_EXIT;
9244 break;
9245 default:
9246 /* Every other ctx->exception should have set NORETURN. */
9247 g_assert_not_reached();
9248 }
9249 }
9250 }
9251
9252 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
9253 {
9254 DisasContext *ctx = container_of(dcbase, DisasContext, base);
9255 DisasJumpType is_jmp = ctx->base.is_jmp;
9256 target_ulong nip = ctx->base.pc_next;
9257
9258 if (is_jmp == DISAS_NORETURN) {
9259 /* We have already exited the TB. */
9260 return;
9261 }
9262
9263 /* Honor single stepping. */
9264 if (unlikely(ctx->base.singlestep_enabled)) {
9265 switch (is_jmp) {
9266 case DISAS_TOO_MANY:
9267 case DISAS_EXIT_UPDATE:
9268 case DISAS_CHAIN_UPDATE:
9269 gen_update_nip(ctx, nip);
9270 break;
9271 case DISAS_EXIT:
9272 case DISAS_CHAIN:
9273 break;
9274 default:
9275 g_assert_not_reached();
9276 }
9277 gen_debug_exception(ctx);
9278 return;
9279 }
9280
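/*
 * DISAS_TOO_MANY chains directly to the next TB when it stays on the same
 * page; the CHAIN variants go through lookup_and_goto_ptr; the EXIT
 * variants return to the main loop.  The *_UPDATE forms (and TOO_MANY)
 * must store the next NIP first, since it is not yet up to date in the
 * CPU state.
 */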
9281 switch (is_jmp) {
9282 case DISAS_TOO_MANY:
9283 if (use_goto_tb(ctx, nip)) {
9284 tcg_gen_goto_tb(0);
9285 gen_update_nip(ctx, nip);
9286 tcg_gen_exit_tb(ctx->base.tb, 0);
9287 break;
9288 }
9289 /* fall through */
9290 case DISAS_CHAIN_UPDATE:
9291 gen_update_nip(ctx, nip);
9292 /* fall through */
9293 case DISAS_CHAIN:
9294 tcg_gen_lookup_and_goto_ptr();
9295 break;
9296
9297 case DISAS_EXIT_UPDATE:
9298 gen_update_nip(ctx, nip);
9299 /* fall through */
9300 case DISAS_EXIT:
9301 tcg_gen_exit_tb(NULL, 0);
9302 break;
9303
9304 default:
9305 g_assert_not_reached();
9306 }
9307 }
9308
9309 static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
9310 {
9311 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
9312 log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
9313 }
9314
9315 static const TranslatorOps ppc_tr_ops = {
9316 .init_disas_context = ppc_tr_init_disas_context,
9317 .tb_start = ppc_tr_tb_start,
9318 .insn_start = ppc_tr_insn_start,
9319 .breakpoint_check = ppc_tr_breakpoint_check,
9320 .translate_insn = ppc_tr_translate_insn,
9321 .tb_stop = ppc_tr_tb_stop,
9322 .disas_log = ppc_tr_disas_log,
9323 };
9324
9325 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
9326 {
9327 DisasContext ctx;
9328
9329 translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns);
9330 }
9331
9332 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
9333 target_ulong *data)
9334 {
9335 env->nip = data[0];
9336 }