target/ppc/translate.c
1 /*
2 * PowerPC emulation for qemu: main translation routines.
3 *
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
6 *
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2.1 of the License, or (at your option) any later version.
11 *
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19 */
20
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg/tcg-op.h"
27 #include "tcg/tcg-op-gvec.h"
28 #include "qemu/host-utils.h"
29 #include "qemu/main-loop.h"
30 #include "exec/cpu_ldst.h"
31
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
34
35 #include "trace-tcg.h"
36 #include "exec/translator.h"
37 #include "exec/log.h"
38 #include "qemu/atomic128.h"
39 #include "spr_tcg.h"
40
41 #include "qemu/qemu-print.h"
42 #include "qapi/error.h"
43
44 #define CPU_SINGLE_STEP 0x1
45 #define CPU_BRANCH_STEP 0x2
46 #define GDBSTUB_SINGLE_STEP 0x4
47
48 /* Include definitions for instruction classes and implementation flags */
49 /* #define PPC_DEBUG_DISAS */
50 /* #define DO_PPC_STATISTICS */
51
52 #ifdef PPC_DEBUG_DISAS
53 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
54 #else
55 # define LOG_DISAS(...) do { } while (0)
56 #endif
57 /*****************************************************************************/
58 /* Code translation helpers */
59
60 /* global register indexes */
61 static char cpu_reg_names[10 * 3 + 22 * 4 /* GPR */
62 + 10 * 4 + 22 * 5 /* SPE GPRh */
63 + 8 * 5 /* CRF */];
64 static TCGv cpu_gpr[32];
65 static TCGv cpu_gprh[32];
66 static TCGv_i32 cpu_crf[8];
67 static TCGv cpu_nip;
68 static TCGv cpu_msr;
69 static TCGv cpu_ctr;
70 static TCGv cpu_lr;
71 #if defined(TARGET_PPC64)
72 static TCGv cpu_cfar;
73 #endif
74 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
75 static TCGv cpu_reserve;
76 static TCGv cpu_reserve_val;
77 static TCGv cpu_fpscr;
78 static TCGv_i32 cpu_access_type;
79
80 #include "exec/gen-icount.h"
81
82 void ppc_translate_init(void)
83 {
84 int i;
85 char *p;
86 size_t cpu_reg_names_size;
87
88 p = cpu_reg_names;
89 cpu_reg_names_size = sizeof(cpu_reg_names);
90
91 for (i = 0; i < 8; i++) {
92 snprintf(p, cpu_reg_names_size, "crf%d", i);
93 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
94 offsetof(CPUPPCState, crf[i]), p);
95 p += 5;
96 cpu_reg_names_size -= 5;
97 }
98
99 for (i = 0; i < 32; i++) {
100 snprintf(p, cpu_reg_names_size, "r%d", i);
101 cpu_gpr[i] = tcg_global_mem_new(cpu_env,
102 offsetof(CPUPPCState, gpr[i]), p);
103 p += (i < 10) ? 3 : 4;
104 cpu_reg_names_size -= (i < 10) ? 3 : 4;
105 snprintf(p, cpu_reg_names_size, "r%dH", i);
106 cpu_gprh[i] = tcg_global_mem_new(cpu_env,
107 offsetof(CPUPPCState, gprh[i]), p);
108 p += (i < 10) ? 4 : 5;
109 cpu_reg_names_size -= (i < 10) ? 4 : 5;
110 }
111
112 cpu_nip = tcg_global_mem_new(cpu_env,
113 offsetof(CPUPPCState, nip), "nip");
114
115 cpu_msr = tcg_global_mem_new(cpu_env,
116 offsetof(CPUPPCState, msr), "msr");
117
118 cpu_ctr = tcg_global_mem_new(cpu_env,
119 offsetof(CPUPPCState, ctr), "ctr");
120
121 cpu_lr = tcg_global_mem_new(cpu_env,
122 offsetof(CPUPPCState, lr), "lr");
123
124 #if defined(TARGET_PPC64)
125 cpu_cfar = tcg_global_mem_new(cpu_env,
126 offsetof(CPUPPCState, cfar), "cfar");
127 #endif
128
129 cpu_xer = tcg_global_mem_new(cpu_env,
130 offsetof(CPUPPCState, xer), "xer");
131 cpu_so = tcg_global_mem_new(cpu_env,
132 offsetof(CPUPPCState, so), "SO");
133 cpu_ov = tcg_global_mem_new(cpu_env,
134 offsetof(CPUPPCState, ov), "OV");
135 cpu_ca = tcg_global_mem_new(cpu_env,
136 offsetof(CPUPPCState, ca), "CA");
137 cpu_ov32 = tcg_global_mem_new(cpu_env,
138 offsetof(CPUPPCState, ov32), "OV32");
139 cpu_ca32 = tcg_global_mem_new(cpu_env,
140 offsetof(CPUPPCState, ca32), "CA32");
141
142 cpu_reserve = tcg_global_mem_new(cpu_env,
143 offsetof(CPUPPCState, reserve_addr),
144 "reserve_addr");
145 cpu_reserve_val = tcg_global_mem_new(cpu_env,
146 offsetof(CPUPPCState, reserve_val),
147 "reserve_val");
148
149 cpu_fpscr = tcg_global_mem_new(cpu_env,
150 offsetof(CPUPPCState, fpscr), "fpscr");
151
152 cpu_access_type = tcg_global_mem_new_i32(cpu_env,
153 offsetof(CPUPPCState, access_type),
154 "access_type");
155 }
156
157 /* internal defines */
158 struct DisasContext {
159 DisasContextBase base;
160 target_ulong cia; /* current instruction address */
161 uint32_t opcode;
162 /* State used to generate memory accesses (MSR bits, mmu index) */
163 bool pr, hv, dr, le_mode;
164 bool lazy_tlb_flush;
165 bool need_access_type;
166 int mem_idx;
167 int access_type;
168 /* Translation flags */
169 MemOp default_tcg_memop_mask;
170 #if defined(TARGET_PPC64)
171 bool sf_mode;
172 bool has_cfar;
173 #endif
174 bool fpu_enabled;
175 bool altivec_enabled;
176 bool vsx_enabled;
177 bool spe_enabled;
178 bool tm_enabled;
179 bool gtse;
180 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
181 int singlestep_enabled;
182 uint32_t flags;
183 uint64_t insns_flags;
184 uint64_t insns_flags2;
185 };
186
187 #define DISAS_EXIT DISAS_TARGET_0 /* exit to main loop, pc updated */
188 #define DISAS_EXIT_UPDATE DISAS_TARGET_1 /* exit to main loop, pc stale */
189 #define DISAS_CHAIN DISAS_TARGET_2 /* lookup next tb, pc updated */
190 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3 /* lookup next tb, pc stale */
191
192 /* Return true iff byteswap is needed in a scalar memop */
193 static inline bool need_byteswap(const DisasContext *ctx)
194 {
195 #if defined(TARGET_WORDS_BIGENDIAN)
196 return ctx->le_mode;
197 #else
198 return !ctx->le_mode;
199 #endif
200 }
201
202 /* True when active word size < size of target_long. */
203 #ifdef TARGET_PPC64
204 # define NARROW_MODE(C) (!(C)->sf_mode)
205 #else
206 # define NARROW_MODE(C) 0
207 #endif
208
209 struct opc_handler_t {
210 /* invalid bits for instruction 1 (Rc(opcode) == 0) */
211 uint32_t inval1;
212 /* invalid bits for instruction 2 (Rc(opcode) == 1) */
213 uint32_t inval2;
214 /* instruction type */
215 uint64_t type;
216 /* extended instruction type */
217 uint64_t type2;
218 /* handler */
219 void (*handler)(DisasContext *ctx);
220 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
221 const char *oname;
222 #endif
223 #if defined(DO_PPC_STATISTICS)
224 uint64_t count;
225 #endif
226 };
227
228 /* SPR load/store helpers */
229 static inline void gen_load_spr(TCGv t, int reg)
230 {
231 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
232 }
233
234 static inline void gen_store_spr(int reg, TCGv t)
235 {
236 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
237 }
238
239 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
240 {
241 if (ctx->need_access_type && ctx->access_type != access_type) {
242 tcg_gen_movi_i32(cpu_access_type, access_type);
243 ctx->access_type = access_type;
244 }
245 }
246
247 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
248 {
249 if (NARROW_MODE(ctx)) {
250 nip = (uint32_t)nip;
251 }
252 tcg_gen_movi_tl(cpu_nip, nip);
253 }
254
255 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
256 {
257 TCGv_i32 t0, t1;
258
259 /*
260 * These are all synchronous exceptions; we set the PC back to the
261 * faulting instruction.
262 */
263 gen_update_nip(ctx, ctx->cia);
264 t0 = tcg_const_i32(excp);
265 t1 = tcg_const_i32(error);
266 gen_helper_raise_exception_err(cpu_env, t0, t1);
267 tcg_temp_free_i32(t0);
268 tcg_temp_free_i32(t1);
269 ctx->base.is_jmp = DISAS_NORETURN;
270 }
271
272 static void gen_exception(DisasContext *ctx, uint32_t excp)
273 {
274 TCGv_i32 t0;
275
276 /*
277 * These are all synchronous exceptions; we set the PC back to the
278 * faulting instruction.
279 */
280 gen_update_nip(ctx, ctx->cia);
281 t0 = tcg_const_i32(excp);
282 gen_helper_raise_exception(cpu_env, t0);
283 tcg_temp_free_i32(t0);
284 ctx->base.is_jmp = DISAS_NORETURN;
285 }
286
287 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
288 target_ulong nip)
289 {
290 TCGv_i32 t0;
291
292 gen_update_nip(ctx, nip);
293 t0 = tcg_const_i32(excp);
294 gen_helper_raise_exception(cpu_env, t0);
295 tcg_temp_free_i32(t0);
296 ctx->base.is_jmp = DISAS_NORETURN;
297 }
298
299 static void gen_icount_io_start(DisasContext *ctx)
300 {
301 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
302 gen_io_start();
303 /*
304 * An I/O instruction must be last in the TB.
305 * Chain to the next TB, and let the code from gen_tb_start
306 * decide if we need to return to the main loop.
307 * Doing this first also allows this value to be overridden.
308 */
309 ctx->base.is_jmp = DISAS_TOO_MANY;
310 }
311 }
312
313 /*
314 * Tells the caller which exception is appropriate to generate and prepares
315 * the SPR registers for that exception.
316 *
317 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
318 * POWERPC_EXCP_DEBUG (on BookE).
319 */
320 static uint32_t gen_prep_dbgex(DisasContext *ctx)
321 {
322 if (ctx->flags & POWERPC_FLAG_DE) {
323 target_ulong dbsr = 0;
324 if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
325 dbsr = DBCR0_ICMP;
326 } else {
327 /* Must have been a branch */
328 dbsr = DBCR0_BRT;
329 }
330 TCGv t0 = tcg_temp_new();
331 gen_load_spr(t0, SPR_BOOKE_DBSR);
332 tcg_gen_ori_tl(t0, t0, dbsr);
333 gen_store_spr(SPR_BOOKE_DBSR, t0);
334 tcg_temp_free(t0);
335 return POWERPC_EXCP_DEBUG;
336 } else {
337 return POWERPC_EXCP_TRACE;
338 }
339 }
340
341 static void gen_debug_exception(DisasContext *ctx)
342 {
343 gen_helper_raise_exception(cpu_env, tcg_constant_i32(EXCP_DEBUG));
344 ctx->base.is_jmp = DISAS_NORETURN;
345 }
346
347 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
348 {
349 /* Will be converted to program check if needed */
350 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
351 }
352
353 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
354 {
355 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
356 }
357
358 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
359 {
360 /* Will be converted to program check if needed */
361 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
362 }
363
364 /*****************************************************************************/
365 /* SPR READ/WRITE CALLBACKS */
366
367 void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
368 {
369 #if 0
370 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
371 printf("ERROR: try to access SPR %d !\n", sprn);
372 #endif
373 }
374
375 /* #define PPC_DUMP_SPR_ACCESSES */
376
377 /*
378 * Generic callbacks:
379 * do nothing but store/retrieve spr value
380 */
381 static void spr_load_dump_spr(int sprn)
382 {
383 #ifdef PPC_DUMP_SPR_ACCESSES
384 TCGv_i32 t0 = tcg_const_i32(sprn);
385 gen_helper_load_dump_spr(cpu_env, t0);
386 tcg_temp_free_i32(t0);
387 #endif
388 }
389
390 void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
391 {
392 gen_load_spr(cpu_gpr[gprn], sprn);
393 spr_load_dump_spr(sprn);
394 }
395
396 static void spr_store_dump_spr(int sprn)
397 {
398 #ifdef PPC_DUMP_SPR_ACCESSES
399 TCGv_i32 t0 = tcg_const_i32(sprn);
400 gen_helper_store_dump_spr(cpu_env, t0);
401 tcg_temp_free_i32(t0);
402 #endif
403 }
404
405 void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
406 {
407 gen_store_spr(sprn, cpu_gpr[gprn]);
408 spr_store_dump_spr(sprn);
409 }
410
411 #if !defined(CONFIG_USER_ONLY)
412 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
413 {
414 #ifdef TARGET_PPC64
415 TCGv t0 = tcg_temp_new();
416 tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
417 gen_store_spr(sprn, t0);
418 tcg_temp_free(t0);
419 spr_store_dump_spr(sprn);
420 #else
421 spr_write_generic(ctx, sprn, gprn);
422 #endif
423 }
424
425 void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
426 {
427 TCGv t0 = tcg_temp_new();
428 TCGv t1 = tcg_temp_new();
429 gen_load_spr(t0, sprn);
430 tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
431 tcg_gen_and_tl(t0, t0, t1);
432 gen_store_spr(sprn, t0);
433 tcg_temp_free(t0);
434 tcg_temp_free(t1);
435 }
436
437 void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
438 {
439 }
440
441 #endif
442
443 /* SPR common to all PowerPC */
444 /* XER */
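/*
 * Note: the XER status bits SO, OV, CA (plus OV32/CA32 on ISA v3.0) live in
 * dedicated TCG globals (cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32) rather
 * than inside cpu_xer, which lets flag updates avoid a read-modify-write of
 * the whole register.  spr_read_xer() below reassembles the architectural
 * value from those globals, and spr_write_xer() scatters a written value
 * back into them; cpu_xer itself only keeps the remaining bits.
 */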
445 void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
446 {
447 TCGv dst = cpu_gpr[gprn];
448 TCGv t0 = tcg_temp_new();
449 TCGv t1 = tcg_temp_new();
450 TCGv t2 = tcg_temp_new();
451 tcg_gen_mov_tl(dst, cpu_xer);
452 tcg_gen_shli_tl(t0, cpu_so, XER_SO);
453 tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
454 tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
455 tcg_gen_or_tl(t0, t0, t1);
456 tcg_gen_or_tl(dst, dst, t2);
457 tcg_gen_or_tl(dst, dst, t0);
458 if (is_isa300(ctx)) {
459 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
460 tcg_gen_or_tl(dst, dst, t0);
461 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
462 tcg_gen_or_tl(dst, dst, t0);
463 }
464 tcg_temp_free(t0);
465 tcg_temp_free(t1);
466 tcg_temp_free(t2);
467 }
468
469 void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
470 {
471 TCGv src = cpu_gpr[gprn];
472 /* Write all flag bits; the isa300-only bits are checked when reading back */
473 tcg_gen_andi_tl(cpu_xer, src,
474 ~((1u << XER_SO) |
475 (1u << XER_OV) | (1u << XER_OV32) |
476 (1u << XER_CA) | (1u << XER_CA32)));
477 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
478 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
479 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
480 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
481 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
482 }
483
484 /* LR */
485 void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
486 {
487 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
488 }
489
490 void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
491 {
492 tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
493 }
494
495 /* CFAR */
496 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
497 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
498 {
499 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
500 }
501
502 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
503 {
504 tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
505 }
506 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
507
508 /* CTR */
509 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
510 {
511 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
512 }
513
514 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
515 {
516 tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
517 }
518
519 /* User read access to SPR */
520 /* USPRx */
521 /* UMMCRx */
522 /* UPMCx */
523 /* USIA */
524 /* UDECR */
525 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
526 {
527 gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
528 }
529
530 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
531 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
532 {
533 gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
534 }
535 #endif
536
537 /* SPR common to all non-embedded PowerPC */
538 /* DECR */
539 #if !defined(CONFIG_USER_ONLY)
540 void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
541 {
542 gen_icount_io_start(ctx);
543 gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
544 }
545
546 void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
547 {
548 gen_icount_io_start(ctx);
549 gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
550 }
551 #endif
552
553 /* SPR common to all non-embedded PowerPC, except 601 */
554 /* Time base */
555 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
556 {
557 gen_icount_io_start(ctx);
558 gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
559 }
560
561 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
562 {
563 gen_icount_io_start(ctx);
564 gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
565 }
566
567 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
568 {
569 gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
570 }
571
572 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
573 {
574 gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
575 }
576
577 #if !defined(CONFIG_USER_ONLY)
578 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
579 {
580 gen_icount_io_start(ctx);
581 gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
582 }
583
584 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
585 {
586 gen_icount_io_start(ctx);
587 gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
588 }
589
590 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
591 {
592 gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
593 }
594
595 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
596 {
597 gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
598 }
599
600 #if defined(TARGET_PPC64)
601 void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
602 {
603 gen_icount_io_start(ctx);
604 gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
605 }
606
607 void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
608 {
609 gen_icount_io_start(ctx);
610 gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
611 }
612
613 /* HDECR */
614 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
615 {
616 gen_icount_io_start(ctx);
617 gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
618 }
619
620 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
621 {
622 gen_icount_io_start(ctx);
623 gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
624 }
625
626 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
627 {
628 gen_icount_io_start(ctx);
629 gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
630 }
631
632 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
633 {
634 gen_icount_io_start(ctx);
635 gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
636 }
637
638 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
639 {
640 gen_icount_io_start(ctx);
641 gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
642 }
643
644 #endif
645 #endif
646
647 #if !defined(CONFIG_USER_ONLY)
648 /* IBAT0U...IBAT7U */
649 /* IBAT0L...IBAT7L */
650 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
651 {
652 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
653 offsetof(CPUPPCState,
654 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
655 }
656
657 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
658 {
659 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
660 offsetof(CPUPPCState,
661 IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
662 }
663
664 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
665 {
666 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
667 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
668 tcg_temp_free_i32(t0);
669 }
670
671 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
672 {
673 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
674 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
675 tcg_temp_free_i32(t0);
676 }
677
678 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
679 {
680 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
681 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
682 tcg_temp_free_i32(t0);
683 }
684
685 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
686 {
687 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
688 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
689 tcg_temp_free_i32(t0);
690 }
691
692 /* DBAT0U...DBAT7U */
693 /* DBAT0L...DBAT7L */
694 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
695 {
696 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
697 offsetof(CPUPPCState,
698 DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
699 }
700
701 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
702 {
703 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
704 offsetof(CPUPPCState,
705 DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
706 }
707
708 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
709 {
710 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
711 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
712 tcg_temp_free_i32(t0);
713 }
714
715 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
716 {
717 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
718 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
719 tcg_temp_free_i32(t0);
720 }
721
722 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
723 {
724 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
725 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
726 tcg_temp_free_i32(t0);
727 }
728
729 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
730 {
731 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
732 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
733 tcg_temp_free_i32(t0);
734 }
735
736 /* SDR1 */
737 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
738 {
739 gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
740 }
741
742 #if defined(TARGET_PPC64)
743 /* 64 bits PowerPC specific SPRs */
744 /* PIDR */
745 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
746 {
747 gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
748 }
749
750 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
751 {
752 gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
753 }
754
755 void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
756 {
757 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
758 }
759
760 void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
761 {
762 TCGv t0 = tcg_temp_new();
763 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
764 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
765 tcg_temp_free(t0);
766 }
767 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
768 {
769 gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
770 }
771
772 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
773 {
774 gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
775 }
776
777 /* DPDES */
778 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
779 {
780 gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
781 }
782
783 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
784 {
785 gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
786 }
787 #endif
788 #endif
789
790 /* PowerPC 601 specific registers */
791 /* RTC */
792 void spr_read_601_rtcl(DisasContext *ctx, int gprn, int sprn)
793 {
794 gen_helper_load_601_rtcl(cpu_gpr[gprn], cpu_env);
795 }
796
797 void spr_read_601_rtcu(DisasContext *ctx, int gprn, int sprn)
798 {
799 gen_helper_load_601_rtcu(cpu_gpr[gprn], cpu_env);
800 }
801
802 #if !defined(CONFIG_USER_ONLY)
803 void spr_write_601_rtcu(DisasContext *ctx, int sprn, int gprn)
804 {
805 gen_helper_store_601_rtcu(cpu_env, cpu_gpr[gprn]);
806 }
807
808 void spr_write_601_rtcl(DisasContext *ctx, int sprn, int gprn)
809 {
810 gen_helper_store_601_rtcl(cpu_env, cpu_gpr[gprn]);
811 }
812
813 void spr_write_hid0_601(DisasContext *ctx, int sprn, int gprn)
814 {
815 gen_helper_store_hid0_601(cpu_env, cpu_gpr[gprn]);
816 /* Must stop the translation as endianness may have changed */
817 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
818 }
819 #endif
820
821 /* Unified bats */
822 #if !defined(CONFIG_USER_ONLY)
823 void spr_read_601_ubat(DisasContext *ctx, int gprn, int sprn)
824 {
825 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
826 offsetof(CPUPPCState,
827 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
828 }
829
830 void spr_write_601_ubatu(DisasContext *ctx, int sprn, int gprn)
831 {
832 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
833 gen_helper_store_601_batl(cpu_env, t0, cpu_gpr[gprn]);
834 tcg_temp_free_i32(t0);
835 }
836
837 void spr_write_601_ubatl(DisasContext *ctx, int sprn, int gprn)
838 {
839 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
840 gen_helper_store_601_batu(cpu_env, t0, cpu_gpr[gprn]);
841 tcg_temp_free_i32(t0);
842 }
843 #endif
844
845 /* PowerPC 40x specific registers */
846 #if !defined(CONFIG_USER_ONLY)
847 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
848 {
849 gen_icount_io_start(ctx);
850 gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
851 }
852
853 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
854 {
855 gen_icount_io_start(ctx);
856 gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
857 }
858
859 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
860 {
861 gen_icount_io_start(ctx);
862 gen_store_spr(sprn, cpu_gpr[gprn]);
863 gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
864 /* We must stop translation as we may have rebooted */
865 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
866 }
867
868 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
869 {
870 gen_icount_io_start(ctx);
871 gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
872 }
873
874 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
875 {
876 gen_icount_io_start(ctx);
877 gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
878 }
879
880 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
881 {
882 gen_icount_io_start(ctx);
883 gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
884 }
885 #endif
886
887 /* PowerPC 403 specific registers */
888 /* PBL1 / PBU1 / PBL2 / PBU2 */
889 #if !defined(CONFIG_USER_ONLY)
890 void spr_read_403_pbr(DisasContext *ctx, int gprn, int sprn)
891 {
892 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
893 offsetof(CPUPPCState, pb[sprn - SPR_403_PBL1]));
894 }
895
896 void spr_write_403_pbr(DisasContext *ctx, int sprn, int gprn)
897 {
898 TCGv_i32 t0 = tcg_const_i32(sprn - SPR_403_PBL1);
899 gen_helper_store_403_pbr(cpu_env, t0, cpu_gpr[gprn]);
900 tcg_temp_free_i32(t0);
901 }
902
903 void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
904 {
905 TCGv t0 = tcg_temp_new();
906 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
907 gen_store_spr(SPR_PIR, t0);
908 tcg_temp_free(t0);
909 }
910 #endif
911
912 /* SPE specific registers */
913 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
914 {
915 TCGv_i32 t0 = tcg_temp_new_i32();
916 tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
917 tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
918 tcg_temp_free_i32(t0);
919 }
920
921 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
922 {
923 TCGv_i32 t0 = tcg_temp_new_i32();
924 tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
925 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
926 tcg_temp_free_i32(t0);
927 }
928
929 #if !defined(CONFIG_USER_ONLY)
930 /* Callback used to write the exception vector base */
931 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
932 {
933 TCGv t0 = tcg_temp_new();
934 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
935 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
936 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
937 gen_store_spr(sprn, t0);
938 tcg_temp_free(t0);
939 }
940
941 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
942 {
943 int sprn_offs;
944
945 if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
946 sprn_offs = sprn - SPR_BOOKE_IVOR0;
947 } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
948 sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
949 } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
950 sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
951 } else {
952 printf("Trying to write an unknown exception vector %d %03x\n",
953 sprn, sprn);
954 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
955 return;
956 }
957
958 TCGv t0 = tcg_temp_new();
959 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
960 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
961 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
962 gen_store_spr(sprn, t0);
963 tcg_temp_free(t0);
964 }
965 #endif
966
967 #ifdef TARGET_PPC64
968 #ifndef CONFIG_USER_ONLY
969 void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
970 {
971 TCGv t0 = tcg_temp_new();
972 TCGv t1 = tcg_temp_new();
973 TCGv t2 = tcg_temp_new();
974
975 /*
976 * Note, the HV=1 PR=0 case is handled earlier by simply using
977 * spr_write_generic for HV mode in the SPR table
978 */
979
980 /* Build insertion mask into t1 based on context */
981 if (ctx->pr) {
982 gen_load_spr(t1, SPR_UAMOR);
983 } else {
984 gen_load_spr(t1, SPR_AMOR);
985 }
986
987 /* Mask new bits into t2 */
988 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
989
990 /* Load AMR and clear new bits in t0 */
991 gen_load_spr(t0, SPR_AMR);
992 tcg_gen_andc_tl(t0, t0, t1);
993
994 /* OR in the new bits and write it out */
995 tcg_gen_or_tl(t0, t0, t2);
996 gen_store_spr(SPR_AMR, t0);
997 spr_store_dump_spr(SPR_AMR);
998
999 tcg_temp_free(t0);
1000 tcg_temp_free(t1);
1001 tcg_temp_free(t2);
1002 }
1003
1004 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
1005 {
1006 TCGv t0 = tcg_temp_new();
1007 TCGv t1 = tcg_temp_new();
1008 TCGv t2 = tcg_temp_new();
1009
1010 /*
1011 * Note, the HV=1 case is handled earlier by simply using
1012 * spr_write_generic for HV mode in the SPR table
1013 */
1014
1015 /* Build insertion mask into t1 based on context */
1016 gen_load_spr(t1, SPR_AMOR);
1017
1018 /* Mask new bits into t2 */
1019 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1020
1021 /* Load AMR and clear new bits in t0 */
1022 gen_load_spr(t0, SPR_UAMOR);
1023 tcg_gen_andc_tl(t0, t0, t1);
1024
1025 /* OR in the new bits and write it out */
1026 tcg_gen_or_tl(t0, t0, t2);
1027 gen_store_spr(SPR_UAMOR, t0);
1028 spr_store_dump_spr(SPR_UAMOR);
1029
1030 tcg_temp_free(t0);
1031 tcg_temp_free(t1);
1032 tcg_temp_free(t2);
1033 }
1034
1035 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
1036 {
1037 TCGv t0 = tcg_temp_new();
1038 TCGv t1 = tcg_temp_new();
1039 TCGv t2 = tcg_temp_new();
1040
1041 /*
1042 * Note, the HV=1 case is handled earlier by simply using
1043 * spr_write_generic for HV mode in the SPR table
1044 */
1045
1046 /* Build insertion mask into t1 based on context */
1047 gen_load_spr(t1, SPR_AMOR);
1048
1049 /* Mask new bits into t2 */
1050 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1051
1052 /* Load AMR and clear new bits in t0 */
1053 gen_load_spr(t0, SPR_IAMR);
1054 tcg_gen_andc_tl(t0, t0, t1);
1055
1056 /* OR in the new bits and write it out */
1057 tcg_gen_or_tl(t0, t0, t2);
1058 gen_store_spr(SPR_IAMR, t0);
1059 spr_store_dump_spr(SPR_IAMR);
1060
1061 tcg_temp_free(t0);
1062 tcg_temp_free(t1);
1063 tcg_temp_free(t2);
1064 }
1065 #endif
1066 #endif
1067
1068 #ifndef CONFIG_USER_ONLY
1069 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
1070 {
1071 gen_helper_fixup_thrm(cpu_env);
1072 gen_load_spr(cpu_gpr[gprn], sprn);
1073 spr_load_dump_spr(sprn);
1074 }
1075 #endif /* !CONFIG_USER_ONLY */
1076
1077 #if !defined(CONFIG_USER_ONLY)
1078 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1079 {
1080 TCGv t0 = tcg_temp_new();
1081
1082 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1083 gen_store_spr(sprn, t0);
1084 tcg_temp_free(t0);
1085 }
1086
1087 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1088 {
1089 TCGv t0 = tcg_temp_new();
1090
1091 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1092 gen_store_spr(sprn, t0);
1093 tcg_temp_free(t0);
1094 }
1095
1096 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1097 {
1098 TCGv t0 = tcg_temp_new();
1099
1100 tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1101 ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1102 gen_store_spr(sprn, t0);
1103 tcg_temp_free(t0);
1104 }
1105
1106 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
1107 {
1108 gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
1109 }
1110
1111 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1112 {
1113 TCGv_i32 t0 = tcg_const_i32(sprn);
1114 gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
1115 tcg_temp_free_i32(t0);
1116 }
1117 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
1118 {
1119 gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
1120 }
1121 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
1122 {
1123 gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
1124 }
1125
1126 #endif
1127
1128 #if !defined(CONFIG_USER_ONLY)
1129 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1130 {
1131 TCGv val = tcg_temp_new();
1132 tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1133 gen_store_spr(SPR_BOOKE_MAS3, val);
1134 tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1135 gen_store_spr(SPR_BOOKE_MAS7, val);
1136 tcg_temp_free(val);
1137 }
1138
1139 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1140 {
1141 TCGv mas7 = tcg_temp_new();
1142 TCGv mas3 = tcg_temp_new();
1143 gen_load_spr(mas7, SPR_BOOKE_MAS7);
1144 tcg_gen_shli_tl(mas7, mas7, 32);
1145 gen_load_spr(mas3, SPR_BOOKE_MAS3);
1146 tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1147 tcg_temp_free(mas3);
1148 tcg_temp_free(mas7);
1149 }
1150
1151 #endif
1152
1153 #ifdef TARGET_PPC64
1154 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1155 int bit, int sprn, int cause)
1156 {
1157 TCGv_i32 t1 = tcg_const_i32(bit);
1158 TCGv_i32 t2 = tcg_const_i32(sprn);
1159 TCGv_i32 t3 = tcg_const_i32(cause);
1160
1161 gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);
1162
1163 tcg_temp_free_i32(t3);
1164 tcg_temp_free_i32(t2);
1165 tcg_temp_free_i32(t1);
1166 }
1167
1168 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1169 int bit, int sprn, int cause)
1170 {
1171 TCGv_i32 t1 = tcg_const_i32(bit);
1172 TCGv_i32 t2 = tcg_const_i32(sprn);
1173 TCGv_i32 t3 = tcg_const_i32(cause);
1174
1175 gen_helper_msr_facility_check(cpu_env, t1, t2, t3);
1176
1177 tcg_temp_free_i32(t3);
1178 tcg_temp_free_i32(t2);
1179 tcg_temp_free_i32(t1);
1180 }
1181
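/*
 * The *_prev_upper32 helpers give 32-bit access to the high half of a
 * 64-bit SPR that is mapped at the next-lower SPR number (hence the
 * "sprn - 1"): the read shifts bits 32..63 down into the GPR, and the
 * write deposits the new value into those bits while leaving the low
 * half of the underlying SPR untouched.
 */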
1182 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1183 {
1184 TCGv spr_up = tcg_temp_new();
1185 TCGv spr = tcg_temp_new();
1186
1187 gen_load_spr(spr, sprn - 1);
1188 tcg_gen_shri_tl(spr_up, spr, 32);
1189 tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1190
1191 tcg_temp_free(spr);
1192 tcg_temp_free(spr_up);
1193 }
1194
1195 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1196 {
1197 TCGv spr = tcg_temp_new();
1198
1199 gen_load_spr(spr, sprn - 1);
1200 tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1201 gen_store_spr(sprn - 1, spr);
1202
1203 tcg_temp_free(spr);
1204 }
1205
1206 #if !defined(CONFIG_USER_ONLY)
1207 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1208 {
1209 TCGv hmer = tcg_temp_new();
1210
1211 gen_load_spr(hmer, sprn);
1212 tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1213 gen_store_spr(sprn, hmer);
1214 spr_store_dump_spr(sprn);
1215 tcg_temp_free(hmer);
1216 }
1217
1218 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1219 {
1220 gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
1221 }
1222 #endif /* !defined(CONFIG_USER_ONLY) */
1223
1224 void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1225 {
1226 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1227 spr_read_generic(ctx, gprn, sprn);
1228 }
1229
1230 void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1231 {
1232 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1233 spr_write_generic(ctx, sprn, gprn);
1234 }
1235
1236 void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1237 {
1238 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1239 spr_read_generic(ctx, gprn, sprn);
1240 }
1241
1242 void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1243 {
1244 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1245 spr_write_generic(ctx, sprn, gprn);
1246 }
1247
1248 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1249 {
1250 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1251 spr_read_prev_upper32(ctx, gprn, sprn);
1252 }
1253
1254 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1255 {
1256 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1257 spr_write_prev_upper32(ctx, sprn, gprn);
1258 }
1259
1260 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1261 {
1262 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1263 spr_read_generic(ctx, gprn, sprn);
1264 }
1265
1266 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1267 {
1268 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1269 spr_write_generic(ctx, sprn, gprn);
1270 }
1271
1272 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1273 {
1274 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1275 spr_read_prev_upper32(ctx, gprn, sprn);
1276 }
1277
1278 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1279 {
1280 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1281 spr_write_prev_upper32(ctx, sprn, gprn);
1282 }
1283 #endif
1284
1285 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
1286 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
1287
1288 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
1289 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
1290
1291 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
1292 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
1293
1294 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
1295 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
1296
1297 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \
1298 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
1299
1300 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
1301 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1302
1303 typedef struct opcode_t {
1304 unsigned char opc1, opc2, opc3, opc4;
1305 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1306 unsigned char pad[4];
1307 #endif
1308 opc_handler_t handler;
1309 const char *oname;
1310 } opcode_t;
1311
1312 /* Helpers for priv. check */
1313 #define GEN_PRIV \
1314 do { \
1315 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
1316 } while (0)
1317
1318 #if defined(CONFIG_USER_ONLY)
1319 #define CHK_HV GEN_PRIV
1320 #define CHK_SV GEN_PRIV
1321 #define CHK_HVRM GEN_PRIV
1322 #else
1323 #define CHK_HV \
1324 do { \
1325 if (unlikely(ctx->pr || !ctx->hv)) { \
1326 GEN_PRIV; \
1327 } \
1328 } while (0)
1329 #define CHK_SV \
1330 do { \
1331 if (unlikely(ctx->pr)) { \
1332 GEN_PRIV; \
1333 } \
1334 } while (0)
1335 #define CHK_HVRM \
1336 do { \
1337 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
1338 GEN_PRIV; \
1339 } \
1340 } while (0)
1341 #endif
1342
1343 #define CHK_NONE
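
/*
 * Typical use (illustrative sketch, not necessarily verbatim from this
 * file): a privileged instruction handler invokes one of the checks above
 * before emitting any code, e.g.
 *
 *     static void gen_mfmsr(DisasContext *ctx)
 *     {
 *         CHK_SV;
 *         tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
 *     }
 *
 * In user-only builds every check degenerates to GEN_PRIV, i.e. the
 * instruction always raises a privilege exception.
 */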
1344
1345 /*****************************************************************************/
1346 /* PowerPC instructions table */
1347
1348 #if defined(DO_PPC_STATISTICS)
1349 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
1350 { \
1351 .opc1 = op1, \
1352 .opc2 = op2, \
1353 .opc3 = op3, \
1354 .opc4 = 0xff, \
1355 .handler = { \
1356 .inval1 = invl, \
1357 .type = _typ, \
1358 .type2 = _typ2, \
1359 .handler = &gen_##name, \
1360 .oname = stringify(name), \
1361 }, \
1362 .oname = stringify(name), \
1363 }
1364 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
1365 { \
1366 .opc1 = op1, \
1367 .opc2 = op2, \
1368 .opc3 = op3, \
1369 .opc4 = 0xff, \
1370 .handler = { \
1371 .inval1 = invl1, \
1372 .inval2 = invl2, \
1373 .type = _typ, \
1374 .type2 = _typ2, \
1375 .handler = &gen_##name, \
1376 .oname = stringify(name), \
1377 }, \
1378 .oname = stringify(name), \
1379 }
1380 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
1381 { \
1382 .opc1 = op1, \
1383 .opc2 = op2, \
1384 .opc3 = op3, \
1385 .opc4 = 0xff, \
1386 .handler = { \
1387 .inval1 = invl, \
1388 .type = _typ, \
1389 .type2 = _typ2, \
1390 .handler = &gen_##name, \
1391 .oname = onam, \
1392 }, \
1393 .oname = onam, \
1394 }
1395 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
1396 { \
1397 .opc1 = op1, \
1398 .opc2 = op2, \
1399 .opc3 = op3, \
1400 .opc4 = op4, \
1401 .handler = { \
1402 .inval1 = invl, \
1403 .type = _typ, \
1404 .type2 = _typ2, \
1405 .handler = &gen_##name, \
1406 .oname = stringify(name), \
1407 }, \
1408 .oname = stringify(name), \
1409 }
1410 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \
1411 { \
1412 .opc1 = op1, \
1413 .opc2 = op2, \
1414 .opc3 = op3, \
1415 .opc4 = op4, \
1416 .handler = { \
1417 .inval1 = invl, \
1418 .type = _typ, \
1419 .type2 = _typ2, \
1420 .handler = &gen_##name, \
1421 .oname = onam, \
1422 }, \
1423 .oname = onam, \
1424 }
1425 #else
1426 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
1427 { \
1428 .opc1 = op1, \
1429 .opc2 = op2, \
1430 .opc3 = op3, \
1431 .opc4 = 0xff, \
1432 .handler = { \
1433 .inval1 = invl, \
1434 .type = _typ, \
1435 .type2 = _typ2, \
1436 .handler = &gen_##name, \
1437 }, \
1438 .oname = stringify(name), \
1439 }
1440 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
1441 { \
1442 .opc1 = op1, \
1443 .opc2 = op2, \
1444 .opc3 = op3, \
1445 .opc4 = 0xff, \
1446 .handler = { \
1447 .inval1 = invl1, \
1448 .inval2 = invl2, \
1449 .type = _typ, \
1450 .type2 = _typ2, \
1451 .handler = &gen_##name, \
1452 }, \
1453 .oname = stringify(name), \
1454 }
1455 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
1456 { \
1457 .opc1 = op1, \
1458 .opc2 = op2, \
1459 .opc3 = op3, \
1460 .opc4 = 0xff, \
1461 .handler = { \
1462 .inval1 = invl, \
1463 .type = _typ, \
1464 .type2 = _typ2, \
1465 .handler = &gen_##name, \
1466 }, \
1467 .oname = onam, \
1468 }
1469 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
1470 { \
1471 .opc1 = op1, \
1472 .opc2 = op2, \
1473 .opc3 = op3, \
1474 .opc4 = op4, \
1475 .handler = { \
1476 .inval1 = invl, \
1477 .type = _typ, \
1478 .type2 = _typ2, \
1479 .handler = &gen_##name, \
1480 }, \
1481 .oname = stringify(name), \
1482 }
1483 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \
1484 { \
1485 .opc1 = op1, \
1486 .opc2 = op2, \
1487 .opc3 = op3, \
1488 .opc4 = op4, \
1489 .handler = { \
1490 .inval1 = invl, \
1491 .type = _typ, \
1492 .type2 = _typ2, \
1493 .handler = &gen_##name, \
1494 }, \
1495 .oname = onam, \
1496 }
1497 #endif
1498
1499 /* Invalid instruction */
1500 static void gen_invalid(DisasContext *ctx)
1501 {
1502 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1503 }
1504
1505 static opc_handler_t invalid_handler = {
1506 .inval1 = 0xFFFFFFFF,
1507 .inval2 = 0xFFFFFFFF,
1508 .type = PPC_NONE,
1509 .type2 = PPC_NONE,
1510 .handler = gen_invalid,
1511 };
1512
1513 /*** Integer comparison ***/
1514
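/*
 * gen_op_cmp() computes one 4-bit CR field for a comparison: it starts from
 * CRF_EQ, uses two movconds to replace that with CRF_LT or CRF_GT when the
 * (signed or unsigned, depending on 's') comparison says so, and finally
 * combines the result with the current SO flag from cpu_so.  The *32
 * variants below sign- or zero-extend the operands first so the same code
 * handles 32-bit mode.
 */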
1515 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
1516 {
1517 TCGv t0 = tcg_temp_new();
1518 TCGv t1 = tcg_temp_new();
1519 TCGv_i32 t = tcg_temp_new_i32();
1520
1521 tcg_gen_movi_tl(t0, CRF_EQ);
1522 tcg_gen_movi_tl(t1, CRF_LT);
1523 tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1524 t0, arg0, arg1, t1, t0);
1525 tcg_gen_movi_tl(t1, CRF_GT);
1526 tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1527 t0, arg0, arg1, t1, t0);
1528
1529 tcg_gen_trunc_tl_i32(t, t0);
1530 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1531 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1532
1533 tcg_temp_free(t0);
1534 tcg_temp_free(t1);
1535 tcg_temp_free_i32(t);
1536 }
1537
1538 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1539 {
1540 TCGv t0 = tcg_const_tl(arg1);
1541 gen_op_cmp(arg0, t0, s, crf);
1542 tcg_temp_free(t0);
1543 }
1544
1545 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1546 {
1547 TCGv t0, t1;
1548 t0 = tcg_temp_new();
1549 t1 = tcg_temp_new();
1550 if (s) {
1551 tcg_gen_ext32s_tl(t0, arg0);
1552 tcg_gen_ext32s_tl(t1, arg1);
1553 } else {
1554 tcg_gen_ext32u_tl(t0, arg0);
1555 tcg_gen_ext32u_tl(t1, arg1);
1556 }
1557 gen_op_cmp(t0, t1, s, crf);
1558 tcg_temp_free(t1);
1559 tcg_temp_free(t0);
1560 }
1561
1562 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1563 {
1564 TCGv t0 = tcg_const_tl(arg1);
1565 gen_op_cmp32(arg0, t0, s, crf);
1566 tcg_temp_free(t0);
1567 }
1568
1569 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1570 {
1571 if (NARROW_MODE(ctx)) {
1572 gen_op_cmpi32(reg, 0, 1, 0);
1573 } else {
1574 gen_op_cmpi(reg, 0, 1, 0);
1575 }
1576 }
1577
1578 /* cmp */
1579 static void gen_cmp(DisasContext *ctx)
1580 {
1581 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1582 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1583 1, crfD(ctx->opcode));
1584 } else {
1585 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1586 1, crfD(ctx->opcode));
1587 }
1588 }
1589
1590 /* cmpi */
1591 static void gen_cmpi(DisasContext *ctx)
1592 {
1593 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1594 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
1595 1, crfD(ctx->opcode));
1596 } else {
1597 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
1598 1, crfD(ctx->opcode));
1599 }
1600 }
1601
1602 /* cmpl */
1603 static void gen_cmpl(DisasContext *ctx)
1604 {
1605 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1606 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1607 0, crfD(ctx->opcode));
1608 } else {
1609 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1610 0, crfD(ctx->opcode));
1611 }
1612 }
1613
1614 /* cmpli */
1615 static void gen_cmpli(DisasContext *ctx)
1616 {
1617 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1618 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
1619 0, crfD(ctx->opcode));
1620 } else {
1621 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
1622 0, crfD(ctx->opcode));
1623 }
1624 }
1625
1626 /* cmprb - range comparison: isupper, isalpha, islower */
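/*
 * The byte to test is the low byte of rA; rB packs the range bounds, with
 * the lower bound in byte 0 and the upper bound in byte 1 (and, when opcode
 * bit 0x00200000 is set, a second range in bytes 2 and 3).  The target CR
 * field gets GT set when the byte falls inside any of the supplied ranges.
 */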
1627 static void gen_cmprb(DisasContext *ctx)
1628 {
1629 TCGv_i32 src1 = tcg_temp_new_i32();
1630 TCGv_i32 src2 = tcg_temp_new_i32();
1631 TCGv_i32 src2lo = tcg_temp_new_i32();
1632 TCGv_i32 src2hi = tcg_temp_new_i32();
1633 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];
1634
1635 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
1636 tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);
1637
1638 tcg_gen_andi_i32(src1, src1, 0xFF);
1639 tcg_gen_ext8u_i32(src2lo, src2);
1640 tcg_gen_shri_i32(src2, src2, 8);
1641 tcg_gen_ext8u_i32(src2hi, src2);
1642
1643 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1644 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1645 tcg_gen_and_i32(crf, src2lo, src2hi);
1646
1647 if (ctx->opcode & 0x00200000) {
1648 tcg_gen_shri_i32(src2, src2, 8);
1649 tcg_gen_ext8u_i32(src2lo, src2);
1650 tcg_gen_shri_i32(src2, src2, 8);
1651 tcg_gen_ext8u_i32(src2hi, src2);
1652 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1653 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1654 tcg_gen_and_i32(src2lo, src2lo, src2hi);
1655 tcg_gen_or_i32(crf, crf, src2lo);
1656 }
1657 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
1658 tcg_temp_free_i32(src1);
1659 tcg_temp_free_i32(src2);
1660 tcg_temp_free_i32(src2lo);
1661 tcg_temp_free_i32(src2hi);
1662 }
1663
1664 #if defined(TARGET_PPC64)
1665 /* cmpeqb */
1666 static void gen_cmpeqb(DisasContext *ctx)
1667 {
1668 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1669 cpu_gpr[rB(ctx->opcode)]);
1670 }
1671 #endif
1672
1673 /* isel (PowerPC 2.03 specification) */
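/*
 * isel: rD = CR[bi] ? (rA ? GPR[rA] : 0) : GPR[rB].  The CR bit is picked
 * by masking the 4-bit CR field with 0x08 >> (bi & 3), and the rA == 0
 * case is handled by substituting the same zero constant that is used for
 * the comparison.
 */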
1674 static void gen_isel(DisasContext *ctx)
1675 {
1676 uint32_t bi = rC(ctx->opcode);
1677 uint32_t mask = 0x08 >> (bi & 0x03);
1678 TCGv t0 = tcg_temp_new();
1679 TCGv zr;
1680
1681 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
1682 tcg_gen_andi_tl(t0, t0, mask);
1683
1684 zr = tcg_const_tl(0);
1685 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
1686 rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
1687 cpu_gpr[rB(ctx->opcode)]);
1688 tcg_temp_free(zr);
1689 tcg_temp_free(t0);
1690 }
1691
1692 /* cmpb: PowerPC 2.05 specification */
1693 static void gen_cmpb(DisasContext *ctx)
1694 {
1695 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
1696 cpu_gpr[rB(ctx->opcode)]);
1697 }
1698
1699 /*** Integer arithmetic ***/
1700
1701 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1702 TCGv arg1, TCGv arg2, int sub)
1703 {
1704 TCGv t0 = tcg_temp_new();
1705
1706 tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1707 tcg_gen_xor_tl(t0, arg1, arg2);
1708 if (sub) {
1709 tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1710 } else {
1711 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1712 }
1713 tcg_temp_free(t0);
1714 if (NARROW_MODE(ctx)) {
1715 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1716 if (is_isa300(ctx)) {
1717 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1718 }
1719 } else {
1720 if (is_isa300(ctx)) {
1721 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1722 }
1723 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1724 }
1725 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1726 }
1727
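/*
 * CA32 follows from the standard carry identity: for res = a + b, the carry
 * into bit k is bit k of (a ^ b ^ res), so extracting bit 32 of that value
 * yields the carry out of the low 32 bits.  For subtraction, implemented as
 * a + ~b + 1, a ^ ~b == eqv(a, b) takes the place of a ^ b.
 */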
1728 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1729 TCGv res, TCGv arg0, TCGv arg1,
1730 TCGv ca32, int sub)
1731 {
1732 TCGv t0;
1733
1734 if (!is_isa300(ctx)) {
1735 return;
1736 }
1737
1738 t0 = tcg_temp_new();
1739 if (sub) {
1740 tcg_gen_eqv_tl(t0, arg0, arg1);
1741 } else {
1742 tcg_gen_xor_tl(t0, arg0, arg1);
1743 }
1744 tcg_gen_xor_tl(t0, t0, res);
1745 tcg_gen_extract_tl(ca32, t0, 32, 1);
1746 tcg_temp_free(t0);
1747 }
1748
1749 /* Common add function */
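/*
 * All of the add-family instructions funnel through here.  'ca' is the
 * carry register to use (CA for most forms, OV for addex), 'add_ca' selects
 * whether that carry is added in, 'compute_ca'/'compute_ov' select which
 * flags to produce, and 'compute_rc0' requests the Rc=1 CR0 update.
 */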
1750 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1751 TCGv arg2, TCGv ca, TCGv ca32,
1752 bool add_ca, bool compute_ca,
1753 bool compute_ov, bool compute_rc0)
1754 {
1755 TCGv t0 = ret;
1756
1757 if (compute_ca || compute_ov) {
1758 t0 = tcg_temp_new();
1759 }
1760
1761 if (compute_ca) {
1762 if (NARROW_MODE(ctx)) {
1763 /*
1764 * Caution: a non-obvious corner case of the spec is that
1765 * we must perform the *entire* 64-bit addition, but
1766 * report only the carry into bit 32.
1767 */
1768 TCGv t1 = tcg_temp_new();
1769 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */
1770 tcg_gen_add_tl(t0, arg1, arg2);
1771 if (add_ca) {
1772 tcg_gen_add_tl(t0, t0, ca);
1773 }
1774 tcg_gen_xor_tl(ca, t0, t1); /* bits changed w/ carry */
1775 tcg_temp_free(t1);
1776 tcg_gen_extract_tl(ca, ca, 32, 1);
1777 if (is_isa300(ctx)) {
1778 tcg_gen_mov_tl(ca32, ca);
1779 }
1780 } else {
1781 TCGv zero = tcg_const_tl(0);
1782 if (add_ca) {
1783 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1784 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1785 } else {
1786 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1787 }
1788 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1789 tcg_temp_free(zero);
1790 }
1791 } else {
1792 tcg_gen_add_tl(t0, arg1, arg2);
1793 if (add_ca) {
1794 tcg_gen_add_tl(t0, t0, ca);
1795 }
1796 }
1797
1798 if (compute_ov) {
1799 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1800 }
1801 if (unlikely(compute_rc0)) {
1802 gen_set_Rc0(ctx, t0);
1803 }
1804
1805 if (t0 != ret) {
1806 tcg_gen_mov_tl(ret, t0);
1807 tcg_temp_free(t0);
1808 }
1809 }
1810 /* Add functions with two operands */
1811 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \
1812 static void glue(gen_, name)(DisasContext *ctx) \
1813 { \
1814 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1815 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1816 ca, glue(ca, 32), \
1817 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1818 }
1819 /* Add functions with one operand and one immediate */
1820 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \
1821 add_ca, compute_ca, compute_ov) \
1822 static void glue(gen_, name)(DisasContext *ctx) \
1823 { \
1824 TCGv t0 = tcg_const_tl(const_val); \
1825 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1826 cpu_gpr[rA(ctx->opcode)], t0, \
1827 ca, glue(ca, 32), \
1828 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1829 tcg_temp_free(t0); \
1830 }
1831
1832 /* add add. addo addo. */
1833 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
1834 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
1835 /* addc addc. addco addco. */
1836 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
1837 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
1838 /* adde adde. addeo addeo. */
1839 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
1840 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
1841 /* addme addme. addmeo addmeo. */
1842 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
1843 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
1844 /* addex */
1845 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
1846 /* addze addze. addzeo addzeo.*/
1847 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
1848 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
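/*
 * For reference, an instantiation such as GEN_INT_ARITH_ADD(adde, 0x04,
 * cpu_ca, 1, 1, 0) expands to roughly:
 *
 *     static void gen_adde(DisasContext *ctx)
 *     {
 *         gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],
 *                          cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
 *                          cpu_ca, cpu_ca32,
 *                          1, 1, 0, Rc(ctx->opcode));
 *     }
 */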
1849 /* addi */
1850 static void gen_addi(DisasContext *ctx)
1851 {
1852 target_long simm = SIMM(ctx->opcode);
1853
1854 if (rA(ctx->opcode) == 0) {
1855 /* li case */
1856 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
1857 } else {
1858 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
1859 cpu_gpr[rA(ctx->opcode)], simm);
1860 }
1861 }
1862 /* addic addic.*/
1863 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1864 {
1865 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1866 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1867 c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
1868 tcg_temp_free(c);
1869 }
1870
1871 static void gen_addic(DisasContext *ctx)
1872 {
1873 gen_op_addic(ctx, 0);
1874 }
1875
1876 static void gen_addic_(DisasContext *ctx)
1877 {
1878 gen_op_addic(ctx, 1);
1879 }
1880
1881 /* addis */
1882 static void gen_addis(DisasContext *ctx)
1883 {
1884 target_long simm = SIMM(ctx->opcode);
1885
1886 if (rA(ctx->opcode) == 0) {
1887 /* lis case */
1888 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
1889 } else {
1890 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
1891 cpu_gpr[rA(ctx->opcode)], simm << 16);
1892 }
1893 }
1894
1895 /* addpcis */
1896 static void gen_addpcis(DisasContext *ctx)
1897 {
1898 target_long d = DX(ctx->opcode);
1899
1900 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16));
1901 }
1902
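/*
 * Integer divide helper.  The host division must never trap, so the two
 * problem cases (divide by zero, and INT_MIN / -1 for the signed form) are
 * first detected into t2, and the divisor is then forced to a safe value
 * via movcond; the architectural result in those cases is undefined anyway,
 * and t2 doubles as the overflow value written to OV/OV32 when compute_ov
 * is set.
 */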
1903 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
1904 TCGv arg2, int sign, int compute_ov)
1905 {
1906 TCGv_i32 t0 = tcg_temp_new_i32();
1907 TCGv_i32 t1 = tcg_temp_new_i32();
1908 TCGv_i32 t2 = tcg_temp_new_i32();
1909 TCGv_i32 t3 = tcg_temp_new_i32();
1910
1911 tcg_gen_trunc_tl_i32(t0, arg1);
1912 tcg_gen_trunc_tl_i32(t1, arg2);
1913 if (sign) {
1914 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1915 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1916 tcg_gen_and_i32(t2, t2, t3);
1917 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1918 tcg_gen_or_i32(t2, t2, t3);
1919 tcg_gen_movi_i32(t3, 0);
1920 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1921 tcg_gen_div_i32(t3, t0, t1);
1922 tcg_gen_extu_i32_tl(ret, t3);
1923 } else {
1924 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1925 tcg_gen_movi_i32(t3, 0);
1926 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1927 tcg_gen_divu_i32(t3, t0, t1);
1928 tcg_gen_extu_i32_tl(ret, t3);
1929 }
1930 if (compute_ov) {
1931 tcg_gen_extu_i32_tl(cpu_ov, t2);
1932 if (is_isa300(ctx)) {
1933 tcg_gen_extu_i32_tl(cpu_ov32, t2);
1934 }
1935 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1936 }
1937 tcg_temp_free_i32(t0);
1938 tcg_temp_free_i32(t1);
1939 tcg_temp_free_i32(t2);
1940 tcg_temp_free_i32(t3);
1941
1942 if (unlikely(Rc(ctx->opcode) != 0)) {
1943 gen_set_Rc0(ctx, ret);
1944 }
1945 }
1946 /* Div functions */
1947 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
1948 static void glue(gen_, name)(DisasContext *ctx) \
1949 { \
1950 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
1951 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1952 sign, compute_ov); \
1953 }
1954 /* divwu divwu. divwuo divwuo. */
1955 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1956 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1957 /* divw divw. divwo divwo. */
1958 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1959 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1960
1961 /* div[wd]eu[o][.] */
1962 #define GEN_DIVE(name, hlpr, compute_ov) \
1963 static void gen_##name(DisasContext *ctx) \
1964 { \
1965 TCGv_i32 t0 = tcg_const_i32(compute_ov); \
1966 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \
1967 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
1968 tcg_temp_free_i32(t0); \
1969 if (unlikely(Rc(ctx->opcode) != 0)) { \
1970 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1971 } \
1972 }
1973
1974 GEN_DIVE(divweu, divweu, 0);
1975 GEN_DIVE(divweuo, divweu, 1);
1976 GEN_DIVE(divwe, divwe, 0);
1977 GEN_DIVE(divweo, divwe, 1);
1978
1979 #if defined(TARGET_PPC64)
1980 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1981 TCGv arg2, int sign, int compute_ov)
1982 {
1983 TCGv_i64 t0 = tcg_temp_new_i64();
1984 TCGv_i64 t1 = tcg_temp_new_i64();
1985 TCGv_i64 t2 = tcg_temp_new_i64();
1986 TCGv_i64 t3 = tcg_temp_new_i64();
1987
1988 tcg_gen_mov_i64(t0, arg1);
1989 tcg_gen_mov_i64(t1, arg2);
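/* Same guarding of INT64_MIN / -1 and division by zero as in gen_op_arith_divw above, done at 64-bit width. */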
1990 if (sign) {
1991 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1992 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1993 tcg_gen_and_i64(t2, t2, t3);
1994 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1995 tcg_gen_or_i64(t2, t2, t3);
1996 tcg_gen_movi_i64(t3, 0);
1997 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1998 tcg_gen_div_i64(ret, t0, t1);
1999 } else {
2000 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
2001 tcg_gen_movi_i64(t3, 0);
2002 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
2003 tcg_gen_divu_i64(ret, t0, t1);
2004 }
2005 if (compute_ov) {
2006 tcg_gen_mov_tl(cpu_ov, t2);
2007 if (is_isa300(ctx)) {
2008 tcg_gen_mov_tl(cpu_ov32, t2);
2009 }
2010 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2011 }
2012 tcg_temp_free_i64(t0);
2013 tcg_temp_free_i64(t1);
2014 tcg_temp_free_i64(t2);
2015 tcg_temp_free_i64(t3);
2016
2017 if (unlikely(Rc(ctx->opcode) != 0)) {
2018 gen_set_Rc0(ctx, ret);
2019 }
2020 }
2021
2022 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
2023 static void glue(gen_, name)(DisasContext *ctx) \
2024 { \
2025 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
2026 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2027 sign, compute_ov); \
2028 }
2029 /* divdu divdu. divduo divduo. */
2030 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
2031 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
2032 /* divd divd. divdo divdo. */
2033 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
2034 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
2035
2036 GEN_DIVE(divdeu, divdeu, 0);
2037 GEN_DIVE(divdeuo, divdeu, 1);
2038 GEN_DIVE(divde, divde, 0);
2039 GEN_DIVE(divdeo, divde, 1);
2040 #endif
2041
2042 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
2043 TCGv arg2, int sign)
2044 {
2045 TCGv_i32 t0 = tcg_temp_new_i32();
2046 TCGv_i32 t1 = tcg_temp_new_i32();
2047
2048 tcg_gen_trunc_tl_i32(t0, arg1);
2049 tcg_gen_trunc_tl_i32(t1, arg2);
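/*
 * As in the divide helpers, the divisor is patched so that INT_MIN % -1
 * and x % 0 cannot trap on the host; the architecture leaves those
 * results undefined anyway.
 */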
2050 if (sign) {
2051 TCGv_i32 t2 = tcg_temp_new_i32();
2052 TCGv_i32 t3 = tcg_temp_new_i32();
2053 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
2054 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
2055 tcg_gen_and_i32(t2, t2, t3);
2056 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
2057 tcg_gen_or_i32(t2, t2, t3);
2058 tcg_gen_movi_i32(t3, 0);
2059 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
2060 tcg_gen_rem_i32(t3, t0, t1);
2061 tcg_gen_ext_i32_tl(ret, t3);
2062 tcg_temp_free_i32(t2);
2063 tcg_temp_free_i32(t3);
2064 } else {
2065 TCGv_i32 t2 = tcg_const_i32(1);
2066 TCGv_i32 t3 = tcg_const_i32(0);
2067 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
2068 tcg_gen_remu_i32(t3, t0, t1);
2069 tcg_gen_extu_i32_tl(ret, t3);
2070 tcg_temp_free_i32(t2);
2071 tcg_temp_free_i32(t3);
2072 }
2073 tcg_temp_free_i32(t0);
2074 tcg_temp_free_i32(t1);
2075 }
2076
2077 #define GEN_INT_ARITH_MODW(name, opc3, sign) \
2078 static void glue(gen_, name)(DisasContext *ctx) \
2079 { \
2080 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \
2081 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2082 sign); \
2083 }
2084
2085 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
2086 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
2087
2088 #if defined(TARGET_PPC64)
2089 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
2090 TCGv arg2, int sign)
2091 {
2092 TCGv_i64 t0 = tcg_temp_new_i64();
2093 TCGv_i64 t1 = tcg_temp_new_i64();
2094
2095 tcg_gen_mov_i64(t0, arg1);
2096 tcg_gen_mov_i64(t1, arg2);
2097 if (sign) {
2098 TCGv_i64 t2 = tcg_temp_new_i64();
2099 TCGv_i64 t3 = tcg_temp_new_i64();
2100 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
2101 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
2102 tcg_gen_and_i64(t2, t2, t3);
2103 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
2104 tcg_gen_or_i64(t2, t2, t3);
2105 tcg_gen_movi_i64(t3, 0);
2106 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
2107 tcg_gen_rem_i64(ret, t0, t1);
2108 tcg_temp_free_i64(t2);
2109 tcg_temp_free_i64(t3);
2110 } else {
2111 TCGv_i64 t2 = tcg_const_i64(1);
2112 TCGv_i64 t3 = tcg_const_i64(0);
2113 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
2114 tcg_gen_remu_i64(ret, t0, t1);
2115 tcg_temp_free_i64(t2);
2116 tcg_temp_free_i64(t3);
2117 }
2118 tcg_temp_free_i64(t0);
2119 tcg_temp_free_i64(t1);
2120 }
2121
2122 #define GEN_INT_ARITH_MODD(name, opc3, sign) \
2123 static void glue(gen_, name)(DisasContext *ctx) \
2124 { \
2125 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \
2126 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2127 sign); \
2128 }
2129
2130 GEN_INT_ARITH_MODD(modud, 0x08, 0);
2131 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
2132 #endif
2133
2134 /* mulhw mulhw. */
2135 static void gen_mulhw(DisasContext *ctx)
2136 {
2137 TCGv_i32 t0 = tcg_temp_new_i32();
2138 TCGv_i32 t1 = tcg_temp_new_i32();
2139
2140 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2141 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2142 tcg_gen_muls2_i32(t0, t1, t0, t1);
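/* muls2 returns the full 64-bit product as a (low, high) pair; mulhw keeps only the high 32 bits. */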
2143 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
2144 tcg_temp_free_i32(t0);
2145 tcg_temp_free_i32(t1);
2146 if (unlikely(Rc(ctx->opcode) != 0)) {
2147 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2148 }
2149 }
2150
2151 /* mulhwu mulhwu. */
2152 static void gen_mulhwu(DisasContext *ctx)
2153 {
2154 TCGv_i32 t0 = tcg_temp_new_i32();
2155 TCGv_i32 t1 = tcg_temp_new_i32();
2156
2157 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2158 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2159 tcg_gen_mulu2_i32(t0, t1, t0, t1);
2160 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
2161 tcg_temp_free_i32(t0);
2162 tcg_temp_free_i32(t1);
2163 if (unlikely(Rc(ctx->opcode) != 0)) {
2164 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2165 }
2166 }
2167
2168 /* mullw mullw. */
2169 static void gen_mullw(DisasContext *ctx)
2170 {
2171 #if defined(TARGET_PPC64)
2172 TCGv_i64 t0, t1;
2173 t0 = tcg_temp_new_i64();
2174 t1 = tcg_temp_new_i64();
2175 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
2176 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
2177 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2178 tcg_temp_free(t0);
2179 tcg_temp_free(t1);
2180 #else
2181 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2182 cpu_gpr[rB(ctx->opcode)]);
2183 #endif
2184 if (unlikely(Rc(ctx->opcode) != 0)) {
2185 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2186 }
2187 }
2188
2189 /* mullwo mullwo. */
2190 static void gen_mullwo(DisasContext *ctx)
2191 {
2192 TCGv_i32 t0 = tcg_temp_new_i32();
2193 TCGv_i32 t1 = tcg_temp_new_i32();
2194
2195 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2196 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2197 tcg_gen_muls2_i32(t0, t1, t0, t1);
2198 #if defined(TARGET_PPC64)
2199 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2200 #else
2201 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
2202 #endif
2203
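/* The multiply overflowed iff the high 32 bits of the product differ from the sign extension of the low 32 bits. */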
2204 tcg_gen_sari_i32(t0, t0, 31);
2205 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
2206 tcg_gen_extu_i32_tl(cpu_ov, t0);
2207 if (is_isa300(ctx)) {
2208 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2209 }
2210 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2211
2212 tcg_temp_free_i32(t0);
2213 tcg_temp_free_i32(t1);
2214 if (unlikely(Rc(ctx->opcode) != 0)) {
2215 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2216 }
2217 }
2218
2219 /* mulli */
2220 static void gen_mulli(DisasContext *ctx)
2221 {
2222 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2223 SIMM(ctx->opcode));
2224 }
2225
2226 #if defined(TARGET_PPC64)
2227 /* mulhd mulhd. */
2228 static void gen_mulhd(DisasContext *ctx)
2229 {
2230 TCGv lo = tcg_temp_new();
2231 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2232 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2233 tcg_temp_free(lo);
2234 if (unlikely(Rc(ctx->opcode) != 0)) {
2235 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2236 }
2237 }
2238
2239 /* mulhdu mulhdu. */
2240 static void gen_mulhdu(DisasContext *ctx)
2241 {
2242 TCGv lo = tcg_temp_new();
2243 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2244 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2245 tcg_temp_free(lo);
2246 if (unlikely(Rc(ctx->opcode) != 0)) {
2247 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2248 }
2249 }
2250
2251 /* mulld mulld. */
2252 static void gen_mulld(DisasContext *ctx)
2253 {
2254 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2255 cpu_gpr[rB(ctx->opcode)]);
2256 if (unlikely(Rc(ctx->opcode) != 0)) {
2257 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2258 }
2259 }
2260
2261 /* mulldo mulldo. */
2262 static void gen_mulldo(DisasContext *ctx)
2263 {
2264 TCGv_i64 t0 = tcg_temp_new_i64();
2265 TCGv_i64 t1 = tcg_temp_new_i64();
2266
2267 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
2268 cpu_gpr[rB(ctx->opcode)]);
2269 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
2270
2271 tcg_gen_sari_i64(t0, t0, 63);
2272 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
2273 if (is_isa300(ctx)) {
2274 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2275 }
2276 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2277
2278 tcg_temp_free_i64(t0);
2279 tcg_temp_free_i64(t1);
2280
2281 if (unlikely(Rc(ctx->opcode) != 0)) {
2282 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2283 }
2284 }
2285 #endif
2286
2287 /* Common subf function */
2288 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
2289 TCGv arg2, bool add_ca, bool compute_ca,
2290 bool compute_ov, bool compute_rc0)
2291 {
2292 TCGv t0 = ret;
2293
2294 if (compute_ca || compute_ov) {
2295 t0 = tcg_temp_new();
2296 }
2297
2298 if (compute_ca) {
2299 /* dest = ~arg1 + arg2 [+ ca]. */
2300 if (NARROW_MODE(ctx)) {
2301 /*
2302 * Caution: a non-obvious corner case of the spec is that
2303 * we must produce the *entire* 64-bit addition, but
2304 * produce the carry into bit 32.
2305 */
2306 TCGv inv1 = tcg_temp_new();
2307 TCGv t1 = tcg_temp_new();
2308 tcg_gen_not_tl(inv1, arg1);
2309 if (add_ca) {
2310 tcg_gen_add_tl(t0, arg2, cpu_ca);
2311 } else {
2312 tcg_gen_addi_tl(t0, arg2, 1);
2313 }
2314 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */
2315 tcg_gen_add_tl(t0, t0, inv1);
2316 tcg_temp_free(inv1);
2317 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changed w/ carry */
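/* cpu_ca now has a 1 in every bit position that received a carry in; bit 32, extracted below, is the carry out of the low 32-bit word. */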
2318 tcg_temp_free(t1);
2319 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
2320 if (is_isa300(ctx)) {
2321 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2322 }
2323 } else if (add_ca) {
2324 TCGv zero, inv1 = tcg_temp_new();
2325 tcg_gen_not_tl(inv1, arg1);
2326 zero = tcg_const_tl(0);
2327 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
2328 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
2329 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
2330 tcg_temp_free(zero);
2331 tcg_temp_free(inv1);
2332 } else {
2333 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
2334 tcg_gen_sub_tl(t0, arg2, arg1);
2335 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
2336 }
2337 } else if (add_ca) {
2338 /*
2339 * Since we're ignoring carry-out, we can simplify the
2340 * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
2341 */
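/* Two's complement: ~arg1 == -arg1 - 1, so ~arg1 + arg2 + ca == arg2 - arg1 + ca - 1. */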
2342 tcg_gen_sub_tl(t0, arg2, arg1);
2343 tcg_gen_add_tl(t0, t0, cpu_ca);
2344 tcg_gen_subi_tl(t0, t0, 1);
2345 } else {
2346 tcg_gen_sub_tl(t0, arg2, arg1);
2347 }
2348
2349 if (compute_ov) {
2350 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
2351 }
2352 if (unlikely(compute_rc0)) {
2353 gen_set_Rc0(ctx, t0);
2354 }
2355
2356 if (t0 != ret) {
2357 tcg_gen_mov_tl(ret, t0);
2358 tcg_temp_free(t0);
2359 }
2360 }
2361 /* Sub functions with two operands */
2362 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
2363 static void glue(gen_, name)(DisasContext *ctx) \
2364 { \
2365 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
2366 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2367 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
2368 }
2369 /* Sub functions with one operand and one immediate */
2370 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
2371 add_ca, compute_ca, compute_ov) \
2372 static void glue(gen_, name)(DisasContext *ctx) \
2373 { \
2374 TCGv t0 = tcg_const_tl(const_val); \
2375 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
2376 cpu_gpr[rA(ctx->opcode)], t0, \
2377 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
2378 tcg_temp_free(t0); \
2379 }
2380 /* subf subf. subfo subfo. */
2381 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
2382 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
2383 /* subfc subfc. subfco subfco. */
2384 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
2385 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
2386 /* subfe subfe. subfeo subfeo. */
2387 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
2388 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
2389 /* subfme subfme. subfmeo subfmeo. */
2390 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
2391 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
2392 /* subfze subfze. subfzeo subfzeo. */
2393 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
2394 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2395
2396 /* subfic */
2397 static void gen_subfic(DisasContext *ctx)
2398 {
2399 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
2400 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2401 c, 0, 1, 0, 0);
2402 tcg_temp_free(c);
2403 }
2404
2405 /* neg neg. nego nego. */
2406 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2407 {
2408 TCGv zero = tcg_const_tl(0);
2409 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2410 zero, 0, 0, compute_ov, Rc(ctx->opcode));
2411 tcg_temp_free(zero);
2412 }
2413
2414 static void gen_neg(DisasContext *ctx)
2415 {
2416 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2417 if (unlikely(Rc(ctx->opcode))) {
2418 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2419 }
2420 }
2421
2422 static void gen_nego(DisasContext *ctx)
2423 {
2424 gen_op_arith_neg(ctx, 1);
2425 }
2426
2427 /*** Integer logical ***/
2428 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
2429 static void glue(gen_, name)(DisasContext *ctx) \
2430 { \
2431 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
2432 cpu_gpr[rB(ctx->opcode)]); \
2433 if (unlikely(Rc(ctx->opcode) != 0)) \
2434 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
2435 }
2436
2437 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
2438 static void glue(gen_, name)(DisasContext *ctx) \
2439 { \
2440 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
2441 if (unlikely(Rc(ctx->opcode) != 0)) \
2442 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
2443 }
2444
2445 /* and & and. */
2446 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2447 /* andc & andc. */
2448 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2449
2450 /* andi. */
2451 static void gen_andi_(DisasContext *ctx)
2452 {
2453 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2454 UIMM(ctx->opcode));
2455 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2456 }
2457
2458 /* andis. */
2459 static void gen_andis_(DisasContext *ctx)
2460 {
2461 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2462 UIMM(ctx->opcode) << 16);
2463 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2464 }
2465
2466 /* cntlzw */
2467 static void gen_cntlzw(DisasContext *ctx)
2468 {
2469 TCGv_i32 t = tcg_temp_new_i32();
2470
2471 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2472 tcg_gen_clzi_i32(t, t, 32);
2473 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2474 tcg_temp_free_i32(t);
2475
2476 if (unlikely(Rc(ctx->opcode) != 0)) {
2477 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2478 }
2479 }
2480
2481 /* cnttzw */
2482 static void gen_cnttzw(DisasContext *ctx)
2483 {
2484 TCGv_i32 t = tcg_temp_new_i32();
2485
2486 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2487 tcg_gen_ctzi_i32(t, t, 32);
2488 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2489 tcg_temp_free_i32(t);
2490
2491 if (unlikely(Rc(ctx->opcode) != 0)) {
2492 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2493 }
2494 }
2495
2496 /* eqv & eqv. */
2497 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
2498 /* extsb & extsb. */
2499 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
2500 /* extsh & extsh. */
2501 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
2502 /* nand & nand. */
2503 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
2504 /* nor & nor. */
2505 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2506
2507 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
2508 static void gen_pause(DisasContext *ctx)
2509 {
2510 TCGv_i32 t0 = tcg_const_i32(0);
2511 tcg_gen_st_i32(t0, cpu_env,
2512 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
2513 tcg_temp_free_i32(t0);
2514
2515 /* Stop translation, this gives other CPUs a chance to run */
2516 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
2517 }
2518 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2519
2520 /* or & or. */
2521 static void gen_or(DisasContext *ctx)
2522 {
2523 int rs, ra, rb;
2524
2525 rs = rS(ctx->opcode);
2526 ra = rA(ctx->opcode);
2527 rb = rB(ctx->opcode);
2528 /* Optimisation for mr. ri case */
2529 if (rs != ra || rs != rb) {
2530 if (rs != rb) {
2531 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
2532 } else {
2533 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
2534 }
2535 if (unlikely(Rc(ctx->opcode) != 0)) {
2536 gen_set_Rc0(ctx, cpu_gpr[ra]);
2537 }
2538 } else if (unlikely(Rc(ctx->opcode) != 0)) {
2539 gen_set_Rc0(ctx, cpu_gpr[rs]);
2540 #if defined(TARGET_PPC64)
2541 } else if (rs != 0) { /* 0 is nop */
2542 int prio = 0;
2543
2544 switch (rs) {
2545 case 1:
2546 /* Set process priority to low */
2547 prio = 2;
2548 break;
2549 case 6:
2550 /* Set process priority to medium-low */
2551 prio = 3;
2552 break;
2553 case 2:
2554 /* Set process priority to normal */
2555 prio = 4;
2556 break;
2557 #if !defined(CONFIG_USER_ONLY)
2558 case 31:
2559 if (!ctx->pr) {
2560 /* Set process priority to very low */
2561 prio = 1;
2562 }
2563 break;
2564 case 5:
2565 if (!ctx->pr) {
2566 /* Set process priority to medium-high */
2567 prio = 5;
2568 }
2569 break;
2570 case 3:
2571 if (!ctx->pr) {
2572 /* Set process priority to high */
2573 prio = 6;
2574 }
2575 break;
2576 case 7:
2577 if (ctx->hv && !ctx->pr) {
2578 /* Set process priority to very high */
2579 prio = 7;
2580 }
2581 break;
2582 #endif
2583 default:
2584 break;
2585 }
2586 if (prio) {
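/* The 3-bit priority field occupies bits 52:50 of PPR, hence the mask and the shift by 50 below. */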
2587 TCGv t0 = tcg_temp_new();
2588 gen_load_spr(t0, SPR_PPR);
2589 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
2590 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
2591 gen_store_spr(SPR_PPR, t0);
2592 tcg_temp_free(t0);
2593 }
2594 #if !defined(CONFIG_USER_ONLY)
2595 /*
2596 * Pause out of TCG, otherwise spin loops with smt_low eat too
2597 * much CPU and the kernel hangs. This applies to all
2598 * encodings other than no-op, e.g., miso(rs=26), yield(27),
2599 * mdoio(29), mdoom(30), and all currently undefined.
2600 */
2601 gen_pause(ctx);
2602 #endif
2603 #endif
2604 }
2605 }
2606 /* orc & orc. */
2607 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2608
2609 /* xor & xor. */
2610 static void gen_xor(DisasContext *ctx)
2611 {
2612 /* Optimisation for "set to zero" case */
2613 if (rS(ctx->opcode) != rB(ctx->opcode)) {
2614 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2615 cpu_gpr[rB(ctx->opcode)]);
2616 } else {
2617 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2618 }
2619 if (unlikely(Rc(ctx->opcode) != 0)) {
2620 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2621 }
2622 }
2623
2624 /* ori */
2625 static void gen_ori(DisasContext *ctx)
2626 {
2627 target_ulong uimm = UIMM(ctx->opcode);
2628
2629 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2630 return;
2631 }
2632 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2633 }
2634
2635 /* oris */
2636 static void gen_oris(DisasContext *ctx)
2637 {
2638 target_ulong uimm = UIMM(ctx->opcode);
2639
2640 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2641 /* NOP */
2642 return;
2643 }
2644 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2645 uimm << 16);
2646 }
2647
2648 /* xori */
2649 static void gen_xori(DisasContext *ctx)
2650 {
2651 target_ulong uimm = UIMM(ctx->opcode);
2652
2653 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2654 /* NOP */
2655 return;
2656 }
2657 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2658 }
2659
2660 /* xoris */
2661 static void gen_xoris(DisasContext *ctx)
2662 {
2663 target_ulong uimm = UIMM(ctx->opcode);
2664
2665 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2666 /* NOP */
2667 return;
2668 }
2669 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2670 uimm << 16);
2671 }
2672
2673 /* popcntb : PowerPC 2.03 specification */
2674 static void gen_popcntb(DisasContext *ctx)
2675 {
2676 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2677 }
2678
2679 static void gen_popcntw(DisasContext *ctx)
2680 {
2681 #if defined(TARGET_PPC64)
2682 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2683 #else
2684 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2685 #endif
2686 }
2687
2688 #if defined(TARGET_PPC64)
2689 /* popcntd: PowerPC 2.06 specification */
2690 static void gen_popcntd(DisasContext *ctx)
2691 {
2692 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2693 }
2694 #endif
2695
2696 /* prtyw: PowerPC 2.05 specification */
2697 static void gen_prtyw(DisasContext *ctx)
2698 {
2699 TCGv ra = cpu_gpr[rA(ctx->opcode)];
2700 TCGv rs = cpu_gpr[rS(ctx->opcode)];
2701 TCGv t0 = tcg_temp_new();
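/*
 * Fold the byte least-significant bits of each word together (XOR with the
 * value shifted by 16, then by 8) and keep bit 0 of each 32-bit word,
 * giving the per-word parity of the bytes' low-order bits.
 */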
2702 tcg_gen_shri_tl(t0, rs, 16);
2703 tcg_gen_xor_tl(ra, rs, t0);
2704 tcg_gen_shri_tl(t0, ra, 8);
2705 tcg_gen_xor_tl(ra, ra, t0);
2706 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
2707 tcg_temp_free(t0);
2708 }
2709
2710 #if defined(TARGET_PPC64)
2711 /* prtyd: PowerPC 2.05 specification */
2712 static void gen_prtyd(DisasContext *ctx)
2713 {
2714 TCGv ra = cpu_gpr[rA(ctx->opcode)];
2715 TCGv rs = cpu_gpr[rS(ctx->opcode)];
2716 TCGv t0 = tcg_temp_new();
2717 tcg_gen_shri_tl(t0, rs, 32);
2718 tcg_gen_xor_tl(ra, rs, t0);
2719 tcg_gen_shri_tl(t0, ra, 16);
2720 tcg_gen_xor_tl(ra, ra, t0);
2721 tcg_gen_shri_tl(t0, ra, 8);
2722 tcg_gen_xor_tl(ra, ra, t0);
2723 tcg_gen_andi_tl(ra, ra, 1);
2724 tcg_temp_free(t0);
2725 }
2726 #endif
2727
2728 #if defined(TARGET_PPC64)
2729 /* bpermd */
2730 static void gen_bpermd(DisasContext *ctx)
2731 {
2732 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
2733 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2734 }
2735 #endif
2736
2737 #if defined(TARGET_PPC64)
2738 /* extsw & extsw. */
2739 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2740
2741 /* cntlzd */
2742 static void gen_cntlzd(DisasContext *ctx)
2743 {
2744 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2745 if (unlikely(Rc(ctx->opcode) != 0)) {
2746 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2747 }
2748 }
2749
2750 /* cnttzd */
2751 static void gen_cnttzd(DisasContext *ctx)
2752 {
2753 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2754 if (unlikely(Rc(ctx->opcode) != 0)) {
2755 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2756 }
2757 }
2758
2759 /* darn */
2760 static void gen_darn(DisasContext *ctx)
2761 {
2762 int l = L(ctx->opcode);
2763
2764 if (l > 2) {
2765 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
2766 } else {
2767 gen_icount_io_start(ctx);
2768 if (l == 0) {
2769 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
2770 } else {
2771 /* Return 64-bit random for both CRN and RRN */
2772 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
2773 }
2774 }
2775 }
2776 #endif
2777
2778 /*** Integer rotate ***/
2779
2780 /* rlwimi & rlwimi. */
2781 static void gen_rlwimi(DisasContext *ctx)
2782 {
2783 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2784 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2785 uint32_t sh = SH(ctx->opcode);
2786 uint32_t mb = MB(ctx->opcode);
2787 uint32_t me = ME(ctx->opcode);
2788
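/* When the rotation lines up with the insert field (sh == 31 - me), rlwimi reduces to depositing the low me - mb + 1 bits of rS at bit position sh. */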
2789 if (sh == (31 - me) && mb <= me) {
2790 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2791 } else {
2792 target_ulong mask;
2793 bool mask_in_32b = true;
2794 TCGv t1;
2795
2796 #if defined(TARGET_PPC64)
2797 mb += 32;
2798 me += 32;
2799 #endif
2800 mask = MASK(mb, me);
2801
2802 #if defined(TARGET_PPC64)
2803 if (mask > 0xffffffffu) {
2804 mask_in_32b = false;
2805 }
2806 #endif
2807 t1 = tcg_temp_new();
2808 if (mask_in_32b) {
2809 TCGv_i32 t0 = tcg_temp_new_i32();
2810 tcg_gen_trunc_tl_i32(t0, t_rs);
2811 tcg_gen_rotli_i32(t0, t0, sh);
2812 tcg_gen_extu_i32_tl(t1, t0);
2813 tcg_temp_free_i32(t0);
2814 } else {
2815 #if defined(TARGET_PPC64)
2816 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2817 tcg_gen_rotli_i64(t1, t1, sh);
2818 #else
2819 g_assert_not_reached();
2820 #endif
2821 }
2822
2823 tcg_gen_andi_tl(t1, t1, mask);
2824 tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2825 tcg_gen_or_tl(t_ra, t_ra, t1);
2826 tcg_temp_free(t1);
2827 }
2828 if (unlikely(Rc(ctx->opcode) != 0)) {
2829 gen_set_Rc0(ctx, t_ra);
2830 }
2831 }
2832
2833 /* rlwinm & rlwinm. */
2834 static void gen_rlwinm(DisasContext *ctx)
2835 {
2836 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2837 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2838 int sh = SH(ctx->opcode);
2839 int mb = MB(ctx->opcode);
2840 int me = ME(ctx->opcode);
2841 int len = me - mb + 1;
2842 int rsh = (32 - sh) & 31;
2843
2844 if (sh != 0 && len > 0 && me == (31 - sh)) {
2845 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2846 } else if (me == 31 && rsh + len <= 32) {
2847 tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2848 } else {
2849 target_ulong mask;
2850 bool mask_in_32b = true;
2851 #if defined(TARGET_PPC64)
2852 mb += 32;
2853 me += 32;
2854 #endif
2855 mask = MASK(mb, me);
2856 #if defined(TARGET_PPC64)
2857 if (mask > 0xffffffffu) {
2858 mask_in_32b = false;
2859 }
2860 #endif
2861 if (mask_in_32b) {
2862 if (sh == 0) {
2863 tcg_gen_andi_tl(t_ra, t_rs, mask);
2864 } else {
2865 TCGv_i32 t0 = tcg_temp_new_i32();
2866 tcg_gen_trunc_tl_i32(t0, t_rs);
2867 tcg_gen_rotli_i32(t0, t0, sh);
2868 tcg_gen_andi_i32(t0, t0, mask);
2869 tcg_gen_extu_i32_tl(t_ra, t0);
2870 tcg_temp_free_i32(t0);
2871 }
2872 } else {
2873 #if defined(TARGET_PPC64)
2874 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2875 tcg_gen_rotli_i64(t_ra, t_ra, sh);
2876 tcg_gen_andi_i64(t_ra, t_ra, mask);
2877 #else
2878 g_assert_not_reached();
2879 #endif
2880 }
2881 }
2882 if (unlikely(Rc(ctx->opcode) != 0)) {
2883 gen_set_Rc0(ctx, t_ra);
2884 }
2885 }
2886
2887 /* rlwnm & rlwnm. */
2888 static void gen_rlwnm(DisasContext *ctx)
2889 {
2890 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2891 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2892 TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2893 uint32_t mb = MB(ctx->opcode);
2894 uint32_t me = ME(ctx->opcode);
2895 target_ulong mask;
2896 bool mask_in_32b = true;
2897
2898 #if defined(TARGET_PPC64)
2899 mb += 32;
2900 me += 32;
2901 #endif
2902 mask = MASK(mb, me);
2903
2904 #if defined(TARGET_PPC64)
2905 if (mask > 0xffffffffu) {
2906 mask_in_32b = false;
2907 }
2908 #endif
2909 if (mask_in_32b) {
2910 TCGv_i32 t0 = tcg_temp_new_i32();
2911 TCGv_i32 t1 = tcg_temp_new_i32();
2912 tcg_gen_trunc_tl_i32(t0, t_rb);
2913 tcg_gen_trunc_tl_i32(t1, t_rs);
2914 tcg_gen_andi_i32(t0, t0, 0x1f);
2915 tcg_gen_rotl_i32(t1, t1, t0);
2916 tcg_gen_extu_i32_tl(t_ra, t1);
2917 tcg_temp_free_i32(t0);
2918 tcg_temp_free_i32(t1);
2919 } else {
2920 #if defined(TARGET_PPC64)
2921 TCGv_i64 t0 = tcg_temp_new_i64();
2922 tcg_gen_andi_i64(t0, t_rb, 0x1f);
2923 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2924 tcg_gen_rotl_i64(t_ra, t_ra, t0);
2925 tcg_temp_free_i64(t0);
2926 #else
2927 g_assert_not_reached();
2928 #endif
2929 }
2930
2931 tcg_gen_andi_tl(t_ra, t_ra, mask);
2932
2933 if (unlikely(Rc(ctx->opcode) != 0)) {
2934 gen_set_Rc0(ctx, t_ra);
2935 }
2936 }
2937
2938 #if defined(TARGET_PPC64)
2939 #define GEN_PPC64_R2(name, opc1, opc2) \
2940 static void glue(gen_, name##0)(DisasContext *ctx) \
2941 { \
2942 gen_##name(ctx, 0); \
2943 } \
2944 \
2945 static void glue(gen_, name##1)(DisasContext *ctx) \
2946 { \
2947 gen_##name(ctx, 1); \
2948 }
2949 #define GEN_PPC64_R4(name, opc1, opc2) \
2950 static void glue(gen_, name##0)(DisasContext *ctx) \
2951 { \
2952 gen_##name(ctx, 0, 0); \
2953 } \
2954 \
2955 static void glue(gen_, name##1)(DisasContext *ctx) \
2956 { \
2957 gen_##name(ctx, 0, 1); \
2958 } \
2959 \
2960 static void glue(gen_, name##2)(DisasContext *ctx) \
2961 { \
2962 gen_##name(ctx, 1, 0); \
2963 } \
2964 \
2965 static void glue(gen_, name##3)(DisasContext *ctx) \
2966 { \
2967 gen_##name(ctx, 1, 1); \
2968 }
2969
2970 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2971 {
2972 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2973 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2974 int len = me - mb + 1;
2975 int rsh = (64 - sh) & 63;
2976
2977 if (sh != 0 && len > 0 && me == (63 - sh)) {
2978 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2979 } else if (me == 63 && rsh + len <= 64) {
2980 tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2981 } else {
2982 tcg_gen_rotli_tl(t_ra, t_rs, sh);
2983 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2984 }
2985 if (unlikely(Rc(ctx->opcode) != 0)) {
2986 gen_set_Rc0(ctx, t_ra);
2987 }
2988 }
2989
2990 /* rldicl - rldicl. */
2991 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2992 {
2993 uint32_t sh, mb;
2994
2995 sh = SH(ctx->opcode) | (shn << 5);
2996 mb = MB(ctx->opcode) | (mbn << 5);
2997 gen_rldinm(ctx, mb, 63, sh);
2998 }
2999 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
3000
3001 /* rldicr - rldicr. */
3002 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
3003 {
3004 uint32_t sh, me;
3005
3006 sh = SH(ctx->opcode) | (shn << 5);
3007 me = MB(ctx->opcode) | (men << 5);
3008 gen_rldinm(ctx, 0, me, sh);
3009 }
3010 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
3011
3012 /* rldic - rldic. */
3013 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
3014 {
3015 uint32_t sh, mb;
3016
3017 sh = SH(ctx->opcode) | (shn << 5);
3018 mb = MB(ctx->opcode) | (mbn << 5);
3019 gen_rldinm(ctx, mb, 63 - sh, sh);
3020 }
3021 GEN_PPC64_R4(rldic, 0x1E, 0x04);
3022
3023 static void gen_rldnm(DisasContext *ctx, int mb, int me)
3024 {
3025 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
3026 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
3027 TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
3028 TCGv t0;
3029
3030 t0 = tcg_temp_new();
3031 tcg_gen_andi_tl(t0, t_rb, 0x3f);
3032 tcg_gen_rotl_tl(t_ra, t_rs, t0);
3033 tcg_temp_free(t0);
3034
3035 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
3036 if (unlikely(Rc(ctx->opcode) != 0)) {
3037 gen_set_Rc0(ctx, t_ra);
3038 }
3039 }
3040
3041 /* rldcl - rldcl. */
3042 static inline void gen_rldcl(DisasContext *ctx, int mbn)
3043 {
3044 uint32_t mb;
3045
3046 mb = MB(ctx->opcode) | (mbn << 5);
3047 gen_rldnm(ctx, mb, 63);
3048 }
3049 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
3050
3051 /* rldcr - rldcr. */
3052 static inline void gen_rldcr(DisasContext *ctx, int men)
3053 {
3054 uint32_t me;
3055
3056 me = MB(ctx->opcode) | (men << 5);
3057 gen_rldnm(ctx, 0, me);
3058 }
3059 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
3060
3061 /* rldimi - rldimi. */
3062 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
3063 {
3064 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
3065 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
3066 uint32_t sh = SH(ctx->opcode) | (shn << 5);
3067 uint32_t mb = MB(ctx->opcode) | (mbn << 5);
3068 uint32_t me = 63 - sh;
3069
3070 if (mb <= me) {
3071 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
3072 } else {
3073 target_ulong mask = MASK(mb, me);
3074 TCGv t1 = tcg_temp_new();
3075
3076 tcg_gen_rotli_tl(t1, t_rs, sh);
3077 tcg_gen_andi_tl(t1, t1, mask);
3078 tcg_gen_andi_tl(t_ra, t_ra, ~mask);
3079 tcg_gen_or_tl(t_ra, t_ra, t1);
3080 tcg_temp_free(t1);
3081 }
3082 if (unlikely(Rc(ctx->opcode) != 0)) {
3083 gen_set_Rc0(ctx, t_ra);
3084 }
3085 }
3086 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
3087 #endif
3088
3089 /*** Integer shift ***/
3090
3091 /* slw & slw. */
3092 static void gen_slw(DisasContext *ctx)
3093 {
3094 TCGv t0, t1;
3095
3096 t0 = tcg_temp_new();
3097 /* AND rS with a mask that is 0 when rB >= 0x20 */
3098 #if defined(TARGET_PPC64)
3099 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
3100 tcg_gen_sari_tl(t0, t0, 0x3f);
3101 #else
3102 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
3103 tcg_gen_sari_tl(t0, t0, 0x1f);
3104 #endif
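/*
 * Shifting rB left so that its 0x20 bit becomes the sign bit and then
 * arithmetic-shifting it back yields all ones when the shift count is
 * >= 32, so the andc below clears the source in that case.
 */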
3105 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3106 t1 = tcg_temp_new();
3107 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
3108 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3109 tcg_temp_free(t1);
3110 tcg_temp_free(t0);
3111 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
3112 if (unlikely(Rc(ctx->opcode) != 0)) {
3113 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3114 }
3115 }
3116
3117 /* sraw & sraw. */
3118 static void gen_sraw(DisasContext *ctx)
3119 {
3120 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
3121 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3122 if (unlikely(Rc(ctx->opcode) != 0)) {
3123 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3124 }
3125 }
3126
3127 /* srawi & srawi. */
3128 static void gen_srawi(DisasContext *ctx)
3129 {
3130 int sh = SH(ctx->opcode);
3131 TCGv dst = cpu_gpr[rA(ctx->opcode)];
3132 TCGv src = cpu_gpr[rS(ctx->opcode)];
3133 if (sh == 0) {
3134 tcg_gen_ext32s_tl(dst, src);
3135 tcg_gen_movi_tl(cpu_ca, 0);
3136 if (is_isa300(ctx)) {
3137 tcg_gen_movi_tl(cpu_ca32, 0);
3138 }
3139 } else {
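/* CA (and CA32 on ISA v3.0) is set iff the operand is negative and at least one 1 bit is shifted out of the low 32 bits. */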
3140 TCGv t0;
3141 tcg_gen_ext32s_tl(dst, src);
3142 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
3143 t0 = tcg_temp_new();
3144 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
3145 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3146 tcg_temp_free(t0);
3147 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3148 if (is_isa300(ctx)) {
3149 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3150 }
3151 tcg_gen_sari_tl(dst, dst, sh);
3152 }
3153 if (unlikely(Rc(ctx->opcode) != 0)) {
3154 gen_set_Rc0(ctx, dst);
3155 }
3156 }
3157
3158 /* srw & srw. */
3159 static void gen_srw(DisasContext *ctx)
3160 {
3161 TCGv t0, t1;
3162
3163 t0 = tcg_temp_new();
3164 /* AND rS with a mask that is 0 when rB >= 0x20 */
3165 #if defined(TARGET_PPC64)
3166 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
3167 tcg_gen_sari_tl(t0, t0, 0x3f);
3168 #else
3169 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
3170 tcg_gen_sari_tl(t0, t0, 0x1f);
3171 #endif
3172 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3173 tcg_gen_ext32u_tl(t0, t0);
3174 t1 = tcg_temp_new();
3175 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
3176 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3177 tcg_temp_free(t1);
3178 tcg_temp_free(t0);
3179 if (unlikely(Rc(ctx->opcode) != 0)) {
3180 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3181 }
3182 }
3183
3184 #if defined(TARGET_PPC64)
3185 /* sld & sld. */
3186 static void gen_sld(DisasContext *ctx)
3187 {
3188 TCGv t0, t1;
3189
3190 t0 = tcg_temp_new();
3191 /* AND rS with a mask that is 0 when rB >= 0x40 */
3192 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3193 tcg_gen_sari_tl(t0, t0, 0x3f);
3194 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3195 t1 = tcg_temp_new();
3196 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3197 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3198 tcg_temp_free(t1);
3199 tcg_temp_free(t0);
3200 if (unlikely(Rc(ctx->opcode) != 0)) {
3201 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3202 }
3203 }
3204
3205 /* srad & srad. */
3206 static void gen_srad(DisasContext *ctx)
3207 {
3208 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
3209 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3210 if (unlikely(Rc(ctx->opcode) != 0)) {
3211 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3212 }
3213 }
3214 /* sradi & sradi. */
3215 static inline void gen_sradi(DisasContext *ctx, int n)
3216 {
3217 int sh = SH(ctx->opcode) + (n << 5);
3218 TCGv dst = cpu_gpr[rA(ctx->opcode)];
3219 TCGv src = cpu_gpr[rS(ctx->opcode)];
3220 if (sh == 0) {
3221 tcg_gen_mov_tl(dst, src);
3222 tcg_gen_movi_tl(cpu_ca, 0);
3223 if (is_isa300(ctx)) {
3224 tcg_gen_movi_tl(cpu_ca32, 0);
3225 }
3226 } else {
3227 TCGv t0;
3228 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
3229 t0 = tcg_temp_new();
3230 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
3231 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3232 tcg_temp_free(t0);
3233 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3234 if (is_isa300(ctx)) {
3235 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3236 }
3237 tcg_gen_sari_tl(dst, src, sh);
3238 }
3239 if (unlikely(Rc(ctx->opcode) != 0)) {
3240 gen_set_Rc0(ctx, dst);
3241 }
3242 }
3243
3244 static void gen_sradi0(DisasContext *ctx)
3245 {
3246 gen_sradi(ctx, 0);
3247 }
3248
3249 static void gen_sradi1(DisasContext *ctx)
3250 {
3251 gen_sradi(ctx, 1);
3252 }
3253
3254 /* extswsli & extswsli. */
3255 static inline void gen_extswsli(DisasContext *ctx, int n)
3256 {
3257 int sh = SH(ctx->opcode) + (n << 5);
3258 TCGv dst = cpu_gpr[rA(ctx->opcode)];
3259 TCGv src = cpu_gpr[rS(ctx->opcode)];
3260
3261 tcg_gen_ext32s_tl(dst, src);
3262 tcg_gen_shli_tl(dst, dst, sh);
3263 if (unlikely(Rc(ctx->opcode) != 0)) {
3264 gen_set_Rc0(ctx, dst);
3265 }
3266 }
3267
3268 static void gen_extswsli0(DisasContext *ctx)
3269 {
3270 gen_extswsli(ctx, 0);
3271 }
3272
3273 static void gen_extswsli1(DisasContext *ctx)
3274 {
3275 gen_extswsli(ctx, 1);
3276 }
3277
3278 /* srd & srd. */
3279 static void gen_srd(DisasContext *ctx)
3280 {
3281 TCGv t0, t1;
3282
3283 t0 = tcg_temp_new();
3284 /* AND rS with a mask that is 0 when rB >= 0x40 */
3285 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3286 tcg_gen_sari_tl(t0, t0, 0x3f);
3287 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3288 t1 = tcg_temp_new();
3289 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3290 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3291 tcg_temp_free(t1);
3292 tcg_temp_free(t0);
3293 if (unlikely(Rc(ctx->opcode) != 0)) {
3294 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3295 }
3296 }
3297 #endif
3298
3299 /*** Addressing modes ***/
3300 /* Register indirect with immediate index: EA = (rA|0) + SIMM */
3301 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
3302 target_long maskl)
3303 {
3304 target_long simm = SIMM(ctx->opcode);
3305
3306 simm &= ~maskl;
3307 if (rA(ctx->opcode) == 0) {
3308 if (NARROW_MODE(ctx)) {
3309 simm = (uint32_t)simm;
3310 }
3311 tcg_gen_movi_tl(EA, simm);
3312 } else if (likely(simm != 0)) {
3313 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
3314 if (NARROW_MODE(ctx)) {
3315 tcg_gen_ext32u_tl(EA, EA);
3316 }
3317 } else {
3318 if (NARROW_MODE(ctx)) {
3319 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3320 } else {
3321 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3322 }
3323 }
3324 }
3325
3326 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3327 {
3328 if (rA(ctx->opcode) == 0) {
3329 if (NARROW_MODE(ctx)) {
3330 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3331 } else {
3332 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3333 }
3334 } else {
3335 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3336 if (NARROW_MODE(ctx)) {
3337 tcg_gen_ext32u_tl(EA, EA);
3338 }
3339 }
3340 }
3341
3342 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3343 {
3344 if (rA(ctx->opcode) == 0) {
3345 tcg_gen_movi_tl(EA, 0);
3346 } else if (NARROW_MODE(ctx)) {
3347 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3348 } else {
3349 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3350 }
3351 }
3352
3353 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
3354 target_long val)
3355 {
3356 tcg_gen_addi_tl(ret, arg1, val);
3357 if (NARROW_MODE(ctx)) {
3358 tcg_gen_ext32u_tl(ret, ret);
3359 }
3360 }
3361
3362 static inline void gen_align_no_le(DisasContext *ctx)
3363 {
3364 gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
3365 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
3366 }
3367
3368 /*** Integer load ***/
3369 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
3370 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
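/* DEF_MEMOP uses the guest's current byte order; BSWAP_MEMOP uses the opposite one, for the byte-reversed load/store instructions (lhbrx, lwbrx, ...). */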
3371
3372 #define GEN_QEMU_LOAD_TL(ldop, op) \
3373 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \
3374 TCGv val, \
3375 TCGv addr) \
3376 { \
3377 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \
3378 }
3379
3380 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB))
3381 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
3382 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
3383 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
3384 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
3385
3386 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
3387 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
3388
3389 #define GEN_QEMU_LOAD_64(ldop, op) \
3390 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \
3391 TCGv_i64 val, \
3392 TCGv addr) \
3393 { \
3394 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \
3395 }
3396
3397 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB))
3398 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
3399 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
3400 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
3401 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q))
3402
3403 #if defined(TARGET_PPC64)
3404 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q))
3405 #endif
3406
3407 #define GEN_QEMU_STORE_TL(stop, op) \
3408 static void glue(gen_qemu_, stop)(DisasContext *ctx, \
3409 TCGv val, \
3410 TCGv addr) \
3411 { \
3412 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \
3413 }
3414
3415 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB))
3416 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
3417 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
3418
3419 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
3420 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
3421
3422 #define GEN_QEMU_STORE_64(stop, op) \
3423 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \
3424 TCGv_i64 val, \
3425 TCGv addr) \
3426 { \
3427 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \
3428 }
3429
3430 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB))
3431 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
3432 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
3433 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q))
3434
3435 #if defined(TARGET_PPC64)
3436 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q))
3437 #endif
3438
3439 #define GEN_LD(name, ldop, opc, type) \
3440 static void glue(gen_, name)(DisasContext *ctx) \
3441 { \
3442 TCGv EA; \
3443 gen_set_access_type(ctx, ACCESS_INT); \
3444 EA = tcg_temp_new(); \
3445 gen_addr_imm_index(ctx, EA, 0); \
3446 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3447 tcg_temp_free(EA); \
3448 }
3449
3450 #define GEN_LDU(name, ldop, opc, type) \
3451 static void glue(gen_, name##u)(DisasContext *ctx) \
3452 { \
3453 TCGv EA; \
3454 if (unlikely(rA(ctx->opcode) == 0 || \
3455 rA(ctx->opcode) == rD(ctx->opcode))) { \
3456 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3457 return; \
3458 } \
3459 gen_set_access_type(ctx, ACCESS_INT); \
3460 EA = tcg_temp_new(); \
3461 if (type == PPC_64B) \
3462 gen_addr_imm_index(ctx, EA, 0x03); \
3463 else \
3464 gen_addr_imm_index(ctx, EA, 0); \
3465 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3466 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3467 tcg_temp_free(EA); \
3468 }
3469
3470 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
3471 static void glue(gen_, name##ux)(DisasContext *ctx) \
3472 { \
3473 TCGv EA; \
3474 if (unlikely(rA(ctx->opcode) == 0 || \
3475 rA(ctx->opcode) == rD(ctx->opcode))) { \
3476 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3477 return; \
3478 } \
3479 gen_set_access_type(ctx, ACCESS_INT); \
3480 EA = tcg_temp_new(); \
3481 gen_addr_reg_index(ctx, EA); \
3482 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3483 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3484 tcg_temp_free(EA); \
3485 }
3486
3487 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
3488 static void glue(gen_, name##x)(DisasContext *ctx) \
3489 { \
3490 TCGv EA; \
3491 chk; \
3492 gen_set_access_type(ctx, ACCESS_INT); \
3493 EA = tcg_temp_new(); \
3494 gen_addr_reg_index(ctx, EA); \
3495 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3496 tcg_temp_free(EA); \
3497 }
3498
3499 #define GEN_LDX(name, ldop, opc2, opc3, type) \
3500 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3501
3502 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \
3503 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3504
3505 #define GEN_LDS(name, ldop, op, type) \
3506 GEN_LD(name, ldop, op | 0x20, type); \
3507 GEN_LDU(name, ldop, op | 0x21, type); \
3508 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
3509 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
3510
3511 /* lbz lbzu lbzux lbzx */
3512 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
3513 /* lha lhau lhaux lhax */
3514 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
3515 /* lhz lhzu lhzux lhzx */
3516 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
3517 /* lwz lwzu lwzux lwzx */
3518 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
3519
3520 #define GEN_LDEPX(name, ldop, opc2, opc3) \
3521 static void glue(gen_, name##epx)(DisasContext *ctx) \
3522 { \
3523 TCGv EA; \
3524 CHK_SV; \
3525 gen_set_access_type(ctx, ACCESS_INT); \
3526 EA = tcg_temp_new(); \
3527 gen_addr_reg_index(ctx, EA); \
3528 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
3529 tcg_temp_free(EA); \
3530 }
3531
3532 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
3533 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
3534 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
3535 #if defined(TARGET_PPC64)
3536 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00)
3537 #endif
3538
3539 #if defined(TARGET_PPC64)
3540 /* lwaux */
3541 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
3542 /* lwax */
3543 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
3544 /* ldux */
3545 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B);
3546 /* ldx */
3547 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B);
3548
3549 /* CI load/store variants */
3550 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
3551 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
3552 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
3553 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
3554
3555 static void gen_ld(DisasContext *ctx)
3556 {
3557 TCGv EA;
3558 if (Rc(ctx->opcode)) {
3559 if (unlikely(rA(ctx->opcode) == 0 ||
3560 rA(ctx->opcode) == rD(ctx->opcode))) {
3561 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3562 return;
3563 }
3564 }
3565 gen_set_access_type(ctx, ACCESS_INT);
3566 EA = tcg_temp_new();
3567 gen_addr_imm_index(ctx, EA, 0x03);
3568 if (ctx->opcode & 0x02) {
3569 /* lwa (lwau is undefined) */
3570 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
3571 } else {
3572 /* ld - ldu */
3573 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
3574 }
3575 if (Rc(ctx->opcode)) {
3576 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
3577 }
3578 tcg_temp_free(EA);
3579 }
3580
3581 /* lq */
3582 static void gen_lq(DisasContext *ctx)
3583 {
3584 int ra, rd;
3585 TCGv EA, hi, lo;
3586
3587 /* lq is a legal user mode instruction starting in ISA 2.07 */
3588 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3589 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3590
3591 if (!legal_in_user_mode && ctx->pr) {
3592 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3593 return;
3594 }
3595
3596 if (!le_is_supported && ctx->le_mode) {
3597 gen_align_no_le(ctx);
3598 return;
3599 }
3600 ra = rA(ctx->opcode);
3601 rd = rD(ctx->opcode);
3602 if (unlikely((rd & 1) || rd == ra)) {
3603 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3604 return;
3605 }
3606
3607 gen_set_access_type(ctx, ACCESS_INT);
3608 EA = tcg_temp_new();
3609 gen_addr_imm_index(ctx, EA, 0x0F);
3610
3611 /* Note that the low part is always in RD+1, even in LE mode. */
3612 lo = cpu_gpr[rd + 1];
3613 hi = cpu_gpr[rd];
3614
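/* The doubleword at the lower address is the high half in BE mode and the low half in LE mode; the atomic helpers and the split loads below both follow that layout. */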
3615 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3616 if (HAVE_ATOMIC128) {
3617 TCGv_i32 oi = tcg_temp_new_i32();
3618 if (ctx->le_mode) {
3619 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx));
3620 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
3621 } else {
3622 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx));
3623 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
3624 }
3625 tcg_temp_free_i32(oi);
3626 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
3627 } else {
3628 /* Restart with exclusive lock. */
3629 gen_helper_exit_atomic(cpu_env);
3630 ctx->base.is_jmp = DISAS_NORETURN;
3631 }
3632 } else if (ctx->le_mode) {
3633 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ);
3634 gen_addr_add(ctx, EA, EA, 8);
3635 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ);
3636 } else {
3637 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ);
3638 gen_addr_add(ctx, EA, EA, 8);
3639 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ);
3640 }
3641 tcg_temp_free(EA);
3642 }
3643 #endif
3644
3645 /*** Integer store ***/
3646 #define GEN_ST(name, stop, opc, type) \
3647 static void glue(gen_, name)(DisasContext *ctx) \
3648 { \
3649 TCGv EA; \
3650 gen_set_access_type(ctx, ACCESS_INT); \
3651 EA = tcg_temp_new(); \
3652 gen_addr_imm_index(ctx, EA, 0); \
3653 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3654 tcg_temp_free(EA); \
3655 }
3656
3657 #define GEN_STU(name, stop, opc, type) \
3658 static void glue(gen_, stop##u)(DisasContext *ctx) \
3659 { \
3660 TCGv EA; \
3661 if (unlikely(rA(ctx->opcode) == 0)) { \
3662 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3663 return; \
3664 } \
3665 gen_set_access_type(ctx, ACCESS_INT); \
3666 EA = tcg_temp_new(); \
3667 if (type == PPC_64B) \
3668 gen_addr_imm_index(ctx, EA, 0x03); \
3669 else \
3670 gen_addr_imm_index(ctx, EA, 0); \
3671 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3672 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3673 tcg_temp_free(EA); \
3674 }
3675
3676 #define GEN_STUX(name, stop, opc2, opc3, type) \
3677 static void glue(gen_, name##ux)(DisasContext *ctx) \
3678 { \
3679 TCGv EA; \
3680 if (unlikely(rA(ctx->opcode) == 0)) { \
3681 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3682 return; \
3683 } \
3684 gen_set_access_type(ctx, ACCESS_INT); \
3685 EA = tcg_temp_new(); \
3686 gen_addr_reg_index(ctx, EA); \
3687 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3688 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3689 tcg_temp_free(EA); \
3690 }
3691
3692 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
3693 static void glue(gen_, name##x)(DisasContext *ctx) \
3694 { \
3695 TCGv EA; \
3696 chk; \
3697 gen_set_access_type(ctx, ACCESS_INT); \
3698 EA = tcg_temp_new(); \
3699 gen_addr_reg_index(ctx, EA); \
3700 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3701 tcg_temp_free(EA); \
3702 }
3703 #define GEN_STX(name, stop, opc2, opc3, type) \
3704 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3705
3706 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \
3707 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3708
3709 #define GEN_STS(name, stop, op, type) \
3710 GEN_ST(name, stop, op | 0x20, type); \
3711 GEN_STU(name, stop, op | 0x21, type); \
3712 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
3713 GEN_STX(name, stop, 0x17, op | 0x00, type)
3714
3715 /* stb stbu stbux stbx */
3716 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
3717 /* sth sthu sthux sthx */
3718 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
3719 /* stw stwu stwux stwx */
3720 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
3721
3722 #define GEN_STEPX(name, stop, opc2, opc3) \
3723 static void glue(gen_, name##epx)(DisasContext *ctx) \
3724 { \
3725 TCGv EA; \
3726 CHK_SV; \
3727 gen_set_access_type(ctx, ACCESS_INT); \
3728 EA = tcg_temp_new(); \
3729 gen_addr_reg_index(ctx, EA); \
3730 tcg_gen_qemu_st_tl( \
3731 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \
3732 tcg_temp_free(EA); \
3733 }
3734
3735 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3736 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3737 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3738 #if defined(TARGET_PPC64)
3739 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04)
3740 #endif
3741
3742 #if defined(TARGET_PPC64)
3743 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B);
3744 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B);
3745 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
3746 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
3747 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
3748 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
3749
3750 static void gen_std(DisasContext *ctx)
3751 {
3752 int rs;
3753 TCGv EA;
3754
3755 rs = rS(ctx->opcode);
3756 if ((ctx->opcode & 0x3) == 0x2) { /* stq */
3757 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3758 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3759 TCGv hi, lo;
3760
3761 if (!(ctx->insns_flags & PPC_64BX)) {
3762 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3763 }
3764
3765 if (!legal_in_user_mode && ctx->pr) {
3766 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3767 return;
3768 }
3769
3770 if (!le_is_supported && ctx->le_mode) {
3771 gen_align_no_le(ctx);
3772 return;
3773 }
3774
3775 if (unlikely(rs & 1)) {
3776 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3777 return;
3778 }
3779 gen_set_access_type(ctx, ACCESS_INT);
3780 EA = tcg_temp_new();
3781 gen_addr_imm_index(ctx, EA, 0x03);
3782
3783 /* Note that the low part is always in RS+1, even in LE mode. */
3784 lo = cpu_gpr[rs + 1];
3785 hi = cpu_gpr[rs];
3786
3787 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3788 if (HAVE_ATOMIC128) {
3789 TCGv_i32 oi = tcg_temp_new_i32();
3790 if (ctx->le_mode) {
3791 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx));
3792 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi);
3793 } else {
3794 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx));
3795 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi);
3796 }
3797 tcg_temp_free_i32(oi);
3798 } else {
3799 /* Restart with exclusive lock. */
3800 gen_helper_exit_atomic(cpu_env);
3801 ctx->base.is_jmp = DISAS_NORETURN;
3802 }
3803 } else if (ctx->le_mode) {
3804 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ);
3805 gen_addr_add(ctx, EA, EA, 8);
3806 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ);
3807 } else {
3808 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ);
3809 gen_addr_add(ctx, EA, EA, 8);
3810 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ);
3811 }
3812 tcg_temp_free(EA);
3813 } else {
3814 /* std / stdu */
3815 if (Rc(ctx->opcode)) {
3816 if (unlikely(rA(ctx->opcode) == 0)) {
3817 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3818 return;
3819 }
3820 }
3821 gen_set_access_type(ctx, ACCESS_INT);
3822 EA = tcg_temp_new();
3823 gen_addr_imm_index(ctx, EA, 0x03);
3824 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
3825 if (Rc(ctx->opcode)) {
3826 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
3827 }
3828 tcg_temp_free(EA);
3829 }
3830 }
3831 #endif
3832 /*** Integer load and store with byte reverse ***/
3833
3834 /* lhbrx */
3835 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3836
3837 /* lwbrx */
3838 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3839
3840 #if defined(TARGET_PPC64)
3841 /* ldbrx */
3842 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
3843 /* stdbrx */
3844 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
3845 #endif /* TARGET_PPC64 */
3846
3847 /* sthbrx */
3848 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3849 /* stwbrx */
3850 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3851
3852 /*** Integer load and store multiple ***/
3853
3854 /* lmw */
3855 static void gen_lmw(DisasContext *ctx)
3856 {
3857 TCGv t0;
3858 TCGv_i32 t1;
3859
3860 if (ctx->le_mode) {
3861 gen_align_no_le(ctx);
3862 return;
3863 }
3864 gen_set_access_type(ctx, ACCESS_INT);
3865 t0 = tcg_temp_new();
3866 t1 = tcg_const_i32(rD(ctx->opcode));
3867 gen_addr_imm_index(ctx, t0, 0);
3868 gen_helper_lmw(cpu_env, t0, t1);
3869 tcg_temp_free(t0);
3870 tcg_temp_free_i32(t1);
3871 }
3872
3873 /* stmw */
3874 static void gen_stmw(DisasContext *ctx)
3875 {
3876 TCGv t0;
3877 TCGv_i32 t1;
3878
3879 if (ctx->le_mode) {
3880 gen_align_no_le(ctx);
3881 return;
3882 }
3883 gen_set_access_type(ctx, ACCESS_INT);
3884 t0 = tcg_temp_new();
3885 t1 = tcg_const_i32(rS(ctx->opcode));
3886 gen_addr_imm_index(ctx, t0, 0);
3887 gen_helper_stmw(cpu_env, t0, t1);
3888 tcg_temp_free(t0);
3889 tcg_temp_free_i32(t1);
3890 }
3891
3892 /*** Integer load and store strings ***/
3893
3894 /* lswi */
3895 /*
3896 * PowerPC32 specification says we must generate an exception if rA is
3897 * in the range of registers to be loaded. On the other hand, IBM says
3898 * this is valid, but rA won't be loaded. For now, I'll follow the
3899 * spec...
3900 */
3901 static void gen_lswi(DisasContext *ctx)
3902 {
3903 TCGv t0;
3904 TCGv_i32 t1, t2;
3905 int nb = NB(ctx->opcode);
3906 int start = rD(ctx->opcode);
3907 int ra = rA(ctx->opcode);
3908 int nr;
3909
3910 if (ctx->le_mode) {
3911 gen_align_no_le(ctx);
3912 return;
3913 }
3914 if (nb == 0) {
3915 nb = 32;
3916 }
3917 nr = DIV_ROUND_UP(nb, 4);
3918 if (unlikely(lsw_reg_in_range(start, nr, ra))) {
3919 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3920 return;
3921 }
3922 gen_set_access_type(ctx, ACCESS_INT);
3923 t0 = tcg_temp_new();
3924 gen_addr_register(ctx, t0);
3925 t1 = tcg_const_i32(nb);
3926 t2 = tcg_const_i32(start);
3927 gen_helper_lsw(cpu_env, t0, t1, t2);
3928 tcg_temp_free(t0);
3929 tcg_temp_free_i32(t1);
3930 tcg_temp_free_i32(t2);
3931 }
3932
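/*
 * For the indexed string forms (lswx/stswx) the byte count is not an
 * immediate field but comes from the string-count field in the low
 * 7 bits of XER, as the explicit mask in gen_stswx() below shows.
 */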
3933 /* lswx */
3934 static void gen_lswx(DisasContext *ctx)
3935 {
3936 TCGv t0;
3937 TCGv_i32 t1, t2, t3;
3938
3939 if (ctx->le_mode) {
3940 gen_align_no_le(ctx);
3941 return;
3942 }
3943 gen_set_access_type(ctx, ACCESS_INT);
3944 t0 = tcg_temp_new();
3945 gen_addr_reg_index(ctx, t0);
3946 t1 = tcg_const_i32(rD(ctx->opcode));
3947 t2 = tcg_const_i32(rA(ctx->opcode));
3948 t3 = tcg_const_i32(rB(ctx->opcode));
3949 gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3950 tcg_temp_free(t0);
3951 tcg_temp_free_i32(t1);
3952 tcg_temp_free_i32(t2);
3953 tcg_temp_free_i32(t3);
3954 }
3955
3956 /* stswi */
3957 static void gen_stswi(DisasContext *ctx)
3958 {
3959 TCGv t0;
3960 TCGv_i32 t1, t2;
3961 int nb = NB(ctx->opcode);
3962
3963 if (ctx->le_mode) {
3964 gen_align_no_le(ctx);
3965 return;
3966 }
3967 gen_set_access_type(ctx, ACCESS_INT);
3968 t0 = tcg_temp_new();
3969 gen_addr_register(ctx, t0);
3970 if (nb == 0) {
3971 nb = 32;
3972 }
3973 t1 = tcg_const_i32(nb);
3974 t2 = tcg_const_i32(rS(ctx->opcode));
3975 gen_helper_stsw(cpu_env, t0, t1, t2);
3976 tcg_temp_free(t0);
3977 tcg_temp_free_i32(t1);
3978 tcg_temp_free_i32(t2);
3979 }
3980
3981 /* stswx */
3982 static void gen_stswx(DisasContext *ctx)
3983 {
3984 TCGv t0;
3985 TCGv_i32 t1, t2;
3986
3987 if (ctx->le_mode) {
3988 gen_align_no_le(ctx);
3989 return;
3990 }
3991 gen_set_access_type(ctx, ACCESS_INT);
3992 t0 = tcg_temp_new();
3993 gen_addr_reg_index(ctx, t0);
3994 t1 = tcg_temp_new_i32();
3995 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3996 tcg_gen_andi_i32(t1, t1, 0x7F);
3997 t2 = tcg_const_i32(rS(ctx->opcode));
3998 gen_helper_stsw(cpu_env, t0, t1, t2);
3999 tcg_temp_free(t0);
4000 tcg_temp_free_i32(t1);
4001 tcg_temp_free_i32(t2);
4002 }
4003
4004 /*** Memory synchronisation ***/
4005 /* eieio */
4006 static void gen_eieio(DisasContext *ctx)
4007 {
4008 TCGBar bar = TCG_MO_LD_ST;
4009
4010 /*
4011 * POWER9 has an eieio instruction variant using bit 6 as a hint to
4012 * tell the CPU it is a store-forwarding barrier.
4013 */
4014 if (ctx->opcode & 0x2000000) {
4015 /*
4016 * ISA says that "Reserved fields in instructions are ignored
4017 * by the processor". So ignore bit 6 on non-POWER9 CPUs, but
4018 * since this is not a form software should be using,
4019 * complain to the user.
4020 */
4021 if (!(ctx->insns_flags2 & PPC2_ISA300)) {
4022 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
4023 TARGET_FMT_lx "\n", ctx->cia);
4024 } else {
4025 bar = TCG_MO_ST_LD;
4026 }
4027 }
4028
4029 tcg_gen_mb(bar | TCG_BAR_SC);
4030 }
4031
4032 #if !defined(CONFIG_USER_ONLY)
4033 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
4034 {
4035 TCGv_i32 t;
4036 TCGLabel *l;
4037
4038 if (!ctx->lazy_tlb_flush) {
4039 return;
4040 }
4041 l = gen_new_label();
4042 t = tcg_temp_new_i32();
4043 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
4044 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
4045 if (global) {
4046 gen_helper_check_tlb_flush_global(cpu_env);
4047 } else {
4048 gen_helper_check_tlb_flush_local(cpu_env);
4049 }
4050 gen_set_label(l);
4051 tcg_temp_free_i32(t);
4052 }
4053 #else
4054 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
4055 #endif
4056
4057 /* isync */
4058 static void gen_isync(DisasContext *ctx)
4059 {
4060 /*
4061 * We need to check for a pending TLB flush. This can only happen in
4062 * kernel mode, so check MSR_PR.
4063 */
4064 if (!ctx->pr) {
4065 gen_check_tlb_flush(ctx, false);
4066 }
4067 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
4068 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4069 }
4070
4071 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE))
4072
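/*
 * Load-and-reserve (lbarx/lharx/lwarx/ldarx): the effective address is
 * remembered in cpu_reserve and the loaded value in cpu_reserve_val so
 * that a later store-conditional can check whether the reservation is
 * still intact.
 */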
4073 static void gen_load_locked(DisasContext *ctx, MemOp memop)
4074 {
4075 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
4076 TCGv t0 = tcg_temp_new();
4077
4078 gen_set_access_type(ctx, ACCESS_RES);
4079 gen_addr_reg_index(ctx, t0);
4080 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
4081 tcg_gen_mov_tl(cpu_reserve, t0);
4082 tcg_gen_mov_tl(cpu_reserve_val, gpr);
4083 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
4084 tcg_temp_free(t0);
4085 }
4086
4087 #define LARX(name, memop) \
4088 static void gen_##name(DisasContext *ctx) \
4089 { \
4090 gen_load_locked(ctx, memop); \
4091 }
4092
4093 /* lbarx lharx lwarx */
4094 LARX(lbarx, DEF_MEMOP(MO_UB))
4095 LARX(lharx, DEF_MEMOP(MO_UW))
4096 LARX(lwarx, DEF_MEMOP(MO_UL))
4097
4098 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
4099 TCGv EA, TCGCond cond, int addend)
4100 {
4101 TCGv t = tcg_temp_new();
4102 TCGv t2 = tcg_temp_new();
4103 TCGv u = tcg_temp_new();
4104
4105 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
4106 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
4107 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
4108 tcg_gen_addi_tl(u, t, addend);
4109
4110 /* E.g. for fetch and increment bounded... */
4111 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
4112 tcg_gen_movcond_tl(cond, u, t, t2, u, t);
4113 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
4114
4115 /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
4116 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
4117 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);
4118
4119 tcg_temp_free(t);
4120 tcg_temp_free(t2);
4121 tcg_temp_free(u);
4122 }
4123
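/*
 * lwat/ldat: the FC field selects the atomic memory operation.  RT
 * receives the old memory value, RT+1 supplies the operand, and the
 * compare-and-swap-not-equal form additionally uses RT+2.
 */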
4124 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
4125 {
4126 uint32_t gpr_FC = FC(ctx->opcode);
4127 TCGv EA = tcg_temp_new();
4128 int rt = rD(ctx->opcode);
4129 bool need_serial;
4130 TCGv src, dst;
4131
4132 gen_addr_register(ctx, EA);
4133 dst = cpu_gpr[rt];
4134 src = cpu_gpr[(rt + 1) & 31];
4135
4136 need_serial = false;
4137 memop |= MO_ALIGN;
4138 switch (gpr_FC) {
4139 case 0: /* Fetch and add */
4140 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
4141 break;
4142 case 1: /* Fetch and xor */
4143 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
4144 break;
4145 case 2: /* Fetch and or */
4146 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
4147 break;
4148 case 3: /* Fetch and 'and' */
4149 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
4150 break;
4151 case 4: /* Fetch and max unsigned */
4152 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
4153 break;
4154 case 5: /* Fetch and max signed */
4155 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
4156 break;
4157 case 6: /* Fetch and min unsigned */
4158 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
4159 break;
4160 case 7: /* Fetch and min signed */
4161 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
4162 break;
4163 case 8: /* Swap */
4164 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
4165 break;
4166
4167 case 16: /* Compare and swap not equal */
4168 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4169 need_serial = true;
4170 } else {
4171 TCGv t0 = tcg_temp_new();
4172 TCGv t1 = tcg_temp_new();
4173
4174 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
4175 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
4176 tcg_gen_mov_tl(t1, src);
4177 } else {
4178 tcg_gen_ext32u_tl(t1, src);
4179 }
4180 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
4181 cpu_gpr[(rt + 2) & 31], t0);
4182 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
4183 tcg_gen_mov_tl(dst, t0);
4184
4185 tcg_temp_free(t0);
4186 tcg_temp_free(t1);
4187 }
4188 break;
4189
4190 case 24: /* Fetch and increment bounded */
4191 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4192 need_serial = true;
4193 } else {
4194 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
4195 }
4196 break;
4197 case 25: /* Fetch and increment equal */
4198 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4199 need_serial = true;
4200 } else {
4201 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
4202 }
4203 break;
4204 case 28: /* Fetch and decrement bounded */
4205 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4206 need_serial = true;
4207 } else {
4208 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
4209 }
4210 break;
4211
4212 default:
4213 /* invoke data storage error handler */
4214 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
4215 }
4216 tcg_temp_free(EA);
4217
4218 if (need_serial) {
4219 /* Restart with exclusive lock. */
4220 gen_helper_exit_atomic(cpu_env);
4221 ctx->base.is_jmp = DISAS_NORETURN;
4222 }
4223 }
4224
4225 static void gen_lwat(DisasContext *ctx)
4226 {
4227 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
4228 }
4229
4230 #ifdef TARGET_PPC64
4231 static void gen_ldat(DisasContext *ctx)
4232 {
4233 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q));
4234 }
4235 #endif
4236
4237 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
4238 {
4239 uint32_t gpr_FC = FC(ctx->opcode);
4240 TCGv EA = tcg_temp_new();
4241 TCGv src, discard;
4242
4243 gen_addr_register(ctx, EA);
4244 src = cpu_gpr[rD(ctx->opcode)];
4245 discard = tcg_temp_new();
4246
4247 memop |= MO_ALIGN;
4248 switch (gpr_FC) {
4249 case 0: /* add and Store */
4250 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4251 break;
4252 case 1: /* xor and Store */
4253 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4254 break;
4255 case 2: /* Or and Store */
4256 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4257 break;
4258 case 3: /* 'and' and Store */
4259 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4260 break;
4261 case 4: /* Store max unsigned */
4262 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4263 break;
4264 case 5: /* Store max signed */
4265 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4266 break;
4267 case 6: /* Store min unsigned */
4268 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4269 break;
4270 case 7: /* Store min signed */
4271 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4272 break;
4273 case 24: /* Store twin */
4274 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4275 /* Restart with exclusive lock. */
4276 gen_helper_exit_atomic(cpu_env);
4277 ctx->base.is_jmp = DISAS_NORETURN;
4278 } else {
4279 TCGv t = tcg_temp_new();
4280 TCGv t2 = tcg_temp_new();
4281 TCGv s = tcg_temp_new();
4282 TCGv s2 = tcg_temp_new();
4283 TCGv ea_plus_s = tcg_temp_new();
4284
4285 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
4286 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
4287 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
4288 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
4289 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
4290 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
4291 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
4292
4293 tcg_temp_free(ea_plus_s);
4294 tcg_temp_free(s2);
4295 tcg_temp_free(s);
4296 tcg_temp_free(t2);
4297 tcg_temp_free(t);
4298 }
4299 break;
4300 default:
4301 /* invoke data storage error handler */
4302 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
4303 }
4304 tcg_temp_free(discard);
4305 tcg_temp_free(EA);
4306 }
4307
4308 static void gen_stwat(DisasContext *ctx)
4309 {
4310 gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
4311 }
4312
4313 #ifdef TARGET_PPC64
4314 static void gen_stdat(DisasContext *ctx)
4315 {
4316 gen_st_atomic(ctx, DEF_MEMOP(MO_Q));
4317 }
4318 #endif
4319
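/*
 * Store-conditional: in this implementation the store succeeds only if
 * the effective address matches cpu_reserve and the location still holds
 * cpu_reserve_val (checked with an atomic cmpxchg).  CR0 is then set to
 * LT=0, GT=0, EQ=success, SO=XER[SO].
 */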
4320 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
4321 {
4322 TCGLabel *l1 = gen_new_label();
4323 TCGLabel *l2 = gen_new_label();
4324 TCGv t0 = tcg_temp_new();
4325 int reg = rS(ctx->opcode);
4326
4327 gen_set_access_type(ctx, ACCESS_RES);
4328 gen_addr_reg_index(ctx, t0);
4329 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
4330 tcg_temp_free(t0);
4331
4332 t0 = tcg_temp_new();
4333 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
4334 cpu_gpr[reg], ctx->mem_idx,
4335 DEF_MEMOP(memop) | MO_ALIGN);
4336 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
4337 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
4338 tcg_gen_or_tl(t0, t0, cpu_so);
4339 tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
4340 tcg_temp_free(t0);
4341 tcg_gen_br(l2);
4342
4343 gen_set_label(l1);
4344
4345 /*
4346 * Address mismatch implies failure. But we still need to provide
4347 * the memory barrier semantics of the instruction.
4348 */
4349 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
4350 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4351
4352 gen_set_label(l2);
4353 tcg_gen_movi_tl(cpu_reserve, -1);
4354 }
4355
4356 #define STCX(name, memop) \
4357 static void gen_##name(DisasContext *ctx) \
4358 { \
4359 gen_conditional_store(ctx, memop); \
4360 }
4361
4362 STCX(stbcx_, DEF_MEMOP(MO_UB))
4363 STCX(sthcx_, DEF_MEMOP(MO_UW))
4364 STCX(stwcx_, DEF_MEMOP(MO_UL))
4365
4366 #if defined(TARGET_PPC64)
4367 /* ldarx */
4368 LARX(ldarx, DEF_MEMOP(MO_Q))
4369 /* stdcx. */
4370 STCX(stdcx_, DEF_MEMOP(MO_Q))
4371
4372 /* lqarx */
4373 static void gen_lqarx(DisasContext *ctx)
4374 {
4375 int rd = rD(ctx->opcode);
4376 TCGv EA, hi, lo;
4377
4378 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
4379 (rd == rB(ctx->opcode)))) {
4380 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4381 return;
4382 }
4383
4384 gen_set_access_type(ctx, ACCESS_RES);
4385 EA = tcg_temp_new();
4386 gen_addr_reg_index(ctx, EA);
4387
4388 /* Note that the low part is always in RD+1, even in LE mode. */
4389 lo = cpu_gpr[rd + 1];
4390 hi = cpu_gpr[rd];
4391
4392 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4393 if (HAVE_ATOMIC128) {
4394 TCGv_i32 oi = tcg_temp_new_i32();
4395 if (ctx->le_mode) {
4396 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16,
4397 ctx->mem_idx));
4398 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
4399 } else {
4400 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16,
4401 ctx->mem_idx));
4402 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
4403 }
4404 tcg_temp_free_i32(oi);
4405 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
4406 } else {
4407 /* Restart with exclusive lock. */
4408 gen_helper_exit_atomic(cpu_env);
4409 ctx->base.is_jmp = DISAS_NORETURN;
4410 tcg_temp_free(EA);
4411 return;
4412 }
4413 } else if (ctx->le_mode) {
4414 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16);
4415 tcg_gen_mov_tl(cpu_reserve, EA);
4416 gen_addr_add(ctx, EA, EA, 8);
4417 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ);
4418 } else {
4419 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16);
4420 tcg_gen_mov_tl(cpu_reserve, EA);
4421 gen_addr_add(ctx, EA, EA, 8);
4422 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ);
4423 }
4424 tcg_temp_free(EA);
4425
4426 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
4427 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
4428 }
4429
4430 /* stqcx. */
4431 static void gen_stqcx_(DisasContext *ctx)
4432 {
4433 int rs = rS(ctx->opcode);
4434 TCGv EA, hi, lo;
4435
4436 if (unlikely(rs & 1)) {
4437 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4438 return;
4439 }
4440
4441 gen_set_access_type(ctx, ACCESS_RES);
4442 EA = tcg_temp_new();
4443 gen_addr_reg_index(ctx, EA);
4444
4445 /* Note that the low part is always in RS+1, even in LE mode. */
4446 lo = cpu_gpr[rs + 1];
4447 hi = cpu_gpr[rs];
4448
4449 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4450 if (HAVE_CMPXCHG128) {
4451 TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16);
4452 if (ctx->le_mode) {
4453 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
4454 EA, lo, hi, oi);
4455 } else {
4456 gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
4457 EA, lo, hi, oi);
4458 }
4459 tcg_temp_free_i32(oi);
4460 } else {
4461 /* Restart with exclusive lock. */
4462 gen_helper_exit_atomic(cpu_env);
4463 ctx->base.is_jmp = DISAS_NORETURN;
4464 }
4465 tcg_temp_free(EA);
4466 } else {
4467 TCGLabel *lab_fail = gen_new_label();
4468 TCGLabel *lab_over = gen_new_label();
4469 TCGv_i64 t0 = tcg_temp_new_i64();
4470 TCGv_i64 t1 = tcg_temp_new_i64();
4471
4472 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
4473 tcg_temp_free(EA);
4474
4475 gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
4476 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
4477 ? offsetof(CPUPPCState, reserve_val2)
4478 : offsetof(CPUPPCState, reserve_val)));
4479 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);
4480
4481 tcg_gen_addi_i64(t0, cpu_reserve, 8);
4482 gen_qemu_ld64_i64(ctx, t0, t0);
4483 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
4484 ? offsetof(CPUPPCState, reserve_val)
4485 : offsetof(CPUPPCState, reserve_val2)));
4486 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);
4487
4488 /* Success */
4489 gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
4490 tcg_gen_addi_i64(t0, cpu_reserve, 8);
4491 gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);
4492
4493 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4494 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
4495 tcg_gen_br(lab_over);
4496
4497 gen_set_label(lab_fail);
4498 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4499
4500 gen_set_label(lab_over);
4501 tcg_gen_movi_tl(cpu_reserve, -1);
4502 tcg_temp_free_i64(t0);
4503 tcg_temp_free_i64(t1);
4504 }
4505 }
4506 #endif /* defined(TARGET_PPC64) */
4507
4508 /* sync */
4509 static void gen_sync(DisasContext *ctx)
4510 {
4511 uint32_t l = (ctx->opcode >> 21) & 3;
4512
4513 /*
4514 * We may need to check for a pending TLB flush.
4515 *
4516 * We do this on ptesync (l == 2) on ppc64 and on any sync on ppc32.
4517 *
4518 * Additionally, this can only happen in kernel mode, so check
4519 * MSR_PR as well.
4520 */
4521 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
4522 gen_check_tlb_flush(ctx, true);
4523 }
4524 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
4525 }
4526
4527 /* wait */
4528 static void gen_wait(DisasContext *ctx)
4529 {
4530 TCGv_i32 t0 = tcg_const_i32(1);
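/*
 * cpu_env points at the CPUPPCState embedded in PowerPCCPU; subtracting
 * offsetof(PowerPCCPU, env) yields the containing CPU object, so this
 * store sets its CPUState.halted field to 1 before raising EXCP_HLT.
 */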
4531 tcg_gen_st_i32(t0, cpu_env,
4532 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
4533 tcg_temp_free_i32(t0);
4534 /* Stop translation, as the CPU is supposed to sleep from now on */
4535 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4536 }
4537
4538 #if defined(TARGET_PPC64)
4539 static void gen_doze(DisasContext *ctx)
4540 {
4541 #if defined(CONFIG_USER_ONLY)
4542 GEN_PRIV;
4543 #else
4544 TCGv_i32 t;
4545
4546 CHK_HV;
4547 t = tcg_const_i32(PPC_PM_DOZE);
4548 gen_helper_pminsn(cpu_env, t);
4549 tcg_temp_free_i32(t);
4550 /* Stop translation, as the CPU is supposed to sleep from now on */
4551 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4552 #endif /* defined(CONFIG_USER_ONLY) */
4553 }
4554
4555 static void gen_nap(DisasContext *ctx)
4556 {
4557 #if defined(CONFIG_USER_ONLY)
4558 GEN_PRIV;
4559 #else
4560 TCGv_i32 t;
4561
4562 CHK_HV;
4563 t = tcg_const_i32(PPC_PM_NAP);
4564 gen_helper_pminsn(cpu_env, t);
4565 tcg_temp_free_i32(t);
4566 /* Stop translation, as the CPU is supposed to sleep from now on */
4567 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4568 #endif /* defined(CONFIG_USER_ONLY) */
4569 }
4570
4571 static void gen_stop(DisasContext *ctx)
4572 {
4573 #if defined(CONFIG_USER_ONLY)
4574 GEN_PRIV;
4575 #else
4576 TCGv_i32 t;
4577
4578 CHK_HV;
4579 t = tcg_const_i32(PPC_PM_STOP);
4580 gen_helper_pminsn(cpu_env, t);
4581 tcg_temp_free_i32(t);
4582 /* Stop translation, as the CPU is supposed to sleep from now on */
4583 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4584 #endif /* defined(CONFIG_USER_ONLY) */
4585 }
4586
4587 static void gen_sleep(DisasContext *ctx)
4588 {
4589 #if defined(CONFIG_USER_ONLY)
4590 GEN_PRIV;
4591 #else
4592 TCGv_i32 t;
4593
4594 CHK_HV;
4595 t = tcg_const_i32(PPC_PM_SLEEP);
4596 gen_helper_pminsn(cpu_env, t);
4597 tcg_temp_free_i32(t);
4598 /* Stop translation, as the CPU is supposed to sleep from now on */
4599 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4600 #endif /* defined(CONFIG_USER_ONLY) */
4601 }
4602
4603 static void gen_rvwinkle(DisasContext *ctx)
4604 {
4605 #if defined(CONFIG_USER_ONLY)
4606 GEN_PRIV;
4607 #else
4608 TCGv_i32 t;
4609
4610 CHK_HV;
4611 t = tcg_const_i32(PPC_PM_RVWINKLE);
4612 gen_helper_pminsn(cpu_env, t);
4613 tcg_temp_free_i32(t);
4614 /* Stop translation, as the CPU is supposed to sleep from now on */
4615 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4616 #endif /* defined(CONFIG_USER_ONLY) */
4617 }
4618 #endif /* #if defined(TARGET_PPC64) */
4619
4620 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
4621 {
4622 #if defined(TARGET_PPC64)
4623 if (ctx->has_cfar) {
4624 tcg_gen_movi_tl(cpu_cfar, nip);
4625 }
4626 #endif
4627 }
4628
4629 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
4630 {
4631 if (unlikely(ctx->singlestep_enabled)) {
4632 return false;
4633 }
4634
4635 #ifndef CONFIG_USER_ONLY
4636 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
4637 #else
4638 return true;
4639 #endif
4640 }
4641
4642 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4643 {
4644 int sse = ctx->singlestep_enabled;
4645 if (unlikely(sse)) {
4646 if (sse & GDBSTUB_SINGLE_STEP) {
4647 gen_debug_exception(ctx);
4648 } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) {
4649 uint32_t excp = gen_prep_dbgex(ctx);
4650 gen_exception(ctx, excp);
4651 } else {
4652 tcg_gen_exit_tb(NULL, 0);
4653 }
4654 } else {
4655 tcg_gen_lookup_and_goto_ptr();
4656 }
4657 }
4658
4659 /*** Branch ***/
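/*
 * Direct branches whose target lies on the same page as the TB (or any
 * target in user-only mode) are chained with goto_tb; otherwise NIP is
 * updated and we fall back to a TB lookup via gen_lookup_and_goto_ptr().
 */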
4660 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
4661 {
4662 if (NARROW_MODE(ctx)) {
4663 dest = (uint32_t) dest;
4664 }
4665 if (use_goto_tb(ctx, dest)) {
4666 tcg_gen_goto_tb(n);
4667 tcg_gen_movi_tl(cpu_nip, dest & ~3);
4668 tcg_gen_exit_tb(ctx->base.tb, n);
4669 } else {
4670 tcg_gen_movi_tl(cpu_nip, dest & ~3);
4671 gen_lookup_and_goto_ptr(ctx);
4672 }
4673 }
4674
4675 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4676 {
4677 if (NARROW_MODE(ctx)) {
4678 nip = (uint32_t)nip;
4679 }
4680 tcg_gen_movi_tl(cpu_lr, nip);
4681 }
4682
4683 /* b ba bl bla */
4684 static void gen_b(DisasContext *ctx)
4685 {
4686 target_ulong li, target;
4687
4688 /* sign extend LI */
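/* (li ^ 0x02000000) - 0x02000000 sign-extends the 26-bit displacement */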
4689 li = LI(ctx->opcode);
4690 li = (li ^ 0x02000000) - 0x02000000;
4691 if (likely(AA(ctx->opcode) == 0)) {
4692 target = ctx->cia + li;
4693 } else {
4694 target = li;
4695 }
4696 if (LK(ctx->opcode)) {
4697 gen_setlr(ctx, ctx->base.pc_next);
4698 }
4699 gen_update_cfar(ctx, ctx->cia);
4700 gen_goto_tb(ctx, 0, target);
4701 ctx->base.is_jmp = DISAS_NORETURN;
4702 }
4703
4704 #define BCOND_IM 0
4705 #define BCOND_LR 1
4706 #define BCOND_CTR 2
4707 #define BCOND_TAR 3
4708
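/*
 * BO field decoding as used below:
 *   BO & 0x10 -- do not test the CR bit selected by BI
 *   BO & 0x08 -- when testing, branch if the CR bit is set (else if clear)
 *   BO & 0x04 -- do not decrement and test CTR
 *   BO & 0x02 -- when testing CTR, branch if CTR == 0 (else if CTR != 0)
 */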
4709 static void gen_bcond(DisasContext *ctx, int type)
4710 {
4711 uint32_t bo = BO(ctx->opcode);
4712 TCGLabel *l1;
4713 TCGv target;
4714
4715 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
4716 target = tcg_temp_local_new();
4717 if (type == BCOND_CTR) {
4718 tcg_gen_mov_tl(target, cpu_ctr);
4719 } else if (type == BCOND_TAR) {
4720 gen_load_spr(target, SPR_TAR);
4721 } else {
4722 tcg_gen_mov_tl(target, cpu_lr);
4723 }
4724 } else {
4725 target = NULL;
4726 }
4727 if (LK(ctx->opcode)) {
4728 gen_setlr(ctx, ctx->base.pc_next);
4729 }
4730 l1 = gen_new_label();
4731 if ((bo & 0x4) == 0) {
4732 /* Decrement and test CTR */
4733 TCGv temp = tcg_temp_new();
4734
4735 if (type == BCOND_CTR) {
4736 /*
4737 * All ISAs up to v3 describe this form of bcctr as invalid, but
4738 * some processors, i.e. 64-bit server processors compliant with
4739 * arch 2.x, do implement a "test and decrement" logic instead,
4740 * as described in their respective UMs. This logic has CTR act as
4741 * both the branch target and a counter, which makes it basically
4742 * useless and thus never used in real code.
4743 *
4744 * This form was hence chosen to trigger the extra micro-architectural
4745 * side-effect on real HW that is needed for the Spectre v2 workaround.
4746 * It is up to guests that implement such a workaround, e.g. Linux, to
4747 * use this form in a way that just triggers the side-effect without
4748 * doing anything else harmful.
4749 */
4750 if (unlikely(!is_book3s_arch2x(ctx))) {
4751 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4752 tcg_temp_free(temp);
4753 tcg_temp_free(target);
4754 return;
4755 }
4756
4757 if (NARROW_MODE(ctx)) {
4758 tcg_gen_ext32u_tl(temp, cpu_ctr);
4759 } else {
4760 tcg_gen_mov_tl(temp, cpu_ctr);
4761 }
4762 if (bo & 0x2) {
4763 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4764 } else {
4765 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4766 }
4767 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4768 } else {
4769 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4770 if (NARROW_MODE(ctx)) {
4771 tcg_gen_ext32u_tl(temp, cpu_ctr);
4772 } else {
4773 tcg_gen_mov_tl(temp, cpu_ctr);
4774 }
4775 if (bo & 0x2) {
4776 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4777 } else {
4778 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4779 }
4780 }
4781 tcg_temp_free(temp);
4782 }
4783 if ((bo & 0x10) == 0) {
4784 /* Test CR */
4785 uint32_t bi = BI(ctx->opcode);
4786 uint32_t mask = 0x08 >> (bi & 0x03);
4787 TCGv_i32 temp = tcg_temp_new_i32();
4788
4789 if (bo & 0x8) {
4790 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4791 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
4792 } else {
4793 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4794 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
4795 }
4796 tcg_temp_free_i32(temp);
4797 }
4798 gen_update_cfar(ctx, ctx->cia);
4799 if (type == BCOND_IM) {
4800 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
4801 if (likely(AA(ctx->opcode) == 0)) {
4802 gen_goto_tb(ctx, 0, ctx->cia + li);
4803 } else {
4804 gen_goto_tb(ctx, 0, li);
4805 }
4806 } else {
4807 if (NARROW_MODE(ctx)) {
4808 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
4809 } else {
4810 tcg_gen_andi_tl(cpu_nip, target, ~3);
4811 }
4812 gen_lookup_and_goto_ptr(ctx);
4813 tcg_temp_free(target);
4814 }
4815 if ((bo & 0x14) != 0x14) {
4816 /* fallthrough case */
4817 gen_set_label(l1);
4818 gen_goto_tb(ctx, 1, ctx->base.pc_next);
4819 }
4820 ctx->base.is_jmp = DISAS_NORETURN;
4821 }
4822
4823 static void gen_bc(DisasContext *ctx)
4824 {
4825 gen_bcond(ctx, BCOND_IM);
4826 }
4827
4828 static void gen_bcctr(DisasContext *ctx)
4829 {
4830 gen_bcond(ctx, BCOND_CTR);
4831 }
4832
4833 static void gen_bclr(DisasContext *ctx)
4834 {
4835 gen_bcond(ctx, BCOND_LR);
4836 }
4837
4838 static void gen_bctar(DisasContext *ctx)
4839 {
4840 gen_bcond(ctx, BCOND_TAR);
4841 }
4842
4843 /*** Condition register logical ***/
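/*
 * Each CR field is held as a 4-bit value in cpu_crf[0..7], with bit
 * 0x8 = LT, 0x4 = GT, 0x2 = EQ and 0x1 = SO.  The macro below shifts the
 * source fields so that the bits named by crbA/crbB line up with the
 * destination bit crbD before applying the logical operation.
 */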
4844 #define GEN_CRLOGIC(name, tcg_op, opc) \
4845 static void glue(gen_, name)(DisasContext *ctx) \
4846 { \
4847 uint8_t bitmask; \
4848 int sh; \
4849 TCGv_i32 t0, t1; \
4850 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
4851 t0 = tcg_temp_new_i32(); \
4852 if (sh > 0) \
4853 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
4854 else if (sh < 0) \
4855 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
4856 else \
4857 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
4858 t1 = tcg_temp_new_i32(); \
4859 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
4860 if (sh > 0) \
4861 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
4862 else if (sh < 0) \
4863 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
4864 else \
4865 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
4866 tcg_op(t0, t0, t1); \
4867 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \
4868 tcg_gen_andi_i32(t0, t0, bitmask); \
4869 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
4870 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
4871 tcg_temp_free_i32(t0); \
4872 tcg_temp_free_i32(t1); \
4873 }
4874
4875 /* crand */
4876 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
4877 /* crandc */
4878 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
4879 /* creqv */
4880 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
4881 /* crnand */
4882 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
4883 /* crnor */
4884 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
4885 /* cror */
4886 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
4887 /* crorc */
4888 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
4889 /* crxor */
4890 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
4891
4892 /* mcrf */
4893 static void gen_mcrf(DisasContext *ctx)
4894 {
4895 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
4896 }
4897
4898 /*** System linkage ***/
4899
4900 /* rfi (supervisor only) */
4901 static void gen_rfi(DisasContext *ctx)
4902 {
4903 #if defined(CONFIG_USER_ONLY)
4904 GEN_PRIV;
4905 #else
4906 /*
4907 * This instruction doesn't exist anymore on 64-bit server
4908 * processors compliant with arch 2.x
4909 */
4910 if (is_book3s_arch2x(ctx)) {
4911 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4912 return;
4913 }
4914 /* Restore CPU state */
4915 CHK_SV;
4916 gen_icount_io_start(ctx);
4917 gen_update_cfar(ctx, ctx->cia);
4918 gen_helper_rfi(cpu_env);
4919 ctx->base.is_jmp = DISAS_EXIT;
4920 #endif
4921 }
4922
4923 #if defined(TARGET_PPC64)
4924 static void gen_rfid(DisasContext *ctx)
4925 {
4926 #if defined(CONFIG_USER_ONLY)
4927 GEN_PRIV;
4928 #else
4929 /* Restore CPU state */
4930 CHK_SV;
4931 gen_icount_io_start(ctx);
4932 gen_update_cfar(ctx, ctx->cia);
4933 gen_helper_rfid(cpu_env);
4934 ctx->base.is_jmp = DISAS_EXIT;
4935 #endif
4936 }
4937
4938 #if !defined(CONFIG_USER_ONLY)
4939 static void gen_rfscv(DisasContext *ctx)
4940 {
4941 #if defined(CONFIG_USER_ONLY)
4942 GEN_PRIV;
4943 #else
4944 /* Restore CPU state */
4945 CHK_SV;
4946 gen_icount_io_start(ctx);
4947 gen_update_cfar(ctx, ctx->cia);
4948 gen_helper_rfscv(cpu_env);
4949 ctx->base.is_jmp = DISAS_EXIT;
4950 #endif
4951 }
4952 #endif
4953
4954 static void gen_hrfid(DisasContext *ctx)
4955 {
4956 #if defined(CONFIG_USER_ONLY)
4957 GEN_PRIV;
4958 #else
4959 /* Restore CPU state */
4960 CHK_HV;
4961 gen_helper_hrfid(cpu_env);
4962 ctx->base.is_jmp = DISAS_EXIT;
4963 #endif
4964 }
4965 #endif
4966
4967 /* sc */
4968 #if defined(CONFIG_USER_ONLY)
4969 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
4970 #else
4971 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
4972 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
4973 #endif
4974 static void gen_sc(DisasContext *ctx)
4975 {
4976 uint32_t lev;
4977
4978 lev = (ctx->opcode >> 5) & 0x7F;
4979 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
4980 }
4981
4982 #if defined(TARGET_PPC64)
4983 #if !defined(CONFIG_USER_ONLY)
4984 static void gen_scv(DisasContext *ctx)
4985 {
4986 uint32_t lev = (ctx->opcode >> 5) & 0x7F;
4987
4988 /* Set the PC back to the faulting instruction. */
4989 gen_update_nip(ctx, ctx->cia);
4990 gen_helper_scv(cpu_env, tcg_constant_i32(lev));
4991
4992 ctx->base.is_jmp = DISAS_NORETURN;
4993 }
4994 #endif
4995 #endif
4996
4997 /*** Trap ***/
4998
4999 /* Check for unconditional traps (always or never) */
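/*
 * The TO field selects the trap conditions: 0x10 = signed less than,
 * 0x08 = signed greater than, 0x04 = equal, 0x02 = unsigned less than,
 * 0x01 = unsigned greater than.  TO == 0 therefore never traps and
 * TO == 31 always traps, which is what is special-cased here.
 */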
5000 static bool check_unconditional_trap(DisasContext *ctx)
5001 {
5002 /* Trap never */
5003 if (TO(ctx->opcode) == 0) {
5004 return true;
5005 }
5006 /* Trap always */
5007 if (TO(ctx->opcode) == 31) {
5008 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
5009 return true;
5010 }
5011 return false;
5012 }
5013
5014 /* tw */
5015 static void gen_tw(DisasContext *ctx)
5016 {
5017 TCGv_i32 t0;
5018
5019 if (check_unconditional_trap(ctx)) {
5020 return;
5021 }
5022 t0 = tcg_const_i32(TO(ctx->opcode));
5023 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
5024 t0);
5025 tcg_temp_free_i32(t0);
5026 }
5027
5028 /* twi */
5029 static void gen_twi(DisasContext *ctx)
5030 {
5031 TCGv t0;
5032 TCGv_i32 t1;
5033
5034 if (check_unconditional_trap(ctx)) {
5035 return;
5036 }
5037 t0 = tcg_const_tl(SIMM(ctx->opcode));
5038 t1 = tcg_const_i32(TO(ctx->opcode));
5039 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
5040 tcg_temp_free(t0);
5041 tcg_temp_free_i32(t1);
5042 }
5043
5044 #if defined(TARGET_PPC64)
5045 /* td */
5046 static void gen_td(DisasContext *ctx)
5047 {
5048 TCGv_i32 t0;
5049
5050 if (check_unconditional_trap(ctx)) {
5051 return;
5052 }
5053 t0 = tcg_const_i32(TO(ctx->opcode));
5054 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
5055 t0);
5056 tcg_temp_free_i32(t0);
5057 }
5058
5059 /* tdi */
5060 static void gen_tdi(DisasContext *ctx)
5061 {
5062 TCGv t0;
5063 TCGv_i32 t1;
5064
5065 if (check_unconditional_trap(ctx)) {
5066 return;
5067 }
5068 t0 = tcg_const_tl(SIMM(ctx->opcode));
5069 t1 = tcg_const_i32(TO(ctx->opcode));
5070 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
5071 tcg_temp_free(t0);
5072 tcg_temp_free_i32(t1);
5073 }
5074 #endif
5075
5076 /*** Processor control ***/
5077
5078 /* mcrxr */
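/*
 * mcrxr moves XER[SO,OV,CA] into the top three bits of CR field crfD
 * (the low bit of the nibble is cleared) and then zeroes SO, OV and CA
 * in XER.
 */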
5079 static void gen_mcrxr(DisasContext *ctx)
5080 {
5081 TCGv_i32 t0 = tcg_temp_new_i32();
5082 TCGv_i32 t1 = tcg_temp_new_i32();
5083 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
5084
5085 tcg_gen_trunc_tl_i32(t0, cpu_so);
5086 tcg_gen_trunc_tl_i32(t1, cpu_ov);
5087 tcg_gen_trunc_tl_i32(dst, cpu_ca);
5088 tcg_gen_shli_i32(t0, t0, 3);
5089 tcg_gen_shli_i32(t1, t1, 2);
5090 tcg_gen_shli_i32(dst, dst, 1);
5091 tcg_gen_or_i32(dst, dst, t0);
5092 tcg_gen_or_i32(dst, dst, t1);
5093 tcg_temp_free_i32(t0);
5094 tcg_temp_free_i32(t1);
5095
5096 tcg_gen_movi_tl(cpu_so, 0);
5097 tcg_gen_movi_tl(cpu_ov, 0);
5098 tcg_gen_movi_tl(cpu_ca, 0);
5099 }
5100
5101 #ifdef TARGET_PPC64
5102 /* mcrxrx */
5103 static void gen_mcrxrx(DisasContext *ctx)
5104 {
5105 TCGv t0 = tcg_temp_new();
5106 TCGv t1 = tcg_temp_new();
5107 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
5108
5109 /* copy OV and OV32 */
5110 tcg_gen_shli_tl(t0, cpu_ov, 1);
5111 tcg_gen_or_tl(t0, t0, cpu_ov32);
5112 tcg_gen_shli_tl(t0, t0, 2);
5113 /* copy CA and CA32 */
5114 tcg_gen_shli_tl(t1, cpu_ca, 1);
5115 tcg_gen_or_tl(t1, t1, cpu_ca32);
5116 tcg_gen_or_tl(t0, t0, t1);
5117 tcg_gen_trunc_tl_i32(dst, t0);
5118 tcg_temp_free(t0);
5119 tcg_temp_free(t1);
5120 }
5121 #endif
5122
5123 /* mfcr mfocrf */
5124 static void gen_mfcr(DisasContext *ctx)
5125 {
5126 uint32_t crm, crn;
5127
5128 if (likely(ctx->opcode & 0x00100000)) {
5129 crm = CRM(ctx->opcode);
5130 if (likely(crm && ((crm & (crm - 1)) == 0))) {
5131 crn = ctz32(crm);
5132 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
5133 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
5134 cpu_gpr[rD(ctx->opcode)], crn * 4);
5135 }
5136 } else {
5137 TCGv_i32 t0 = tcg_temp_new_i32();
5138 tcg_gen_mov_i32(t0, cpu_crf[0]);
5139 tcg_gen_shli_i32(t0, t0, 4);
5140 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
5141 tcg_gen_shli_i32(t0, t0, 4);
5142 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
5143 tcg_gen_shli_i32(t0, t0, 4);
5144 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
5145 tcg_gen_shli_i32(t0, t0, 4);
5146 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
5147 tcg_gen_shli_i32(t0, t0, 4);
5148 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
5149 tcg_gen_shli_i32(t0, t0, 4);
5150 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
5151 tcg_gen_shli_i32(t0, t0, 4);
5152 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
5153 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
5154 tcg_temp_free_i32(t0);
5155 }
5156 }
5157
5158 /* mfmsr */
5159 static void gen_mfmsr(DisasContext *ctx)
5160 {
5161 CHK_SV;
5162 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
5163 }
5164
5165 /* mfspr */
5166 static inline void gen_op_mfspr(DisasContext *ctx)
5167 {
5168 void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
5169 uint32_t sprn = SPR(ctx->opcode);
5170
5171 #if defined(CONFIG_USER_ONLY)
5172 read_cb = ctx->spr_cb[sprn].uea_read;
5173 #else
5174 if (ctx->pr) {
5175 read_cb = ctx->spr_cb[sprn].uea_read;
5176 } else if (ctx->hv) {
5177 read_cb = ctx->spr_cb[sprn].hea_read;
5178 } else {
5179 read_cb = ctx->spr_cb[sprn].oea_read;
5180 }
5181 #endif
5182 if (likely(read_cb != NULL)) {
5183 if (likely(read_cb != SPR_NOACCESS)) {
5184 (*read_cb)(ctx, rD(ctx->opcode), sprn);
5185 } else {
5186 /* Privilege exception */
5187 /*
5188 * This is a hack to avoid warnings when running Linux:
5189 * this OS breaks the PowerPC virtualisation model,
5190 * allowing userland applications to read the PVR
5191 */
5192 if (sprn != SPR_PVR) {
5193 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
5194 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
5195 ctx->cia);
5196 }
5197 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5198 }
5199 } else {
5200 /* ISA 2.07 defines these as no-ops */
5201 if ((ctx->insns_flags2 & PPC2_ISA207S) &&
5202 (sprn >= 808 && sprn <= 811)) {
5203 /* This is a nop */
5204 return;
5205 }
5206 /* Not defined */
5207 qemu_log_mask(LOG_GUEST_ERROR,
5208 "Trying to read invalid spr %d (0x%03x) at "
5209 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
5210
5211 /*
5212 * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
5213 * generate a priv exception, an hv emulation assist or a no-op.
5214 */
5215 if (sprn & 0x10) {
5216 if (ctx->pr) {
5217 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5218 }
5219 } else {
5220 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
5221 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5222 }
5223 }
5224 }
5225 }
5226
5227 static void gen_mfspr(DisasContext *ctx)
5228 {
5229 gen_op_mfspr(ctx);
5230 }
5231
5232 /* mftb */
5233 static void gen_mftb(DisasContext *ctx)
5234 {
5235 gen_op_mfspr(ctx);
5236 }
5237
5238 /* mtcrf mtocrf */
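/*
 * With bit 0x00100000 set this is mtocrf: CRM must have exactly one bit
 * set and only that CR field is updated.  Otherwise it is mtcrf and every
 * CR field whose CRM bit is set is loaded from the corresponding nibble
 * of rS.
 */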
5239 static void gen_mtcrf(DisasContext *ctx)
5240 {
5241 uint32_t crm, crn;
5242
5243 crm = CRM(ctx->opcode);
5244 if (likely((ctx->opcode & 0x00100000))) {
5245 if (crm && ((crm & (crm - 1)) == 0)) {
5246 TCGv_i32 temp = tcg_temp_new_i32();
5247 crn = ctz32(crm);
5248 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
5249 tcg_gen_shri_i32(temp, temp, crn * 4);
5250 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
5251 tcg_temp_free_i32(temp);
5252 }
5253 } else {
5254 TCGv_i32 temp = tcg_temp_new_i32();
5255 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
5256 for (crn = 0 ; crn < 8 ; crn++) {
5257 if (crm & (1 << crn)) {
5258 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
5259 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
5260 }
5261 }
5262 tcg_temp_free_i32(temp);
5263 }
5264 }
5265
5266 /* mtmsr */
5267 #if defined(TARGET_PPC64)
5268 static void gen_mtmsrd(DisasContext *ctx)
5269 {
5270 CHK_SV;
5271
5272 #if !defined(CONFIG_USER_ONLY)
5273 gen_icount_io_start(ctx);
5274 if (ctx->opcode & 0x00010000) {
5275 /* L=1 form only updates EE and RI */
5276 TCGv t0 = tcg_temp_new();
5277 TCGv t1 = tcg_temp_new();
5278 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)],
5279 (1 << MSR_RI) | (1 << MSR_EE));
5280 tcg_gen_andi_tl(t1, cpu_msr,
5281 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
5282 tcg_gen_or_tl(t1, t1, t0);
5283
5284 gen_helper_store_msr(cpu_env, t1);
5285 tcg_temp_free(t0);
5286 tcg_temp_free(t1);
5287
5288 } else {
5289 /*
5290 * XXX: we need to update nip before the store because, if we enter
5291 * power saving mode, we will exit the loop directly from
5292 * ppc_store_msr
5293 */
5294 gen_update_nip(ctx, ctx->base.pc_next);
5295 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
5296 }
5297 /* Must stop the translation as machine state (may have) changed */
5298 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5299 #endif /* !defined(CONFIG_USER_ONLY) */
5300 }
5301 #endif /* defined(TARGET_PPC64) */
5302
5303 static void gen_mtmsr(DisasContext *ctx)
5304 {
5305 CHK_SV;
5306
5307 #if !defined(CONFIG_USER_ONLY)
5308 gen_icount_io_start(ctx);
5309 if (ctx->opcode & 0x00010000) {
5310 /* L=1 form only updates EE and RI */
5311 TCGv t0 = tcg_temp_new();
5312 TCGv t1 = tcg_temp_new();
5313 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)],
5314 (1 << MSR_RI) | (1 << MSR_EE));
5315 tcg_gen_andi_tl(t1, cpu_msr,
5316 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
5317 tcg_gen_or_tl(t1, t1, t0);
5318
5319 gen_helper_store_msr(cpu_env, t1);
5320 tcg_temp_free(t0);
5321 tcg_temp_free(t1);
5322
5323 } else {
5324 TCGv msr = tcg_temp_new();
5325
5326 /*
5327 * XXX: we need to update nip before the store because, if we enter
5328 * power saving mode, we will exit the loop directly from
5329 * ppc_store_msr
5330 */
5331 gen_update_nip(ctx, ctx->base.pc_next);
5332 #if defined(TARGET_PPC64)
5333 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
5334 #else
5335 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
5336 #endif
5337 gen_helper_store_msr(cpu_env, msr);
5338 tcg_temp_free(msr);
5339 }
5340 /* Must stop the translation as machine state (may have) changed */
5341 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5342 #endif
5343 }
5344
5345 /* mtspr */
5346 static void gen_mtspr(DisasContext *ctx)
5347 {
5348 void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
5349 uint32_t sprn = SPR(ctx->opcode);
5350
5351 #if defined(CONFIG_USER_ONLY)
5352 write_cb = ctx->spr_cb[sprn].uea_write;
5353 #else
5354 if (ctx->pr) {
5355 write_cb = ctx->spr_cb[sprn].uea_write;
5356 } else if (ctx->hv) {
5357 write_cb = ctx->spr_cb[sprn].hea_write;
5358 } else {
5359 write_cb = ctx->spr_cb[sprn].oea_write;
5360 }
5361 #endif
5362 if (likely(write_cb != NULL)) {
5363 if (likely(write_cb != SPR_NOACCESS)) {
5364 (*write_cb)(ctx, sprn, rS(ctx->opcode));
5365 } else {
5366 /* Privilege exception */
5367 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
5368 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
5369 ctx->cia);
5370 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5371 }
5372 } else {
5373 /* ISA 2.07 defines these as no-ops */
5374 if ((ctx->insns_flags2 & PPC2_ISA207S) &&
5375 (sprn >= 808 && sprn <= 811)) {
5376 /* This is a nop */
5377 return;
5378 }
5379
5380 /* Not defined */
5381 qemu_log_mask(LOG_GUEST_ERROR,
5382 "Trying to write invalid spr %d (0x%03x) at "
5383 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
5384
5385
5386 /*
5387 * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
5388 * generate a priv exception, an hv emulation assist or a no-op.
5389 */
5390 if (sprn & 0x10) {
5391 if (ctx->pr) {
5392 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5393 }
5394 } else {
5395 if (ctx->pr || sprn == 0) {
5396 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5397 }
5398 }
5399 }
5400 }
5401
5402 #if defined(TARGET_PPC64)
5403 /* setb */
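/*
 * setb sets rD to -1 if the LT bit of CR field crfS is set, to 1 if GT is
 * set (and LT is not), and to 0 otherwise; the setcond/movcond pair below
 * computes this directly from the 4-bit CR field value.
 */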
5404 static void gen_setb(DisasContext *ctx)
5405 {
5406 TCGv_i32 t0 = tcg_temp_new_i32();
5407 TCGv_i32 t8 = tcg_temp_new_i32();
5408 TCGv_i32 tm1 = tcg_temp_new_i32();
5409 int crf = crfS(ctx->opcode);
5410
5411 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
5412 tcg_gen_movi_i32(t8, 8);
5413 tcg_gen_movi_i32(tm1, -1);
5414 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
5415 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
5416
5417 tcg_temp_free_i32(t0);
5418 tcg_temp_free_i32(t8);
5419 tcg_temp_free_i32(tm1);
5420 }
5421 #endif
5422
5423 /*** Cache management ***/
5424
5425 /* dcbf */
5426 static void gen_dcbf(DisasContext *ctx)
5427 {
5428 /* XXX: specification says this is treated as a load by the MMU */
5429 TCGv t0;
5430 gen_set_access_type(ctx, ACCESS_CACHE);
5431 t0 = tcg_temp_new();
5432 gen_addr_reg_index(ctx, t0);
5433 gen_qemu_ld8u(ctx, t0, t0);
5434 tcg_temp_free(t0);
5435 }
5436
5437 /* dcbfep (external PID dcbf) */
5438 static void gen_dcbfep(DisasContext *ctx)
5439 {
5440 /* XXX: specification says this is treated as a load by the MMU */
5441 TCGv t0;
5442 CHK_SV;
5443 gen_set_access_type(ctx, ACCESS_CACHE);
5444 t0 = tcg_temp_new();
5445 gen_addr_reg_index(ctx, t0);
5446 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5447 tcg_temp_free(t0);
5448 }
5449
5450 /* dcbi (Supervisor only) */
5451 static void gen_dcbi(DisasContext *ctx)
5452 {
5453 #if defined(CONFIG_USER_ONLY)
5454 GEN_PRIV;
5455 #else
5456 TCGv EA, val;
5457
5458 CHK_SV;
5459 EA = tcg_temp_new();
5460 gen_set_access_type(ctx, ACCESS_CACHE);
5461 gen_addr_reg_index(ctx, EA);
5462 val = tcg_temp_new();
5463 /* XXX: specification says this should be treated as a store by the MMU */
5464 gen_qemu_ld8u(ctx, val, EA);
5465 gen_qemu_st8(ctx, val, EA);
5466 tcg_temp_free(val);
5467 tcg_temp_free(EA);
5468 #endif /* defined(CONFIG_USER_ONLY) */
5469 }
5470
5471 /* dcbst */
5472 static void gen_dcbst(DisasContext *ctx)
5473 {
5474 /* XXX: specification says this is treated as a load by the MMU */
5475 TCGv t0;
5476 gen_set_access_type(ctx, ACCESS_CACHE);
5477 t0 = tcg_temp_new();
5478 gen_addr_reg_index(ctx, t0);
5479 gen_qemu_ld8u(ctx, t0, t0);
5480 tcg_temp_free(t0);
5481 }
5482
5483 /* dcbstep (external PID version of dcbst) */
5484 static void gen_dcbstep(DisasContext *ctx)
5485 {
5486 /* XXX: specification says this is treated as a load by the MMU */
5487 TCGv t0;
5488 gen_set_access_type(ctx, ACCESS_CACHE);
5489 t0 = tcg_temp_new();
5490 gen_addr_reg_index(ctx, t0);
5491 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5492 tcg_temp_free(t0);
5493 }
5494
5495 /* dcbt */
5496 static void gen_dcbt(DisasContext *ctx)
5497 {
5498 /*
5499 * interpreted as no-op
5500 * XXX: specification says this is treated as a load by the MMU but
5501 * does not generate any exception
5502 */
5503 }
5504
5505 /* dcbtep */
5506 static void gen_dcbtep(DisasContext *ctx)
5507 {
5508 /*
5509 * interpreted as no-op
5510 * XXX: specification says this is treated as a load by the MMU but
5511 * does not generate any exception
5512 */
5513 }
5514
5515 /* dcbtst */
5516 static void gen_dcbtst(DisasContext *ctx)
5517 {
5518 /*
5519 * interpreted as no-op
5520 * XXX: specification says this is treated as a load by the MMU but
5521 * does not generate any exception
5522 */
5523 }
5524
5525 /* dcbtstep */
5526 static void gen_dcbtstep(DisasContext *ctx)
5527 {
5528 /*
5529 * interpreted as no-op
5530 * XXX: specification says this is treated as a load by the MMU but
5531 * does not generate any exception
5532 */
5533 }
5534
5535 /* dcbtls */
5536 static void gen_dcbtls(DisasContext *ctx)
5537 {
5538 /* Always fails locking the cache */
5539 TCGv t0 = tcg_temp_new();
5540 gen_load_spr(t0, SPR_Exxx_L1CSR0);
5541 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
5542 gen_store_spr(SPR_Exxx_L1CSR0, t0);
5543 tcg_temp_free(t0);
5544 }
5545
5546 /* dcbz */
5547 static void gen_dcbz(DisasContext *ctx)
5548 {
5549 TCGv tcgv_addr;
5550 TCGv_i32 tcgv_op;
5551
5552 gen_set_access_type(ctx, ACCESS_CACHE);
5553 tcgv_addr = tcg_temp_new();
5554 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5555 gen_addr_reg_index(ctx, tcgv_addr);
5556 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5557 tcg_temp_free(tcgv_addr);
5558 tcg_temp_free_i32(tcgv_op);
5559 }
5560
5561 /* dcbzep */
5562 static void gen_dcbzep(DisasContext *ctx)
5563 {
5564 TCGv tcgv_addr;
5565 TCGv_i32 tcgv_op;
5566
5567 gen_set_access_type(ctx, ACCESS_CACHE);
5568 tcgv_addr = tcg_temp_new();
5569 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5570 gen_addr_reg_index(ctx, tcgv_addr);
5571 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5572 tcg_temp_free(tcgv_addr);
5573 tcg_temp_free_i32(tcgv_op);
5574 }
5575
5576 /* dst / dstt */
5577 static void gen_dst(DisasContext *ctx)
5578 {
5579 if (rA(ctx->opcode) == 0) {
5580 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5581 } else {
5582 /* interpreted as no-op */
5583 }
5584 }
5585
5586 /* dstst / dststt */
5587 static void gen_dstst(DisasContext *ctx)
5588 {
5589 if (rA(ctx->opcode) == 0) {
5590 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5591 } else {
5592 /* interpreted as no-op */
5593 }
5594
5595 }
5596
5597 /* dss / dssall */
5598 static void gen_dss(DisasContext *ctx)
5599 {
5600 /* interpreted as no-op */
5601 }
5602
5603 /* icbi */
5604 static void gen_icbi(DisasContext *ctx)
5605 {
5606 TCGv t0;
5607 gen_set_access_type(ctx, ACCESS_CACHE);
5608 t0 = tcg_temp_new();
5609 gen_addr_reg_index(ctx, t0);
5610 gen_helper_icbi(cpu_env, t0);
5611 tcg_temp_free(t0);
5612 }
5613
5614 /* icbiep */
5615 static void gen_icbiep(DisasContext *ctx)
5616 {
5617 TCGv t0;
5618 gen_set_access_type(ctx, ACCESS_CACHE);
5619 t0 = tcg_temp_new();
5620 gen_addr_reg_index(ctx, t0);
5621 gen_helper_icbiep(cpu_env, t0);
5622 tcg_temp_free(t0);
5623 }
5624
5625 /* Optional: */
5626 /* dcba */
5627 static void gen_dcba(DisasContext *ctx)
5628 {
5629 /*
5630 * interpreted as no-op
5631 * XXX: specification says this is treated as a store by the MMU
5632 * but does not generate any exception
5633 */
5634 }
5635
5636 /*** Segment register manipulation ***/
5637 /* Supervisor only: */
5638
5639 /* mfsr */
5640 static void gen_mfsr(DisasContext *ctx)
5641 {
5642 #if defined(CONFIG_USER_ONLY)
5643 GEN_PRIV;
5644 #else
5645 TCGv t0;
5646
5647 CHK_SV;
5648 t0 = tcg_const_tl(SR(ctx->opcode));
5649 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5650 tcg_temp_free(t0);
5651 #endif /* defined(CONFIG_USER_ONLY) */
5652 }
5653
5654 /* mfsrin */
5655 static void gen_mfsrin(DisasContext *ctx)
5656 {
5657 #if defined(CONFIG_USER_ONLY)
5658 GEN_PRIV;
5659 #else
5660 TCGv t0;
5661
5662 CHK_SV;
5663 t0 = tcg_temp_new();
5664 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5665 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5666 tcg_temp_free(t0);
5667 #endif /* defined(CONFIG_USER_ONLY) */
5668 }
5669
5670 /* mtsr */
5671 static void gen_mtsr(DisasContext *ctx)
5672 {
5673 #if defined(CONFIG_USER_ONLY)
5674 GEN_PRIV;
5675 #else
5676 TCGv t0;
5677
5678 CHK_SV;
5679 t0 = tcg_const_tl(SR(ctx->opcode));
5680 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5681 tcg_temp_free(t0);
5682 #endif /* defined(CONFIG_USER_ONLY) */
5683 }
5684
5685 /* mtsrin */
5686 static void gen_mtsrin(DisasContext *ctx)
5687 {
5688 #if defined(CONFIG_USER_ONLY)
5689 GEN_PRIV;
5690 #else
5691 TCGv t0;
5692 CHK_SV;
5693
5694 t0 = tcg_temp_new();
5695 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5696 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5697 tcg_temp_free(t0);
5698 #endif /* defined(CONFIG_USER_ONLY) */
5699 }
5700
5701 #if defined(TARGET_PPC64)
5702 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5703
5704 /* mfsr */
5705 static void gen_mfsr_64b(DisasContext *ctx)
5706 {
5707 #if defined(CONFIG_USER_ONLY)
5708 GEN_PRIV;
5709 #else
5710 TCGv t0;
5711
5712 CHK_SV;
5713 t0 = tcg_const_tl(SR(ctx->opcode));
5714 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5715 tcg_temp_free(t0);
5716 #endif /* defined(CONFIG_USER_ONLY) */
5717 }
5718
5719 /* mfsrin */
5720 static void gen_mfsrin_64b(DisasContext *ctx)
5721 {
5722 #if defined(CONFIG_USER_ONLY)
5723 GEN_PRIV;
5724 #else
5725 TCGv t0;
5726
5727 CHK_SV;
5728 t0 = tcg_temp_new();
5729 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5730 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5731 tcg_temp_free(t0);
5732 #endif /* defined(CONFIG_USER_ONLY) */
5733 }
5734
5735 /* mtsr */
5736 static void gen_mtsr_64b(DisasContext *ctx)
5737 {
5738 #if defined(CONFIG_USER_ONLY)
5739 GEN_PRIV;
5740 #else
5741 TCGv t0;
5742
5743 CHK_SV;
5744 t0 = tcg_const_tl(SR(ctx->opcode));
5745 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5746 tcg_temp_free(t0);
5747 #endif /* defined(CONFIG_USER_ONLY) */
5748 }
5749
5750 /* mtsrin */
5751 static void gen_mtsrin_64b(DisasContext *ctx)
5752 {
5753 #if defined(CONFIG_USER_ONLY)
5754 GEN_PRIV;
5755 #else
5756 TCGv t0;
5757
5758 CHK_SV;
5759 t0 = tcg_temp_new();
5760 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5761 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5762 tcg_temp_free(t0);
5763 #endif /* defined(CONFIG_USER_ONLY) */
5764 }
5765
5766 /* slbmte */
5767 static void gen_slbmte(DisasContext *ctx)
5768 {
5769 #if defined(CONFIG_USER_ONLY)
5770 GEN_PRIV;
5771 #else
5772 CHK_SV;
5773
5774 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
5775 cpu_gpr[rS(ctx->opcode)]);
5776 #endif /* defined(CONFIG_USER_ONLY) */
5777 }
5778
5779 static void gen_slbmfee(DisasContext *ctx)
5780 {
5781 #if defined(CONFIG_USER_ONLY)
5782 GEN_PRIV;
5783 #else
5784 CHK_SV;
5785
5786 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5787 cpu_gpr[rB(ctx->opcode)]);
5788 #endif /* defined(CONFIG_USER_ONLY) */
5789 }
5790
5791 static void gen_slbmfev(DisasContext *ctx)
5792 {
5793 #if defined(CONFIG_USER_ONLY)
5794 GEN_PRIV;
5795 #else
5796 CHK_SV;
5797
5798 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5799 cpu_gpr[rB(ctx->opcode)]);
5800 #endif /* defined(CONFIG_USER_ONLY) */
5801 }
5802
5803 static void gen_slbfee_(DisasContext *ctx)
5804 {
5805 #if defined(CONFIG_USER_ONLY)
5806 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5807 #else
5808 TCGLabel *l1, *l2;
5809
5810 if (unlikely(ctx->pr)) {
5811 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5812 return;
5813 }
5814 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5815 cpu_gpr[rB(ctx->opcode)]);
5816 l1 = gen_new_label();
5817 l2 = gen_new_label();
5818 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5819 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
5820 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
5821 tcg_gen_br(l2);
5822 gen_set_label(l1);
5823 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
5824 gen_set_label(l2);
5825 #endif
5826 }
5827 #endif /* defined(TARGET_PPC64) */
5828
5829 /*** Lookaside buffer management ***/
5830 /* Optional & supervisor only: */
5831
5832 /* tlbia */
5833 static void gen_tlbia(DisasContext *ctx)
5834 {
5835 #if defined(CONFIG_USER_ONLY)
5836 GEN_PRIV;
5837 #else
5838 CHK_HV;
5839
5840 gen_helper_tlbia(cpu_env);
5841 #endif /* defined(CONFIG_USER_ONLY) */
5842 }
5843
5844 /* tlbiel */
5845 static void gen_tlbiel(DisasContext *ctx)
5846 {
5847 #if defined(CONFIG_USER_ONLY)
5848 GEN_PRIV;
5849 #else
5850 CHK_SV;
5851
5852 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5853 #endif /* defined(CONFIG_USER_ONLY) */
5854 }
5855
5856 /* tlbie */
5857 static void gen_tlbie(DisasContext *ctx)
5858 {
5859 #if defined(CONFIG_USER_ONLY)
5860 GEN_PRIV;
5861 #else
5862 TCGv_i32 t1;
5863
5864 if (ctx->gtse) {
5865 CHK_SV; /* If gtse is set then tlbie is supervisor privileged */
5866 } else {
5867 CHK_HV; /* Else hypervisor privileged */
5868 }
5869
5870 if (NARROW_MODE(ctx)) {
5871 TCGv t0 = tcg_temp_new();
5872 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
5873 gen_helper_tlbie(cpu_env, t0);
5874 tcg_temp_free(t0);
5875 } else {
5876 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5877 }
5878 t1 = tcg_temp_new_i32();
5879 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
5880 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
5881 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
5882 tcg_temp_free_i32(t1);
5883 #endif /* defined(CONFIG_USER_ONLY) */
5884 }
5885
5886 /* tlbsync */
5887 static void gen_tlbsync(DisasContext *ctx)
5888 {
5889 #if defined(CONFIG_USER_ONLY)
5890 GEN_PRIV;
5891 #else
5892
5893 if (ctx->gtse) {
5894 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */
5895 } else {
5896 CHK_HV; /* Else hypervisor privileged */
5897 }
5898
5899 /* BookS does both ptesync and tlbsync, so make tlbsync a nop for server */
5900 if (ctx->insns_flags & PPC_BOOKE) {
5901 gen_check_tlb_flush(ctx, true);
5902 }
5903 #endif /* defined(CONFIG_USER_ONLY) */
5904 }
5905
5906 #if defined(TARGET_PPC64)
5907 /* slbia */
5908 static void gen_slbia(DisasContext *ctx)
5909 {
5910 #if defined(CONFIG_USER_ONLY)
5911 GEN_PRIV;
5912 #else
5913 uint32_t ih = (ctx->opcode >> 21) & 0x7;
5914 TCGv_i32 t0 = tcg_const_i32(ih);
5915
5916 CHK_SV;
5917
5918 gen_helper_slbia(cpu_env, t0);
5919 tcg_temp_free_i32(t0);
5920 #endif /* defined(CONFIG_USER_ONLY) */
5921 }
5922
5923 /* slbie */
5924 static void gen_slbie(DisasContext *ctx)
5925 {
5926 #if defined(CONFIG_USER_ONLY)
5927 GEN_PRIV;
5928 #else
5929 CHK_SV;
5930
5931 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5932 #endif /* defined(CONFIG_USER_ONLY) */
5933 }
5934
5935 /* slbieg */
5936 static void gen_slbieg(DisasContext *ctx)
5937 {
5938 #if defined(CONFIG_USER_ONLY)
5939 GEN_PRIV;
5940 #else
5941 CHK_SV;
5942
5943 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5944 #endif /* defined(CONFIG_USER_ONLY) */
5945 }
5946
5947 /* slbsync */
5948 static void gen_slbsync(DisasContext *ctx)
5949 {
5950 #if defined(CONFIG_USER_ONLY)
5951 GEN_PRIV;
5952 #else
5953 CHK_SV;
5954 gen_check_tlb_flush(ctx, true);
5955 #endif /* defined(CONFIG_USER_ONLY) */
5956 }
5957
5958 #endif /* defined(TARGET_PPC64) */
5959
5960 /*** External control ***/
5961 /* Optional: */
5962
5963 /* eciwx */
5964 static void gen_eciwx(DisasContext *ctx)
5965 {
5966 TCGv t0;
5967 /* Should check EAR[E] ! */
5968 gen_set_access_type(ctx, ACCESS_EXT);
5969 t0 = tcg_temp_new();
5970 gen_addr_reg_index(ctx, t0);
5971 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5972 DEF_MEMOP(MO_UL | MO_ALIGN));
5973 tcg_temp_free(t0);
5974 }
5975
5976 /* ecowx */
5977 static void gen_ecowx(DisasContext *ctx)
5978 {
5979 TCGv t0;
5980 /* Should check EAR[E] ! */
5981 gen_set_access_type(ctx, ACCESS_EXT);
5982 t0 = tcg_temp_new();
5983 gen_addr_reg_index(ctx, t0);
5984 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5985 DEF_MEMOP(MO_UL | MO_ALIGN));
5986 tcg_temp_free(t0);
5987 }
5988
5989 /* PowerPC 601 specific instructions */
5990
5991 /* abs - abs. */
5992 static void gen_abs(DisasContext *ctx)
5993 {
5994 TCGv d = cpu_gpr[rD(ctx->opcode)];
5995 TCGv a = cpu_gpr[rA(ctx->opcode)];
5996
5997 tcg_gen_abs_tl(d, a);
5998 if (unlikely(Rc(ctx->opcode) != 0)) {
5999 gen_set_Rc0(ctx, d);
6000 }
6001 }
6002
6003 /* abso - abso. */
6004 static void gen_abso(DisasContext *ctx)
6005 {
6006 TCGv d = cpu_gpr[rD(ctx->opcode)];
6007 TCGv a = cpu_gpr[rA(ctx->opcode)];
6008
6009 tcg_gen_setcondi_tl(TCG_COND_EQ, cpu_ov, a, 0x80000000);
6010 tcg_gen_abs_tl(d, a);
6011 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
6012 if (unlikely(Rc(ctx->opcode) != 0)) {
6013 gen_set_Rc0(ctx, d);
6014 }
6015 }
6016
6017 /* clcs */
6018 static void gen_clcs(DisasContext *ctx)
6019 {
6020 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
6021 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6022 tcg_temp_free_i32(t0);
6023 /* Rc=1 sets CR0 to an undefined state */
6024 }
6025
6026 /* div - div. */
6027 static void gen_div(DisasContext *ctx)
6028 {
6029 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
6030 cpu_gpr[rB(ctx->opcode)]);
6031 if (unlikely(Rc(ctx->opcode) != 0)) {
6032 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6033 }
6034 }
6035
6036 /* divo - divo. */
6037 static void gen_divo(DisasContext *ctx)
6038 {
6039 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
6040 cpu_gpr[rB(ctx->opcode)]);
6041 if (unlikely(Rc(ctx->opcode) != 0)) {
6042 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6043 }
6044 }
6045
6046 /* divs - divs. */
6047 static void gen_divs(DisasContext *ctx)
6048 {
6049 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
6050 cpu_gpr[rB(ctx->opcode)]);
6051 if (unlikely(Rc(ctx->opcode) != 0)) {
6052 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6053 }
6054 }
6055
6056 /* divso - divso. */
6057 static void gen_divso(DisasContext *ctx)
6058 {
6059 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
6060 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6061 if (unlikely(Rc(ctx->opcode) != 0)) {
6062 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6063 }
6064 }
6065
6066 /* doz - doz. */
6067 static void gen_doz(DisasContext *ctx)
6068 {
6069 TCGLabel *l1 = gen_new_label();
6070 TCGLabel *l2 = gen_new_label();
6071 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)],
6072 cpu_gpr[rB(ctx->opcode)], l1);
6073 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
6074 cpu_gpr[rA(ctx->opcode)]);
6075 tcg_gen_br(l2);
6076 gen_set_label(l1);
6077 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
6078 gen_set_label(l2);
6079 if (unlikely(Rc(ctx->opcode) != 0)) {
6080 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6081 }
6082 }
6083
6084 /* dozo - dozo. */
6085 static void gen_dozo(DisasContext *ctx)
6086 {
6087 TCGLabel *l1 = gen_new_label();
6088 TCGLabel *l2 = gen_new_label();
6089 TCGv t0 = tcg_temp_new();
6090 TCGv t1 = tcg_temp_new();
6091 TCGv t2 = tcg_temp_new();
6092 /* Start with XER OV disabled, the most likely case */
6093 tcg_gen_movi_tl(cpu_ov, 0);
6094 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rA(ctx->opcode)],
6095 cpu_gpr[rB(ctx->opcode)], l1);
6096 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
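/* Signed overflow of rB - rA: flagged below when the operands' signs differ and the result's sign matches rA (t1 ends up negative). */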
6097 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6098 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
6099 tcg_gen_andc_tl(t1, t1, t2);
6100 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
6101 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
6102 tcg_gen_movi_tl(cpu_ov, 1);
6103 tcg_gen_movi_tl(cpu_so, 1);
6104 tcg_gen_br(l2);
6105 gen_set_label(l1);
6106 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
6107 gen_set_label(l2);
6108 tcg_temp_free(t0);
6109 tcg_temp_free(t1);
6110 tcg_temp_free(t2);
6111 if (unlikely(Rc(ctx->opcode) != 0)) {
6112 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6113 }
6114 }
6115
6116 /* dozi */
6117 static void gen_dozi(DisasContext *ctx)
6118 {
6119 target_long simm = SIMM(ctx->opcode);
6120 TCGLabel *l1 = gen_new_label();
6121 TCGLabel *l2 = gen_new_label();
6122 tcg_gen_brcondi_tl(TCG_COND_GT, cpu_gpr[rA(ctx->opcode)], simm, l1);
6123 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
6124 tcg_gen_br(l2);
6125 gen_set_label(l1);
6126 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
6127 gen_set_label(l2);
6128 if (unlikely(Rc(ctx->opcode) != 0)) {
6129 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6130 }
6131 }
6132
6133 /* lscbx - lscbx. */
6134 static void gen_lscbx(DisasContext *ctx)
6135 {
6136 TCGv t0 = tcg_temp_new();
6137 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
6138 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
6139 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
6140
6141 gen_addr_reg_index(ctx, t0);
6142 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
6143 tcg_temp_free_i32(t1);
6144 tcg_temp_free_i32(t2);
6145 tcg_temp_free_i32(t3);
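/* The helper returns the number of bytes processed; it replaces the low 7 bits of XER (the byte-count field). */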
6146 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
6147 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
6148 if (unlikely(Rc(ctx->opcode) != 0)) {
6149 gen_set_Rc0(ctx, t0);
6150 }
6151 tcg_temp_free(t0);
6152 }
6153
6154 /* maskg - maskg. */
6155 static void gen_maskg(DisasContext *ctx)
6156 {
6157 TCGLabel *l1 = gen_new_label();
6158 TCGv t0 = tcg_temp_new();
6159 TCGv t1 = tcg_temp_new();
6160 TCGv t2 = tcg_temp_new();
6161 TCGv t3 = tcg_temp_new();
6162 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
6163 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6164 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
6165 tcg_gen_addi_tl(t2, t0, 1);
6166 tcg_gen_shr_tl(t2, t3, t2);
6167 tcg_gen_shr_tl(t3, t3, t1);
6168 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
6169 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
6170 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6171 gen_set_label(l1);
6172 tcg_temp_free(t0);
6173 tcg_temp_free(t1);
6174 tcg_temp_free(t2);
6175 tcg_temp_free(t3);
6176 if (unlikely(Rc(ctx->opcode) != 0)) {
6177 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6178 }
6179 }
6180
6181 /* maskir - maskir. */
6182 static void gen_maskir(DisasContext *ctx)
6183 {
6184 TCGv t0 = tcg_temp_new();
6185 TCGv t1 = tcg_temp_new();
6186 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6187 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6188 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6189 tcg_temp_free(t0);
6190 tcg_temp_free(t1);
6191 if (unlikely(Rc(ctx->opcode) != 0)) {
6192 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6193 }
6194 }
6195
6196 /* mul - mul. */
6197 static void gen_mul(DisasContext *ctx)
6198 {
6199 TCGv_i64 t0 = tcg_temp_new_i64();
6200 TCGv_i64 t1 = tcg_temp_new_i64();
6201 TCGv t2 = tcg_temp_new();
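/* mul: 32x32 -> 64-bit product; the low word goes to MQ, the high word to rD. */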
6202 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
6203 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
6204 tcg_gen_mul_i64(t0, t0, t1);
6205 tcg_gen_trunc_i64_tl(t2, t0);
6206 gen_store_spr(SPR_MQ, t2);
6207 tcg_gen_shri_i64(t1, t0, 32);
6208 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
6209 tcg_temp_free_i64(t0);
6210 tcg_temp_free_i64(t1);
6211 tcg_temp_free(t2);
6212 if (unlikely(Rc(ctx->opcode) != 0)) {
6213 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6214 }
6215 }
6216
6217 /* mulo - mulo. */
6218 static void gen_mulo(DisasContext *ctx)
6219 {
6220 TCGLabel *l1 = gen_new_label();
6221 TCGv_i64 t0 = tcg_temp_new_i64();
6222 TCGv_i64 t1 = tcg_temp_new_i64();
6223 TCGv t2 = tcg_temp_new();
6224 /* Start with XER OV disabled, the most likely case */
6225 tcg_gen_movi_tl(cpu_ov, 0);
6226 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
6227 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
6228 tcg_gen_mul_i64(t0, t0, t1);
6229 tcg_gen_trunc_i64_tl(t2, t0);
6230 gen_store_spr(SPR_MQ, t2);
6231 tcg_gen_shri_i64(t1, t0, 32);
6232 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
6233 tcg_gen_ext32s_i64(t1, t0);
6234 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
6235 tcg_gen_movi_tl(cpu_ov, 1);
6236 tcg_gen_movi_tl(cpu_so, 1);
6237 gen_set_label(l1);
6238 tcg_temp_free_i64(t0);
6239 tcg_temp_free_i64(t1);
6240 tcg_temp_free(t2);
6241 if (unlikely(Rc(ctx->opcode) != 0)) {
6242 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6243 }
6244 }
6245
6246 /* nabs - nabs. */
6247 static void gen_nabs(DisasContext *ctx)
6248 {
6249 TCGv d = cpu_gpr[rD(ctx->opcode)];
6250 TCGv a = cpu_gpr[rA(ctx->opcode)];
6251
6252 tcg_gen_abs_tl(d, a);
6253 tcg_gen_neg_tl(d, d);
6254 if (unlikely(Rc(ctx->opcode) != 0)) {
6255 gen_set_Rc0(ctx, d);
6256 }
6257 }
6258
6259 /* nabso - nabso. */
6260 static void gen_nabso(DisasContext *ctx)
6261 {
6262 TCGv d = cpu_gpr[rD(ctx->opcode)];
6263 TCGv a = cpu_gpr[rA(ctx->opcode)];
6264
6265 tcg_gen_abs_tl(d, a);
6266 tcg_gen_neg_tl(d, d);
6267 /* nabs never overflows */
6268 tcg_gen_movi_tl(cpu_ov, 0);
6269 if (unlikely(Rc(ctx->opcode) != 0)) {
6270 gen_set_Rc0(ctx, d);
6271 }
6272 }
6273
6274 /* rlmi - rlmi. */
6275 static void gen_rlmi(DisasContext *ctx)
6276 {
6277 uint32_t mb = MB(ctx->opcode);
6278 uint32_t me = ME(ctx->opcode);
6279 TCGv t0 = tcg_temp_new();
6280 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6281 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6282 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
6283 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
6284 ~MASK(mb, me));
6285 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
6286 tcg_temp_free(t0);
6287 if (unlikely(Rc(ctx->opcode) != 0)) {
6288 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6289 }
6290 }
6291
6292 /* rrib - rrib. */
6293 static void gen_rrib(DisasContext *ctx)
6294 {
6295 TCGv t0 = tcg_temp_new();
6296 TCGv t1 = tcg_temp_new();
6297 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6298 tcg_gen_movi_tl(t1, 0x80000000);
6299 tcg_gen_shr_tl(t1, t1, t0);
6300 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6301 tcg_gen_and_tl(t0, t0, t1);
6302 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
6303 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6304 tcg_temp_free(t0);
6305 tcg_temp_free(t1);
6306 if (unlikely(Rc(ctx->opcode) != 0)) {
6307 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6308 }
6309 }
6310
6311 /* sle - sle. */
6312 static void gen_sle(DisasContext *ctx)
6313 {
6314 TCGv t0 = tcg_temp_new();
6315 TCGv t1 = tcg_temp_new();
6316 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6317 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6318 tcg_gen_subfi_tl(t1, 32, t1);
6319 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6320 tcg_gen_or_tl(t1, t0, t1);
6321 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6322 gen_store_spr(SPR_MQ, t1);
6323 tcg_temp_free(t0);
6324 tcg_temp_free(t1);
6325 if (unlikely(Rc(ctx->opcode) != 0)) {
6326 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6327 }
6328 }
6329
6330 /* sleq - sleq. */
6331 static void gen_sleq(DisasContext *ctx)
6332 {
6333 TCGv t0 = tcg_temp_new();
6334 TCGv t1 = tcg_temp_new();
6335 TCGv t2 = tcg_temp_new();
6336 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6337 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
6338 tcg_gen_shl_tl(t2, t2, t0);
6339 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6340 gen_load_spr(t1, SPR_MQ);
6341 gen_store_spr(SPR_MQ, t0);
6342 tcg_gen_and_tl(t0, t0, t2);
6343 tcg_gen_andc_tl(t1, t1, t2);
6344 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6345 tcg_temp_free(t0);
6346 tcg_temp_free(t1);
6347 tcg_temp_free(t2);
6348 if (unlikely(Rc(ctx->opcode) != 0)) {
6349 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6350 }
6351 }
6352
6353 /* sliq - sliq. */
6354 static void gen_sliq(DisasContext *ctx)
6355 {
6356 int sh = SH(ctx->opcode);
6357 TCGv t0 = tcg_temp_new();
6358 TCGv t1 = tcg_temp_new();
6359 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6360 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6361 tcg_gen_or_tl(t1, t0, t1);
6362 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6363 gen_store_spr(SPR_MQ, t1);
6364 tcg_temp_free(t0);
6365 tcg_temp_free(t1);
6366 if (unlikely(Rc(ctx->opcode) != 0)) {
6367 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6368 }
6369 }
6370
6371 /* slliq - slliq. */
6372 static void gen_slliq(DisasContext *ctx)
6373 {
6374 int sh = SH(ctx->opcode);
6375 TCGv t0 = tcg_temp_new();
6376 TCGv t1 = tcg_temp_new();
6377 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6378 gen_load_spr(t1, SPR_MQ);
6379 gen_store_spr(SPR_MQ, t0);
6380 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
6381 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
6382 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6383 tcg_temp_free(t0);
6384 tcg_temp_free(t1);
6385 if (unlikely(Rc(ctx->opcode) != 0)) {
6386 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6387 }
6388 }
6389
6390 /* sllq - sllq. */
6391 static void gen_sllq(DisasContext *ctx)
6392 {
6393 TCGLabel *l1 = gen_new_label();
6394 TCGLabel *l2 = gen_new_label();
6395 TCGv t0 = tcg_temp_local_new();
6396 TCGv t1 = tcg_temp_local_new();
6397 TCGv t2 = tcg_temp_local_new();
6398 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6399 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6400 tcg_gen_shl_tl(t1, t1, t2);
6401 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
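/* rB bit 0x20 set means a shift count >= 32: the result then comes from MQ under the mask, otherwise from the shifted rS merged with MQ outside the mask. */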
6402 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6403 gen_load_spr(t0, SPR_MQ);
6404 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6405 tcg_gen_br(l2);
6406 gen_set_label(l1);
6407 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6408 gen_load_spr(t2, SPR_MQ);
6409 tcg_gen_andc_tl(t1, t2, t1);
6410 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6411 gen_set_label(l2);
6412 tcg_temp_free(t0);
6413 tcg_temp_free(t1);
6414 tcg_temp_free(t2);
6415 if (unlikely(Rc(ctx->opcode) != 0)) {
6416 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6417 }
6418 }
6419
6420 /* slq - slq. */
6421 static void gen_slq(DisasContext *ctx)
6422 {
6423 TCGLabel *l1 = gen_new_label();
6424 TCGv t0 = tcg_temp_new();
6425 TCGv t1 = tcg_temp_new();
6426 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6427 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6428 tcg_gen_subfi_tl(t1, 32, t1);
6429 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6430 tcg_gen_or_tl(t1, t0, t1);
6431 gen_store_spr(SPR_MQ, t1);
6432 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
6433 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6434 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6435 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
6436 gen_set_label(l1);
6437 tcg_temp_free(t0);
6438 tcg_temp_free(t1);
6439 if (unlikely(Rc(ctx->opcode) != 0)) {
6440 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6441 }
6442 }
6443
6444 /* sraiq - sraiq. */
6445 static void gen_sraiq(DisasContext *ctx)
6446 {
6447 int sh = SH(ctx->opcode);
6448 TCGLabel *l1 = gen_new_label();
6449 TCGv t0 = tcg_temp_new();
6450 TCGv t1 = tcg_temp_new();
6451 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6452 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6453 tcg_gen_or_tl(t0, t0, t1);
6454 gen_store_spr(SPR_MQ, t0);
6455 tcg_gen_movi_tl(cpu_ca, 0);
6456 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6457 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
6458 tcg_gen_movi_tl(cpu_ca, 1);
6459 gen_set_label(l1);
6460 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
6461 tcg_temp_free(t0);
6462 tcg_temp_free(t1);
6463 if (unlikely(Rc(ctx->opcode) != 0)) {
6464 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6465 }
6466 }
6467
6468 /* sraq - sraq. */
6469 static void gen_sraq(DisasContext *ctx)
6470 {
6471 TCGLabel *l1 = gen_new_label();
6472 TCGLabel *l2 = gen_new_label();
6473 TCGv t0 = tcg_temp_new();
6474 TCGv t1 = tcg_temp_local_new();
6475 TCGv t2 = tcg_temp_local_new();
6476 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6477 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6478 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
6479 tcg_gen_subfi_tl(t2, 32, t2);
6480 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
6481 tcg_gen_or_tl(t0, t0, t2);
6482 gen_store_spr(SPR_MQ, t0);
6483 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
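/* rB bit 0x20 set means a shift count >= 32: the result is pure sign extension and CA depends only on rS being negative and non-zero. */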
6484 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6485 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
6486 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
6487 gen_set_label(l1);
6488 tcg_temp_free(t0);
6489 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
6490 tcg_gen_movi_tl(cpu_ca, 0);
6491 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
6492 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
6493 tcg_gen_movi_tl(cpu_ca, 1);
6494 gen_set_label(l2);
6495 tcg_temp_free(t1);
6496 tcg_temp_free(t2);
6497 if (unlikely(Rc(ctx->opcode) != 0)) {
6498 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6499 }
6500 }
6501
6502 /* sre - sre. */
6503 static void gen_sre(DisasContext *ctx)
6504 {
6505 TCGv t0 = tcg_temp_new();
6506 TCGv t1 = tcg_temp_new();
6507 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6508 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6509 tcg_gen_subfi_tl(t1, 32, t1);
6510 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6511 tcg_gen_or_tl(t1, t0, t1);
6512 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6513 gen_store_spr(SPR_MQ, t1);
6514 tcg_temp_free(t0);
6515 tcg_temp_free(t1);
6516 if (unlikely(Rc(ctx->opcode) != 0)) {
6517 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6518 }
6519 }
6520
6521 /* srea - srea. */
6522 static void gen_srea(DisasContext *ctx)
6523 {
6524 TCGv t0 = tcg_temp_new();
6525 TCGv t1 = tcg_temp_new();
6526 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6527 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6528 gen_store_spr(SPR_MQ, t0);
6529 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
6530 tcg_temp_free(t0);
6531 tcg_temp_free(t1);
6532 if (unlikely(Rc(ctx->opcode) != 0)) {
6533 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6534 }
6535 }
6536
6537 /* sreq */
6538 static void gen_sreq(DisasContext *ctx)
6539 {
6540 TCGv t0 = tcg_temp_new();
6541 TCGv t1 = tcg_temp_new();
6542 TCGv t2 = tcg_temp_new();
6543 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6544 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6545 tcg_gen_shr_tl(t1, t1, t0);
6546 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6547 gen_load_spr(t2, SPR_MQ);
6548 gen_store_spr(SPR_MQ, t0);
6549 tcg_gen_and_tl(t0, t0, t1);
6550 tcg_gen_andc_tl(t2, t2, t1);
6551 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
6552 tcg_temp_free(t0);
6553 tcg_temp_free(t1);
6554 tcg_temp_free(t2);
6555 if (unlikely(Rc(ctx->opcode) != 0)) {
6556 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6557 }
6558 }
6559
6560 /* sriq */
6561 static void gen_sriq(DisasContext *ctx)
6562 {
6563 int sh = SH(ctx->opcode);
6564 TCGv t0 = tcg_temp_new();
6565 TCGv t1 = tcg_temp_new();
6566 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6567 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6568 tcg_gen_or_tl(t1, t0, t1);
6569 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6570 gen_store_spr(SPR_MQ, t1);
6571 tcg_temp_free(t0);
6572 tcg_temp_free(t1);
6573 if (unlikely(Rc(ctx->opcode) != 0)) {
6574 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6575 }
6576 }
6577
6578 /* srliq */
6579 static void gen_srliq(DisasContext *ctx)
6580 {
6581 int sh = SH(ctx->opcode);
6582 TCGv t0 = tcg_temp_new();
6583 TCGv t1 = tcg_temp_new();
6584 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6585 gen_load_spr(t1, SPR_MQ);
6586 gen_store_spr(SPR_MQ, t0);
6587 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
6588 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
6589 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6590 tcg_temp_free(t0);
6591 tcg_temp_free(t1);
6592 if (unlikely(Rc(ctx->opcode) != 0)) {
6593 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6594 }
6595 }
6596
6597 /* srlq */
6598 static void gen_srlq(DisasContext *ctx)
6599 {
6600 TCGLabel *l1 = gen_new_label();
6601 TCGLabel *l2 = gen_new_label();
6602 TCGv t0 = tcg_temp_local_new();
6603 TCGv t1 = tcg_temp_local_new();
6604 TCGv t2 = tcg_temp_local_new();
6605 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6606 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6607 tcg_gen_shr_tl(t2, t1, t2);
6608 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6609 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6610 gen_load_spr(t0, SPR_MQ);
6611 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
6612 tcg_gen_br(l2);
6613 gen_set_label(l1);
6614 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6615 tcg_gen_and_tl(t0, t0, t2);
6616 gen_load_spr(t1, SPR_MQ);
6617 tcg_gen_andc_tl(t1, t1, t2);
6618 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6619 gen_set_label(l2);
6620 tcg_temp_free(t0);
6621 tcg_temp_free(t1);
6622 tcg_temp_free(t2);
6623 if (unlikely(Rc(ctx->opcode) != 0)) {
6624 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6625 }
6626 }
6627
6628 /* srq */
6629 static void gen_srq(DisasContext *ctx)
6630 {
6631 TCGLabel *l1 = gen_new_label();
6632 TCGv t0 = tcg_temp_new();
6633 TCGv t1 = tcg_temp_new();
6634 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6635 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6636 tcg_gen_subfi_tl(t1, 32, t1);
6637 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6638 tcg_gen_or_tl(t1, t0, t1);
6639 gen_store_spr(SPR_MQ, t1);
6640 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
6641 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6642 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6643 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
6644 gen_set_label(l1);
6645 tcg_temp_free(t0);
6646 tcg_temp_free(t1);
6647 if (unlikely(Rc(ctx->opcode) != 0)) {
6648 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6649 }
6650 }
6651
6652 /* PowerPC 602 specific instructions */
6653
6654 /* dsa */
6655 static void gen_dsa(DisasContext *ctx)
6656 {
6657 /* XXX: TODO */
6658 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6659 }
6660
6661 /* esa */
6662 static void gen_esa(DisasContext *ctx)
6663 {
6664 /* XXX: TODO */
6665 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6666 }
6667
6668 /* mfrom */
6669 static void gen_mfrom(DisasContext *ctx)
6670 {
6671 #if defined(CONFIG_USER_ONLY)
6672 GEN_PRIV;
6673 #else
6674 CHK_SV;
6675 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6676 #endif /* defined(CONFIG_USER_ONLY) */
6677 }
6678
6679 /* 602 - 603 - G2 TLB management */
6680
6681 /* tlbld */
6682 static void gen_tlbld_6xx(DisasContext *ctx)
6683 {
6684 #if defined(CONFIG_USER_ONLY)
6685 GEN_PRIV;
6686 #else
6687 CHK_SV;
6688 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6689 #endif /* defined(CONFIG_USER_ONLY) */
6690 }
6691
6692 /* tlbli */
6693 static void gen_tlbli_6xx(DisasContext *ctx)
6694 {
6695 #if defined(CONFIG_USER_ONLY)
6696 GEN_PRIV;
6697 #else
6698 CHK_SV;
6699 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6700 #endif /* defined(CONFIG_USER_ONLY) */
6701 }
6702
6703 /* 74xx TLB management */
6704
6705 /* tlbld */
6706 static void gen_tlbld_74xx(DisasContext *ctx)
6707 {
6708 #if defined(CONFIG_USER_ONLY)
6709 GEN_PRIV;
6710 #else
6711 CHK_SV;
6712 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6713 #endif /* defined(CONFIG_USER_ONLY) */
6714 }
6715
6716 /* tlbli */
6717 static void gen_tlbli_74xx(DisasContext *ctx)
6718 {
6719 #if defined(CONFIG_USER_ONLY)
6720 GEN_PRIV;
6721 #else
6722 CHK_SV;
6723 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6724 #endif /* defined(CONFIG_USER_ONLY) */
6725 }
6726
6727 /* POWER instructions not in PowerPC 601 */
6728
6729 /* clf */
6730 static void gen_clf(DisasContext *ctx)
6731 {
6732 /* Cache line flush: implemented as no-op */
6733 }
6734
6735 /* cli */
6736 static void gen_cli(DisasContext *ctx)
6737 {
6738 #if defined(CONFIG_USER_ONLY)
6739 GEN_PRIV;
6740 #else
6741 /* Cache line invalidate: privileged and treated as no-op */
6742 CHK_SV;
6743 #endif /* defined(CONFIG_USER_ONLY) */
6744 }
6745
6746 /* dclst */
6747 static void gen_dclst(DisasContext *ctx)
6748 {
6749 /* Data cache line store: treated as no-op */
6750 }
6751
6752 static void gen_mfsri(DisasContext *ctx)
6753 {
6754 #if defined(CONFIG_USER_ONLY)
6755 GEN_PRIV;
6756 #else
6757 int ra = rA(ctx->opcode);
6758 int rd = rD(ctx->opcode);
6759 TCGv t0;
6760
6761 CHK_SV;
6762 t0 = tcg_temp_new();
6763 gen_addr_reg_index(ctx, t0);
6764 tcg_gen_extract_tl(t0, t0, 28, 4);
6765 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
6766 tcg_temp_free(t0);
6767 if (ra != 0 && ra != rd) {
6768 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
6769 }
6770 #endif /* defined(CONFIG_USER_ONLY) */
6771 }
6772
6773 static void gen_rac(DisasContext *ctx)
6774 {
6775 #if defined(CONFIG_USER_ONLY)
6776 GEN_PRIV;
6777 #else
6778 TCGv t0;
6779
6780 CHK_SV;
6781 t0 = tcg_temp_new();
6782 gen_addr_reg_index(ctx, t0);
6783 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6784 tcg_temp_free(t0);
6785 #endif /* defined(CONFIG_USER_ONLY) */
6786 }
6787
6788 static void gen_rfsvc(DisasContext *ctx)
6789 {
6790 #if defined(CONFIG_USER_ONLY)
6791 GEN_PRIV;
6792 #else
6793 CHK_SV;
6794
6795 gen_helper_rfsvc(cpu_env);
6796 ctx->base.is_jmp = DISAS_EXIT;
6797 #endif /* defined(CONFIG_USER_ONLY) */
6798 }
6799
6800 /* svc is not implemented for now */
6801
6802 /* BookE specific instructions */
6803
6804 /* XXX: not implemented on 440 ? */
6805 static void gen_mfapidi(DisasContext *ctx)
6806 {
6807 /* XXX: TODO */
6808 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6809 }
6810
6811 /* XXX: not implemented on 440 ? */
6812 static void gen_tlbiva(DisasContext *ctx)
6813 {
6814 #if defined(CONFIG_USER_ONLY)
6815 GEN_PRIV;
6816 #else
6817 TCGv t0;
6818
6819 CHK_SV;
6820 t0 = tcg_temp_new();
6821 gen_addr_reg_index(ctx, t0);
6822 gen_helper_tlbiva(cpu_env, t0);
6823 tcg_temp_free(t0);
6824 #endif /* defined(CONFIG_USER_ONLY) */
6825 }
6826
6827 /* All 405 MAC instructions are translated here */
6828 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
6829 int ra, int rb, int rt, int Rc)
6830 {
6831 TCGv t0, t1;
6832
6833 t0 = tcg_temp_local_new();
6834 t1 = tcg_temp_local_new();
6835
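/* opc3 bit 0 selects signed (1) vs unsigned (0) halfword operands; bits 2-3 pick the halves: 0x4 = rA low * rB high (chw), 0x0 = both high (hhw), 0xC = both low (lhw). Bit 1 (the saturate flag) is masked off here and handled later. */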
6836 switch (opc3 & 0x0D) {
6837 case 0x05:
6838 /* macchw - macchw. - macchwo - macchwo. */
6839 /* macchws - macchws. - macchwso - macchwso. */
6840 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
6841 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
6842 /* mulchw - mulchw. */
6843 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
6844 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
6845 tcg_gen_ext16s_tl(t1, t1);
6846 break;
6847 case 0x04:
6848 /* macchwu - macchwu. - macchwuo - macchwuo. */
6849 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
6850 /* mulchwu - mulchwu. */
6851 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
6852 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
6853 tcg_gen_ext16u_tl(t1, t1);
6854 break;
6855 case 0x01:
6856 /* machhw - machhw. - machhwo - machhwo. */
6857 /* machhws - machhws. - machhwso - machhwso. */
6858 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
6859 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
6860 /* mulhhw - mulhhw. */
6861 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
6862 tcg_gen_ext16s_tl(t0, t0);
6863 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
6864 tcg_gen_ext16s_tl(t1, t1);
6865 break;
6866 case 0x00:
6867 /* machhwu - machhwu. - machhwuo - machhwuo. */
6868 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
6869 /* mulhhwu - mulhhwu. */
6870 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
6871 tcg_gen_ext16u_tl(t0, t0);
6872 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
6873 tcg_gen_ext16u_tl(t1, t1);
6874 break;
6875 case 0x0D:
6876 /* maclhw - maclhw. - maclhwo - maclhwo. */
6877 /* maclhws - maclhws. - maclhwso - maclhwso. */
6878 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
6879 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
6880 /* mullhw - mullhw. */
6881 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
6882 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
6883 break;
6884 case 0x0C:
6885 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
6886 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
6887 /* mullhwu - mullhwu. */
6888 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
6889 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
6890 break;
6891 }
6892 if (opc2 & 0x04) {
6893 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
6894 tcg_gen_mul_tl(t1, t0, t1);
6895 if (opc2 & 0x02) {
6896 /* nmultiply-and-accumulate (0x0E) */
6897 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
6898 } else {
6899 /* multiply-and-accumulate (0x0C) */
6900 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
6901 }
6902
6903 if (opc3 & 0x12) {
6904 /* Check overflow and/or saturate */
6905 TCGLabel *l1 = gen_new_label();
6906
6907 if (opc3 & 0x10) {
6908 /* Start with XER OV disabled, the most likely case */
6909 tcg_gen_movi_tl(cpu_ov, 0);
6910 }
6911 if (opc3 & 0x01) {
6912 /* Signed */
6913 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
6914 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
6915 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
6916 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
6917 if (opc3 & 0x02) {
6918 /* Saturate */
6919 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
6920 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
6921 }
6922 } else {
6923 /* Unsigned */
6924 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
6925 if (opc3 & 0x02) {
6926 /* Saturate */
6927 tcg_gen_movi_tl(t0, UINT32_MAX);
6928 }
6929 }
6930 if (opc3 & 0x10) {
6931 /* Check overflow */
6932 tcg_gen_movi_tl(cpu_ov, 1);
6933 tcg_gen_movi_tl(cpu_so, 1);
6934 }
6935 gen_set_label(l1);
6936 tcg_gen_mov_tl(cpu_gpr[rt], t0);
6937 }
6938 } else {
6939 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
6940 }
6941 tcg_temp_free(t0);
6942 tcg_temp_free(t1);
6943 if (unlikely(Rc != 0)) {
6944 /* Update Rc0 */
6945 gen_set_Rc0(ctx, cpu_gpr[rt]);
6946 }
6947 }
6948
6949 #define GEN_MAC_HANDLER(name, opc2, opc3) \
6950 static void glue(gen_, name)(DisasContext *ctx) \
6951 { \
6952 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
6953 rD(ctx->opcode), Rc(ctx->opcode)); \
6954 }
6955
6956 /* macchw - macchw. */
6957 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
6958 /* macchwo - macchwo. */
6959 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
6960 /* macchws - macchws. */
6961 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
6962 /* macchwso - macchwso. */
6963 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
6964 /* macchwsu - macchwsu. */
6965 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
6966 /* macchwsuo - macchwsuo. */
6967 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
6968 /* macchwu - macchwu. */
6969 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
6970 /* macchwuo - macchwuo. */
6971 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
6972 /* machhw - machhw. */
6973 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
6974 /* machhwo - machhwo. */
6975 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
6976 /* machhws - machhws. */
6977 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
6978 /* machhwso - machhwso. */
6979 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
6980 /* machhwsu - machhwsu. */
6981 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
6982 /* machhwsuo - machhwsuo. */
6983 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
6984 /* machhwu - machhwu. */
6985 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
6986 /* machhwuo - machhwuo. */
6987 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
6988 /* maclhw - maclhw. */
6989 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
6990 /* maclhwo - maclhwo. */
6991 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
6992 /* maclhws - maclhws. */
6993 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
6994 /* maclhwso - maclhwso. */
6995 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
6996 /* maclhwu - maclhwu. */
6997 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
6998 /* maclhwuo - maclhwuo. */
6999 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
7000 /* maclhwsu - maclhwsu. */
7001 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
7002 /* maclhwsuo - maclhwsuo. */
7003 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
7004 /* nmacchw - nmacchw. */
7005 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
7006 /* nmacchwo - nmacchwo. */
7007 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
7008 /* nmacchws - nmacchws. */
7009 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
7010 /* nmacchwso - nmacchwso. */
7011 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
7012 /* nmachhw - nmachhw. */
7013 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
7014 /* nmachhwo - nmachhwo. */
7015 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
7016 /* nmachhws - nmachhws. */
7017 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
7018 /* nmachhwso - nmachhwso. */
7019 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
7020 /* nmaclhw - nmaclhw. */
7021 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
7022 /* nmaclhwo - nmaclhwo. */
7023 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
7024 /* nmaclhws - nmaclhws. */
7025 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
7026 /* nmaclhwso - nmaclhwso. */
7027 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
7028
7029 /* mulchw - mulchw. */
7030 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
7031 /* mulchwu - mulchwu. */
7032 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
7033 /* mulhhw - mulhhw. */
7034 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
7035 /* mulhhwu - mulhhwu. */
7036 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
7037 /* mullhw - mullhw. */
7038 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
7039 /* mullhwu - mullhwu. */
7040 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
7041
7042 /* mfdcr */
7043 static void gen_mfdcr(DisasContext *ctx)
7044 {
7045 #if defined(CONFIG_USER_ONLY)
7046 GEN_PRIV;
7047 #else
7048 TCGv dcrn;
7049
7050 CHK_SV;
7051 dcrn = tcg_const_tl(SPR(ctx->opcode));
7052 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
7053 tcg_temp_free(dcrn);
7054 #endif /* defined(CONFIG_USER_ONLY) */
7055 }
7056
7057 /* mtdcr */
7058 static void gen_mtdcr(DisasContext *ctx)
7059 {
7060 #if defined(CONFIG_USER_ONLY)
7061 GEN_PRIV;
7062 #else
7063 TCGv dcrn;
7064
7065 CHK_SV;
7066 dcrn = tcg_const_tl(SPR(ctx->opcode));
7067 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
7068 tcg_temp_free(dcrn);
7069 #endif /* defined(CONFIG_USER_ONLY) */
7070 }
7071
7072 /* mfdcrx */
7073 /* XXX: not implemented on 440 ? */
7074 static void gen_mfdcrx(DisasContext *ctx)
7075 {
7076 #if defined(CONFIG_USER_ONLY)
7077 GEN_PRIV;
7078 #else
7079 CHK_SV;
7080 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
7081 cpu_gpr[rA(ctx->opcode)]);
7082 /* Note: setting the Rc bit leaves Rc0 (CR0) in an undefined state */
7083 #endif /* defined(CONFIG_USER_ONLY) */
7084 }
7085
7086 /* mtdcrx */
7087 /* XXX: not implemented on 440 ? */
7088 static void gen_mtdcrx(DisasContext *ctx)
7089 {
7090 #if defined(CONFIG_USER_ONLY)
7091 GEN_PRIV;
7092 #else
7093 CHK_SV;
7094 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
7095 cpu_gpr[rS(ctx->opcode)]);
7096 /* Note: setting the Rc bit leaves Rc0 (CR0) in an undefined state */
7097 #endif /* defined(CONFIG_USER_ONLY) */
7098 }
7099
7100 /* mfdcrux (PPC 460) : user-mode access to DCR */
7101 static void gen_mfdcrux(DisasContext *ctx)
7102 {
7103 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
7104 cpu_gpr[rA(ctx->opcode)]);
7105 /* Note: setting the Rc bit leaves Rc0 (CR0) in an undefined state */
7106 }
7107
7108 /* mtdcrux (PPC 460) : user-mode access to DCR */
7109 static void gen_mtdcrux(DisasContext *ctx)
7110 {
7111 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
7112 cpu_gpr[rS(ctx->opcode)]);
7113 /* Note: setting the Rc bit leaves Rc0 (CR0) in an undefined state */
7114 }
7115
7116 /* dccci */
7117 static void gen_dccci(DisasContext *ctx)
7118 {
7119 CHK_SV;
7120 /* interpreted as no-op */
7121 }
7122
7123 /* dcread */
7124 static void gen_dcread(DisasContext *ctx)
7125 {
7126 #if defined(CONFIG_USER_ONLY)
7127 GEN_PRIV;
7128 #else
7129 TCGv EA, val;
7130
7131 CHK_SV;
7132 gen_set_access_type(ctx, ACCESS_CACHE);
7133 EA = tcg_temp_new();
7134 gen_addr_reg_index(ctx, EA);
7135 val = tcg_temp_new();
7136 gen_qemu_ld32u(ctx, val, EA);
7137 tcg_temp_free(val);
7138 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
7139 tcg_temp_free(EA);
7140 #endif /* defined(CONFIG_USER_ONLY) */
7141 }
7142
7143 /* icbt */
7144 static void gen_icbt_40x(DisasContext *ctx)
7145 {
7146 /*
7147 * interpreted as no-op
7148 * XXX: the specification says this is treated as a load by the MMU but
7149 * does not generate any exception
7150 */
7151 }
7152
7153 /* iccci */
7154 static void gen_iccci(DisasContext *ctx)
7155 {
7156 CHK_SV;
7157 /* interpreted as no-op */
7158 }
7159
7160 /* icread */
7161 static void gen_icread(DisasContext *ctx)
7162 {
7163 CHK_SV;
7164 /* interpreted as no-op */
7165 }
7166
7167 /* rfci (supervisor only) */
7168 static void gen_rfci_40x(DisasContext *ctx)
7169 {
7170 #if defined(CONFIG_USER_ONLY)
7171 GEN_PRIV;
7172 #else
7173 CHK_SV;
7174 /* Restore CPU state */
7175 gen_helper_40x_rfci(cpu_env);
7176 ctx->base.is_jmp = DISAS_EXIT;
7177 #endif /* defined(CONFIG_USER_ONLY) */
7178 }
7179
7180 static void gen_rfci(DisasContext *ctx)
7181 {
7182 #if defined(CONFIG_USER_ONLY)
7183 GEN_PRIV;
7184 #else
7185 CHK_SV;
7186 /* Restore CPU state */
7187 gen_helper_rfci(cpu_env);
7188 ctx->base.is_jmp = DISAS_EXIT;
7189 #endif /* defined(CONFIG_USER_ONLY) */
7190 }
7191
7192 /* BookE specific */
7193
7194 /* XXX: not implemented on 440 ? */
7195 static void gen_rfdi(DisasContext *ctx)
7196 {
7197 #if defined(CONFIG_USER_ONLY)
7198 GEN_PRIV;
7199 #else
7200 CHK_SV;
7201 /* Restore CPU state */
7202 gen_helper_rfdi(cpu_env);
7203 ctx->base.is_jmp = DISAS_EXIT;
7204 #endif /* defined(CONFIG_USER_ONLY) */
7205 }
7206
7207 /* XXX: not implemented on 440 ? */
7208 static void gen_rfmci(DisasContext *ctx)
7209 {
7210 #if defined(CONFIG_USER_ONLY)
7211 GEN_PRIV;
7212 #else
7213 CHK_SV;
7214 /* Restore CPU state */
7215 gen_helper_rfmci(cpu_env);
7216 ctx->base.is_jmp = DISAS_EXIT;
7217 #endif /* defined(CONFIG_USER_ONLY) */
7218 }
7219
7220 /* TLB management - PowerPC 405 implementation */
7221
7222 /* tlbre */
7223 static void gen_tlbre_40x(DisasContext *ctx)
7224 {
7225 #if defined(CONFIG_USER_ONLY)
7226 GEN_PRIV;
7227 #else
7228 CHK_SV;
7229 switch (rB(ctx->opcode)) {
7230 case 0:
7231 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
7232 cpu_gpr[rA(ctx->opcode)]);
7233 break;
7234 case 1:
7235 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
7236 cpu_gpr[rA(ctx->opcode)]);
7237 break;
7238 default:
7239 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7240 break;
7241 }
7242 #endif /* defined(CONFIG_USER_ONLY) */
7243 }
7244
7245 /* tlbsx - tlbsx. */
7246 static void gen_tlbsx_40x(DisasContext *ctx)
7247 {
7248 #if defined(CONFIG_USER_ONLY)
7249 GEN_PRIV;
7250 #else
7251 TCGv t0;
7252
7253 CHK_SV;
7254 t0 = tcg_temp_new();
7255 gen_addr_reg_index(ctx, t0);
7256 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
7257 tcg_temp_free(t0);
7258 if (Rc(ctx->opcode)) {
7259 TCGLabel *l1 = gen_new_label();
7260 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
7261 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
7262 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
7263 gen_set_label(l1);
7264 }
7265 #endif /* defined(CONFIG_USER_ONLY) */
7266 }
7267
7268 /* tlbwe */
7269 static void gen_tlbwe_40x(DisasContext *ctx)
7270 {
7271 #if defined(CONFIG_USER_ONLY)
7272 GEN_PRIV;
7273 #else
7274 CHK_SV;
7275
7276 switch (rB(ctx->opcode)) {
7277 case 0:
7278 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
7279 cpu_gpr[rS(ctx->opcode)]);
7280 break;
7281 case 1:
7282 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
7283 cpu_gpr[rS(ctx->opcode)]);
7284 break;
7285 default:
7286 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7287 break;
7288 }
7289 #endif /* defined(CONFIG_USER_ONLY) */
7290 }
7291
7292 /* TLB management - PowerPC 440 implementation */
7293
7294 /* tlbre */
7295 static void gen_tlbre_440(DisasContext *ctx)
7296 {
7297 #if defined(CONFIG_USER_ONLY)
7298 GEN_PRIV;
7299 #else
7300 CHK_SV;
7301
7302 switch (rB(ctx->opcode)) {
7303 case 0:
7304 case 1:
7305 case 2:
7306 {
7307 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
7308 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
7309 t0, cpu_gpr[rA(ctx->opcode)]);
7310 tcg_temp_free_i32(t0);
7311 }
7312 break;
7313 default:
7314 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7315 break;
7316 }
7317 #endif /* defined(CONFIG_USER_ONLY) */
7318 }
7319
7320 /* tlbsx - tlbsx. */
7321 static void gen_tlbsx_440(DisasContext *ctx)
7322 {
7323 #if defined(CONFIG_USER_ONLY)
7324 GEN_PRIV;
7325 #else
7326 TCGv t0;
7327
7328 CHK_SV;
7329 t0 = tcg_temp_new();
7330 gen_addr_reg_index(ctx, t0);
7331 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
7332 tcg_temp_free(t0);
7333 if (Rc(ctx->opcode)) {
7334 TCGLabel *l1 = gen_new_label();
7335 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
7336 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
7337 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
7338 gen_set_label(l1);
7339 }
7340 #endif /* defined(CONFIG_USER_ONLY) */
7341 }
7342
7343 /* tlbwe */
7344 static void gen_tlbwe_440(DisasContext *ctx)
7345 {
7346 #if defined(CONFIG_USER_ONLY)
7347 GEN_PRIV;
7348 #else
7349 CHK_SV;
7350 switch (rB(ctx->opcode)) {
7351 case 0:
7352 case 1:
7353 case 2:
7354 {
7355 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
7356 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
7357 cpu_gpr[rS(ctx->opcode)]);
7358 tcg_temp_free_i32(t0);
7359 }
7360 break;
7361 default:
7362 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7363 break;
7364 }
7365 #endif /* defined(CONFIG_USER_ONLY) */
7366 }
7367
7368 /* TLB management - PowerPC BookE 2.06 implementation */
7369
7370 /* tlbre */
7371 static void gen_tlbre_booke206(DisasContext *ctx)
7372 {
7373 #if defined(CONFIG_USER_ONLY)
7374 GEN_PRIV;
7375 #else
7376 CHK_SV;
7377 gen_helper_booke206_tlbre(cpu_env);
7378 #endif /* defined(CONFIG_USER_ONLY) */
7379 }
7380
7381 /* tlbsx - tlbsx. */
7382 static void gen_tlbsx_booke206(DisasContext *ctx)
7383 {
7384 #if defined(CONFIG_USER_ONLY)
7385 GEN_PRIV;
7386 #else
7387 TCGv t0;
7388
7389 CHK_SV;
7390 if (rA(ctx->opcode)) {
7391 t0 = tcg_temp_new();
7392 tcg_gen_mov_tl(t0, cpu_gpr[rA(ctx->opcode)]);
7393 } else {
7394 t0 = tcg_const_tl(0);
7395 }
7396
7397 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
7398 gen_helper_booke206_tlbsx(cpu_env, t0);
7399 tcg_temp_free(t0);
7400 #endif /* defined(CONFIG_USER_ONLY) */
7401 }
7402
7403 /* tlbwe */
7404 static void gen_tlbwe_booke206(DisasContext *ctx)
7405 {
7406 #if defined(CONFIG_USER_ONLY)
7407 GEN_PRIV;
7408 #else
7409 CHK_SV;
7410 gen_helper_booke206_tlbwe(cpu_env);
7411 #endif /* defined(CONFIG_USER_ONLY) */
7412 }
7413
7414 static void gen_tlbivax_booke206(DisasContext *ctx)
7415 {
7416 #if defined(CONFIG_USER_ONLY)
7417 GEN_PRIV;
7418 #else
7419 TCGv t0;
7420
7421 CHK_SV;
7422 t0 = tcg_temp_new();
7423 gen_addr_reg_index(ctx, t0);
7424 gen_helper_booke206_tlbivax(cpu_env, t0);
7425 tcg_temp_free(t0);
7426 #endif /* defined(CONFIG_USER_ONLY) */
7427 }
7428
7429 static void gen_tlbilx_booke206(DisasContext *ctx)
7430 {
7431 #if defined(CONFIG_USER_ONLY)
7432 GEN_PRIV;
7433 #else
7434 TCGv t0;
7435
7436 CHK_SV;
7437 t0 = tcg_temp_new();
7438 gen_addr_reg_index(ctx, t0);
7439
7440 switch ((ctx->opcode >> 21) & 0x3) {
7441 case 0:
7442 gen_helper_booke206_tlbilx0(cpu_env, t0);
7443 break;
7444 case 1:
7445 gen_helper_booke206_tlbilx1(cpu_env, t0);
7446 break;
7447 case 3:
7448 gen_helper_booke206_tlbilx3(cpu_env, t0);
7449 break;
7450 default:
7451 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7452 break;
7453 }
7454
7455 tcg_temp_free(t0);
7456 #endif /* defined(CONFIG_USER_ONLY) */
7457 }
7458
7459
7460 /* wrtee */
7461 static void gen_wrtee(DisasContext *ctx)
7462 {
7463 #if defined(CONFIG_USER_ONLY)
7464 GEN_PRIV;
7465 #else
7466 TCGv t0;
7467
7468 CHK_SV;
7469 t0 = tcg_temp_new();
7470 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
7471 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
7472 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
7473 tcg_temp_free(t0);
7474 /*
7475 * Stop translation to have a chance to raise an exception if we
7476 * just set msr_ee to 1
7477 */
7478 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
7479 #endif /* defined(CONFIG_USER_ONLY) */
7480 }
7481
7482 /* wrteei */
7483 static void gen_wrteei(DisasContext *ctx)
7484 {
7485 #if defined(CONFIG_USER_ONLY)
7486 GEN_PRIV;
7487 #else
7488 CHK_SV;
7489 if (ctx->opcode & 0x00008000) {
7490 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
7491 /* Stop translation to have a chance to raise an exception */
7492 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
7493 } else {
7494 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
7495 }
7496 #endif /* defined(CONFIG_USER_ONLY) */
7497 }
7498
7499 /* PowerPC 440 specific instructions */
7500
7501 /* dlmzb */
7502 static void gen_dlmzb(DisasContext *ctx)
7503 {
7504 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
7505 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
7506 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
7507 tcg_temp_free_i32(t0);
7508 }
7509
7510 /* mbar replaces eieio on 440 */
7511 static void gen_mbar(DisasContext *ctx)
7512 {
7513 /* interpreted as no-op */
7514 }
7515
7516 /* msync replaces sync on 440 */
7517 static void gen_msync_4xx(DisasContext *ctx)
7518 {
7519 /* Only e500 seems to treat reserved bits as invalid */
7520 if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
7521 (ctx->opcode & 0x03FFF801)) {
7522 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7523 }
7524 /* otherwise interpreted as no-op */
7525 }
7526
7527 /* icbt */
7528 static void gen_icbt_440(DisasContext *ctx)
7529 {
7530 /*
7531 * interpreted as no-op
7532 * XXX: the specification says this is treated as a load by the MMU but
7533 * does not generate any exception
7534 */
7535 }
7536
7537 /* Embedded.Processor Control */
7538
7539 static void gen_msgclr(DisasContext *ctx)
7540 {
7541 #if defined(CONFIG_USER_ONLY)
7542 GEN_PRIV;
7543 #else
7544 CHK_HV;
7545 if (is_book3s_arch2x(ctx)) {
7546 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7547 } else {
7548 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7549 }
7550 #endif /* defined(CONFIG_USER_ONLY) */
7551 }
7552
7553 static void gen_msgsnd(DisasContext *ctx)
7554 {
7555 #if defined(CONFIG_USER_ONLY)
7556 GEN_PRIV;
7557 #else
7558 CHK_HV;
7559 if (is_book3s_arch2x(ctx)) {
7560 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]);
7561 } else {
7562 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
7563 }
7564 #endif /* defined(CONFIG_USER_ONLY) */
7565 }
7566
7567 #if defined(TARGET_PPC64)
7568 static void gen_msgclrp(DisasContext *ctx)
7569 {
7570 #if defined(CONFIG_USER_ONLY)
7571 GEN_PRIV;
7572 #else
7573 CHK_SV;
7574 gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7575 #endif /* defined(CONFIG_USER_ONLY) */
7576 }
7577
7578 static void gen_msgsndp(DisasContext *ctx)
7579 {
7580 #if defined(CONFIG_USER_ONLY)
7581 GEN_PRIV;
7582 #else
7583 CHK_SV;
7584 gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7585 #endif /* defined(CONFIG_USER_ONLY) */
7586 }
7587 #endif
7588
7589 static void gen_msgsync(DisasContext *ctx)
7590 {
7591 #if defined(CONFIG_USER_ONLY)
7592 GEN_PRIV;
7593 #else
7594 CHK_HV;
7595 #endif /* defined(CONFIG_USER_ONLY) */
7596 /* interpreted as no-op */
7597 }
7598
7599 #if defined(TARGET_PPC64)
7600 static void gen_maddld(DisasContext *ctx)
7601 {
7602 TCGv_i64 t1 = tcg_temp_new_i64();
7603
7604 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7605 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
7606 tcg_temp_free_i64(t1);
7607 }
7608
7609 /* maddhd maddhdu */
7610 static void gen_maddhd_maddhdu(DisasContext *ctx)
7611 {
7612 TCGv_i64 lo = tcg_temp_new_i64();
7613 TCGv_i64 hi = tcg_temp_new_i64();
7614 TCGv_i64 t1 = tcg_temp_new_i64();
7615
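/* The low opcode bit (read via Rc here) distinguishes maddhdu (unsigned) from maddhd (signed); t1 supplies the high 64 bits of the rC addend (sign extension for maddhd, zero for maddhdu), and only the high half of the 128-bit sum is kept in rD. */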
7616 if (Rc(ctx->opcode)) {
7617 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
7618 cpu_gpr[rB(ctx->opcode)]);
7619 tcg_gen_movi_i64(t1, 0);
7620 } else {
7621 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
7622 cpu_gpr[rB(ctx->opcode)]);
7623 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
7624 }
7625 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
7626 cpu_gpr[rC(ctx->opcode)], t1);
7627 tcg_temp_free_i64(lo);
7628 tcg_temp_free_i64(hi);
7629 tcg_temp_free_i64(t1);
7630 }
7631 #endif /* defined(TARGET_PPC64) */
7632
7633 static void gen_tbegin(DisasContext *ctx)
7634 {
7635 if (unlikely(!ctx->tm_enabled)) {
7636 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
7637 return;
7638 }
7639 gen_helper_tbegin(cpu_env);
7640 }
7641
7642 #define GEN_TM_NOOP(name) \
7643 static inline void gen_##name(DisasContext *ctx) \
7644 { \
7645 if (unlikely(!ctx->tm_enabled)) { \
7646 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
7647 return; \
7648 } \
7649 /* \
7650 * Because tbegin always fails in QEMU, these user \
7651 * space instructions all have a simple implementation: \
7652 * \
7653 * CR[0] = 0b0 || MSR[TS] || 0b0 \
7654 * = 0b0 || 0b00 || 0b0 \
7655 */ \
7656 tcg_gen_movi_i32(cpu_crf[0], 0); \
7657 }
7658
7659 GEN_TM_NOOP(tend);
7660 GEN_TM_NOOP(tabort);
7661 GEN_TM_NOOP(tabortwc);
7662 GEN_TM_NOOP(tabortwci);
7663 GEN_TM_NOOP(tabortdc);
7664 GEN_TM_NOOP(tabortdci);
7665 GEN_TM_NOOP(tsr);
7666
7667 static inline void gen_cp_abort(DisasContext *ctx)
7668 {
7669 /* Do Nothing */
7670 }
7671
7672 #define GEN_CP_PASTE_NOOP(name) \
7673 static inline void gen_##name(DisasContext *ctx) \
7674 { \
7675 /* \
7676 * Generate invalid exception until we have an \
7677 * implementation of the copy paste facility \
7678 */ \
7679 gen_invalid(ctx); \
7680 }
7681
7682 GEN_CP_PASTE_NOOP(copy)
7683 GEN_CP_PASTE_NOOP(paste)
7684
7685 static void gen_tcheck(DisasContext *ctx)
7686 {
7687 if (unlikely(!ctx->tm_enabled)) {
7688 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
7689 return;
7690 }
7691 /*
7692 * Because tbegin always fails, the tcheck implementation is
7693 * simple:
7694 *
7695 * CR[CRF] = TDOOMED || MSR[TS] || 0b0
7696 * = 0b1 || 0b00 || 0b0
7697 */
7698 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
7699 }
7700
7701 #if defined(CONFIG_USER_ONLY)
7702 #define GEN_TM_PRIV_NOOP(name) \
7703 static inline void gen_##name(DisasContext *ctx) \
7704 { \
7705 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \
7706 }
7707
7708 #else
7709
7710 #define GEN_TM_PRIV_NOOP(name) \
7711 static inline void gen_##name(DisasContext *ctx) \
7712 { \
7713 CHK_SV; \
7714 if (unlikely(!ctx->tm_enabled)) { \
7715 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
7716 return; \
7717 } \
7718 /* \
7719 * Because tbegin always fails, the implementation is \
7720 * simple: \
7721 * \
7722 * CR[0] = 0b0 || MSR[TS] || 0b0 \
7723 * = 0b0 || 0b00 || 0b0 \
7724 */ \
7725 tcg_gen_movi_i32(cpu_crf[0], 0); \
7726 }
7727
7728 #endif
7729
7730 GEN_TM_PRIV_NOOP(treclaim);
7731 GEN_TM_PRIV_NOOP(trechkpt);
7732
7733 static inline void get_fpr(TCGv_i64 dst, int regno)
7734 {
7735 tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
7736 }
7737
7738 static inline void set_fpr(int regno, TCGv_i64 src)
7739 {
7740 tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
7741 }
7742
7743 static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
7744 {
7745 tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
7746 }
7747
7748 static inline void set_avr64(int regno, TCGv_i64 src, bool high)
7749 {
7750 tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
7751 }
7752
7753 #include "translate/fp-impl.c.inc"
7754
7755 #include "translate/vmx-impl.c.inc"
7756
7757 #include "translate/vsx-impl.c.inc"
7758
7759 #include "translate/dfp-impl.c.inc"
7760
7761 #include "translate/spe-impl.c.inc"
7762
7763 /* Handles lfdp, lxsd, lxssp */
7764 static void gen_dform39(DisasContext *ctx)
7765 {
7766 switch (ctx->opcode & 0x3) {
7767 case 0: /* lfdp */
7768 if (ctx->insns_flags2 & PPC2_ISA205) {
7769 return gen_lfdp(ctx);
7770 }
7771 break;
7772 case 2: /* lxsd */
7773 if (ctx->insns_flags2 & PPC2_ISA300) {
7774 return gen_lxsd(ctx);
7775 }
7776 break;
7777 case 3: /* lxssp */
7778 if (ctx->insns_flags2 & PPC2_ISA300) {
7779 return gen_lxssp(ctx);
7780 }
7781 break;
7782 }
7783 return gen_invalid(ctx);
7784 }
7785
7786 /* handles stfdp, lxv, stxsd, stxssp lxvx */
7787 static void gen_dform3D(DisasContext *ctx)
7788 {
7789 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */
7790 switch (ctx->opcode & 0x7) {
7791 case 1: /* lxv */
7792 if (ctx->insns_flags2 & PPC2_ISA300) {
7793 return gen_lxv(ctx);
7794 }
7795 break;
7796 case 5: /* stxv */
7797 if (ctx->insns_flags2 & PPC2_ISA300) {
7798 return gen_stxv(ctx);
7799 }
7800 break;
7801 }
7802 } else { /* DS-FORM */
7803 switch (ctx->opcode & 0x3) {
7804 case 0: /* stfdp */
7805 if (ctx->insns_flags2 & PPC2_ISA205) {
7806 return gen_stfdp(ctx);
7807 }
7808 break;
7809 case 2: /* stxsd */
7810 if (ctx->insns_flags2 & PPC2_ISA300) {
7811 return gen_stxsd(ctx);
7812 }
7813 break;
7814 case 3: /* stxssp */
7815 if (ctx->insns_flags2 & PPC2_ISA300) {
7816 return gen_stxssp(ctx);
7817 }
7818 break;
7819 }
7820 }
7821 return gen_invalid(ctx);
7822 }
7823
7824 #if defined(TARGET_PPC64)
7825 /* brd */
7826 static void gen_brd(DisasContext *ctx)
7827 {
7828 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
7829 }
7830
7831 /* brw */
7832 static void gen_brw(DisasContext *ctx)
7833 {
7834 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
7835 tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
7836
7837 }
7838
7839 /* brh */
7840 static void gen_brh(DisasContext *ctx)
7841 {
7842 TCGv_i64 t0 = tcg_temp_new_i64();
7843 TCGv_i64 t1 = tcg_temp_new_i64();
7844 TCGv_i64 t2 = tcg_temp_new_i64();
7845
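/* Swap the two bytes within every halfword: the high byte of each halfword is shifted down, the low byte up, and the two are recombined. */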
7846 tcg_gen_movi_i64(t0, 0x00ff00ff00ff00ffull);
7847 tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
7848 tcg_gen_and_i64(t2, t1, t0);
7849 tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], t0);
7850 tcg_gen_shli_i64(t1, t1, 8);
7851 tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
7852
7853 tcg_temp_free_i64(t0);
7854 tcg_temp_free_i64(t1);
7855 tcg_temp_free_i64(t2);
7856 }
7857 #endif
7858
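/* Opcode table entry format: GEN_HANDLER(name, opc1, opc2, opc3, invalid-bits mask, instruction type); GEN_HANDLER_E adds a second type flag for ISA extensions. */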
7859 static opcode_t opcodes[] = {
7860 #if defined(TARGET_PPC64)
7861 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
7862 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
7863 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
7864 #endif
7865 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
7866 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
7867 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
7868 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER),
7869 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
7870 #if defined(TARGET_PPC64)
7871 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
7872 #endif
7873 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
7874 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
7875 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
7876 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7877 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7878 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7879 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7880 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
7881 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
7882 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
7883 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
7884 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
7885 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7886 #if defined(TARGET_PPC64)
7887 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
7888 #endif
7889 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
7890 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
7891 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7892 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7893 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7894 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
7895 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
7896 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
7897 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
7898 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
7899 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
7900 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
7901 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7902 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7903 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7904 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7905 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
7906 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
7907 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
7908 #if defined(TARGET_PPC64)
7909 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
7910 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
7911 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
7912 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
7913 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
7914 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
7915 #endif
7916 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7917 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7918 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7919 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
7920 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
7921 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
7922 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
7923 #if defined(TARGET_PPC64)
7924 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
7925 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
7926 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
7927 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
7928 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
7929 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
7930 PPC_NONE, PPC2_ISA300),
7931 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
7932 PPC_NONE, PPC2_ISA300),
7933 #endif
7934 #if defined(TARGET_PPC64)
7935 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
7936 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
7937 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
7938 #endif
7939 /* handles lfdp, lxsd, lxssp */
7940 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
7941 /* handles stfdp, lxv, stxsd, stxssp, stxv */
7942 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
7943 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7944 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7945 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
7946 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
7947 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
7948 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
7949 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
7950 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
7951 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
7952 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
7953 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
7954 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
7955 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
7956 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
7957 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
7958 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
7959 #if defined(TARGET_PPC64)
7960 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
7961 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
7962 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
7963 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
7964 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
7965 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
7966 #endif
7967 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
7968 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
7969 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300),
7970 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
7971 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
7972 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
7973 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
7974 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
7975 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
7976 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
7977 #if defined(TARGET_PPC64)
7978 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
7979 #if !defined(CONFIG_USER_ONLY)
7980 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
7981 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
7982 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
7983 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
7984 #endif
7985 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
7986 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
7987 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
7988 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
7989 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
7990 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
7991 #endif
7992 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
7993 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
7994 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
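/*
 * Worked example for the comment above: because the top bit of opc2 is the
 * low bit of LEV, the low bit of an sc instruction's LEV field selects
 * between table slots 0x01 and 0x11 (they differ only in opc2 bit 0x10),
 * so both slots must be filled with the same handler.
 */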
7995 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
7996 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
7997 #if defined(TARGET_PPC64)
7998 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
7999 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
8000 #endif
8001 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
8002 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
8003 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
8004 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
8005 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
8006 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
8007 #if defined(TARGET_PPC64)
8008 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
8009 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
8010 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
8011 #endif
8012 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
8013 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
8014 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
8015 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
8016 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
8017 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
8018 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
8019 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
8020 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
8021 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
8022 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
8023 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
8024 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
8025 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
8026 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
8027 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
8028 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
8029 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
8030 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
8031 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
8032 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
8033 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
8034 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
8035 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
8036 #if defined(TARGET_PPC64)
8037 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
8038 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
8039 PPC_SEGMENT_64B),
8040 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
8041 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
8042 PPC_SEGMENT_64B),
8043 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
8044 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
8045 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
8046 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B),
8047 #endif
8048 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
8049 /*
8050 * XXX These instructions will need to be handled differently for
8051 * different ISA versions
8052 */
8053 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE),
8054 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE),
8055 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300),
8056 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300),
8057 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
8058 #if defined(TARGET_PPC64)
8059 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI),
8060 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
8061 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300),
8062 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
8063 #endif
8064 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
8065 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
8066 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
8067 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
8068 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
8069 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
8070 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
8071 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
8072 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
8073 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
8074 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
8075 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8076 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
8077 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
8078 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
8079 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
8080 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
8081 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
8082 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
8083 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
8084 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
8085 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8086 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8087 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8088 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8089 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8090 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8091 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8092 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8093 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8094 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8095 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8096 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8097 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8098 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8099 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8100 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8101 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8102 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8103 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8104 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8105 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8106 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8107 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8108 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8109 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8110 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8111 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8112 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8113 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8114 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8115 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8116 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8117 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8118 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8119 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8120 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8121 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8122 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8123 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8124 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8125 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8126 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8127 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8128 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8129 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8130 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8131 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8132 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8133 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8134 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8135 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
8136 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8137 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8138 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8139 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8140 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8141 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8142 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8143 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8144 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
8145 PPC_NONE, PPC2_BOOKE206),
8146 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
8147 PPC_NONE, PPC2_BOOKE206),
8148 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
8149 PPC_NONE, PPC2_BOOKE206),
8150 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
8151 PPC_NONE, PPC2_BOOKE206),
8152 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
8153 PPC_NONE, PPC2_BOOKE206),
8154 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
8155 PPC_NONE, PPC2_PRCNTL),
8156 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
8157 PPC_NONE, PPC2_PRCNTL),
8158 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000,
8159 PPC_NONE, PPC2_PRCNTL),
8160 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8161 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8162 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8163 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
8164 PPC_BOOKE, PPC2_BOOKE206),
8165 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
8166 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
8167 PPC_BOOKE, PPC2_BOOKE206),
8168 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
8169 PPC_440_SPEC),
8170 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8171 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8172 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8173 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8174 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8175 #if defined(TARGET_PPC64)
8176 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
8177 PPC2_ISA300),
8178 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
8179 GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001,
8180 PPC_NONE, PPC2_ISA207S),
8181 GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001,
8182 PPC_NONE, PPC2_ISA207S),
8183 #endif
8184
8185 #undef GEN_INT_ARITH_ADD
8186 #undef GEN_INT_ARITH_ADD_CONST
8187 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
8188 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
8189 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
8190 add_ca, compute_ca, compute_ov) \
8191 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8192 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8193 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8194 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8195 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8196 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8197 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8198 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8199 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8200 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
8201 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8202 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
8203
8204 #undef GEN_INT_ARITH_DIVW
8205 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
8206 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8207 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8208 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8209 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8210 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8211 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8212 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8213 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8214 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8215 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
8216 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
8217
8218 #if defined(TARGET_PPC64)
8219 #undef GEN_INT_ARITH_DIVD
8220 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
8221 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8222 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8223 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8224 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8225 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8226
8227 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8228 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8229 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8230 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8231 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
8232 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
8233
8234 #undef GEN_INT_ARITH_MUL_HELPER
8235 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
8236 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8237 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8238 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
8239 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
8240 #endif
8241
8242 #undef GEN_INT_ARITH_SUBF
8243 #undef GEN_INT_ARITH_SUBF_CONST
8244 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
8245 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
8246 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
8247 add_ca, compute_ca, compute_ov) \
8248 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8249 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8250 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8251 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8252 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8253 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8254 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8255 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8256 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8257 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8258 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8259
8260 #undef GEN_LOGICAL1
8261 #undef GEN_LOGICAL2
8262 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
8263 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
8264 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
8265 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8266 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8267 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8268 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8269 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8270 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8271 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8272 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8273 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8274 #if defined(TARGET_PPC64)
8275 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
8276 #endif
8277
8278 #if defined(TARGET_PPC64)
8279 #undef GEN_PPC64_R2
8280 #undef GEN_PPC64_R4
8281 #define GEN_PPC64_R2(name, opc1, opc2) \
8282 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8283 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8284 PPC_64B)
8285 #define GEN_PPC64_R4(name, opc1, opc2) \
8286 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8287 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
8288 PPC_64B), \
8289 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8290 PPC_64B), \
8291 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
8292 PPC_64B)
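/*
 * Note on the R2/R4 macros (hedged: this relies on how the opc2() helper
 * used by decode_legacy() below slices the instruction word): the MD-form
 * rotates keep the high bit of the shift amount (sh5) and of the mask bound
 * (mb5/me5) inside the bits that the lookup treats as extended opcode, so a
 * single mnemonic needs two or four slots, all pointing at the same handler.
 */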
8293 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
8294 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
8295 GEN_PPC64_R4(rldic, 0x1E, 0x04),
8296 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
8297 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
8298 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
8299 #endif
8300
8301 #undef GEN_LD
8302 #undef GEN_LDU
8303 #undef GEN_LDUX
8304 #undef GEN_LDX_E
8305 #undef GEN_LDS
8306 #define GEN_LD(name, ldop, opc, type) \
8307 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8308 #define GEN_LDU(name, ldop, opc, type) \
8309 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8310 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
8311 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8312 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
8313 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
8314 #define GEN_LDS(name, ldop, op, type) \
8315 GEN_LD(name, ldop, op | 0x20, type) \
8316 GEN_LDU(name, ldop, op | 0x21, type) \
8317 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
8318 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
8319
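/*
 * For instance, GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER) below expands to four
 * entries: lbz (primary opcode 0x22) and lbzu (0x23) as direct D-form loads,
 * plus lbzux and lbzx under primary opcode 0x1F with opc2/opc3 of 0x17/0x03
 * and 0x17/0x02 respectively.
 */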
8320 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
8321 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
8322 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
8323 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
8324 #if defined(TARGET_PPC64)
8325 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
8326 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
8327 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B)
8328 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B)
8329 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
8330
8331 /* HV/P7 and later only */
8332 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
8333 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
8334 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
8335 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
8336 #endif
8337 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
8338 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
8339
8340 /* External PID based load */
8341 #undef GEN_LDEPX
8342 #define GEN_LDEPX(name, ldop, opc2, opc3) \
8343 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \
8344 0x00000001, PPC_NONE, PPC2_BOOKE206),
8345
8346 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
8347 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
8348 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
8349 #if defined(TARGET_PPC64)
8350 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00)
8351 #endif
8352
8353 #undef GEN_ST
8354 #undef GEN_STU
8355 #undef GEN_STUX
8356 #undef GEN_STX_E
8357 #undef GEN_STS
8358 #define GEN_ST(name, stop, opc, type) \
8359 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8360 #define GEN_STU(name, stop, opc, type) \
8361 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
8362 #define GEN_STUX(name, stop, opc2, opc3, type) \
8363 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8364 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
8365 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
8366 #define GEN_STS(name, stop, op, type) \
8367 GEN_ST(name, stop, op | 0x20, type) \
8368 GEN_STU(name, stop, op | 0x21, type) \
8369 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
8370 GEN_STX(name, stop, 0x17, op | 0x00, type)
8371
8372 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
8373 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
8374 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
8375 #if defined(TARGET_PPC64)
8376 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B)
8377 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B)
8378 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
8379 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
8380 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
8381 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
8382 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
8383 #endif
8384 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
8385 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
8386
8387 #undef GEN_STEPX
8388 #define GEN_STEPX(name, ldop, opc2, opc3) \
8389 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \
8390 0x00000001, PPC_NONE, PPC2_BOOKE206),
8391
8392 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
8393 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
8394 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
8395 #if defined(TARGET_PPC64)
8396 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04)
8397 #endif
8398
8399 #undef GEN_CRLOGIC
8400 #define GEN_CRLOGIC(name, tcg_op, opc) \
8401 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
8402 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
8403 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
8404 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
8405 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
8406 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
8407 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
8408 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
8409 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
8410
8411 #undef GEN_MAC_HANDLER
8412 #define GEN_MAC_HANDLER(name, opc2, opc3) \
8413 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
8414 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
8415 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
8416 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
8417 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
8418 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
8419 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
8420 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
8421 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
8422 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
8423 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
8424 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
8425 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
8426 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
8427 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
8428 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
8429 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
8430 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
8431 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
8432 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
8433 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
8434 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
8435 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
8436 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
8437 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
8438 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
8439 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
8440 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
8441 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
8442 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
8443 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
8444 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
8445 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
8446 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
8447 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
8448 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
8449 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
8450 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
8451 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
8452 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
8453 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
8454 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
8455 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
8456
8457 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
8458 PPC_NONE, PPC2_TM),
8459 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \
8460 PPC_NONE, PPC2_TM),
8461 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
8462 PPC_NONE, PPC2_TM),
8463 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
8464 PPC_NONE, PPC2_TM),
8465 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
8466 PPC_NONE, PPC2_TM),
8467 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
8468 PPC_NONE, PPC2_TM),
8469 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
8470 PPC_NONE, PPC2_TM),
8471 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
8472 PPC_NONE, PPC2_TM),
8473 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
8474 PPC_NONE, PPC2_TM),
8475 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
8476 PPC_NONE, PPC2_TM),
8477 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
8478 PPC_NONE, PPC2_TM),
8479
8480 #include "translate/fp-ops.c.inc"
8481
8482 #include "translate/vmx-ops.c.inc"
8483
8484 #include "translate/vsx-ops.c.inc"
8485
8486 #include "translate/dfp-ops.c.inc"
8487
8488 #include "translate/spe-ops.c.inc"
8489 };
8490
8491 /*****************************************************************************/
8492 /* Opcode types */
8493 enum {
8494 PPC_DIRECT = 0, /* Opcode routine */
8495 PPC_INDIRECT = 1, /* Indirect opcode table */
8496 };
8497
8498 #define PPC_OPCODE_MASK 0x3
8499
8500 static inline int is_indirect_opcode(void *handler)
8501 {
8502 return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
8503 }
8504
8505 static inline opc_handler_t **ind_table(void *handler)
8506 {
8507 return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
8508 }
8509
8510 /* Opcode table creation */
8512 static void fill_new_table(opc_handler_t **table, int len)
8513 {
8514 int i;
8515
8516 for (i = 0; i < len; i++) {
8517 table[i] = &invalid_handler;
8518 }
8519 }
8520
8521 static int create_new_table(opc_handler_t **table, unsigned char idx)
8522 {
8523 opc_handler_t **tmp;
8524
8525 tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
8526 fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
8527 table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
8528
8529 return 0;
8530 }
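
/*
 * Sketch of the tagging scheme used by create_new_table() and ind_table():
 * a sub-table is stored directly in its parent slot with PPC_INDIRECT or-ed
 * into the low pointer bits (g_new() returns memory aligned well beyond two
 * bytes, so those bits are otherwise clear):
 *
 *   parent[idx] = (opc_handler_t *)((uintptr_t)sub_table | PPC_INDIRECT);
 *   ...
 *   if (is_indirect_opcode(parent[idx])) {
 *       opc_handler_t **sub = ind_table(parent[idx]);   <- tag stripped
 *   }
 */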
8531
8532 static int insert_in_table(opc_handler_t **table, unsigned char idx,
8533 opc_handler_t *handler)
8534 {
8535 if (table[idx] != &invalid_handler) {
8536 return -1;
8537 }
8538 table[idx] = handler;
8539
8540 return 0;
8541 }
8542
8543 static int register_direct_insn(opc_handler_t **ppc_opcodes,
8544 unsigned char idx, opc_handler_t *handler)
8545 {
8546 if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
8547 printf("*** ERROR: opcode %02x already assigned in main "
8548 "opcode table\n", idx);
8549 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
8550 printf(" Registered handler '%s' - new handler '%s'\n",
8551 ppc_opcodes[idx]->oname, handler->oname);
8552 #endif
8553 return -1;
8554 }
8555
8556 return 0;
8557 }
8558
8559 static int register_ind_in_table(opc_handler_t **table,
8560 unsigned char idx1, unsigned char idx2,
8561 opc_handler_t *handler)
8562 {
8563 if (table[idx1] == &invalid_handler) {
8564 if (create_new_table(table, idx1) < 0) {
8565 printf("*** ERROR: unable to create indirect table "
8566 "idx=%02x\n", idx1);
8567 return -1;
8568 }
8569 } else {
8570 if (!is_indirect_opcode(table[idx1])) {
8571 printf("*** ERROR: idx %02x already assigned to a direct "
8572 "opcode\n", idx1);
8573 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
8574 printf(" Registered handler '%s' - new handler '%s'\n",
8575 ind_table(table[idx1])[idx2]->oname, handler->oname);
8576 #endif
8577 return -1;
8578 }
8579 }
8580 if (handler != NULL &&
8581 insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
8582 printf("*** ERROR: opcode %02x already assigned in "
8583 "opcode table %02x\n", idx2, idx1);
8584 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
8585 printf(" Registered handler '%s' - new handler '%s'\n",
8586 ind_table(table[idx1])[idx2]->oname, handler->oname);
8587 #endif
8588 return -1;
8589 }
8590
8591 return 0;
8592 }
8593
8594 static int register_ind_insn(opc_handler_t **ppc_opcodes,
8595 unsigned char idx1, unsigned char idx2,
8596 opc_handler_t *handler)
8597 {
8598 return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
8599 }
8600
8601 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
8602 unsigned char idx1, unsigned char idx2,
8603 unsigned char idx3, opc_handler_t *handler)
8604 {
8605 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
8606 printf("*** ERROR: unable to join indirect table idx "
8607 "[%02x-%02x]\n", idx1, idx2);
8608 return -1;
8609 }
8610 if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
8611 handler) < 0) {
8612 printf("*** ERROR: unable to insert opcode "
8613 "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
8614 return -1;
8615 }
8616
8617 return 0;
8618 }
8619
8620 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
8621 unsigned char idx1, unsigned char idx2,
8622 unsigned char idx3, unsigned char idx4,
8623 opc_handler_t *handler)
8624 {
8625 opc_handler_t **table;
8626
8627 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
8628 printf("*** ERROR: unable to join indirect table idx "
8629 "[%02x-%02x]\n", idx1, idx2);
8630 return -1;
8631 }
8632 table = ind_table(ppc_opcodes[idx1]);
8633 if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
8634 printf("*** ERROR: unable to join 2nd-level indirect table idx "
8635 "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
8636 return -1;
8637 }
8638 table = ind_table(table[idx2]);
8639 if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
8640 printf("*** ERROR: unable to insert opcode "
8641 "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
8642 return -1;
8643 }
8644 return 0;
8645 }

8646 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
8647 {
8648 if (insn->opc2 != 0xFF) {
8649 if (insn->opc3 != 0xFF) {
8650 if (insn->opc4 != 0xFF) {
8651 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
8652 insn->opc3, insn->opc4,
8653 &insn->handler) < 0) {
8654 return -1;
8655 }
8656 } else {
8657 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
8658 insn->opc3, &insn->handler) < 0) {
8659 return -1;
8660 }
8661 }
8662 } else {
8663 if (register_ind_insn(ppc_opcodes, insn->opc1,
8664 insn->opc2, &insn->handler) < 0) {
8665 return -1;
8666 }
8667 }
8668 } else {
8669 if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
8670 return -1;
8671 }
8672 }
8673
8674 return 0;
8675 }
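
/*
 * Routing example for register_insn() (assuming the three-operand
 * GEN_HANDLER() macros leave opc4 as 0xFF): a table entry such as lwarx
 * above (opc1 0x1F, opc2 0x14, opc3 0x00) takes the register_dblind_insn()
 * path and ends up two tables deep, while a D-form entry like addi
 * (opc1 0x0E, opc2/opc3 0xFF) is registered directly in the top-level table.
 */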
8676
8677 static int test_opcode_table(opc_handler_t **table, int len)
8678 {
8679 int i, count, tmp;
8680
8681 for (i = 0, count = 0; i < len; i++) {
8682 /* Consistency fixup */
8683 if (table[i] == NULL) {
8684 table[i] = &invalid_handler;
8685 }
8686 if (table[i] != &invalid_handler) {
8687 if (is_indirect_opcode(table[i])) {
8688 tmp = test_opcode_table(ind_table(table[i]),
8689 PPC_CPU_INDIRECT_OPCODES_LEN);
8690 if (tmp == 0) {
8691 free(table[i]);
8692 table[i] = &invalid_handler;
8693 } else {
8694 count++;
8695 }
8696 } else {
8697 count++;
8698 }
8699 }
8700 }
8701
8702 return count;
8703 }
8704
8705 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
8706 {
8707 if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
8708 printf("*** WARNING: no opcode defined !\n");
8709 }
8710 }
8711
8712 /*****************************************************************************/
8713 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
8714 {
8715 PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
8716 opcode_t *opc;
8717
8718 fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
8719 for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
8720 if (((opc->handler.type & pcc->insns_flags) != 0) ||
8721 ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
8722 if (register_insn(cpu->opcodes, opc) < 0) {
8723 error_setg(errp, "ERROR initializing PowerPC instruction "
8724 "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
8725 opc->opc3);
8726 return;
8727 }
8728 }
8729 }
8730 fix_opcode_tables(cpu->opcodes);
8731 fflush(stdout);
8732 fflush(stderr);
8733 }
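
/*
 * Usage sketch (illustrative only; the real call sites live in the CPU
 * realize/unrealize paths and are not shown here):
 *
 *   Error *local_err = NULL;
 *
 *   create_ppc_opcodes(cpu, &local_err);
 *   if (local_err != NULL) {
 *       error_propagate(errp, local_err);
 *       return;
 *   }
 *   ...
 *   destroy_ppc_opcodes(cpu);
 */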
8734
8735 void destroy_ppc_opcodes(PowerPCCPU *cpu)
8736 {
8737 opc_handler_t **table, **table_2;
8738 int i, j, k;
8739
8740 for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
8741 if (cpu->opcodes[i] == &invalid_handler) {
8742 continue;
8743 }
8744 if (is_indirect_opcode(cpu->opcodes[i])) {
8745 table = ind_table(cpu->opcodes[i]);
8746 for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
8747 if (table[j] == &invalid_handler) {
8748 continue;
8749 }
8750 if (is_indirect_opcode(table[j])) {
8751 table_2 = ind_table(table[j]);
8752 for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
8753 if (table_2[k] != &invalid_handler &&
8754 is_indirect_opcode(table_2[k])) {
8755 g_free((opc_handler_t *)((uintptr_t)table_2[k] &
8756 ~PPC_INDIRECT));
8757 }
8758 }
8759 g_free((opc_handler_t *)((uintptr_t)table[j] &
8760 ~PPC_INDIRECT));
8761 }
8762 }
8763 g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
8764 ~PPC_INDIRECT));
8765 }
8766 }
8767 }
8768
8769 #if defined(PPC_DUMP_CPU)
8770 static void dump_ppc_insns(CPUPPCState *env)
8771 {
8772 opc_handler_t **table, *handler;
8773 const char *p, *q;
8774 uint8_t opc1, opc2, opc3, opc4;
8775
8776 printf("Instructions set:\n");
8777 /* opc1 is 6 bits long */
8778 for (opc1 = 0x00; opc1 < PPC_CPU_OPCODES_LEN; opc1++) {
8779 table = env->opcodes;
8780 handler = table[opc1];
8781 if (is_indirect_opcode(handler)) {
8782 /* opc2 is 5 bits long */
8783 for (opc2 = 0; opc2 < PPC_CPU_INDIRECT_OPCODES_LEN; opc2++) {
8784 table = env->opcodes;
8785 handler = env->opcodes[opc1];
8786 table = ind_table(handler);
8787 handler = table[opc2];
8788 if (is_indirect_opcode(handler)) {
8789 table = ind_table(handler);
8790 /* opc3 is 5 bits long */
8791 for (opc3 = 0; opc3 < PPC_CPU_INDIRECT_OPCODES_LEN;
8792 opc3++) {
8793 handler = table[opc3];
8794 if (is_indirect_opcode(handler)) {
8795 table = ind_table(handler);
8796 /* opc4 is 5 bits long */
8797 for (opc4 = 0; opc4 < PPC_CPU_INDIRECT_OPCODES_LEN;
8798 opc4++) {
8799 handler = table[opc4];
8800 if (handler->handler != &gen_invalid) {
8801 printf("INSN: %02x %02x %02x %02x -- "
8802 "(%02d %04d %02d) : %s\n",
8803 opc1, opc2, opc3, opc4,
8804 opc1, (opc3 << 5) | opc2, opc4,
8805 handler->oname);
8806 }
8807 }
8808 } else {
8809 if (handler->handler != &gen_invalid) {
8810 /* Special hack to properly dump SPE insns */
8811 p = strchr(handler->oname, '_');
8812 if (p == NULL) {
8813 printf("INSN: %02x %02x %02x (%02d %04d) : "
8814 "%s\n",
8815 opc1, opc2, opc3, opc1,
8816 (opc3 << 5) | opc2,
8817 handler->oname);
8818 } else {
8819 q = "speundef";
8820 if ((p - handler->oname) != strlen(q)
8821 || (memcmp(handler->oname, q, strlen(q))
8822 != 0)) {
8823 /* First instruction */
8824 printf("INSN: %02x %02x %02x"
8825 "(%02d %04d) : %.*s\n",
8826 opc1, opc2 << 1, opc3, opc1,
8827 (opc3 << 6) | (opc2 << 1),
8828 (int)(p - handler->oname),
8829 handler->oname);
8830 }
8831 if (strcmp(p + 1, q) != 0) {
8832 /* Second instruction */
8833 printf("INSN: %02x %02x %02x "
8834 "(%02d %04d) : %s\n", opc1,
8835 (opc2 << 1) | 1, opc3, opc1,
8836 (opc3 << 6) | (opc2 << 1) | 1,
8837 p + 1);
8838 }
8839 }
8840 }
8841 }
8842 }
8843 } else {
8844 if (handler->handler != &gen_invalid) {
8845 printf("INSN: %02x %02x -- (%02d %04d) : %s\n",
8846 opc1, opc2, opc1, opc2, handler->oname);
8847 }
8848 }
8849 }
8850 } else {
8851 if (handler->handler != &gen_invalid) {
8852 printf("INSN: %02x -- -- (%02d ----) : %s\n",
8853 opc1, opc1, handler->oname);
8854 }
8855 }
8856 }
8857 }
8858 #endif

8859 int ppc_fixup_cpu(PowerPCCPU *cpu)
8860 {
8861 CPUPPCState *env = &cpu->env;
8862
8863 /*
8864 * TCG doesn't (yet) emulate some groups of instructions that are
8865 * implemented on some otherwise supported CPUs (e.g. VSX and
8866 * decimal floating point instructions on POWER7). We remove
8867 * unsupported instruction groups from the cpu state's instruction
8868 * masks and hope the guest can cope. For at least the pseries
8869 * machine, the unavailability of these instructions can be
8870 * advertised to the guest via the device tree.
8871 */
8872 if ((env->insns_flags & ~PPC_TCG_INSNS)
8873 || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
8874 warn_report("Disabling some instructions which are not "
8875 "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
8876 env->insns_flags & ~PPC_TCG_INSNS,
8877 env->insns_flags2 & ~PPC_TCG_INSNS2);
8878 }
8879 env->insns_flags &= PPC_TCG_INSNS;
8880 env->insns_flags2 &= PPC_TCG_INSNS2;
8881 return 0;
8882 }
8883
8884
8885 void ppc_cpu_dump_statistics(CPUState *cs, int flags)
8886 {
8887 #if defined(DO_PPC_STATISTICS)
8888 PowerPCCPU *cpu = POWERPC_CPU(cs);
8889 opc_handler_t **t1, **t2, **t3, *handler;
8890 int op1, op2, op3;
8891
8892 t1 = cpu->env.opcodes;
8893 for (op1 = 0; op1 < 64; op1++) {
8894 handler = t1[op1];
8895 if (is_indirect_opcode(handler)) {
8896 t2 = ind_table(handler);
8897 for (op2 = 0; op2 < 32; op2++) {
8898 handler = t2[op2];
8899 if (is_indirect_opcode(handler)) {
8900 t3 = ind_table(handler);
8901 for (op3 = 0; op3 < 32; op3++) {
8902 handler = t3[op3];
8903 if (handler->count == 0) {
8904 continue;
8905 }
8906 qemu_printf("%02x %02x %02x (%02x %04d) %16s: "
8907 "%016" PRIx64 " %" PRId64 "\n",
8908 op1, op2, op3, op1, (op3 << 5) | op2,
8909 handler->oname,
8910 handler->count, handler->count);
8911 }
8912 } else {
8913 if (handler->count == 0) {
8914 continue;
8915 }
8916 qemu_printf("%02x %02x (%02x %04d) %16s: "
8917 "%016" PRIx64 " %" PRId64 "\n",
8918 op1, op2, op1, op2, handler->oname,
8919 handler->count, handler->count);
8920 }
8921 }
8922 } else {
8923 if (handler->count == 0) {
8924 continue;
8925 }
8926 qemu_printf("%02x (%02x ) %16s: %016" PRIx64
8927 " %" PRId64 "\n",
8928 op1, op1, handler->oname,
8929 handler->count, handler->count);
8930 }
8931 }
8932 #endif
8933 }
8934
8935 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
8936 {
8937 opc_handler_t **table, *handler;
8938 uint32_t inval;
8939
8940 ctx->opcode = insn;
8941
8942 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
8943 insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
8944 ctx->le_mode ? "little" : "big");
8945
8946 table = cpu->opcodes;
8947 handler = table[opc1(insn)];
8948 if (is_indirect_opcode(handler)) {
8949 table = ind_table(handler);
8950 handler = table[opc2(insn)];
8951 if (is_indirect_opcode(handler)) {
8952 table = ind_table(handler);
8953 handler = table[opc3(insn)];
8954 if (is_indirect_opcode(handler)) {
8955 table = ind_table(handler);
8956 handler = table[opc4(insn)];
8957 }
8958 }
8959 }
8960
8961 /* Is opcode *REALLY* valid? */
8962 if (unlikely(handler->handler == &gen_invalid)) {
8963 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
8964 "%02x - %02x - %02x - %02x (%08x) "
8965 TARGET_FMT_lx "\n",
8966 opc1(insn), opc2(insn), opc3(insn), opc4(insn),
8967 insn, ctx->cia);
8968 return false;
8969 }
8970
8971 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
8972 && Rc(insn))) {
8973 inval = handler->inval2;
8974 } else {
8975 inval = handler->inval1;
8976 }
8977
8978 if (unlikely((insn & inval) != 0)) {
8979 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
8980 "%02x - %02x - %02x - %02x (%08x) "
8981 TARGET_FMT_lx "\n", insn & inval,
8982 opc1(insn), opc2(insn), opc3(insn), opc4(insn),
8983 insn, ctx->cia);
8984 return false;
8985 }
8986
#if defined(DO_PPC_STATISTICS)
handler->count++;
#endif
8987 handler->handler(ctx);
8988 return true;
8989 }
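
/*
 * Example of the inval check above: a handler registered with
 * inval = 0x0000F800 (e.g. neg in the opcode table) declares instruction
 * bits 11..15 -- the otherwise unused rB field -- as reserved, so any
 * encoding with one of those bits set is logged and rejected before the
 * handler is called.
 */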
8990
8991 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
8992 {
8993 DisasContext *ctx = container_of(dcbase, DisasContext, base);
8994 CPUPPCState *env = cs->env_ptr;
8995 uint32_t hflags = ctx->base.tb->flags;
8996
8997 ctx->spr_cb = env->spr_cb;
8998 ctx->pr = (hflags >> HFLAGS_PR) & 1;
8999 ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
9000 ctx->dr = (hflags >> HFLAGS_DR) & 1;
9001 ctx->hv = (hflags >> HFLAGS_HV) & 1;
9002 ctx->insns_flags = env->insns_flags;
9003 ctx->insns_flags2 = env->insns_flags2;
9004 ctx->access_type = -1;
9005 ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
9006 ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
9007 ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
9008 ctx->flags = env->flags;
9009 #if defined(TARGET_PPC64)
9010 ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
9011 ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
9012 #endif
9013 ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
9014 || env->mmu_model == POWERPC_MMU_601
9015 || env->mmu_model & POWERPC_MMU_64;
9016
9017 ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
9018 ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
9019 ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
9020 ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
9021 ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
9022 ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
9023
9024 ctx->singlestep_enabled = 0;
9025 if ((hflags >> HFLAGS_SE) & 1) {
9026 ctx->singlestep_enabled |= CPU_SINGLE_STEP;
9027 }
9028 if ((hflags >> HFLAGS_BE) & 1) {
9029 ctx->singlestep_enabled |= CPU_BRANCH_STEP;
9030 }
9031 if (unlikely(ctx->base.singlestep_enabled)) {
9032 ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP;
9033 }
9034
9035 if (ctx->singlestep_enabled & (CPU_SINGLE_STEP | GDBSTUB_SINGLE_STEP)) {
9036 ctx->base.max_insns = 1;
9037 } else {
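/*
 * Worked example (assuming 4 KiB pages): TARGET_PAGE_MASK is ~0xfff, so
 * (pc_first | TARGET_PAGE_MASK) is a negative value whose magnitude is the
 * number of bytes left on the current page; dividing by 4 gives the number
 * of whole instructions the TB can hold without crossing a page boundary.
 */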
9038 int bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
9039 ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
9040 }
9041 }
9042
9043 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
9044 {
9045 }
9046
9047 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
9048 {
9049 tcg_gen_insn_start(dcbase->pc_next);
9050 }
9051
9052 static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
9053 const CPUBreakpoint *bp)
9054 {
9055 DisasContext *ctx = container_of(dcbase, DisasContext, base);
9056
9057 gen_update_nip(ctx, ctx->base.pc_next);
9058 gen_debug_exception(ctx);
9059 /*
9060 * The address covered by the breakpoint must be included in
9061 * [tb->pc, tb->pc + tb->size) in order for it to be properly
9062 * cleared -- thus we increment the PC here so that the logic
9063 * setting tb->size below does the right thing.
9064 */
9065 ctx->base.pc_next += 4;
9066 return true;
9067 }
9068
9069 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
9070 {
9071 DisasContext *ctx = container_of(dcbase, DisasContext, base);
9072 PowerPCCPU *cpu = POWERPC_CPU(cs);
9073 CPUPPCState *env = cs->env_ptr;
9074 uint32_t insn;
9075 bool ok;
9076
9077 LOG_DISAS("----------------\n");
9078 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
9079 ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
9080
9081 ctx->cia = ctx->base.pc_next;
9082 insn = translator_ldl_swap(env, ctx->base.pc_next, need_byteswap(ctx));
9083 ctx->base.pc_next += 4;
9084
9085 ok = decode_legacy(cpu, ctx, insn);
9086 if (!ok) {
9087 gen_invalid(ctx);
9088 }
9089
9093
9094 translator_loop_temp_check(&ctx->base);
9095 }
9096
9097 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
9098 {
9099 DisasContext *ctx = container_of(dcbase, DisasContext, base);
9100 DisasJumpType is_jmp = ctx->base.is_jmp;
9101 target_ulong nip = ctx->base.pc_next;
9102 int sse;
9103
9104 if (is_jmp == DISAS_NORETURN) {
9105 /* We have already exited the TB. */
9106 return;
9107 }
9108
9109 /* Honor single stepping. */
9110 sse = ctx->singlestep_enabled & (CPU_SINGLE_STEP | GDBSTUB_SINGLE_STEP);
9111 if (unlikely(sse)) {
9112 switch (is_jmp) {
9113 case DISAS_TOO_MANY:
9114 case DISAS_EXIT_UPDATE:
9115 case DISAS_CHAIN_UPDATE:
9116 gen_update_nip(ctx, nip);
9117 break;
9118 case DISAS_EXIT:
9119 case DISAS_CHAIN:
9120 break;
9121 default:
9122 g_assert_not_reached();
9123 }
9124
9125 if (sse & GDBSTUB_SINGLE_STEP) {
9126 gen_debug_exception(ctx);
9127 return;
9128 }
9129 /* else CPU_SINGLE_STEP... */
9130 if (nip <= 0x100 || nip > 0xf00) {
9131 gen_exception(ctx, gen_prep_dbgex(ctx));
9132 return;
9133 }
9134 }
9135
9136 switch (is_jmp) {
9137 case DISAS_TOO_MANY:
9138 if (use_goto_tb(ctx, nip)) {
9139 tcg_gen_goto_tb(0);
9140 gen_update_nip(ctx, nip);
9141 tcg_gen_exit_tb(ctx->base.tb, 0);
9142 break;
9143 }
9144 /* fall through */
9145 case DISAS_CHAIN_UPDATE:
9146 gen_update_nip(ctx, nip);
9147 /* fall through */
9148 case DISAS_CHAIN:
9149 tcg_gen_lookup_and_goto_ptr();
9150 break;
9151
9152 case DISAS_EXIT_UPDATE:
9153 gen_update_nip(ctx, nip);
9154 /* fall through */
9155 case DISAS_EXIT:
9156 tcg_gen_exit_tb(NULL, 0);
9157 break;
9158
9159 default:
9160 g_assert_not_reached();
9161 }
9162 }
9163
9164 static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
9165 {
9166 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
9167 log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
9168 }
9169
9170 static const TranslatorOps ppc_tr_ops = {
9171 .init_disas_context = ppc_tr_init_disas_context,
9172 .tb_start = ppc_tr_tb_start,
9173 .insn_start = ppc_tr_insn_start,
9174 .breakpoint_check = ppc_tr_breakpoint_check,
9175 .translate_insn = ppc_tr_translate_insn,
9176 .tb_stop = ppc_tr_tb_stop,
9177 .disas_log = ppc_tr_disas_log,
9178 };
9179
9180 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
9181 {
9182 DisasContext ctx;
9183
9184 translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns);
9185 }
9186
9187 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
9188 target_ulong *data)
9189 {
9190 env->nip = data[0];
9191 }