1 /*
2 * PowerPC emulation for qemu: main translation routines.
3 *
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 * Copyright (C) 2011 Freescale Semiconductor, Inc.
6 *
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2.1 of the License, or (at your option) any later version.
11 *
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19 */
20
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg/tcg-op.h"
27 #include "tcg/tcg-op-gvec.h"
28 #include "qemu/host-utils.h"
29 #include "qemu/main-loop.h"
30 #include "exec/cpu_ldst.h"
31
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
34
35 #include "trace-tcg.h"
36 #include "exec/translator.h"
37 #include "exec/log.h"
38 #include "qemu/atomic128.h"
39 #include "spr_tcg.h"
40
41 #include "qemu/qemu-print.h"
42 #include "qapi/error.h"
43
44 #define CPU_SINGLE_STEP 0x1
45 #define CPU_BRANCH_STEP 0x2
46 #define GDBSTUB_SINGLE_STEP 0x4
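/*
 * These are bit flags OR-ed together in DisasContext.singlestep_enabled:
 * CPU_SINGLE_STEP and CPU_BRANCH_STEP reflect the guest's MSR single-step
 * (SE) and branch-trace (BE) facilities, while GDBSTUB_SINGLE_STEP is set
 * when the gdb stub has requested single-stepping of the vCPU.
 */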
47
48 /* Include definitions for instruction classes and implementation flags */
49 /* #define PPC_DEBUG_DISAS */
50
51 #ifdef PPC_DEBUG_DISAS
52 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
53 #else
54 # define LOG_DISAS(...) do { } while (0)
55 #endif
56 /*****************************************************************************/
57 /* Code translation helpers */
58
59 /* global register indexes */
60 static char cpu_reg_names[10 * 3 + 22 * 4 /* GPR */
61 + 10 * 4 + 22 * 5 /* SPE GPRh */
62 + 8 * 5 /* CRF */];
63 static TCGv cpu_gpr[32];
64 static TCGv cpu_gprh[32];
65 static TCGv_i32 cpu_crf[8];
66 static TCGv cpu_nip;
67 static TCGv cpu_msr;
68 static TCGv cpu_ctr;
69 static TCGv cpu_lr;
70 #if defined(TARGET_PPC64)
71 static TCGv cpu_cfar;
72 #endif
73 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
74 static TCGv cpu_reserve;
75 static TCGv cpu_reserve_val;
76 static TCGv cpu_fpscr;
77 static TCGv_i32 cpu_access_type;
78
79 #include "exec/gen-icount.h"
80
81 void ppc_translate_init(void)
82 {
83 int i;
84 char *p;
85 size_t cpu_reg_names_size;
86
87 p = cpu_reg_names;
88 cpu_reg_names_size = sizeof(cpu_reg_names);
89
90 for (i = 0; i < 8; i++) {
91 snprintf(p, cpu_reg_names_size, "crf%d", i);
92 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
93 offsetof(CPUPPCState, crf[i]), p);
94 p += 5;
95 cpu_reg_names_size -= 5;
96 }
97
98 for (i = 0; i < 32; i++) {
99 snprintf(p, cpu_reg_names_size, "r%d", i);
100 cpu_gpr[i] = tcg_global_mem_new(cpu_env,
101 offsetof(CPUPPCState, gpr[i]), p);
102 p += (i < 10) ? 3 : 4;
103 cpu_reg_names_size -= (i < 10) ? 3 : 4;
104 snprintf(p, cpu_reg_names_size, "r%dH", i);
105 cpu_gprh[i] = tcg_global_mem_new(cpu_env,
106 offsetof(CPUPPCState, gprh[i]), p);
107 p += (i < 10) ? 4 : 5;
108 cpu_reg_names_size -= (i < 10) ? 4 : 5;
109 }
110
111 cpu_nip = tcg_global_mem_new(cpu_env,
112 offsetof(CPUPPCState, nip), "nip");
113
114 cpu_msr = tcg_global_mem_new(cpu_env,
115 offsetof(CPUPPCState, msr), "msr");
116
117 cpu_ctr = tcg_global_mem_new(cpu_env,
118 offsetof(CPUPPCState, ctr), "ctr");
119
120 cpu_lr = tcg_global_mem_new(cpu_env,
121 offsetof(CPUPPCState, lr), "lr");
122
123 #if defined(TARGET_PPC64)
124 cpu_cfar = tcg_global_mem_new(cpu_env,
125 offsetof(CPUPPCState, cfar), "cfar");
126 #endif
127
128 cpu_xer = tcg_global_mem_new(cpu_env,
129 offsetof(CPUPPCState, xer), "xer");
130 cpu_so = tcg_global_mem_new(cpu_env,
131 offsetof(CPUPPCState, so), "SO");
132 cpu_ov = tcg_global_mem_new(cpu_env,
133 offsetof(CPUPPCState, ov), "OV");
134 cpu_ca = tcg_global_mem_new(cpu_env,
135 offsetof(CPUPPCState, ca), "CA");
136 cpu_ov32 = tcg_global_mem_new(cpu_env,
137 offsetof(CPUPPCState, ov32), "OV32");
138 cpu_ca32 = tcg_global_mem_new(cpu_env,
139 offsetof(CPUPPCState, ca32), "CA32");
140
141 cpu_reserve = tcg_global_mem_new(cpu_env,
142 offsetof(CPUPPCState, reserve_addr),
143 "reserve_addr");
144 cpu_reserve_val = tcg_global_mem_new(cpu_env,
145 offsetof(CPUPPCState, reserve_val),
146 "reserve_val");
147
148 cpu_fpscr = tcg_global_mem_new(cpu_env,
149 offsetof(CPUPPCState, fpscr), "fpscr");
150
151 cpu_access_type = tcg_global_mem_new_i32(cpu_env,
152 offsetof(CPUPPCState, access_type),
153 "access_type");
154 }
155
156 /* internal defines */
157 struct DisasContext {
158 DisasContextBase base;
159 target_ulong cia; /* current instruction address */
160 uint32_t opcode;
161 /* Routine used to access memory */
162 bool pr, hv, dr, le_mode;
163 bool lazy_tlb_flush;
164 bool need_access_type;
165 int mem_idx;
166 int access_type;
167 /* Translation flags */
168 MemOp default_tcg_memop_mask;
169 #if defined(TARGET_PPC64)
170 bool sf_mode;
171 bool has_cfar;
172 #endif
173 bool fpu_enabled;
174 bool altivec_enabled;
175 bool vsx_enabled;
176 bool spe_enabled;
177 bool tm_enabled;
178 bool gtse;
179 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
180 int singlestep_enabled;
181 uint32_t flags;
182 uint64_t insns_flags;
183 uint64_t insns_flags2;
184 };
185
186 #define DISAS_EXIT DISAS_TARGET_0 /* exit to main loop, pc updated */
187 #define DISAS_EXIT_UPDATE DISAS_TARGET_1 /* exit to main loop, pc stale */
188 #define DISAS_CHAIN DISAS_TARGET_2 /* lookup next tb, pc updated */
189 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3 /* lookup next tb, pc stale */
190
191 /* Return true iff byteswap is needed in a scalar memop */
192 static inline bool need_byteswap(const DisasContext *ctx)
193 {
194 #if defined(TARGET_WORDS_BIGENDIAN)
195 return ctx->le_mode;
196 #else
197 return !ctx->le_mode;
198 #endif
199 }
200
201 /* True when active word size < size of target_long. */
202 #ifdef TARGET_PPC64
203 # define NARROW_MODE(C) (!(C)->sf_mode)
204 #else
205 # define NARROW_MODE(C) 0
206 #endif
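/*
 * Example: a 64-bit CPU running with MSR[SF] clear executes in 32-bit
 * ("narrow") mode, so results and effective addresses are truncated to
 * 32 bits by the code generators below.
 */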
207
208 struct opc_handler_t {
209 /* invalid bits for instruction 1 (Rc(opcode) == 0) */
210 uint32_t inval1;
211 /* invalid bits for instruction 2 (Rc(opcode) == 1) */
212 uint32_t inval2;
213 /* instruction type */
214 uint64_t type;
215 /* extended instruction type */
216 uint64_t type2;
217 /* handler */
218 void (*handler)(DisasContext *ctx);
219 };
220
221 /* SPR load/store helpers */
222 static inline void gen_load_spr(TCGv t, int reg)
223 {
224 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
225 }
226
227 static inline void gen_store_spr(int reg, TCGv t)
228 {
229 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
230 }
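/*
 * These two helpers access env->spr[] directly and are only suitable for
 * SPRs without side effects; SPRs that need extra work on access get the
 * dedicated read/write callbacks defined further down.
 */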
231
232 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
233 {
234 if (ctx->need_access_type && ctx->access_type != access_type) {
235 tcg_gen_movi_i32(cpu_access_type, access_type);
236 ctx->access_type = access_type;
237 }
238 }
239
240 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
241 {
242 if (NARROW_MODE(ctx)) {
243 nip = (uint32_t)nip;
244 }
245 tcg_gen_movi_tl(cpu_nip, nip);
246 }
247
248 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
249 {
250 TCGv_i32 t0, t1;
251
252 /*
253      * These are all synchronous exceptions; we set the PC back to the
254 * faulting instruction
255 */
256 gen_update_nip(ctx, ctx->cia);
257 t0 = tcg_const_i32(excp);
258 t1 = tcg_const_i32(error);
259 gen_helper_raise_exception_err(cpu_env, t0, t1);
260 tcg_temp_free_i32(t0);
261 tcg_temp_free_i32(t1);
262 ctx->base.is_jmp = DISAS_NORETURN;
263 }
264
265 static void gen_exception(DisasContext *ctx, uint32_t excp)
266 {
267 TCGv_i32 t0;
268
269 /*
270      * These are all synchronous exceptions; we set the PC back to the
271 * faulting instruction
272 */
273 gen_update_nip(ctx, ctx->cia);
274 t0 = tcg_const_i32(excp);
275 gen_helper_raise_exception(cpu_env, t0);
276 tcg_temp_free_i32(t0);
277 ctx->base.is_jmp = DISAS_NORETURN;
278 }
279
280 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
281 target_ulong nip)
282 {
283 TCGv_i32 t0;
284
285 gen_update_nip(ctx, nip);
286 t0 = tcg_const_i32(excp);
287 gen_helper_raise_exception(cpu_env, t0);
288 tcg_temp_free_i32(t0);
289 ctx->base.is_jmp = DISAS_NORETURN;
290 }
291
292 static void gen_icount_io_start(DisasContext *ctx)
293 {
294 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
295 gen_io_start();
296 /*
297 * An I/O instruction must be last in the TB.
298 * Chain to the next TB, and let the code from gen_tb_start
299 * decide if we need to return to the main loop.
300 * Doing this first also allows this value to be overridden.
301 */
302 ctx->base.is_jmp = DISAS_TOO_MANY;
303 }
304 }
305
306 /*
307  * Tells the caller which exception is appropriate to generate and
308  * prepares the SPR registers for it.
309 *
310 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
311 * POWERPC_EXCP_DEBUG (on BookE).
312 */
313 static uint32_t gen_prep_dbgex(DisasContext *ctx)
314 {
315 if (ctx->flags & POWERPC_FLAG_DE) {
316 target_ulong dbsr = 0;
317 if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
318 dbsr = DBCR0_ICMP;
319 } else {
320             /* Must have been a branch */
321 dbsr = DBCR0_BRT;
322 }
323 TCGv t0 = tcg_temp_new();
324 gen_load_spr(t0, SPR_BOOKE_DBSR);
325 tcg_gen_ori_tl(t0, t0, dbsr);
326 gen_store_spr(SPR_BOOKE_DBSR, t0);
327 tcg_temp_free(t0);
328 return POWERPC_EXCP_DEBUG;
329 } else {
330 return POWERPC_EXCP_TRACE;
331 }
332 }
333
334 static void gen_debug_exception(DisasContext *ctx)
335 {
336 gen_helper_raise_exception(cpu_env, tcg_constant_i32(EXCP_DEBUG));
337 ctx->base.is_jmp = DISAS_NORETURN;
338 }
339
340 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
341 {
342 /* Will be converted to program check if needed */
343 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
344 }
345
346 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
347 {
348 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
349 }
350
351 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
352 {
353 /* Will be converted to program check if needed */
354 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
355 }
356
357 /*****************************************************************************/
358 /* SPR READ/WRITE CALLBACKS */
359
360 void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
361 {
362 #if 0
363 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
364     printf("ERROR: trying to access SPR %d!\n", sprn);
365 #endif
366 }
367
368 /* #define PPC_DUMP_SPR_ACCESSES */
369
370 /*
371 * Generic callbacks:
372 * do nothing but store/retrieve spr value
373 */
374 static void spr_load_dump_spr(int sprn)
375 {
376 #ifdef PPC_DUMP_SPR_ACCESSES
377 TCGv_i32 t0 = tcg_const_i32(sprn);
378 gen_helper_load_dump_spr(cpu_env, t0);
379 tcg_temp_free_i32(t0);
380 #endif
381 }
382
383 void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
384 {
385 gen_load_spr(cpu_gpr[gprn], sprn);
386 spr_load_dump_spr(sprn);
387 }
388
389 static void spr_store_dump_spr(int sprn)
390 {
391 #ifdef PPC_DUMP_SPR_ACCESSES
392 TCGv_i32 t0 = tcg_const_i32(sprn);
393 gen_helper_store_dump_spr(cpu_env, t0);
394 tcg_temp_free_i32(t0);
395 #endif
396 }
397
398 void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
399 {
400 gen_store_spr(sprn, cpu_gpr[gprn]);
401 spr_store_dump_spr(sprn);
402 }
403
404 #if !defined(CONFIG_USER_ONLY)
405 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
406 {
407 #ifdef TARGET_PPC64
408 TCGv t0 = tcg_temp_new();
409 tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
410 gen_store_spr(sprn, t0);
411 tcg_temp_free(t0);
412 spr_store_dump_spr(sprn);
413 #else
414 spr_write_generic(ctx, sprn, gprn);
415 #endif
416 }
417
418 void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
419 {
420 TCGv t0 = tcg_temp_new();
421 TCGv t1 = tcg_temp_new();
422 gen_load_spr(t0, sprn);
423 tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
424 tcg_gen_and_tl(t0, t0, t1);
425 gen_store_spr(sprn, t0);
426 tcg_temp_free(t0);
427 tcg_temp_free(t1);
428 }
429
430 void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
431 {
432 }
433
434 #endif
435
436 /* SPR common to all PowerPC */
437 /* XER */
438 void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
439 {
440 TCGv dst = cpu_gpr[gprn];
441 TCGv t0 = tcg_temp_new();
442 TCGv t1 = tcg_temp_new();
443 TCGv t2 = tcg_temp_new();
444 tcg_gen_mov_tl(dst, cpu_xer);
445 tcg_gen_shli_tl(t0, cpu_so, XER_SO);
446 tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
447 tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
448 tcg_gen_or_tl(t0, t0, t1);
449 tcg_gen_or_tl(dst, dst, t2);
450 tcg_gen_or_tl(dst, dst, t0);
451 if (is_isa300(ctx)) {
452 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
453 tcg_gen_or_tl(dst, dst, t0);
454 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
455 tcg_gen_or_tl(dst, dst, t0);
456 }
457 tcg_temp_free(t0);
458 tcg_temp_free(t1);
459 tcg_temp_free(t2);
460 }
461
462 void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
463 {
464 TCGv src = cpu_gpr[gprn];
465     /* Write all flags; the isa300 check is only needed when reading back */
466 tcg_gen_andi_tl(cpu_xer, src,
467 ~((1u << XER_SO) |
468 (1u << XER_OV) | (1u << XER_OV32) |
469 (1u << XER_CA) | (1u << XER_CA32)));
470 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
471 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
472 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
473 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
474 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
475 }
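/*
 * XER is kept split across cpu_xer and the individual SO/OV/CA (and, on
 * ISA v3.0, OV32/CA32) globals so that arithmetic ops can update single
 * flags cheaply.  The mfspr path above reassembles the value by shifting
 * each flag into its architected bit and OR-ing it with the residual
 * cpu_xer bits; the mtspr path splits it back out.
 */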
476
477 /* LR */
478 void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
479 {
480 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
481 }
482
483 void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
484 {
485 tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
486 }
487
488 /* CFAR */
489 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
490 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
491 {
492 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
493 }
494
495 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
496 {
497 tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
498 }
499 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
500
501 /* CTR */
502 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
503 {
504 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
505 }
506
507 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
508 {
509 tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
510 }
511
512 /* User read access to SPR */
513 /* USPRx */
514 /* UMMCRx */
515 /* UPMCx */
516 /* USIA */
517 /* UDECR */
518 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
519 {
520 gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
521 }
522
523 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
524 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
525 {
526 gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
527 }
528 #endif
529
530 /* SPR common to all non-embedded PowerPC */
531 /* DECR */
532 #if !defined(CONFIG_USER_ONLY)
533 void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
534 {
535 gen_icount_io_start(ctx);
536 gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
537 }
538
539 void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
540 {
541 gen_icount_io_start(ctx);
542 gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
543 }
544 #endif
545
546 /* SPR common to all non-embedded PowerPC, except 601 */
547 /* Time base */
548 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
549 {
550 gen_icount_io_start(ctx);
551 gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
552 }
553
554 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
555 {
556 gen_icount_io_start(ctx);
557 gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
558 }
559
560 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
561 {
562 gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
563 }
564
565 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
566 {
567 gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
568 }
569
570 #if !defined(CONFIG_USER_ONLY)
571 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
572 {
573 gen_icount_io_start(ctx);
574 gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
575 }
576
577 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
578 {
579 gen_icount_io_start(ctx);
580 gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
581 }
582
583 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
584 {
585 gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
586 }
587
588 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
589 {
590 gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
591 }
592
593 #if defined(TARGET_PPC64)
594 void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
595 {
596 gen_icount_io_start(ctx);
597 gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
598 }
599
600 void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
601 {
602 gen_icount_io_start(ctx);
603 gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
604 }
605
606 /* HDECR */
607 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
608 {
609 gen_icount_io_start(ctx);
610 gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
611 }
612
613 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
614 {
615 gen_icount_io_start(ctx);
616 gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
617 }
618
619 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
620 {
621 gen_icount_io_start(ctx);
622 gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
623 }
624
625 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
626 {
627 gen_icount_io_start(ctx);
628 gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
629 }
630
631 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
632 {
633 gen_icount_io_start(ctx);
634 gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
635 }
636
637 #endif
638 #endif
639
640 #if !defined(CONFIG_USER_ONLY)
641 /* IBAT0U...IBAT7U */
642 /* IBAT0L...IBAT7L */
643 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
644 {
645 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
646 offsetof(CPUPPCState,
647 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
648 }
649
650 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
651 {
652 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
653 offsetof(CPUPPCState,
654 IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
655 }
656
657 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
658 {
659 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
660 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
661 tcg_temp_free_i32(t0);
662 }
663
664 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
665 {
666 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
667 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
668 tcg_temp_free_i32(t0);
669 }
670
671 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
672 {
673 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
674 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
675 tcg_temp_free_i32(t0);
676 }
677
678 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
679 {
680 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
681 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
682 tcg_temp_free_i32(t0);
683 }
684
685 /* DBAT0U...DBAT7U */
686 /* DBAT0L...DBAT7L */
687 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
688 {
689 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
690 offsetof(CPUPPCState,
691 DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
692 }
693
694 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
695 {
696 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
697 offsetof(CPUPPCState,
698 DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
699 }
700
701 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
702 {
703 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
704 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
705 tcg_temp_free_i32(t0);
706 }
707
708 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
709 {
710 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
711 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
712 tcg_temp_free_i32(t0);
713 }
714
715 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
716 {
717 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
718 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
719 tcg_temp_free_i32(t0);
720 }
721
722 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
723 {
724 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
725 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
726 tcg_temp_free_i32(t0);
727 }
728
729 /* SDR1 */
730 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
731 {
732 gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
733 }
734
735 #if defined(TARGET_PPC64)
736 /* 64-bit PowerPC specific SPRs */
737 /* PIDR */
738 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
739 {
740 gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
741 }
742
743 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
744 {
745 gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
746 }
747
748 void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
749 {
750 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
751 }
752
753 void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
754 {
755 TCGv t0 = tcg_temp_new();
756 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
757 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
758 tcg_temp_free(t0);
759 }
760 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
761 {
762 gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
763 }
764
765 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
766 {
767 gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
768 }
769
770 /* DPDES */
771 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
772 {
773 gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
774 }
775
776 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
777 {
778 gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
779 }
780 #endif
781 #endif
782
783 /* PowerPC 601 specific registers */
784 /* RTC */
785 void spr_read_601_rtcl(DisasContext *ctx, int gprn, int sprn)
786 {
787 gen_helper_load_601_rtcl(cpu_gpr[gprn], cpu_env);
788 }
789
790 void spr_read_601_rtcu(DisasContext *ctx, int gprn, int sprn)
791 {
792 gen_helper_load_601_rtcu(cpu_gpr[gprn], cpu_env);
793 }
794
795 #if !defined(CONFIG_USER_ONLY)
796 void spr_write_601_rtcu(DisasContext *ctx, int sprn, int gprn)
797 {
798 gen_helper_store_601_rtcu(cpu_env, cpu_gpr[gprn]);
799 }
800
801 void spr_write_601_rtcl(DisasContext *ctx, int sprn, int gprn)
802 {
803 gen_helper_store_601_rtcl(cpu_env, cpu_gpr[gprn]);
804 }
805
806 void spr_write_hid0_601(DisasContext *ctx, int sprn, int gprn)
807 {
808 gen_helper_store_hid0_601(cpu_env, cpu_gpr[gprn]);
809 /* Must stop the translation as endianness may have changed */
810 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
811 }
812 #endif
813
814 /* Unified BATs */
815 #if !defined(CONFIG_USER_ONLY)
816 void spr_read_601_ubat(DisasContext *ctx, int gprn, int sprn)
817 {
818 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
819 offsetof(CPUPPCState,
820 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
821 }
822
823 void spr_write_601_ubatu(DisasContext *ctx, int sprn, int gprn)
824 {
825 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
826 gen_helper_store_601_batl(cpu_env, t0, cpu_gpr[gprn]);
827 tcg_temp_free_i32(t0);
828 }
829
830 void spr_write_601_ubatl(DisasContext *ctx, int sprn, int gprn)
831 {
832 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
833 gen_helper_store_601_batu(cpu_env, t0, cpu_gpr[gprn]);
834 tcg_temp_free_i32(t0);
835 }
836 #endif
837
838 /* PowerPC 40x specific registers */
839 #if !defined(CONFIG_USER_ONLY)
840 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
841 {
842 gen_icount_io_start(ctx);
843 gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
844 }
845
846 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
847 {
848 gen_icount_io_start(ctx);
849 gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
850 }
851
852 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
853 {
854 gen_icount_io_start(ctx);
855 gen_store_spr(sprn, cpu_gpr[gprn]);
856 gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
857 /* We must stop translation as we may have rebooted */
858 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
859 }
860
861 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
862 {
863 gen_icount_io_start(ctx);
864 gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
865 }
866
867 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
868 {
869 gen_icount_io_start(ctx);
870 gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
871 }
872
873 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
874 {
875 gen_icount_io_start(ctx);
876 gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
877 }
878 #endif
879
880 /* PowerPC 403 specific registers */
881 /* PBL1 / PBU1 / PBL2 / PBU2 */
882 #if !defined(CONFIG_USER_ONLY)
883 void spr_read_403_pbr(DisasContext *ctx, int gprn, int sprn)
884 {
885 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
886 offsetof(CPUPPCState, pb[sprn - SPR_403_PBL1]));
887 }
888
889 void spr_write_403_pbr(DisasContext *ctx, int sprn, int gprn)
890 {
891 TCGv_i32 t0 = tcg_const_i32(sprn - SPR_403_PBL1);
892 gen_helper_store_403_pbr(cpu_env, t0, cpu_gpr[gprn]);
893 tcg_temp_free_i32(t0);
894 }
895
896 void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
897 {
898 TCGv t0 = tcg_temp_new();
899 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
900 gen_store_spr(SPR_PIR, t0);
901 tcg_temp_free(t0);
902 }
903 #endif
904
905 /* SPE specific registers */
906 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
907 {
908 TCGv_i32 t0 = tcg_temp_new_i32();
909 tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
910 tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
911 tcg_temp_free_i32(t0);
912 }
913
914 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
915 {
916 TCGv_i32 t0 = tcg_temp_new_i32();
917 tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
918 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
919 tcg_temp_free_i32(t0);
920 }
921
922 #if !defined(CONFIG_USER_ONLY)
923 /* Callback used to write the exception vector base */
924 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
925 {
926 TCGv t0 = tcg_temp_new();
927 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
928 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
929 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
930 gen_store_spr(sprn, t0);
931 tcg_temp_free(t0);
932 }
933
934 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
935 {
936 int sprn_offs;
937
938 if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
939 sprn_offs = sprn - SPR_BOOKE_IVOR0;
940 } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
941 sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
942 } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
943 sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
944 } else {
945 printf("Trying to write an unknown exception vector %d %03x\n",
946 sprn, sprn);
947 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
948 return;
949 }
950
951 TCGv t0 = tcg_temp_new();
952 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
953 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
954 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
955 gen_store_spr(sprn, t0);
956 tcg_temp_free(t0);
957 }
958 #endif
959
960 #ifdef TARGET_PPC64
961 #ifndef CONFIG_USER_ONLY
962 void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
963 {
964 TCGv t0 = tcg_temp_new();
965 TCGv t1 = tcg_temp_new();
966 TCGv t2 = tcg_temp_new();
967
968 /*
969 * Note, the HV=1 PR=0 case is handled earlier by simply using
970 * spr_write_generic for HV mode in the SPR table
971 */
972
973 /* Build insertion mask into t1 based on context */
974 if (ctx->pr) {
975 gen_load_spr(t1, SPR_UAMOR);
976 } else {
977 gen_load_spr(t1, SPR_AMOR);
978 }
979
980 /* Mask new bits into t2 */
981 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
982
983 /* Load AMR and clear new bits in t0 */
984 gen_load_spr(t0, SPR_AMR);
985 tcg_gen_andc_tl(t0, t0, t1);
986
987     /* OR in the new bits and write it out */
988 tcg_gen_or_tl(t0, t0, t2);
989 gen_store_spr(SPR_AMR, t0);
990 spr_store_dump_spr(SPR_AMR);
991
992 tcg_temp_free(t0);
993 tcg_temp_free(t1);
994 tcg_temp_free(t2);
995 }
996
997 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
998 {
999 TCGv t0 = tcg_temp_new();
1000 TCGv t1 = tcg_temp_new();
1001 TCGv t2 = tcg_temp_new();
1002
1003 /*
1004 * Note, the HV=1 case is handled earlier by simply using
1005 * spr_write_generic for HV mode in the SPR table
1006 */
1007
1008 /* Build insertion mask into t1 based on context */
1009 gen_load_spr(t1, SPR_AMOR);
1010
1011 /* Mask new bits into t2 */
1012 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1013
1014 /* Load AMR and clear new bits in t0 */
1015 gen_load_spr(t0, SPR_UAMOR);
1016 tcg_gen_andc_tl(t0, t0, t1);
1017
1018     /* OR in the new bits and write it out */
1019 tcg_gen_or_tl(t0, t0, t2);
1020 gen_store_spr(SPR_UAMOR, t0);
1021 spr_store_dump_spr(SPR_UAMOR);
1022
1023 tcg_temp_free(t0);
1024 tcg_temp_free(t1);
1025 tcg_temp_free(t2);
1026 }
1027
1028 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
1029 {
1030 TCGv t0 = tcg_temp_new();
1031 TCGv t1 = tcg_temp_new();
1032 TCGv t2 = tcg_temp_new();
1033
1034 /*
1035 * Note, the HV=1 case is handled earlier by simply using
1036 * spr_write_generic for HV mode in the SPR table
1037 */
1038
1039 /* Build insertion mask into t1 based on context */
1040 gen_load_spr(t1, SPR_AMOR);
1041
1042 /* Mask new bits into t2 */
1043 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1044
1045 /* Load AMR and clear new bits in t0 */
1046 gen_load_spr(t0, SPR_IAMR);
1047 tcg_gen_andc_tl(t0, t0, t1);
1048
1049     /* OR in the new bits and write it out */
1050 tcg_gen_or_tl(t0, t0, t2);
1051 gen_store_spr(SPR_IAMR, t0);
1052 spr_store_dump_spr(SPR_IAMR);
1053
1054 tcg_temp_free(t0);
1055 tcg_temp_free(t1);
1056 tcg_temp_free(t2);
1057 }
1058 #endif
1059 #endif
1060
1061 #ifndef CONFIG_USER_ONLY
1062 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
1063 {
1064 gen_helper_fixup_thrm(cpu_env);
1065 gen_load_spr(cpu_gpr[gprn], sprn);
1066 spr_load_dump_spr(sprn);
1067 }
1068 #endif /* !CONFIG_USER_ONLY */
1069
1070 #if !defined(CONFIG_USER_ONLY)
1071 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1072 {
1073 TCGv t0 = tcg_temp_new();
1074
1075 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1076 gen_store_spr(sprn, t0);
1077 tcg_temp_free(t0);
1078 }
1079
1080 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1081 {
1082 TCGv t0 = tcg_temp_new();
1083
1084 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1085 gen_store_spr(sprn, t0);
1086 tcg_temp_free(t0);
1087 }
1088
1089 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1090 {
1091 TCGv t0 = tcg_temp_new();
1092
1093 tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1094 ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1095 gen_store_spr(sprn, t0);
1096 tcg_temp_free(t0);
1097 }
1098
1099 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
1100 {
1101 gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
1102 }
1103
1104 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1105 {
1106 TCGv_i32 t0 = tcg_const_i32(sprn);
1107 gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
1108 tcg_temp_free_i32(t0);
1109 }
1110 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
1111 {
1112 gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
1113 }
1114 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
1115 {
1116 gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
1117 }
1118
1119 #endif
1120
1121 #if !defined(CONFIG_USER_ONLY)
1122 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1123 {
1124 TCGv val = tcg_temp_new();
1125 tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1126 gen_store_spr(SPR_BOOKE_MAS3, val);
1127 tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1128 gen_store_spr(SPR_BOOKE_MAS7, val);
1129 tcg_temp_free(val);
1130 }
1131
1132 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1133 {
1134 TCGv mas7 = tcg_temp_new();
1135 TCGv mas3 = tcg_temp_new();
1136 gen_load_spr(mas7, SPR_BOOKE_MAS7);
1137 tcg_gen_shli_tl(mas7, mas7, 32);
1138 gen_load_spr(mas3, SPR_BOOKE_MAS3);
1139 tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1140 tcg_temp_free(mas3);
1141 tcg_temp_free(mas7);
1142 }
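/*
 * MAS7_MAS3 is a 64-bit alias presenting MAS7 in the upper and MAS3 in
 * the lower 32 bits; the split/join above keeps the two architected SPRs
 * as the backing store.
 */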
1143
1144 #endif
1145
1146 #ifdef TARGET_PPC64
1147 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1148 int bit, int sprn, int cause)
1149 {
1150 TCGv_i32 t1 = tcg_const_i32(bit);
1151 TCGv_i32 t2 = tcg_const_i32(sprn);
1152 TCGv_i32 t3 = tcg_const_i32(cause);
1153
1154 gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);
1155
1156 tcg_temp_free_i32(t3);
1157 tcg_temp_free_i32(t2);
1158 tcg_temp_free_i32(t1);
1159 }
1160
1161 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1162 int bit, int sprn, int cause)
1163 {
1164 TCGv_i32 t1 = tcg_const_i32(bit);
1165 TCGv_i32 t2 = tcg_const_i32(sprn);
1166 TCGv_i32 t3 = tcg_const_i32(cause);
1167
1168 gen_helper_msr_facility_check(cpu_env, t1, t2, t3);
1169
1170 tcg_temp_free_i32(t3);
1171 tcg_temp_free_i32(t2);
1172 tcg_temp_free_i32(t1);
1173 }
1174
1175 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1176 {
1177 TCGv spr_up = tcg_temp_new();
1178 TCGv spr = tcg_temp_new();
1179
1180 gen_load_spr(spr, sprn - 1);
1181 tcg_gen_shri_tl(spr_up, spr, 32);
1182 tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1183
1184 tcg_temp_free(spr);
1185 tcg_temp_free(spr_up);
1186 }
1187
1188 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1189 {
1190 TCGv spr = tcg_temp_new();
1191
1192 gen_load_spr(spr, sprn - 1);
1193 tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1194 gen_store_spr(sprn - 1, spr);
1195
1196 tcg_temp_free(spr);
1197 }
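/*
 * The two *_prev_upper32 helpers implement the 32-bit "upper half" SPR
 * aliases: they read from, or deposit into, bits 32..63 of the SPR one
 * number below (sprn - 1), as used by the TM and EBB upper32 accessors
 * further down.
 */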
1198
1199 #if !defined(CONFIG_USER_ONLY)
1200 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1201 {
1202 TCGv hmer = tcg_temp_new();
1203
1204 gen_load_spr(hmer, sprn);
1205 tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1206 gen_store_spr(sprn, hmer);
1207 spr_store_dump_spr(sprn);
1208 tcg_temp_free(hmer);
1209 }
1210
1211 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1212 {
1213 gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
1214 }
1215 #endif /* !defined(CONFIG_USER_ONLY) */
1216
1217 void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1218 {
1219 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1220 spr_read_generic(ctx, gprn, sprn);
1221 }
1222
1223 void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1224 {
1225 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1226 spr_write_generic(ctx, sprn, gprn);
1227 }
1228
1229 void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1230 {
1231 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1232 spr_read_generic(ctx, gprn, sprn);
1233 }
1234
1235 void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1236 {
1237 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1238 spr_write_generic(ctx, sprn, gprn);
1239 }
1240
1241 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1242 {
1243 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1244 spr_read_prev_upper32(ctx, gprn, sprn);
1245 }
1246
1247 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1248 {
1249 gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1250 spr_write_prev_upper32(ctx, sprn, gprn);
1251 }
1252
1253 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1254 {
1255 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1256 spr_read_generic(ctx, gprn, sprn);
1257 }
1258
1259 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1260 {
1261 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1262 spr_write_generic(ctx, sprn, gprn);
1263 }
1264
1265 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1266 {
1267 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1268 spr_read_prev_upper32(ctx, gprn, sprn);
1269 }
1270
1271 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1272 {
1273 gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1274 spr_write_prev_upper32(ctx, sprn, gprn);
1275 }
1276 #endif
1277
1278 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
1279 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
1280
1281 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
1282 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
1283
1284 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
1285 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
1286
1287 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
1288 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
1289
1290 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \
1291 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
1292
1293 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
1294 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1295
1296 typedef struct opcode_t {
1297 unsigned char opc1, opc2, opc3, opc4;
1298 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1299 unsigned char pad[4];
1300 #endif
1301 opc_handler_t handler;
1302 const char *oname;
1303 } opcode_t;
1304
1305 /* Helpers for priv. check */
1306 #define GEN_PRIV \
1307 do { \
1308 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
1309 } while (0)
1310
1311 #if defined(CONFIG_USER_ONLY)
1312 #define CHK_HV GEN_PRIV
1313 #define CHK_SV GEN_PRIV
1314 #define CHK_HVRM GEN_PRIV
1315 #else
1316 #define CHK_HV \
1317 do { \
1318 if (unlikely(ctx->pr || !ctx->hv)) { \
1319 GEN_PRIV; \
1320 } \
1321 } while (0)
1322 #define CHK_SV \
1323 do { \
1324 if (unlikely(ctx->pr)) { \
1325 GEN_PRIV; \
1326 } \
1327 } while (0)
1328 #define CHK_HVRM \
1329 do { \
1330 if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
1331 GEN_PRIV; \
1332 } \
1333 } while (0)
1334 #endif
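/*
 * CHK_SV rejects problem state (ctx->pr), CHK_HV additionally requires
 * hypervisor state (ctx->hv), and CHK_HVRM additionally requires real
 * mode (data relocation off, !ctx->dr).  In user-only builds all three
 * unconditionally raise the privileged-opcode exception.
 */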
1335
1336 #define CHK_NONE
1337
1338 /*****************************************************************************/
1339 /* PowerPC instructions table */
1340
1341 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
1342 { \
1343 .opc1 = op1, \
1344 .opc2 = op2, \
1345 .opc3 = op3, \
1346 .opc4 = 0xff, \
1347 .handler = { \
1348 .inval1 = invl, \
1349 .type = _typ, \
1350 .type2 = _typ2, \
1351 .handler = &gen_##name, \
1352 }, \
1353 .oname = stringify(name), \
1354 }
1355 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
1356 { \
1357 .opc1 = op1, \
1358 .opc2 = op2, \
1359 .opc3 = op3, \
1360 .opc4 = 0xff, \
1361 .handler = { \
1362 .inval1 = invl1, \
1363 .inval2 = invl2, \
1364 .type = _typ, \
1365 .type2 = _typ2, \
1366 .handler = &gen_##name, \
1367 }, \
1368 .oname = stringify(name), \
1369 }
1370 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
1371 { \
1372 .opc1 = op1, \
1373 .opc2 = op2, \
1374 .opc3 = op3, \
1375 .opc4 = 0xff, \
1376 .handler = { \
1377 .inval1 = invl, \
1378 .type = _typ, \
1379 .type2 = _typ2, \
1380 .handler = &gen_##name, \
1381 }, \
1382 .oname = onam, \
1383 }
1384 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
1385 { \
1386 .opc1 = op1, \
1387 .opc2 = op2, \
1388 .opc3 = op3, \
1389 .opc4 = op4, \
1390 .handler = { \
1391 .inval1 = invl, \
1392 .type = _typ, \
1393 .type2 = _typ2, \
1394 .handler = &gen_##name, \
1395 }, \
1396 .oname = stringify(name), \
1397 }
1398 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \
1399 { \
1400 .opc1 = op1, \
1401 .opc2 = op2, \
1402 .opc3 = op3, \
1403 .opc4 = op4, \
1404 .handler = { \
1405 .inval1 = invl, \
1406 .type = _typ, \
1407 .type2 = _typ2, \
1408 .handler = &gen_##name, \
1409 }, \
1410 .oname = onam, \
1411 }
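/*
 * For illustration (the operand values here are hypothetical, not a real
 * table entry): GEN_HANDLER(foo, 0x1F, 0x00, 0x01, 0x00000000, PPC_INTEGER)
 * expands to an opcode_t with .opc1/.opc2/.opc3 = 0x1F/0x00/0x01,
 * .opc4 = 0xff (unused), an inner .handler callback pointing at gen_foo()
 * and .oname = "foo".
 */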
1412
1413 /* Invalid instruction */
1414 static void gen_invalid(DisasContext *ctx)
1415 {
1416 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1417 }
1418
1419 static opc_handler_t invalid_handler = {
1420 .inval1 = 0xFFFFFFFF,
1421 .inval2 = 0xFFFFFFFF,
1422 .type = PPC_NONE,
1423 .type2 = PPC_NONE,
1424 .handler = gen_invalid,
1425 };
1426
1427 /*** Integer comparison ***/
1428
1429 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
1430 {
1431 TCGv t0 = tcg_temp_new();
1432 TCGv t1 = tcg_temp_new();
1433 TCGv_i32 t = tcg_temp_new_i32();
1434
1435 tcg_gen_movi_tl(t0, CRF_EQ);
1436 tcg_gen_movi_tl(t1, CRF_LT);
1437 tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1438 t0, arg0, arg1, t1, t0);
1439 tcg_gen_movi_tl(t1, CRF_GT);
1440 tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1441 t0, arg0, arg1, t1, t0);
1442
1443 tcg_gen_trunc_tl_i32(t, t0);
1444 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1445 tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1446
1447 tcg_temp_free(t0);
1448 tcg_temp_free(t1);
1449 tcg_temp_free_i32(t);
1450 }
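/*
 * The movcond chain above starts from CRF_EQ, overwrites it with CRF_LT or
 * CRF_GT when the (signed or unsigned) comparison says so, and then ORs in
 * the current SO bit, so the 4-bit CR field ends up holding exactly one of
 * LT/GT/EQ plus the summary-overflow copy.
 */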
1451
1452 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1453 {
1454 TCGv t0 = tcg_const_tl(arg1);
1455 gen_op_cmp(arg0, t0, s, crf);
1456 tcg_temp_free(t0);
1457 }
1458
1459 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1460 {
1461 TCGv t0, t1;
1462 t0 = tcg_temp_new();
1463 t1 = tcg_temp_new();
1464 if (s) {
1465 tcg_gen_ext32s_tl(t0, arg0);
1466 tcg_gen_ext32s_tl(t1, arg1);
1467 } else {
1468 tcg_gen_ext32u_tl(t0, arg0);
1469 tcg_gen_ext32u_tl(t1, arg1);
1470 }
1471 gen_op_cmp(t0, t1, s, crf);
1472 tcg_temp_free(t1);
1473 tcg_temp_free(t0);
1474 }
1475
1476 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1477 {
1478 TCGv t0 = tcg_const_tl(arg1);
1479 gen_op_cmp32(arg0, t0, s, crf);
1480 tcg_temp_free(t0);
1481 }
1482
1483 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1484 {
1485 if (NARROW_MODE(ctx)) {
1486 gen_op_cmpi32(reg, 0, 1, 0);
1487 } else {
1488 gen_op_cmpi(reg, 0, 1, 0);
1489 }
1490 }
1491
1492 /* cmp */
1493 static void gen_cmp(DisasContext *ctx)
1494 {
1495 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1496 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1497 1, crfD(ctx->opcode));
1498 } else {
1499 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1500 1, crfD(ctx->opcode));
1501 }
1502 }
1503
1504 /* cmpi */
1505 static void gen_cmpi(DisasContext *ctx)
1506 {
1507 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1508 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
1509 1, crfD(ctx->opcode));
1510 } else {
1511 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
1512 1, crfD(ctx->opcode));
1513 }
1514 }
1515
1516 /* cmpl */
1517 static void gen_cmpl(DisasContext *ctx)
1518 {
1519 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1520 gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1521 0, crfD(ctx->opcode));
1522 } else {
1523 gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
1524 0, crfD(ctx->opcode));
1525 }
1526 }
1527
1528 /* cmpli */
1529 static void gen_cmpli(DisasContext *ctx)
1530 {
1531 if ((ctx->opcode & 0x00200000) && (ctx->insns_flags & PPC_64B)) {
1532 gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
1533 0, crfD(ctx->opcode));
1534 } else {
1535 gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
1536 0, crfD(ctx->opcode));
1537 }
1538 }
1539
1540 /* cmprb - range comparison: isupper, isalpha, islower */
1541 static void gen_cmprb(DisasContext *ctx)
1542 {
1543 TCGv_i32 src1 = tcg_temp_new_i32();
1544 TCGv_i32 src2 = tcg_temp_new_i32();
1545 TCGv_i32 src2lo = tcg_temp_new_i32();
1546 TCGv_i32 src2hi = tcg_temp_new_i32();
1547 TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];
1548
1549 tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
1550 tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);
1551
1552 tcg_gen_andi_i32(src1, src1, 0xFF);
1553 tcg_gen_ext8u_i32(src2lo, src2);
1554 tcg_gen_shri_i32(src2, src2, 8);
1555 tcg_gen_ext8u_i32(src2hi, src2);
1556
1557 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1558 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1559 tcg_gen_and_i32(crf, src2lo, src2hi);
1560
1561 if (ctx->opcode & 0x00200000) {
1562 tcg_gen_shri_i32(src2, src2, 8);
1563 tcg_gen_ext8u_i32(src2lo, src2);
1564 tcg_gen_shri_i32(src2, src2, 8);
1565 tcg_gen_ext8u_i32(src2hi, src2);
1566 tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1567 tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1568 tcg_gen_and_i32(src2lo, src2lo, src2hi);
1569 tcg_gen_or_i32(crf, crf, src2lo);
1570 }
1571 tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
1572 tcg_temp_free_i32(src1);
1573 tcg_temp_free_i32(src2);
1574 tcg_temp_free_i32(src2lo);
1575 tcg_temp_free_i32(src2hi);
1576 }
1577
1578 #if defined(TARGET_PPC64)
1579 /* cmpeqb */
1580 static void gen_cmpeqb(DisasContext *ctx)
1581 {
1582 gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1583 cpu_gpr[rB(ctx->opcode)]);
1584 }
1585 #endif
1586
1587 /* isel (PowerPC 2.03 specification) */
1588 static void gen_isel(DisasContext *ctx)
1589 {
1590 uint32_t bi = rC(ctx->opcode);
1591 uint32_t mask = 0x08 >> (bi & 0x03);
1592 TCGv t0 = tcg_temp_new();
1593 TCGv zr;
1594
1595 tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
1596 tcg_gen_andi_tl(t0, t0, mask);
1597
1598 zr = tcg_const_tl(0);
1599 tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
1600 rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
1601 cpu_gpr[rB(ctx->opcode)]);
1602 tcg_temp_free(zr);
1603 tcg_temp_free(t0);
1604 }
1605
1606 /* cmpb: PowerPC 2.05 specification */
1607 static void gen_cmpb(DisasContext *ctx)
1608 {
1609 gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
1610 cpu_gpr[rB(ctx->opcode)]);
1611 }
1612
1613 /*** Integer arithmetic ***/
1614
1615 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1616 TCGv arg1, TCGv arg2, int sub)
1617 {
1618 TCGv t0 = tcg_temp_new();
1619
1620 tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1621 tcg_gen_xor_tl(t0, arg1, arg2);
1622 if (sub) {
1623 tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1624 } else {
1625 tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1626 }
1627 tcg_temp_free(t0);
1628 if (NARROW_MODE(ctx)) {
1629 tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1630 if (is_isa300(ctx)) {
1631 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1632 }
1633 } else {
1634 if (is_isa300(ctx)) {
1635 tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1636 }
1637 tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1638 }
1639 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1640 }
1641
1642 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1643 TCGv res, TCGv arg0, TCGv arg1,
1644 TCGv ca32, int sub)
1645 {
1646 TCGv t0;
1647
1648 if (!is_isa300(ctx)) {
1649 return;
1650 }
1651
1652 t0 = tcg_temp_new();
1653 if (sub) {
1654 tcg_gen_eqv_tl(t0, arg0, arg1);
1655 } else {
1656 tcg_gen_xor_tl(t0, arg0, arg1);
1657 }
1658 tcg_gen_xor_tl(t0, t0, res);
1659 tcg_gen_extract_tl(ca32, t0, 32, 1);
1660 tcg_temp_free(t0);
1661 }
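/*
 * CA32 (the carry out of bit 31) is recovered without redoing a 32-bit
 * addition: for res = a + b, the carry into bit 32 equals bit 32 of
 * (a ^ b ^ res); for subtraction one operand enters complemented, hence
 * the eqv above.
 */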
1662
1663 /* Common add function */
1664 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1665 TCGv arg2, TCGv ca, TCGv ca32,
1666 bool add_ca, bool compute_ca,
1667 bool compute_ov, bool compute_rc0)
1668 {
1669 TCGv t0 = ret;
1670
1671 if (compute_ca || compute_ov) {
1672 t0 = tcg_temp_new();
1673 }
1674
1675 if (compute_ca) {
1676 if (NARROW_MODE(ctx)) {
1677 /*
1678 * Caution: a non-obvious corner case of the spec is that
1679              * we must produce the *entire* 64-bit addition, yet report
1680              * the carry into bit 32 (the carry out of the low 32 bits).
1681 */
1682 TCGv t1 = tcg_temp_new();
1683 tcg_gen_xor_tl(t1, arg1, arg2); /* add without carry */
1684 tcg_gen_add_tl(t0, arg1, arg2);
1685 if (add_ca) {
1686 tcg_gen_add_tl(t0, t0, ca);
1687 }
1688 tcg_gen_xor_tl(ca, t0, t1); /* bits changed w/ carry */
1689 tcg_temp_free(t1);
1690 tcg_gen_extract_tl(ca, ca, 32, 1);
1691 if (is_isa300(ctx)) {
1692 tcg_gen_mov_tl(ca32, ca);
1693 }
1694 } else {
1695 TCGv zero = tcg_const_tl(0);
1696 if (add_ca) {
1697 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1698 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1699 } else {
1700 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1701 }
1702 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1703 tcg_temp_free(zero);
1704 }
1705 } else {
1706 tcg_gen_add_tl(t0, arg1, arg2);
1707 if (add_ca) {
1708 tcg_gen_add_tl(t0, t0, ca);
1709 }
1710 }
1711
1712 if (compute_ov) {
1713 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1714 }
1715 if (unlikely(compute_rc0)) {
1716 gen_set_Rc0(ctx, t0);
1717 }
1718
1719 if (t0 != ret) {
1720 tcg_gen_mov_tl(ret, t0);
1721 tcg_temp_free(t0);
1722 }
1723 }
1724 /* Add functions with two operands */
1725 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \
1726 static void glue(gen_, name)(DisasContext *ctx) \
1727 { \
1728 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1729 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1730 ca, glue(ca, 32), \
1731 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1732 }
1733 /* Add functions with one operand and one immediate */
1734 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \
1735 add_ca, compute_ca, compute_ov) \
1736 static void glue(gen_, name)(DisasContext *ctx) \
1737 { \
1738 TCGv t0 = tcg_const_tl(const_val); \
1739 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
1740 cpu_gpr[rA(ctx->opcode)], t0, \
1741 ca, glue(ca, 32), \
1742 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
1743 tcg_temp_free(t0); \
1744 }
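/*
 * Each use of these macros emits one gen_<name>() wrapper around
 * gen_op_arith_add(); passing Rc(ctx->opcode) as compute_rc0 means the
 * record ('.') forms set CR0 automatically without a separate handler.
 */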
1745
1746 /* add add. addo addo. */
1747 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
1748 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
1749 /* addc addc. addco addco. */
1750 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
1751 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
1752 /* adde adde. addeo addeo. */
1753 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
1754 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
1755 /* addme addme. addmeo addmeo. */
1756 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
1757 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
1758 /* addex */
1759 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
1760 /* addze addze. addzeo addzeo.*/
1761 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
1762 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
1763 /* addi */
1764 static void gen_addi(DisasContext *ctx)
1765 {
1766 target_long simm = SIMM(ctx->opcode);
1767
1768 if (rA(ctx->opcode) == 0) {
1769 /* li case */
1770 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
1771 } else {
1772 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
1773 cpu_gpr[rA(ctx->opcode)], simm);
1774 }
1775 }
1776 /* addic addic.*/
1777 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1778 {
1779 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1780 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1781 c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
1782 tcg_temp_free(c);
1783 }
1784
1785 static void gen_addic(DisasContext *ctx)
1786 {
1787 gen_op_addic(ctx, 0);
1788 }
1789
1790 static void gen_addic_(DisasContext *ctx)
1791 {
1792 gen_op_addic(ctx, 1);
1793 }
1794
1795 /* addis */
1796 static void gen_addis(DisasContext *ctx)
1797 {
1798 target_long simm = SIMM(ctx->opcode);
1799
1800 if (rA(ctx->opcode) == 0) {
1801 /* lis case */
1802 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
1803 } else {
1804 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)],
1805 cpu_gpr[rA(ctx->opcode)], simm << 16);
1806 }
1807 }
1808
1809 /* addpcis */
1810 static void gen_addpcis(DisasContext *ctx)
1811 {
1812 target_long d = DX(ctx->opcode);
1813
1814 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], ctx->base.pc_next + (d << 16));
1815 }
1816
1817 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
1818 TCGv arg2, int sign, int compute_ov)
1819 {
1820 TCGv_i32 t0 = tcg_temp_new_i32();
1821 TCGv_i32 t1 = tcg_temp_new_i32();
1822 TCGv_i32 t2 = tcg_temp_new_i32();
1823 TCGv_i32 t3 = tcg_temp_new_i32();
1824
1825 tcg_gen_trunc_tl_i32(t0, arg1);
1826 tcg_gen_trunc_tl_i32(t1, arg2);
1827 if (sign) {
1828 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1829 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1830 tcg_gen_and_i32(t2, t2, t3);
1831 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1832 tcg_gen_or_i32(t2, t2, t3);
1833 tcg_gen_movi_i32(t3, 0);
1834 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1835 tcg_gen_div_i32(t3, t0, t1);
1836 tcg_gen_extu_i32_tl(ret, t3);
1837 } else {
1838 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1839 tcg_gen_movi_i32(t3, 0);
1840 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1841 tcg_gen_divu_i32(t3, t0, t1);
1842 tcg_gen_extu_i32_tl(ret, t3);
1843 }
1844 if (compute_ov) {
1845 tcg_gen_extu_i32_tl(cpu_ov, t2);
1846 if (is_isa300(ctx)) {
1847 tcg_gen_extu_i32_tl(cpu_ov32, t2);
1848 }
1849 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1850 }
1851 tcg_temp_free_i32(t0);
1852 tcg_temp_free_i32(t1);
1853 tcg_temp_free_i32(t2);
1854 tcg_temp_free_i32(t3);
1855
1856 if (unlikely(Rc(ctx->opcode) != 0)) {
1857 gen_set_Rc0(ctx, ret);
1858 }
1859 }
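/*
 * t2 flags the cases the ISA leaves undefined (divide by zero and, for the
 * signed form, INT_MIN / -1).  The movcond forces the divisor to 1 in those
 * cases so the host division can never trap, and when compute_ov is
 * requested t2 is copied into OV (and OV32 on ISA v3.0).
 */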
1860 /* Div functions */
1861 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
1862 static void glue(gen_, name)(DisasContext *ctx) \
1863 { \
1864 gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
1865 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1866 sign, compute_ov); \
1867 }
1868 /* divwu divwu. divwuo divwuo. */
1869 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1870 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1871 /* divw divw. divwo divwo. */
1872 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1873 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1874
1875 /* div[wd]eu[o][.] */
1876 #define GEN_DIVE(name, hlpr, compute_ov) \
1877 static void gen_##name(DisasContext *ctx) \
1878 { \
1879 TCGv_i32 t0 = tcg_const_i32(compute_ov); \
1880 gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \
1881 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
1882 tcg_temp_free_i32(t0); \
1883 if (unlikely(Rc(ctx->opcode) != 0)) { \
1884 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
1885 } \
1886 }
1887
1888 GEN_DIVE(divweu, divweu, 0);
1889 GEN_DIVE(divweuo, divweu, 1);
1890 GEN_DIVE(divwe, divwe, 0);
1891 GEN_DIVE(divweo, divwe, 1);
1892
1893 #if defined(TARGET_PPC64)
1894 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1895 TCGv arg2, int sign, int compute_ov)
1896 {
1897 TCGv_i64 t0 = tcg_temp_new_i64();
1898 TCGv_i64 t1 = tcg_temp_new_i64();
1899 TCGv_i64 t2 = tcg_temp_new_i64();
1900 TCGv_i64 t3 = tcg_temp_new_i64();
1901
1902 tcg_gen_mov_i64(t0, arg1);
1903 tcg_gen_mov_i64(t1, arg2);
1904 if (sign) {
1905 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1906 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1907 tcg_gen_and_i64(t2, t2, t3);
1908 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1909 tcg_gen_or_i64(t2, t2, t3);
1910 tcg_gen_movi_i64(t3, 0);
1911 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1912 tcg_gen_div_i64(ret, t0, t1);
1913 } else {
1914 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1915 tcg_gen_movi_i64(t3, 0);
1916 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1917 tcg_gen_divu_i64(ret, t0, t1);
1918 }
1919 if (compute_ov) {
1920 tcg_gen_mov_tl(cpu_ov, t2);
1921 if (is_isa300(ctx)) {
1922 tcg_gen_mov_tl(cpu_ov32, t2);
1923 }
1924 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1925 }
1926 tcg_temp_free_i64(t0);
1927 tcg_temp_free_i64(t1);
1928 tcg_temp_free_i64(t2);
1929 tcg_temp_free_i64(t3);
1930
1931 if (unlikely(Rc(ctx->opcode) != 0)) {
1932 gen_set_Rc0(ctx, ret);
1933 }
1934 }
1935
1936 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
1937 static void glue(gen_, name)(DisasContext *ctx) \
1938 { \
1939 gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
1940 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1941 sign, compute_ov); \
1942 }
1943 /* divdu divdu. divduo divduo. */
1944 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1945 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1946 /* divd divd. divdo divdo. */
1947 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1948 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1949
1950 GEN_DIVE(divdeu, divdeu, 0);
1951 GEN_DIVE(divdeuo, divdeu, 1);
1952 GEN_DIVE(divde, divde, 0);
1953 GEN_DIVE(divdeo, divde, 1);
1954 #endif
1955
1956 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1957 TCGv arg2, int sign)
1958 {
1959 TCGv_i32 t0 = tcg_temp_new_i32();
1960 TCGv_i32 t1 = tcg_temp_new_i32();
1961
1962 tcg_gen_trunc_tl_i32(t0, arg1);
1963 tcg_gen_trunc_tl_i32(t1, arg2);
1964 if (sign) {
1965 TCGv_i32 t2 = tcg_temp_new_i32();
1966 TCGv_i32 t3 = tcg_temp_new_i32();
1967 tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1968 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1969 tcg_gen_and_i32(t2, t2, t3);
1970 tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1971 tcg_gen_or_i32(t2, t2, t3);
1972 tcg_gen_movi_i32(t3, 0);
1973 tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1974 tcg_gen_rem_i32(t3, t0, t1);
1975 tcg_gen_ext_i32_tl(ret, t3);
1976 tcg_temp_free_i32(t2);
1977 tcg_temp_free_i32(t3);
1978 } else {
1979 TCGv_i32 t2 = tcg_const_i32(1);
1980 TCGv_i32 t3 = tcg_const_i32(0);
1981 tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1982 tcg_gen_remu_i32(t3, t0, t1);
1983 tcg_gen_extu_i32_tl(ret, t3);
1984 tcg_temp_free_i32(t2);
1985 tcg_temp_free_i32(t3);
1986 }
1987 tcg_temp_free_i32(t0);
1988 tcg_temp_free_i32(t1);
1989 }
1990
1991 #define GEN_INT_ARITH_MODW(name, opc3, sign) \
1992 static void glue(gen_, name)(DisasContext *ctx) \
1993 { \
1994 gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \
1995 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
1996 sign); \
1997 }
1998
1999 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
2000 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
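/*
 * moduw/modsw (ISA v3.00): as in the divide paths above, the divisor is
 * replaced with a harmless value (1) whenever the real operation would
 * trap on the host (divisor == 0, or INT32_MIN % -1 when signed); the
 * architecture leaves the result undefined in those cases, and the
 * generated code above sets neither OV nor CR0.  The signed remainder
 * follows the sign of the dividend, e.g. modsw rt,ra,rb with ra = -7
 * and rb = 3 gives rt = -1.
 */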
2001
2002 #if defined(TARGET_PPC64)
2003 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
2004 TCGv arg2, int sign)
2005 {
2006 TCGv_i64 t0 = tcg_temp_new_i64();
2007 TCGv_i64 t1 = tcg_temp_new_i64();
2008
2009 tcg_gen_mov_i64(t0, arg1);
2010 tcg_gen_mov_i64(t1, arg2);
2011 if (sign) {
2012 TCGv_i64 t2 = tcg_temp_new_i64();
2013 TCGv_i64 t3 = tcg_temp_new_i64();
2014 tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
2015 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
2016 tcg_gen_and_i64(t2, t2, t3);
2017 tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
2018 tcg_gen_or_i64(t2, t2, t3);
2019 tcg_gen_movi_i64(t3, 0);
2020 tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
2021 tcg_gen_rem_i64(ret, t0, t1);
2022 tcg_temp_free_i64(t2);
2023 tcg_temp_free_i64(t3);
2024 } else {
2025 TCGv_i64 t2 = tcg_const_i64(1);
2026 TCGv_i64 t3 = tcg_const_i64(0);
2027 tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
2028 tcg_gen_remu_i64(ret, t0, t1);
2029 tcg_temp_free_i64(t2);
2030 tcg_temp_free_i64(t3);
2031 }
2032 tcg_temp_free_i64(t0);
2033 tcg_temp_free_i64(t1);
2034 }
2035
2036 #define GEN_INT_ARITH_MODD(name, opc3, sign) \
2037 static void glue(gen_, name)(DisasContext *ctx) \
2038 { \
2039 gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \
2040 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2041 sign); \
2042 }
2043
2044 GEN_INT_ARITH_MODD(modud, 0x08, 0);
2045 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
2046 #endif
2047
2048 /* mulhw mulhw. */
2049 static void gen_mulhw(DisasContext *ctx)
2050 {
2051 TCGv_i32 t0 = tcg_temp_new_i32();
2052 TCGv_i32 t1 = tcg_temp_new_i32();
2053
2054 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2055 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2056 tcg_gen_muls2_i32(t0, t1, t0, t1);
2057 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
2058 tcg_temp_free_i32(t0);
2059 tcg_temp_free_i32(t1);
2060 if (unlikely(Rc(ctx->opcode) != 0)) {
2061 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2062 }
2063 }
2064
2065 /* mulhwu mulhwu. */
2066 static void gen_mulhwu(DisasContext *ctx)
2067 {
2068 TCGv_i32 t0 = tcg_temp_new_i32();
2069 TCGv_i32 t1 = tcg_temp_new_i32();
2070
2071 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2072 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2073 tcg_gen_mulu2_i32(t0, t1, t0, t1);
2074 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
2075 tcg_temp_free_i32(t0);
2076 tcg_temp_free_i32(t1);
2077 if (unlikely(Rc(ctx->opcode) != 0)) {
2078 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2079 }
2080 }
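/*
 * mulhw/mulhwu return only the high 32 bits of the 64-bit product; the
 * low half is discarded (mullw provides it).  Example: with
 * rA = 0x40000000 and rB = 4 the full product is 0x1_0000_0000, so
 * mulhw writes 1 to rD while mullw would write 0.  On 64-bit targets
 * the upper half of rD is simply zero-extended, which is acceptable
 * because the architecture leaves those bits undefined for the word
 * multiply forms.
 */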
2081
2082 /* mullw mullw. */
2083 static void gen_mullw(DisasContext *ctx)
2084 {
2085 #if defined(TARGET_PPC64)
2086 TCGv_i64 t0, t1;
2087 t0 = tcg_temp_new_i64();
2088 t1 = tcg_temp_new_i64();
2089 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
2090 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
2091 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2092 tcg_temp_free(t0);
2093 tcg_temp_free(t1);
2094 #else
2095 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2096 cpu_gpr[rB(ctx->opcode)]);
2097 #endif
2098 if (unlikely(Rc(ctx->opcode) != 0)) {
2099 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2100 }
2101 }
2102
2103 /* mullwo mullwo. */
2104 static void gen_mullwo(DisasContext *ctx)
2105 {
2106 TCGv_i32 t0 = tcg_temp_new_i32();
2107 TCGv_i32 t1 = tcg_temp_new_i32();
2108
2109 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2110 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2111 tcg_gen_muls2_i32(t0, t1, t0, t1);
2112 #if defined(TARGET_PPC64)
2113 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2114 #else
2115 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
2116 #endif
2117
2118 tcg_gen_sari_i32(t0, t0, 31);
2119 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
2120 tcg_gen_extu_i32_tl(cpu_ov, t0);
2121 if (is_isa300(ctx)) {
2122 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2123 }
2124 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2125
2126 tcg_temp_free_i32(t0);
2127 tcg_temp_free_i32(t1);
2128 if (unlikely(Rc(ctx->opcode) != 0)) {
2129 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2130 }
2131 }
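/*
 * Overflow detection for mullwo: after muls2, t0 holds the low and t1
 * the high word of the signed 64-bit product.  The product fits in 32
 * bits exactly when t1 equals the sign extension of t0, so OV is set
 * when (t0 >> 31) != t1 (arithmetic shift).  Example: 0x10000 * 0x10000
 * gives t0 = 0, t1 = 1; the sign extension of t0 is 0 != 1, hence
 * OV = 1.  gen_mulldo below applies the same check at 64 bits.
 */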
2132
2133 /* mulli */
2134 static void gen_mulli(DisasContext *ctx)
2135 {
2136 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2137 SIMM(ctx->opcode));
2138 }
2139
2140 #if defined(TARGET_PPC64)
2141 /* mulhd mulhd. */
2142 static void gen_mulhd(DisasContext *ctx)
2143 {
2144 TCGv lo = tcg_temp_new();
2145 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2146 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2147 tcg_temp_free(lo);
2148 if (unlikely(Rc(ctx->opcode) != 0)) {
2149 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2150 }
2151 }
2152
2153 /* mulhdu mulhdu. */
2154 static void gen_mulhdu(DisasContext *ctx)
2155 {
2156 TCGv lo = tcg_temp_new();
2157 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2158 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2159 tcg_temp_free(lo);
2160 if (unlikely(Rc(ctx->opcode) != 0)) {
2161 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2162 }
2163 }
2164
2165 /* mulld mulld. */
2166 static void gen_mulld(DisasContext *ctx)
2167 {
2168 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2169 cpu_gpr[rB(ctx->opcode)]);
2170 if (unlikely(Rc(ctx->opcode) != 0)) {
2171 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2172 }
2173 }
2174
2175 /* mulldo mulldo. */
2176 static void gen_mulldo(DisasContext *ctx)
2177 {
2178 TCGv_i64 t0 = tcg_temp_new_i64();
2179 TCGv_i64 t1 = tcg_temp_new_i64();
2180
2181 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
2182 cpu_gpr[rB(ctx->opcode)]);
2183 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
2184
2185 tcg_gen_sari_i64(t0, t0, 63);
2186 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
2187 if (is_isa300(ctx)) {
2188 tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2189 }
2190 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2191
2192 tcg_temp_free_i64(t0);
2193 tcg_temp_free_i64(t1);
2194
2195 if (unlikely(Rc(ctx->opcode) != 0)) {
2196 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2197 }
2198 }
2199 #endif
2200
2201 /* Common subf function */
2202 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
2203 TCGv arg2, bool add_ca, bool compute_ca,
2204 bool compute_ov, bool compute_rc0)
2205 {
2206 TCGv t0 = ret;
2207
2208 if (compute_ca || compute_ov) {
2209 t0 = tcg_temp_new();
2210 }
2211
2212 if (compute_ca) {
2213 /* dest = ~arg1 + arg2 [+ ca]. */
2214 if (NARROW_MODE(ctx)) {
2215 /*
2216 * Caution: a non-obvious corner case of the spec is that
2217 * we must compute the *entire* 64-bit addition, but take
2218 * the carry from bit 32.
2219 */
2220 TCGv inv1 = tcg_temp_new();
2221 TCGv t1 = tcg_temp_new();
2222 tcg_gen_not_tl(inv1, arg1);
2223 if (add_ca) {
2224 tcg_gen_add_tl(t0, arg2, cpu_ca);
2225 } else {
2226 tcg_gen_addi_tl(t0, arg2, 1);
2227 }
2228 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */
2229 tcg_gen_add_tl(t0, t0, inv1);
2230 tcg_temp_free(inv1);
2231 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */
2232 tcg_temp_free(t1);
2233 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
2234 if (is_isa300(ctx)) {
2235 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2236 }
2237 } else if (add_ca) {
2238 TCGv zero, inv1 = tcg_temp_new();
2239 tcg_gen_not_tl(inv1, arg1);
2240 zero = tcg_const_tl(0);
2241 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
2242 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
2243 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
2244 tcg_temp_free(zero);
2245 tcg_temp_free(inv1);
2246 } else {
2247 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
2248 tcg_gen_sub_tl(t0, arg2, arg1);
2249 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
2250 }
2251 } else if (add_ca) {
2252 /*
2253 * Since we're ignoring carry-out, we can simplify the
2254 * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
2255 */
2256 tcg_gen_sub_tl(t0, arg2, arg1);
2257 tcg_gen_add_tl(t0, t0, cpu_ca);
2258 tcg_gen_subi_tl(t0, t0, 1);
2259 } else {
2260 tcg_gen_sub_tl(t0, arg2, arg1);
2261 }
2262
2263 if (compute_ov) {
2264 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
2265 }
2266 if (unlikely(compute_rc0)) {
2267 gen_set_Rc0(ctx, t0);
2268 }
2269
2270 if (t0 != ret) {
2271 tcg_gen_mov_tl(ret, t0);
2272 tcg_temp_free(t0);
2273 }
2274 }
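/*
 * The subtract-from family (subf, subfc, subfe, ...) is implemented as
 * ~arg1 + arg2 (+ 1 or + CA), which makes the carry behave exactly as
 * for addition: CA = 1 means "no borrow".  Small example: subfc with
 * rA = 5, rB = 3 computes 3 - 5 = -2 and clears CA (3 < 5 unsigned),
 * whereas rA = 3, rB = 5 gives 2 with CA = 1.  In narrow (32-bit) mode
 * the full target-width addition is still performed, but CA/CA32 are
 * taken from bit 32 of the result.
 */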
2275 /* Subtract-from functions with two register operands */
2276 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
2277 static void glue(gen_, name)(DisasContext *ctx) \
2278 { \
2279 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
2280 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
2281 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
2282 }
2283 /* Sub functions with one operand and one immediate */
2284 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
2285 add_ca, compute_ca, compute_ov) \
2286 static void glue(gen_, name)(DisasContext *ctx) \
2287 { \
2288 TCGv t0 = tcg_const_tl(const_val); \
2289 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \
2290 cpu_gpr[rA(ctx->opcode)], t0, \
2291 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
2292 tcg_temp_free(t0); \
2293 }
2294 /* subf subf. subfo subfo. */
2295 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
2296 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
2297 /* subfc subfc. subfco subfco. */
2298 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
2299 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
2300 /* subfe subfe. subfeo subfeo. */
2301 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
2302 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
2303 /* subfme subfme. subfmeo subfmeo. */
2304 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
2305 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
2306 /* subfze subfze. subfzeo subfzeo. */
2307 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
2308 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2309
2310 /* subfic */
2311 static void gen_subfic(DisasContext *ctx)
2312 {
2313 TCGv c = tcg_const_tl(SIMM(ctx->opcode));
2314 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2315 c, 0, 1, 0, 0);
2316 tcg_temp_free(c);
2317 }
2318
2319 /* neg neg. nego nego. */
2320 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2321 {
2322 TCGv zero = tcg_const_tl(0);
2323 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2324 zero, 0, 0, compute_ov, Rc(ctx->opcode));
2325 tcg_temp_free(zero);
2326 }
2327
2328 static void gen_neg(DisasContext *ctx)
2329 {
2330 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2331 if (unlikely(Rc(ctx->opcode))) {
2332 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2333 }
2334 }
2335
2336 static void gen_nego(DisasContext *ctx)
2337 {
2338 gen_op_arith_neg(ctx, 1);
2339 }
2340
2341 /*** Integer logical ***/
2342 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
2343 static void glue(gen_, name)(DisasContext *ctx) \
2344 { \
2345 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \
2346 cpu_gpr[rB(ctx->opcode)]); \
2347 if (unlikely(Rc(ctx->opcode) != 0)) \
2348 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
2349 }
2350
2351 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
2352 static void glue(gen_, name)(DisasContext *ctx) \
2353 { \
2354 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \
2355 if (unlikely(Rc(ctx->opcode) != 0)) \
2356 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \
2357 }
2358
2359 /* and & and. */
2360 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2361 /* andc & andc. */
2362 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2363
2364 /* andi. */
2365 static void gen_andi_(DisasContext *ctx)
2366 {
2367 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2368 UIMM(ctx->opcode));
2369 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2370 }
2371
2372 /* andis. */
2373 static void gen_andis_(DisasContext *ctx)
2374 {
2375 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2376 UIMM(ctx->opcode) << 16);
2377 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2378 }
2379
2380 /* cntlzw */
2381 static void gen_cntlzw(DisasContext *ctx)
2382 {
2383 TCGv_i32 t = tcg_temp_new_i32();
2384
2385 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2386 tcg_gen_clzi_i32(t, t, 32);
2387 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2388 tcg_temp_free_i32(t);
2389
2390 if (unlikely(Rc(ctx->opcode) != 0)) {
2391 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2392 }
2393 }
2394
2395 /* cnttzw */
2396 static void gen_cnttzw(DisasContext *ctx)
2397 {
2398 TCGv_i32 t = tcg_temp_new_i32();
2399
2400 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2401 tcg_gen_ctzi_i32(t, t, 32);
2402 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2403 tcg_temp_free_i32(t);
2404
2405 if (unlikely(Rc(ctx->opcode) != 0)) {
2406 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2407 }
2408 }
2409
2410 /* eqv & eqv. */
2411 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
2412 /* extsb & extsb. */
2413 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
2414 /* extsh & extsh. */
2415 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
2416 /* nand & nand. */
2417 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
2418 /* nor & nor. */
2419 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2420
2421 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
2422 static void gen_pause(DisasContext *ctx)
2423 {
2424 TCGv_i32 t0 = tcg_const_i32(0);
2425 tcg_gen_st_i32(t0, cpu_env,
2426 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
2427 tcg_temp_free_i32(t0);
2428
2429 /* Stop translation, this gives other CPUs a chance to run */
2430 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
2431 }
2432 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2433
2434 /* or & or. */
2435 static void gen_or(DisasContext *ctx)
2436 {
2437 int rs, ra, rb;
2438
2439 rs = rS(ctx->opcode);
2440 ra = rA(ctx->opcode);
2441 rb = rB(ctx->opcode);
2442 /* Optimisation for mr. ri case */
2443 if (rs != ra || rs != rb) {
2444 if (rs != rb) {
2445 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
2446 } else {
2447 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
2448 }
2449 if (unlikely(Rc(ctx->opcode) != 0)) {
2450 gen_set_Rc0(ctx, cpu_gpr[ra]);
2451 }
2452 } else if (unlikely(Rc(ctx->opcode) != 0)) {
2453 gen_set_Rc0(ctx, cpu_gpr[rs]);
2454 #if defined(TARGET_PPC64)
2455 } else if (rs != 0) { /* 0 is nop */
2456 int prio = 0;
2457
2458 switch (rs) {
2459 case 1:
2460 /* Set process priority to low */
2461 prio = 2;
2462 break;
2463 case 6:
2464 /* Set process priority to medium-low */
2465 prio = 3;
2466 break;
2467 case 2:
2468 /* Set process priority to normal */
2469 prio = 4;
2470 break;
2471 #if !defined(CONFIG_USER_ONLY)
2472 case 31:
2473 if (!ctx->pr) {
2474 /* Set process priority to very low */
2475 prio = 1;
2476 }
2477 break;
2478 case 5:
2479 if (!ctx->pr) {
2480 /* Set process priority to medium-high */
2481 prio = 5;
2482 }
2483 break;
2484 case 3:
2485 if (!ctx->pr) {
2486 /* Set process priority to high */
2487 prio = 6;
2488 }
2489 break;
2490 case 7:
2491 if (ctx->hv && !ctx->pr) {
2492 /* Set process priority to very high */
2493 prio = 7;
2494 }
2495 break;
2496 #endif
2497 default:
2498 break;
2499 }
2500 if (prio) {
2501 TCGv t0 = tcg_temp_new();
2502 gen_load_spr(t0, SPR_PPR);
2503 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
2504 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
2505 gen_store_spr(SPR_PPR, t0);
2506 tcg_temp_free(t0);
2507 }
2508 #if !defined(CONFIG_USER_ONLY)
2509 /*
2510 * Pause out of TCG, otherwise spin loops with smt_low eat too
2511 * much CPU and the kernel hangs. This applies to all
2512 * encodings other than no-op, e.g., miso(rs=26), yield(27),
2513 * mdoio(29), mdoom(30), and all currently undefined.
2514 */
2515 gen_pause(ctx);
2516 #endif
2517 #endif
2518 }
2519 }
2520 /* orc & orc. */
2521 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2522
2523 /* xor & xor. */
2524 static void gen_xor(DisasContext *ctx)
2525 {
2526 /* Optimisation for "set to zero" case */
2527 if (rS(ctx->opcode) != rB(ctx->opcode)) {
2528 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2529 cpu_gpr[rB(ctx->opcode)]);
2530 } else {
2531 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2532 }
2533 if (unlikely(Rc(ctx->opcode) != 0)) {
2534 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2535 }
2536 }
2537
2538 /* ori */
2539 static void gen_ori(DisasContext *ctx)
2540 {
2541 target_ulong uimm = UIMM(ctx->opcode);
2542
2543 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2544 return;
2545 }
2546 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2547 }
2548
2549 /* oris */
2550 static void gen_oris(DisasContext *ctx)
2551 {
2552 target_ulong uimm = UIMM(ctx->opcode);
2553
2554 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2555 /* NOP */
2556 return;
2557 }
2558 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2559 uimm << 16);
2560 }
2561
2562 /* xori */
2563 static void gen_xori(DisasContext *ctx)
2564 {
2565 target_ulong uimm = UIMM(ctx->opcode);
2566
2567 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2568 /* NOP */
2569 return;
2570 }
2571 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2572 }
2573
2574 /* xoris */
2575 static void gen_xoris(DisasContext *ctx)
2576 {
2577 target_ulong uimm = UIMM(ctx->opcode);
2578
2579 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2580 /* NOP */
2581 return;
2582 }
2583 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2584 uimm << 16);
2585 }
2586
2587 /* popcntb : PowerPC 2.03 specification */
2588 static void gen_popcntb(DisasContext *ctx)
2589 {
2590 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2591 }
2592
2593 static void gen_popcntw(DisasContext *ctx)
2594 {
2595 #if defined(TARGET_PPC64)
2596 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2597 #else
2598 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2599 #endif
2600 }
2601
2602 #if defined(TARGET_PPC64)
2603 /* popcntd: PowerPC 2.06 specification */
2604 static void gen_popcntd(DisasContext *ctx)
2605 {
2606 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2607 }
2608 #endif
2609
2610 /* prtyw: PowerPC 2.05 specification */
2611 static void gen_prtyw(DisasContext *ctx)
2612 {
2613 TCGv ra = cpu_gpr[rA(ctx->opcode)];
2614 TCGv rs = cpu_gpr[rS(ctx->opcode)];
2615 TCGv t0 = tcg_temp_new();
2616 tcg_gen_shri_tl(t0, rs, 16);
2617 tcg_gen_xor_tl(ra, rs, t0);
2618 tcg_gen_shri_tl(t0, ra, 8);
2619 tcg_gen_xor_tl(ra, ra, t0);
2620 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
2621 tcg_temp_free(t0);
2622 }
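/*
 * prtyw folds the least-significant bit of each byte of a word into
 * bit 0 of that word: rs ^ (rs >> 16) combines byte pairs 16 bits
 * apart, the following ^ (ra >> 8) collapses the remaining pair, and
 * the final AND keeps only bit 0 of each 32-bit word.  Example for one
 * word: RS = 0x01010100 has byte lsbs 1,1,1,0, so the result word is 1.
 */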
2623
2624 #if defined(TARGET_PPC64)
2625 /* prtyd: PowerPC 2.05 specification */
2626 static void gen_prtyd(DisasContext *ctx)
2627 {
2628 TCGv ra = cpu_gpr[rA(ctx->opcode)];
2629 TCGv rs = cpu_gpr[rS(ctx->opcode)];
2630 TCGv t0 = tcg_temp_new();
2631 tcg_gen_shri_tl(t0, rs, 32);
2632 tcg_gen_xor_tl(ra, rs, t0);
2633 tcg_gen_shri_tl(t0, ra, 16);
2634 tcg_gen_xor_tl(ra, ra, t0);
2635 tcg_gen_shri_tl(t0, ra, 8);
2636 tcg_gen_xor_tl(ra, ra, t0);
2637 tcg_gen_andi_tl(ra, ra, 1);
2638 tcg_temp_free(t0);
2639 }
2640 #endif
2641
2642 #if defined(TARGET_PPC64)
2643 /* bpermd */
2644 static void gen_bpermd(DisasContext *ctx)
2645 {
2646 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
2647 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2648 }
2649 #endif
2650
2651 #if defined(TARGET_PPC64)
2652 /* extsw & extsw. */
2653 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2654
2655 /* cntlzd */
2656 static void gen_cntlzd(DisasContext *ctx)
2657 {
2658 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2659 if (unlikely(Rc(ctx->opcode) != 0)) {
2660 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2661 }
2662 }
2663
2664 /* cnttzd */
2665 static void gen_cnttzd(DisasContext *ctx)
2666 {
2667 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2668 if (unlikely(Rc(ctx->opcode) != 0)) {
2669 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2670 }
2671 }
2672
2673 /* darn */
2674 static void gen_darn(DisasContext *ctx)
2675 {
2676 int l = L(ctx->opcode);
2677
2678 if (l > 2) {
2679 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
2680 } else {
2681 gen_icount_io_start(ctx);
2682 if (l == 0) {
2683 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
2684 } else {
2685 /* Return 64-bit random for both CRN and RRN */
2686 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
2687 }
2688 }
2689 }
2690 #endif
2691
2692 /*** Integer rotate ***/
2693
2694 /* rlwimi & rlwimi. */
2695 static void gen_rlwimi(DisasContext *ctx)
2696 {
2697 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2698 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2699 uint32_t sh = SH(ctx->opcode);
2700 uint32_t mb = MB(ctx->opcode);
2701 uint32_t me = ME(ctx->opcode);
2702
2703 if (sh == (31 - me) && mb <= me) {
2704 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2705 } else {
2706 target_ulong mask;
2707 bool mask_in_32b = true;
2708 TCGv t1;
2709
2710 #if defined(TARGET_PPC64)
2711 mb += 32;
2712 me += 32;
2713 #endif
2714 mask = MASK(mb, me);
2715
2716 #if defined(TARGET_PPC64)
2717 if (mask > 0xffffffffu) {
2718 mask_in_32b = false;
2719 }
2720 #endif
2721 t1 = tcg_temp_new();
2722 if (mask_in_32b) {
2723 TCGv_i32 t0 = tcg_temp_new_i32();
2724 tcg_gen_trunc_tl_i32(t0, t_rs);
2725 tcg_gen_rotli_i32(t0, t0, sh);
2726 tcg_gen_extu_i32_tl(t1, t0);
2727 tcg_temp_free_i32(t0);
2728 } else {
2729 #if defined(TARGET_PPC64)
2730 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2731 tcg_gen_rotli_i64(t1, t1, sh);
2732 #else
2733 g_assert_not_reached();
2734 #endif
2735 }
2736
2737 tcg_gen_andi_tl(t1, t1, mask);
2738 tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2739 tcg_gen_or_tl(t_ra, t_ra, t1);
2740 tcg_temp_free(t1);
2741 }
2742 if (unlikely(Rc(ctx->opcode) != 0)) {
2743 gen_set_Rc0(ctx, t_ra);
2744 }
2745 }
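/*
 * rlwimi is "rotate left then insert under mask":
 * ra = (rotl32(rs, sh) & MASK(mb, me)) | (ra & ~MASK(mb, me)).  When
 * the rotated field lines up with a contiguous mask (sh == 31 - me and
 * mb <= me), this is just a deposit, as in the fast path above.
 * Example: rlwimi ra,rs,8,16,23 copies the low byte of rs into bits
 * 8..15 of ra and leaves the rest of ra untouched.
 */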
2746
2747 /* rlwinm & rlwinm. */
2748 static void gen_rlwinm(DisasContext *ctx)
2749 {
2750 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2751 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2752 int sh = SH(ctx->opcode);
2753 int mb = MB(ctx->opcode);
2754 int me = ME(ctx->opcode);
2755 int len = me - mb + 1;
2756 int rsh = (32 - sh) & 31;
2757
2758 if (sh != 0 && len > 0 && me == (31 - sh)) {
2759 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2760 } else if (me == 31 && rsh + len <= 32) {
2761 tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2762 } else {
2763 target_ulong mask;
2764 bool mask_in_32b = true;
2765 #if defined(TARGET_PPC64)
2766 mb += 32;
2767 me += 32;
2768 #endif
2769 mask = MASK(mb, me);
2770 #if defined(TARGET_PPC64)
2771 if (mask > 0xffffffffu) {
2772 mask_in_32b = false;
2773 }
2774 #endif
2775 if (mask_in_32b) {
2776 if (sh == 0) {
2777 tcg_gen_andi_tl(t_ra, t_rs, mask);
2778 } else {
2779 TCGv_i32 t0 = tcg_temp_new_i32();
2780 tcg_gen_trunc_tl_i32(t0, t_rs);
2781 tcg_gen_rotli_i32(t0, t0, sh);
2782 tcg_gen_andi_i32(t0, t0, mask);
2783 tcg_gen_extu_i32_tl(t_ra, t0);
2784 tcg_temp_free_i32(t0);
2785 }
2786 } else {
2787 #if defined(TARGET_PPC64)
2788 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2789 tcg_gen_rotli_i64(t_ra, t_ra, sh);
2790 tcg_gen_andi_i64(t_ra, t_ra, mask);
2791 #else
2792 g_assert_not_reached();
2793 #endif
2794 }
2795 }
2796 if (unlikely(Rc(ctx->opcode) != 0)) {
2797 gen_set_Rc0(ctx, t_ra);
2798 }
2799 }
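/*
 * rlwinm is "rotate left then AND with mask".  The two fast paths above
 * recognise the common shift idioms: slwi rd,rs,n is
 * rlwinm rd,rs,n,0,31-n (a zero-extending deposit), and srwi rd,rs,n is
 * rlwinm rd,rs,32-n,n,31 (an extract).  Example: rlwinm ra,rs,0,24,31
 * reduces to tcg_gen_extract_tl(ra, rs, 0, 8), i.e. ra = rs & 0xff.
 */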
2800
2801 /* rlwnm & rlwnm. */
2802 static void gen_rlwnm(DisasContext *ctx)
2803 {
2804 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2805 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2806 TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2807 uint32_t mb = MB(ctx->opcode);
2808 uint32_t me = ME(ctx->opcode);
2809 target_ulong mask;
2810 bool mask_in_32b = true;
2811
2812 #if defined(TARGET_PPC64)
2813 mb += 32;
2814 me += 32;
2815 #endif
2816 mask = MASK(mb, me);
2817
2818 #if defined(TARGET_PPC64)
2819 if (mask > 0xffffffffu) {
2820 mask_in_32b = false;
2821 }
2822 #endif
2823 if (mask_in_32b) {
2824 TCGv_i32 t0 = tcg_temp_new_i32();
2825 TCGv_i32 t1 = tcg_temp_new_i32();
2826 tcg_gen_trunc_tl_i32(t0, t_rb);
2827 tcg_gen_trunc_tl_i32(t1, t_rs);
2828 tcg_gen_andi_i32(t0, t0, 0x1f);
2829 tcg_gen_rotl_i32(t1, t1, t0);
2830 tcg_gen_extu_i32_tl(t_ra, t1);
2831 tcg_temp_free_i32(t0);
2832 tcg_temp_free_i32(t1);
2833 } else {
2834 #if defined(TARGET_PPC64)
2835 TCGv_i64 t0 = tcg_temp_new_i64();
2836 tcg_gen_andi_i64(t0, t_rb, 0x1f);
2837 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2838 tcg_gen_rotl_i64(t_ra, t_ra, t0);
2839 tcg_temp_free_i64(t0);
2840 #else
2841 g_assert_not_reached();
2842 #endif
2843 }
2844
2845 tcg_gen_andi_tl(t_ra, t_ra, mask);
2846
2847 if (unlikely(Rc(ctx->opcode) != 0)) {
2848 gen_set_Rc0(ctx, t_ra);
2849 }
2850 }
2851
2852 #if defined(TARGET_PPC64)
2853 #define GEN_PPC64_R2(name, opc1, opc2) \
2854 static void glue(gen_, name##0)(DisasContext *ctx) \
2855 { \
2856 gen_##name(ctx, 0); \
2857 } \
2858 \
2859 static void glue(gen_, name##1)(DisasContext *ctx) \
2860 { \
2861 gen_##name(ctx, 1); \
2862 }
2863 #define GEN_PPC64_R4(name, opc1, opc2) \
2864 static void glue(gen_, name##0)(DisasContext *ctx) \
2865 { \
2866 gen_##name(ctx, 0, 0); \
2867 } \
2868 \
2869 static void glue(gen_, name##1)(DisasContext *ctx) \
2870 { \
2871 gen_##name(ctx, 0, 1); \
2872 } \
2873 \
2874 static void glue(gen_, name##2)(DisasContext *ctx) \
2875 { \
2876 gen_##name(ctx, 1, 0); \
2877 } \
2878 \
2879 static void glue(gen_, name##3)(DisasContext *ctx) \
2880 { \
2881 gen_##name(ctx, 1, 1); \
2882 }
2883
2884 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2885 {
2886 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2887 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2888 int len = me - mb + 1;
2889 int rsh = (64 - sh) & 63;
2890
2891 if (sh != 0 && len > 0 && me == (63 - sh)) {
2892 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2893 } else if (me == 63 && rsh + len <= 64) {
2894 tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2895 } else {
2896 tcg_gen_rotli_tl(t_ra, t_rs, sh);
2897 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2898 }
2899 if (unlikely(Rc(ctx->opcode) != 0)) {
2900 gen_set_Rc0(ctx, t_ra);
2901 }
2902 }
2903
2904 /* rldicl - rldicl. */
2905 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2906 {
2907 uint32_t sh, mb;
2908
2909 sh = SH(ctx->opcode) | (shn << 5);
2910 mb = MB(ctx->opcode) | (mbn << 5);
2911 gen_rldinm(ctx, mb, 63, sh);
2912 }
2913 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2914
2915 /* rldicr - rldicr. */
2916 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2917 {
2918 uint32_t sh, me;
2919
2920 sh = SH(ctx->opcode) | (shn << 5);
2921 me = MB(ctx->opcode) | (men << 5);
2922 gen_rldinm(ctx, 0, me, sh);
2923 }
2924 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2925
2926 /* rldic - rldic. */
2927 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2928 {
2929 uint32_t sh, mb;
2930
2931 sh = SH(ctx->opcode) | (shn << 5);
2932 mb = MB(ctx->opcode) | (mbn << 5);
2933 gen_rldinm(ctx, mb, 63 - sh, sh);
2934 }
2935 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2936
2937 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2938 {
2939 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2940 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2941 TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2942 TCGv t0;
2943
2944 t0 = tcg_temp_new();
2945 tcg_gen_andi_tl(t0, t_rb, 0x3f);
2946 tcg_gen_rotl_tl(t_ra, t_rs, t0);
2947 tcg_temp_free(t0);
2948
2949 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2950 if (unlikely(Rc(ctx->opcode) != 0)) {
2951 gen_set_Rc0(ctx, t_ra);
2952 }
2953 }
2954
2955 /* rldcl - rldcl. */
2956 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2957 {
2958 uint32_t mb;
2959
2960 mb = MB(ctx->opcode) | (mbn << 5);
2961 gen_rldnm(ctx, mb, 63);
2962 }
2963 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2964
2965 /* rldcr - rldcr. */
2966 static inline void gen_rldcr(DisasContext *ctx, int men)
2967 {
2968 uint32_t me;
2969
2970 me = MB(ctx->opcode) | (men << 5);
2971 gen_rldnm(ctx, 0, me);
2972 }
2973 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2974
2975 /* rldimi - rldimi. */
2976 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2977 {
2978 TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2979 TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2980 uint32_t sh = SH(ctx->opcode) | (shn << 5);
2981 uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2982 uint32_t me = 63 - sh;
2983
2984 if (mb <= me) {
2985 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2986 } else {
2987 target_ulong mask = MASK(mb, me);
2988 TCGv t1 = tcg_temp_new();
2989
2990 tcg_gen_rotli_tl(t1, t_rs, sh);
2991 tcg_gen_andi_tl(t1, t1, mask);
2992 tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2993 tcg_gen_or_tl(t_ra, t_ra, t1);
2994 tcg_temp_free(t1);
2995 }
2996 if (unlikely(Rc(ctx->opcode) != 0)) {
2997 gen_set_Rc0(ctx, t_ra);
2998 }
2999 }
3000 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
3001 #endif
3002
3003 /*** Integer shift ***/
3004
3005 /* slw & slw. */
3006 static void gen_slw(DisasContext *ctx)
3007 {
3008 TCGv t0, t1;
3009
3010 t0 = tcg_temp_new();
3011 /* AND rS with a mask that is 0 when rB >= 0x20 */
3012 #if defined(TARGET_PPC64)
3013 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
3014 tcg_gen_sari_tl(t0, t0, 0x3f);
3015 #else
3016 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
3017 tcg_gen_sari_tl(t0, t0, 0x1f);
3018 #endif
3019 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3020 t1 = tcg_temp_new();
3021 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
3022 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3023 tcg_temp_free(t1);
3024 tcg_temp_free(t0);
3025 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
3026 if (unlikely(Rc(ctx->opcode) != 0)) {
3027 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3028 }
3029 }
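/*
 * Shift-amount trick used by slw/srw (and sld/srd below): shifting rB
 * left so that its bit 5 (bit 6 for the doubleword forms) lands in the
 * sign position and arithmetic-shifting back yields a mask of all ones
 * when the shift amount is >= 32 (>= 64), so the andc zeroes the source
 * first, and the actual TCG shift only ever sees the low 5 (6) bits.
 * Example: slw with rB = 33 produces 0, as the architecture requires
 * for shift amounts 32..63.
 */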
3030
3031 /* sraw & sraw. */
3032 static void gen_sraw(DisasContext *ctx)
3033 {
3034 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
3035 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3036 if (unlikely(Rc(ctx->opcode) != 0)) {
3037 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3038 }
3039 }
3040
3041 /* srawi & srawi. */
3042 static void gen_srawi(DisasContext *ctx)
3043 {
3044 int sh = SH(ctx->opcode);
3045 TCGv dst = cpu_gpr[rA(ctx->opcode)];
3046 TCGv src = cpu_gpr[rS(ctx->opcode)];
3047 if (sh == 0) {
3048 tcg_gen_ext32s_tl(dst, src);
3049 tcg_gen_movi_tl(cpu_ca, 0);
3050 if (is_isa300(ctx)) {
3051 tcg_gen_movi_tl(cpu_ca32, 0);
3052 }
3053 } else {
3054 TCGv t0;
3055 tcg_gen_ext32s_tl(dst, src);
3056 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
3057 t0 = tcg_temp_new();
3058 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
3059 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3060 tcg_temp_free(t0);
3061 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3062 if (is_isa300(ctx)) {
3063 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3064 }
3065 tcg_gen_sari_tl(dst, dst, sh);
3066 }
3067 if (unlikely(Rc(ctx->opcode) != 0)) {
3068 gen_set_Rc0(ctx, dst);
3069 }
3070 }
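/*
 * For sraw/srawi the carry encodes rounding information: CA is set iff
 * the source is negative and at least one 1 bit is shifted out, so
 * that "srawi; addze" implements a signed divide by 2^n that rounds
 * toward zero.  In the code above, (dst & ((1 << sh) - 1)) captures the
 * bits about to be shifted out, and the AND with the replicated sign
 * bit restricts the test to negative values.  Example: srawi ra,rs,2
 * with rs = -3 gives ra = -1 and CA = 1; adding CA back in yields 0,
 * which is -3 / 4 rounded toward zero.
 */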
3071
3072 /* srw & srw. */
3073 static void gen_srw(DisasContext *ctx)
3074 {
3075 TCGv t0, t1;
3076
3077 t0 = tcg_temp_new();
3078 /* AND rS with a mask that is 0 when rB >= 0x20 */
3079 #if defined(TARGET_PPC64)
3080 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
3081 tcg_gen_sari_tl(t0, t0, 0x3f);
3082 #else
3083 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
3084 tcg_gen_sari_tl(t0, t0, 0x1f);
3085 #endif
3086 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3087 tcg_gen_ext32u_tl(t0, t0);
3088 t1 = tcg_temp_new();
3089 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
3090 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3091 tcg_temp_free(t1);
3092 tcg_temp_free(t0);
3093 if (unlikely(Rc(ctx->opcode) != 0)) {
3094 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3095 }
3096 }
3097
3098 #if defined(TARGET_PPC64)
3099 /* sld & sld. */
3100 static void gen_sld(DisasContext *ctx)
3101 {
3102 TCGv t0, t1;
3103
3104 t0 = tcg_temp_new();
3105 /* AND rS with a mask that is 0 when rB >= 0x40 */
3106 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3107 tcg_gen_sari_tl(t0, t0, 0x3f);
3108 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3109 t1 = tcg_temp_new();
3110 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3111 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3112 tcg_temp_free(t1);
3113 tcg_temp_free(t0);
3114 if (unlikely(Rc(ctx->opcode) != 0)) {
3115 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3116 }
3117 }
3118
3119 /* srad & srad. */
3120 static void gen_srad(DisasContext *ctx)
3121 {
3122 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
3123 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3124 if (unlikely(Rc(ctx->opcode) != 0)) {
3125 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3126 }
3127 }
3128 /* sradi & sradi. */
3129 static inline void gen_sradi(DisasContext *ctx, int n)
3130 {
3131 int sh = SH(ctx->opcode) + (n << 5);
3132 TCGv dst = cpu_gpr[rA(ctx->opcode)];
3133 TCGv src = cpu_gpr[rS(ctx->opcode)];
3134 if (sh == 0) {
3135 tcg_gen_mov_tl(dst, src);
3136 tcg_gen_movi_tl(cpu_ca, 0);
3137 if (is_isa300(ctx)) {
3138 tcg_gen_movi_tl(cpu_ca32, 0);
3139 }
3140 } else {
3141 TCGv t0;
3142 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
3143 t0 = tcg_temp_new();
3144 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
3145 tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3146 tcg_temp_free(t0);
3147 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3148 if (is_isa300(ctx)) {
3149 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3150 }
3151 tcg_gen_sari_tl(dst, src, sh);
3152 }
3153 if (unlikely(Rc(ctx->opcode) != 0)) {
3154 gen_set_Rc0(ctx, dst);
3155 }
3156 }
3157
3158 static void gen_sradi0(DisasContext *ctx)
3159 {
3160 gen_sradi(ctx, 0);
3161 }
3162
3163 static void gen_sradi1(DisasContext *ctx)
3164 {
3165 gen_sradi(ctx, 1);
3166 }
3167
3168 /* extswsli & extswsli. */
3169 static inline void gen_extswsli(DisasContext *ctx, int n)
3170 {
3171 int sh = SH(ctx->opcode) + (n << 5);
3172 TCGv dst = cpu_gpr[rA(ctx->opcode)];
3173 TCGv src = cpu_gpr[rS(ctx->opcode)];
3174
3175 tcg_gen_ext32s_tl(dst, src);
3176 tcg_gen_shli_tl(dst, dst, sh);
3177 if (unlikely(Rc(ctx->opcode) != 0)) {
3178 gen_set_Rc0(ctx, dst);
3179 }
3180 }
3181
3182 static void gen_extswsli0(DisasContext *ctx)
3183 {
3184 gen_extswsli(ctx, 0);
3185 }
3186
3187 static void gen_extswsli1(DisasContext *ctx)
3188 {
3189 gen_extswsli(ctx, 1);
3190 }
3191
3192 /* srd & srd. */
3193 static void gen_srd(DisasContext *ctx)
3194 {
3195 TCGv t0, t1;
3196
3197 t0 = tcg_temp_new();
3198 /* AND rS with a mask that is 0 when rB >= 0x40 */
3199 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3200 tcg_gen_sari_tl(t0, t0, 0x3f);
3201 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3202 t1 = tcg_temp_new();
3203 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3204 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3205 tcg_temp_free(t1);
3206 tcg_temp_free(t0);
3207 if (unlikely(Rc(ctx->opcode) != 0)) {
3208 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3209 }
3210 }
3211 #endif
3212
3213 /*** Addressing modes ***/
3214 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
3215 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
3216 target_long maskl)
3217 {
3218 target_long simm = SIMM(ctx->opcode);
3219
3220 simm &= ~maskl;
3221 if (rA(ctx->opcode) == 0) {
3222 if (NARROW_MODE(ctx)) {
3223 simm = (uint32_t)simm;
3224 }
3225 tcg_gen_movi_tl(EA, simm);
3226 } else if (likely(simm != 0)) {
3227 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
3228 if (NARROW_MODE(ctx)) {
3229 tcg_gen_ext32u_tl(EA, EA);
3230 }
3231 } else {
3232 if (NARROW_MODE(ctx)) {
3233 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3234 } else {
3235 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3236 }
3237 }
3238 }
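/*
 * All D-form memory accesses funnel through this helper:
 * EA = (rA|0) + SIMM, truncated to 32 bits in narrow mode.  maskl
 * clears the low bits of the displacement for the DS/DQ forms, where
 * those bits are effectively opcode bits rather than part of the
 * offset; e.g. gen_ld below passes 0x03 and gen_lq passes 0x0F.
 */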
3239
3240 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3241 {
3242 if (rA(ctx->opcode) == 0) {
3243 if (NARROW_MODE(ctx)) {
3244 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3245 } else {
3246 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3247 }
3248 } else {
3249 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3250 if (NARROW_MODE(ctx)) {
3251 tcg_gen_ext32u_tl(EA, EA);
3252 }
3253 }
3254 }
3255
3256 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3257 {
3258 if (rA(ctx->opcode) == 0) {
3259 tcg_gen_movi_tl(EA, 0);
3260 } else if (NARROW_MODE(ctx)) {
3261 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3262 } else {
3263 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3264 }
3265 }
3266
3267 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
3268 target_long val)
3269 {
3270 tcg_gen_addi_tl(ret, arg1, val);
3271 if (NARROW_MODE(ctx)) {
3272 tcg_gen_ext32u_tl(ret, ret);
3273 }
3274 }
3275
3276 static inline void gen_align_no_le(DisasContext *ctx)
3277 {
3278 gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
3279 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
3280 }
3281
3282 /*** Integer load ***/
3283 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
3284 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
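/*
 * DEF_MEMOP folds in the translation-time byte order
 * (ctx->default_tcg_memop_mask, roughly MO_BE or MO_LE to match the
 * guest's current mode), while BSWAP_MEMOP flips it, which is how the
 * byte-reverse accesses (lhbrx, lwbrx, sthbrx, ...) are expressed.
 * For instance, in big-endian mode ld32u becomes a MO_BEUL access and
 * ld32ur a MO_LEUL access.
 */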
3285
3286 #define GEN_QEMU_LOAD_TL(ldop, op) \
3287 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \
3288 TCGv val, \
3289 TCGv addr) \
3290 { \
3291 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \
3292 }
3293
3294 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB))
3295 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
3296 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
3297 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
3298 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
3299
3300 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
3301 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
3302
3303 #define GEN_QEMU_LOAD_64(ldop, op) \
3304 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \
3305 TCGv_i64 val, \
3306 TCGv addr) \
3307 { \
3308 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \
3309 }
3310
3311 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB))
3312 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
3313 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
3314 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
3315 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q))
3316
3317 #if defined(TARGET_PPC64)
3318 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q))
3319 #endif
3320
3321 #define GEN_QEMU_STORE_TL(stop, op) \
3322 static void glue(gen_qemu_, stop)(DisasContext *ctx, \
3323 TCGv val, \
3324 TCGv addr) \
3325 { \
3326 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \
3327 }
3328
3329 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB))
3330 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
3331 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
3332
3333 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
3334 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
3335
3336 #define GEN_QEMU_STORE_64(stop, op) \
3337 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \
3338 TCGv_i64 val, \
3339 TCGv addr) \
3340 { \
3341 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \
3342 }
3343
3344 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB))
3345 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
3346 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
3347 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q))
3348
3349 #if defined(TARGET_PPC64)
3350 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q))
3351 #endif
3352
3353 #define GEN_LD(name, ldop, opc, type) \
3354 static void glue(gen_, name)(DisasContext *ctx) \
3355 { \
3356 TCGv EA; \
3357 gen_set_access_type(ctx, ACCESS_INT); \
3358 EA = tcg_temp_new(); \
3359 gen_addr_imm_index(ctx, EA, 0); \
3360 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3361 tcg_temp_free(EA); \
3362 }
3363
3364 #define GEN_LDU(name, ldop, opc, type) \
3365 static void glue(gen_, name##u)(DisasContext *ctx) \
3366 { \
3367 TCGv EA; \
3368 if (unlikely(rA(ctx->opcode) == 0 || \
3369 rA(ctx->opcode) == rD(ctx->opcode))) { \
3370 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3371 return; \
3372 } \
3373 gen_set_access_type(ctx, ACCESS_INT); \
3374 EA = tcg_temp_new(); \
3375 if (type == PPC_64B) \
3376 gen_addr_imm_index(ctx, EA, 0x03); \
3377 else \
3378 gen_addr_imm_index(ctx, EA, 0); \
3379 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3380 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3381 tcg_temp_free(EA); \
3382 }
3383
3384 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
3385 static void glue(gen_, name##ux)(DisasContext *ctx) \
3386 { \
3387 TCGv EA; \
3388 if (unlikely(rA(ctx->opcode) == 0 || \
3389 rA(ctx->opcode) == rD(ctx->opcode))) { \
3390 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3391 return; \
3392 } \
3393 gen_set_access_type(ctx, ACCESS_INT); \
3394 EA = tcg_temp_new(); \
3395 gen_addr_reg_index(ctx, EA); \
3396 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3397 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3398 tcg_temp_free(EA); \
3399 }
3400
3401 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
3402 static void glue(gen_, name##x)(DisasContext *ctx) \
3403 { \
3404 TCGv EA; \
3405 chk; \
3406 gen_set_access_type(ctx, ACCESS_INT); \
3407 EA = tcg_temp_new(); \
3408 gen_addr_reg_index(ctx, EA); \
3409 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \
3410 tcg_temp_free(EA); \
3411 }
3412
3413 #define GEN_LDX(name, ldop, opc2, opc3, type) \
3414 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3415
3416 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \
3417 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3418
3419 #define GEN_LDS(name, ldop, op, type) \
3420 GEN_LD(name, ldop, op | 0x20, type); \
3421 GEN_LDU(name, ldop, op | 0x21, type); \
3422 GEN_LDUX(name, ldop, 0x17, op | 0x01, type); \
3423 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
3424
3425 /* lbz lbzu lbzux lbzx */
3426 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER);
3427 /* lha lhau lhaux lhax */
3428 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER);
3429 /* lhz lhzu lhzux lhzx */
3430 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER);
3431 /* lwz lwzu lwzux lwzx */
3432 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER);
3433
3434 #define GEN_LDEPX(name, ldop, opc2, opc3) \
3435 static void glue(gen_, name##epx)(DisasContext *ctx) \
3436 { \
3437 TCGv EA; \
3438 CHK_SV; \
3439 gen_set_access_type(ctx, ACCESS_INT); \
3440 EA = tcg_temp_new(); \
3441 gen_addr_reg_index(ctx, EA); \
3442 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
3443 tcg_temp_free(EA); \
3444 }
3445
3446 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
3447 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
3448 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
3449 #if defined(TARGET_PPC64)
3450 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00)
3451 #endif
3452
3453 #if defined(TARGET_PPC64)
3454 /* lwaux */
3455 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B);
3456 /* lwax */
3457 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B);
3458 /* ldux */
3459 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B);
3460 /* ldx */
3461 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B);
3462
3463 /* CI load/store variants */
3464 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
3465 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
3466 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
3467 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
3468
3469 static void gen_ld(DisasContext *ctx)
3470 {
3471 TCGv EA;
3472 if (Rc(ctx->opcode)) {
3473 if (unlikely(rA(ctx->opcode) == 0 ||
3474 rA(ctx->opcode) == rD(ctx->opcode))) {
3475 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3476 return;
3477 }
3478 }
3479 gen_set_access_type(ctx, ACCESS_INT);
3480 EA = tcg_temp_new();
3481 gen_addr_imm_index(ctx, EA, 0x03);
3482 if (ctx->opcode & 0x02) {
3483 /* lwa (lwau is undefined) */
3484 gen_qemu_ld32s(ctx, cpu_gpr[rD(ctx->opcode)], EA);
3485 } else {
3486 /* ld - ldu */
3487 gen_qemu_ld64_i64(ctx, cpu_gpr[rD(ctx->opcode)], EA);
3488 }
3489 if (Rc(ctx->opcode)) {
3490 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
3491 }
3492 tcg_temp_free(EA);
3493 }
3494
3495 /* lq */
3496 static void gen_lq(DisasContext *ctx)
3497 {
3498 int ra, rd;
3499 TCGv EA, hi, lo;
3500
3501 /* lq is a legal user mode instruction starting in ISA 2.07 */
3502 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3503 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3504
3505 if (!legal_in_user_mode && ctx->pr) {
3506 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3507 return;
3508 }
3509
3510 if (!le_is_supported && ctx->le_mode) {
3511 gen_align_no_le(ctx);
3512 return;
3513 }
3514 ra = rA(ctx->opcode);
3515 rd = rD(ctx->opcode);
3516 if (unlikely((rd & 1) || rd == ra)) {
3517 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3518 return;
3519 }
3520
3521 gen_set_access_type(ctx, ACCESS_INT);
3522 EA = tcg_temp_new();
3523 gen_addr_imm_index(ctx, EA, 0x0F);
3524
3525 /* Note that the low part is always in RD+1, even in LE mode. */
3526 lo = cpu_gpr[rd + 1];
3527 hi = cpu_gpr[rd];
3528
3529 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3530 if (HAVE_ATOMIC128) {
3531 TCGv_i32 oi = tcg_temp_new_i32();
3532 if (ctx->le_mode) {
3533 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx));
3534 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
3535 } else {
3536 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx));
3537 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
3538 }
3539 tcg_temp_free_i32(oi);
3540 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
3541 } else {
3542 /* Restart with exclusive lock. */
3543 gen_helper_exit_atomic(cpu_env);
3544 ctx->base.is_jmp = DISAS_NORETURN;
3545 }
3546 } else if (ctx->le_mode) {
3547 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ);
3548 gen_addr_add(ctx, EA, EA, 8);
3549 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ);
3550 } else {
3551 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ);
3552 gen_addr_add(ctx, EA, EA, 8);
3553 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ);
3554 }
3555 tcg_temp_free(EA);
3556 }
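/*
 * lq (and stq below) need 16-byte atomicity when multiple vCPUs run in
 * parallel: with a 128-bit atomic access available (HAVE_ATOMIC128) the
 * access is done in a helper, otherwise the translation loop exits via
 * gen_helper_exit_atomic() and the instruction is retried under the
 * exclusive (stop-the-world) lock.  Outside parallel mode two ordinary
 * 8-byte accesses suffice, with the hi/lo order chosen by the current
 * endianness.
 */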
3557 #endif
3558
3559 /*** Integer store ***/
3560 #define GEN_ST(name, stop, opc, type) \
3561 static void glue(gen_, name)(DisasContext *ctx) \
3562 { \
3563 TCGv EA; \
3564 gen_set_access_type(ctx, ACCESS_INT); \
3565 EA = tcg_temp_new(); \
3566 gen_addr_imm_index(ctx, EA, 0); \
3567 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3568 tcg_temp_free(EA); \
3569 }
3570
3571 #define GEN_STU(name, stop, opc, type) \
3572 static void glue(gen_, stop##u)(DisasContext *ctx) \
3573 { \
3574 TCGv EA; \
3575 if (unlikely(rA(ctx->opcode) == 0)) { \
3576 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3577 return; \
3578 } \
3579 gen_set_access_type(ctx, ACCESS_INT); \
3580 EA = tcg_temp_new(); \
3581 if (type == PPC_64B) \
3582 gen_addr_imm_index(ctx, EA, 0x03); \
3583 else \
3584 gen_addr_imm_index(ctx, EA, 0); \
3585 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3586 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3587 tcg_temp_free(EA); \
3588 }
3589
3590 #define GEN_STUX(name, stop, opc2, opc3, type) \
3591 static void glue(gen_, name##ux)(DisasContext *ctx) \
3592 { \
3593 TCGv EA; \
3594 if (unlikely(rA(ctx->opcode) == 0)) { \
3595 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); \
3596 return; \
3597 } \
3598 gen_set_access_type(ctx, ACCESS_INT); \
3599 EA = tcg_temp_new(); \
3600 gen_addr_reg_index(ctx, EA); \
3601 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3602 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
3603 tcg_temp_free(EA); \
3604 }
3605
3606 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
3607 static void glue(gen_, name##x)(DisasContext *ctx) \
3608 { \
3609 TCGv EA; \
3610 chk; \
3611 gen_set_access_type(ctx, ACCESS_INT); \
3612 EA = tcg_temp_new(); \
3613 gen_addr_reg_index(ctx, EA); \
3614 gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA); \
3615 tcg_temp_free(EA); \
3616 }
3617 #define GEN_STX(name, stop, opc2, opc3, type) \
3618 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3619
3620 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \
3621 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3622
3623 #define GEN_STS(name, stop, op, type) \
3624 GEN_ST(name, stop, op | 0x20, type); \
3625 GEN_STU(name, stop, op | 0x21, type); \
3626 GEN_STUX(name, stop, 0x17, op | 0x01, type); \
3627 GEN_STX(name, stop, 0x17, op | 0x00, type)
3628
3629 /* stb stbu stbux stbx */
3630 GEN_STS(stb, st8, 0x06, PPC_INTEGER);
3631 /* sth sthu sthux sthx */
3632 GEN_STS(sth, st16, 0x0C, PPC_INTEGER);
3633 /* stw stwu stwux stwx */
3634 GEN_STS(stw, st32, 0x04, PPC_INTEGER);
3635
3636 #define GEN_STEPX(name, stop, opc2, opc3) \
3637 static void glue(gen_, name##epx)(DisasContext *ctx) \
3638 { \
3639 TCGv EA; \
3640 CHK_SV; \
3641 gen_set_access_type(ctx, ACCESS_INT); \
3642 EA = tcg_temp_new(); \
3643 gen_addr_reg_index(ctx, EA); \
3644 tcg_gen_qemu_st_tl( \
3645 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \
3646 tcg_temp_free(EA); \
3647 }
3648
3649 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3650 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3651 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3652 #if defined(TARGET_PPC64)
3653 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04)
3654 #endif
3655
3656 #if defined(TARGET_PPC64)
3657 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B);
3658 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B);
3659 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
3660 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
3661 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
3662 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
3663
3664 static void gen_std(DisasContext *ctx)
3665 {
3666 int rs;
3667 TCGv EA;
3668
3669 rs = rS(ctx->opcode);
3670 if ((ctx->opcode & 0x3) == 0x2) { /* stq */
3671 bool legal_in_user_mode = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3672 bool le_is_supported = (ctx->insns_flags2 & PPC2_LSQ_ISA207) != 0;
3673 TCGv hi, lo;
3674
3675 if (!(ctx->insns_flags & PPC_64BX)) {
3676 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3677 }
3678
3679 if (!legal_in_user_mode && ctx->pr) {
3680 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
3681 return;
3682 }
3683
3684 if (!le_is_supported && ctx->le_mode) {
3685 gen_align_no_le(ctx);
3686 return;
3687 }
3688
3689 if (unlikely(rs & 1)) {
3690 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3691 return;
3692 }
3693 gen_set_access_type(ctx, ACCESS_INT);
3694 EA = tcg_temp_new();
3695 gen_addr_imm_index(ctx, EA, 0x03);
3696
3697 /* Note that the low part is always in RS+1, even in LE mode. */
3698 lo = cpu_gpr[rs + 1];
3699 hi = cpu_gpr[rs];
3700
3701 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3702 if (HAVE_ATOMIC128) {
3703 TCGv_i32 oi = tcg_temp_new_i32();
3704 if (ctx->le_mode) {
3705 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ, ctx->mem_idx));
3706 gen_helper_stq_le_parallel(cpu_env, EA, lo, hi, oi);
3707 } else {
3708 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ, ctx->mem_idx));
3709 gen_helper_stq_be_parallel(cpu_env, EA, lo, hi, oi);
3710 }
3711 tcg_temp_free_i32(oi);
3712 } else {
3713 /* Restart with exclusive lock. */
3714 gen_helper_exit_atomic(cpu_env);
3715 ctx->base.is_jmp = DISAS_NORETURN;
3716 }
3717 } else if (ctx->le_mode) {
3718 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_LEQ);
3719 gen_addr_add(ctx, EA, EA, 8);
3720 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_LEQ);
3721 } else {
3722 tcg_gen_qemu_st_i64(hi, EA, ctx->mem_idx, MO_BEQ);
3723 gen_addr_add(ctx, EA, EA, 8);
3724 tcg_gen_qemu_st_i64(lo, EA, ctx->mem_idx, MO_BEQ);
3725 }
3726 tcg_temp_free(EA);
3727 } else {
3728 /* std / stdu */
3729 if (Rc(ctx->opcode)) {
3730 if (unlikely(rA(ctx->opcode) == 0)) {
3731 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3732 return;
3733 }
3734 }
3735 gen_set_access_type(ctx, ACCESS_INT);
3736 EA = tcg_temp_new();
3737 gen_addr_imm_index(ctx, EA, 0x03);
3738 gen_qemu_st64_i64(ctx, cpu_gpr[rs], EA);
3739 if (Rc(ctx->opcode)) {
3740 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
3741 }
3742 tcg_temp_free(EA);
3743 }
3744 }
3745 #endif
3746 /*** Integer load and store with byte reverse ***/
3747
3748 /* lhbrx */
3749 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3750
3751 /* lwbrx */
3752 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3753
3754 #if defined(TARGET_PPC64)
3755 /* ldbrx */
3756 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
3757 /* stdbrx */
3758 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
3759 #endif /* TARGET_PPC64 */
3760
3761 /* sthbrx */
3762 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3763 /* stwbrx */
3764 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3765
3766 /*** Integer load and store multiple ***/
3767
3768 /* lmw */
3769 static void gen_lmw(DisasContext *ctx)
3770 {
3771 TCGv t0;
3772 TCGv_i32 t1;
3773
3774 if (ctx->le_mode) {
3775 gen_align_no_le(ctx);
3776 return;
3777 }
3778 gen_set_access_type(ctx, ACCESS_INT);
3779 t0 = tcg_temp_new();
3780 t1 = tcg_const_i32(rD(ctx->opcode));
3781 gen_addr_imm_index(ctx, t0, 0);
3782 gen_helper_lmw(cpu_env, t0, t1);
3783 tcg_temp_free(t0);
3784 tcg_temp_free_i32(t1);
3785 }
3786
3787 /* stmw */
3788 static void gen_stmw(DisasContext *ctx)
3789 {
3790 TCGv t0;
3791 TCGv_i32 t1;
3792
3793 if (ctx->le_mode) {
3794 gen_align_no_le(ctx);
3795 return;
3796 }
3797 gen_set_access_type(ctx, ACCESS_INT);
3798 t0 = tcg_temp_new();
3799 t1 = tcg_const_i32(rS(ctx->opcode));
3800 gen_addr_imm_index(ctx, t0, 0);
3801 gen_helper_stmw(cpu_env, t0, t1);
3802 tcg_temp_free(t0);
3803 tcg_temp_free_i32(t1);
3804 }
3805
3806 /*** Integer load and store strings ***/
3807
3808 /* lswi */
3809 /*
3810 * PowerPC32 specification says we must generate an exception if rA is
3811 * in the range of registers to be loaded. On the other hand, IBM says
3812 * this is valid, but rA won't be loaded. For now, I'll follow the
3813 * spec...
3814 */
3815 static void gen_lswi(DisasContext *ctx)
3816 {
3817 TCGv t0;
3818 TCGv_i32 t1, t2;
3819 int nb = NB(ctx->opcode);
3820 int start = rD(ctx->opcode);
3821 int ra = rA(ctx->opcode);
3822 int nr;
3823
3824 if (ctx->le_mode) {
3825 gen_align_no_le(ctx);
3826 return;
3827 }
3828 if (nb == 0) {
3829 nb = 32;
3830 }
3831 nr = DIV_ROUND_UP(nb, 4);
3832 if (unlikely(lsw_reg_in_range(start, nr, ra))) {
3833 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3834 return;
3835 }
3836 gen_set_access_type(ctx, ACCESS_INT);
3837 t0 = tcg_temp_new();
3838 gen_addr_register(ctx, t0);
3839 t1 = tcg_const_i32(nb);
3840 t2 = tcg_const_i32(start);
3841 gen_helper_lsw(cpu_env, t0, t1, t2);
3842 tcg_temp_free(t0);
3843 tcg_temp_free_i32(t1);
3844 tcg_temp_free_i32(t2);
3845 }
3846
3847 /* lswx */
3848 static void gen_lswx(DisasContext *ctx)
3849 {
3850 TCGv t0;
3851 TCGv_i32 t1, t2, t3;
3852
3853 if (ctx->le_mode) {
3854 gen_align_no_le(ctx);
3855 return;
3856 }
3857 gen_set_access_type(ctx, ACCESS_INT);
3858 t0 = tcg_temp_new();
3859 gen_addr_reg_index(ctx, t0);
3860 t1 = tcg_const_i32(rD(ctx->opcode));
3861 t2 = tcg_const_i32(rA(ctx->opcode));
3862 t3 = tcg_const_i32(rB(ctx->opcode));
3863 gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3864 tcg_temp_free(t0);
3865 tcg_temp_free_i32(t1);
3866 tcg_temp_free_i32(t2);
3867 tcg_temp_free_i32(t3);
3868 }
3869
3870 /* stswi */
3871 static void gen_stswi(DisasContext *ctx)
3872 {
3873 TCGv t0;
3874 TCGv_i32 t1, t2;
3875 int nb = NB(ctx->opcode);
3876
3877 if (ctx->le_mode) {
3878 gen_align_no_le(ctx);
3879 return;
3880 }
3881 gen_set_access_type(ctx, ACCESS_INT);
3882 t0 = tcg_temp_new();
3883 gen_addr_register(ctx, t0);
3884 if (nb == 0) {
3885 nb = 32;
3886 }
3887 t1 = tcg_const_i32(nb);
3888 t2 = tcg_const_i32(rS(ctx->opcode));
3889 gen_helper_stsw(cpu_env, t0, t1, t2);
3890 tcg_temp_free(t0);
3891 tcg_temp_free_i32(t1);
3892 tcg_temp_free_i32(t2);
3893 }
3894
3895 /* stswx */
3896 static void gen_stswx(DisasContext *ctx)
3897 {
3898 TCGv t0;
3899 TCGv_i32 t1, t2;
3900
3901 if (ctx->le_mode) {
3902 gen_align_no_le(ctx);
3903 return;
3904 }
3905 gen_set_access_type(ctx, ACCESS_INT);
3906 t0 = tcg_temp_new();
3907 gen_addr_reg_index(ctx, t0);
3908 t1 = tcg_temp_new_i32();
3909 tcg_gen_trunc_tl_i32(t1, cpu_xer);
3910 tcg_gen_andi_i32(t1, t1, 0x7F);
3911 t2 = tcg_const_i32(rS(ctx->opcode));
3912 gen_helper_stsw(cpu_env, t0, t1, t2);
3913 tcg_temp_free(t0);
3914 tcg_temp_free_i32(t1);
3915 tcg_temp_free_i32(t2);
3916 }
3917
3918 /*** Memory synchronisation ***/
3919 /* eieio */
3920 static void gen_eieio(DisasContext *ctx)
3921 {
3922 TCGBar bar = TCG_MO_LD_ST;
3923
3924 /*
3925 * POWER9 has an eieio instruction variant using bit 6 as a hint to
3926 * tell the CPU it is a store-forwarding barrier.
3927 */
3928 if (ctx->opcode & 0x2000000) {
3929 /*
3930 * ISA says that "Reserved fields in instructions are ignored
3931 * by the processor". So ignore bit 6 on non-POWER9 CPUs, but
3932 * since this is not an instruction software should be using,
3933 * complain to the user.
3934 */
3935 if (!(ctx->insns_flags2 & PPC2_ISA300)) {
3936 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
3937 TARGET_FMT_lx "\n", ctx->cia);
3938 } else {
3939 bar = TCG_MO_ST_LD;
3940 }
3941 }
3942
3943 tcg_gen_mb(bar | TCG_BAR_SC);
3944 }
3945
3946 #if !defined(CONFIG_USER_ONLY)
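/*
 * Emit a runtime check of env->tlb_need_flush: when it is non-zero,
 * call the global or local flush helper.  This is how the lazy TLB
 * flush scheme defers the real flush to the next synchronising
 * instruction (isync, sync, tlbsync, slbsync).
 */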
3947 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
3948 {
3949 TCGv_i32 t;
3950 TCGLabel *l;
3951
3952 if (!ctx->lazy_tlb_flush) {
3953 return;
3954 }
3955 l = gen_new_label();
3956 t = tcg_temp_new_i32();
3957 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
3958 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
3959 if (global) {
3960 gen_helper_check_tlb_flush_global(cpu_env);
3961 } else {
3962 gen_helper_check_tlb_flush_local(cpu_env);
3963 }
3964 gen_set_label(l);
3965 tcg_temp_free_i32(t);
3966 }
3967 #else
3968 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
3969 #endif
3970
3971 /* isync */
3972 static void gen_isync(DisasContext *ctx)
3973 {
3974 /*
3975 * We need to check for a pending TLB flush. This can only happen in
3976 * kernel mode, however, so check MSR_PR.
3977 */
3978 if (!ctx->pr) {
3979 gen_check_tlb_flush(ctx, false);
3980 }
3981 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
3982 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
3983 }
3984
3985 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE))
3986
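/*
 * Load and reserve: perform an aligned load, record the effective
 * address in cpu_reserve and the loaded value in cpu_reserve_val for
 * a later store conditional, and order the access as an acquire
 * barrier.
 */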
3987 static void gen_load_locked(DisasContext *ctx, MemOp memop)
3988 {
3989 TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3990 TCGv t0 = tcg_temp_new();
3991
3992 gen_set_access_type(ctx, ACCESS_RES);
3993 gen_addr_reg_index(ctx, t0);
3994 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
3995 tcg_gen_mov_tl(cpu_reserve, t0);
3996 tcg_gen_mov_tl(cpu_reserve_val, gpr);
3997 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
3998 tcg_temp_free(t0);
3999 }
4000
4001 #define LARX(name, memop) \
4002 static void gen_##name(DisasContext *ctx) \
4003 { \
4004 gen_load_locked(ctx, memop); \
4005 }
4006
4007 /* lbarx, lharx, lwarx */
4008 LARX(lbarx, DEF_MEMOP(MO_UB))
4009 LARX(lharx, DEF_MEMOP(MO_UW))
4010 LARX(lwarx, DEF_MEMOP(MO_UL))
4011
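/*
 * Common body for the "fetch and increment/decrement bounded/equal"
 * forms: t and t2 are two adjacent memory words; when cond(t, t2)
 * holds, t + addend is stored back and RT receives t, otherwise the
 * memory is rewritten unchanged and RT receives 1 << (s * 8 - 1).
 */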
4012 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
4013 TCGv EA, TCGCond cond, int addend)
4014 {
4015 TCGv t = tcg_temp_new();
4016 TCGv t2 = tcg_temp_new();
4017 TCGv u = tcg_temp_new();
4018
4019 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
4020 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
4021 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
4022 tcg_gen_addi_tl(u, t, addend);
4023
4024 /* E.g. for fetch and increment bounded... */
4025 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
4026 tcg_gen_movcond_tl(cond, u, t, t2, u, t);
4027 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
4028
4029 /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
4030 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
4031 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);
4032
4033 tcg_temp_free(t);
4034 tcg_temp_free(t2);
4035 tcg_temp_free(u);
4036 }
4037
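/*
 * Atomic load-and-update (lwat/ldat): the FC field selects the
 * operation.  RT receives the original memory value, RT+1 supplies
 * the operand, and RT+2 holds the replacement value for the
 * "compare and swap not equal" case.
 */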
4038 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
4039 {
4040 uint32_t gpr_FC = FC(ctx->opcode);
4041 TCGv EA = tcg_temp_new();
4042 int rt = rD(ctx->opcode);
4043 bool need_serial;
4044 TCGv src, dst;
4045
4046 gen_addr_register(ctx, EA);
4047 dst = cpu_gpr[rt];
4048 src = cpu_gpr[(rt + 1) & 31];
4049
4050 need_serial = false;
4051 memop |= MO_ALIGN;
4052 switch (gpr_FC) {
4053 case 0: /* Fetch and add */
4054 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
4055 break;
4056 case 1: /* Fetch and xor */
4057 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
4058 break;
4059 case 2: /* Fetch and or */
4060 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
4061 break;
4062 case 3: /* Fetch and 'and' */
4063 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
4064 break;
4065 case 4: /* Fetch and max unsigned */
4066 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
4067 break;
4068 case 5: /* Fetch and max signed */
4069 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
4070 break;
4071 case 6: /* Fetch and min unsigned */
4072 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
4073 break;
4074 case 7: /* Fetch and min signed */
4075 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
4076 break;
4077 case 8: /* Swap */
4078 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
4079 break;
4080
4081 case 16: /* Compare and swap not equal */
4082 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4083 need_serial = true;
4084 } else {
4085 TCGv t0 = tcg_temp_new();
4086 TCGv t1 = tcg_temp_new();
4087
4088 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
4089 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
4090 tcg_gen_mov_tl(t1, src);
4091 } else {
4092 tcg_gen_ext32u_tl(t1, src);
4093 }
4094 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
4095 cpu_gpr[(rt + 2) & 31], t0);
4096 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
4097 tcg_gen_mov_tl(dst, t0);
4098
4099 tcg_temp_free(t0);
4100 tcg_temp_free(t1);
4101 }
4102 break;
4103
4104 case 24: /* Fetch and increment bounded */
4105 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4106 need_serial = true;
4107 } else {
4108 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
4109 }
4110 break;
4111 case 25: /* Fetch and increment equal */
4112 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4113 need_serial = true;
4114 } else {
4115 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
4116 }
4117 break;
4118 case 28: /* Fetch and decrement bounded */
4119 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4120 need_serial = true;
4121 } else {
4122 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
4123 }
4124 break;
4125
4126 default:
4127 /* invoke data storage error handler */
4128 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
4129 }
4130 tcg_temp_free(EA);
4131
4132 if (need_serial) {
4133 /* Restart with exclusive lock. */
4134 gen_helper_exit_atomic(cpu_env);
4135 ctx->base.is_jmp = DISAS_NORETURN;
4136 }
4137 }
4138
4139 static void gen_lwat(DisasContext *ctx)
4140 {
4141 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
4142 }
4143
4144 #ifdef TARGET_PPC64
4145 static void gen_ldat(DisasContext *ctx)
4146 {
4147 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q));
4148 }
4149 #endif
4150
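/*
 * Atomic store-and-update (stwat/stdat): FC selects the operation and
 * RS supplies the operand; the fetched value is discarded.  The
 * "store twin" case writes RS to two adjacent words only when they
 * currently hold equal values.
 */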
4151 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
4152 {
4153 uint32_t gpr_FC = FC(ctx->opcode);
4154 TCGv EA = tcg_temp_new();
4155 TCGv src, discard;
4156
4157 gen_addr_register(ctx, EA);
4158 src = cpu_gpr[rD(ctx->opcode)];
4159 discard = tcg_temp_new();
4160
4161 memop |= MO_ALIGN;
4162 switch (gpr_FC) {
4163 case 0: /* add and Store */
4164 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4165 break;
4166 case 1: /* xor and Store */
4167 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4168 break;
4169 case 2: /* Or and Store */
4170 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4171 break;
4172 case 3: /* 'and' and Store */
4173 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4174 break;
4175 case 4: /* Store max unsigned */
4176 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4177 break;
4178 case 5: /* Store max signed */
4179 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4180 break;
4181 case 6: /* Store min unsigned */
4182 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4183 break;
4184 case 7: /* Store min signed */
4185 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
4186 break;
4187 case 24: /* Store twin */
4188 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4189 /* Restart with exclusive lock. */
4190 gen_helper_exit_atomic(cpu_env);
4191 ctx->base.is_jmp = DISAS_NORETURN;
4192 } else {
4193 TCGv t = tcg_temp_new();
4194 TCGv t2 = tcg_temp_new();
4195 TCGv s = tcg_temp_new();
4196 TCGv s2 = tcg_temp_new();
4197 TCGv ea_plus_s = tcg_temp_new();
4198
4199 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
4200 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
4201 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
4202 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
4203 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
4204 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
4205 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
4206
4207 tcg_temp_free(ea_plus_s);
4208 tcg_temp_free(s2);
4209 tcg_temp_free(s);
4210 tcg_temp_free(t2);
4211 tcg_temp_free(t);
4212 }
4213 break;
4214 default:
4215 /* invoke data storage error handler */
4216 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
4217 }
4218 tcg_temp_free(discard);
4219 tcg_temp_free(EA);
4220 }
4221
4222 static void gen_stwat(DisasContext *ctx)
4223 {
4224 gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
4225 }
4226
4227 #ifdef TARGET_PPC64
4228 static void gen_stdat(DisasContext *ctx)
4229 {
4230 gen_st_atomic(ctx, DEF_MEMOP(MO_Q));
4231 }
4232 #endif
4233
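/*
 * Store conditional: succeed only if the effective address matches
 * the reservation and the atomic cmpxchg against the remembered value
 * wins.  CR0 gets SO plus EQ on success and SO alone on failure, and
 * the reservation is cleared either way.
 */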
4234 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
4235 {
4236 TCGLabel *l1 = gen_new_label();
4237 TCGLabel *l2 = gen_new_label();
4238 TCGv t0 = tcg_temp_new();
4239 int reg = rS(ctx->opcode);
4240
4241 gen_set_access_type(ctx, ACCESS_RES);
4242 gen_addr_reg_index(ctx, t0);
4243 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
4244 tcg_temp_free(t0);
4245
4246 t0 = tcg_temp_new();
4247 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
4248 cpu_gpr[reg], ctx->mem_idx,
4249 DEF_MEMOP(memop) | MO_ALIGN);
4250 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
4251 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
4252 tcg_gen_or_tl(t0, t0, cpu_so);
4253 tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
4254 tcg_temp_free(t0);
4255 tcg_gen_br(l2);
4256
4257 gen_set_label(l1);
4258
4259 /*
4260 * Address mismatch implies failure. But we still need to provide
4261 * the memory barrier semantics of the instruction.
4262 */
4263 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
4264 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4265
4266 gen_set_label(l2);
4267 tcg_gen_movi_tl(cpu_reserve, -1);
4268 }
4269
4270 #define STCX(name, memop) \
4271 static void gen_##name(DisasContext *ctx) \
4272 { \
4273 gen_conditional_store(ctx, memop); \
4274 }
4275
4276 STCX(stbcx_, DEF_MEMOP(MO_UB))
4277 STCX(sthcx_, DEF_MEMOP(MO_UW))
4278 STCX(stwcx_, DEF_MEMOP(MO_UL))
4279
4280 #if defined(TARGET_PPC64)
4281 /* ldarx */
4282 LARX(ldarx, DEF_MEMOP(MO_Q))
4283 /* stdcx. */
4284 STCX(stdcx_, DEF_MEMOP(MO_Q))
4285
4286 /* lqarx */
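/*
 * Loads a quadword into the even/odd GPR pair RD/RD+1; RD must be
 * even and must differ from RA and RB.  The effective address and
 * both halves are recorded so that a following stqcx. can validate
 * the reservation.
 */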
4287 static void gen_lqarx(DisasContext *ctx)
4288 {
4289 int rd = rD(ctx->opcode);
4290 TCGv EA, hi, lo;
4291
4292 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
4293 (rd == rB(ctx->opcode)))) {
4294 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4295 return;
4296 }
4297
4298 gen_set_access_type(ctx, ACCESS_RES);
4299 EA = tcg_temp_new();
4300 gen_addr_reg_index(ctx, EA);
4301
4302 /* Note that the low part is always in RD+1, even in LE mode. */
4303 lo = cpu_gpr[rd + 1];
4304 hi = cpu_gpr[rd];
4305
4306 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4307 if (HAVE_ATOMIC128) {
4308 TCGv_i32 oi = tcg_temp_new_i32();
4309 if (ctx->le_mode) {
4310 tcg_gen_movi_i32(oi, make_memop_idx(MO_LEQ | MO_ALIGN_16,
4311 ctx->mem_idx));
4312 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
4313 } else {
4314 tcg_gen_movi_i32(oi, make_memop_idx(MO_BEQ | MO_ALIGN_16,
4315 ctx->mem_idx));
4316 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
4317 }
4318 tcg_temp_free_i32(oi);
4319 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
4320 } else {
4321 /* Restart with exclusive lock. */
4322 gen_helper_exit_atomic(cpu_env);
4323 ctx->base.is_jmp = DISAS_NORETURN;
4324 tcg_temp_free(EA);
4325 return;
4326 }
4327 } else if (ctx->le_mode) {
4328 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16);
4329 tcg_gen_mov_tl(cpu_reserve, EA);
4330 gen_addr_add(ctx, EA, EA, 8);
4331 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ);
4332 } else {
4333 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16);
4334 tcg_gen_mov_tl(cpu_reserve, EA);
4335 gen_addr_add(ctx, EA, EA, 8);
4336 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ);
4337 }
4338 tcg_temp_free(EA);
4339
4340 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
4341 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
4342 }
4343
4344 /* stqcx. */
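/*
 * In the non-parallel case the reservation is checked by hand: the
 * effective address must match cpu_reserve and both 64-bit halves
 * must still equal the values saved by lqarx (their order depends on
 * the endianness of the original load) before the new pair is stored
 * and CR0.EQ is set.
 */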
4345 static void gen_stqcx_(DisasContext *ctx)
4346 {
4347 int rs = rS(ctx->opcode);
4348 TCGv EA, hi, lo;
4349
4350 if (unlikely(rs & 1)) {
4351 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4352 return;
4353 }
4354
4355 gen_set_access_type(ctx, ACCESS_RES);
4356 EA = tcg_temp_new();
4357 gen_addr_reg_index(ctx, EA);
4358
4359 /* Note that the low part is always in RS+1, even in LE mode. */
4360 lo = cpu_gpr[rs + 1];
4361 hi = cpu_gpr[rs];
4362
4363 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4364 if (HAVE_CMPXCHG128) {
4365 TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_Q) | MO_ALIGN_16);
4366 if (ctx->le_mode) {
4367 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
4368 EA, lo, hi, oi);
4369 } else {
4370 gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
4371 EA, lo, hi, oi);
4372 }
4373 tcg_temp_free_i32(oi);
4374 } else {
4375 /* Restart with exclusive lock. */
4376 gen_helper_exit_atomic(cpu_env);
4377 ctx->base.is_jmp = DISAS_NORETURN;
4378 }
4379 tcg_temp_free(EA);
4380 } else {
4381 TCGLabel *lab_fail = gen_new_label();
4382 TCGLabel *lab_over = gen_new_label();
4383 TCGv_i64 t0 = tcg_temp_new_i64();
4384 TCGv_i64 t1 = tcg_temp_new_i64();
4385
4386 tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
4387 tcg_temp_free(EA);
4388
4389 gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
4390 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
4391 ? offsetof(CPUPPCState, reserve_val2)
4392 : offsetof(CPUPPCState, reserve_val)));
4393 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);
4394
4395 tcg_gen_addi_i64(t0, cpu_reserve, 8);
4396 gen_qemu_ld64_i64(ctx, t0, t0);
4397 tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
4398 ? offsetof(CPUPPCState, reserve_val)
4399 : offsetof(CPUPPCState, reserve_val2)));
4400 tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);
4401
4402 /* Success */
4403 gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
4404 tcg_gen_addi_i64(t0, cpu_reserve, 8);
4405 gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);
4406
4407 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4408 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
4409 tcg_gen_br(lab_over);
4410
4411 gen_set_label(lab_fail);
4412 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4413
4414 gen_set_label(lab_over);
4415 tcg_gen_movi_tl(cpu_reserve, -1);
4416 tcg_temp_free_i64(t0);
4417 tcg_temp_free_i64(t1);
4418 }
4419 }
4420 #endif /* defined(TARGET_PPC64) */
4421
4422 /* sync */
4423 static void gen_sync(DisasContext *ctx)
4424 {
4425 uint32_t l = (ctx->opcode >> 21) & 3;
4426
4427 /*
4428 * We may need to check for a pending TLB flush.
4429 *
4430 * We do this on ptesync (l == 2) on ppc64 and on any sync on ppc32.
4431 *
4432 * Additionally, this can only happen in kernel mode, so
4433 * check MSR_PR as well.
4434 */
4435 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
4436 gen_check_tlb_flush(ctx, true);
4437 }
4438 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
4439 }
4440
4441 /* wait */
4442 static void gen_wait(DisasContext *ctx)
4443 {
4444 TCGv_i32 t0 = tcg_const_i32(1);
4445 tcg_gen_st_i32(t0, cpu_env,
4446 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
4447 tcg_temp_free_i32(t0);
4448 /* Stop translation, as the CPU is supposed to sleep from now */
4449 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4450 }
4451
4452 #if defined(TARGET_PPC64)
4453 static void gen_doze(DisasContext *ctx)
4454 {
4455 #if defined(CONFIG_USER_ONLY)
4456 GEN_PRIV;
4457 #else
4458 TCGv_i32 t;
4459
4460 CHK_HV;
4461 t = tcg_const_i32(PPC_PM_DOZE);
4462 gen_helper_pminsn(cpu_env, t);
4463 tcg_temp_free_i32(t);
4464 /* Stop translation, as the CPU is supposed to sleep from now */
4465 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4466 #endif /* defined(CONFIG_USER_ONLY) */
4467 }
4468
4469 static void gen_nap(DisasContext *ctx)
4470 {
4471 #if defined(CONFIG_USER_ONLY)
4472 GEN_PRIV;
4473 #else
4474 TCGv_i32 t;
4475
4476 CHK_HV;
4477 t = tcg_const_i32(PPC_PM_NAP);
4478 gen_helper_pminsn(cpu_env, t);
4479 tcg_temp_free_i32(t);
4480 /* Stop translation, as the CPU is supposed to sleep from now */
4481 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4482 #endif /* defined(CONFIG_USER_ONLY) */
4483 }
4484
4485 static void gen_stop(DisasContext *ctx)
4486 {
4487 #if defined(CONFIG_USER_ONLY)
4488 GEN_PRIV;
4489 #else
4490 TCGv_i32 t;
4491
4492 CHK_HV;
4493 t = tcg_const_i32(PPC_PM_STOP);
4494 gen_helper_pminsn(cpu_env, t);
4495 tcg_temp_free_i32(t);
4496 /* Stop translation, as the CPU is supposed to sleep from now */
4497 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4498 #endif /* defined(CONFIG_USER_ONLY) */
4499 }
4500
4501 static void gen_sleep(DisasContext *ctx)
4502 {
4503 #if defined(CONFIG_USER_ONLY)
4504 GEN_PRIV;
4505 #else
4506 TCGv_i32 t;
4507
4508 CHK_HV;
4509 t = tcg_const_i32(PPC_PM_SLEEP);
4510 gen_helper_pminsn(cpu_env, t);
4511 tcg_temp_free_i32(t);
4512 /* Stop translation, as the CPU is supposed to sleep from now */
4513 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4514 #endif /* defined(CONFIG_USER_ONLY) */
4515 }
4516
4517 static void gen_rvwinkle(DisasContext *ctx)
4518 {
4519 #if defined(CONFIG_USER_ONLY)
4520 GEN_PRIV;
4521 #else
4522 TCGv_i32 t;
4523
4524 CHK_HV;
4525 t = tcg_const_i32(PPC_PM_RVWINKLE);
4526 gen_helper_pminsn(cpu_env, t);
4527 tcg_temp_free_i32(t);
4528 /* Stop translation, as the CPU is supposed to sleep from now */
4529 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4530 #endif /* defined(CONFIG_USER_ONLY) */
4531 }
4532 #endif /* #if defined(TARGET_PPC64) */
4533
4534 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
4535 {
4536 #if defined(TARGET_PPC64)
4537 if (ctx->has_cfar) {
4538 tcg_gen_movi_tl(cpu_cfar, nip);
4539 }
4540 #endif
4541 }
4542
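/*
 * Direct block chaining is only safe when we are not single stepping
 * and, outside of user mode, when the destination lies in the same
 * guest page as the start of the current TB.
 */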
4543 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
4544 {
4545 if (unlikely(ctx->singlestep_enabled)) {
4546 return false;
4547 }
4548
4549 #ifndef CONFIG_USER_ONLY
4550 return (ctx->base.tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);
4551 #else
4552 return true;
4553 #endif
4554 }
4555
4556 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4557 {
4558 int sse = ctx->singlestep_enabled;
4559 if (unlikely(sse)) {
4560 if (sse & GDBSTUB_SINGLE_STEP) {
4561 gen_debug_exception(ctx);
4562 } else if (sse & (CPU_SINGLE_STEP | CPU_BRANCH_STEP)) {
4563 uint32_t excp = gen_prep_dbgex(ctx);
4564 gen_exception(ctx, excp);
4565 } else {
4566 tcg_gen_exit_tb(NULL, 0);
4567 }
4568 } else {
4569 tcg_gen_lookup_and_goto_ptr();
4570 }
4571 }
4572
4573 /*** Branch ***/
4574 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
4575 {
4576 if (NARROW_MODE(ctx)) {
4577 dest = (uint32_t) dest;
4578 }
4579 if (use_goto_tb(ctx, dest)) {
4580 tcg_gen_goto_tb(n);
4581 tcg_gen_movi_tl(cpu_nip, dest & ~3);
4582 tcg_gen_exit_tb(ctx->base.tb, n);
4583 } else {
4584 tcg_gen_movi_tl(cpu_nip, dest & ~3);
4585 gen_lookup_and_goto_ptr(ctx);
4586 }
4587 }
4588
4589 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4590 {
4591 if (NARROW_MODE(ctx)) {
4592 nip = (uint32_t)nip;
4593 }
4594 tcg_gen_movi_tl(cpu_lr, nip);
4595 }
4596
4597 /* b ba bl bla */
4598 static void gen_b(DisasContext *ctx)
4599 {
4600 target_ulong li, target;
4601
4602 /* sign extend LI */
4603 li = LI(ctx->opcode);
4604 li = (li ^ 0x02000000) - 0x02000000;
4605 if (likely(AA(ctx->opcode) == 0)) {
4606 target = ctx->cia + li;
4607 } else {
4608 target = li;
4609 }
4610 if (LK(ctx->opcode)) {
4611 gen_setlr(ctx, ctx->base.pc_next);
4612 }
4613 gen_update_cfar(ctx, ctx->cia);
4614 gen_goto_tb(ctx, 0, target);
4615 ctx->base.is_jmp = DISAS_NORETURN;
4616 }
4617
4618 #define BCOND_IM 0
4619 #define BCOND_LR 1
4620 #define BCOND_CTR 2
4621 #define BCOND_TAR 3
4622
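/*
 * Common conditional branch body.  BO bit 0x4 clear means "decrement
 * CTR and test it" (polarity chosen by bit 0x2); BO bit 0x10 clear
 * means "test CR bit BI" (polarity chosen by bit 0x8).  The branch is
 * taken only when all enabled tests pass, otherwise we fall through.
 */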
4623 static void gen_bcond(DisasContext *ctx, int type)
4624 {
4625 uint32_t bo = BO(ctx->opcode);
4626 TCGLabel *l1;
4627 TCGv target;
4628
4629 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
4630 target = tcg_temp_local_new();
4631 if (type == BCOND_CTR) {
4632 tcg_gen_mov_tl(target, cpu_ctr);
4633 } else if (type == BCOND_TAR) {
4634 gen_load_spr(target, SPR_TAR);
4635 } else {
4636 tcg_gen_mov_tl(target, cpu_lr);
4637 }
4638 } else {
4639 target = NULL;
4640 }
4641 if (LK(ctx->opcode)) {
4642 gen_setlr(ctx, ctx->base.pc_next);
4643 }
4644 l1 = gen_new_label();
4645 if ((bo & 0x4) == 0) {
4646 /* Decrement and test CTR */
4647 TCGv temp = tcg_temp_new();
4648
4649 if (type == BCOND_CTR) {
4650 /*
4651 * All ISAs up to v3 describe this form of bcctr as invalid, but
4652 * some processors, i.e. 64-bit server processors compliant with
4653 * arch 2.x, implement a "test and decrement" logic instead, as
4654 * described in their respective UMs. This logic requires CTR to
4655 * act as both the branch target and a counter, which makes it
4656 * basically useless and thus never used in real code.
4657 *
4658 * This form was hence chosen to trigger the extra micro-architectural
4659 * side-effect on real HW needed for the Spectre v2 workaround.
4660 * It is up to guests that implement such a workaround, e.g. Linux,
4661 * to use this form in a way that just triggers the side-effect
4662 * without doing anything else harmful.
4663 */
4664 if (unlikely(!is_book3s_arch2x(ctx))) {
4665 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4666 tcg_temp_free(temp);
4667 tcg_temp_free(target);
4668 return;
4669 }
4670
4671 if (NARROW_MODE(ctx)) {
4672 tcg_gen_ext32u_tl(temp, cpu_ctr);
4673 } else {
4674 tcg_gen_mov_tl(temp, cpu_ctr);
4675 }
4676 if (bo & 0x2) {
4677 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4678 } else {
4679 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4680 }
4681 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4682 } else {
4683 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4684 if (NARROW_MODE(ctx)) {
4685 tcg_gen_ext32u_tl(temp, cpu_ctr);
4686 } else {
4687 tcg_gen_mov_tl(temp, cpu_ctr);
4688 }
4689 if (bo & 0x2) {
4690 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4691 } else {
4692 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4693 }
4694 }
4695 tcg_temp_free(temp);
4696 }
4697 if ((bo & 0x10) == 0) {
4698 /* Test CR */
4699 uint32_t bi = BI(ctx->opcode);
4700 uint32_t mask = 0x08 >> (bi & 0x03);
4701 TCGv_i32 temp = tcg_temp_new_i32();
4702
4703 if (bo & 0x8) {
4704 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4705 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
4706 } else {
4707 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4708 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
4709 }
4710 tcg_temp_free_i32(temp);
4711 }
4712 gen_update_cfar(ctx, ctx->cia);
4713 if (type == BCOND_IM) {
4714 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
4715 if (likely(AA(ctx->opcode) == 0)) {
4716 gen_goto_tb(ctx, 0, ctx->cia + li);
4717 } else {
4718 gen_goto_tb(ctx, 0, li);
4719 }
4720 } else {
4721 if (NARROW_MODE(ctx)) {
4722 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
4723 } else {
4724 tcg_gen_andi_tl(cpu_nip, target, ~3);
4725 }
4726 gen_lookup_and_goto_ptr(ctx);
4727 tcg_temp_free(target);
4728 }
4729 if ((bo & 0x14) != 0x14) {
4730 /* fallthrough case */
4731 gen_set_label(l1);
4732 gen_goto_tb(ctx, 1, ctx->base.pc_next);
4733 }
4734 ctx->base.is_jmp = DISAS_NORETURN;
4735 }
4736
4737 static void gen_bc(DisasContext *ctx)
4738 {
4739 gen_bcond(ctx, BCOND_IM);
4740 }
4741
4742 static void gen_bcctr(DisasContext *ctx)
4743 {
4744 gen_bcond(ctx, BCOND_CTR);
4745 }
4746
4747 static void gen_bclr(DisasContext *ctx)
4748 {
4749 gen_bcond(ctx, BCOND_LR);
4750 }
4751
4752 static void gen_bctar(DisasContext *ctx)
4753 {
4754 gen_bcond(ctx, BCOND_TAR);
4755 }
4756
4757 /*** Condition register logical ***/
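/*
 * The CR logical instructions are implemented by shifting the whole
 * 4-bit source CR fields so that bits crbA and crbB line up with the
 * destination bit position, applying the operation, and merging the
 * single result bit back into the destination CR field.
 */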
4758 #define GEN_CRLOGIC(name, tcg_op, opc) \
4759 static void glue(gen_, name)(DisasContext *ctx) \
4760 { \
4761 uint8_t bitmask; \
4762 int sh; \
4763 TCGv_i32 t0, t1; \
4764 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
4765 t0 = tcg_temp_new_i32(); \
4766 if (sh > 0) \
4767 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \
4768 else if (sh < 0) \
4769 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \
4770 else \
4771 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \
4772 t1 = tcg_temp_new_i32(); \
4773 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
4774 if (sh > 0) \
4775 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \
4776 else if (sh < 0) \
4777 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \
4778 else \
4779 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \
4780 tcg_op(t0, t0, t1); \
4781 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \
4782 tcg_gen_andi_i32(t0, t0, bitmask); \
4783 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
4784 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \
4785 tcg_temp_free_i32(t0); \
4786 tcg_temp_free_i32(t1); \
4787 }
4788
4789 /* crand */
4790 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
4791 /* crandc */
4792 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
4793 /* creqv */
4794 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
4795 /* crnand */
4796 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
4797 /* crnor */
4798 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
4799 /* cror */
4800 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
4801 /* crorc */
4802 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
4803 /* crxor */
4804 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
4805
4806 /* mcrf */
4807 static void gen_mcrf(DisasContext *ctx)
4808 {
4809 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
4810 }
4811
4812 /*** System linkage ***/
4813
4814 /* rfi (supervisor only) */
4815 static void gen_rfi(DisasContext *ctx)
4816 {
4817 #if defined(CONFIG_USER_ONLY)
4818 GEN_PRIV;
4819 #else
4820 /*
4821 * This instruction doesn't exist anymore on 64-bit server
4822 * processors compliant with arch 2.x
4823 */
4824 if (is_book3s_arch2x(ctx)) {
4825 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4826 return;
4827 }
4828 /* Restore CPU state */
4829 CHK_SV;
4830 gen_icount_io_start(ctx);
4831 gen_update_cfar(ctx, ctx->cia);
4832 gen_helper_rfi(cpu_env);
4833 ctx->base.is_jmp = DISAS_EXIT;
4834 #endif
4835 }
4836
4837 #if defined(TARGET_PPC64)
4838 static void gen_rfid(DisasContext *ctx)
4839 {
4840 #if defined(CONFIG_USER_ONLY)
4841 GEN_PRIV;
4842 #else
4843 /* Restore CPU state */
4844 CHK_SV;
4845 gen_icount_io_start(ctx);
4846 gen_update_cfar(ctx, ctx->cia);
4847 gen_helper_rfid(cpu_env);
4848 ctx->base.is_jmp = DISAS_EXIT;
4849 #endif
4850 }
4851
4852 #if !defined(CONFIG_USER_ONLY)
4853 static void gen_rfscv(DisasContext *ctx)
4854 {
4855 #if defined(CONFIG_USER_ONLY)
4856 GEN_PRIV;
4857 #else
4858 /* Restore CPU state */
4859 CHK_SV;
4860 gen_icount_io_start(ctx);
4861 gen_update_cfar(ctx, ctx->cia);
4862 gen_helper_rfscv(cpu_env);
4863 ctx->base.is_jmp = DISAS_EXIT;
4864 #endif
4865 }
4866 #endif
4867
4868 static void gen_hrfid(DisasContext *ctx)
4869 {
4870 #if defined(CONFIG_USER_ONLY)
4871 GEN_PRIV;
4872 #else
4873 /* Restore CPU state */
4874 CHK_HV;
4875 gen_helper_hrfid(cpu_env);
4876 ctx->base.is_jmp = DISAS_EXIT;
4877 #endif
4878 }
4879 #endif
4880
4881 /* sc */
4882 #if defined(CONFIG_USER_ONLY)
4883 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
4884 #else
4885 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
4886 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
4887 #endif
4888 static void gen_sc(DisasContext *ctx)
4889 {
4890 uint32_t lev;
4891
4892 lev = (ctx->opcode >> 5) & 0x7F;
4893 gen_exception_err(ctx, POWERPC_SYSCALL, lev);
4894 }
4895
4896 #if defined(TARGET_PPC64)
4897 #if !defined(CONFIG_USER_ONLY)
4898 static void gen_scv(DisasContext *ctx)
4899 {
4900 uint32_t lev = (ctx->opcode >> 5) & 0x7F;
4901
4902 /* Set the PC back to the faulting instruction. */
4903 gen_update_nip(ctx, ctx->cia);
4904 gen_helper_scv(cpu_env, tcg_constant_i32(lev));
4905
4906 ctx->base.is_jmp = DISAS_NORETURN;
4907 }
4908 #endif
4909 #endif
4910
4911 /*** Trap ***/
4912
4913 /* Check for unconditional traps (always or never) */
4914 static bool check_unconditional_trap(DisasContext *ctx)
4915 {
4916 /* Trap never */
4917 if (TO(ctx->opcode) == 0) {
4918 return true;
4919 }
4920 /* Trap always */
4921 if (TO(ctx->opcode) == 31) {
4922 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4923 return true;
4924 }
4925 return false;
4926 }
4927
4928 /* tw */
4929 static void gen_tw(DisasContext *ctx)
4930 {
4931 TCGv_i32 t0;
4932
4933 if (check_unconditional_trap(ctx)) {
4934 return;
4935 }
4936 t0 = tcg_const_i32(TO(ctx->opcode));
4937 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4938 t0);
4939 tcg_temp_free_i32(t0);
4940 }
4941
4942 /* twi */
4943 static void gen_twi(DisasContext *ctx)
4944 {
4945 TCGv t0;
4946 TCGv_i32 t1;
4947
4948 if (check_unconditional_trap(ctx)) {
4949 return;
4950 }
4951 t0 = tcg_const_tl(SIMM(ctx->opcode));
4952 t1 = tcg_const_i32(TO(ctx->opcode));
4953 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4954 tcg_temp_free(t0);
4955 tcg_temp_free_i32(t1);
4956 }
4957
4958 #if defined(TARGET_PPC64)
4959 /* td */
4960 static void gen_td(DisasContext *ctx)
4961 {
4962 TCGv_i32 t0;
4963
4964 if (check_unconditional_trap(ctx)) {
4965 return;
4966 }
4967 t0 = tcg_const_i32(TO(ctx->opcode));
4968 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4969 t0);
4970 tcg_temp_free_i32(t0);
4971 }
4972
4973 /* tdi */
4974 static void gen_tdi(DisasContext *ctx)
4975 {
4976 TCGv t0;
4977 TCGv_i32 t1;
4978
4979 if (check_unconditional_trap(ctx)) {
4980 return;
4981 }
4982 t0 = tcg_const_tl(SIMM(ctx->opcode));
4983 t1 = tcg_const_i32(TO(ctx->opcode));
4984 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4985 tcg_temp_free(t0);
4986 tcg_temp_free_i32(t1);
4987 }
4988 #endif
4989
4990 /*** Processor control ***/
4991
4992 /* mcrxr */
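/*
 * Copies XER[SO, OV, CA] into bits 3..1 of the destination CR field
 * (bit 0 is cleared) and then clears those bits in XER.
 */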
4993 static void gen_mcrxr(DisasContext *ctx)
4994 {
4995 TCGv_i32 t0 = tcg_temp_new_i32();
4996 TCGv_i32 t1 = tcg_temp_new_i32();
4997 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4998
4999 tcg_gen_trunc_tl_i32(t0, cpu_so);
5000 tcg_gen_trunc_tl_i32(t1, cpu_ov);
5001 tcg_gen_trunc_tl_i32(dst, cpu_ca);
5002 tcg_gen_shli_i32(t0, t0, 3);
5003 tcg_gen_shli_i32(t1, t1, 2);
5004 tcg_gen_shli_i32(dst, dst, 1);
5005 tcg_gen_or_i32(dst, dst, t0);
5006 tcg_gen_or_i32(dst, dst, t1);
5007 tcg_temp_free_i32(t0);
5008 tcg_temp_free_i32(t1);
5009
5010 tcg_gen_movi_tl(cpu_so, 0);
5011 tcg_gen_movi_tl(cpu_ov, 0);
5012 tcg_gen_movi_tl(cpu_ca, 0);
5013 }
5014
5015 #ifdef TARGET_PPC64
5016 /* mcrxrx */
5017 static void gen_mcrxrx(DisasContext *ctx)
5018 {
5019 TCGv t0 = tcg_temp_new();
5020 TCGv t1 = tcg_temp_new();
5021 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
5022
5023 /* copy OV and OV32 */
5024 tcg_gen_shli_tl(t0, cpu_ov, 1);
5025 tcg_gen_or_tl(t0, t0, cpu_ov32);
5026 tcg_gen_shli_tl(t0, t0, 2);
5027 /* copy CA and CA32 */
5028 tcg_gen_shli_tl(t1, cpu_ca, 1);
5029 tcg_gen_or_tl(t1, t1, cpu_ca32);
5030 tcg_gen_or_tl(t0, t0, t1);
5031 tcg_gen_trunc_tl_i32(dst, t0);
5032 tcg_temp_free(t0);
5033 tcg_temp_free(t1);
5034 }
5035 #endif
5036
5037 /* mfcr mfocrf */
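/*
 * For the mfocrf form (opcode bit 0x00100000 set) the single CR field
 * selected by CRM is placed at its position in RD with the other bits
 * zeroed; a CRM without exactly one bit set leaves RD untouched.  The
 * plain mfcr form concatenates all eight CR fields into RD.
 */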
5038 static void gen_mfcr(DisasContext *ctx)
5039 {
5040 uint32_t crm, crn;
5041
5042 if (likely(ctx->opcode & 0x00100000)) {
5043 crm = CRM(ctx->opcode);
5044 if (likely(crm && ((crm & (crm - 1)) == 0))) {
5045 crn = ctz32(crm);
5046 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
5047 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
5048 cpu_gpr[rD(ctx->opcode)], crn * 4);
5049 }
5050 } else {
5051 TCGv_i32 t0 = tcg_temp_new_i32();
5052 tcg_gen_mov_i32(t0, cpu_crf[0]);
5053 tcg_gen_shli_i32(t0, t0, 4);
5054 tcg_gen_or_i32(t0, t0, cpu_crf[1]);
5055 tcg_gen_shli_i32(t0, t0, 4);
5056 tcg_gen_or_i32(t0, t0, cpu_crf[2]);
5057 tcg_gen_shli_i32(t0, t0, 4);
5058 tcg_gen_or_i32(t0, t0, cpu_crf[3]);
5059 tcg_gen_shli_i32(t0, t0, 4);
5060 tcg_gen_or_i32(t0, t0, cpu_crf[4]);
5061 tcg_gen_shli_i32(t0, t0, 4);
5062 tcg_gen_or_i32(t0, t0, cpu_crf[5]);
5063 tcg_gen_shli_i32(t0, t0, 4);
5064 tcg_gen_or_i32(t0, t0, cpu_crf[6]);
5065 tcg_gen_shli_i32(t0, t0, 4);
5066 tcg_gen_or_i32(t0, t0, cpu_crf[7]);
5067 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
5068 tcg_temp_free_i32(t0);
5069 }
5070 }
5071
5072 /* mfmsr */
5073 static void gen_mfmsr(DisasContext *ctx)
5074 {
5075 CHK_SV;
5076 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
5077 }
5078
5079 /* mfspr */
5080 static inline void gen_op_mfspr(DisasContext *ctx)
5081 {
5082 void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
5083 uint32_t sprn = SPR(ctx->opcode);
5084
5085 #if defined(CONFIG_USER_ONLY)
5086 read_cb = ctx->spr_cb[sprn].uea_read;
5087 #else
5088 if (ctx->pr) {
5089 read_cb = ctx->spr_cb[sprn].uea_read;
5090 } else if (ctx->hv) {
5091 read_cb = ctx->spr_cb[sprn].hea_read;
5092 } else {
5093 read_cb = ctx->spr_cb[sprn].oea_read;
5094 }
5095 #endif
5096 if (likely(read_cb != NULL)) {
5097 if (likely(read_cb != SPR_NOACCESS)) {
5098 (*read_cb)(ctx, rD(ctx->opcode), sprn);
5099 } else {
5100 /* Privilege exception */
5101 /*
5102 * This is a hack to avoid warnings when running Linux:
5103 * this OS breaks the PowerPC virtualisation model,
5104 * allowing userland applications to read the PVR
5105 */
5106 if (sprn != SPR_PVR) {
5107 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
5108 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
5109 ctx->cia);
5110 }
5111 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5112 }
5113 } else {
5114 /* ISA 2.07 defines these as no-ops */
5115 if ((ctx->insns_flags2 & PPC2_ISA207S) &&
5116 (sprn >= 808 && sprn <= 811)) {
5117 /* This is a nop */
5118 return;
5119 }
5120 /* Not defined */
5121 qemu_log_mask(LOG_GUEST_ERROR,
5122 "Trying to read invalid spr %d (0x%03x) at "
5123 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
5124
5125 /*
5126 * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
5127 * generate a privilege exception, a hypervisor emulation assist or a no-op
5128 */
5129 if (sprn & 0x10) {
5130 if (ctx->pr) {
5131 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5132 }
5133 } else {
5134 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
5135 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5136 }
5137 }
5138 }
5139 }
5140
5141 static void gen_mfspr(DisasContext *ctx)
5142 {
5143 gen_op_mfspr(ctx);
5144 }
5145
5146 /* mftb */
5147 static void gen_mftb(DisasContext *ctx)
5148 {
5149 gen_op_mfspr(ctx);
5150 }
5151
5152 /* mtcrf mtocrf */
5153 static void gen_mtcrf(DisasContext *ctx)
5154 {
5155 uint32_t crm, crn;
5156
5157 crm = CRM(ctx->opcode);
5158 if (likely((ctx->opcode & 0x00100000))) {
5159 if (crm && ((crm & (crm - 1)) == 0)) {
5160 TCGv_i32 temp = tcg_temp_new_i32();
5161 crn = ctz32(crm);
5162 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
5163 tcg_gen_shri_i32(temp, temp, crn * 4);
5164 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
5165 tcg_temp_free_i32(temp);
5166 }
5167 } else {
5168 TCGv_i32 temp = tcg_temp_new_i32();
5169 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
5170 for (crn = 0 ; crn < 8 ; crn++) {
5171 if (crm & (1 << crn)) {
5172 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
5173 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
5174 }
5175 }
5176 tcg_temp_free_i32(temp);
5177 }
5178 }
5179
5180 /* mtmsr */
5181 #if defined(TARGET_PPC64)
5182 static void gen_mtmsrd(DisasContext *ctx)
5183 {
5184 CHK_SV;
5185
5186 #if !defined(CONFIG_USER_ONLY)
5187 gen_icount_io_start(ctx);
5188 if (ctx->opcode & 0x00010000) {
5189 /* L=1 form only updates EE and RI */
5190 TCGv t0 = tcg_temp_new();
5191 TCGv t1 = tcg_temp_new();
5192 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)],
5193 (1 << MSR_RI) | (1 << MSR_EE));
5194 tcg_gen_andi_tl(t1, cpu_msr,
5195 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
5196 tcg_gen_or_tl(t1, t1, t0);
5197
5198 gen_helper_store_msr(cpu_env, t1);
5199 tcg_temp_free(t0);
5200 tcg_temp_free(t1);
5201
5202 } else {
5203 /*
5204 * XXX: we need to update nip before the store because if we enter
5205 * power saving mode we will exit the loop directly from
5206 * ppc_store_msr
5207 */
5208 gen_update_nip(ctx, ctx->base.pc_next);
5209 gen_helper_store_msr(cpu_env, cpu_gpr[rS(ctx->opcode)]);
5210 }
5211 /* Must stop the translation as machine state (may have) changed */
5212 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5213 #endif /* !defined(CONFIG_USER_ONLY) */
5214 }
5215 #endif /* defined(TARGET_PPC64) */
5216
5217 static void gen_mtmsr(DisasContext *ctx)
5218 {
5219 CHK_SV;
5220
5221 #if !defined(CONFIG_USER_ONLY)
5222 gen_icount_io_start(ctx);
5223 if (ctx->opcode & 0x00010000) {
5224 /* L=1 form only updates EE and RI */
5225 TCGv t0 = tcg_temp_new();
5226 TCGv t1 = tcg_temp_new();
5227 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)],
5228 (1 << MSR_RI) | (1 << MSR_EE));
5229 tcg_gen_andi_tl(t1, cpu_msr,
5230 ~(target_ulong)((1 << MSR_RI) | (1 << MSR_EE)));
5231 tcg_gen_or_tl(t1, t1, t0);
5232
5233 gen_helper_store_msr(cpu_env, t1);
5234 tcg_temp_free(t0);
5235 tcg_temp_free(t1);
5236
5237 } else {
5238 TCGv msr = tcg_temp_new();
5239
5240 /*
5241 * XXX: we need to update nip before the store because if we enter
5242 * power saving mode we will exit the loop directly from
5243 * ppc_store_msr
5244 */
5245 gen_update_nip(ctx, ctx->base.pc_next);
5246 #if defined(TARGET_PPC64)
5247 tcg_gen_deposit_tl(msr, cpu_msr, cpu_gpr[rS(ctx->opcode)], 0, 32);
5248 #else
5249 tcg_gen_mov_tl(msr, cpu_gpr[rS(ctx->opcode)]);
5250 #endif
5251 gen_helper_store_msr(cpu_env, msr);
5252 tcg_temp_free(msr);
5253 }
5254 /* Must stop the translation as machine state (may have) changed */
5255 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5256 #endif
5257 }
5258
5259 /* mtspr */
5260 static void gen_mtspr(DisasContext *ctx)
5261 {
5262 void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
5263 uint32_t sprn = SPR(ctx->opcode);
5264
5265 #if defined(CONFIG_USER_ONLY)
5266 write_cb = ctx->spr_cb[sprn].uea_write;
5267 #else
5268 if (ctx->pr) {
5269 write_cb = ctx->spr_cb[sprn].uea_write;
5270 } else if (ctx->hv) {
5271 write_cb = ctx->spr_cb[sprn].hea_write;
5272 } else {
5273 write_cb = ctx->spr_cb[sprn].oea_write;
5274 }
5275 #endif
5276 if (likely(write_cb != NULL)) {
5277 if (likely(write_cb != SPR_NOACCESS)) {
5278 (*write_cb)(ctx, sprn, rS(ctx->opcode));
5279 } else {
5280 /* Privilege exception */
5281 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
5282 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
5283 ctx->cia);
5284 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5285 }
5286 } else {
5287 /* ISA 2.07 defines these as no-ops */
5288 if ((ctx->insns_flags2 & PPC2_ISA207S) &&
5289 (sprn >= 808 && sprn <= 811)) {
5290 /* This is a nop */
5291 return;
5292 }
5293
5294 /* Not defined */
5295 qemu_log_mask(LOG_GUEST_ERROR,
5296 "Trying to write invalid spr %d (0x%03x) at "
5297 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
5298
5299
5300 /*
5301 * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
5302 * generate a privilege exception, a hypervisor emulation assist or a no-op
5303 */
5304 if (sprn & 0x10) {
5305 if (ctx->pr) {
5306 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5307 }
5308 } else {
5309 if (ctx->pr || sprn == 0) {
5310 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
5311 }
5312 }
5313 }
5314 }
5315
5316 #if defined(TARGET_PPC64)
5317 /* setb */
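/*
 * Sets RT to -1 when the LT bit of the selected CR field is set
 * (field value >= 8), to 1 when only GT is set (field value >= 4),
 * and to 0 otherwise, using unsigned compares and a movcond.
 */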
5318 static void gen_setb(DisasContext *ctx)
5319 {
5320 TCGv_i32 t0 = tcg_temp_new_i32();
5321 TCGv_i32 t8 = tcg_temp_new_i32();
5322 TCGv_i32 tm1 = tcg_temp_new_i32();
5323 int crf = crfS(ctx->opcode);
5324
5325 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
5326 tcg_gen_movi_i32(t8, 8);
5327 tcg_gen_movi_i32(tm1, -1);
5328 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
5329 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
5330
5331 tcg_temp_free_i32(t0);
5332 tcg_temp_free_i32(t8);
5333 tcg_temp_free_i32(tm1);
5334 }
5335 #endif
5336
5337 /*** Cache management ***/
5338
5339 /* dcbf */
5340 static void gen_dcbf(DisasContext *ctx)
5341 {
5342 /* XXX: specification says this is treated as a load by the MMU */
5343 TCGv t0;
5344 gen_set_access_type(ctx, ACCESS_CACHE);
5345 t0 = tcg_temp_new();
5346 gen_addr_reg_index(ctx, t0);
5347 gen_qemu_ld8u(ctx, t0, t0);
5348 tcg_temp_free(t0);
5349 }
5350
5351 /* dcbfep (external PID dcbf) */
5352 static void gen_dcbfep(DisasContext *ctx)
5353 {
5354 /* XXX: specification says this is treated as a load by the MMU */
5355 TCGv t0;
5356 CHK_SV;
5357 gen_set_access_type(ctx, ACCESS_CACHE);
5358 t0 = tcg_temp_new();
5359 gen_addr_reg_index(ctx, t0);
5360 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5361 tcg_temp_free(t0);
5362 }
5363
5364 /* dcbi (Supervisor only) */
5365 static void gen_dcbi(DisasContext *ctx)
5366 {
5367 #if defined(CONFIG_USER_ONLY)
5368 GEN_PRIV;
5369 #else
5370 TCGv EA, val;
5371
5372 CHK_SV;
5373 EA = tcg_temp_new();
5374 gen_set_access_type(ctx, ACCESS_CACHE);
5375 gen_addr_reg_index(ctx, EA);
5376 val = tcg_temp_new();
5377 /* XXX: specification says this should be treated as a store by the MMU */
5378 gen_qemu_ld8u(ctx, val, EA);
5379 gen_qemu_st8(ctx, val, EA);
5380 tcg_temp_free(val);
5381 tcg_temp_free(EA);
5382 #endif /* defined(CONFIG_USER_ONLY) */
5383 }
5384
5385 /* dcbst */
5386 static void gen_dcbst(DisasContext *ctx)
5387 {
5388 /* XXX: specification says this is treated as a load by the MMU */
5389 TCGv t0;
5390 gen_set_access_type(ctx, ACCESS_CACHE);
5391 t0 = tcg_temp_new();
5392 gen_addr_reg_index(ctx, t0);
5393 gen_qemu_ld8u(ctx, t0, t0);
5394 tcg_temp_free(t0);
5395 }
5396
5397 /* dcbstep (external PID version of dcbst) */
5398 static void gen_dcbstep(DisasContext *ctx)
5399 {
5400 /* XXX: specification says this is treated as a load by the MMU */
5401 TCGv t0;
5402 gen_set_access_type(ctx, ACCESS_CACHE);
5403 t0 = tcg_temp_new();
5404 gen_addr_reg_index(ctx, t0);
5405 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5406 tcg_temp_free(t0);
5407 }
5408
5409 /* dcbt */
5410 static void gen_dcbt(DisasContext *ctx)
5411 {
5412 /*
5413 * interpreted as no-op
5414 * XXX: specification says this is treated as a load by the MMU but
5415 * does not generate any exception
5416 */
5417 }
5418
5419 /* dcbtep */
5420 static void gen_dcbtep(DisasContext *ctx)
5421 {
5422 /*
5423 * interpreted as no-op
5424 * XXX: specification says this is treated as a load by the MMU but
5425 * does not generate any exception
5426 */
5427 }
5428
5429 /* dcbtst */
5430 static void gen_dcbtst(DisasContext *ctx)
5431 {
5432 /*
5433 * interpreted as no-op
5434 * XXX: specification says this is treated as a load by the MMU but
5435 * does not generate any exception
5436 */
5437 }
5438
5439 /* dcbtstep */
5440 static void gen_dcbtstep(DisasContext *ctx)
5441 {
5442 /*
5443 * interpreted as no-op
5444 * XXX: specification says this is treated as a load by the MMU but
5445 * does not generate any exception
5446 */
5447 }
5448
5449 /* dcbtls */
5450 static void gen_dcbtls(DisasContext *ctx)
5451 {
5452 /* Always fails locking the cache */
5453 TCGv t0 = tcg_temp_new();
5454 gen_load_spr(t0, SPR_Exxx_L1CSR0);
5455 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
5456 gen_store_spr(SPR_Exxx_L1CSR0, t0);
5457 tcg_temp_free(t0);
5458 }
5459
5460 /* dcbz */
5461 static void gen_dcbz(DisasContext *ctx)
5462 {
5463 TCGv tcgv_addr;
5464 TCGv_i32 tcgv_op;
5465
5466 gen_set_access_type(ctx, ACCESS_CACHE);
5467 tcgv_addr = tcg_temp_new();
5468 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5469 gen_addr_reg_index(ctx, tcgv_addr);
5470 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5471 tcg_temp_free(tcgv_addr);
5472 tcg_temp_free_i32(tcgv_op);
5473 }
5474
5475 /* dcbzep */
5476 static void gen_dcbzep(DisasContext *ctx)
5477 {
5478 TCGv tcgv_addr;
5479 TCGv_i32 tcgv_op;
5480
5481 gen_set_access_type(ctx, ACCESS_CACHE);
5482 tcgv_addr = tcg_temp_new();
5483 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5484 gen_addr_reg_index(ctx, tcgv_addr);
5485 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5486 tcg_temp_free(tcgv_addr);
5487 tcg_temp_free_i32(tcgv_op);
5488 }
5489
5490 /* dst / dstt */
5491 static void gen_dst(DisasContext *ctx)
5492 {
5493 if (rA(ctx->opcode) == 0) {
5494 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5495 } else {
5496 /* interpreted as no-op */
5497 }
5498 }
5499
5500 /* dstst / dststt */
5501 static void gen_dstst(DisasContext *ctx)
5502 {
5503 if (rA(ctx->opcode) == 0) {
5504 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5505 } else {
5506 /* interpreted as no-op */
5507 }
5508
5509 }
5510
5511 /* dss / dssall */
5512 static void gen_dss(DisasContext *ctx)
5513 {
5514 /* interpreted as no-op */
5515 }
5516
5517 /* icbi */
5518 static void gen_icbi(DisasContext *ctx)
5519 {
5520 TCGv t0;
5521 gen_set_access_type(ctx, ACCESS_CACHE);
5522 t0 = tcg_temp_new();
5523 gen_addr_reg_index(ctx, t0);
5524 gen_helper_icbi(cpu_env, t0);
5525 tcg_temp_free(t0);
5526 }
5527
5528 /* icbiep */
5529 static void gen_icbiep(DisasContext *ctx)
5530 {
5531 TCGv t0;
5532 gen_set_access_type(ctx, ACCESS_CACHE);
5533 t0 = tcg_temp_new();
5534 gen_addr_reg_index(ctx, t0);
5535 gen_helper_icbiep(cpu_env, t0);
5536 tcg_temp_free(t0);
5537 }
5538
5539 /* Optional: */
5540 /* dcba */
5541 static void gen_dcba(DisasContext *ctx)
5542 {
5543 /*
5544 * interpreted as no-op
5545 * XXX: specification says this is treated as a store by the MMU
5546 * but does not generate any exception
5547 */
5548 }
5549
5550 /*** Segment register manipulation ***/
5551 /* Supervisor only: */
5552
5553 /* mfsr */
5554 static void gen_mfsr(DisasContext *ctx)
5555 {
5556 #if defined(CONFIG_USER_ONLY)
5557 GEN_PRIV;
5558 #else
5559 TCGv t0;
5560
5561 CHK_SV;
5562 t0 = tcg_const_tl(SR(ctx->opcode));
5563 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5564 tcg_temp_free(t0);
5565 #endif /* defined(CONFIG_USER_ONLY) */
5566 }
5567
5568 /* mfsrin */
5569 static void gen_mfsrin(DisasContext *ctx)
5570 {
5571 #if defined(CONFIG_USER_ONLY)
5572 GEN_PRIV;
5573 #else
5574 TCGv t0;
5575
5576 CHK_SV;
5577 t0 = tcg_temp_new();
5578 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5579 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5580 tcg_temp_free(t0);
5581 #endif /* defined(CONFIG_USER_ONLY) */
5582 }
5583
5584 /* mtsr */
5585 static void gen_mtsr(DisasContext *ctx)
5586 {
5587 #if defined(CONFIG_USER_ONLY)
5588 GEN_PRIV;
5589 #else
5590 TCGv t0;
5591
5592 CHK_SV;
5593 t0 = tcg_const_tl(SR(ctx->opcode));
5594 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5595 tcg_temp_free(t0);
5596 #endif /* defined(CONFIG_USER_ONLY) */
5597 }
5598
5599 /* mtsrin */
5600 static void gen_mtsrin(DisasContext *ctx)
5601 {
5602 #if defined(CONFIG_USER_ONLY)
5603 GEN_PRIV;
5604 #else
5605 TCGv t0;
5606 CHK_SV;
5607
5608 t0 = tcg_temp_new();
5609 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5610 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5611 tcg_temp_free(t0);
5612 #endif /* defined(CONFIG_USER_ONLY) */
5613 }
5614
5615 #if defined(TARGET_PPC64)
5616 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5617
5618 /* mfsr */
5619 static void gen_mfsr_64b(DisasContext *ctx)
5620 {
5621 #if defined(CONFIG_USER_ONLY)
5622 GEN_PRIV;
5623 #else
5624 TCGv t0;
5625
5626 CHK_SV;
5627 t0 = tcg_const_tl(SR(ctx->opcode));
5628 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5629 tcg_temp_free(t0);
5630 #endif /* defined(CONFIG_USER_ONLY) */
5631 }
5632
5633 /* mfsrin */
5634 static void gen_mfsrin_64b(DisasContext *ctx)
5635 {
5636 #if defined(CONFIG_USER_ONLY)
5637 GEN_PRIV;
5638 #else
5639 TCGv t0;
5640
5641 CHK_SV;
5642 t0 = tcg_temp_new();
5643 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5644 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5645 tcg_temp_free(t0);
5646 #endif /* defined(CONFIG_USER_ONLY) */
5647 }
5648
5649 /* mtsr */
5650 static void gen_mtsr_64b(DisasContext *ctx)
5651 {
5652 #if defined(CONFIG_USER_ONLY)
5653 GEN_PRIV;
5654 #else
5655 TCGv t0;
5656
5657 CHK_SV;
5658 t0 = tcg_const_tl(SR(ctx->opcode));
5659 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5660 tcg_temp_free(t0);
5661 #endif /* defined(CONFIG_USER_ONLY) */
5662 }
5663
5664 /* mtsrin */
5665 static void gen_mtsrin_64b(DisasContext *ctx)
5666 {
5667 #if defined(CONFIG_USER_ONLY)
5668 GEN_PRIV;
5669 #else
5670 TCGv t0;
5671
5672 CHK_SV;
5673 t0 = tcg_temp_new();
5674 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5675 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5676 tcg_temp_free(t0);
5677 #endif /* defined(CONFIG_USER_ONLY) */
5678 }
5679
5680 /* slbmte */
5681 static void gen_slbmte(DisasContext *ctx)
5682 {
5683 #if defined(CONFIG_USER_ONLY)
5684 GEN_PRIV;
5685 #else
5686 CHK_SV;
5687
5688 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
5689 cpu_gpr[rS(ctx->opcode)]);
5690 #endif /* defined(CONFIG_USER_ONLY) */
5691 }
5692
5693 static void gen_slbmfee(DisasContext *ctx)
5694 {
5695 #if defined(CONFIG_USER_ONLY)
5696 GEN_PRIV;
5697 #else
5698 CHK_SV;
5699
5700 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5701 cpu_gpr[rB(ctx->opcode)]);
5702 #endif /* defined(CONFIG_USER_ONLY) */
5703 }
5704
5705 static void gen_slbmfev(DisasContext *ctx)
5706 {
5707 #if defined(CONFIG_USER_ONLY)
5708 GEN_PRIV;
5709 #else
5710 CHK_SV;
5711
5712 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5713 cpu_gpr[rB(ctx->opcode)]);
5714 #endif /* defined(CONFIG_USER_ONLY) */
5715 }
5716
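/*
 * slbfee.: the helper searches the SLB using RB and returns the
 * result in RS; a value of -1 means no matching entry, in which case
 * RS is cleared and CR0.EQ stays clear, otherwise CR0.EQ is set.
 */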
5717 static void gen_slbfee_(DisasContext *ctx)
5718 {
5719 #if defined(CONFIG_USER_ONLY)
5720 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5721 #else
5722 TCGLabel *l1, *l2;
5723
5724 if (unlikely(ctx->pr)) {
5725 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5726 return;
5727 }
5728 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5729 cpu_gpr[rB(ctx->opcode)]);
5730 l1 = gen_new_label();
5731 l2 = gen_new_label();
5732 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5733 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
5734 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
5735 tcg_gen_br(l2);
5736 gen_set_label(l1);
5737 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
5738 gen_set_label(l2);
5739 #endif
5740 }
5741 #endif /* defined(TARGET_PPC64) */
5742
5743 /*** Lookaside buffer management ***/
5744 /* Optional & supervisor only: */
5745
5746 /* tlbia */
5747 static void gen_tlbia(DisasContext *ctx)
5748 {
5749 #if defined(CONFIG_USER_ONLY)
5750 GEN_PRIV;
5751 #else
5752 CHK_HV;
5753
5754 gen_helper_tlbia(cpu_env);
5755 #endif /* defined(CONFIG_USER_ONLY) */
5756 }
5757
5758 /* tlbiel */
5759 static void gen_tlbiel(DisasContext *ctx)
5760 {
5761 #if defined(CONFIG_USER_ONLY)
5762 GEN_PRIV;
5763 #else
5764 CHK_SV;
5765
5766 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5767 #endif /* defined(CONFIG_USER_ONLY) */
5768 }
5769
5770 /* tlbie */
5771 static void gen_tlbie(DisasContext *ctx)
5772 {
5773 #if defined(CONFIG_USER_ONLY)
5774 GEN_PRIV;
5775 #else
5776 TCGv_i32 t1;
5777
5778 if (ctx->gtse) {
5779 CHK_SV; /* If gtse is set then tlbie is supervisor privileged */
5780 } else {
5781 CHK_HV; /* Else hypervisor privileged */
5782 }
5783
5784 if (NARROW_MODE(ctx)) {
5785 TCGv t0 = tcg_temp_new();
5786 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
5787 gen_helper_tlbie(cpu_env, t0);
5788 tcg_temp_free(t0);
5789 } else {
5790 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5791 }
5792 t1 = tcg_temp_new_i32();
5793 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
5794 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
5795 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
5796 tcg_temp_free_i32(t1);
5797 #endif /* defined(CONFIG_USER_ONLY) */
5798 }
5799
5800 /* tlbsync */
5801 static void gen_tlbsync(DisasContext *ctx)
5802 {
5803 #if defined(CONFIG_USER_ONLY)
5804 GEN_PRIV;
5805 #else
5806
5807 if (ctx->gtse) {
5808 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */
5809 } else {
5810 CHK_HV; /* Else hypervisor privileged */
5811 }
5812
5813 /* BookS does both ptesync and tlbsync, so make tlbsync a nop for server */
5814 if (ctx->insns_flags & PPC_BOOKE) {
5815 gen_check_tlb_flush(ctx, true);
5816 }
5817 #endif /* defined(CONFIG_USER_ONLY) */
5818 }
5819
5820 #if defined(TARGET_PPC64)
5821 /* slbia */
5822 static void gen_slbia(DisasContext *ctx)
5823 {
5824 #if defined(CONFIG_USER_ONLY)
5825 GEN_PRIV;
5826 #else
5827 uint32_t ih = (ctx->opcode >> 21) & 0x7;
5828 TCGv_i32 t0 = tcg_const_i32(ih);
5829
5830 CHK_SV;
5831
5832 gen_helper_slbia(cpu_env, t0);
5833 tcg_temp_free_i32(t0);
5834 #endif /* defined(CONFIG_USER_ONLY) */
5835 }
5836
5837 /* slbie */
5838 static void gen_slbie(DisasContext *ctx)
5839 {
5840 #if defined(CONFIG_USER_ONLY)
5841 GEN_PRIV;
5842 #else
5843 CHK_SV;
5844
5845 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5846 #endif /* defined(CONFIG_USER_ONLY) */
5847 }
5848
5849 /* slbieg */
5850 static void gen_slbieg(DisasContext *ctx)
5851 {
5852 #if defined(CONFIG_USER_ONLY)
5853 GEN_PRIV;
5854 #else
5855 CHK_SV;
5856
5857 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5858 #endif /* defined(CONFIG_USER_ONLY) */
5859 }
5860
5861 /* slbsync */
5862 static void gen_slbsync(DisasContext *ctx)
5863 {
5864 #if defined(CONFIG_USER_ONLY)
5865 GEN_PRIV;
5866 #else
5867 CHK_SV;
5868 gen_check_tlb_flush(ctx, true);
5869 #endif /* defined(CONFIG_USER_ONLY) */
5870 }
5871
5872 #endif /* defined(TARGET_PPC64) */
5873
5874 /*** External control ***/
5875 /* Optional: */
5876
5877 /* eciwx */
5878 static void gen_eciwx(DisasContext *ctx)
5879 {
5880 TCGv t0;
5881 /* Should check EAR[E] ! */
5882 gen_set_access_type(ctx, ACCESS_EXT);
5883 t0 = tcg_temp_new();
5884 gen_addr_reg_index(ctx, t0);
5885 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5886 DEF_MEMOP(MO_UL | MO_ALIGN));
5887 tcg_temp_free(t0);
5888 }
5889
5890 /* ecowx */
5891 static void gen_ecowx(DisasContext *ctx)
5892 {
5893 TCGv t0;
5894 /* Should check EAR[E] ! */
5895 gen_set_access_type(ctx, ACCESS_EXT);
5896 t0 = tcg_temp_new();
5897 gen_addr_reg_index(ctx, t0);
5898 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5899 DEF_MEMOP(MO_UL | MO_ALIGN));
5900 tcg_temp_free(t0);
5901 }
5902
5903 /* PowerPC 601 specific instructions */
5904
5905 /* abs - abs. */
5906 static void gen_abs(DisasContext *ctx)
5907 {
5908 TCGv d = cpu_gpr[rD(ctx->opcode)];
5909 TCGv a = cpu_gpr[rA(ctx->opcode)];
5910
5911 tcg_gen_abs_tl(d, a);
5912 if (unlikely(Rc(ctx->opcode) != 0)) {
5913 gen_set_Rc0(ctx, d);
5914 }
5915 }
5916
5917 /* abso - abso. */
5918 static void gen_abso(DisasContext *ctx)
5919 {
5920 TCGv d = cpu_gpr[rD(ctx->opcode)];
5921 TCGv a = cpu_gpr[rA(ctx->opcode)];
5922
5923 tcg_gen_setcondi_tl(TCG_COND_EQ, cpu_ov, a, 0x80000000);
5924 tcg_gen_abs_tl(d, a);
5925 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
5926 if (unlikely(Rc(ctx->opcode) != 0)) {
5927 gen_set_Rc0(ctx, d);
5928 }
5929 }
5930
5931 /* clcs */
5932 static void gen_clcs(DisasContext *ctx)
5933 {
5934 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode));
5935 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5936 tcg_temp_free_i32(t0);
5937 /* Rc=1 sets CR0 to an undefined state */
5938 }
5939
5940 /* div - div. */
5941 static void gen_div(DisasContext *ctx)
5942 {
5943 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
5944 cpu_gpr[rB(ctx->opcode)]);
5945 if (unlikely(Rc(ctx->opcode) != 0)) {
5946 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5947 }
5948 }
5949
5950 /* divo - divo. */
5951 static void gen_divo(DisasContext *ctx)
5952 {
5953 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
5954 cpu_gpr[rB(ctx->opcode)]);
5955 if (unlikely(Rc(ctx->opcode) != 0)) {
5956 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5957 }
5958 }
5959
5960 /* divs - divs. */
5961 static void gen_divs(DisasContext *ctx)
5962 {
5963 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)],
5964 cpu_gpr[rB(ctx->opcode)]);
5965 if (unlikely(Rc(ctx->opcode) != 0)) {
5966 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5967 }
5968 }
5969
5970 /* divso - divso. */
5971 static void gen_divso(DisasContext *ctx)
5972 {
5973 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env,
5974 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
5975 if (unlikely(Rc(ctx->opcode) != 0)) {
5976 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5977 }
5978 }
5979
5980 /* doz - doz. */
5981 static void gen_doz(DisasContext *ctx)
5982 {
5983 TCGLabel *l1 = gen_new_label();
5984 TCGLabel *l2 = gen_new_label();
5985 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)],
5986 cpu_gpr[rA(ctx->opcode)], l1);
5987 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
5988 cpu_gpr[rA(ctx->opcode)]);
5989 tcg_gen_br(l2);
5990 gen_set_label(l1);
5991 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
5992 gen_set_label(l2);
5993 if (unlikely(Rc(ctx->opcode) != 0)) {
5994 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
5995 }
5996 }
5997
5998 /* dozo - dozo. */
5999 static void gen_dozo(DisasContext *ctx)
6000 {
6001 TCGLabel *l1 = gen_new_label();
6002 TCGLabel *l2 = gen_new_label();
6003 TCGv t0 = tcg_temp_new();
6004 TCGv t1 = tcg_temp_new();
6005 TCGv t2 = tcg_temp_new();
6006 /* Start with XER OV disabled, the most likely case */
6007 tcg_gen_movi_tl(cpu_ov, 0);
6008 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)],
6009 cpu_gpr[rA(ctx->opcode)], l1);
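/*
 * Signed overflow of rB - rA happens when rA and rB have different
 * signs and the result has the same sign as rA; this is detected
 * below via the sign bit of (rB ^ rA) & ~(rA ^ result).
 */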
6010 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6011 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6012 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0);
6013 tcg_gen_andc_tl(t1, t1, t2);
6014 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
6015 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
6016 tcg_gen_movi_tl(cpu_ov, 1);
6017 tcg_gen_movi_tl(cpu_so, 1);
6018 tcg_gen_br(l2);
6019 gen_set_label(l1);
6020 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
6021 gen_set_label(l2);
6022 tcg_temp_free(t0);
6023 tcg_temp_free(t1);
6024 tcg_temp_free(t2);
6025 if (unlikely(Rc(ctx->opcode) != 0)) {
6026 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6027 }
6028 }
6029
6030 /* dozi */
6031 static void gen_dozi(DisasContext *ctx)
6032 {
6033 target_long simm = SIMM(ctx->opcode);
6034 TCGLabel *l1 = gen_new_label();
6035 TCGLabel *l2 = gen_new_label();
6036 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1);
6037 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]);
6038 tcg_gen_br(l2);
6039 gen_set_label(l1);
6040 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
6041 gen_set_label(l2);
6042 if (unlikely(Rc(ctx->opcode) != 0)) {
6043 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6044 }
6045 }
6046
6047 /* lscbx - lscbx. */
6048 static void gen_lscbx(DisasContext *ctx)
6049 {
6050 TCGv t0 = tcg_temp_new();
6051 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode));
6052 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode));
6053 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode));
6054
6055 gen_addr_reg_index(ctx, t0);
6056 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3);
6057 tcg_temp_free_i32(t1);
6058 tcg_temp_free_i32(t2);
6059 tcg_temp_free_i32(t3);
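/* The helper returns the byte count, which replaces the low 7 bits of XER */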
6060 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
6061 tcg_gen_or_tl(cpu_xer, cpu_xer, t0);
6062 if (unlikely(Rc(ctx->opcode) != 0)) {
6063 gen_set_Rc0(ctx, t0);
6064 }
6065 tcg_temp_free(t0);
6066 }
6067
6068 /* maskg - maskg. */
6069 static void gen_maskg(DisasContext *ctx)
6070 {
6071 TCGLabel *l1 = gen_new_label();
6072 TCGv t0 = tcg_temp_new();
6073 TCGv t1 = tcg_temp_new();
6074 TCGv t2 = tcg_temp_new();
6075 TCGv t3 = tcg_temp_new();
6076 tcg_gen_movi_tl(t3, 0xFFFFFFFF);
6077 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6078 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
6079 tcg_gen_addi_tl(t2, t0, 1);
6080 tcg_gen_shr_tl(t2, t3, t2);
6081 tcg_gen_shr_tl(t3, t3, t1);
6082 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
6083 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
6084 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6085 gen_set_label(l1);
6086 tcg_temp_free(t0);
6087 tcg_temp_free(t1);
6088 tcg_temp_free(t2);
6089 tcg_temp_free(t3);
6090 if (unlikely(Rc(ctx->opcode) != 0)) {
6091 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6092 }
6093 }
6094
6095 /* maskir - maskir. */
6096 static void gen_maskir(DisasContext *ctx)
6097 {
6098 TCGv t0 = tcg_temp_new();
6099 TCGv t1 = tcg_temp_new();
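/* rA = (rS & rB) | (rA & ~rB): insert the rS bits selected by the mask in rB */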
6100 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6101 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6102 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6103 tcg_temp_free(t0);
6104 tcg_temp_free(t1);
6105 if (unlikely(Rc(ctx->opcode) != 0)) {
6106 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6107 }
6108 }
6109
6110 /* mul - mul. */
6111 static void gen_mul(DisasContext *ctx)
6112 {
6113 TCGv_i64 t0 = tcg_temp_new_i64();
6114 TCGv_i64 t1 = tcg_temp_new_i64();
6115 TCGv t2 = tcg_temp_new();
6116 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
6117 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
6118 tcg_gen_mul_i64(t0, t0, t1);
6119 tcg_gen_trunc_i64_tl(t2, t0);
6120 gen_store_spr(SPR_MQ, t2);
6121 tcg_gen_shri_i64(t1, t0, 32);
6122 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
6123 tcg_temp_free_i64(t0);
6124 tcg_temp_free_i64(t1);
6125 tcg_temp_free(t2);
6126 if (unlikely(Rc(ctx->opcode) != 0)) {
6127 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6128 }
6129 }
6130
6131 /* mulo - mulo. */
6132 static void gen_mulo(DisasContext *ctx)
6133 {
6134 TCGLabel *l1 = gen_new_label();
6135 TCGv_i64 t0 = tcg_temp_new_i64();
6136 TCGv_i64 t1 = tcg_temp_new_i64();
6137 TCGv t2 = tcg_temp_new();
6138 /* Start with XER OV disabled, the most likely case */
6139 tcg_gen_movi_tl(cpu_ov, 0);
6140 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
6141 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
6142 tcg_gen_mul_i64(t0, t0, t1);
6143 tcg_gen_trunc_i64_tl(t2, t0);
6144 gen_store_spr(SPR_MQ, t2);
6145 tcg_gen_shri_i64(t1, t0, 32);
6146 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
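/* OV is set when the 64-bit product differs from the sign-extension of its low 32 bits */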
6147 tcg_gen_ext32s_i64(t1, t0);
6148 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
6149 tcg_gen_movi_tl(cpu_ov, 1);
6150 tcg_gen_movi_tl(cpu_so, 1);
6151 gen_set_label(l1);
6152 tcg_temp_free_i64(t0);
6153 tcg_temp_free_i64(t1);
6154 tcg_temp_free(t2);
6155 if (unlikely(Rc(ctx->opcode) != 0)) {
6156 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
6157 }
6158 }
6159
6160 /* nabs - nabs. */
6161 static void gen_nabs(DisasContext *ctx)
6162 {
6163 TCGv d = cpu_gpr[rD(ctx->opcode)];
6164 TCGv a = cpu_gpr[rA(ctx->opcode)];
6165
6166 tcg_gen_abs_tl(d, a);
6167 tcg_gen_neg_tl(d, d);
6168 if (unlikely(Rc(ctx->opcode) != 0)) {
6169 gen_set_Rc0(ctx, d);
6170 }
6171 }
6172
6173 /* nabso - nabso. */
6174 static void gen_nabso(DisasContext *ctx)
6175 {
6176 TCGv d = cpu_gpr[rD(ctx->opcode)];
6177 TCGv a = cpu_gpr[rA(ctx->opcode)];
6178
6179 tcg_gen_abs_tl(d, a);
6180 tcg_gen_neg_tl(d, d);
6181 /* nabs never overflows */
6182 tcg_gen_movi_tl(cpu_ov, 0);
6183 if (unlikely(Rc(ctx->opcode) != 0)) {
6184 gen_set_Rc0(ctx, d);
6185 }
6186 }
6187
6188 /* rlmi - rlmi. */
6189 static void gen_rlmi(DisasContext *ctx)
6190 {
6191 uint32_t mb = MB(ctx->opcode);
6192 uint32_t me = ME(ctx->opcode);
6193 TCGv t0 = tcg_temp_new();
6194 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6195 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6196 tcg_gen_andi_tl(t0, t0, MASK(mb, me));
6197 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
6198 ~MASK(mb, me));
6199 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
6200 tcg_temp_free(t0);
6201 if (unlikely(Rc(ctx->opcode) != 0)) {
6202 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6203 }
6204 }
6205
6206 /* rrib - rrib. */
6207 static void gen_rrib(DisasContext *ctx)
6208 {
6209 TCGv t0 = tcg_temp_new();
6210 TCGv t1 = tcg_temp_new();
6211 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6212 tcg_gen_movi_tl(t1, 0x80000000);
6213 tcg_gen_shr_tl(t1, t1, t0);
6214 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6215 tcg_gen_and_tl(t0, t0, t1);
6216 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
6217 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6218 tcg_temp_free(t0);
6219 tcg_temp_free(t1);
6220 if (unlikely(Rc(ctx->opcode) != 0)) {
6221 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6222 }
6223 }
6224
6225 /* sle - sle. */
6226 static void gen_sle(DisasContext *ctx)
6227 {
6228 TCGv t0 = tcg_temp_new();
6229 TCGv t1 = tcg_temp_new();
6230 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6231 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6232 tcg_gen_subfi_tl(t1, 32, t1);
6233 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6234 tcg_gen_or_tl(t1, t0, t1);
6235 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6236 gen_store_spr(SPR_MQ, t1);
6237 tcg_temp_free(t0);
6238 tcg_temp_free(t1);
6239 if (unlikely(Rc(ctx->opcode) != 0)) {
6240 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6241 }
6242 }
6243
6244 /* sleq - sleq. */
6245 static void gen_sleq(DisasContext *ctx)
6246 {
6247 TCGv t0 = tcg_temp_new();
6248 TCGv t1 = tcg_temp_new();
6249 TCGv t2 = tcg_temp_new();
6250 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6251 tcg_gen_movi_tl(t2, 0xFFFFFFFF);
6252 tcg_gen_shl_tl(t2, t2, t0);
6253 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6254 gen_load_spr(t1, SPR_MQ);
6255 gen_store_spr(SPR_MQ, t0);
6256 tcg_gen_and_tl(t0, t0, t2);
6257 tcg_gen_andc_tl(t1, t1, t2);
6258 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6259 tcg_temp_free(t0);
6260 tcg_temp_free(t1);
6261 tcg_temp_free(t2);
6262 if (unlikely(Rc(ctx->opcode) != 0)) {
6263 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6264 }
6265 }
6266
6267 /* sliq - sliq. */
6268 static void gen_sliq(DisasContext *ctx)
6269 {
6270 int sh = SH(ctx->opcode);
6271 TCGv t0 = tcg_temp_new();
6272 TCGv t1 = tcg_temp_new();
6273 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6274 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6275 tcg_gen_or_tl(t1, t0, t1);
6276 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6277 gen_store_spr(SPR_MQ, t1);
6278 tcg_temp_free(t0);
6279 tcg_temp_free(t1);
6280 if (unlikely(Rc(ctx->opcode) != 0)) {
6281 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6282 }
6283 }
6284
6285 /* slliq - slliq. */
6286 static void gen_slliq(DisasContext *ctx)
6287 {
6288 int sh = SH(ctx->opcode);
6289 TCGv t0 = tcg_temp_new();
6290 TCGv t1 = tcg_temp_new();
6291 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6292 gen_load_spr(t1, SPR_MQ);
6293 gen_store_spr(SPR_MQ, t0);
6294 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
6295 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
6296 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6297 tcg_temp_free(t0);
6298 tcg_temp_free(t1);
6299 if (unlikely(Rc(ctx->opcode) != 0)) {
6300 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6301 }
6302 }
6303
6304 /* sllq - sllq. */
6305 static void gen_sllq(DisasContext *ctx)
6306 {
6307 TCGLabel *l1 = gen_new_label();
6308 TCGLabel *l2 = gen_new_label();
6309 TCGv t0 = tcg_temp_local_new();
6310 TCGv t1 = tcg_temp_local_new();
6311 TCGv t2 = tcg_temp_local_new();
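/*
 * Bit 5 of the shift count selects the path: for shifts of 32 or more
 * the result is MQ masked with the left-shifted all-ones pattern,
 * otherwise the left-shifted rS is merged with the MQ bits outside
 * that mask.
 */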
6312 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6313 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6314 tcg_gen_shl_tl(t1, t1, t2);
6315 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6316 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6317 gen_load_spr(t0, SPR_MQ);
6318 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6319 tcg_gen_br(l2);
6320 gen_set_label(l1);
6321 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6322 gen_load_spr(t2, SPR_MQ);
6323 tcg_gen_andc_tl(t1, t2, t1);
6324 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6325 gen_set_label(l2);
6326 tcg_temp_free(t0);
6327 tcg_temp_free(t1);
6328 tcg_temp_free(t2);
6329 if (unlikely(Rc(ctx->opcode) != 0)) {
6330 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6331 }
6332 }
6333
6334 /* slq - slq. */
6335 static void gen_slq(DisasContext *ctx)
6336 {
6337 TCGLabel *l1 = gen_new_label();
6338 TCGv t0 = tcg_temp_new();
6339 TCGv t1 = tcg_temp_new();
6340 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6341 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6342 tcg_gen_subfi_tl(t1, 32, t1);
6343 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6344 tcg_gen_or_tl(t1, t0, t1);
6345 gen_store_spr(SPR_MQ, t1);
6346 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
6347 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6348 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6349 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
6350 gen_set_label(l1);
6351 tcg_temp_free(t0);
6352 tcg_temp_free(t1);
6353 if (unlikely(Rc(ctx->opcode) != 0)) {
6354 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6355 }
6356 }
6357
6358 /* sraiq - sraiq. */
6359 static void gen_sraiq(DisasContext *ctx)
6360 {
6361 int sh = SH(ctx->opcode);
6362 TCGLabel *l1 = gen_new_label();
6363 TCGv t0 = tcg_temp_new();
6364 TCGv t1 = tcg_temp_new();
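/*
 * MQ receives rS rotated right by sh; CA is set only when rS is
 * negative and at least one 1 bit was shifted out.
 */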
6365 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6366 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6367 tcg_gen_or_tl(t0, t0, t1);
6368 gen_store_spr(SPR_MQ, t0);
6369 tcg_gen_movi_tl(cpu_ca, 0);
6370 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
6371 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
6372 tcg_gen_movi_tl(cpu_ca, 1);
6373 gen_set_label(l1);
6374 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
6375 tcg_temp_free(t0);
6376 tcg_temp_free(t1);
6377 if (unlikely(Rc(ctx->opcode) != 0)) {
6378 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6379 }
6380 }
6381
6382 /* sraq - sraq. */
6383 static void gen_sraq(DisasContext *ctx)
6384 {
6385 TCGLabel *l1 = gen_new_label();
6386 TCGLabel *l2 = gen_new_label();
6387 TCGv t0 = tcg_temp_new();
6388 TCGv t1 = tcg_temp_local_new();
6389 TCGv t2 = tcg_temp_local_new();
6390 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6391 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6392 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
6393 tcg_gen_subfi_tl(t2, 32, t2);
6394 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
6395 tcg_gen_or_tl(t0, t0, t2);
6396 gen_store_spr(SPR_MQ, t0);
6397 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6398 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1);
6399 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
6400 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
6401 gen_set_label(l1);
6402 tcg_temp_free(t0);
6403 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
6404 tcg_gen_movi_tl(cpu_ca, 0);
6405 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
6406 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
6407 tcg_gen_movi_tl(cpu_ca, 1);
6408 gen_set_label(l2);
6409 tcg_temp_free(t1);
6410 tcg_temp_free(t2);
6411 if (unlikely(Rc(ctx->opcode) != 0)) {
6412 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6413 }
6414 }
6415
6416 /* sre - sre. */
6417 static void gen_sre(DisasContext *ctx)
6418 {
6419 TCGv t0 = tcg_temp_new();
6420 TCGv t1 = tcg_temp_new();
6421 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6422 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6423 tcg_gen_subfi_tl(t1, 32, t1);
6424 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6425 tcg_gen_or_tl(t1, t0, t1);
6426 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6427 gen_store_spr(SPR_MQ, t1);
6428 tcg_temp_free(t0);
6429 tcg_temp_free(t1);
6430 if (unlikely(Rc(ctx->opcode) != 0)) {
6431 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6432 }
6433 }
6434
6435 /* srea - srea. */
6436 static void gen_srea(DisasContext *ctx)
6437 {
6438 TCGv t0 = tcg_temp_new();
6439 TCGv t1 = tcg_temp_new();
6440 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6441 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6442 gen_store_spr(SPR_MQ, t0);
6443 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1);
6444 tcg_temp_free(t0);
6445 tcg_temp_free(t1);
6446 if (unlikely(Rc(ctx->opcode) != 0)) {
6447 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6448 }
6449 }
6450
6451 /* sreq */
6452 static void gen_sreq(DisasContext *ctx)
6453 {
6454 TCGv t0 = tcg_temp_new();
6455 TCGv t1 = tcg_temp_new();
6456 TCGv t2 = tcg_temp_new();
6457 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
6458 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6459 tcg_gen_shr_tl(t1, t1, t0);
6460 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
6461 gen_load_spr(t2, SPR_MQ);
6462 gen_store_spr(SPR_MQ, t0);
6463 tcg_gen_and_tl(t0, t0, t1);
6464 tcg_gen_andc_tl(t2, t2, t1);
6465 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
6466 tcg_temp_free(t0);
6467 tcg_temp_free(t1);
6468 tcg_temp_free(t2);
6469 if (unlikely(Rc(ctx->opcode) != 0)) {
6470 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6471 }
6472 }
6473
6474 /* sriq */
6475 static void gen_sriq(DisasContext *ctx)
6476 {
6477 int sh = SH(ctx->opcode);
6478 TCGv t0 = tcg_temp_new();
6479 TCGv t1 = tcg_temp_new();
6480 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6481 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
6482 tcg_gen_or_tl(t1, t0, t1);
6483 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6484 gen_store_spr(SPR_MQ, t1);
6485 tcg_temp_free(t0);
6486 tcg_temp_free(t1);
6487 if (unlikely(Rc(ctx->opcode) != 0)) {
6488 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6489 }
6490 }
6491
6492 /* srliq */
6493 static void gen_srliq(DisasContext *ctx)
6494 {
6495 int sh = SH(ctx->opcode);
6496 TCGv t0 = tcg_temp_new();
6497 TCGv t1 = tcg_temp_new();
6498 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
6499 gen_load_spr(t1, SPR_MQ);
6500 gen_store_spr(SPR_MQ, t0);
6501 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh));
6502 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh));
6503 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6504 tcg_temp_free(t0);
6505 tcg_temp_free(t1);
6506 if (unlikely(Rc(ctx->opcode) != 0)) {
6507 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6508 }
6509 }
6510
6511 /* srlq */
6512 static void gen_srlq(DisasContext *ctx)
6513 {
6514 TCGLabel *l1 = gen_new_label();
6515 TCGLabel *l2 = gen_new_label();
6516 TCGv t0 = tcg_temp_local_new();
6517 TCGv t1 = tcg_temp_local_new();
6518 TCGv t2 = tcg_temp_local_new();
6519 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
6520 tcg_gen_movi_tl(t1, 0xFFFFFFFF);
6521 tcg_gen_shr_tl(t2, t1, t2);
6522 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
6523 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6524 gen_load_spr(t0, SPR_MQ);
6525 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2);
6526 tcg_gen_br(l2);
6527 gen_set_label(l1);
6528 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
6529 tcg_gen_and_tl(t0, t0, t2);
6530 gen_load_spr(t1, SPR_MQ);
6531 tcg_gen_andc_tl(t1, t1, t2);
6532 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
6533 gen_set_label(l2);
6534 tcg_temp_free(t0);
6535 tcg_temp_free(t1);
6536 tcg_temp_free(t2);
6537 if (unlikely(Rc(ctx->opcode) != 0)) {
6538 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6539 }
6540 }
6541
6542 /* srq */
6543 static void gen_srq(DisasContext *ctx)
6544 {
6545 TCGLabel *l1 = gen_new_label();
6546 TCGv t0 = tcg_temp_new();
6547 TCGv t1 = tcg_temp_new();
6548 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
6549 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
6550 tcg_gen_subfi_tl(t1, 32, t1);
6551 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
6552 tcg_gen_or_tl(t1, t0, t1);
6553 gen_store_spr(SPR_MQ, t1);
6554 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
6555 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
6556 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
6557 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
6558 gen_set_label(l1);
6559 tcg_temp_free(t0);
6560 tcg_temp_free(t1);
6561 if (unlikely(Rc(ctx->opcode) != 0)) {
6562 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
6563 }
6564 }
6565
6566 /* PowerPC 602 specific instructions */
6567
6568 /* dsa */
6569 static void gen_dsa(DisasContext *ctx)
6570 {
6571 /* XXX: TODO */
6572 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6573 }
6574
6575 /* esa */
6576 static void gen_esa(DisasContext *ctx)
6577 {
6578 /* XXX: TODO */
6579 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6580 }
6581
6582 /* mfrom */
6583 static void gen_mfrom(DisasContext *ctx)
6584 {
6585 #if defined(CONFIG_USER_ONLY)
6586 GEN_PRIV;
6587 #else
6588 CHK_SV;
6589 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6590 #endif /* defined(CONFIG_USER_ONLY) */
6591 }
6592
6593 /* 602 - 603 - G2 TLB management */
6594
6595 /* tlbld */
6596 static void gen_tlbld_6xx(DisasContext *ctx)
6597 {
6598 #if defined(CONFIG_USER_ONLY)
6599 GEN_PRIV;
6600 #else
6601 CHK_SV;
6602 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6603 #endif /* defined(CONFIG_USER_ONLY) */
6604 }
6605
6606 /* tlbli */
6607 static void gen_tlbli_6xx(DisasContext *ctx)
6608 {
6609 #if defined(CONFIG_USER_ONLY)
6610 GEN_PRIV;
6611 #else
6612 CHK_SV;
6613 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6614 #endif /* defined(CONFIG_USER_ONLY) */
6615 }
6616
6617 /* 74xx TLB management */
6618
6619 /* tlbld */
6620 static void gen_tlbld_74xx(DisasContext *ctx)
6621 {
6622 #if defined(CONFIG_USER_ONLY)
6623 GEN_PRIV;
6624 #else
6625 CHK_SV;
6626 gen_helper_74xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6627 #endif /* defined(CONFIG_USER_ONLY) */
6628 }
6629
6630 /* tlbli */
6631 static void gen_tlbli_74xx(DisasContext *ctx)
6632 {
6633 #if defined(CONFIG_USER_ONLY)
6634 GEN_PRIV;
6635 #else
6636 CHK_SV;
6637 gen_helper_74xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6638 #endif /* defined(CONFIG_USER_ONLY) */
6639 }
6640
6641 /* POWER instructions not in PowerPC 601 */
6642
6643 /* clf */
6644 static void gen_clf(DisasContext *ctx)
6645 {
6646 /* Cache line flush: implemented as no-op */
6647 }
6648
6649 /* cli */
6650 static void gen_cli(DisasContext *ctx)
6651 {
6652 #if defined(CONFIG_USER_ONLY)
6653 GEN_PRIV;
6654 #else
6655 /* Cache line invalidate: privileged and treated as no-op */
6656 CHK_SV;
6657 #endif /* defined(CONFIG_USER_ONLY) */
6658 }
6659
6660 /* dclst */
6661 static void gen_dclst(DisasContext *ctx)
6662 {
6663 /* Data cache line store: treated as no-op */
6664 }
6665
6666 static void gen_mfsri(DisasContext *ctx)
6667 {
6668 #if defined(CONFIG_USER_ONLY)
6669 GEN_PRIV;
6670 #else
6671 int ra = rA(ctx->opcode);
6672 int rd = rD(ctx->opcode);
6673 TCGv t0;
6674
6675 CHK_SV;
6676 t0 = tcg_temp_new();
6677 gen_addr_reg_index(ctx, t0);
6678 tcg_gen_extract_tl(t0, t0, 28, 4);
6679 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
6680 tcg_temp_free(t0);
6681 if (ra != 0 && ra != rd) {
6682 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
6683 }
6684 #endif /* defined(CONFIG_USER_ONLY) */
6685 }
6686
6687 static void gen_rac(DisasContext *ctx)
6688 {
6689 #if defined(CONFIG_USER_ONLY)
6690 GEN_PRIV;
6691 #else
6692 TCGv t0;
6693
6694 CHK_SV;
6695 t0 = tcg_temp_new();
6696 gen_addr_reg_index(ctx, t0);
6697 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6698 tcg_temp_free(t0);
6699 #endif /* defined(CONFIG_USER_ONLY) */
6700 }
6701
6702 static void gen_rfsvc(DisasContext *ctx)
6703 {
6704 #if defined(CONFIG_USER_ONLY)
6705 GEN_PRIV;
6706 #else
6707 CHK_SV;
6708
6709 gen_helper_rfsvc(cpu_env);
6710 ctx->base.is_jmp = DISAS_EXIT;
6711 #endif /* defined(CONFIG_USER_ONLY) */
6712 }
6713
6714 /* svc is not implemented for now */
6715
6716 /* BookE specific instructions */
6717
6718 /* XXX: not implemented on 440? */
6719 static void gen_mfapidi(DisasContext *ctx)
6720 {
6721 /* XXX: TODO */
6722 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6723 }
6724
6725 /* XXX: not implemented on 440? */
6726 static void gen_tlbiva(DisasContext *ctx)
6727 {
6728 #if defined(CONFIG_USER_ONLY)
6729 GEN_PRIV;
6730 #else
6731 TCGv t0;
6732
6733 CHK_SV;
6734 t0 = tcg_temp_new();
6735 gen_addr_reg_index(ctx, t0);
6736 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6737 tcg_temp_free(t0);
6738 #endif /* defined(CONFIG_USER_ONLY) */
6739 }
6740
6741 /* All 405 MAC instructions are translated here */
6742 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
6743 int ra, int rb, int rt, int Rc)
6744 {
6745 TCGv t0, t1;
6746
6747 t0 = tcg_temp_local_new();
6748 t1 = tcg_temp_local_new();
6749
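/*
 * opc3 selects the operand halves and signedness: bit 0 set means
 * signed inputs, and the 0x0C bits pick rA low/rB high (0x04),
 * both high (0x00) or both low (0x0C) halves.  opc2 distinguishes
 * plain multiply (0x08), multiply-accumulate (0x0C) and negative
 * multiply-accumulate (0x0E); opc3 bit 1 requests saturation and
 * bit 4 overflow recording.
 */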
6750 switch (opc3 & 0x0D) {
6751 case 0x05:
6752 /* macchw - macchw. - macchwo - macchwo. */
6753 /* macchws - macchws. - macchwso - macchwso. */
6754 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
6755 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
6756 /* mulchw - mulchw. */
6757 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
6758 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
6759 tcg_gen_ext16s_tl(t1, t1);
6760 break;
6761 case 0x04:
6762 /* macchwu - macchwu. - macchwuo - macchwuo. */
6763 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
6764 /* mulchwu - mulchwu. */
6765 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
6766 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
6767 tcg_gen_ext16u_tl(t1, t1);
6768 break;
6769 case 0x01:
6770 /* machhw - machhw. - machhwo - machhwo. */
6771 /* machhws - machhws. - machhwso - machhwso. */
6772 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
6773 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
6774 /* mulhhw - mulhhw. */
6775 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
6776 tcg_gen_ext16s_tl(t0, t0);
6777 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
6778 tcg_gen_ext16s_tl(t1, t1);
6779 break;
6780 case 0x00:
6781 /* machhwu - machhwu. - machhwuo - machhwuo. */
6782 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
6783 /* mulhhwu - mulhhwu. */
6784 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
6785 tcg_gen_ext16u_tl(t0, t0);
6786 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
6787 tcg_gen_ext16u_tl(t1, t1);
6788 break;
6789 case 0x0D:
6790 /* maclhw - maclhw. - maclhwo - maclhwo. */
6791 /* maclhws - maclhws. - maclhwso - maclhwso. */
6792 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
6793 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
6794 /* mullhw - mullhw. */
6795 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
6796 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
6797 break;
6798 case 0x0C:
6799 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
6800 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
6801 /* mullhwu - mullhwu. */
6802 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
6803 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
6804 break;
6805 }
6806 if (opc2 & 0x04) {
6807 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
6808 tcg_gen_mul_tl(t1, t0, t1);
6809 if (opc2 & 0x02) {
6810 /* nmultiply-and-accumulate (0x0E) */
6811 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
6812 } else {
6813 /* multiply-and-accumulate (0x0C) */
6814 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
6815 }
6816
6817 if (opc3 & 0x12) {
6818 /* Check overflow and/or saturate */
6819 TCGLabel *l1 = gen_new_label();
6820
6821 if (opc3 & 0x10) {
6822 /* Start with XER OV disabled, the most likely case */
6823 tcg_gen_movi_tl(cpu_ov, 0);
6824 }
6825 if (opc3 & 0x01) {
6826 /* Signed */
6827 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
6828 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
6829 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
6830 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
6831 if (opc3 & 0x02) {
6832 /* Saturate */
6833 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
6834 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
6835 }
6836 } else {
6837 /* Unsigned */
6838 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
6839 if (opc3 & 0x02) {
6840 /* Saturate */
6841 tcg_gen_movi_tl(t0, UINT32_MAX);
6842 }
6843 }
6844 if (opc3 & 0x10) {
6845 /* Check overflow */
6846 tcg_gen_movi_tl(cpu_ov, 1);
6847 tcg_gen_movi_tl(cpu_so, 1);
6848 }
6849 gen_set_label(l1);
6850 tcg_gen_mov_tl(cpu_gpr[rt], t0);
6851 }
6852 } else {
6853 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
6854 }
6855 tcg_temp_free(t0);
6856 tcg_temp_free(t1);
6857 if (unlikely(Rc != 0)) {
6858 /* Update Rc0 */
6859 gen_set_Rc0(ctx, cpu_gpr[rt]);
6860 }
6861 }
6862
6863 #define GEN_MAC_HANDLER(name, opc2, opc3) \
6864 static void glue(gen_, name)(DisasContext *ctx) \
6865 { \
6866 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
6867 rD(ctx->opcode), Rc(ctx->opcode)); \
6868 }
6869
6870 /* macchw - macchw. */
6871 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
6872 /* macchwo - macchwo. */
6873 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
6874 /* macchws - macchws. */
6875 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
6876 /* macchwso - macchwso. */
6877 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
6878 /* macchwsu - macchwsu. */
6879 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
6880 /* macchwsuo - macchwsuo. */
6881 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
6882 /* macchwu - macchwu. */
6883 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
6884 /* macchwuo - macchwuo. */
6885 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
6886 /* machhw - machhw. */
6887 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
6888 /* machhwo - machhwo. */
6889 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
6890 /* machhws - machhws. */
6891 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
6892 /* machhwso - machhwso. */
6893 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
6894 /* machhwsu - machhwsu. */
6895 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
6896 /* machhwsuo - machhwsuo. */
6897 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
6898 /* machhwu - machhwu. */
6899 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
6900 /* machhwuo - machhwuo. */
6901 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
6902 /* maclhw - maclhw. */
6903 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
6904 /* maclhwo - maclhwo. */
6905 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
6906 /* maclhws - maclhws. */
6907 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
6908 /* maclhwso - maclhwso. */
6909 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
6910 /* maclhwu - maclhwu. */
6911 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
6912 /* maclhwuo - maclhwuo. */
6913 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
6914 /* maclhwsu - maclhwsu. */
6915 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
6916 /* maclhwsuo - maclhwsuo. */
6917 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
6918 /* nmacchw - nmacchw. */
6919 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
6920 /* nmacchwo - nmacchwo. */
6921 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
6922 /* nmacchws - nmacchws. */
6923 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
6924 /* nmacchwso - nmacchwso. */
6925 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
6926 /* nmachhw - nmachhw. */
6927 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
6928 /* nmachhwo - nmachhwo. */
6929 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
6930 /* nmachhws - nmachhws. */
6931 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
6932 /* nmachhwso - nmachhwso. */
6933 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
6934 /* nmaclhw - nmaclhw. */
6935 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
6936 /* nmaclhwo - nmaclhwo. */
6937 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
6938 /* nmaclhws - nmaclhws. */
6939 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
6940 /* nmaclhwso - nmaclhwso. */
6941 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
6942
6943 /* mulchw - mulchw. */
6944 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
6945 /* mulchwu - mulchwu. */
6946 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
6947 /* mulhhw - mulhhw. */
6948 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
6949 /* mulhhwu - mulhhwu. */
6950 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
6951 /* mullhw - mullhw. */
6952 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
6953 /* mullhwu - mullhwu. */
6954 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
6955
6956 /* mfdcr */
6957 static void gen_mfdcr(DisasContext *ctx)
6958 {
6959 #if defined(CONFIG_USER_ONLY)
6960 GEN_PRIV;
6961 #else
6962 TCGv dcrn;
6963
6964 CHK_SV;
6965 dcrn = tcg_const_tl(SPR(ctx->opcode));
6966 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
6967 tcg_temp_free(dcrn);
6968 #endif /* defined(CONFIG_USER_ONLY) */
6969 }
6970
6971 /* mtdcr */
6972 static void gen_mtdcr(DisasContext *ctx)
6973 {
6974 #if defined(CONFIG_USER_ONLY)
6975 GEN_PRIV;
6976 #else
6977 TCGv dcrn;
6978
6979 CHK_SV;
6980 dcrn = tcg_const_tl(SPR(ctx->opcode));
6981 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
6982 tcg_temp_free(dcrn);
6983 #endif /* defined(CONFIG_USER_ONLY) */
6984 }
6985
6986 /* mfdcrx */
6987 /* XXX: not implemented on 440? */
6988 static void gen_mfdcrx(DisasContext *ctx)
6989 {
6990 #if defined(CONFIG_USER_ONLY)
6991 GEN_PRIV;
6992 #else
6993 CHK_SV;
6994 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
6995 cpu_gpr[rA(ctx->opcode)]);
6996 /* Note: Rc update flag set leads to undefined state of Rc0 */
6997 #endif /* defined(CONFIG_USER_ONLY) */
6998 }
6999
7000 /* mtdcrx */
7001 /* XXX: not implemented on 440? */
7002 static void gen_mtdcrx(DisasContext *ctx)
7003 {
7004 #if defined(CONFIG_USER_ONLY)
7005 GEN_PRIV;
7006 #else
7007 CHK_SV;
7008 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
7009 cpu_gpr[rS(ctx->opcode)]);
7010 /* Note: Rc update flag set leads to undefined state of Rc0 */
7011 #endif /* defined(CONFIG_USER_ONLY) */
7012 }
7013
7014 /* mfdcrux (PPC 460) : user-mode access to DCR */
7015 static void gen_mfdcrux(DisasContext *ctx)
7016 {
7017 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
7018 cpu_gpr[rA(ctx->opcode)]);
7019 /* Note: Rc update flag set leads to undefined state of Rc0 */
7020 }
7021
7022 /* mtdcrux (PPC 460) : user-mode access to DCR */
7023 static void gen_mtdcrux(DisasContext *ctx)
7024 {
7025 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
7026 cpu_gpr[rS(ctx->opcode)]);
7027 /* Note: Rc update flag set leads to undefined state of Rc0 */
7028 }
7029
7030 /* dccci */
7031 static void gen_dccci(DisasContext *ctx)
7032 {
7033 CHK_SV;
7034 /* interpreted as no-op */
7035 }
7036
7037 /* dcread */
7038 static void gen_dcread(DisasContext *ctx)
7039 {
7040 #if defined(CONFIG_USER_ONLY)
7041 GEN_PRIV;
7042 #else
7043 TCGv EA, val;
7044
7045 CHK_SV;
7046 gen_set_access_type(ctx, ACCESS_CACHE);
7047 EA = tcg_temp_new();
7048 gen_addr_reg_index(ctx, EA);
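/*
 * The load below is performed only for its MMU side effects; the
 * value read is discarded and rD receives the effective address.
 */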
7049 val = tcg_temp_new();
7050 gen_qemu_ld32u(ctx, val, EA);
7051 tcg_temp_free(val);
7052 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
7053 tcg_temp_free(EA);
7054 #endif /* defined(CONFIG_USER_ONLY) */
7055 }
7056
7057 /* icbt */
7058 static void gen_icbt_40x(DisasContext *ctx)
7059 {
7060 /*
7061 * interpreted as no-op
7062 * XXX: the specification says this is treated as a load by the MMU but
7063 * does not generate any exception
7064 */
7065 }
7066
7067 /* iccci */
7068 static void gen_iccci(DisasContext *ctx)
7069 {
7070 CHK_SV;
7071 /* interpreted as no-op */
7072 }
7073
7074 /* icread */
7075 static void gen_icread(DisasContext *ctx)
7076 {
7077 CHK_SV;
7078 /* interpreted as no-op */
7079 }
7080
7081 /* rfci (supervisor only) */
7082 static void gen_rfci_40x(DisasContext *ctx)
7083 {
7084 #if defined(CONFIG_USER_ONLY)
7085 GEN_PRIV;
7086 #else
7087 CHK_SV;
7088 /* Restore CPU state */
7089 gen_helper_40x_rfci(cpu_env);
7090 ctx->base.is_jmp = DISAS_EXIT;
7091 #endif /* defined(CONFIG_USER_ONLY) */
7092 }
7093
7094 static void gen_rfci(DisasContext *ctx)
7095 {
7096 #if defined(CONFIG_USER_ONLY)
7097 GEN_PRIV;
7098 #else
7099 CHK_SV;
7100 /* Restore CPU state */
7101 gen_helper_rfci(cpu_env);
7102 ctx->base.is_jmp = DISAS_EXIT;
7103 #endif /* defined(CONFIG_USER_ONLY) */
7104 }
7105
7106 /* BookE specific */
7107
7108 /* XXX: not implemented on 440? */
7109 static void gen_rfdi(DisasContext *ctx)
7110 {
7111 #if defined(CONFIG_USER_ONLY)
7112 GEN_PRIV;
7113 #else
7114 CHK_SV;
7115 /* Restore CPU state */
7116 gen_helper_rfdi(cpu_env);
7117 ctx->base.is_jmp = DISAS_EXIT;
7118 #endif /* defined(CONFIG_USER_ONLY) */
7119 }
7120
7121 /* XXX: not implemented on 440? */
7122 static void gen_rfmci(DisasContext *ctx)
7123 {
7124 #if defined(CONFIG_USER_ONLY)
7125 GEN_PRIV;
7126 #else
7127 CHK_SV;
7128 /* Restore CPU state */
7129 gen_helper_rfmci(cpu_env);
7130 ctx->base.is_jmp = DISAS_EXIT;
7131 #endif /* defined(CONFIG_USER_ONLY) */
7132 }
7133
7134 /* TLB management - PowerPC 405 implementation */
7135
7136 /* tlbre */
7137 static void gen_tlbre_40x(DisasContext *ctx)
7138 {
7139 #if defined(CONFIG_USER_ONLY)
7140 GEN_PRIV;
7141 #else
7142 CHK_SV;
7143 switch (rB(ctx->opcode)) {
7144 case 0:
7145 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
7146 cpu_gpr[rA(ctx->opcode)]);
7147 break;
7148 case 1:
7149 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
7150 cpu_gpr[rA(ctx->opcode)]);
7151 break;
7152 default:
7153 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7154 break;
7155 }
7156 #endif /* defined(CONFIG_USER_ONLY) */
7157 }
7158
7159 /* tlbsx - tlbsx. */
7160 static void gen_tlbsx_40x(DisasContext *ctx)
7161 {
7162 #if defined(CONFIG_USER_ONLY)
7163 GEN_PRIV;
7164 #else
7165 TCGv t0;
7166
7167 CHK_SV;
7168 t0 = tcg_temp_new();
7169 gen_addr_reg_index(ctx, t0);
7170 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
7171 tcg_temp_free(t0);
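/*
 * With Rc=1, CR0 is set from SO, and its EQ bit (0x02) is set when
 * the search found an entry (the helper did not return -1).
 */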
7172 if (Rc(ctx->opcode)) {
7173 TCGLabel *l1 = gen_new_label();
7174 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
7175 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
7176 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
7177 gen_set_label(l1);
7178 }
7179 #endif /* defined(CONFIG_USER_ONLY) */
7180 }
7181
7182 /* tlbwe */
7183 static void gen_tlbwe_40x(DisasContext *ctx)
7184 {
7185 #if defined(CONFIG_USER_ONLY)
7186 GEN_PRIV;
7187 #else
7188 CHK_SV;
7189
7190 switch (rB(ctx->opcode)) {
7191 case 0:
7192 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
7193 cpu_gpr[rS(ctx->opcode)]);
7194 break;
7195 case 1:
7196 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
7197 cpu_gpr[rS(ctx->opcode)]);
7198 break;
7199 default:
7200 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7201 break;
7202 }
7203 #endif /* defined(CONFIG_USER_ONLY) */
7204 }
7205
7206 /* TLB management - PowerPC 440 implementation */
7207
7208 /* tlbre */
7209 static void gen_tlbre_440(DisasContext *ctx)
7210 {
7211 #if defined(CONFIG_USER_ONLY)
7212 GEN_PRIV;
7213 #else
7214 CHK_SV;
7215
7216 switch (rB(ctx->opcode)) {
7217 case 0:
7218 case 1:
7219 case 2:
7220 {
7221 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
7222 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
7223 t0, cpu_gpr[rA(ctx->opcode)]);
7224 tcg_temp_free_i32(t0);
7225 }
7226 break;
7227 default:
7228 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7229 break;
7230 }
7231 #endif /* defined(CONFIG_USER_ONLY) */
7232 }
7233
7234 /* tlbsx - tlbsx. */
7235 static void gen_tlbsx_440(DisasContext *ctx)
7236 {
7237 #if defined(CONFIG_USER_ONLY)
7238 GEN_PRIV;
7239 #else
7240 TCGv t0;
7241
7242 CHK_SV;
7243 t0 = tcg_temp_new();
7244 gen_addr_reg_index(ctx, t0);
7245 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
7246 tcg_temp_free(t0);
7247 if (Rc(ctx->opcode)) {
7248 TCGLabel *l1 = gen_new_label();
7249 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
7250 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
7251 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
7252 gen_set_label(l1);
7253 }
7254 #endif /* defined(CONFIG_USER_ONLY) */
7255 }
7256
7257 /* tlbwe */
7258 static void gen_tlbwe_440(DisasContext *ctx)
7259 {
7260 #if defined(CONFIG_USER_ONLY)
7261 GEN_PRIV;
7262 #else
7263 CHK_SV;
7264 switch (rB(ctx->opcode)) {
7265 case 0:
7266 case 1:
7267 case 2:
7268 {
7269 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
7270 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
7271 cpu_gpr[rS(ctx->opcode)]);
7272 tcg_temp_free_i32(t0);
7273 }
7274 break;
7275 default:
7276 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7277 break;
7278 }
7279 #endif /* defined(CONFIG_USER_ONLY) */
7280 }
7281
7282 /* TLB management - PowerPC BookE 2.06 implementation */
7283
7284 /* tlbre */
7285 static void gen_tlbre_booke206(DisasContext *ctx)
7286 {
7287 #if defined(CONFIG_USER_ONLY)
7288 GEN_PRIV;
7289 #else
7290 CHK_SV;
7291 gen_helper_booke206_tlbre(cpu_env);
7292 #endif /* defined(CONFIG_USER_ONLY) */
7293 }
7294
7295 /* tlbsx - tlbsx. */
7296 static void gen_tlbsx_booke206(DisasContext *ctx)
7297 {
7298 #if defined(CONFIG_USER_ONLY)
7299 GEN_PRIV;
7300 #else
7301 TCGv t0;
7302
7303 CHK_SV;
7304 if (rA(ctx->opcode)) {
7305 t0 = tcg_temp_new();
7306 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
7307 } else {
7308 t0 = tcg_const_tl(0);
7309 }
7310
7311 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
7312 gen_helper_booke206_tlbsx(cpu_env, t0);
7313 tcg_temp_free(t0);
7314 #endif /* defined(CONFIG_USER_ONLY) */
7315 }
7316
7317 /* tlbwe */
7318 static void gen_tlbwe_booke206(DisasContext *ctx)
7319 {
7320 #if defined(CONFIG_USER_ONLY)
7321 GEN_PRIV;
7322 #else
7323 CHK_SV;
7324 gen_helper_booke206_tlbwe(cpu_env);
7325 #endif /* defined(CONFIG_USER_ONLY) */
7326 }
7327
7328 static void gen_tlbivax_booke206(DisasContext *ctx)
7329 {
7330 #if defined(CONFIG_USER_ONLY)
7331 GEN_PRIV;
7332 #else
7333 TCGv t0;
7334
7335 CHK_SV;
7336 t0 = tcg_temp_new();
7337 gen_addr_reg_index(ctx, t0);
7338 gen_helper_booke206_tlbivax(cpu_env, t0);
7339 tcg_temp_free(t0);
7340 #endif /* defined(CONFIG_USER_ONLY) */
7341 }
7342
7343 static void gen_tlbilx_booke206(DisasContext *ctx)
7344 {
7345 #if defined(CONFIG_USER_ONLY)
7346 GEN_PRIV;
7347 #else
7348 TCGv t0;
7349
7350 CHK_SV;
7351 t0 = tcg_temp_new();
7352 gen_addr_reg_index(ctx, t0);
7353
7354 switch ((ctx->opcode >> 21) & 0x3) {
7355 case 0:
7356 gen_helper_booke206_tlbilx0(cpu_env, t0);
7357 break;
7358 case 1:
7359 gen_helper_booke206_tlbilx1(cpu_env, t0);
7360 break;
7361 case 3:
7362 gen_helper_booke206_tlbilx3(cpu_env, t0);
7363 break;
7364 default:
7365 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7366 break;
7367 }
7368
7369 tcg_temp_free(t0);
7370 #endif /* defined(CONFIG_USER_ONLY) */
7371 }
7372
7373
7374 /* wrtee */
7375 static void gen_wrtee(DisasContext *ctx)
7376 {
7377 #if defined(CONFIG_USER_ONLY)
7378 GEN_PRIV;
7379 #else
7380 TCGv t0;
7381
7382 CHK_SV;
7383 t0 = tcg_temp_new();
7384 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
7385 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
7386 tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
7387 tcg_temp_free(t0);
7388 /*
7389 * Stop translation to have a chance to raise an exception if we
7390 * just set msr_ee to 1
7391 */
7392 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
7393 #endif /* defined(CONFIG_USER_ONLY) */
7394 }
7395
7396 /* wrteei */
7397 static void gen_wrteei(DisasContext *ctx)
7398 {
7399 #if defined(CONFIG_USER_ONLY)
7400 GEN_PRIV;
7401 #else
7402 CHK_SV;
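/* The instruction's E field (mask 0x00008000) supplies the new MSR[EE] value */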
7403 if (ctx->opcode & 0x00008000) {
7404 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
7405 /* Stop translation to have a chance to raise an exception */
7406 ctx->base.is_jmp = DISAS_EXIT_UPDATE;
7407 } else {
7408 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
7409 }
7410 #endif /* defined(CONFIG_USER_ONLY) */
7411 }
7412
7413 /* PowerPC 440 specific instructions */
7414
7415 /* dlmzb */
7416 static void gen_dlmzb(DisasContext *ctx)
7417 {
7418 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
7419 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
7420 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
7421 tcg_temp_free_i32(t0);
7422 }
7423
7424 /* mbar replaces eieio on 440 */
7425 static void gen_mbar(DisasContext *ctx)
7426 {
7427 /* interpreted as no-op */
7428 }
7429
7430 /* msync replaces sync on 440 */
7431 static void gen_msync_4xx(DisasContext *ctx)
7432 {
7433 /* Only e500 seems to treat reserved bits as invalid */
7434 if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
7435 (ctx->opcode & 0x03FFF801)) {
7436 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
7437 }
7438 /* otherwise interpreted as no-op */
7439 }
7440
7441 /* icbt */
7442 static void gen_icbt_440(DisasContext *ctx)
7443 {
7444 /*
7445 * interpreted as no-op
7446 * XXX: the specification says this is treated as a load by the MMU but
7447 * does not generate any exception
7448 */
7449 }
7450
7451 /* Embedded.Processor Control */
7452
7453 static void gen_msgclr(DisasContext *ctx)
7454 {
7455 #if defined(CONFIG_USER_ONLY)
7456 GEN_PRIV;
7457 #else
7458 CHK_HV;
7459 if (is_book3s_arch2x(ctx)) {
7460 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7461 } else {
7462 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7463 }
7464 #endif /* defined(CONFIG_USER_ONLY) */
7465 }
7466
7467 static void gen_msgsnd(DisasContext *ctx)
7468 {
7469 #if defined(CONFIG_USER_ONLY)
7470 GEN_PRIV;
7471 #else
7472 CHK_HV;
7473 if (is_book3s_arch2x(ctx)) {
7474 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]);
7475 } else {
7476 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
7477 }
7478 #endif /* defined(CONFIG_USER_ONLY) */
7479 }
7480
7481 #if defined(TARGET_PPC64)
7482 static void gen_msgclrp(DisasContext *ctx)
7483 {
7484 #if defined(CONFIG_USER_ONLY)
7485 GEN_PRIV;
7486 #else
7487 CHK_SV;
7488 gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7489 #endif /* defined(CONFIG_USER_ONLY) */
7490 }
7491
7492 static void gen_msgsndp(DisasContext *ctx)
7493 {
7494 #if defined(CONFIG_USER_ONLY)
7495 GEN_PRIV;
7496 #else
7497 CHK_SV;
7498 gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
7499 #endif /* defined(CONFIG_USER_ONLY) */
7500 }
7501 #endif
7502
7503 static void gen_msgsync(DisasContext *ctx)
7504 {
7505 #if defined(CONFIG_USER_ONLY)
7506 GEN_PRIV;
7507 #else
7508 CHK_HV;
7509 #endif /* defined(CONFIG_USER_ONLY) */
7510 /* interpreted as no-op */
7511 }
7512
7513 #if defined(TARGET_PPC64)
7514 static void gen_maddld(DisasContext *ctx)
7515 {
7516 TCGv_i64 t1 = tcg_temp_new_i64();
7517
7518 tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
7519 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
7520 tcg_temp_free_i64(t1);
7521 }
7522
7523 /* maddhd maddhdu */
7524 static void gen_maddhd_maddhdu(DisasContext *ctx)
7525 {
7526 TCGv_i64 lo = tcg_temp_new_i64();
7527 TCGv_i64 hi = tcg_temp_new_i64();
7528 TCGv_i64 t1 = tcg_temp_new_i64();
7529
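/*
 * Rc() here reads the low opcode bit, which distinguishes maddhdu
 * (unsigned product, addend zero-extended) from maddhd (signed
 * product, addend sign-extended into the high half).
 */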
7530 if (Rc(ctx->opcode)) {
7531 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
7532 cpu_gpr[rB(ctx->opcode)]);
7533 tcg_gen_movi_i64(t1, 0);
7534 } else {
7535 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
7536 cpu_gpr[rB(ctx->opcode)]);
7537 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
7538 }
7539 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
7540 cpu_gpr[rC(ctx->opcode)], t1);
7541 tcg_temp_free_i64(lo);
7542 tcg_temp_free_i64(hi);
7543 tcg_temp_free_i64(t1);
7544 }
7545 #endif /* defined(TARGET_PPC64) */
7546
7547 static void gen_tbegin(DisasContext *ctx)
7548 {
7549 if (unlikely(!ctx->tm_enabled)) {
7550 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
7551 return;
7552 }
7553 gen_helper_tbegin(cpu_env);
7554 }
7555
7556 #define GEN_TM_NOOP(name) \
7557 static inline void gen_##name(DisasContext *ctx) \
7558 { \
7559 if (unlikely(!ctx->tm_enabled)) { \
7560 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
7561 return; \
7562 } \
7563 /* \
7564 * Because tbegin always fails in QEMU, these user \
7565 * space instructions all have a simple implementation: \
7566 * \
7567 * CR[0] = 0b0 || MSR[TS] || 0b0 \
7568 * = 0b0 || 0b00 || 0b0 \
7569 */ \
7570 tcg_gen_movi_i32(cpu_crf[0], 0); \
7571 }
7572
7573 GEN_TM_NOOP(tend);
7574 GEN_TM_NOOP(tabort);
7575 GEN_TM_NOOP(tabortwc);
7576 GEN_TM_NOOP(tabortwci);
7577 GEN_TM_NOOP(tabortdc);
7578 GEN_TM_NOOP(tabortdci);
7579 GEN_TM_NOOP(tsr);
7580
7581 static inline void gen_cp_abort(DisasContext *ctx)
7582 {
7583 /* Do Nothing */
7584 }
7585
7586 #define GEN_CP_PASTE_NOOP(name) \
7587 static inline void gen_##name(DisasContext *ctx) \
7588 { \
7589 /* \
7590 * Generate invalid exception until we have an \
7591 * implementation of the copy paste facility \
7592 */ \
7593 gen_invalid(ctx); \
7594 }
7595
7596 GEN_CP_PASTE_NOOP(copy)
7597 GEN_CP_PASTE_NOOP(paste)
7598
7599 static void gen_tcheck(DisasContext *ctx)
7600 {
7601 if (unlikely(!ctx->tm_enabled)) {
7602 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
7603 return;
7604 }
7605 /*
7606 * Because tbegin always fails, the tcheck implementation is
7607 * simple:
7608 *
7609 * CR[CRF] = TDOOMED || MSR[TS] || 0b0
7610 * = 0b1 || 0b00 || 0b0
7611 */
7612 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
7613 }
7614
7615 #if defined(CONFIG_USER_ONLY)
7616 #define GEN_TM_PRIV_NOOP(name) \
7617 static inline void gen_##name(DisasContext *ctx) \
7618 { \
7619 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \
7620 }
7621
7622 #else
7623
7624 #define GEN_TM_PRIV_NOOP(name) \
7625 static inline void gen_##name(DisasContext *ctx) \
7626 { \
7627 CHK_SV; \
7628 if (unlikely(!ctx->tm_enabled)) { \
7629 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \
7630 return; \
7631 } \
7632 /* \
7633 * Because tbegin always fails, the implementation is \
7634 * simple: \
7635 * \
7636 * CR[0] = 0b0 || MSR[TS] || 0b0 \
7637 * = 0b0 || 0b00 || 0b0 \
7638 */ \
7639 tcg_gen_movi_i32(cpu_crf[0], 0); \
7640 }
7641
7642 #endif
7643
7644 GEN_TM_PRIV_NOOP(treclaim);
7645 GEN_TM_PRIV_NOOP(trechkpt);
7646
7647 static inline void get_fpr(TCGv_i64 dst, int regno)
7648 {
7649 tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
7650 }
7651
7652 static inline void set_fpr(int regno, TCGv_i64 src)
7653 {
7654 tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
7655 }
7656
7657 static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
7658 {
7659 tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
7660 }
7661
7662 static inline void set_avr64(int regno, TCGv_i64 src, bool high)
7663 {
7664 tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
7665 }
7666
7667 #include "translate/fp-impl.c.inc"
7668
7669 #include "translate/vmx-impl.c.inc"
7670
7671 #include "translate/vsx-impl.c.inc"
7672
7673 #include "translate/dfp-impl.c.inc"
7674
7675 #include "translate/spe-impl.c.inc"
7676
7677 /* Handles lfdp, lxsd, lxssp */
7678 static void gen_dform39(DisasContext *ctx)
7679 {
7680 switch (ctx->opcode & 0x3) {
7681 case 0: /* lfdp */
7682 if (ctx->insns_flags2 & PPC2_ISA205) {
7683 return gen_lfdp(ctx);
7684 }
7685 break;
7686 case 2: /* lxsd */
7687 if (ctx->insns_flags2 & PPC2_ISA300) {
7688 return gen_lxsd(ctx);
7689 }
7690 break;
7691 case 3: /* lxssp */
7692 if (ctx->insns_flags2 & PPC2_ISA300) {
7693 return gen_lxssp(ctx);
7694 }
7695 break;
7696 }
7697 return gen_invalid(ctx);
7698 }
7699
7700 /* handles stfdp, lxv, stxsd, stxssp, stxv */
7701 static void gen_dform3D(DisasContext *ctx)
7702 {
7703 if ((ctx->opcode & 3) == 1) { /* DQ-FORM */
7704 switch (ctx->opcode & 0x7) {
7705 case 1: /* lxv */
7706 if (ctx->insns_flags2 & PPC2_ISA300) {
7707 return gen_lxv(ctx);
7708 }
7709 break;
7710 case 5: /* stxv */
7711 if (ctx->insns_flags2 & PPC2_ISA300) {
7712 return gen_stxv(ctx);
7713 }
7714 break;
7715 }
7716 } else { /* DS-FORM */
7717 switch (ctx->opcode & 0x3) {
7718 case 0: /* stfdp */
7719 if (ctx->insns_flags2 & PPC2_ISA205) {
7720 return gen_stfdp(ctx);
7721 }
7722 break;
7723 case 2: /* stxsd */
7724 if (ctx->insns_flags2 & PPC2_ISA300) {
7725 return gen_stxsd(ctx);
7726 }
7727 break;
7728 case 3: /* stxssp */
7729 if (ctx->insns_flags2 & PPC2_ISA300) {
7730 return gen_stxssp(ctx);
7731 }
7732 break;
7733 }
7734 }
7735 return gen_invalid(ctx);
7736 }
7737
7738 #if defined(TARGET_PPC64)
7739 /* brd */
7740 static void gen_brd(DisasContext *ctx)
7741 {
7742 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
7743 }
7744
7745 /* brw */
7746 static void gen_brw(DisasContext *ctx)
7747 {
7748 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
7749 tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
7750
7751 }
7752
7753 /* brh */
7754 static void gen_brh(DisasContext *ctx)
7755 {
7756 TCGv_i64 t0 = tcg_temp_new_i64();
7757 TCGv_i64 t1 = tcg_temp_new_i64();
7758 TCGv_i64 t2 = tcg_temp_new_i64();
7759
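/*
 * Byte-reverse each halfword: the high byte of each halfword is
 * shifted down, the low byte shifted up, and the two are recombined.
 */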
7760 tcg_gen_movi_i64(t0, 0x00ff00ff00ff00ffull);
7761 tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
7762 tcg_gen_and_i64(t2, t1, t0);
7763 tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], t0);
7764 tcg_gen_shli_i64(t1, t1, 8);
7765 tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
7766
7767 tcg_temp_free_i64(t0);
7768 tcg_temp_free_i64(t1);
7769 tcg_temp_free_i64(t2);
7770 }
7771 #endif
7772
7773 static opcode_t opcodes[] = {
7774 #if defined(TARGET_PPC64)
7775 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
7776 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
7777 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
7778 #endif
7779 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
7780 GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER),
7781 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
7782 GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400001, PPC_INTEGER),
7783 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER),
7784 #if defined(TARGET_PPC64)
7785 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
7786 #endif
7787 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
7788 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
7789 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
7790 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7791 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7792 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7793 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7794 GEN_HANDLER_E(addpcis, 0x13, 0x2, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
7795 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
7796 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
7797 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
7798 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
7799 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7800 #if defined(TARGET_PPC64)
7801 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
7802 #endif
7803 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
7804 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
7805 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7806 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7807 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7808 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
7809 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
7810 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
7811 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
7812 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
7813 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
7814 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
7815 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7816 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7817 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7818 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7819 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
7820 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
7821 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
7822 #if defined(TARGET_PPC64)
7823 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
7824 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
7825 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
7826 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
7827 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
7828 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
7829 #endif
7830 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7831 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7832 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7833 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
7834 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
7835 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
7836 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
7837 #if defined(TARGET_PPC64)
7838 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
7839 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
7840 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
7841 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
7842 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
7843 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
7844 PPC_NONE, PPC2_ISA300),
7845 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
7846 PPC_NONE, PPC2_ISA300),
7847 #endif
7848 #if defined(TARGET_PPC64)
7849 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B),
7850 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX),
7851 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B),
7852 #endif
7853 /* handles lfdp, lxsd, lxssp */
7854 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
7855 /* handles stfdp, lxv, stxsd, stxssp, stxv */
7856 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
7857 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7858 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
7859 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
7860 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
7861 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
7862 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
7863 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
7864 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
7865 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
7866 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
7867 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
7868 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
7869 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
7870 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
7871 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
7872 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
7873 #if defined(TARGET_PPC64)
7874 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
7875 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
7876 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
7877 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
7878 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
7879 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
7880 #endif
7881 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
7882 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
7883 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300),
7884 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
7885 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
7886 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
7887 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
7888 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
7889 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
7890 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
7891 #if defined(TARGET_PPC64)
7892 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
7893 #if !defined(CONFIG_USER_ONLY)
7894 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
7895 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
7896 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
7897 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
7898 #endif
7899 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
7900 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
7901 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
7902 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
7903 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
7904 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
7905 #endif
7906 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
7907 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
7908 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
7909 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
7910 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
7911 #if defined(TARGET_PPC64)
7912 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
7913 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
7914 #endif
7915 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
7916 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
7917 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
7918 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
7919 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
7920 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
7921 #if defined(TARGET_PPC64)
7922 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
7923 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
7924 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
7925 #endif
7926 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
7927 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
7928 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
7929 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
7930 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
7931 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
7932 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
7933 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
7934 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
7935 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
7936 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
7937 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
7938 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
7939 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
7940 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
7941 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
7942 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
7943 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
7944 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
7945 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
7946 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
7947 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
7948 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
7949 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
7950 #if defined(TARGET_PPC64)
7951 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
7952 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
7953 PPC_SEGMENT_64B),
7954 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
7955 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
7956 PPC_SEGMENT_64B),
7957 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
7958 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
7959 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
7960 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B),
7961 #endif
7962 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
7963 /*
7964 * XXX Those instructions will need to be handled differently for
7965 * different ISA versions
7966 */
7967 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE),
7968 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE),
7969 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300),
7970 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300),
7971 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
7972 #if defined(TARGET_PPC64)
7973 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI),
7974 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
7975 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300),
7976 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
7977 #endif
7978 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
7979 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
7980 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR),
7981 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR),
7982 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR),
7983 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR),
7984 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR),
7985 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR),
7986 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR),
7987 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR),
7988 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR),
7989 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
7990 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR),
7991 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR),
7992 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR),
7993 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR),
7994 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR),
7995 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR),
7996 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR),
7997 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR),
7998 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR),
7999 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR),
8000 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR),
8001 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR),
8002 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR),
8003 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR),
8004 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR),
8005 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR),
8006 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR),
8007 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR),
8008 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR),
8009 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR),
8010 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR),
8011 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR),
8012 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR),
8013 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR),
8014 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC),
8015 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC),
8016 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC),
8017 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
8018 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
8019 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB),
8020 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB),
8021 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER),
8022 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER),
8023 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER),
8024 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER),
8025 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER),
8026 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER),
8027 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8028 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8029 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2),
8030 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2),
8031 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8032 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2),
8033 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2),
8034 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2),
8035 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
8036 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
8037 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
8038 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
8039 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
8040 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
8041 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
8042 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
8043 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
8044 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
8045 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
8046 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
8047 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
8048 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
8049 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
8050 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
8051 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
8052 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
8053 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
8054 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
8055 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
8056 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
8057 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
8058 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
8059 PPC_NONE, PPC2_BOOKE206),
8060 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
8061 PPC_NONE, PPC2_BOOKE206),
8062 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
8063 PPC_NONE, PPC2_BOOKE206),
8064 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
8065 PPC_NONE, PPC2_BOOKE206),
8066 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
8067 PPC_NONE, PPC2_BOOKE206),
8068 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
8069 PPC_NONE, PPC2_PRCNTL),
8070 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
8071 PPC_NONE, PPC2_PRCNTL),
8072 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000,
8073 PPC_NONE, PPC2_PRCNTL),
8074 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
8075 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
8076 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
8077 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
8078 PPC_BOOKE, PPC2_BOOKE206),
8079 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
8080 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
8081 PPC_BOOKE, PPC2_BOOKE206),
8082 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
8083 PPC_440_SPEC),
8084 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
8085 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
8086 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
8087 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
8088 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
8089 #if defined(TARGET_PPC64)
8090 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
8091 PPC2_ISA300),
8092 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
8093 GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001,
8094 PPC_NONE, PPC2_ISA207S),
8095 GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001,
8096 PPC_NONE, PPC2_ISA207S),
8097 #endif
8098
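/*
 * The helper macros below are redefined so that, from here on, each
 * invocation expands to an opcode table entry for the corresponding
 * instruction (mirroring the macros used earlier in this file to generate
 * the translation routines themselves).
 */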
8099 #undef GEN_INT_ARITH_ADD
8100 #undef GEN_INT_ARITH_ADD_CONST
8101 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
8102 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
8103 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
8104 add_ca, compute_ca, compute_ov) \
8105 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
8106 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
8107 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
8108 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
8109 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
8110 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
8111 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
8112 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
8113 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
8114 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
8115 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
8116 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
8117
8118 #undef GEN_INT_ARITH_DIVW
8119 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
8120 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
8121 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
8122 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
8123 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
8124 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
8125 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8126 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8127 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8128 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8129 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
8130 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
8131
8132 #if defined(TARGET_PPC64)
8133 #undef GEN_INT_ARITH_DIVD
8134 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
8135 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8136 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
8137 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
8138 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
8139 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
8140
8141 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8142 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
8143 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8144 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
8145 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
8146 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
8147
8148 #undef GEN_INT_ARITH_MUL_HELPER
8149 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
8150 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
8151 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
8152 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
8153 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
8154 #endif
8155
8156 #undef GEN_INT_ARITH_SUBF
8157 #undef GEN_INT_ARITH_SUBF_CONST
8158 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
8159 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
8160 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
8161 add_ca, compute_ca, compute_ov) \
8162 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
8163 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
8164 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
8165 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
8166 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
8167 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
8168 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
8169 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
8170 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
8171 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
8172 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
8173
8174 #undef GEN_LOGICAL1
8175 #undef GEN_LOGICAL2
8176 #define GEN_LOGICAL2(name, tcg_op, opc, type) \
8177 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
8178 #define GEN_LOGICAL1(name, tcg_op, opc, type) \
8179 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
8180 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
8181 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
8182 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
8183 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
8184 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
8185 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
8186 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
8187 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
8188 #if defined(TARGET_PPC64)
8189 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
8190 #endif
8191
8192 #if defined(TARGET_PPC64)
8193 #undef GEN_PPC64_R2
8194 #undef GEN_PPC64_R4
8195 #define GEN_PPC64_R2(name, opc1, opc2) \
8196 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8197 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8198 PPC_64B)
8199 #define GEN_PPC64_R4(name, opc1, opc2) \
8200 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
8201 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
8202 PPC_64B), \
8203 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
8204 PPC_64B), \
8205 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
8206 PPC_64B)
8207 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
8208 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
8209 GEN_PPC64_R4(rldic, 0x1E, 0x04),
8210 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
8211 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
8212 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
8213 #endif
8214
8215 #undef GEN_LD
8216 #undef GEN_LDU
8217 #undef GEN_LDUX
8218 #undef GEN_LDX_E
8219 #undef GEN_LDS
8220 #define GEN_LD(name, ldop, opc, type) \
8221 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8222 #define GEN_LDU(name, ldop, opc, type) \
8223 GEN_HANDLER(name##u, opc, 0xFF, 0xFF, 0x00000000, type),
8224 #define GEN_LDUX(name, ldop, opc2, opc3, type) \
8225 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8226 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
8227 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
8228 #define GEN_LDS(name, ldop, op, type) \
8229 GEN_LD(name, ldop, op | 0x20, type) \
8230 GEN_LDU(name, ldop, op | 0x21, type) \
8231 GEN_LDUX(name, ldop, 0x17, op | 0x01, type) \
8232 GEN_LDX(name, ldop, 0x17, op | 0x00, type)
8233
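/*
 * Each GEN_LDS() invocation below registers the base, update, indexed-update
 * and indexed forms; e.g. GEN_LDS(lbz, ...) covers lbz, lbzu, lbzux and lbzx.
 */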
8234 GEN_LDS(lbz, ld8u, 0x02, PPC_INTEGER)
8235 GEN_LDS(lha, ld16s, 0x0A, PPC_INTEGER)
8236 GEN_LDS(lhz, ld16u, 0x08, PPC_INTEGER)
8237 GEN_LDS(lwz, ld32u, 0x00, PPC_INTEGER)
8238 #if defined(TARGET_PPC64)
8239 GEN_LDUX(lwa, ld32s, 0x15, 0x0B, PPC_64B)
8240 GEN_LDX(lwa, ld32s, 0x15, 0x0A, PPC_64B)
8241 GEN_LDUX(ld, ld64_i64, 0x15, 0x01, PPC_64B)
8242 GEN_LDX(ld, ld64_i64, 0x15, 0x00, PPC_64B)
8243 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
8244
8245 /* HV/P7 and later only */
8246 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
8247 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
8248 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
8249 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
8250 #endif
8251 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
8252 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
8253
8254 /* External PID based load */
8255 #undef GEN_LDEPX
8256 #define GEN_LDEPX(name, ldop, opc2, opc3) \
8257 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \
8258 0x00000001, PPC_NONE, PPC2_BOOKE206),
8259
8260 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
8261 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
8262 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
8263 #if defined(TARGET_PPC64)
8264 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00)
8265 #endif
8266
8267 #undef GEN_ST
8268 #undef GEN_STU
8269 #undef GEN_STUX
8270 #undef GEN_STX_E
8271 #undef GEN_STS
8272 #define GEN_ST(name, stop, opc, type) \
8273 GEN_HANDLER(name, opc, 0xFF, 0xFF, 0x00000000, type),
8274 #define GEN_STU(name, stop, opc, type) \
8275 GEN_HANDLER(stop##u, opc, 0xFF, 0xFF, 0x00000000, type),
8276 #define GEN_STUX(name, stop, opc2, opc3, type) \
8277 GEN_HANDLER(name##ux, 0x1F, opc2, opc3, 0x00000001, type),
8278 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
8279 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
8280 #define GEN_STS(name, stop, op, type) \
8281 GEN_ST(name, stop, op | 0x20, type) \
8282 GEN_STU(name, stop, op | 0x21, type) \
8283 GEN_STUX(name, stop, 0x17, op | 0x01, type) \
8284 GEN_STX(name, stop, 0x17, op | 0x00, type)
8285
8286 GEN_STS(stb, st8, 0x06, PPC_INTEGER)
8287 GEN_STS(sth, st16, 0x0C, PPC_INTEGER)
8288 GEN_STS(stw, st32, 0x04, PPC_INTEGER)
8289 #if defined(TARGET_PPC64)
8290 GEN_STUX(std, st64_i64, 0x15, 0x05, PPC_64B)
8291 GEN_STX(std, st64_i64, 0x15, 0x04, PPC_64B)
8292 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
8293 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
8294 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
8295 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
8296 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
8297 #endif
8298 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
8299 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
8300
8301 #undef GEN_STEPX
8302 #define GEN_STEPX(name, ldop, opc2, opc3) \
8303 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \
8304 0x00000001, PPC_NONE, PPC2_BOOKE206),
8305
8306 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
8307 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
8308 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
8309 #if defined(TARGET_PPC64)
8310 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04)
8311 #endif
8312
8313 #undef GEN_CRLOGIC
8314 #define GEN_CRLOGIC(name, tcg_op, opc) \
8315 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
8316 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
8317 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
8318 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
8319 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
8320 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
8321 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
8322 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
8323 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
8324
8325 #undef GEN_MAC_HANDLER
8326 #define GEN_MAC_HANDLER(name, opc2, opc3) \
8327 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
8328 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
8329 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
8330 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
8331 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
8332 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
8333 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
8334 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
8335 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
8336 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
8337 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
8338 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
8339 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
8340 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
8341 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
8342 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
8343 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
8344 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
8345 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
8346 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
8347 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
8348 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
8349 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
8350 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
8351 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
8352 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
8353 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
8354 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
8355 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
8356 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
8357 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
8358 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
8359 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
8360 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
8361 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
8362 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
8363 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
8364 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
8365 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
8366 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
8367 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
8368 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
8369 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
8370
8371 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
8372 PPC_NONE, PPC2_TM),
8373 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \
8374 PPC_NONE, PPC2_TM),
8375 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
8376 PPC_NONE, PPC2_TM),
8377 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
8378 PPC_NONE, PPC2_TM),
8379 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
8380 PPC_NONE, PPC2_TM),
8381 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
8382 PPC_NONE, PPC2_TM),
8383 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
8384 PPC_NONE, PPC2_TM),
8385 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
8386 PPC_NONE, PPC2_TM),
8387 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
8388 PPC_NONE, PPC2_TM),
8389 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
8390 PPC_NONE, PPC2_TM),
8391 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
8392 PPC_NONE, PPC2_TM),
8393
8394 #include "translate/fp-ops.c.inc"
8395
8396 #include "translate/vmx-ops.c.inc"
8397
8398 #include "translate/vsx-ops.c.inc"
8399
8400 #include "translate/dfp-ops.c.inc"
8401
8402 #include "translate/spe-ops.c.inc"
8403 };
8404
8405 /*****************************************************************************/
8406 /* Opcode types */
8407 enum {
8408 PPC_DIRECT = 0, /* Opcode routine */
8409 PPC_INDIRECT = 1, /* Indirect opcode table */
8410 };
8411
8412 #define PPC_OPCODE_MASK 0x3
8413
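/*
 * Indirect sub-tables are stored in their parent slot with the low pointer
 * bits tagged as PPC_INDIRECT: is_indirect_opcode() tests the tag and
 * ind_table() masks it off to recover the actual table pointer.
 */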
8414 static inline int is_indirect_opcode(void *handler)
8415 {
8416 return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
8417 }
8418
8419 static inline opc_handler_t **ind_table(void *handler)
8420 {
8421 return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
8422 }
8423
8424 /* Instruction table creation */
8425 /* Opcodes tables creation */
8426 static void fill_new_table(opc_handler_t **table, int len)
8427 {
8428 int i;
8429
8430 for (i = 0; i < len; i++) {
8431 table[i] = &invalid_handler;
8432 }
8433 }
8434
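/*
 * Allocate a sub-table full of invalid handlers and link it, tagged as
 * indirect, into slot @idx of @table.
 */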
8435 static int create_new_table(opc_handler_t **table, unsigned char idx)
8436 {
8437 opc_handler_t **tmp;
8438
8439 tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
8440 fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
8441 table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
8442
8443 return 0;
8444 }
8445
8446 static int insert_in_table(opc_handler_t **table, unsigned char idx,
8447 opc_handler_t *handler)
8448 {
8449 if (table[idx] != &invalid_handler) {
8450 return -1;
8451 }
8452 table[idx] = handler;
8453
8454 return 0;
8455 }
8456
8457 static int register_direct_insn(opc_handler_t **ppc_opcodes,
8458 unsigned char idx, opc_handler_t *handler)
8459 {
8460 if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
8461 printf("*** ERROR: opcode %02x already assigned in main "
8462 "opcode table\n", idx);
8463 return -1;
8464 }
8465
8466 return 0;
8467 }
8468
8469 static int register_ind_in_table(opc_handler_t **table,
8470 unsigned char idx1, unsigned char idx2,
8471 opc_handler_t *handler)
8472 {
8473 if (table[idx1] == &invalid_handler) {
8474 if (create_new_table(table, idx1) < 0) {
8475 printf("*** ERROR: unable to create indirect table "
8476 "idx=%02x\n", idx1);
8477 return -1;
8478 }
8479 } else {
8480 if (!is_indirect_opcode(table[idx1])) {
8481 printf("*** ERROR: idx %02x already assigned to a direct "
8482 "opcode\n", idx1);
8483 return -1;
8484 }
8485 }
8486 if (handler != NULL &&
8487 insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
8488 printf("*** ERROR: opcode %02x already assigned in "
8489 "opcode table %02x\n", idx2, idx1);
8490 return -1;
8491 }
8492
8493 return 0;
8494 }
8495
8496 static int register_ind_insn(opc_handler_t **ppc_opcodes,
8497 unsigned char idx1, unsigned char idx2,
8498 opc_handler_t *handler)
8499 {
8500 return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
8501 }
8502
8503 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
8504 unsigned char idx1, unsigned char idx2,
8505 unsigned char idx3, opc_handler_t *handler)
8506 {
8507 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
8508 printf("*** ERROR: unable to join indirect table idx "
8509 "[%02x-%02x]\n", idx1, idx2);
8510 return -1;
8511 }
8512 if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
8513 handler) < 0) {
8514 printf("*** ERROR: unable to insert opcode "
8515 "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
8516 return -1;
8517 }
8518
8519 return 0;
8520 }
8521
8522 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
8523 unsigned char idx1, unsigned char idx2,
8524 unsigned char idx3, unsigned char idx4,
8525 opc_handler_t *handler)
8526 {
8527 opc_handler_t **table;
8528
8529 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
8530 printf("*** ERROR: unable to join indirect table idx "
8531 "[%02x-%02x]\n", idx1, idx2);
8532 return -1;
8533 }
8534 table = ind_table(ppc_opcodes[idx1]);
8535 if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
8536 printf("*** ERROR: unable to join 2nd-level indirect table idx "
8537 "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
8538 return -1;
8539 }
8540 table = ind_table(table[idx2]);
8541 if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
8542 printf("*** ERROR: unable to insert opcode "
8543 "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
8544 return -1;
8545 }
8546 return 0;
8547 }
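
/*
 * Register one opcode_t entry: opc2/opc3/opc4 equal to 0xFF mark unused
 * encoding levels, so the handler lands either in the direct table or in
 * one, two or three levels of indirect tables.
 */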
8548 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
8549 {
8550 if (insn->opc2 != 0xFF) {
8551 if (insn->opc3 != 0xFF) {
8552 if (insn->opc4 != 0xFF) {
8553 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
8554 insn->opc3, insn->opc4,
8555 &insn->handler) < 0) {
8556 return -1;
8557 }
8558 } else {
8559 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
8560 insn->opc3, &insn->handler) < 0) {
8561 return -1;
8562 }
8563 }
8564 } else {
8565 if (register_ind_insn(ppc_opcodes, insn->opc1,
8566 insn->opc2, &insn->handler) < 0) {
8567 return -1;
8568 }
8569 }
8570 } else {
8571 if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
8572 return -1;
8573 }
8574 }
8575
8576 return 0;
8577 }
8578
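/*
 * Count the valid handlers in @table, replacing NULL slots with
 * invalid_handler and pruning indirect sub-tables that contain no valid
 * handler at all.
 */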
8579 static int test_opcode_table(opc_handler_t **table, int len)
8580 {
8581 int i, count, tmp;
8582
8583 for (i = 0, count = 0; i < len; i++) {
8584 /* Consistency fixup */
8585 if (table[i] == NULL) {
8586 table[i] = &invalid_handler;
8587 }
8588 if (table[i] != &invalid_handler) {
8589 if (is_indirect_opcode(table[i])) {
8590 tmp = test_opcode_table(ind_table(table[i]),
8591 PPC_CPU_INDIRECT_OPCODES_LEN);
8592 if (tmp == 0) {
8593 g_free(ind_table(table[i]));
8594 table[i] = &invalid_handler;
8595 } else {
8596 count++;
8597 }
8598 } else {
8599 count++;
8600 }
8601 }
8602 }
8603
8604 return count;
8605 }
8606
8607 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
8608 {
8609 if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
8610 printf("*** WARNING: no opcode defined!\n");
8611 }
8612 }
8613
8614 /*****************************************************************************/
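/*
 * Build cpu->opcodes from the static opcodes[] table, registering only the
 * instructions whose type/type2 flags match this CPU's insns_flags and
 * insns_flags2.
 */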
8615 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
8616 {
8617 PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
8618 opcode_t *opc;
8619
8620 fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
8621 for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
8622 if (((opc->handler.type & pcc->insns_flags) != 0) ||
8623 ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
8624 if (register_insn(cpu->opcodes, opc) < 0) {
8625 error_setg(errp, "ERROR initializing PowerPC instruction "
8626 "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
8627 opc->opc3);
8628 return;
8629 }
8630 }
8631 }
8632 fix_opcode_tables(cpu->opcodes);
8633 fflush(stdout);
8634 fflush(stderr);
8635 }
8636
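/* Free the dynamically allocated indirect sub-tables (up to three levels deep). */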
8637 void destroy_ppc_opcodes(PowerPCCPU *cpu)
8638 {
8639 opc_handler_t **table, **table_2;
8640 int i, j, k;
8641
8642 for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
8643 if (cpu->opcodes[i] == &invalid_handler) {
8644 continue;
8645 }
8646 if (is_indirect_opcode(cpu->opcodes[i])) {
8647 table = ind_table(cpu->opcodes[i]);
8648 for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
8649 if (table[j] == &invalid_handler) {
8650 continue;
8651 }
8652 if (is_indirect_opcode(table[j])) {
8653 table_2 = ind_table(table[j]);
8654 for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
8655 if (table_2[k] != &invalid_handler &&
8656 is_indirect_opcode(table_2[k])) {
8657 g_free((opc_handler_t *)((uintptr_t)table_2[k] &
8658 ~PPC_INDIRECT));
8659 }
8660 }
8661 g_free((opc_handler_t *)((uintptr_t)table[j] &
8662 ~PPC_INDIRECT));
8663 }
8664 }
8665 g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
8666 ~PPC_INDIRECT));
8667 }
8668 }
8669 }
8670
8671 int ppc_fixup_cpu(PowerPCCPU *cpu)
8672 {
8673 CPUPPCState *env = &cpu->env;
8674
8675 /*
8676 * TCG doesn't (yet) emulate some groups of instructions that are
8677 * implemented on some otherwise supported CPUs (e.g. VSX and
8678 * decimal floating point instructions on POWER7). We remove
8679 * unsupported instruction groups from the cpu state's instruction
8680 * masks and hope the guest can cope. For at least the pseries
8681 * machine, the unavailability of these instructions can be
8682 * advertised to the guest via the device tree.
8683 */
8684 if ((env->insns_flags & ~PPC_TCG_INSNS)
8685 || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
8686 warn_report("Disabling some instructions which are not "
8687 "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
8688 env->insns_flags & ~PPC_TCG_INSNS,
8689 env->insns_flags2 & ~PPC_TCG_INSNS2);
8690 }
8691 env->insns_flags &= PPC_TCG_INSNS;
8692 env->insns_flags2 &= PPC_TCG_INSNS2;
8693 return 0;
8694 }
8695
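/*
 * Legacy decoder: walk up to four levels of opcode tables keyed on
 * opc1..opc4, reject the instruction if it resolves to gen_invalid or if any
 * of its reserved (inval) bits are set, and otherwise emit code through the
 * handler.
 */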
8696 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
8697 {
8698 opc_handler_t **table, *handler;
8699 uint32_t inval;
8700
8701 ctx->opcode = insn;
8702
8703 LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
8704 insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
8705 ctx->le_mode ? "little" : "big");
8706
8707 table = cpu->opcodes;
8708 handler = table[opc1(insn)];
8709 if (is_indirect_opcode(handler)) {
8710 table = ind_table(handler);
8711 handler = table[opc2(insn)];
8712 if (is_indirect_opcode(handler)) {
8713 table = ind_table(handler);
8714 handler = table[opc3(insn)];
8715 if (is_indirect_opcode(handler)) {
8716 table = ind_table(handler);
8717 handler = table[opc4(insn)];
8718 }
8719 }
8720 }
8721
8722 /* Is opcode *REALLY* valid? */
8723 if (unlikely(handler->handler == &gen_invalid)) {
8724 qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
8725 "%02x - %02x - %02x - %02x (%08x) "
8726 TARGET_FMT_lx "\n",
8727 opc1(insn), opc2(insn), opc3(insn), opc4(insn),
8728 insn, ctx->cia);
8729 return false;
8730 }
8731
8732 if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
8733 && Rc(insn))) {
8734 inval = handler->inval2;
8735 } else {
8736 inval = handler->inval1;
8737 }
8738
8739 if (unlikely((insn & inval) != 0)) {
8740 qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
8741 "%02x - %02x - %02x - %02x (%08x) "
8742 TARGET_FMT_lx "\n", insn & inval,
8743 opc1(insn), opc2(insn), opc3(insn), opc4(insn),
8744 insn, ctx->cia);
8745 return false;
8746 }
8747
8748 handler->handler(ctx);
8749 return true;
8750 }
8751
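/* Seed the DisasContext from the CPU state and the TB flags (hflags). */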
8752 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
8753 {
8754 DisasContext *ctx = container_of(dcbase, DisasContext, base);
8755 CPUPPCState *env = cs->env_ptr;
8756 uint32_t hflags = ctx->base.tb->flags;
8757
8758 ctx->spr_cb = env->spr_cb;
8759 ctx->pr = (hflags >> HFLAGS_PR) & 1;
8760 ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
8761 ctx->dr = (hflags >> HFLAGS_DR) & 1;
8762 ctx->hv = (hflags >> HFLAGS_HV) & 1;
8763 ctx->insns_flags = env->insns_flags;
8764 ctx->insns_flags2 = env->insns_flags2;
8765 ctx->access_type = -1;
8766 ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
8767 ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
8768 ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
8769 ctx->flags = env->flags;
8770 #if defined(TARGET_PPC64)
8771 ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
8772 ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
8773 #endif
8774 ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
8775 || env->mmu_model == POWERPC_MMU_601
8776 || env->mmu_model & POWERPC_MMU_64;
8777
8778 ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
8779 ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
8780 ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
8781 ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
8782 ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
8783 ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
8784
8785 ctx->singlestep_enabled = 0;
8786 if ((hflags >> HFLAGS_SE) & 1) {
8787 ctx->singlestep_enabled |= CPU_SINGLE_STEP;
8788 }
8789 if ((hflags >> HFLAGS_BE) & 1) {
8790 ctx->singlestep_enabled |= CPU_BRANCH_STEP;
8791 }
8792 if (unlikely(ctx->base.singlestep_enabled)) {
8793 ctx->singlestep_enabled |= GDBSTUB_SINGLE_STEP;
8794 }
8795
8796 if (ctx->singlestep_enabled & (CPU_SINGLE_STEP | GDBSTUB_SINGLE_STEP)) {
8797 ctx->base.max_insns = 1;
8798 } else {
8799 int bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
8800 ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
8801 }
8802 }
8803
8804 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
8805 {
8806 }
8807
8808 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
8809 {
8810 tcg_gen_insn_start(dcbase->pc_next);
8811 }
8812
8813 static bool ppc_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cs,
8814 const CPUBreakpoint *bp)
8815 {
8816 DisasContext *ctx = container_of(dcbase, DisasContext, base);
8817
8818 gen_update_nip(ctx, ctx->base.pc_next);
8819 gen_debug_exception(ctx);
8820 /*
8821 * The address covered by the breakpoint must be included in
8822 * [tb->pc, tb->pc + tb->size) in order for it to be properly
8823 * cleared -- thus we increment the PC here so that the logic
8824 * setting tb->size below does the right thing.
8825 */
8826 ctx->base.pc_next += 4;
8827 return true;
8828 }
8829
8830 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
8831 {
8832 DisasContext *ctx = container_of(dcbase, DisasContext, base);
8833 PowerPCCPU *cpu = POWERPC_CPU(cs);
8834 CPUPPCState *env = cs->env_ptr;
8835 uint32_t insn;
8836 bool ok;
8837
8838 LOG_DISAS("----------------\n");
8839 LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
8840 ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
8841
8842 ctx->cia = ctx->base.pc_next;
8843 insn = translator_ldl_swap(env, ctx->base.pc_next, need_byteswap(ctx));
8844 ctx->base.pc_next += 4;
8845
8846 ok = decode_legacy(cpu, ctx, insn);
8847 if (!ok) {
8848 gen_invalid(ctx);
8849 }
8850
8851 translator_loop_temp_check(&ctx->base);
8852 }
8853
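/*
 * Close the TB: honour single stepping first, then exit via goto_tb,
 * lookup_and_goto_ptr or exit_tb depending on ctx->base.is_jmp.
 */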
8854 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
8855 {
8856 DisasContext *ctx = container_of(dcbase, DisasContext, base);
8857 DisasJumpType is_jmp = ctx->base.is_jmp;
8858 target_ulong nip = ctx->base.pc_next;
8859 int sse;
8860
8861 if (is_jmp == DISAS_NORETURN) {
8862 /* We have already exited the TB. */
8863 return;
8864 }
8865
8866 /* Honor single stepping. */
8867 sse = ctx->singlestep_enabled & (CPU_SINGLE_STEP | GDBSTUB_SINGLE_STEP);
8868 if (unlikely(sse)) {
8869 switch (is_jmp) {
8870 case DISAS_TOO_MANY:
8871 case DISAS_EXIT_UPDATE:
8872 case DISAS_CHAIN_UPDATE:
8873 gen_update_nip(ctx, nip);
8874 break;
8875 case DISAS_EXIT:
8876 case DISAS_CHAIN:
8877 break;
8878 default:
8879 g_assert_not_reached();
8880 }
8881
8882 if (sse & GDBSTUB_SINGLE_STEP) {
8883 gen_debug_exception(ctx);
8884 return;
8885 }
8886 /* else CPU_SINGLE_STEP... */
8887 if (nip <= 0x100 || nip > 0xf00) {
8888 gen_exception(ctx, gen_prep_dbgex(ctx));
8889 return;
8890 }
8891 }
8892
8893 switch (is_jmp) {
8894 case DISAS_TOO_MANY:
8895 if (use_goto_tb(ctx, nip)) {
8896 tcg_gen_goto_tb(0);
8897 gen_update_nip(ctx, nip);
8898 tcg_gen_exit_tb(ctx->base.tb, 0);
8899 break;
8900 }
8901 /* fall through */
8902 case DISAS_CHAIN_UPDATE:
8903 gen_update_nip(ctx, nip);
8904 /* fall through */
8905 case DISAS_CHAIN:
8906 tcg_gen_lookup_and_goto_ptr();
8907 break;
8908
8909 case DISAS_EXIT_UPDATE:
8910 gen_update_nip(ctx, nip);
8911 /* fall through */
8912 case DISAS_EXIT:
8913 tcg_gen_exit_tb(NULL, 0);
8914 break;
8915
8916 default:
8917 g_assert_not_reached();
8918 }
8919 }
8920
8921 static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
8922 {
8923 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
8924 log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
8925 }
8926
8927 static const TranslatorOps ppc_tr_ops = {
8928 .init_disas_context = ppc_tr_init_disas_context,
8929 .tb_start = ppc_tr_tb_start,
8930 .insn_start = ppc_tr_insn_start,
8931 .breakpoint_check = ppc_tr_breakpoint_check,
8932 .translate_insn = ppc_tr_translate_insn,
8933 .tb_stop = ppc_tr_tb_stop,
8934 .disas_log = ppc_tr_disas_log,
8935 };
8936
8937 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
8938 {
8939 DisasContext ctx;
8940
8941 translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns);
8942 }
8943
8944 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
8945 target_ulong *data)
8946 {
8947 env->nip = data[0];
8948 }