]> git.proxmox.com Git - qemu.git/blob - target-ppc/translate.c
TCG variable type checking.
[qemu.git] / target-ppc / translate.c
1 /*
2 * PowerPC emulation for qemu: main translation routines.
3 *
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25
26 #include "cpu.h"
27 #include "exec-all.h"
28 #include "disas.h"
29 #include "tcg-op.h"
30 #include "qemu-common.h"
31
32 #include "helper.h"
33 #define GEN_HELPER 1
34 #include "helper.h"
35
36 #define CPU_SINGLE_STEP 0x1
37 #define CPU_BRANCH_STEP 0x2
38 #define GDBSTUB_SINGLE_STEP 0x4
39
40 /* Include definitions for instructions classes and implementations flags */
41 //#define DO_SINGLE_STEP
42 //#define PPC_DEBUG_DISAS
43 //#define DO_PPC_STATISTICS
44 //#define OPTIMIZE_FPRF_UPDATE
45
46 /*****************************************************************************/
47 /* Code translation helpers */
48
49 /* global register indexes */
50 static TCGv_ptr cpu_env;
51 static char cpu_reg_names[10*3 + 22*4 /* GPR */
52 #if !defined(TARGET_PPC64)
53 + 10*4 + 22*5 /* SPE GPRh */
54 #endif
55 + 10*4 + 22*5 /* FPR */
56 + 2*(10*6 + 22*7) /* AVRh, AVRl */
57 + 8*5 /* CRF */];
58 static TCGv cpu_gpr[32];
59 #if !defined(TARGET_PPC64)
60 static TCGv cpu_gprh[32];
61 #endif
62 static TCGv_i64 cpu_fpr[32];
63 static TCGv_i64 cpu_avrh[32], cpu_avrl[32];
64 static TCGv_i32 cpu_crf[8];
65 static TCGv cpu_nip;
66 static TCGv cpu_ctr;
67 static TCGv cpu_lr;
68 static TCGv cpu_xer;
69 static TCGv_i32 cpu_fpscr;
70
71 /* dyngen register indexes */
72 static TCGv cpu_T[3];
73 #if defined(TARGET_PPC64)
74 #define cpu_T64 cpu_T
75 #else
76 static TCGv_i64 cpu_T64[3];
77 #endif
78 static TCGv_i64 cpu_FT[3];
79 static TCGv_i64 cpu_AVRh[3], cpu_AVRl[3];
80
81 #include "gen-icount.h"
82
/* Register every TCG global that backs the PowerPC CPU state (GPRs, FPRs,
 * AVR halves, CR fields, nip/ctr/lr/xer/fpscr and the legacy dyngen
 * temporaries).  Must run once before translation starts; later calls are
 * no-ops via done_init.  Register name strings are packed back-to-back in
 * cpu_reg_names, so each "p +=" below must match the length (incl. NUL) of
 * the name just formatted — and the cpu_reg_names size computation. */
void ppc_translate_init(void)
{
    int i;
    char* p;
    /* One-shot guard: the TCG globals may only be registered once. */
    static int done_init = 0;

    if (done_init)
        return;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
#if TARGET_LONG_BITS > HOST_LONG_BITS
    /* A target word does not fit in a host register: keep the dyngen
     * temporaries T0..T2 in CPUState memory instead of host regs. */
    cpu_T[0] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t0), "T0");
    cpu_T[1] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t1), "T1");
    cpu_T[2] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t2), "T2");
#else
    cpu_T[0] = tcg_global_reg_new(TCG_AREG1, "T0");
    cpu_T[1] = tcg_global_reg_new(TCG_AREG2, "T1");
#ifdef HOST_I386
    /* XXX: This is a temporary workaround for i386.
     *      On i386 qemu_st32 runs out of registers.
     *      The proper fix is to remove cpu_T.
     */
    cpu_T[2] = tcg_global_mem_new(TCG_AREG0, offsetof(CPUState, t2), "T2");
#else
    cpu_T[2] = tcg_global_reg_new(TCG_AREG3, "T2");
#endif
#endif
#if !defined(TARGET_PPC64)
    /* 32-bit targets carry separate 64-bit shadows of T0..T2 (used by the
     * SPE/64-bit helpers). */
    cpu_T64[0] = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, t0_64),
                                        "T0_64");
    cpu_T64[1] = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, t1_64),
                                        "T1_64");
    cpu_T64[2] = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUState, t2_64),
                                        "T2_64");
#endif

    cpu_FT[0] = tcg_global_mem_new_i64(TCG_AREG0,
                                       offsetof(CPUState, ft0), "FT0");
    cpu_FT[1] = tcg_global_mem_new_i64(TCG_AREG0,
                                       offsetof(CPUState, ft1), "FT1");
    cpu_FT[2] = tcg_global_mem_new_i64(TCG_AREG0,
                                       offsetof(CPUState, ft2), "FT2");

    /* Altivec temporaries: each 128-bit AVR is split into high/low
     * 64-bit halves. */
    cpu_AVRh[0] = tcg_global_mem_new_i64(TCG_AREG0,
                                         offsetof(CPUState, avr0.u64[0]), "AVR0H");
    cpu_AVRl[0] = tcg_global_mem_new_i64(TCG_AREG0,
                                         offsetof(CPUState, avr0.u64[1]), "AVR0L");
    cpu_AVRh[1] = tcg_global_mem_new_i64(TCG_AREG0,
                                         offsetof(CPUState, avr1.u64[0]), "AVR1H");
    cpu_AVRl[1] = tcg_global_mem_new_i64(TCG_AREG0,
                                         offsetof(CPUState, avr1.u64[1]), "AVR1L");
    cpu_AVRh[2] = tcg_global_mem_new_i64(TCG_AREG0,
                                         offsetof(CPUState, avr2.u64[0]), "AVR2H");
    cpu_AVRl[2] = tcg_global_mem_new_i64(TCG_AREG0,
                                         offsetof(CPUState, avr2.u64[1]), "AVR2L");

    p = cpu_reg_names;

    /* "crf0".."crf7": 5 bytes each including the terminating NUL. */
    for (i = 0; i < 8; i++) {
        sprintf(p, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                            offsetof(CPUState, crf[i]), p);
        p += 5;
    }

    for (i = 0; i < 32; i++) {
        /* "r0".."r9" take 3 bytes, "r10".."r31" take 4. */
        sprintf(p, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
#if !defined(TARGET_PPC64)
        /* SPE upper halves of the GPRs, 32-bit targets only. */
        sprintf(p, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new_i32(TCG_AREG0,
                                             offsetof(CPUState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
#endif

        sprintf(p, "fp%d", i);
        cpu_fpr[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUState, fpr[i]), p);
        p += (i < 10) ? 4 : 5;

        sprintf(p, "avr%dH", i);
        cpu_avrh[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUState, avr[i].u64[0]), p);
        p += (i < 10) ? 6 : 7;

        sprintf(p, "avr%dL", i);
        cpu_avrl[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUState, avr[i].u64[1]), p);
        p += (i < 10) ? 6 : 7;
    }

    cpu_nip = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, nip), "nip");

    cpu_ctr = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUState, lr), "lr");

    cpu_xer = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, xer), "xer");

    cpu_fpscr = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUState, fpscr), "fpscr");

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

    done_init = 1;
}
197
198 #if defined(OPTIMIZE_FPRF_UPDATE)
199 static uint16_t *gen_fprf_buf[OPC_BUF_SIZE];
200 static uint16_t **gen_fprf_ptr;
201 #endif
202
203 /* internal defines */
204 typedef struct DisasContext {
205 struct TranslationBlock *tb;
206 target_ulong nip;
207 uint32_t opcode;
208 uint32_t exception;
209 /* Routine used to access memory */
210 int mem_idx;
211 /* Translation flags */
212 #if !defined(CONFIG_USER_ONLY)
213 int supervisor;
214 #endif
215 #if defined(TARGET_PPC64)
216 int sf_mode;
217 #endif
218 int fpu_enabled;
219 int altivec_enabled;
220 int spe_enabled;
221 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
222 int singlestep_enabled;
223 int dcache_line_size;
224 } DisasContext;
225
226 struct opc_handler_t {
227 /* invalid bits */
228 uint32_t inval;
229 /* instruction type */
230 uint64_t type;
231 /* handler */
232 void (*handler)(DisasContext *ctx);
233 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
234 const char *oname;
235 #endif
236 #if defined(DO_PPC_STATISTICS)
237 uint64_t count;
238 #endif
239 };
240
/* Clear the accumulated FP exception status before generating an FP
 * operation, so a later status check reflects only that operation.
 * Compiled out entirely when CONFIG_SOFTFLOAT is not defined. */
static always_inline void gen_reset_fpstatus (void)
{
#ifdef CONFIG_SOFTFLOAT
    gen_op_reset_fpstatus();
#endif
}
247
248 static always_inline void gen_compute_fprf (int set_fprf, int set_rc)
249 {
250 if (set_fprf != 0) {
251 /* This case might be optimized later */
252 #if defined(OPTIMIZE_FPRF_UPDATE)
253 *gen_fprf_ptr++ = gen_opc_ptr;
254 #endif
255 gen_op_compute_fprf(1);
256 if (unlikely(set_rc)) {
257 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_T[0]);
258 tcg_gen_andi_i32(cpu_crf[1], cpu_crf[1], 0xf);
259 }
260 gen_op_float_check_status();
261 } else if (unlikely(set_rc)) {
262 /* We always need to compute fpcc */
263 gen_op_compute_fprf(0);
264 tcg_gen_trunc_tl_i32(cpu_crf[1], cpu_T[0]);
265 tcg_gen_andi_i32(cpu_crf[1], cpu_crf[1], 0xf);
266 if (set_fprf)
267 gen_op_float_check_status();
268 }
269 }
270
/* Turn the FPRF-update ops recorded in gen_fprf_buf into nops; compiled
 * out unless OPTIMIZE_FPRF_UPDATE is defined. */
static always_inline void gen_optimize_fprf (void)
{
#if defined(OPTIMIZE_FPRF_UPDATE)
    uint16_t **ptr;

    /* NOTE(review): the loop stops at gen_fprf_ptr - 1, which leaves the
     * most recently recorded entry untouched — and when no entry was
     * recorded (gen_fprf_ptr == gen_fprf_buf) the condition starts out
     * permanently true and the loop overruns the buffer.  Verify the
     * intended bound before enabling this option. */
    for (ptr = gen_fprf_buf; ptr != (gen_fprf_ptr - 1); ptr++)
        *ptr = INDEX_op_nop1;
    gen_fprf_ptr = gen_fprf_buf;
#endif
}
281
/* Store the given instruction address into cpu_nip.  On a 64-bit target
 * running in 32-bit mode (!sf_mode) the address is truncated to 32 bits;
 * note the else binds across the #endif to the final store. */
static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip)
{
#if defined(TARGET_PPC64)
    if (ctx->sf_mode)
        tcg_gen_movi_tl(cpu_nip, nip);
    else
#endif
        tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
}
291
292 #define GEN_EXCP(ctx, excp, error) \
293 do { \
294 if ((ctx)->exception == POWERPC_EXCP_NONE) { \
295 gen_update_nip(ctx, (ctx)->nip); \
296 } \
297 gen_op_raise_exception_err((excp), (error)); \
298 ctx->exception = (excp); \
299 } while (0)
300
301 #define GEN_EXCP_INVAL(ctx) \
302 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
303 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_INVAL)
304
305 #define GEN_EXCP_PRIVOPC(ctx) \
306 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
307 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_OPC)
308
309 #define GEN_EXCP_PRIVREG(ctx) \
310 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
311 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_REG)
312
313 #define GEN_EXCP_NO_FP(ctx) \
314 GEN_EXCP(ctx, POWERPC_EXCP_FPU, 0)
315
316 #define GEN_EXCP_NO_AP(ctx) \
317 GEN_EXCP(ctx, POWERPC_EXCP_APU, 0)
318
319 #define GEN_EXCP_NO_VR(ctx) \
320 GEN_EXCP(ctx, POWERPC_EXCP_VPU, 0)
321
322 /* Stop translation */
static always_inline void GEN_STOP (DisasContext *ctx)
{
    /* Make nip precise first, then flag the TB so the translation loop
     * ends after the current instruction. */
    gen_update_nip(ctx, ctx->nip);
    ctx->exception = POWERPC_EXCP_STOP;
}
328
329 /* No need to update nip here, as execution flow will change */
static always_inline void GEN_SYNC (DisasContext *ctx)
{
    /* Force an exit from the current TB; nip is deliberately not updated
     * because the caller changes the execution flow anyway. */
    ctx->exception = POWERPC_EXCP_SYNC;
}
334
335 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
336 static void gen_##name (DisasContext *ctx); \
337 GEN_OPCODE(name, opc1, opc2, opc3, inval, type); \
338 static void gen_##name (DisasContext *ctx)
339
340 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
341 static void gen_##name (DisasContext *ctx); \
342 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type); \
343 static void gen_##name (DisasContext *ctx)
344
345 typedef struct opcode_t {
346 unsigned char opc1, opc2, opc3;
347 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
348 unsigned char pad[5];
349 #else
350 unsigned char pad[1];
351 #endif
352 opc_handler_t handler;
353 const char *oname;
354 } opcode_t;
355
356 /*****************************************************************************/
357 /*** Instruction decoding ***/
358 #define EXTRACT_HELPER(name, shift, nb) \
359 static always_inline uint32_t name (uint32_t opcode) \
360 { \
361 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
362 }
363
364 #define EXTRACT_SHELPER(name, shift, nb) \
365 static always_inline int32_t name (uint32_t opcode) \
366 { \
367 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
368 }
369
370 /* Opcode part 1 */
371 EXTRACT_HELPER(opc1, 26, 6);
372 /* Opcode part 2 */
373 EXTRACT_HELPER(opc2, 1, 5);
374 /* Opcode part 3 */
375 EXTRACT_HELPER(opc3, 6, 5);
376 /* Update Cr0 flags */
377 EXTRACT_HELPER(Rc, 0, 1);
378 /* Destination */
379 EXTRACT_HELPER(rD, 21, 5);
380 /* Source */
381 EXTRACT_HELPER(rS, 21, 5);
382 /* First operand */
383 EXTRACT_HELPER(rA, 16, 5);
384 /* Second operand */
385 EXTRACT_HELPER(rB, 11, 5);
386 /* Third operand */
387 EXTRACT_HELPER(rC, 6, 5);
388 /*** Get CRn ***/
389 EXTRACT_HELPER(crfD, 23, 3);
390 EXTRACT_HELPER(crfS, 18, 3);
391 EXTRACT_HELPER(crbD, 21, 5);
392 EXTRACT_HELPER(crbA, 16, 5);
393 EXTRACT_HELPER(crbB, 11, 5);
394 /* SPR / TBL */
395 EXTRACT_HELPER(_SPR, 11, 10);
396 static always_inline uint32_t SPR (uint32_t opcode)
397 {
398 uint32_t sprn = _SPR(opcode);
399
400 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
401 }
402 /*** Get constants ***/
403 EXTRACT_HELPER(IMM, 12, 8);
404 /* 16 bits signed immediate value */
405 EXTRACT_SHELPER(SIMM, 0, 16);
406 /* 16 bits unsigned immediate value */
407 EXTRACT_HELPER(UIMM, 0, 16);
408 /* Bit count */
409 EXTRACT_HELPER(NB, 11, 5);
410 /* Shift count */
411 EXTRACT_HELPER(SH, 11, 5);
412 /* Mask start */
413 EXTRACT_HELPER(MB, 6, 5);
414 /* Mask end */
415 EXTRACT_HELPER(ME, 1, 5);
416 /* Trap operand */
417 EXTRACT_HELPER(TO, 21, 5);
418
419 EXTRACT_HELPER(CRM, 12, 8);
420 EXTRACT_HELPER(FM, 17, 8);
421 EXTRACT_HELPER(SR, 16, 4);
422 EXTRACT_HELPER(FPIMM, 12, 4);
423
424 /*** Jump target decoding ***/
425 /* Displacement */
426 EXTRACT_SHELPER(d, 0, 16);
427 /* Immediate address */
428 static always_inline target_ulong LI (uint32_t opcode)
429 {
430 return (opcode >> 0) & 0x03FFFFFC;
431 }
432
433 static always_inline uint32_t BD (uint32_t opcode)
434 {
435 return (opcode >> 0) & 0xFFFC;
436 }
437
438 EXTRACT_HELPER(BO, 21, 5);
439 EXTRACT_HELPER(BI, 16, 5);
440 /* Absolute/relative address */
441 EXTRACT_HELPER(AA, 1, 1);
442 /* Link */
443 EXTRACT_HELPER(LK, 0, 1);
444
445 /* Create a mask between <start> and <end> bits */
static always_inline target_ulong MASK (uint32_t start, uint32_t end)
{
    target_ulong ret;

    /* Fast paths for masks anchored at either register edge.  Note that
     * the "else" below deliberately binds across the #endif to whichever
     * if/else-if chain was compiled in, so the generic expression only
     * runs for interior masks. */
#if defined(TARGET_PPC64)
    if (likely(start == 0)) {
        ret = UINT64_MAX << (63 - end);
    } else if (likely(end == 63)) {
        ret = UINT64_MAX >> start;
    }
#else
    if (likely(start == 0)) {
        ret = UINT32_MAX << (31 - end);
    } else if (likely(end == 31)) {
        ret = UINT32_MAX >> start;
    }
#endif
    else {
        /* Ones from bit 'start' through bit 'end' in IBM (MSB=0) order. */
        ret = (((target_ulong)(-1ULL)) >> (start)) ^
            (((target_ulong)(-1ULL) >> (end)) >> 1);
        /* start > end means a wrap-around mask: the complement of
         * MASK(end + 1, start - 1). */
        if (unlikely(start > end))
            return ~ret;
    }

    return ret;
}
472
473 /*****************************************************************************/
474 /* PowerPC Instructions types definitions */
475 enum {
476 PPC_NONE = 0x0000000000000000ULL,
477 /* PowerPC base instructions set */
478 PPC_INSNS_BASE = 0x0000000000000001ULL,
479 /* integer operations instructions */
480 #define PPC_INTEGER PPC_INSNS_BASE
481 /* flow control instructions */
482 #define PPC_FLOW PPC_INSNS_BASE
483 /* virtual memory instructions */
484 #define PPC_MEM PPC_INSNS_BASE
485 /* ld/st with reservation instructions */
486 #define PPC_RES PPC_INSNS_BASE
487 /* spr/msr access instructions */
488 #define PPC_MISC PPC_INSNS_BASE
489 /* Deprecated instruction sets */
490 /* Original POWER instruction set */
491 PPC_POWER = 0x0000000000000002ULL,
492 /* POWER2 instruction set extension */
493 PPC_POWER2 = 0x0000000000000004ULL,
494 /* Power RTC support */
495 PPC_POWER_RTC = 0x0000000000000008ULL,
496 /* Power-to-PowerPC bridge (601) */
497 PPC_POWER_BR = 0x0000000000000010ULL,
498 /* 64 bits PowerPC instruction set */
499 PPC_64B = 0x0000000000000020ULL,
500 /* New 64 bits extensions (PowerPC 2.0x) */
501 PPC_64BX = 0x0000000000000040ULL,
502 /* 64 bits hypervisor extensions */
503 PPC_64H = 0x0000000000000080ULL,
504 /* New wait instruction (PowerPC 2.0x) */
505 PPC_WAIT = 0x0000000000000100ULL,
506 /* Time base mftb instruction */
507 PPC_MFTB = 0x0000000000000200ULL,
508
509 /* Fixed-point unit extensions */
510 /* PowerPC 602 specific */
511 PPC_602_SPEC = 0x0000000000000400ULL,
512 /* isel instruction */
513 PPC_ISEL = 0x0000000000000800ULL,
514 /* popcntb instruction */
515 PPC_POPCNTB = 0x0000000000001000ULL,
516 /* string load / store */
517 PPC_STRING = 0x0000000000002000ULL,
518
519 /* Floating-point unit extensions */
520 /* Optional floating point instructions */
521 PPC_FLOAT = 0x0000000000010000ULL,
522 /* New floating-point extensions (PowerPC 2.0x) */
523 PPC_FLOAT_EXT = 0x0000000000020000ULL,
524 PPC_FLOAT_FSQRT = 0x0000000000040000ULL,
525 PPC_FLOAT_FRES = 0x0000000000080000ULL,
526 PPC_FLOAT_FRSQRTE = 0x0000000000100000ULL,
527 PPC_FLOAT_FRSQRTES = 0x0000000000200000ULL,
528 PPC_FLOAT_FSEL = 0x0000000000400000ULL,
529 PPC_FLOAT_STFIWX = 0x0000000000800000ULL,
530
531 /* Vector/SIMD extensions */
532 /* Altivec support */
533 PPC_ALTIVEC = 0x0000000001000000ULL,
534 /* PowerPC 2.03 SPE extension */
535 PPC_SPE = 0x0000000002000000ULL,
536 /* PowerPC 2.03 SPE floating-point extension */
537 PPC_SPEFPU = 0x0000000004000000ULL,
538
539 /* Optional memory control instructions */
540 PPC_MEM_TLBIA = 0x0000000010000000ULL,
541 PPC_MEM_TLBIE = 0x0000000020000000ULL,
542 PPC_MEM_TLBSYNC = 0x0000000040000000ULL,
543 /* sync instruction */
544 PPC_MEM_SYNC = 0x0000000080000000ULL,
545 /* eieio instruction */
546 PPC_MEM_EIEIO = 0x0000000100000000ULL,
547
548 /* Cache control instructions */
549 PPC_CACHE = 0x0000000200000000ULL,
550 /* icbi instruction */
551 PPC_CACHE_ICBI = 0x0000000400000000ULL,
552 /* dcbz instruction with fixed cache line size */
553 PPC_CACHE_DCBZ = 0x0000000800000000ULL,
554 /* dcbz instruction with tunable cache line size */
555 PPC_CACHE_DCBZT = 0x0000001000000000ULL,
556 /* dcba instruction */
557 PPC_CACHE_DCBA = 0x0000002000000000ULL,
558 /* Freescale cache locking instructions */
559 PPC_CACHE_LOCK = 0x0000004000000000ULL,
560
561 /* MMU related extensions */
562 /* external control instructions */
563 PPC_EXTERN = 0x0000010000000000ULL,
564 /* segment register access instructions */
565 PPC_SEGMENT = 0x0000020000000000ULL,
566 /* PowerPC 6xx TLB management instructions */
567 PPC_6xx_TLB = 0x0000040000000000ULL,
568 /* PowerPC 74xx TLB management instructions */
569 PPC_74xx_TLB = 0x0000080000000000ULL,
570 /* PowerPC 40x TLB management instructions */
571 PPC_40x_TLB = 0x0000100000000000ULL,
572 /* segment register access instructions for PowerPC 64 "bridge" */
573 PPC_SEGMENT_64B = 0x0000200000000000ULL,
574 /* SLB management */
575 PPC_SLBI = 0x0000400000000000ULL,
576
577 /* Embedded PowerPC dedicated instructions */
578 PPC_WRTEE = 0x0001000000000000ULL,
579 /* PowerPC 40x exception model */
580 PPC_40x_EXCP = 0x0002000000000000ULL,
581 /* PowerPC 405 Mac instructions */
582 PPC_405_MAC = 0x0004000000000000ULL,
583 /* PowerPC 440 specific instructions */
584 PPC_440_SPEC = 0x0008000000000000ULL,
585 /* BookE (embedded) PowerPC specification */
586 PPC_BOOKE = 0x0010000000000000ULL,
587 /* mfapidi instruction */
588 PPC_MFAPIDI = 0x0020000000000000ULL,
589 /* tlbiva instruction */
590 PPC_TLBIVA = 0x0040000000000000ULL,
591 /* tlbivax instruction */
592 PPC_TLBIVAX = 0x0080000000000000ULL,
593 /* PowerPC 4xx dedicated instructions */
594 PPC_4xx_COMMON = 0x0100000000000000ULL,
595 /* PowerPC 40x ibct instructions */
596 PPC_40x_ICBT = 0x0200000000000000ULL,
597 /* rfmci is not implemented in all BookE PowerPC */
598 PPC_RFMCI = 0x0400000000000000ULL,
599 /* rfdi instruction */
600 PPC_RFDI = 0x0800000000000000ULL,
601 /* DCR accesses */
602 PPC_DCR = 0x1000000000000000ULL,
603 /* DCR extended accesse */
604 PPC_DCRX = 0x2000000000000000ULL,
605 /* user-mode DCR access, implemented in PowerPC 460 */
606 PPC_DCRUX = 0x4000000000000000ULL,
607 };
608
609 /*****************************************************************************/
610 /* PowerPC instructions table */
611 #if HOST_LONG_BITS == 64
612 #define OPC_ALIGN 8
613 #else
614 #define OPC_ALIGN 4
615 #endif
616 #if defined(__APPLE__)
617 #define OPCODES_SECTION \
618 __attribute__ ((section("__TEXT,__opcodes"), unused, aligned (OPC_ALIGN) ))
619 #else
620 #define OPCODES_SECTION \
621 __attribute__ ((section(".opcodes"), unused, aligned (OPC_ALIGN) ))
622 #endif
623
624 #if defined(DO_PPC_STATISTICS)
625 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
626 OPCODES_SECTION opcode_t opc_##name = { \
627 .opc1 = op1, \
628 .opc2 = op2, \
629 .opc3 = op3, \
630 .pad = { 0, }, \
631 .handler = { \
632 .inval = invl, \
633 .type = _typ, \
634 .handler = &gen_##name, \
635 .oname = stringify(name), \
636 }, \
637 .oname = stringify(name), \
638 }
639 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
640 OPCODES_SECTION opcode_t opc_##name = { \
641 .opc1 = op1, \
642 .opc2 = op2, \
643 .opc3 = op3, \
644 .pad = { 0, }, \
645 .handler = { \
646 .inval = invl, \
647 .type = _typ, \
648 .handler = &gen_##name, \
649 .oname = onam, \
650 }, \
651 .oname = onam, \
652 }
653 #else
654 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
655 OPCODES_SECTION opcode_t opc_##name = { \
656 .opc1 = op1, \
657 .opc2 = op2, \
658 .opc3 = op3, \
659 .pad = { 0, }, \
660 .handler = { \
661 .inval = invl, \
662 .type = _typ, \
663 .handler = &gen_##name, \
664 }, \
665 .oname = stringify(name), \
666 }
667 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
668 OPCODES_SECTION opcode_t opc_##name = { \
669 .opc1 = op1, \
670 .opc2 = op2, \
671 .opc3 = op3, \
672 .pad = { 0, }, \
673 .handler = { \
674 .inval = invl, \
675 .type = _typ, \
676 .handler = &gen_##name, \
677 }, \
678 .oname = onam, \
679 }
680 #endif
681
682 #define GEN_OPCODE_MARK(name) \
683 OPCODES_SECTION opcode_t opc_##name = { \
684 .opc1 = 0xFF, \
685 .opc2 = 0xFF, \
686 .opc3 = 0xFF, \
687 .pad = { 0, }, \
688 .handler = { \
689 .inval = 0x00000000, \
690 .type = 0x00, \
691 .handler = NULL, \
692 }, \
693 .oname = stringify(name), \
694 }
695
696 /* Start opcode list */
697 GEN_OPCODE_MARK(start);
698
699 /* Invalid instruction */
/* Catch-all handler for undecodable opcodes: raise a program interrupt
 * flagged as an invalid instruction. */
GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE)
{
    GEN_EXCP_INVAL(ctx);
}
704
705 static opc_handler_t invalid_handler = {
706 .inval = 0xFFFFFFFF,
707 .type = PPC_NONE,
708 .handler = gen_invalid,
709 };
710
711 /*** Integer comparison ***/
712
713 static always_inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
714 {
715 int l1, l2, l3;
716
717 tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_xer);
718 tcg_gen_shri_i32(cpu_crf[crf], cpu_crf[crf], XER_SO);
719 tcg_gen_andi_i32(cpu_crf[crf], cpu_crf[crf], 1);
720
721 l1 = gen_new_label();
722 l2 = gen_new_label();
723 l3 = gen_new_label();
724 if (s) {
725 tcg_gen_brcond_tl(TCG_COND_LT, arg0, arg1, l1);
726 tcg_gen_brcond_tl(TCG_COND_GT, arg0, arg1, l2);
727 } else {
728 tcg_gen_brcond_tl(TCG_COND_LTU, arg0, arg1, l1);
729 tcg_gen_brcond_tl(TCG_COND_GTU, arg0, arg1, l2);
730 }
731 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_EQ);
732 tcg_gen_br(l3);
733 gen_set_label(l1);
734 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_LT);
735 tcg_gen_br(l3);
736 gen_set_label(l2);
737 tcg_gen_ori_i32(cpu_crf[crf], cpu_crf[crf], 1 << CRF_GT);
738 gen_set_label(l3);
739 }
740
741 static always_inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
742 {
743 TCGv t0 = tcg_const_local_tl(arg1);
744 gen_op_cmp(arg0, t0, s, crf);
745 tcg_temp_free(t0);
746 }
747
748 #if defined(TARGET_PPC64)
749 static always_inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
750 {
751 TCGv t0, t1;
752 t0 = tcg_temp_local_new();
753 t1 = tcg_temp_local_new();
754 if (s) {
755 tcg_gen_ext32s_tl(t0, arg0);
756 tcg_gen_ext32s_tl(t1, arg1);
757 } else {
758 tcg_gen_ext32u_tl(t0, arg0);
759 tcg_gen_ext32u_tl(t1, arg1);
760 }
761 gen_op_cmp(t0, t1, s, crf);
762 tcg_temp_free(t1);
763 tcg_temp_free(t0);
764 }
765
766 static always_inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
767 {
768 TCGv t0 = tcg_const_local_tl(arg1);
769 gen_op_cmp32(arg0, t0, s, crf);
770 tcg_temp_free(t0);
771 }
772 #endif
773
/* CR0 update for the Rc=1 ("dot") instruction forms: signed compare of
 * reg against zero into CR field 0 (gen_op_cmp also copies XER[SO]).
 * In 32-bit mode on a 64-bit CPU only the low word is significant. */
static always_inline void gen_set_Rc0 (DisasContext *ctx, TCGv reg)
{
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode))
        gen_op_cmpi32(reg, 0, 1, 0);
    else
#endif
        gen_op_cmpi(reg, 0, 1, 0);
}
783
784 /* cmp */
GEN_HANDLER(cmp, 0x1F, 0x00, 0x00, 0x00400000, PPC_INTEGER)
{
    /* Signed register compare into crfD.  On 64-bit CPUs the L field
     * (opcode bit 0x00200000) selects a full 64-bit compare; without it,
     * or outside 64-bit mode, only the low 32 bits are compared. */
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     1, crfD(ctx->opcode));
    else
#endif
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   1, crfD(ctx->opcode));
}
796
797 /* cmpi */
GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
{
    /* Signed compare of rA against the sign-extended 16-bit immediate;
     * same 32/64-bit selection as cmp. */
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                      1, crfD(ctx->opcode));
    else
#endif
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], SIMM(ctx->opcode),
                    1, crfD(ctx->opcode));
}
809
810 /* cmpl */
GEN_HANDLER(cmpl, 0x1F, 0x00, 0x01, 0x00400000, PPC_INTEGER)
{
    /* Unsigned (logical) register compare into crfD; same 32/64-bit
     * selection as cmp. */
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
        gen_op_cmp32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                     0, crfD(ctx->opcode));
    else
#endif
        gen_op_cmp(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                   0, crfD(ctx->opcode));
}
822
823 /* cmpli */
GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
{
    /* Unsigned compare of rA against the zero-extended 16-bit immediate;
     * same 32/64-bit selection as cmp. */
#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode && (ctx->opcode & 0x00200000)))
        gen_op_cmpi32(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                      0, crfD(ctx->opcode));
    else
#endif
        gen_op_cmpi(cpu_gpr[rA(ctx->opcode)], UIMM(ctx->opcode),
                    0, crfD(ctx->opcode));
}
835
836 /* isel (PowerPC 2.03 specification) */
837 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL)
838 {
839 int l1, l2;
840 uint32_t bi = rC(ctx->opcode);
841 uint32_t mask;
842 TCGv_i32 t0;
843
844 l1 = gen_new_label();
845 l2 = gen_new_label();
846
847 mask = 1 << (3 - (bi & 0x03));
848 t0 = tcg_temp_new_i32();
849 tcg_gen_andi_i32(t0, cpu_crf[bi >> 2], mask);
850 tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
851 if (rA(ctx->opcode) == 0)
852 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0);
853 else
854 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
855 tcg_gen_br(l2);
856 gen_set_label(l1);
857 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
858 gen_set_label(l2);
859 tcg_temp_free_i32(t0);
860 }
861
862 /*** Integer arithmetic ***/
863
/* Emit the signed-overflow check for an add (sub == 0, arg0 = arg1 + arg2)
 * or subtract (sub != 0; arg0 appears to be arg2 - arg1 — TODO confirm
 * against the subf callers).  Sets XER[OV] and sticky XER[SO] on
 * overflow, clears OV otherwise. */
static always_inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0, TCGv arg1, TCGv arg2, int sub)
{
    int l1;
    TCGv t0;

    l1 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    t0 = tcg_temp_local_new();
    /* Sign of (result XOR arg1): did the result's sign diverge from arg1? */
    tcg_gen_xor_tl(t0, arg0, arg1);
#if defined(TARGET_PPC64)
    if (!ctx->sf_mode)
        /* 32-bit mode: the relevant sign bit is bit 31, not bit 63. */
        tcg_gen_ext32s_tl(t0, t0);
#endif
    if (sub)
        tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
    else
        tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
    /* Second condition: compare the operands' signs against each other. */
    tcg_gen_xor_tl(t0, arg1, arg2);
#if defined(TARGET_PPC64)
    if (!ctx->sf_mode)
        tcg_gen_ext32s_tl(t0, t0);
#endif
    if (sub)
        tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
    else
        tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0, l1);
    /* Both conditions met: signed overflow occurred. */
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    gen_set_label(l1);
    tcg_temp_free(t0);
}
895
/* Emit the carry computation for XER[CA].  For sub == 0 the callers pass
 * (result, operand): unsigned carry occurred iff result < operand.  For
 * sub != 0, CA is set unless arg1 > arg2 unsigned (i.e. no borrow for a
 * subtract-style operation — confirm against the subf callers).  The
 * caller must clear CA beforehand; this only ever ORs the bit in. */
static always_inline void gen_op_arith_compute_ca(DisasContext *ctx, TCGv arg1, TCGv arg2, int sub)
{
    int l1 = gen_new_label();

#if defined(TARGET_PPC64)
    if (!(ctx->sf_mode)) {
        /* 32-bit mode: compare the zero-extended low words only. */
        TCGv t0, t1;
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();

        tcg_gen_ext32u_tl(t0, arg1);
        tcg_gen_ext32u_tl(t1, arg2);
        if (sub) {
            tcg_gen_brcond_tl(TCG_COND_GTU, t0, t1, l1);
        } else {
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
        }
        tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
        gen_set_label(l1);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
    } else
#endif
    {
        if (sub) {
            tcg_gen_brcond_tl(TCG_COND_GTU, arg1, arg2, l1);
        } else {
            tcg_gen_brcond_tl(TCG_COND_GEU, arg1, arg2, l1);
        }
        tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
        gen_set_label(l1);
    }
}
929
930 /* Common add function */
/* Common add function:
 * ret = arg1 + arg2 (+ XER[CA] when add_ca).  compute_ca / compute_ov
 * request XER carry / overflow tracking, and Rc=1 in the opcode also
 * updates CR0 from the result. */
static always_inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
                                           int add_ca, int compute_ca, int compute_ov)
{
    TCGv t0, t1;

    /* The flag helpers re-read arg1/arg2 after the sum is formed, so if
     * ret aliases an input and flags are wanted, accumulate into a temp
     * instead of clobbering the input early. */
    if ((!compute_ca && !compute_ov) ||
        (!TCGV_EQUAL(ret,arg1) && !TCGV_EQUAL(ret, arg2))) {
        t0 = ret;
    } else {
        t0 = tcg_temp_local_new();
    }

    if (add_ca) {
        /* Extract the incoming carry as 0/1 before CA is cleared below. */
        t1 = tcg_temp_local_new();
        tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
        tcg_gen_shri_tl(t1, t1, XER_CA);
    }

    if (compute_ca && compute_ov) {
        /* Start with XER CA and OV disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
    } else if (compute_ca) {
        /* Start with XER CA disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    } else if (compute_ov) {
        /* Start with XER OV disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    }

    tcg_gen_add_tl(t0, arg1, arg2);

    if (compute_ca) {
        gen_op_arith_compute_ca(ctx, t0, arg1, 0);
    }
    if (add_ca) {
        /* The carry-in addition can carry as well, so check again; CA is
         * only ever ORed in, so either addition carrying sets it. */
        tcg_gen_add_tl(t0, t0, t1);
        gen_op_arith_compute_ca(ctx, t0, t1, 0);
        tcg_temp_free(t1);
    }
    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, t0);

    /* Copy back and release the temp if we did not sum into ret directly. */
    if (!TCGV_EQUAL(t0, ret)) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
982 /* Add functions with two operands */
983 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
984 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER) \
985 { \
986 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
987 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
988 add_ca, compute_ca, compute_ov); \
989 }
990 /* Add functions with one operand and one immediate */
991 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
992 add_ca, compute_ca, compute_ov) \
993 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER) \
994 { \
995 TCGv t0 = tcg_const_local_tl(const_val); \
996 gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
997 cpu_gpr[rA(ctx->opcode)], t0, \
998 add_ca, compute_ca, compute_ov); \
999 tcg_temp_free(t0); \
1000 }
1001
1002 /* add add. addo addo. */
1003 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
1004 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
1005 /* addc addc. addco addco. */
1006 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
1007 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
1008 /* adde adde. addeo addeo. */
1009 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
1010 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
1011 /* addme addme. addmeo addmeo. */
1012 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
1013 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
1014 /* addze addze. addzeo addzeo.*/
1015 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
1016 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
1017 /* addi */
1018 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1019 {
1020 target_long simm = SIMM(ctx->opcode);
1021
1022 if (rA(ctx->opcode) == 0) {
1023 /* li case */
1024 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm);
1025 } else {
1026 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm);
1027 }
1028 }
1029 /* addic addic.*/
/* Shared body of addic / addic.: ret = arg1 + SIMM with XER[CA] update;
 * compute_Rc0 selects the dot form's CR0 update. */
static always_inline void gen_op_addic (DisasContext *ctx, TCGv ret, TCGv arg1,
                                        int compute_Rc0)
{
    target_long simm = SIMM(ctx->opcode);

    /* Start with XER CA disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));

    if (likely(simm != 0)) {
        TCGv t0 = tcg_temp_local_new();
        tcg_gen_addi_tl(t0, arg1, simm);
        /* Carry iff the sum wrapped below the original operand. */
        gen_op_arith_compute_ca(ctx, t0, arg1, 0);
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    } else {
        /* Adding zero can never carry, so CA simply stays cleared. */
        tcg_gen_mov_tl(ret, arg1);
    }
    if (compute_Rc0) {
        gen_set_Rc0(ctx, ret);
    }
}
GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    /* addic: add immediate carrying, no CR0 update. */
    gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
}
GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    /* addic.: add immediate carrying and record CR0. */
    gen_op_addic(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
}
1059 /* addis */
1060 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1061 {
1062 target_long simm = SIMM(ctx->opcode);
1063
1064 if (rA(ctx->opcode) == 0) {
1065 /* lis case */
1066 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], simm << 16);
1067 } else {
1068 tcg_gen_addi_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], simm << 16);
1069 }
1070 }
1071
/* Shared generator for the 32-bit divide family (divw/divwu plus the
 * '.', 'o' and 'o.' variants).  PPC divides never trap: divide by zero
 * and (for signed) INT32_MIN / -1 yield a boundary result instead and
 * may set XER[OV]/XER[SO] when compute_ov is requested. */
static always_inline void gen_op_arith_divw (DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
                                             int sign, int compute_ov)
{
    int l1 = gen_new_label();   /* invalid-division path */
    int l2 = gen_new_label();   /* done */
    /* Local temps: they must stay live across the branches below */
    TCGv_i32 t0 = tcg_temp_local_new_i32();
    TCGv_i32 t1 = tcg_temp_local_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t1, 0, l1);
    if (sign) {
        int l3 = gen_new_label();
        /* INT32_MIN / -1 would also overflow the host division */
        tcg_gen_brcondi_i32(TCG_COND_NE, t1, -1, l3);
        tcg_gen_brcondi_i32(TCG_COND_EQ, t0, INT32_MIN, l1);
        gen_set_label(l3);
        tcg_gen_div_i32(t0, t0, t1);
    } else {
        tcg_gen_divu_i32(t0, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    }
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* Invalid division: signed returns 0 or -1 from the dividend's sign
     * bit, unsigned returns 0 (the architected result is undefined) */
    if (sign) {
        tcg_gen_sari_i32(t0, t0, 31);
    } else {
        tcg_gen_movi_i32(t0, 0);
    }
    if (compute_ov) {
        tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    }
    gen_set_label(l2);
    tcg_gen_extu_i32_tl(ret, t0);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}
/* Div functions */
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)                  \
{                                                                             \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divwu divwu. divwuo divwuo. */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw divw. divwo divwo. */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
#if defined(TARGET_PPC64)
/* 64-bit counterpart of gen_op_arith_divw: handles divide by zero and
 * INT64_MIN / -1 without trapping, optionally setting XER[OV]/XER[SO]. */
static always_inline void gen_op_arith_divd (DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
                                             int sign, int compute_ov)
{
    int l1 = gen_new_label();   /* invalid-division path */
    int l2 = gen_new_label();   /* done */

    tcg_gen_brcondi_i64(TCG_COND_EQ, arg2, 0, l1);
    if (sign) {
        int l3 = gen_new_label();
        /* INT64_MIN / -1 would also overflow the host division */
        tcg_gen_brcondi_i64(TCG_COND_NE, arg2, -1, l3);
        tcg_gen_brcondi_i64(TCG_COND_EQ, arg1, INT64_MIN, l1);
        gen_set_label(l3);
        tcg_gen_div_i64(ret, arg1, arg2);
    } else {
        tcg_gen_divu_i64(ret, arg1, arg2);
    }
    if (compute_ov) {
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    }
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* Invalid division: signed returns 0 or -1 from the dividend's sign
     * bit, unsigned returns 0 */
    if (sign) {
        tcg_gen_sari_i64(ret, arg1, 63);
    } else {
        tcg_gen_movi_i64(ret, 0);
    }
    if (compute_ov) {
        tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    }
    gen_set_label(l2);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)                      \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
#endif
1174
/* mulhw mulhw.: high 32 bits of the signed 32x32->64 product of rA and rB. */
GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER)
{
    TCGv_i64 t0, t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
#if defined(TARGET_PPC64)
    /* On 64-bit targets TCGv is TCGv_i64, so the _tl ops apply directly */
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
#else
    /* 32-bit target: widen to 64 bits, multiply, keep the high word */
    tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* mulhwu mulhwu.: high 32 bits of the unsigned 32x32->64 product. */
GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER)
{
    TCGv_i64 t0, t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
#if defined(TARGET_PPC64)
    tcg_gen_ext32u_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32u_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_shri_i64(cpu_gpr[rD(ctx->opcode)], t0, 32);
#else
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_shri_i64(t0, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
#endif
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
1223 /* mullw mullw. */
1224 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER)
1225 {
1226 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1227 cpu_gpr[rB(ctx->opcode)]);
1228 tcg_gen_ext32s_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rD(ctx->opcode)]);
1229 if (unlikely(Rc(ctx->opcode) != 0))
1230 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1231 }
/* mullwo mullwo.: mullw with overflow detection -- OV is set when the
 * full 64-bit product does not fit in a signed 32-bit word. */
GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER)
{
    int l1;
    TCGv_i64 t0, t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    l1 = gen_new_label();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
#if defined(TARGET_PPC64)
    tcg_gen_ext32s_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_i64(t1, cpu_gpr[rB(ctx->opcode)]);
#else
    tcg_gen_ext_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
#endif
    tcg_gen_mul_i64(t0, t0, t1);
#if defined(TARGET_PPC64)
    /* No overflow iff the product equals its own 32-bit sign-extension */
    tcg_gen_ext32s_i64(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, cpu_gpr[rD(ctx->opcode)], l1);
#else
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_gen_ext32s_i64(t1, t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
#endif
    tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    gen_set_label(l1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
1266 /* mulli */
1267 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1268 {
1269 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1270 SIMM(ctx->opcode));
1271 }
#if defined(TARGET_PPC64)
/* 64-bit multiplies whose high-part / overflow logic lives in C helpers. */
#define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)                      \
{                                                                             \
    gen_helper_##name (cpu_gpr[rD(ctx->opcode)],                              \
                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);   \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
}
/* mulhdu mulhdu. */
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00);
/* mulhd mulhd. */
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02);
/* mulld mulld.: low 64 bits of the product, inline TCG */
GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
}
/* mulldo mulldo. */
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17);
#endif
1296
/* neg neg. nego nego. */
/* Shared generator for neg/nego: ret = -arg1.  Negating the most negative
 * value (INT64_MIN in 64-bit mode, INT32_MIN otherwise) is the one case
 * that cannot be represented; it returns the operand unchanged and, for
 * the 'o' forms, sets XER[OV]/XER[SO]. */
static always_inline void gen_op_arith_neg (DisasContext *ctx, TCGv ret, TCGv arg1, int ov_check)
{
    int l1 = gen_new_label();   /* overflow path */
    int l2 = gen_new_label();   /* done */
    /* Local temp: must survive the branch to l1 */
    TCGv t0 = tcg_temp_local_new();
#if defined(TARGET_PPC64)
    if (ctx->sf_mode) {
        tcg_gen_mov_tl(t0, arg1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT64_MIN, l1);
    } else
#endif
    {
        tcg_gen_ext32s_tl(t0, arg1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t0, INT32_MIN, l1);
    }
    tcg_gen_neg_tl(ret, arg1);
    if (ov_check) {
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    }
    tcg_gen_br(l2);
    gen_set_label(l1);
    /* Overflow: result is the (sign-extended) operand itself */
    tcg_gen_mov_tl(ret, t0);
    if (ov_check) {
        tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
    }
    gen_set_label(l2);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, ret);
}
GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER)
{
    gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 0);
}
GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER)
{
    gen_op_arith_neg(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 1);
}
1336
/* Common subf function */
/* Shared generator for the subtract-from family: ret = arg2 - arg1,
 * optionally adding XER[CA] (add_ca, used by subfe and friends) and
 * optionally computing CA and/or OV.  Note the reversed operand order:
 * PPC subf subtracts rA from rB. */
static always_inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, TCGv arg2,
                                            int add_ca, int compute_ca, int compute_ov)
{
    TCGv t0, t1;

    /* When CA/OV must be computed, the original operand values have to
     * remain readable, so compute into a temp if ret aliases an input */
    if ((!compute_ca && !compute_ov) ||
        (!TCGV_EQUAL(ret, arg1) && !TCGV_EQUAL(ret, arg2))) {
        t0 = ret;
    } else {
        t0 = tcg_temp_local_new();
    }

    if (add_ca) {
        /* Extract the incoming carry bit into t1 (0 or 1) */
        t1 = tcg_temp_local_new();
        tcg_gen_andi_tl(t1, cpu_xer, (1 << XER_CA));
        tcg_gen_shri_tl(t1, t1, XER_CA);
    }

    if (compute_ca && compute_ov) {
        /* Start with XER CA and OV disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~((1 << XER_CA) | (1 << XER_OV)));
    } else if (compute_ca) {
        /* Start with XER CA disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    } else if (compute_ov) {
        /* Start with XER OV disabled, the most likely case */
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
    }

    if (add_ca) {
        /* subfe form: ret = ~arg1 + arg2 + CA, accumulating carry from
         * both additions */
        tcg_gen_not_tl(t0, arg1);
        tcg_gen_add_tl(t0, t0, arg2);
        gen_op_arith_compute_ca(ctx, t0, arg2, 0);
        tcg_gen_add_tl(t0, t0, t1);
        gen_op_arith_compute_ca(ctx, t0, t1, 0);
        tcg_temp_free(t1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
        if (compute_ca) {
            gen_op_arith_compute_ca(ctx, t0, arg2, 1);
        }
    }
    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }

    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, t0);

    if (!TCGV_EQUAL(t0, ret)) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Sub functions with Two operands functions */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER)                  \
{                                                                             \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      add_ca, compute_ca, compute_ov);                        \
}
/* Sub functions with one operand and one immediate */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                 add_ca, compute_ca, compute_ov)              \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER)                  \
{                                                                             \
    TCGv t0 = tcg_const_local_tl(const_val);                                  \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], t0,                           \
                      add_ca, compute_ca, compute_ov);                        \
    tcg_temp_free(t0);                                                        \
}
/* subf subf. subfo subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc subfc. subfco subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe subfe. subfeo subfeo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme subfme. subfmeo subfmeo. */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze subfze. subfzeo subfzeo.*/
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
/* subfic */
/* subfic: rD = SIMM - rA, always updating XER[CA]. */
GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    /* Start with XER CA disabled, the most likely case */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    /* Local temps: gen_op_arith_compute_ca may emit branches */
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_const_local_tl(SIMM(ctx->opcode));
    tcg_gen_sub_tl(t0, t1, cpu_gpr[rA(ctx->opcode)]);
    gen_op_arith_compute_ca(ctx, t0, t1, 1);
    tcg_temp_free(t1);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0);
    tcg_temp_free(t0);
}
1439
/*** Integer logical ***/
/* Two-operand logical op: rA = rS <op> rB, optional CR0 update ('.') */
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)                          \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* One-operand logical op: rA = <op>(rS), optional CR0 update ('.') */
#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)                          \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}
1457
/* and & and. */
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
/* andc & andc.: rA = rS & ~rB */
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
/* andi.: record form only -- CR0 is always set */
GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode));
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* andis.: record form only, immediate shifted left 16 */
GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], UIMM(ctx->opcode) << 16);
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* cntlzw: count leading zeros (helper-based) */
GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER)
{
    gen_helper_cntlzw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* eqv & eqv.: rA = ~(rS ^ rB) */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor. */
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
/* or & or. */
/* "or rx,rx,rx" with identical registers is a no-op data-wise; on 64-bit
 * implementations certain register numbers are architected as thread
 * priority hints, stored here in SPR_PPR bits 11:13 (i.e. bits 50-52). */
GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb)
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        else
            /* mr: plain register move */
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        if (unlikely(Rc(ctx->opcode) != 0))
            gen_set_Rc0(ctx, cpu_gpr[ra]);
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else {
        /* All three registers equal, no record bit: priority hint nop */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (ctx->supervisor > 0) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (ctx->supervisor > 0) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (ctx->supervisor > 0) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->supervisor > 1) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            /* nop */
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, spr[SPR_PPR]));
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, spr[SPR_PPR]));
            tcg_temp_free(t0);
        }
#endif
    }
}
/* orc & orc.: rA = rS | ~rB */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
1569 /* xor & xor. */
1570 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER)
1571 {
1572 /* Optimisation for "set to zero" case */
1573 if (rS(ctx->opcode) != rB(ctx->opcode))
1574 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1575 else
1576 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
1577 if (unlikely(Rc(ctx->opcode) != 0))
1578 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1579 }
1580 /* ori */
1581 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1582 {
1583 target_ulong uimm = UIMM(ctx->opcode);
1584
1585 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1586 /* NOP */
1587 /* XXX: should handle special NOPs for POWER series */
1588 return;
1589 }
1590 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1591 }
1592 /* oris */
1593 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1594 {
1595 target_ulong uimm = UIMM(ctx->opcode);
1596
1597 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1598 /* NOP */
1599 return;
1600 }
1601 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1602 }
1603 /* xori */
1604 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1605 {
1606 target_ulong uimm = UIMM(ctx->opcode);
1607
1608 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1609 /* NOP */
1610 return;
1611 }
1612 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
1613 }
1614 /* xoris */
1615 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1616 {
1617 target_ulong uimm = UIMM(ctx->opcode);
1618
1619 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1620 /* NOP */
1621 return;
1622 }
1623 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm << 16);
1624 }
/* popcntb : PowerPC 2.03 specification */
/* Per-byte population count, helper-based; the _64 variant covers all
 * eight bytes when running in 64-bit (sf) mode. */
GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB)
{
#if defined(TARGET_PPC64)
    if (ctx->sf_mode)
        gen_helper_popcntb_64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    else
#endif
        gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
1635
#if defined(TARGET_PPC64)
/* extsw & extsw. */
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
/* cntlzd: 64-bit count leading zeros (helper-based) */
GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B)
{
    gen_helper_cntlzd(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#endif
1647
/*** Integer rotate ***/
/* rlwimi & rlwimi. */
/* Rotate-left-word-immediate-then-mask-insert: the rotated rS is merged
 * into rA under MASK(mb, me).  On 64-bit targets the 32-bit mask indices
 * are shifted up by 32 because MASK() numbers bits over 64 positions. */
GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    uint32_t mb, me, sh;

    mb = MB(ctx->opcode);
    me = ME(ctx->opcode);
    sh = SH(ctx->opcode);
    if (likely(sh == 0 && mb == 0 && me == 31)) {
        /* Full-word insert with no rotation: just the low word of rS */
        tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    } else {
        target_ulong mask;
        TCGv t1;
        TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
        /* 32-bit rotate must be done in 32-bit arithmetic */
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(t2, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_rotli_i32(t2, t2, sh);
        tcg_gen_extu_i32_i64(t0, t2);
        tcg_temp_free_i32(t2);
#else
        tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
#endif
#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
        t1 = tcg_temp_new();
        /* rA = (rotated & mask) | (rA & ~mask) */
        tcg_gen_andi_tl(t0, t0, mask);
        tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
        tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* rlwinm & rlwinm. */
/* Rotate-left-word-immediate-then-AND-with-mask.  The two fast paths
 * recognise the canonical slwi (shift left) and srwi (shift right)
 * encodings before falling back to rotate + mask. */
GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    uint32_t mb, me, sh;

    sh = SH(ctx->opcode);
    mb = MB(ctx->opcode);
    me = ME(ctx->opcode);

    if (likely(mb == 0 && me == (31 - sh))) {
        /* slwi encoding: mask keeps exactly the bits the shift leaves */
        if (likely(sh == 0)) {
            tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
        } else {
            TCGv t0 = tcg_temp_new();
            tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
            tcg_gen_shli_tl(t0, t0, sh);
            tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
            tcg_temp_free(t0);
        }
    } else if (likely(sh != 0 && me == 31 && sh == (32 - mb))) {
        /* srwi encoding: rotate left by 32-mb == shift right by mb */
        TCGv t0 = tcg_temp_new();
        tcg_gen_ext32u_tl(t0, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_shri_tl(t0, t0, mb);
        tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], t0);
        tcg_temp_free(t0);
    } else {
        /* General case: 32-bit rotate then mask */
        TCGv t0 = tcg_temp_new();
#if defined(TARGET_PPC64)
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_rotli_i32(t1, t1, sh);
        tcg_gen_extu_i32_i64(t0, t1);
        tcg_temp_free_i32(t1);
#else
        tcg_gen_rotli_i32(t0, cpu_gpr[rS(ctx->opcode)], sh);
#endif
#if defined(TARGET_PPC64)
        /* MASK() numbers bits over 64 positions on 64-bit targets */
        mb += 32;
        me += 32;
#endif
        tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
        tcg_temp_free(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* rlwnm & rlwnm. */
/* Rotate-left-word-then-AND-with-mask, rotate count from the low 5 bits
 * of rB (the 0x1f mask below). */
GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    uint32_t mb, me;
    TCGv t0;
#if defined(TARGET_PPC64)
    TCGv_i32 t1, t2;
#endif

    mb = MB(ctx->opcode);
    me = ME(ctx->opcode);
    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1f);
#if defined(TARGET_PPC64)
    /* The rotate is a 32-bit operation even on 64-bit targets */
    t1 = tcg_temp_new_i32();
    t2 = tcg_temp_new_i32();
    tcg_gen_trunc_i64_i32(t1, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_trunc_i64_i32(t2, t0);
    tcg_gen_rotl_i32(t1, t1, t2);
    tcg_gen_extu_i32_i64(t0, t1);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
#else
    tcg_gen_rotl_i32(t0, cpu_gpr[rS(ctx->opcode)], t0);
#endif
    if (unlikely(mb != 0 || me != 31)) {
#if defined(TARGET_PPC64)
        /* MASK() numbers bits over 64 positions on 64-bit targets */
        mb += 32;
        me += 32;
#endif
        tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
    } else {
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    }
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
1771
#if defined(TARGET_PPC64)
/* The 64-bit rotate opcodes split one operand bit across the opc2 field;
 * these macros register one handler per value of the split bit(s) and
 * forward the reconstructed bit(s) to the gen_* worker. */
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
             PPC_64B)                                                         \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
             PPC_64B)                                                         \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
             PPC_64B)                                                         \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
             PPC_64B)                                                         \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}
1803
/* Common worker for the 64-bit rotate-immediate family: rotate rS left
 * by sh, AND with MASK(mb, me).  The fast paths recognise the canonical
 * sldi/srdi encodings. */
static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb,
                                      uint32_t me, uint32_t sh)
{
    if (likely(sh != 0 && mb == 0 && me == (63 - sh))) {
        /* sldi encoding */
        tcg_gen_shli_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
    } else if (likely(sh != 0 && me == 63 && sh == (64 - mb))) {
        /* srdi encoding */
        tcg_gen_shri_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], mb);
    } else {
        TCGv t0 = tcg_temp_new();
        tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
        if (likely(mb == 0 && me == 63)) {
            tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
        } else {
            tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
        }
        tcg_temp_free(t0);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* rldicl - rldicl. */
/* shn/mbn are the split high bits of the 6-bit sh/mb fields (MD-form) */
static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63, sh);
}
GEN_PPC64_R4(rldicl, 0x1E, 0x00);
/* rldicr - rldicr. */
static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn)
{
    uint32_t sh, me;

    sh = SH(ctx->opcode) | (shn << 5);
    /* MD-form keeps mb and me in the same instruction field, so MB() is
     * the correct extractor here even though the operand is "me" */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldinm(ctx, 0, me, sh);
}
GEN_PPC64_R4(rldicr, 0x1E, 0x02);
/* rldic - rldic. */
static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63 - sh, sh);
}
GEN_PPC64_R4(rldic, 0x1E, 0x04);
1854
1855 static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb,
1856 uint32_t me)
1857 {
1858 TCGv t0;
1859
1860 mb = MB(ctx->opcode);
1861 me = ME(ctx->opcode);
1862 t0 = tcg_temp_new();
1863 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
1864 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
1865 if (unlikely(mb != 0 || me != 63)) {
1866 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], t0, MASK(mb, me));
1867 } else {
1868 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
1869 }
1870 tcg_temp_free(t0);
1871 if (unlikely(Rc(ctx->opcode) != 0))
1872 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
1873 }
1874
/* rldcl - rldcl. */
/* mbn is the split high bit of the 6-bit mb field (MDS-form) */
static always_inline void gen_rldcl (DisasContext *ctx, int mbn)
{
    uint32_t mb;

    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldnm(ctx, mb, 63);
}
GEN_PPC64_R2(rldcl, 0x1E, 0x08);
/* rldcr - rldcr. */
static always_inline void gen_rldcr (DisasContext *ctx, int men)
{
    uint32_t me;

    /* MDS-form keeps mb and me in the same instruction field, so MB()
     * is the correct extractor even though the operand is "me" */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldnm(ctx, 0, me);
}
GEN_PPC64_R2(rldcr, 0x1E, 0x09);
/* rldimi - rldimi. */
/* Rotate-left-doubleword-immediate-then-mask-insert: the rotated rS is
 * merged into rA under MASK(mb, 63 - sh). */
static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb, me;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    me = 63 - sh;
    if (unlikely(sh == 0 && mb == 0)) {
        /* Full-width insert with no rotation: plain move */
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    } else {
        TCGv t0, t1;
        target_ulong mask;

        t0 = tcg_temp_new();
        tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
        t1 = tcg_temp_new();
        mask = MASK(mb, me);
        /* rA = (rotated & mask) | (rA & ~mask) */
        tcg_gen_andi_tl(t0, t0, mask);
        tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], ~mask);
        tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
#endif
1922
/*** Integer shift ***/
/* slw & slw. */
/* Shift-left-word: the shift amount is the low 6 bits of rB; amounts of
 * 32..63 architecturally produce 0, hence the range branch below. */
GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER)
{
    TCGv t0;
    int l1, l2;
    l1 = gen_new_label();
    l2 = gen_new_label();

    /* Local temp: t0 is read after the branch */
    t0 = tcg_temp_local_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
    /* Zero-extend the 32-bit result (matters on 64-bit targets) */
    tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l2);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sraw & sraw. */
/* Shift-right-algebraic-word; CA handling lives in the C helper. */
GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER)
{
    gen_helper_sraw(cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srawi & srawi. */
/* Shift-right-algebraic-word-immediate.  XER[CA] is set only when the
 * (32-bit) source is negative AND nonzero bits are shifted out. */
GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER)
{
    int sh = SH(ctx->opcode);
    if (sh != 0) {
        int l1, l2;
        TCGv t0;
        l1 = gen_new_label();   /* CA must be cleared */
        l2 = gen_new_label();   /* CA handling done */
        t0 = tcg_temp_local_new();
        tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
        /* Non-negative source: CA is always 0 */
        tcg_gen_brcondi_tl(TCG_COND_GE, t0, 0, l1);
        /* Negative source: CA = (any bit shifted out nonzero) */
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
        tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
        tcg_gen_br(l2);
        gen_set_label(l1);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
        gen_set_label(l2);
        tcg_gen_ext32s_tl(t0, cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], t0, sh);
        tcg_temp_free(t0);
    } else {
        /* sh == 0: plain move, CA cleared */
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srw & srw. */
/* Shift-right-word: the shift amount is the low 6 bits of rB; amounts
 * of 32..63 architecturally produce 0. */
GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER)
{
    TCGv t0, t1;
    int l1, l2;
    l1 = gen_new_label();
    l2 = gen_new_label();

    /* Local temp: t0 is read after the branch */
    t0 = tcg_temp_local_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x20, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    t1 = tcg_temp_new();
    /* Zero-extend first so the logical shift brings in zeros */
    tcg_gen_ext32u_tl(t1, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t1, t0);
    tcg_temp_free(t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
#if defined(TARGET_PPC64)
/* sld & sld. */
/* Shift-left-doubleword: the shift amount is the low 7 bits of rB;
 * amounts of 64..127 architecturally produce 0. */
GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B)
{
    TCGv t0;
    int l1, l2;
    l1 = gen_new_label();
    l2 = gen_new_label();

    /* Local temp: t0 is read after the branch */
    t0 = tcg_temp_local_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
    tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
    gen_set_label(l2);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* srad & srad. */
/* Shift-right-algebraic-doubleword; CA handling lives in the C helper. */
GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B)
{
    gen_helper_srad(cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
/* sradi & sradi. */
/* Shift-right-algebraic-doubleword-immediate; n supplies the split high
 * bit of the 6-bit shift field.  XER[CA] is set only when the source is
 * negative AND nonzero bits are shifted out. */
static always_inline void gen_sradi (DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    if (sh != 0) {
        int l1, l2;
        TCGv t0;
        l1 = gen_new_label();   /* CA must be cleared */
        l2 = gen_new_label();   /* CA handling done */
        t0 = tcg_temp_local_new();
        /* Non-negative source: CA is always 0 */
        tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
        /* Negative source: CA = (any bit shifted out nonzero) */
        tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], (1ULL << sh) - 1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
        tcg_gen_ori_tl(cpu_xer, cpu_xer, 1 << XER_CA);
        tcg_gen_br(l2);
        gen_set_label(l1);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
        gen_set_label(l2);
        tcg_temp_free(t0);
        tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
    } else {
        /* sh == 0: plain move, CA cleared */
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
        tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_CA));
    }
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}
GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B)
{
    gen_sradi(ctx, 0);
}
GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B)
{
    gen_sradi(ctx, 1);
}
2069 /* srd & srd. */
2070 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B)
2071 {
2072 TCGv t0;
2073 int l1, l2;
2074 l1 = gen_new_label();
2075 l2 = gen_new_label();
2076
2077 t0 = tcg_temp_local_new();
2078 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x7f);
2079 tcg_gen_brcondi_tl(TCG_COND_LT, t0, 0x40, l1);
2080 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2081 tcg_gen_br(l2);
2082 gen_set_label(l1);
2083 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t0);
2084 gen_set_label(l2);
2085 tcg_temp_free(t0);
2086 if (unlikely(Rc(ctx->opcode) != 0))
2087 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2088 }
2089 #endif
2090
/*** Floating-Point arithmetic ***/
/* 3-source FP op (fA, fC, fB -> fD).  "isfloat" adds a round-to-single
 * step for the single-precision variant; "set_fprf" selects whether
 * FPSCR[FPRF] is updated. */
#define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type)           \
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type)                        \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);                     \
    tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rC(ctx->opcode)]);                     \
    tcg_gen_mov_i64(cpu_FT[2], cpu_fpr[rB(ctx->opcode)]);                     \
    gen_reset_fpstatus();                                                     \
    gen_op_f##op();                                                           \
    if (isfloat) {                                                            \
        gen_op_frsp();                                                        \
    }                                                                         \
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);                     \
    gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0);                         \
}

/* Emit both the double (0x3F) and single (0x3B, rounded) variants. */
#define GEN_FLOAT_ACB(name, op2, set_fprf, type)                              \
_GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type);                     \
_GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);

/* 2-source FP op (fA, fB -> fD). */
#define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type)     \
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)                             \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);                     \
    tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);                     \
    gen_reset_fpstatus();                                                     \
    gen_op_f##op();                                                           \
    if (isfloat) {                                                            \
        gen_op_frsp();                                                        \
    }                                                                         \
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);                     \
    gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0);                         \
}
#define GEN_FLOAT_AB(name, op2, inval, set_fprf, type)                        \
_GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type);               \
_GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

/* 2-source FP op taking fA and fC (e.g. fmul). */
#define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type)     \
GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type)                             \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);                     \
    tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rC(ctx->opcode)]);                     \
    gen_reset_fpstatus();                                                     \
    gen_op_f##op();                                                           \
    if (isfloat) {                                                            \
        gen_op_frsp();                                                        \
    }                                                                         \
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);                     \
    gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0);                         \
}
#define GEN_FLOAT_AC(name, op2, inval, set_fprf, type)                        \
_GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type);               \
_GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);

/* 1-source FP op (fB -> fD), fixed primary opcode 0x3F. */
#define GEN_FLOAT_B(name, op2, op3, set_fprf, type)                           \
GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type)                        \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);                     \
    gen_reset_fpstatus();                                                     \
    gen_op_f##name();                                                         \
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);                     \
    gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0);                         \
}

/* 1-source FP op with selectable primary opcode (estimate instructions). */
#define GEN_FLOAT_BS(name, op1, op2, set_fprf, type)                          \
GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type)                        \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);                     \
    gen_reset_fpstatus();                                                     \
    gen_op_f##name();                                                         \
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);                     \
    gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0);                         \
}
2184
/* fadd - fadds */
GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
/* fdiv - fdivs */
GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
/* fmul - fmuls */
GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);

/* fre : floating reciprocal estimate (optional instruction) */
GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);

/* fres : single-precision reciprocal estimate */
GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);

/* frsqrte : reciprocal square root estimate */
GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
2201 /* frsqrtes */
/* frsqrtes : single-precision variant of frsqrte, implemented as the
 * double-precision estimate followed by a round-to-single. */
static always_inline void gen_op_frsqrtes (void)
{
    gen_op_frsqrte();
    gen_op_frsp();
}
GEN_FLOAT_BS(rsqrtes, 0x3B, 0x1A, 1, PPC_FLOAT_FRSQRTES);

/* fsel : FP select (no rounding, no FPRF update) */
_GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
/* fsub - fsubs */
GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
/* Optional: */
/* fsqrt : square root, double precision */
GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
{
    if (unlikely(!ctx->fpu_enabled)) {
        GEN_EXCP_NO_FP(ctx);
        return;
    }
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
    gen_reset_fpstatus();
    gen_op_fsqrt();
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
    gen_compute_fprf(1, Rc(ctx->opcode) != 0);
}

/* fsqrts : as fsqrt, with an extra round-to-single step */
GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
{
    if (unlikely(!ctx->fpu_enabled)) {
        GEN_EXCP_NO_FP(ctx);
        return;
    }
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
    gen_reset_fpstatus();
    gen_op_fsqrt();
    gen_op_frsp();
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
    gen_compute_fprf(1, Rc(ctx->opcode) != 0);
}
2241
/*** Floating-Point multiply-and-add ***/
/* fmadd - fmadds */
GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
/* fmsub - fmsubs */
GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
/* fnmadd - fnmadds */
GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
/* fnmsub - fnmsubs */
GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);

/*** Floating-Point round & convert ***/
/* fctiw : convert to 32-bit integer word */
GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
/* fctiwz : convert to integer word, round toward zero */
GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
/* frsp : round to single precision */
GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
#if defined(TARGET_PPC64)
/* fcfid : convert from 64-bit integer */
GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
/* fctid : convert to 64-bit integer */
GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
/* fctidz : convert to 64-bit integer, round toward zero */
GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
#endif

/* frin : round to integer nearest */
GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
/* friz : round to integer toward zero */
GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
/* frip : round to integer toward +inf */
GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
/* frim : round to integer toward -inf */
GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);

/*** Floating-Point compare ***/
/* fcmpo : ordered compare, result in CR field crfD */
GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT)
{
    if (unlikely(!ctx->fpu_enabled)) {
        GEN_EXCP_NO_FP(ctx);
        return;
    }
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
    gen_reset_fpstatus();
    gen_helper_fcmpo(cpu_crf[crfD(ctx->opcode)]);
    /* the compare can raise a deferred FP exception */
    gen_op_float_check_status();
}

/* fcmpu : unordered compare */
GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT)
{
    if (unlikely(!ctx->fpu_enabled)) {
        GEN_EXCP_NO_FP(ctx);
        return;
    }
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
    gen_reset_fpstatus();
    gen_helper_fcmpu(cpu_crf[crfD(ctx->opcode)]);
    /* the compare can raise a deferred FP exception */
    gen_op_float_check_status();
}

/*** Floating-point move ***/
/* fabs */
/* XXX: beware that fabs never checks for NaNs nor update FPSCR */
GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);

/* fmr - fmr. */
/* XXX: beware that fmr never checks for NaNs nor update FPSCR */
GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT)
{
    if (unlikely(!ctx->fpu_enabled)) {
        GEN_EXCP_NO_FP(ctx);
        return;
    }
    /* copy via FT0 so Rc handling below sees the moved value */
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
    gen_compute_fprf(0, Rc(ctx->opcode) != 0);
}

/* fnabs */
/* XXX: beware that fnabs never checks for NaNs nor update FPSCR */
GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
/* fneg */
/* XXX: beware that fneg never checks for NaNs nor update FPSCR */
GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
2330
/*** Floating-Point status & ctrl register ***/
/* mcrfs : copy FPSCR field crfS into CR field crfD, then clear the
 * exception bits of that FPSCR field. */
GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT)
{
    int bfa;

    if (unlikely(!ctx->fpu_enabled)) {
        GEN_EXCP_NO_FP(ctx);
        return;
    }
    gen_optimize_fprf();
    /* bit offset of the 4-bit field inside FPSCR (field 0 is the MSB) */
    bfa = 4 * (7 - crfS(ctx->opcode));
    tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_fpscr, bfa);
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], 0xf);
    gen_op_fpscr_resetbit(~(0xF << bfa));
}

/* mffs : move FPSCR into fD (low 32 bits) */
GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT)
{
    if (unlikely(!ctx->fpu_enabled)) {
        GEN_EXCP_NO_FP(ctx);
        return;
    }
    gen_optimize_fprf();
    gen_reset_fpstatus();
    gen_op_load_fpscr_FT0();
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
    gen_compute_fprf(0, Rc(ctx->opcode) != 0);
}
2361
/* mtfsb0 : clear one FPSCR bit */
GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        GEN_EXCP_NO_FP(ctx);
        return;
    }
    /* convert big-endian bit number to shift count */
    crb = 32 - (crbD(ctx->opcode) >> 2);
    gen_optimize_fprf();
    gen_reset_fpstatus();
    /* FEX and VX (bits 30/29) are summary bits and cannot be cleared directly */
    if (likely(crb != 30 && crb != 29))
        gen_op_fpscr_resetbit(~(1 << crb));
    if (unlikely(Rc(ctx->opcode) != 0)) {
        /* mtfsb0. copies FPSCR[0:3] into CR1 */
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
}

/* mtfsb1 : set one FPSCR bit */
GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT)
{
    uint8_t crb;

    if (unlikely(!ctx->fpu_enabled)) {
        GEN_EXCP_NO_FP(ctx);
        return;
    }
    crb = 32 - (crbD(ctx->opcode) >> 2);
    gen_optimize_fprf();
    gen_reset_fpstatus();
    /* XXX: we pretend we can only do IEEE floating-point computations */
    if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI))
        gen_op_fpscr_setbit(crb);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
    /* We can raise a differed exception */
    gen_op_float_check_status();
}
2402
/* mtfsf : move fB into FPSCR under field mask FM */
GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x02010000, PPC_FLOAT)
{
    if (unlikely(!ctx->fpu_enabled)) {
        GEN_EXCP_NO_FP(ctx);
        return;
    }
    gen_optimize_fprf();
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
    gen_reset_fpstatus();
    gen_op_store_fpscr(FM(ctx->opcode));
    if (unlikely(Rc(ctx->opcode) != 0)) {
        /* mtfsf. copies FPSCR[0:3] into CR1 */
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
    /* We can raise a differed exception */
    gen_op_float_check_status();
}

/* mtfsfi : move 4-bit immediate into FPSCR field bf */
GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
{
    int bf, sh;

    if (unlikely(!ctx->fpu_enabled)) {
        GEN_EXCP_NO_FP(ctx);
        return;
    }
    bf = crbD(ctx->opcode) >> 2;
    sh = 7 - bf;
    gen_optimize_fprf();
    /* place the immediate at the nibble position of field bf */
    tcg_gen_movi_i64(cpu_FT[0], FPIMM(ctx->opcode) << (4 * sh));
    gen_reset_fpstatus();
    gen_op_store_fpscr(1 << sh);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        tcg_gen_shri_i32(cpu_crf[1], cpu_fpscr, FPSCR_OX);
    }
    /* We can raise a differed exception */
    gen_op_float_check_status();
}
2442
2443 /*** Addressing modes ***/
2444 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
2445 static always_inline void gen_addr_imm_index (TCGv EA,
2446 DisasContext *ctx,
2447 target_long maskl)
2448 {
2449 target_long simm = SIMM(ctx->opcode);
2450
2451 simm &= ~maskl;
2452 if (rA(ctx->opcode) == 0)
2453 tcg_gen_movi_tl(EA, simm);
2454 else if (likely(simm != 0))
2455 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2456 else
2457 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2458 }
2459
2460 static always_inline void gen_addr_reg_index (TCGv EA,
2461 DisasContext *ctx)
2462 {
2463 if (rA(ctx->opcode) == 0)
2464 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2465 else
2466 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2467 }
2468
2469 static always_inline void gen_addr_register (TCGv EA,
2470 DisasContext *ctx)
2471 {
2472 if (rA(ctx->opcode) == 0)
2473 tcg_gen_movi_tl(EA, 0);
2474 else
2475 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2476 }
2477
#if defined(TARGET_PPC64)
/* One gen_op per combination of endianness (native/LE) and, on ppc64,
 * 32/64-bit addressing mode. */
#define _GEN_MEM_FUNCS(name, mode)                                            \
    &gen_op_##name##_##mode,                                                  \
    &gen_op_##name##_le_##mode,                                               \
    &gen_op_##name##_64_##mode,                                               \
    &gen_op_##name##_le_64_##mode
#else
#define _GEN_MEM_FUNCS(name, mode)                                            \
    &gen_op_##name##_##mode,                                                  \
    &gen_op_##name##_le_##mode
#endif
#if defined(CONFIG_USER_ONLY)
/* user-only: a single "raw" MMU mode */
#if defined(TARGET_PPC64)
#define NB_MEM_FUNCS 4
#else
#define NB_MEM_FUNCS 2
#endif
#define GEN_MEM_FUNCS(name)                                                   \
    _GEN_MEM_FUNCS(name, raw)
#else
/* softmmu: user / kernel / hypervisor access modes */
#if defined(TARGET_PPC64)
#define NB_MEM_FUNCS 12
#else
#define NB_MEM_FUNCS 6
#endif
#define GEN_MEM_FUNCS(name)                                                   \
    _GEN_MEM_FUNCS(name, user),                                               \
    _GEN_MEM_FUNCS(name, kernel),                                             \
    _GEN_MEM_FUNCS(name, hypv)
#endif

/*** Integer load ***/
/* dispatch a load/store gen_op through the per-mem_idx table */
#define op_ldst(name)        (*gen_op_##name[ctx->mem_idx])()
#define OP_LD_TABLE(width)                                                    \
static GenOpFunc *gen_op_l##width[NB_MEM_FUNCS] = {                           \
    GEN_MEM_FUNCS(l##width),                                                  \
};
#define OP_ST_TABLE(width)                                                    \
static GenOpFunc *gen_op_st##width[NB_MEM_FUNCS] = {                          \
    GEN_MEM_FUNCS(st##width),                                                 \
};
2519
2520
#if defined(TARGET_PPC64)
/* ppc64 raw access: flags bit 1 = 64-bit addressing mode (no truncation),
 * flags >> 2 = memory index.  In 32-bit mode the EA is zero-extended
 * to 32 bits first. */
#define GEN_QEMU_LD_PPC64(width)                                              \
static always_inline void gen_qemu_ld##width##_ppc64(TCGv t0, TCGv t1, int flags)\
{                                                                             \
    if (likely(flags & 2))                                                    \
        tcg_gen_qemu_ld##width(t0, t1, flags >> 2);                           \
    else {                                                                    \
        TCGv addr = tcg_temp_new();                                           \
        tcg_gen_ext32u_tl(addr, t1);                                          \
        tcg_gen_qemu_ld##width(t0, addr, flags >> 2);                         \
        tcg_temp_free(addr);                                                  \
    }                                                                         \
}
GEN_QEMU_LD_PPC64(8u)
GEN_QEMU_LD_PPC64(8s)
GEN_QEMU_LD_PPC64(16u)
GEN_QEMU_LD_PPC64(16s)
GEN_QEMU_LD_PPC64(32u)
GEN_QEMU_LD_PPC64(32s)
GEN_QEMU_LD_PPC64(64)

/* same addressing-mode handling for stores */
#define GEN_QEMU_ST_PPC64(width)                                              \
static always_inline void gen_qemu_st##width##_ppc64(TCGv t0, TCGv t1, int flags)\
{                                                                             \
    if (likely(flags & 2))                                                    \
        tcg_gen_qemu_st##width(t0, t1, flags >> 2);                           \
    else {                                                                    \
        TCGv addr = tcg_temp_new();                                           \
        tcg_gen_ext32u_tl(addr, t1);                                          \
        tcg_gen_qemu_st##width(t0, addr, flags >> 2);                         \
        tcg_temp_free(addr);                                                  \
    }                                                                         \
}
GEN_QEMU_ST_PPC64(8)
GEN_QEMU_ST_PPC64(16)
GEN_QEMU_ST_PPC64(32)
GEN_QEMU_ST_PPC64(64)
2558
/* Generic load wrappers (ppc64 build): flags bit 0 = little-endian mode,
 * which requires byteswapping the loaded value for widths > 1 byte. */
static always_inline void gen_qemu_ld8u(TCGv arg0, TCGv arg1, int flags)
{
    /* single byte: endianness is irrelevant */
    gen_qemu_ld8u_ppc64(arg0, arg1, flags);
}

static always_inline void gen_qemu_ld8s(TCGv arg0, TCGv arg1, int flags)
{
    gen_qemu_ld8s_ppc64(arg0, arg1, flags);
}

static always_inline void gen_qemu_ld16u(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        /* little-endian: load, then byteswap the halfword */
        TCGv_i32 t0;
        gen_qemu_ld16u_ppc64(arg0, arg1, flags);
        t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg0);
        tcg_gen_bswap16_i32(t0, t0);
        tcg_gen_extu_i32_tl(arg0, t0);
        tcg_temp_free_i32(t0);
    } else
        gen_qemu_ld16u_ppc64(arg0, arg1, flags);
}

static always_inline void gen_qemu_ld16s(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        /* LE: zero-extending load, byteswap, then sign-extend */
        TCGv_i32 t0;
        gen_qemu_ld16u_ppc64(arg0, arg1, flags);
        t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg0);
        tcg_gen_bswap16_i32(t0, t0);
        tcg_gen_extu_i32_tl(arg0, t0);
        tcg_gen_ext16s_tl(arg0, arg0);
        tcg_temp_free_i32(t0);
    } else
        gen_qemu_ld16s_ppc64(arg0, arg1, flags);
}

static always_inline void gen_qemu_ld32u(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i32 t0;
        gen_qemu_ld32u_ppc64(arg0, arg1, flags);
        t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg0);
        tcg_gen_bswap_i32(t0, t0);
        tcg_gen_extu_i32_tl(arg0, t0);
        tcg_temp_free_i32(t0);
    } else
        gen_qemu_ld32u_ppc64(arg0, arg1, flags);
}

static always_inline void gen_qemu_ld32s(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        /* LE: zero-extending load, byteswap, then sign-extend the word */
        TCGv_i32 t0;
        gen_qemu_ld32u_ppc64(arg0, arg1, flags);
        t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg0);
        tcg_gen_bswap_i32(t0, t0);
        tcg_gen_ext_i32_tl(arg0, t0);
        tcg_temp_free_i32(t0);
    } else
        gen_qemu_ld32s_ppc64(arg0, arg1, flags);
}

static always_inline void gen_qemu_ld64(TCGv arg0, TCGv arg1, int flags)
{
    gen_qemu_ld64_ppc64(arg0, arg1, flags);
    if (unlikely(flags & 1))
        tcg_gen_bswap_i64(arg0, arg0);
}
2632
/* Generic store wrappers (ppc64 build): flags bit 0 = little-endian mode;
 * the value is byteswapped into a temp before the store so the source
 * register is left untouched. */
static always_inline void gen_qemu_st8(TCGv arg0, TCGv arg1, int flags)
{
    /* single byte: endianness is irrelevant */
    gen_qemu_st8_ppc64(arg0, arg1, flags);
}

static always_inline void gen_qemu_st16(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i32 t0;
        TCGv_i64 t1;
        t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg0);
        tcg_gen_ext16u_i32(t0, t0);
        tcg_gen_bswap16_i32(t0, t0);
        t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_tl(t1, t0);
        tcg_temp_free_i32(t0);
        gen_qemu_st16_ppc64(t1, arg1, flags);
        tcg_temp_free_i64(t1);
    } else
        gen_qemu_st16_ppc64(arg0, arg1, flags);
}

static always_inline void gen_qemu_st32(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i32 t0;
        TCGv_i64 t1;
        t0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, arg0);
        tcg_gen_bswap_i32(t0, t0);
        t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_tl(t1, t0);
        tcg_temp_free_i32(t0);
        gen_qemu_st32_ppc64(t1, arg1, flags);
        tcg_temp_free_i64(t1);
    } else
        gen_qemu_st32_ppc64(arg0, arg1, flags);
}

static always_inline void gen_qemu_st64(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_bswap_i64(t0, arg0);
        gen_qemu_st64_ppc64(t0, arg1, flags);
        tcg_temp_free_i64(t0);
    } else
        gen_qemu_st64_ppc64(arg0, arg1, flags);
}

2683
2684
#else /* defined(TARGET_PPC64) */
/* ppc32 raw access: flags bit 0 = little-endian mode (handled by the
 * callers below), flags >> 1 = memory index.  NOTE: the shift happens
 * HERE, so callers must pass the raw flags unshifted. */
#define GEN_QEMU_LD_PPC32(width)                                                 \
static always_inline void gen_qemu_ld##width##_ppc32(TCGv arg0, TCGv arg1, int flags)\
{                                                                                \
    tcg_gen_qemu_ld##width(arg0, arg1, flags >> 1);                              \
}
GEN_QEMU_LD_PPC32(8u)
GEN_QEMU_LD_PPC32(8s)
GEN_QEMU_LD_PPC32(16u)
GEN_QEMU_LD_PPC32(16s)
GEN_QEMU_LD_PPC32(32u)
GEN_QEMU_LD_PPC32(32s)

#define GEN_QEMU_ST_PPC32(width)                                                 \
static always_inline void gen_qemu_st##width##_ppc32(TCGv arg0, TCGv arg1, int flags)\
{                                                                                \
    tcg_gen_qemu_st##width(arg0, arg1, flags >> 1);                              \
}
GEN_QEMU_ST_PPC32(8)
GEN_QEMU_ST_PPC32(16)
GEN_QEMU_ST_PPC32(32)
2706
2707 static always_inline void gen_qemu_ld8u(TCGv arg0, TCGv arg1, int flags)
2708 {
2709 gen_qemu_ld8u_ppc32(arg0, arg1, flags >> 1);
2710 }
2711
2712 static always_inline void gen_qemu_ld8s(TCGv arg0, TCGv arg1, int flags)
2713 {
2714 gen_qemu_ld8s_ppc32(arg0, arg1, flags >> 1);
2715 }
2716
2717 static always_inline void gen_qemu_ld16u(TCGv arg0, TCGv arg1, int flags)
2718 {
2719 gen_qemu_ld16u_ppc32(arg0, arg1, flags >> 1);
2720 if (unlikely(flags & 1))
2721 tcg_gen_bswap16_i32(arg0, arg0);
2722 }
2723
/* 16-bit sign-extending load (ppc32 build) */
static always_inline void gen_qemu_ld16s(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        /* LE: zero-extending load, byteswap, then sign-extend */
        gen_qemu_ld16u_ppc32(arg0, arg1, flags);
        tcg_gen_bswap16_i32(arg0, arg0);
        tcg_gen_ext16s_i32(arg0, arg0);
    } else
        gen_qemu_ld16s_ppc32(arg0, arg1, flags);
}

/* 32-bit load (ppc32 build); byteswapped in little-endian mode */
static always_inline void gen_qemu_ld32u(TCGv arg0, TCGv arg1, int flags)
{
    gen_qemu_ld32u_ppc32(arg0, arg1, flags);
    if (unlikely(flags & 1))
        tcg_gen_bswap_i32(arg0, arg0);
}
2740
/* Store wrappers (ppc32 build): flags bit 0 = little-endian mode; the
 * value is byteswapped into a temp so the source register is preserved. */
static always_inline void gen_qemu_st8(TCGv arg0, TCGv arg1, int flags)
{
    /* single byte: endianness is irrelevant */
    gen_qemu_st8_ppc32(arg0, arg1, flags);
}

static always_inline void gen_qemu_st16(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i32 temp = tcg_temp_new_i32();
        tcg_gen_ext16u_i32(temp, arg0);
        tcg_gen_bswap16_i32(temp, temp);
        gen_qemu_st16_ppc32(temp, arg1, flags);
        tcg_temp_free_i32(temp);
    } else
        gen_qemu_st16_ppc32(arg0, arg1, flags);
}

static always_inline void gen_qemu_st32(TCGv arg0, TCGv arg1, int flags)
{
    if (unlikely(flags & 1)) {
        TCGv_i32 temp = tcg_temp_new_i32();
        tcg_gen_bswap_i32(temp, arg0);
        gen_qemu_st32_ppc32(temp, arg1, flags);
        tcg_temp_free_i32(temp);
    } else
        gen_qemu_st32_ppc32(arg0, arg1, flags);
}

#endif
2770
/* Plain load: lX rD, d(rA) */
#define GEN_LD(width, opc, type)                                              \
GEN_HANDLER(l##width, opc, 0xFF, 0xFF, 0x00000000, type)                      \
{                                                                             \
    TCGv EA = tcg_temp_new();                                                 \
    gen_addr_imm_index(EA, ctx, 0);                                           \
    gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);           \
    tcg_temp_free(EA);                                                        \
}

/* Load with update: rA := EA afterwards; rA == 0 or rA == rD is invalid */
#define GEN_LDU(width, opc, type)                                             \
GEN_HANDLER(l##width##u, opc, 0xFF, 0xFF, 0x00000000, type)                   \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0 ||                                      \
                 rA(ctx->opcode) == rD(ctx->opcode))) {                       \
        GEN_EXCP_INVAL(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    EA = tcg_temp_new();                                                      \
    if (type == PPC_64B)                                                      \
        gen_addr_imm_index(EA, ctx, 0x03);                                    \
    else                                                                      \
        gen_addr_imm_index(EA, ctx, 0);                                       \
    gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);           \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* Indexed load with update: EA = rA + rB, rA := EA afterwards */
#define GEN_LDUX(width, opc2, opc3, type)                                     \
GEN_HANDLER(l##width##ux, 0x1F, opc2, opc3, 0x00000001, type)                 \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0 ||                                      \
                 rA(ctx->opcode) == rD(ctx->opcode))) {                       \
        GEN_EXCP_INVAL(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(EA, ctx);                                              \
    gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);           \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* Indexed load: EA = (rA|0) + rB */
#define GEN_LDX(width, opc2, opc3, type)                                      \
GEN_HANDLER(l##width##x, 0x1F, opc2, opc3, 0x00000001, type)                  \
{                                                                             \
    TCGv EA = tcg_temp_new();                                                 \
    gen_addr_reg_index(EA, ctx);                                              \
    gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);           \
    tcg_temp_free(EA);                                                        \
}

/* Emit all four addressing variants of a load */
#define GEN_LDS(width, op, type)                                              \
GEN_LD(width, op | 0x20, type);                                               \
GEN_LDU(width, op | 0x21, type);                                              \
GEN_LDUX(width, 0x17, op | 0x01, type);                                       \
GEN_LDX(width, 0x17, op | 0x00, type)
2829
/* lbz lbzu lbzux lbzx */
GEN_LDS(8u, 0x02, PPC_INTEGER);
/* lha lhau lhaux lhax */
GEN_LDS(16s, 0x0A, PPC_INTEGER);
/* lhz lhzu lhzux lhzx */
GEN_LDS(16u, 0x08, PPC_INTEGER);
/* lwz lwzu lwzux lwzx */
GEN_LDS(32u, 0x00, PPC_INTEGER);
#if defined(TARGET_PPC64)
/* lwaux */
GEN_LDUX(32s, 0x15, 0x0B, PPC_64B);
/* lwax */
GEN_LDX(32s, 0x15, 0x0A, PPC_64B);
/* ldux */
GEN_LDUX(64, 0x15, 0x01, PPC_64B);
/* ldx */
GEN_LDX(64, 0x15, 0x00, PPC_64B);
/* ld / ldu / lwa : DS-form, discriminated by the low opcode bits */
GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B)
{
    TCGv EA;
    if (Rc(ctx->opcode)) {
        /* update form: rA must not be 0 or equal to rD */
        if (unlikely(rA(ctx->opcode) == 0 ||
                     rA(ctx->opcode) == rD(ctx->opcode))) {
            GEN_EXCP_INVAL(ctx);
            return;
        }
    }
    EA = tcg_temp_new();
    /* DS-form: low 2 displacement bits are opcode bits, mask them off */
    gen_addr_imm_index(EA, ctx, 0x03);
    if (ctx->opcode & 0x02) {
        /* lwa (lwau is undefined) */
        gen_qemu_ld32s(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);
    } else {
        /* ld - ldu */
        gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);
    }
    if (Rc(ctx->opcode))
        tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
    tcg_temp_free(EA);
}
2870 /* lq */
/* lq : load quadword into the even/odd register pair rd/rd+1
 * (privileged; big-endian mode only) */
GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    int ra, rd;
    TCGv EA;

    /* Restore CPU state */
    if (unlikely(ctx->supervisor == 0)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    ra = rA(ctx->opcode);
    rd = rD(ctx->opcode);
    /* rd must be even and must not overlap the base register */
    if (unlikely((rd & 1) || rd == ra)) {
        GEN_EXCP_INVAL(ctx);
        return;
    }
    if (unlikely(ctx->mem_idx & 1)) {
        /* Little-endian mode is not handled */
        GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
        return;
    }
    EA = tcg_temp_new();
    /* DQ-form: low 4 displacement bits are opcode bits */
    gen_addr_imm_index(EA, ctx, 0x0F);
    gen_qemu_ld64(cpu_gpr[rd], EA, ctx->mem_idx);
    tcg_gen_addi_tl(EA, EA, 8);
    gen_qemu_ld64(cpu_gpr[rd+1], EA, ctx->mem_idx);
    tcg_temp_free(EA);
#endif
}
#endif
2904
/*** Integer store ***/
/* Plain store: stX rS, d(rA) */
#define GEN_ST(width, opc, type)                                              \
GEN_HANDLER(st##width, opc, 0xFF, 0xFF, 0x00000000, type)                     \
{                                                                             \
    TCGv EA = tcg_temp_new();                                                 \
    gen_addr_imm_index(EA, ctx, 0);                                           \
    gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx);           \
    tcg_temp_free(EA);                                                        \
}

/* Store with update: rA := EA afterwards; rA == 0 is invalid */
#define GEN_STU(width, opc, type)                                             \
GEN_HANDLER(st##width##u, opc, 0xFF, 0xFF, 0x00000000, type)                  \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        GEN_EXCP_INVAL(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    EA = tcg_temp_new();                                                      \
    if (type == PPC_64B)                                                      \
        gen_addr_imm_index(EA, ctx, 0x03);                                    \
    else                                                                      \
        gen_addr_imm_index(EA, ctx, 0);                                       \
    gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx);           \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* Indexed store with update */
#define GEN_STUX(width, opc2, opc3, type)                                     \
GEN_HANDLER(st##width##ux, 0x1F, opc2, opc3, 0x00000001, type)                \
{                                                                             \
    TCGv EA;                                                                  \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        GEN_EXCP_INVAL(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(EA, ctx);                                              \
    gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx);           \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);                             \
    tcg_temp_free(EA);                                                        \
}

/* Indexed store: EA = (rA|0) + rB */
#define GEN_STX(width, opc2, opc3, type)                                      \
GEN_HANDLER(st##width##x, 0x1F, opc2, opc3, 0x00000001, type)                 \
{                                                                             \
    TCGv EA = tcg_temp_new();                                                 \
    gen_addr_reg_index(EA, ctx);                                              \
    gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx);           \
    tcg_temp_free(EA);                                                        \
}

/* Emit all four addressing variants of a store */
#define GEN_STS(width, op, type)                                              \
GEN_ST(width, op | 0x20, type);                                               \
GEN_STU(width, op | 0x21, type);                                              \
GEN_STUX(width, 0x17, op | 0x01, type);                                       \
GEN_STX(width, 0x17, op | 0x00, type)
2962
/* stb stbu stbux stbx */
GEN_STS(8, 0x06, PPC_INTEGER);
/* sth sthu sthux sthx */
GEN_STS(16, 0x0C, PPC_INTEGER);
/* stw stwu stwux stwx */
GEN_STS(32, 0x04, PPC_INTEGER);
#if defined(TARGET_PPC64)
/* stdux / stdx */
GEN_STUX(64, 0x15, 0x05, PPC_64B);
GEN_STX(64, 0x15, 0x04, PPC_64B);
/* std / stdu / stq : DS-form, discriminated by the low opcode bits */
GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B)
{
    int rs;
    TCGv EA;

    rs = rS(ctx->opcode);
    if ((ctx->opcode & 0x3) == 0x2) {
#if defined(CONFIG_USER_ONLY)
        GEN_EXCP_PRIVOPC(ctx);
#else
        /* stq : store quadword from register pair rs/rs+1 (privileged) */
        if (unlikely(ctx->supervisor == 0)) {
            GEN_EXCP_PRIVOPC(ctx);
            return;
        }
        /* rs must be even */
        if (unlikely(rs & 1)) {
            GEN_EXCP_INVAL(ctx);
            return;
        }
        if (unlikely(ctx->mem_idx & 1)) {
            /* Little-endian mode is not handled */
            GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
            return;
        }
        EA = tcg_temp_new();
        gen_addr_imm_index(EA, ctx, 0x03);
        gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
        tcg_gen_addi_tl(EA, EA, 8);
        gen_qemu_st64(cpu_gpr[rs+1], EA, ctx->mem_idx);
        tcg_temp_free(EA);
#endif
    } else {
        /* std / stdu */
        if (Rc(ctx->opcode)) {
            /* update form: rA must not be 0 */
            if (unlikely(rA(ctx->opcode) == 0)) {
                GEN_EXCP_INVAL(ctx);
                return;
            }
        }
        EA = tcg_temp_new();
        gen_addr_imm_index(EA, ctx, 0x03);
        gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
        if (Rc(ctx->opcode))
            tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
        tcg_temp_free(EA);
    }
}
#endif
/*** Integer load and store with byte reverse ***/
/* lhbrx : 16-bit load, byte-reversed */
void always_inline gen_qemu_ld16ur(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 temp = tcg_temp_new_i32();
    gen_qemu_ld16u(t0, t1, flags);
    tcg_gen_trunc_tl_i32(temp, t0);
    tcg_gen_bswap16_i32(temp, temp);
    tcg_gen_extu_i32_tl(t0, temp);
    tcg_temp_free_i32(temp);
}
GEN_LDX(16ur, 0x16, 0x18, PPC_INTEGER);

/* lwbrx : 32-bit load, byte-reversed */
void always_inline gen_qemu_ld32ur(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 temp = tcg_temp_new_i32();
    gen_qemu_ld32u(t0, t1, flags);
    tcg_gen_trunc_tl_i32(temp, t0);
    tcg_gen_bswap_i32(temp, temp);
    tcg_gen_extu_i32_tl(t0, temp);
    tcg_temp_free_i32(temp);
}
GEN_LDX(32ur, 0x16, 0x10, PPC_INTEGER);

/* sthbrx : 16-bit store, byte-reversed; swap into a temp so the source
 * register is preserved */
void always_inline gen_qemu_st16r(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 temp = tcg_temp_new_i32();
    TCGv t2 = tcg_temp_new();
    tcg_gen_trunc_tl_i32(temp, t0);
    tcg_gen_ext16u_i32(temp, temp);
    tcg_gen_bswap16_i32(temp, temp);
    tcg_gen_extu_i32_tl(t2, temp);
    tcg_temp_free_i32(temp);
    gen_qemu_st16(t2, t1, flags);
    tcg_temp_free(t2);
}
GEN_STX(16r, 0x16, 0x1C, PPC_INTEGER);
3059
3060 /* stwbrx */
3061 void always_inline gen_qemu_st32r(TCGv t0, TCGv t1, int flags)
3062 {
3063 TCGv_i32 temp = tcg_temp_new_i32();
3064 TCGv t2 = tcg_temp_new();
3065 tcg_gen_trunc_tl_i32(temp, t0);
3066 tcg_gen_bswap_i32(temp, temp);
3067 tcg_gen_extu_i32_tl(t2, temp);
3068 tcg_temp_free_i32(temp);
3069 gen_qemu_st16(t2, t1, flags);
3070 tcg_temp_free(t2);
3071 }
3072 GEN_STX(32r, 0x16, 0x14, PPC_INTEGER);
3073
/*** Integer load and store multiple ***/
/* dispatch through the per-mem_idx gen_op table, passing the start reg */
#define op_ldstm(name, reg) (*gen_op_##name[ctx->mem_idx])(reg)
static GenOpFunc1 *gen_op_lmw[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(lmw),
};
static GenOpFunc1 *gen_op_stmw[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(stmw),
};

/* lmw : load multiple words into rD..r31 */
GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_imm_index(cpu_T[0], ctx, 0);
    op_ldstm(lmw, rD(ctx->opcode));
}

/* stmw : store multiple words from rS..r31 */
GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_imm_index(cpu_T[0], ctx, 0);
    op_ldstm(stmw, rS(ctx->opcode));
}
3100
/*** Integer load and store strings ***/
#define op_ldsts(name, start) (*gen_op_##name[ctx->mem_idx])(start)
#define op_ldstsx(name, rd, ra, rb) (*gen_op_##name[ctx->mem_idx])(rd, ra, rb)
/* string load & stores are by definition endian-safe */
/* Alias the little-endian table slots to the normal ops: the string ops
 * are byte-addressed, so no separate LE variants are needed. */
#define gen_op_lswi_le_raw gen_op_lswi_raw
#define gen_op_lswi_le_user gen_op_lswi_user
#define gen_op_lswi_le_kernel gen_op_lswi_kernel
#define gen_op_lswi_le_hypv gen_op_lswi_hypv
#define gen_op_lswi_le_64_raw gen_op_lswi_raw
#define gen_op_lswi_le_64_user gen_op_lswi_user
#define gen_op_lswi_le_64_kernel gen_op_lswi_kernel
#define gen_op_lswi_le_64_hypv gen_op_lswi_hypv
static GenOpFunc1 *gen_op_lswi[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(lswi),
};
#define gen_op_lswx_le_raw gen_op_lswx_raw
#define gen_op_lswx_le_user gen_op_lswx_user
#define gen_op_lswx_le_kernel gen_op_lswx_kernel
#define gen_op_lswx_le_hypv gen_op_lswx_hypv
#define gen_op_lswx_le_64_raw gen_op_lswx_raw
#define gen_op_lswx_le_64_user gen_op_lswx_user
#define gen_op_lswx_le_64_kernel gen_op_lswx_kernel
#define gen_op_lswx_le_64_hypv gen_op_lswx_hypv
static GenOpFunc3 *gen_op_lswx[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(lswx),
};
#define gen_op_stsw_le_raw gen_op_stsw_raw
#define gen_op_stsw_le_user gen_op_stsw_user
#define gen_op_stsw_le_kernel gen_op_stsw_kernel
#define gen_op_stsw_le_hypv gen_op_stsw_hypv
#define gen_op_stsw_le_64_raw gen_op_stsw_raw
#define gen_op_stsw_le_64_user gen_op_stsw_user
#define gen_op_stsw_le_64_kernel gen_op_stsw_kernel
#define gen_op_stsw_le_64_hypv gen_op_stsw_hypv
static GenOpFunc1 *gen_op_stsw[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(stsw),
};
3138
/* lswi */
/* PowerPC32 specification says we must generate an exception if
 * rA is in the range of registers to be loaded.
 * In an other hand, IBM says this is valid, but rA won't be loaded.
 * For now, I'll follow the spec...
 */
GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING)
{
    int nb = NB(ctx->opcode);       /* byte count; 0 encodes 32 */
    int start = rD(ctx->opcode);    /* first destination register */
    int ra = rA(ctx->opcode);
    int nr;                         /* number of registers touched */

    if (nb == 0)
        nb = 32;
    nr = nb / 4;
    /* Invalid if rA falls inside the (possibly wrapping) rD..rD+nr-1 range */
    if (unlikely(((start + nr) > 32 &&
                 start <= ra && (start + nr - 32) > ra) ||
                 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
        /* NOTE(review): raised with the INVAL_LSWX sub-code even though this
         * is lswi — confirm the constant covers both string ops. */
        GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
                 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_LSWX);
        return;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_register(cpu_T[0], ctx);
    tcg_gen_movi_tl(cpu_T[1], nb);
    op_ldsts(lswi, start);
}
3168
/* lswx */
/* Load string word indexed: byte count comes from the low 7 bits of XER */
GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING)
{
    int ra = rA(ctx->opcode);
    int rb = rB(ctx->opcode);

    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_reg_index(cpu_T[0], ctx);
    if (ra == 0) {
        /* With rA == 0 the invalid-range check in the op uses rB instead */
        ra = rb;
    }
    tcg_gen_andi_tl(cpu_T[1], cpu_xer, 0x7F);   /* XER byte count */
    op_ldstsx(lswx, rD(ctx->opcode), ra, rb);
}
3184
/* stswi */
/* Store string word immediate: stores nb bytes (0 encodes 32) from GPRs
 * starting at rS. */
GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING)
{
    int nb = NB(ctx->opcode);

    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_register(cpu_T[0], ctx);
    if (nb == 0)
        nb = 32;
    tcg_gen_movi_tl(cpu_T[1], nb);
    op_ldsts(stsw, rS(ctx->opcode));
}
3198
/* stswx */
/* Store string word indexed: byte count comes from the low 7 bits of XER */
GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_reg_index(cpu_T[0], ctx);
    tcg_gen_andi_tl(cpu_T[1], cpu_xer, 0x7F);   /* XER byte count */
    op_ldsts(stsw, rS(ctx->opcode));
}
3208
/*** Memory synchronisation ***/
/* eieio */
GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO)
{
    /* No-op: translated code executes memory accesses in program order */
}

/* isync */
GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM)
{
    /* Context-synchronising: end the translation block here */
    GEN_STOP(ctx);
}
3220
/* Load-and-reserve / store-conditional word, dispatched per MMU mode */
#define op_lwarx() (*gen_op_lwarx[ctx->mem_idx])()
#define op_stwcx() (*gen_op_stwcx[ctx->mem_idx])()
static GenOpFunc *gen_op_lwarx[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(lwarx),
};
static GenOpFunc *gen_op_stwcx[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(stwcx),
};

/* lwarx */
GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_reg_index(cpu_T[0], ctx);
    op_lwarx();                     /* loads value into T1, sets reservation */
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
}

/* stwcx. */
GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_reg_index(cpu_T[0], ctx);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
    op_stwcx();                     /* stores T1 if reservation still held */
}
3249
#if defined(TARGET_PPC64)
/* 64-bit load-and-reserve / store-conditional, dispatched per MMU mode */
#define op_ldarx() (*gen_op_ldarx[ctx->mem_idx])()
#define op_stdcx() (*gen_op_stdcx[ctx->mem_idx])()
static GenOpFunc *gen_op_ldarx[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(ldarx),
};
static GenOpFunc *gen_op_stdcx[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(stdcx),
};

/* ldarx */
GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_reg_index(cpu_T[0], ctx);
    op_ldarx();                     /* loads value into T1, sets reservation */
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
}

/* stdcx. */
GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_reg_index(cpu_T[0], ctx);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
    op_stdcx();                     /* stores T1 if reservation still held */
}
#endif /* defined(TARGET_PPC64) */
3280
/* sync */
GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC)
{
    /* No-op: translated code already executes accesses in order */
}

/* wait */
GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT)
{
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_op_wait();
    GEN_EXCP(ctx, EXCP_HLT, 1);
}
3293
/*** Floating-point load ***/
/* d-form FP load: lfX frD,d(rA) */
#define GEN_LDF(width, opc, type)                                             \
GEN_HANDLER(l##width, opc, 0xFF, 0xFF, 0x00000000, type)                      \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_imm_index(cpu_T[0], ctx, 0);                                     \
    op_ldst(l##width);                                                        \
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);                     \
}

/* d-form FP load with update: also writes the EA back to rA (rA != 0) */
#define GEN_LDUF(width, opc, type)                                            \
GEN_HANDLER(l##width##u, opc, 0xFF, 0xFF, 0x00000000, type)                   \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        GEN_EXCP_INVAL(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_imm_index(cpu_T[0], ctx, 0);                                     \
    op_ldst(l##width);                                                        \
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);                     \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);                       \
}

/* x-form FP load with update: EA = rA + rB, written back to rA (rA != 0) */
#define GEN_LDUXF(width, opc, type)                                           \
GEN_HANDLER(l##width##ux, 0x1F, 0x17, opc, 0x00000001, type)                  \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        GEN_EXCP_INVAL(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_reg_index(cpu_T[0], ctx);                                        \
    op_ldst(l##width);                                                        \
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);                     \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);                       \
}

/* x-form FP load: lfXx frD,rA,rB */
#define GEN_LDXF(width, opc2, opc3, type)                                     \
GEN_HANDLER(l##width##x, 0x1F, opc2, opc3, 0x00000001, type)                  \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_reg_index(cpu_T[0], ctx);                                        \
    op_ldst(l##width);                                                        \
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);                     \
}

/* Instantiate the op table plus all four addressing variants */
#define GEN_LDFS(width, op, type)                                             \
OP_LD_TABLE(width);                                                           \
GEN_LDF(width, op | 0x20, type);                                              \
GEN_LDUF(width, op | 0x21, type);                                             \
GEN_LDUXF(width, op | 0x01, type);                                            \
GEN_LDXF(width, 0x17, op | 0x00, type)

/* lfd lfdu lfdux lfdx */
GEN_LDFS(fd, 0x12, PPC_FLOAT);
/* lfs lfsu lfsux lfsx */
GEN_LDFS(fs, 0x10, PPC_FLOAT);
3364
/*** Floating-point store ***/
/* d-form FP store: stfX frS,d(rA) */
#define GEN_STF(width, opc, type)                                             \
GEN_HANDLER(st##width, opc, 0xFF, 0xFF, 0x00000000, type)                     \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_imm_index(cpu_T[0], ctx, 0);                                     \
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);                     \
    op_ldst(st##width);                                                       \
}

/* d-form FP store with update: also writes the EA back to rA (rA != 0) */
#define GEN_STUF(width, opc, type)                                            \
GEN_HANDLER(st##width##u, opc, 0xFF, 0xFF, 0x00000000, type)                  \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        GEN_EXCP_INVAL(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_imm_index(cpu_T[0], ctx, 0);                                     \
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);                     \
    op_ldst(st##width);                                                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);                       \
}

/* x-form FP store with update: EA = rA + rB, written back to rA (rA != 0) */
#define GEN_STUXF(width, opc, type)                                           \
GEN_HANDLER(st##width##ux, 0x1F, 0x17, opc, 0x00000001, type)                 \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    if (unlikely(rA(ctx->opcode) == 0)) {                                     \
        GEN_EXCP_INVAL(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_reg_index(cpu_T[0], ctx);                                        \
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);                     \
    op_ldst(st##width);                                                       \
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);                       \
}

/* x-form FP store: stfXx frS,rA,rB */
#define GEN_STXF(width, opc2, opc3, type)                                     \
GEN_HANDLER(st##width##x, 0x1F, opc2, opc3, 0x00000001, type)                 \
{                                                                             \
    if (unlikely(!ctx->fpu_enabled)) {                                        \
        GEN_EXCP_NO_FP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_reg_index(cpu_T[0], ctx);                                        \
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);                     \
    op_ldst(st##width);                                                       \
}

/* Instantiate the op table plus all four addressing variants */
#define GEN_STFS(width, op, type)                                             \
OP_ST_TABLE(width);                                                           \
GEN_STF(width, op | 0x20, type);                                              \
GEN_STUF(width, op | 0x21, type);                                             \
GEN_STUXF(width, op | 0x01, type);                                            \
GEN_STXF(width, 0x17, op | 0x00, type)

/* stfd stfdu stfdux stfdx */
GEN_STFS(fd, 0x16, PPC_FLOAT);
/* stfs stfsu stfsux stfsx */
GEN_STFS(fs, 0x14, PPC_FLOAT);

/* Optional: */
/* stfiwx */
OP_ST_TABLE(fiw);
GEN_STXF(fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
3440
/*** Branch ***/
/* Emit a jump to dest, chaining translation blocks when the target lies
 * in the same guest page and no single-stepping is active; otherwise set
 * NIP explicitly and exit to the main loop (raising trace/debug events
 * as required by the single-step mode). */
static always_inline void gen_goto_tb (DisasContext *ctx, int n,
                                       target_ulong dest)
{
    TranslationBlock *tb;
    tb = ctx->tb;
#if defined(TARGET_PPC64)
    if (!ctx->sf_mode)
        dest = (uint32_t) dest;     /* 32-bit mode: truncate the target */
#endif
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!ctx->singlestep_enabled)) {
        /* Direct TB chaining is safe: same page, no stepping */
        tcg_gen_goto_tb(n);
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        tcg_gen_exit_tb((long)tb + n);
    } else {
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        if (unlikely(ctx->singlestep_enabled)) {
            if ((ctx->singlestep_enabled &
                (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
                ctx->exception == POWERPC_EXCP_BRANCH) {
                /* Raise the trace exception with NIP at the branch target */
                target_ulong tmp = ctx->nip;
                ctx->nip = dest;
                GEN_EXCP(ctx, POWERPC_EXCP_TRACE, 0);
                ctx->nip = tmp;
            }
            if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
                gen_update_nip(ctx, dest);
                gen_op_debug();
            }
        }
        tcg_gen_exit_tb(0);
    }
}
3475
/* Set the link register to nip, truncated to 32 bits when not in
 * 64-bit (sf) mode. */
static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip)
{
#if defined(TARGET_PPC64)
    if (ctx->sf_mode == 0)
        tcg_gen_movi_tl(cpu_lr, (uint32_t)nip);
    else
#endif
        tcg_gen_movi_tl(cpu_lr, nip);
}
3485
/* b ba bl bla */
/* Unconditional branch: relative (AA=0) or absolute (AA=1), optionally
 * saving the return address in LR (LK=1). */
GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
{
    target_ulong li, target;

    ctx->exception = POWERPC_EXCP_BRANCH;
    /* sign extend LI */
#if defined(TARGET_PPC64)
    if (ctx->sf_mode)
        li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
    else
#endif
        li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
    if (likely(AA(ctx->opcode) == 0))
        target = ctx->nip + li - 4;     /* nip already points past the insn */
    else
        target = li;
    if (LK(ctx->opcode))
        gen_setlr(ctx, ctx->nip);
    gen_goto_tb(ctx, 0, target);
}
3507
3508 #define BCOND_IM 0
3509 #define BCOND_LR 1
3510 #define BCOND_CTR 2
3511
3512 static always_inline void gen_bcond (DisasContext *ctx, int type)
3513 {
3514 uint32_t bo = BO(ctx->opcode);
3515 int l1 = gen_new_label();
3516 TCGv target;
3517
3518 ctx->exception = POWERPC_EXCP_BRANCH;
3519 if (type == BCOND_LR || type == BCOND_CTR) {
3520 target = tcg_temp_local_new();
3521 if (type == BCOND_CTR)
3522 tcg_gen_mov_tl(target, cpu_ctr);
3523 else
3524 tcg_gen_mov_tl(target, cpu_lr);
3525 }
3526 if (LK(ctx->opcode))
3527 gen_setlr(ctx, ctx->nip);
3528 l1 = gen_new_label();
3529 if ((bo & 0x4) == 0) {
3530 /* Decrement and test CTR */
3531 TCGv temp = tcg_temp_new();
3532 if (unlikely(type == BCOND_CTR)) {
3533 GEN_EXCP_INVAL(ctx);
3534 return;
3535 }
3536 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3537 #if defined(TARGET_PPC64)
3538 if (!ctx->sf_mode)
3539 tcg_gen_ext32u_tl(temp, cpu_ctr);
3540 else
3541 #endif
3542 tcg_gen_mov_tl(temp, cpu_ctr);
3543 if (bo & 0x2) {
3544 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3545 } else {
3546 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3547 }
3548 tcg_temp_free(temp);
3549 }
3550 if ((bo & 0x10) == 0) {
3551 /* Test CR */
3552 uint32_t bi = BI(ctx->opcode);
3553 uint32_t mask = 1 << (3 - (bi & 0x03));
3554 TCGv_i32 temp = tcg_temp_new_i32();
3555
3556 if (bo & 0x8) {
3557 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3558 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3559 } else {
3560 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3561 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3562 }
3563 tcg_temp_free_i32(temp);
3564 }
3565 if (type == BCOND_IM) {
3566 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3567 if (likely(AA(ctx->opcode) == 0)) {
3568 gen_goto_tb(ctx, 0, ctx->nip + li - 4);
3569 } else {
3570 gen_goto_tb(ctx, 0, li);
3571 }
3572 gen_set_label(l1);
3573 gen_goto_tb(ctx, 1, ctx->nip);
3574 } else {
3575 #if defined(TARGET_PPC64)
3576 if (!(ctx->sf_mode))
3577 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3578 else
3579 #endif
3580 tcg_gen_andi_tl(cpu_nip, target, ~3);
3581 tcg_gen_exit_tb(0);
3582 gen_set_label(l1);
3583 #if defined(TARGET_PPC64)
3584 if (!(ctx->sf_mode))
3585 tcg_gen_movi_tl(cpu_nip, (uint32_t)ctx->nip);
3586 else
3587 #endif
3588 tcg_gen_movi_tl(cpu_nip, ctx->nip);
3589 tcg_gen_exit_tb(0);
3590 }
3591 }
3592
/* bc: conditional branch, immediate displacement */
GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
{
    gen_bcond(ctx, BCOND_IM);
}

/* bcctr: conditional branch to CTR */
GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW)
{
    gen_bcond(ctx, BCOND_CTR);
}

/* bclr: conditional branch to LR */
GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW)
{
    gen_bcond(ctx, BCOND_LR);
}
3607
/*** Condition register logical ***/
/* CR-bit logical ops: align the crbA and crbB source bits with the
 * destination bit position within their 4-bit CR fields, apply tcg_op,
 * then merge the single result bit into the destination field. */
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)                   \
{                                                                             \
    uint8_t bitmask;                                                          \
    int sh;                                                                   \
    TCGv_i32 t0, t1;                                                          \
    sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
    t0 = tcg_temp_new_i32();                                                  \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
    t1 = tcg_temp_new_i32();                                                  \
    sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
    tcg_op(t0, t0, t1);                                                       \
    bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03));                          \
    tcg_gen_andi_i32(t0, t0, bitmask);                                        \
    tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
    tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}

/* crand */
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
/* crandc */
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
/* creqv */
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
/* crnand */
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
/* crnor */
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
/* cror */
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
/* crorc */
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
/* crxor */
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
/* mcrf */
/* Copy one 4-bit CR field to another */
GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER)
{
    tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
}
3661
/*** System linkage ***/
/* rfi (supervisor only) */
GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    /* Restore CPU state */
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    gen_op_rfi();
    GEN_SYNC(ctx);      /* MSR changed: stop and resynchronise */
#endif
}
3678
#if defined(TARGET_PPC64)
/* rfid (supervisor only): 64-bit return from interrupt */
GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    /* Restore CPU state */
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    gen_op_rfid();
    GEN_SYNC(ctx);      /* MSR changed: stop and resynchronise */
#endif
}

/* hrfid (hypervisor only): return from hypervisor interrupt */
GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    /* Restore CPU state */
    if (unlikely(ctx->supervisor <= 1)) {
        /* supervisor level 2 == hypervisor */
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    gen_op_hrfid();
    GEN_SYNC(ctx);      /* MSR changed: stop and resynchronise */
#endif
}
#endif
3710
/* sc */
/* In user mode, syscalls are handled directly by QEMU rather than the
 * guest kernel, hence the distinct exception number. */
#if defined(CONFIG_USER_ONLY)
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
#else
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
#endif
GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW)
{
    uint32_t lev;

    lev = (ctx->opcode >> 5) & 0x7F;    /* LEV field selects the syscall level */
    GEN_EXCP(ctx, POWERPC_SYSCALL, lev);
}
3724
/*** Trap ***/
/* tw: trap word if rA <cond> rB, condition encoded in TO */
GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_op_tw(TO(ctx->opcode));
}

/* twi */
/* Trap word immediate: compares rA against the sign-extended immediate */
GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_op_tw(TO(ctx->opcode));
}
3745
#if defined(TARGET_PPC64)
/* td */
/* Trap doubleword if rA <cond> rB, condition encoded in TO */
GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_op_td(TO(ctx->opcode));
}

/* tdi */
/* Trap doubleword immediate: rA against the sign-extended immediate */
GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
    /* Update the nip since this might generate a trap exception */
    gen_update_nip(ctx, ctx->nip);
    gen_op_td(TO(ctx->opcode));
}
#endif
3767
/*** Processor control ***/
/* mcrxr */
/* Copy XER[SO,OV,CA] into a CR field, then clear them in XER.
 * NOTE(review): the single shift by XER_CA assumes SO/OV/CA land in the
 * expected nibble positions of the crf value — confirm against the
 * XER_* bit definitions and the cpu_crf bit convention. */
GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC)
{
    tcg_gen_trunc_tl_i32(cpu_crf[crfD(ctx->opcode)], cpu_xer);
    tcg_gen_shri_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)], XER_CA);
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_SO | 1 << XER_OV | 1 << XER_CA));
}
3776
3777 /* mfcr */
3778 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC)
3779 {
3780 uint32_t crm, crn;
3781
3782 if (likely(ctx->opcode & 0x00100000)) {
3783 crm = CRM(ctx->opcode);
3784 if (likely((crm ^ (crm - 1)) == 0)) {
3785 crn = ffs(crm);
3786 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
3787 }
3788 } else {
3789 gen_helper_load_cr(cpu_gpr[rD(ctx->opcode)]);
3790 }
3791 }
3792
/* mfmsr */
/* Move from MSR (supervisor only) */
GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    gen_op_load_msr();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
#endif
}
3807
/* Sentinel callback marking an SPR as inaccessible at the current
 * privilege level; the debug variant below prints the offender. */
#if 1
#define SPR_NOACCESS ((void *)(-1UL))
#else
static void spr_noaccess (void *opaque, int sprn)
{
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
}
#define SPR_NOACCESS (&spr_noaccess)
#endif
3818
/* mfspr */
/* Common mfspr/mftb path: pick the read callback for the current
 * privilege level, invoke it (result in T0), or raise a privilege /
 * invalid-SPR exception. */
static always_inline void gen_op_mfspr (DisasContext *ctx)
{
    void (*read_cb)(void *opaque, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

#if !defined(CONFIG_USER_ONLY)
    if (ctx->supervisor == 2)
        read_cb = ctx->spr_cb[sprn].hea_read;   /* hypervisor */
    else if (ctx->supervisor)
        read_cb = ctx->spr_cb[sprn].oea_read;   /* supervisor */
    else
#endif
        read_cb = ctx->spr_cb[sprn].uea_read;   /* user */
    if (likely(read_cb != NULL)) {
        if (likely(read_cb != SPR_NOACCESS)) {
            (*read_cb)(ctx, sprn);
            tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
        } else {
            /* Privilege exception */
            /* This is a hack to avoid warnings when running Linux:
             * this OS breaks the PowerPC virtualisation model,
             * allowing userland application to read the PVR
             */
            if (sprn != SPR_PVR) {
                if (loglevel != 0) {
                    fprintf(logfile, "Trying to read privileged spr %d %03x at "
                            ADDRX "\n", sprn, sprn, ctx->nip);
                }
                printf("Trying to read privileged spr %d %03x at " ADDRX "\n",
                       sprn, sprn, ctx->nip);
            }
            GEN_EXCP_PRIVREG(ctx);
        }
    } else {
        /* Not defined */
        if (loglevel != 0) {
            fprintf(logfile, "Trying to read invalid spr %d %03x at "
                    ADDRX "\n", sprn, sprn, ctx->nip);
        }
        printf("Trying to read invalid spr %d %03x at " ADDRX "\n",
               sprn, sprn, ctx->nip);
        GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
                 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
    }
}
3865
/* mfspr: read a special-purpose register into rD */
GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC)
{
    gen_op_mfspr(ctx);
}

/* mftb */
/* Time-base reads go through the same SPR callback machinery */
GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB)
{
    gen_op_mfspr(ctx);
}
3876
3877 /* mtcrf */
3878 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC)
3879 {
3880 uint32_t crm, crn;
3881
3882 crm = CRM(ctx->opcode);
3883 if (likely((ctx->opcode & 0x00100000) || (crm ^ (crm - 1)) == 0)) {
3884 TCGv_i32 temp = tcg_temp_new_i32();
3885 crn = ffs(crm);
3886 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
3887 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
3888 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
3889 tcg_temp_free_i32(temp);
3890 } else {
3891 TCGv_i32 temp = tcg_const_i32(crm);
3892 gen_helper_store_cr(cpu_gpr[rS(ctx->opcode)], temp);
3893 tcg_temp_free_i32(temp);
3894 }
3895 }
3896
/* mtmsr */
#if defined(TARGET_PPC64)
/* mtmsrd (supervisor only): 64-bit move to MSR */
GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        gen_op_update_riee();
    } else {
        /* XXX: we need to update nip before the store
         * if we enter power saving mode, we will exit the loop
         * directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->nip);
        gen_op_store_msr();
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsrd is not always defined as context-synchronizing */
        ctx->exception = POWERPC_EXCP_STOP;
    }
#endif
}
#endif
3926
/* mtmsr (supervisor only): move to MSR */
GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    if (ctx->opcode & 0x00010000) {
        /* Special form that does not need any synchronisation */
        gen_op_update_riee();
    } else {
        /* XXX: we need to update nip before the store
         * if we enter power saving mode, we will exit the loop
         * directly from ppc_store_msr
         */
        gen_update_nip(ctx, ctx->nip);
#if defined(TARGET_PPC64)
        if (!ctx->sf_mode)
            gen_op_store_msr_32();      /* 32-bit mode updates only MSR[32:63] */
        else
#endif
            gen_op_store_msr();
        /* Must stop the translation as machine state (may have) changed */
        /* Note that mtmsr is not always defined as context-synchronizing */
        ctx->exception = POWERPC_EXCP_STOP;
    }
#endif
}
3958
/* mtspr */
/* Write rS to a special-purpose register: pick the write callback for the
 * current privilege level, or raise a privilege / invalid-SPR exception. */
GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC)
{
    void (*write_cb)(void *opaque, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

#if !defined(CONFIG_USER_ONLY)
    if (ctx->supervisor == 2)
        write_cb = ctx->spr_cb[sprn].hea_write;     /* hypervisor */
    else if (ctx->supervisor)
        write_cb = ctx->spr_cb[sprn].oea_write;     /* supervisor */
    else
#endif
        write_cb = ctx->spr_cb[sprn].uea_write;     /* user */
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
            (*write_cb)(ctx, sprn);     /* callback consumes T0 */
        } else {
            /* Privilege exception */
            if (loglevel != 0) {
                fprintf(logfile, "Trying to write privileged spr %d %03x at "
                        ADDRX "\n", sprn, sprn, ctx->nip);
            }
            printf("Trying to write privileged spr %d %03x at " ADDRX "\n",
                   sprn, sprn, ctx->nip);
            GEN_EXCP_PRIVREG(ctx);
        }
    } else {
        /* Not defined */
        if (loglevel != 0) {
            fprintf(logfile, "Trying to write invalid spr %d %03x at "
                    ADDRX "\n", sprn, sprn, ctx->nip);
        }
        printf("Trying to write invalid spr %d %03x at " ADDRX "\n",
               sprn, sprn, ctx->nip);
        GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
                 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
    }
}
3999
/*** Cache management ***/
/* dcbf */
/* Data cache block flush: modelled as a byte load so the MMU checks and
 * possible exceptions happen; the loaded value is discarded. */
GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0 = tcg_temp_new();
    gen_addr_reg_index(t0, ctx);
    gen_qemu_ld8u(t0, t0, ctx->mem_idx);
    tcg_temp_free(t0);
}
4010
/* dcbi (Supervisor only) */
/* Data cache block invalidate: modelled as a load+store of one byte so
 * the MMU performs its store-side permission checks. */
GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    TCGv EA, val;
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    EA = tcg_temp_new();
    gen_addr_reg_index(EA, ctx);
    val = tcg_temp_new();
    /* XXX: specification says this should be treated as a store by the MMU */
    gen_qemu_ld8u(val, EA, ctx->mem_idx);
    gen_qemu_st8(val, EA, ctx->mem_idx);    /* write back the same byte */
    tcg_temp_free(val);
    tcg_temp_free(EA);
#endif
}
4032
/* dcbst */
/* Data cache block store: modelled as a discarded byte load for the MMU
 * side effects. */
GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE)
{
    /* XXX: specification say this is treated as a load by the MMU */
    TCGv t0 = tcg_temp_new();
    gen_addr_reg_index(t0, ctx);
    gen_qemu_ld8u(t0, t0, ctx->mem_idx);
    tcg_temp_free(t0);
}
4042
/* dcbt */
GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* dcbtst */
GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}
4060
/* dcbz */
/* Zero a cache line: one op table row per supported line size (32, 64,
 * 128 bytes, or the CPU's tunable size), each dispatched per MMU mode.
 * No little-endian variants are needed, so LE slots alias the normal ops. */
#define op_dcbz(n) (*gen_op_dcbz[n][ctx->mem_idx])()
static GenOpFunc *gen_op_dcbz[4][NB_MEM_FUNCS] = {
    /* 32 bytes cache line size */
    {
#define gen_op_dcbz_l32_le_raw gen_op_dcbz_l32_raw
#define gen_op_dcbz_l32_le_user gen_op_dcbz_l32_user
#define gen_op_dcbz_l32_le_kernel gen_op_dcbz_l32_kernel
#define gen_op_dcbz_l32_le_hypv gen_op_dcbz_l32_hypv
#define gen_op_dcbz_l32_le_64_raw gen_op_dcbz_l32_64_raw
#define gen_op_dcbz_l32_le_64_user gen_op_dcbz_l32_64_user
#define gen_op_dcbz_l32_le_64_kernel gen_op_dcbz_l32_64_kernel
#define gen_op_dcbz_l32_le_64_hypv gen_op_dcbz_l32_64_hypv
        GEN_MEM_FUNCS(dcbz_l32),
    },
    /* 64 bytes cache line size */
    {
#define gen_op_dcbz_l64_le_raw gen_op_dcbz_l64_raw
#define gen_op_dcbz_l64_le_user gen_op_dcbz_l64_user
#define gen_op_dcbz_l64_le_kernel gen_op_dcbz_l64_kernel
#define gen_op_dcbz_l64_le_hypv gen_op_dcbz_l64_hypv
#define gen_op_dcbz_l64_le_64_raw gen_op_dcbz_l64_64_raw
#define gen_op_dcbz_l64_le_64_user gen_op_dcbz_l64_64_user
#define gen_op_dcbz_l64_le_64_kernel gen_op_dcbz_l64_64_kernel
#define gen_op_dcbz_l64_le_64_hypv gen_op_dcbz_l64_64_hypv
        GEN_MEM_FUNCS(dcbz_l64),
    },
    /* 128 bytes cache line size */
    {
#define gen_op_dcbz_l128_le_raw gen_op_dcbz_l128_raw
#define gen_op_dcbz_l128_le_user gen_op_dcbz_l128_user
#define gen_op_dcbz_l128_le_kernel gen_op_dcbz_l128_kernel
#define gen_op_dcbz_l128_le_hypv gen_op_dcbz_l128_hypv
#define gen_op_dcbz_l128_le_64_raw gen_op_dcbz_l128_64_raw
#define gen_op_dcbz_l128_le_64_user gen_op_dcbz_l128_64_user
#define gen_op_dcbz_l128_le_64_kernel gen_op_dcbz_l128_64_kernel
#define gen_op_dcbz_l128_le_64_hypv gen_op_dcbz_l128_64_hypv
        GEN_MEM_FUNCS(dcbz_l128),
    },
    /* tunable cache line size */
    {
#define gen_op_dcbz_le_raw gen_op_dcbz_raw
#define gen_op_dcbz_le_user gen_op_dcbz_user
#define gen_op_dcbz_le_kernel gen_op_dcbz_kernel
#define gen_op_dcbz_le_hypv gen_op_dcbz_hypv
#define gen_op_dcbz_le_64_raw gen_op_dcbz_64_raw
#define gen_op_dcbz_le_64_user gen_op_dcbz_64_user
#define gen_op_dcbz_le_64_kernel gen_op_dcbz_64_kernel
#define gen_op_dcbz_le_64_hypv gen_op_dcbz_64_hypv
        GEN_MEM_FUNCS(dcbz),
    },
};
4113
4114 static always_inline void handler_dcbz (DisasContext *ctx,
4115 int dcache_line_size)
4116 {
4117 int n;
4118
4119 switch (dcache_line_size) {
4120 case 32:
4121 n = 0;
4122 break;
4123 case 64:
4124 n = 1;
4125 break;
4126 case 128:
4127 n = 2;
4128 break;
4129 default:
4130 n = 3;
4131 break;
4132 }
4133 op_dcbz(n);
4134 }
4135
/* dcbz: zero the data cache block containing EA = (rA|0) + rB */
GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ)
{
    gen_addr_reg_index(cpu_T[0], ctx);
    handler_dcbz(ctx, ctx->dcache_line_size);
    /* dcbz to a reserved address cancels the reservation */
    gen_op_check_reservation();
}
4142
/* 970-specific dcbz: an opcode bit selects between the CPU cache line
 * size and the tunable size (-1 selects index 3 in the op table).
 * NOTE(review): bit 0x00200000 presumably is the 970 "ZER" control
 * bit — confirm against the 970 user manual.
 */
GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT)
{
    gen_addr_reg_index(cpu_T[0], ctx);
    if (ctx->opcode & 0x00200000)
        handler_dcbz(ctx, ctx->dcache_line_size);
    else
        handler_dcbz(ctx, -1);
    gen_op_check_reservation();
}
4152
/* icbi */
/* Dispatch through the per-mem_idx op table; little-endian variants
 * alias the native ones since icbi does not move data.
 */
#define op_icbi() (*gen_op_icbi[ctx->mem_idx])()
#define gen_op_icbi_le_raw gen_op_icbi_raw
#define gen_op_icbi_le_user gen_op_icbi_user
#define gen_op_icbi_le_kernel gen_op_icbi_kernel
#define gen_op_icbi_le_hypv gen_op_icbi_hypv
#define gen_op_icbi_le_64_raw gen_op_icbi_64_raw
#define gen_op_icbi_le_64_user gen_op_icbi_64_user
#define gen_op_icbi_le_64_kernel gen_op_icbi_64_kernel
#define gen_op_icbi_le_64_hypv gen_op_icbi_64_hypv
static GenOpFunc *gen_op_icbi[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(icbi),
};
4166
/* icbi: instruction cache block invalidate at EA = (rA|0) + rB */
GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_reg_index(cpu_T[0], ctx);
    op_icbi();
}
4174
/* Optional: */
/* dcba */
GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a store by the MMU
     * but does not generate any exception
     */
}
4184
/*** Segment register manipulation ***/
/* Supervisor only: */
/* All four handlers follow the same shape: privilege check, then the
 * segment register number in cpu_T[1] (immediate SR field, or rB>>28
 * for the *in forms) and the value moved through cpu_T[0].
 */
/* mfsr */
GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
    gen_op_load_sr();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
#endif
}

/* mfsrin: SR number taken from the top 4 bits of rB */
GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_srli_T1(28);
    gen_op_load_sr();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
#endif
}

/* mtsr */
GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
    gen_op_store_sr();
#endif
}

/* mtsrin: SR number taken from the top 4 bits of rB */
GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_srli_T1(28);
    gen_op_store_sr();
#endif
}
4252
#if defined(TARGET_PPC64)
/* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
/* Same structure as the 32-bit handlers above, but the segment
 * registers are emulated on top of the SLB (load_slb/store_slb ops).
 */
/* mfsr */
GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
    gen_op_load_slb();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
#endif
}

/* mfsrin: SR number taken from the top 4 bits of rB */
GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
             PPC_SEGMENT_64B)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_srli_T1(28);
    gen_op_load_slb();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
#endif
}

/* mtsr */
GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
    gen_op_store_slb();
#endif
}

/* mtsrin: SR number taken from the top 4 bits of rB */
GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
             PPC_SEGMENT_64B)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_srli_T1(28);
    gen_op_store_slb();
#endif
}
#endif /* defined(TARGET_PPC64) */
4323
/*** Lookaside buffer management ***/
/* Optional & supervisor only: */
/* tlbia: invalidate all TLB entries */
GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    gen_op_tlbia();
#endif
}

/* tlbie: invalidate the TLB entry for the EA in rB; the 64-bit
 * variant is selected at translation time from the MSR SF mode. */
GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
#if defined(TARGET_PPC64)
    if (ctx->sf_mode)
        gen_op_tlbie_64();
    else
#endif
        gen_op_tlbie();
#endif
}

/* tlbsync */
GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    /* This has no effect: it should ensure that all previous
     * tlbie have completed
     */
    GEN_STOP(ctx);
#endif
}
4376
#if defined(TARGET_PPC64)
/* slbia: invalidate all SLB entries */
GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    gen_op_slbia();
#endif
}

/* slbie: invalidate the SLB entry for the EA in rB */
GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
    gen_op_slbie();
#endif
}
#endif
4407
/*** External control ***/
/* Optional: */
/* Per-mem_idx op tables for external control in/out word indexed */
#define op_eciwx() (*gen_op_eciwx[ctx->mem_idx])()
#define op_ecowx() (*gen_op_ecowx[ctx->mem_idx])()
static GenOpFunc *gen_op_eciwx[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(eciwx),
};
static GenOpFunc *gen_op_ecowx[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(ecowx),
};

/* eciwx: external control in word indexed, result to rD */
GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN)
{
    /* Should check EAR[E] & alignment ! */
    gen_addr_reg_index(cpu_T[0], ctx);
    op_eciwx();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
}

/* ecowx: external control out word indexed, value from rS */
GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN)
{
    /* Should check EAR[E] & alignment ! */
    gen_addr_reg_index(cpu_T[0], ctx);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
    op_ecowx();
}
4436
/* PowerPC 601 specific instructions */
/* These legacy POWER ops all follow the same pattern: load operands
 * into the cpu_T scratch registers, call the gen_op_POWER_* micro-op,
 * write the result back to rD and optionally set CR0 from it (Rc).
 */
/* abs - abs. */
GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    gen_op_POWER_abs();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* abso - abso. : abs with overflow recording */
GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    gen_op_POWER_abso();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* clcs: cache line compute size */
GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    gen_op_POWER_clcs();
    /* Rc=1 sets CR0 to an undefined state */
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
}

/* div - div. */
GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_div();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* divo - divo. */
GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_divo();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* divs - divs. : divide short (uses MQ) */
GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_divs();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* divso - divso. */
GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_divso();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* doz - doz. : difference or zero */
GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_doz();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* dozo - dozo. */
GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_dozo();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* dozi: immediate form of doz (reuses the doz micro-op); no Rc bit */
GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
    gen_op_POWER_doz();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
}
4541
/* As lscbx load from memory byte after byte, it's always endian safe.
 * Original POWER is 32 bits only, define 64 bits ops as 32 bits ones
 */
#define op_POWER_lscbx(start, ra, rb)                                         \
(*gen_op_POWER_lscbx[ctx->mem_idx])(start, ra, rb)
#define gen_op_POWER_lscbx_64_raw gen_op_POWER_lscbx_raw
#define gen_op_POWER_lscbx_64_user gen_op_POWER_lscbx_user
#define gen_op_POWER_lscbx_64_kernel gen_op_POWER_lscbx_kernel
#define gen_op_POWER_lscbx_64_hypv gen_op_POWER_lscbx_hypv
#define gen_op_POWER_lscbx_le_raw gen_op_POWER_lscbx_raw
#define gen_op_POWER_lscbx_le_user gen_op_POWER_lscbx_user
#define gen_op_POWER_lscbx_le_kernel gen_op_POWER_lscbx_kernel
#define gen_op_POWER_lscbx_le_hypv gen_op_POWER_lscbx_hypv
#define gen_op_POWER_lscbx_le_64_raw gen_op_POWER_lscbx_raw
#define gen_op_POWER_lscbx_le_64_user gen_op_POWER_lscbx_user
#define gen_op_POWER_lscbx_le_64_kernel gen_op_POWER_lscbx_kernel
#define gen_op_POWER_lscbx_le_64_hypv gen_op_POWER_lscbx_hypv
static GenOpFunc3 *gen_op_POWER_lscbx[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(POWER_lscbx),
};

/* lscbx - lscbx. : load string and compare byte indexed.
 * Byte count comes from XER[0:6], the compare byte from XER[CMP];
 * the number of bytes actually loaded is written back into XER.
 */
GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR)
{
    int ra = rA(ctx->opcode);
    int rb = rB(ctx->opcode);

    gen_addr_reg_index(cpu_T[0], ctx);
    if (ra == 0) {
        ra = rb;
    }
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    tcg_gen_andi_tl(cpu_T[1], cpu_xer, 0x7F);
    tcg_gen_shri_tl(cpu_T[2], cpu_xer, XER_CMP);
    tcg_gen_andi_tl(cpu_T[2], cpu_T[2], 0xFF);
    op_POWER_lscbx(rD(ctx->opcode), ra, rb);
    /* Fold the byte count returned in cpu_T[0] back into XER[0:6] */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
    tcg_gen_or_tl(cpu_xer, cpu_xer, cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}
4584
/* maskg - maskg. : generate mask from bit positions in rS/rB */
GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_maskg();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* maskir - maskir. : insert rS into rA under the mask in rB */
GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_maskir();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* mul - mul. : POWER multiply (low result to MQ) */
GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_mul();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* mulo - mulo. */
GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_mulo();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* nabs - nabs. : negative absolute value */
GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    gen_op_POWER_nabs();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* nabso - nabso. */
GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    gen_op_POWER_nabso();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* rlmi - rlmi. : rotate left then mask insert; the MB/ME fields are
 * folded into an immediate mask pair at translation time */
GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
{
    uint32_t mb, me;

    mb = MB(ctx->opcode);
    me = ME(ctx->opcode);
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_rlmi(MASK(mb, me), ~MASK(mb, me));
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}
4665
/* rrib - rrib. : rotate right and insert bit */
GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_rrib();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* sle - sle. : shift left extended (shift count from rB) */
GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_sle();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* sleq - sleq. : shift left extended with MQ */
GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_sleq();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* sliq - sliq. : immediate form of sle (count from SH field) */
GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
    gen_op_POWER_sle();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* slliq - slliq. : immediate form of sleq */
GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
    gen_op_POWER_sleq();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* sllq - sllq. : shift left long with MQ */
GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_sllq();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* slq - slq. : shift left with MQ */
GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_slq();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}
4743
/* sraiq - sraiq. : immediate form of sraq (count from SH field) */
GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
    gen_op_POWER_sraq();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* sraq - sraq. : shift right algebraic with MQ */
GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_sraq();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* sre - sre. : shift right extended */
GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_sre();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* srea - srea. : shift right extended algebraic */
GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_srea();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* sreq : shift right extended with MQ */
GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_sreq();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* sriq : immediate form of srq (count from SH field) */
GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
    gen_op_POWER_srq();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}
4809
4810 /* srliq */
4811 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR)
4812 {
4813 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4814 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4815 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4816 gen_op_POWER_srlq();
4817 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4818 if (unlikely(Rc(ctx->opcode) != 0))
4819 gen_set_Rc0(ctx, cpu_T[0]);
4820 }
4821
/* srlq : shift right long with MQ (count from rB) */
GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_srlq();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}

/* srq : shift right with MQ (count from rB) */
GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_POWER_srq();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    if (unlikely(Rc(ctx->opcode) != 0))
        gen_set_Rc0(ctx, cpu_T[0]);
}
4843
/* PowerPC 602 specific instructions */
/* dsa : not emulated — raises an invalid-instruction exception */
GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC)
{
    /* XXX: TODO */
    GEN_EXCP_INVAL(ctx);
}

/* esa : not emulated — raises an invalid-instruction exception */
GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC)
{
    /* XXX: TODO */
    GEN_EXCP_INVAL(ctx);
}

/* mfrom : supervisor-only 602 move-from-ROM, value indexed by rA */
GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    gen_op_602_mfrom();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
#endif
}
4874
/* 602 - 603 - G2 TLB management */
/* All four handlers: supervisor-only software TLB reload, with the
 * effective address taken from rB.
 */
/* tlbld: data TLB reload */
GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
    gen_op_6xx_tlbld();
#endif
}

/* tlbli: instruction TLB reload */
GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
    gen_op_6xx_tlbli();
#endif
}

/* 74xx TLB management */
/* tlbld: data TLB reload */
GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
    gen_op_74xx_tlbld();
#endif
}

/* tlbli: instruction TLB reload */
GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
    gen_op_74xx_tlbli();
#endif
}
4936
/* POWER instructions not in PowerPC 601 */
/* clf */
GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER)
{
    /* Cache line flush: implemented as no-op */
}

/* cli */
GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER)
{
    /* Cache line invalidate: privileged and treated as no-op */
    #if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
#endif
}

/* dclst */
GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER)
{
    /* Data cache line store: treated as no-op */
}
4963
/* mfsri: POWER move-from-segment-register-indirect; supervisor only.
 * NOTE(review): the helper appears to leave the SR value in cpu_T[0]
 * and the (translated?) address in cpu_T[1], which is stored to rA
 * when rA is neither 0 nor rD — confirm against op_POWER_mfsri.
 */
GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    /* declarations after statements: C99 / GCC extension */
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);

    gen_addr_reg_index(cpu_T[0], ctx);
    gen_op_POWER_mfsri();
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_T[0]);
    if (ra != 0 && ra != rd)
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[1]);
#endif
}
4983
/* rac: POWER real-address compute; supervisor only, translated EA to rD */
GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    gen_addr_reg_index(cpu_T[0], ctx);
    gen_op_POWER_rac();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
#endif
}
4998
/* rfsvc: POWER return from SVC; supervisor only.
 * NOTE(review): the invalid-bits mask 0x03FFF0001 has 9 hex digits and
 * exceeds 32 bits; the compiler silently truncates it (to 0x3FFF0001).
 * Probably a typo for an 8-digit mask — confirm against the POWER
 * rfsvc encoding before touching the decode tables.
 */
GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    gen_op_POWER_rfsvc();
    /* Privilege state may have changed: end this translation block */
    GEN_SYNC(ctx);
#endif
}
5012
/* svc is not implemented for now */

/* POWER2 specific instructions */
/* Quad manipulation (load/store two floats at a time) */
/* Original POWER2 is 32 bits only, define 64 bits ops as 32 bits ones */
/* Per-mem_idx dispatch; data moves through cpu_FT[0]/cpu_FT[1] */
#define op_POWER2_lfq() (*gen_op_POWER2_lfq[ctx->mem_idx])()
#define op_POWER2_stfq() (*gen_op_POWER2_stfq[ctx->mem_idx])()
#define gen_op_POWER2_lfq_64_raw gen_op_POWER2_lfq_raw
#define gen_op_POWER2_lfq_64_user gen_op_POWER2_lfq_user
#define gen_op_POWER2_lfq_64_kernel gen_op_POWER2_lfq_kernel
#define gen_op_POWER2_lfq_64_hypv gen_op_POWER2_lfq_hypv
#define gen_op_POWER2_lfq_le_64_raw gen_op_POWER2_lfq_le_raw
#define gen_op_POWER2_lfq_le_64_user gen_op_POWER2_lfq_le_user
#define gen_op_POWER2_lfq_le_64_kernel gen_op_POWER2_lfq_le_kernel
#define gen_op_POWER2_lfq_le_64_hypv gen_op_POWER2_lfq_le_hypv
#define gen_op_POWER2_stfq_64_raw gen_op_POWER2_stfq_raw
#define gen_op_POWER2_stfq_64_user gen_op_POWER2_stfq_user
#define gen_op_POWER2_stfq_64_kernel gen_op_POWER2_stfq_kernel
#define gen_op_POWER2_stfq_64_hypv gen_op_POWER2_stfq_hypv
#define gen_op_POWER2_stfq_le_64_raw gen_op_POWER2_stfq_le_raw
#define gen_op_POWER2_stfq_le_64_user gen_op_POWER2_stfq_le_user
#define gen_op_POWER2_stfq_le_64_kernel gen_op_POWER2_stfq_le_kernel
#define gen_op_POWER2_stfq_le_64_hypv gen_op_POWER2_stfq_le_hypv
static GenOpFunc *gen_op_POWER2_lfq[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(POWER2_lfq),
};
static GenOpFunc *gen_op_POWER2_stfq[NB_MEM_FUNCS] = {
    GEN_MEM_FUNCS(POWER2_stfq),
};
5042
/* lfq: load two consecutive FP registers (rD, rD+1) from EA */
GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_imm_index(cpu_T[0], ctx, 0);
    op_POWER2_lfq();
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
}

/* lfqu: lfq with update — EA written back to rA (when rA != 0) */
GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
{
    int ra = rA(ctx->opcode);

    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_imm_index(cpu_T[0], ctx, 0);
    op_POWER2_lfq();
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
}

/* lfqux: indexed lfq with update */
GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2)
{
    int ra = rA(ctx->opcode);

    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_reg_index(cpu_T[0], ctx);
    op_POWER2_lfq();
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
}

/* lfqx: indexed lfq */
GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_reg_index(cpu_T[0], ctx);
    op_POWER2_lfq();
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
    tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
}
5094
/* stfq: store two consecutive FP registers (rS, rS+1) to EA */
GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_imm_index(cpu_T[0], ctx, 0);
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
    op_POWER2_stfq();
}

/* stfqu: stfq with update — EA written back to rA (when rA != 0) */
GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
{
    int ra = rA(ctx->opcode);

    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_imm_index(cpu_T[0], ctx, 0);
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
    op_POWER2_stfq();
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
}

/* stfqux: indexed stfq with update */
GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2)
{
    int ra = rA(ctx->opcode);

    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_reg_index(cpu_T[0], ctx);
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
    op_POWER2_stfq();
    if (ra != 0)
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
}

/* stfqx: indexed stfq */
GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2)
{
    /* NIP cannot be restored if the memory exception comes from an helper */
    gen_update_nip(ctx, ctx->nip - 4);
    gen_addr_reg_index(cpu_T[0], ctx);
    tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
    op_POWER2_stfq();
}
5146
/* BookE specific instructions */
/* XXX: not implemented on 440 ? */
/* mfapidi: not emulated — raises an invalid-instruction exception */
GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI)
{
    /* XXX: TODO */
    GEN_EXCP_INVAL(ctx);
}

/* XXX: not implemented on 440 ? */
/* tlbiva: invalidate TLB entry for the EA = (rA|0) + rB */
GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    gen_addr_reg_index(cpu_T[0], ctx);
    /* Use the same micro-ops as for tlbie */
#if defined(TARGET_PPC64)
    if (ctx->sf_mode)
        gen_op_tlbie_64();
    else
#endif
        gen_op_tlbie();
#endif
}
5175
5176 /* All 405 MAC instructions are translated here */
5177 static always_inline void gen_405_mulladd_insn (DisasContext *ctx,
5178 int opc2, int opc3,
5179 int ra, int rb, int rt, int Rc)
5180 {
5181 TCGv t0, t1;
5182
5183 t0 = tcg_temp_local_new();
5184 t1 = tcg_temp_local_new();
5185
5186 switch (opc3 & 0x0D) {
5187 case 0x05:
5188 /* macchw - macchw. - macchwo - macchwo. */
5189 /* macchws - macchws. - macchwso - macchwso. */
5190 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
5191 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
5192 /* mulchw - mulchw. */
5193 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5194 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5195 tcg_gen_ext16s_tl(t1, t1);
5196 break;
5197 case 0x04:
5198 /* macchwu - macchwu. - macchwuo - macchwuo. */
5199 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
5200 /* mulchwu - mulchwu. */
5201 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5202 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5203 tcg_gen_ext16u_tl(t1, t1);
5204 break;
5205 case 0x01:
5206 /* machhw - machhw. - machhwo - machhwo. */
5207 /* machhws - machhws. - machhwso - machhwso. */
5208 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
5209 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
5210 /* mulhhw - mulhhw. */
5211 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5212 tcg_gen_ext16s_tl(t0, t0);
5213 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5214 tcg_gen_ext16s_tl(t1, t1);
5215 break;
5216 case 0x00:
5217 /* machhwu - machhwu. - machhwuo - machhwuo. */
5218 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
5219 /* mulhhwu - mulhhwu. */
5220 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5221 tcg_gen_ext16u_tl(t0, t0);
5222 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5223 tcg_gen_ext16u_tl(t1, t1);
5224 break;
5225 case 0x0D:
5226 /* maclhw - maclhw. - maclhwo - maclhwo. */
5227 /* maclhws - maclhws. - maclhwso - maclhwso. */
5228 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
5229 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
5230 /* mullhw - mullhw. */
5231 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5232 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5233 break;
5234 case 0x0C:
5235 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
5236 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
5237 /* mullhwu - mullhwu. */
5238 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5239 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5240 break;
5241 }
5242 if (opc2 & 0x04) {
5243 /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5244 tcg_gen_mul_tl(t1, t0, t1);
5245 if (opc2 & 0x02) {
5246 /* nmultiply-and-accumulate (0x0E) */
5247 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5248 } else {
5249 /* multiply-and-accumulate (0x0C) */
5250 tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5251 }
5252
5253 if (opc3 & 0x12) {
5254 /* Check overflow and/or saturate */
5255 int l1 = gen_new_label();
5256
5257 if (opc3 & 0x10) {
5258 /* Start with XER OV disabled, the most likely case */
5259 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~(1 << XER_OV));
5260 }
5261 if (opc3 & 0x01) {
5262 /* Signed */
5263 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5264 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5265 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5266 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5267 if (opc3 & 0x02) {
5268 /* Saturate */
5269 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5270 tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5271 }
5272 } else {
5273 /* Unsigned */
5274 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5275 if (opc3 & 0x02) {
5276 /* Saturate */
5277 tcg_gen_movi_tl(t0, UINT32_MAX);
5278 }
5279 }
5280 if (opc3 & 0x10) {
5281 /* Check overflow */
5282 tcg_gen_ori_tl(cpu_xer, cpu_xer, (1 << XER_OV) | (1 << XER_SO));
5283 }
5284 gen_set_label(l1);
5285 tcg_gen_mov_tl(cpu_gpr[rt], t0);
5286 }
5287 } else {
5288 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5289 }
5290 tcg_temp_free(t0);
5291 tcg_temp_free(t1);
5292 if (unlikely(Rc) != 0) {
5293 /* Update Rc0 */
5294 gen_set_Rc0(ctx, cpu_gpr[rt]);
5295 }
5296 }
5297
/* Generate one 405 MAC handler: every variant funnels into
 * gen_405_mulladd_insn(), which decodes opc2/opc3 at translate time. */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)                  \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}

/* macchw - macchw. */
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
/* macchwo - macchwo. */
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws - macchws. */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso - macchwso. */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu - macchwsu. */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu - macchwu. */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo - macchwuo. */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw - machhw. */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo - machhwo. */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws - machhws. */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso - machhwso. */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu - machhwsu. */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu - machhwu. */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo - machhwuo. */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw - maclhw. */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo - maclhwo. */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws - maclhws. */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso - maclhwso. */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu - maclhwu. */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo - maclhwuo. */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu - maclhwsu. */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw - nmacchw. */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo - nmacchwo. */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws - nmacchws. */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw - nmachhw. */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo - nmachhwo. */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws - nmachhws. */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw - nmaclhw. */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo - nmaclhwo. */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws - nmaclhws. */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* mulchw - mulchw. */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw - mulhhw. */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw - mullhw. */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5390
/* mfdcr */
/* Read a Device Control Register (DCR number taken from the SPR field).
 * Supervisor-only in system emulation. */
GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    uint32_t dcrn = SPR(ctx->opcode);

    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_movi_tl(cpu_T[0], dcrn);
    gen_op_load_dcr();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
#endif
}

/* mtdcr */
/* Write rS to the DCR named by the SPR field.  Supervisor-only. */
GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    uint32_t dcrn = SPR(ctx->opcode);

    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_movi_tl(cpu_T[0], dcrn);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
    gen_op_store_dcr();
#endif
}

/* mfdcrx */
/* XXX: not implemented on 440 ? */
/* Indexed variant: DCR number comes from rA at run time. */
GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    gen_op_load_dcr();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif
}

/* mtdcrx */
/* XXX: not implemented on 440 ? */
/* Indexed variant: DCR number from rA, value from rS. */
GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVREG(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVREG(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
    gen_op_store_dcr();
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif
}

/* mfdcrux (PPC 460) : user-mode access to DCR */
/* No privilege check by design: the 460 allows user-mode DCR reads. */
GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    gen_op_load_dcr();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460) : user-mode access to DCR */
/* No privilege check by design: the 460 allows user-mode DCR writes. */
GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
    gen_op_store_dcr();
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}
5480
/* dccci */
/* Data cache congruence-class invalidate: caches are not modelled, so this
 * is a privileged no-op. */
GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    /* interpreted as no-op */
#endif
}

/* dcread */
/* Data cache read: performs the load (so MMU faults still occur) but, as
 * the cache is not modelled, discards the data and returns the EA in rD. */
GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    TCGv EA, val;
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    EA = tcg_temp_new();
    gen_addr_reg_index(EA, ctx);
    val = tcg_temp_new();
    gen_qemu_ld32u(val, EA, ctx->mem_idx);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif
}

/* icbt */
GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}

/* iccci */
/* Instruction cache congruence-class invalidate: privileged no-op here. */
GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    /* interpreted as no-op */
#endif
}

/* icread */
/* Instruction cache read: privileged no-op (cache not modelled). */
GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    /* interpreted as no-op */
#endif
}
5552
/* rfci (supervisor only) */
/* Return from critical interrupt (40x flavour).  GEN_SYNC stops
 * translation because MSR/PC change non-sequentially. */
GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    /* Restore CPU state */
    gen_op_40x_rfci();
    GEN_SYNC(ctx);
#endif
}

/* Return from critical interrupt (BookE flavour). */
GEN_HANDLER(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    /* Restore CPU state */
    gen_op_rfci();
    GEN_SYNC(ctx);
#endif
}

/* BookE specific */
/* XXX: not implemented on 440 ? */
/* Return from debug interrupt. */
GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    /* Restore CPU state */
    gen_op_rfdi();
    GEN_SYNC(ctx);
#endif
}

/* XXX: not implemented on 440 ? */
/* Return from machine-check interrupt. */
GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    /* Restore CPU state */
    gen_op_rfmci();
    GEN_SYNC(ctx);
#endif
}
5616
/* TLB management - PowerPC 405 implementation */
/* tlbre */
/* Read a TLB entry: rB selects which word (0 = high/EPN, 1 = low/RPN);
 * the entry index comes from rA, the result goes to rD. */
GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    switch (rB(ctx->opcode)) {
    case 0:
        tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
        gen_op_4xx_tlbre_hi();
        tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
        break;
    case 1:
        tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
        gen_op_4xx_tlbre_lo();
        tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
        break;
    default:
        GEN_EXCP_INVAL(ctx);
        break;
    }
#endif
}

/* tlbsx - tlbsx. */
/* Search the TLB for an effective address; with Rc set, CR0 reflects
 * whether a matching entry was found. */
GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    gen_addr_reg_index(cpu_T[0], ctx);
    gen_op_4xx_tlbsx();
    if (Rc(ctx->opcode))
        gen_op_4xx_tlbsx_check();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
#endif
}

/* tlbwe */
/* Write a TLB entry: rB selects high/low word, index from rA, data from rS. */
GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    switch (rB(ctx->opcode)) {
    case 0:
        tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
        tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
        gen_op_4xx_tlbwe_hi();
        break;
    case 1:
        tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
        tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
        gen_op_4xx_tlbwe_lo();
        break;
    default:
        GEN_EXCP_INVAL(ctx);
        break;
    }
#endif
}
5691
/* TLB management - PowerPC 440 implementation */
/* tlbre */
/* Read a TLB entry: the 440 has three words per entry, selected by rB
 * (0..2); the micro-op takes the word number as an argument. */
GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
        gen_op_440_tlbre(rB(ctx->opcode));
        tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
        break;
    default:
        GEN_EXCP_INVAL(ctx);
        break;
    }
#endif
}

/* tlbsx - tlbsx. */
/* Search the TLB for an effective address; shares the Rc check micro-op
 * with the 40x implementation. */
GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    gen_addr_reg_index(cpu_T[0], ctx);
    gen_op_440_tlbsx();
    if (Rc(ctx->opcode))
        gen_op_4xx_tlbsx_check();
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
#endif
}

/* tlbwe */
/* Write one of the three words (rB = 0..2) of a 440 TLB entry. */
GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
        tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
        gen_op_440_tlbwe(rB(ctx->opcode));
        break;
    default:
        GEN_EXCP_INVAL(ctx);
        break;
    }
#endif
}
5760
/* wrtee */
/* Write MSR[EE] from the corresponding bit of rD. */
GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rD(ctx->opcode)]);
    gen_op_wrte();
    /* Stop translation to have a chance to raise an exception
     * if we just set msr_ee to 1
     */
    GEN_STOP(ctx);
#endif
}

/* wrteei */
/* Immediate form: MSR[EE] is set from the E bit encoded in the opcode. */
GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000EFC01, PPC_WRTEE)
{
#if defined(CONFIG_USER_ONLY)
    GEN_EXCP_PRIVOPC(ctx);
#else
    if (unlikely(!ctx->supervisor)) {
        GEN_EXCP_PRIVOPC(ctx);
        return;
    }
    tcg_gen_movi_tl(cpu_T[0], ctx->opcode & 0x00010000);
    gen_op_wrte();
    /* Stop translation to have a chance to raise an exception
     * if we just set msr_ee to 1
     */
    GEN_STOP(ctx);
#endif
}
5798
/* PowerPC 440 specific instructions */
/* dlmzb */
/* Determine leftmost mismatched/zero byte across rS:rB; the helper leaves
 * the byte count in T0, which is written to rA and merged into the low
 * 7 bits of XER.  With Rc, CR0 is set from the helper's result. */
GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC)
{
    tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
    gen_op_440_dlmzb();
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
    /* Clear then set the 7-bit byte-count field in XER */
    tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F);
    tcg_gen_or_tl(cpu_xer, cpu_xer, cpu_T[0]);
    if (Rc(ctx->opcode)) {
        gen_op_440_dlmzb_update_Rc();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_T[0]);
        tcg_gen_andi_i32(cpu_crf[0], cpu_crf[0], 0xf);
    }
}

/* mbar replaces eieio on 440 */
/* Memory barrier: nothing to do in a single-threaded translator. */
GEN_HANDLER(mbar, 0x1F, 0x16, 0x13, 0x001FF801, PPC_BOOKE)
{
    /* interpreted as no-op */
}

/* msync replaces sync on 440 */
GEN_HANDLER(msync, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE)
{
    /* interpreted as no-op */
}

/* icbt */
GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, PPC_BOOKE)
{
    /* interpreted as no-op */
    /* XXX: specification say this is treated as a load by the MMU
     * but does not generate any exception
     */
}
5836
/*** Altivec vector extension ***/
/* Altivec registers moves */

/* Copy AVR register 'reg' into micro-op slot 't' (128 bits as two i64). */
static always_inline void gen_load_avr(int t, int reg) {
    tcg_gen_mov_i64(cpu_AVRh[t], cpu_avrh[reg]);
    tcg_gen_mov_i64(cpu_AVRl[t], cpu_avrl[reg]);
}

/* Copy micro-op slot 't' back into AVR register 'reg'. */
static always_inline void gen_store_avr(int reg, int t) {
    tcg_gen_mov_i64(cpu_avrh[reg], cpu_AVRh[t]);
    tcg_gen_mov_i64(cpu_avrl[reg], cpu_AVRl[t]);
}

/* Dispatch a vector load/store micro-op for the current MMU mode. */
#define op_vr_ldst(name)        (*gen_op_##name[ctx->mem_idx])()
#define OP_VR_LD_TABLE(name)                                                  \
static GenOpFunc *gen_op_vr_l##name[NB_MEM_FUNCS] = {                         \
    GEN_MEM_FUNCS(vr_l##name),                                                \
};
#define OP_VR_ST_TABLE(name)                                                  \
static GenOpFunc *gen_op_vr_st##name[NB_MEM_FUNCS] = {                        \
    GEN_MEM_FUNCS(vr_st##name),                                               \
};

/* Indexed vector load: raises the no-AltiVec exception when disabled. */
#define GEN_VR_LDX(name, opc2, opc3)                                          \
GEN_HANDLER(l##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)               \
{                                                                             \
    if (unlikely(!ctx->altivec_enabled)) {                                    \
        GEN_EXCP_NO_VR(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_reg_index(cpu_T[0], ctx);                                        \
    op_vr_ldst(vr_l##name);                                                   \
    gen_store_avr(rD(ctx->opcode), 0);                                        \
}

/* Indexed vector store. */
#define GEN_VR_STX(name, opc2, opc3)                                          \
GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC)              \
{                                                                             \
    if (unlikely(!ctx->altivec_enabled)) {                                    \
        GEN_EXCP_NO_VR(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_reg_index(cpu_T[0], ctx);                                        \
    gen_load_avr(0, rS(ctx->opcode));                                         \
    op_vr_ldst(vr_st##name);                                                  \
}

OP_VR_LD_TABLE(vx);
GEN_VR_LDX(vx, 0x07, 0x03);
/* As we don't emulate the cache, lvxl is stricly equivalent to lvx */
#define gen_op_vr_lvxl gen_op_vr_lvx
GEN_VR_LDX(vxl, 0x07, 0x0B);

OP_VR_ST_TABLE(vx);
GEN_VR_STX(vx, 0x07, 0x07);
/* As we don't emulate the cache, stvxl is stricly equivalent to stvx */
#define gen_op_vr_stvxl gen_op_vr_stvx
GEN_VR_STX(vxl, 0x07, 0x0F);
5895
/*** SPE extension ***/
/* Register moves */

/* Read the full 64-bit SPE view of GPR 'reg' into t.  On 32-bit targets
 * the upper half lives in the shadow array cpu_gprh. */
static always_inline void gen_load_gpr64(TCGv_i64 t, int reg) {
#if defined(TARGET_PPC64)
    tcg_gen_mov_i64(t, cpu_gpr[reg]);
#else
    tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
#endif
}

/* Write the 64-bit value t to GPR 'reg', splitting across cpu_gpr /
 * cpu_gprh on 32-bit targets. */
static always_inline void gen_store_gpr64(int reg, TCGv_i64 t) {
#if defined(TARGET_PPC64)
    tcg_gen_mov_i64(cpu_gpr[reg], t);
#else
    TCGv_i64 tmp = tcg_temp_new_i64();
    tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
    tcg_gen_shri_i64(tmp, t, 32);
    tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
    tcg_temp_free_i64(tmp);
#endif
}
5918
/* SPE opcode pairs share one slot, discriminated by the Rc bit. */
#define GEN_SPE(name0, name1, opc2, opc3, inval, type)                        \
GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type)                   \
{                                                                             \
    if (Rc(ctx->opcode))                                                      \
        gen_##name1(ctx);                                                     \
    else                                                                      \
        gen_##name0(ctx);                                                     \
}

/* Handler for undefined SPE opcodes */
static always_inline void gen_speundef (DisasContext *ctx)
{
    GEN_EXCP_INVAL(ctx);
}

/* SPE load and stores */
/* EA = (rA|0) + (uimm << sh); the unsigned immediate is encoded in the
 * rB field of the opcode. */
static always_inline void gen_addr_spe_imm_index (TCGv EA, DisasContext *ctx, int sh)
{
    target_long simm = rB(ctx->opcode);

    if (rA(ctx->opcode) == 0)
        tcg_gen_movi_tl(EA, simm << sh);
    else if (likely(simm != 0))
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm << sh);
    else
        tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
}
5946
/* Dispatch an SPE load/store micro-op for the current MMU mode. */
#define op_spe_ldst(name)        (*gen_op_##name[ctx->mem_idx])()
#define OP_SPE_LD_TABLE(name)                                                 \
static GenOpFunc *gen_op_spe_l##name[NB_MEM_FUNCS] = {                        \
    GEN_MEM_FUNCS(spe_l##name),                                               \
};
#define OP_SPE_ST_TABLE(name)                                                 \
static GenOpFunc *gen_op_spe_st##name[NB_MEM_FUNCS] = {                       \
    GEN_MEM_FUNCS(spe_st##name),                                              \
};

/* Immediate-offset SPE load: result (64 bits) comes back in cpu_T64[1]. */
#define GEN_SPE_LD(name, sh)                                                  \
static always_inline void gen_evl##name (DisasContext *ctx)                   \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_spe_imm_index(cpu_T[0], ctx, sh);                                \
    op_spe_ldst(spe_l##name);                                                 \
    gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]);                             \
}

/* Register-indexed SPE load. */
#define GEN_SPE_LDX(name)                                                     \
static always_inline void gen_evl##name##x (DisasContext *ctx)                \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_reg_index(cpu_T[0], ctx);                                        \
    op_spe_ldst(spe_l##name);                                                 \
    gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]);                             \
}

#define GEN_SPEOP_LD(name, sh)                                                \
OP_SPE_LD_TABLE(name);                                                        \
GEN_SPE_LD(name, sh);                                                         \
GEN_SPE_LDX(name)

/* Immediate-offset SPE store: the 64-bit source goes through cpu_T64[1]. */
#define GEN_SPE_ST(name, sh)                                                  \
static always_inline void gen_evst##name (DisasContext *ctx)                  \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_spe_imm_index(cpu_T[0], ctx, sh);                                \
    gen_load_gpr64(cpu_T64[1], rS(ctx->opcode));                              \
    op_spe_ldst(spe_st##name);                                                \
}

/* Register-indexed SPE store. */
#define GEN_SPE_STX(name)                                                     \
static always_inline void gen_evst##name##x (DisasContext *ctx)               \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_addr_reg_index(cpu_T[0], ctx);                                        \
    gen_load_gpr64(cpu_T64[1], rS(ctx->opcode));                              \
    op_spe_ldst(spe_st##name);                                                \
}

#define GEN_SPEOP_ST(name, sh)                                                \
OP_SPE_ST_TABLE(name);                                                        \
GEN_SPE_ST(name, sh);                                                         \
GEN_SPE_STX(name)

#define GEN_SPEOP_LDST(name, sh)                                              \
GEN_SPEOP_LD(name, sh);                                                       \
GEN_SPEOP_ST(name, sh)
6018
/* SPE logic */
/* Bitwise ops act independently on both 32-bit halves, so on 64-bit
 * targets a single full-width op suffices; on 32-bit targets the op is
 * applied to both the low (cpu_gpr) and high (cpu_gprh) words. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_LOGIC2(name, tcg_op)                                        \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
}
#else
#define GEN_SPEOP_LOGIC2(name, tcg_op)                                        \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],              \
           cpu_gprh[rB(ctx->opcode)]);                                        \
}
#endif

GEN_SPEOP_LOGIC2(evand, tcg_gen_and_tl);
GEN_SPEOP_LOGIC2(evandc, tcg_gen_andc_tl);
GEN_SPEOP_LOGIC2(evxor, tcg_gen_xor_tl);
GEN_SPEOP_LOGIC2(evor, tcg_gen_or_tl);
GEN_SPEOP_LOGIC2(evnor, tcg_gen_nor_tl);
GEN_SPEOP_LOGIC2(eveqv, tcg_gen_eqv_tl);
GEN_SPEOP_LOGIC2(evorc, tcg_gen_orc_tl);
GEN_SPEOP_LOGIC2(evnand, tcg_gen_nand_tl);
6054
/* SPE logic immediate */
/* Shift/rotate each 32-bit half by the immediate encoded in the rB field.
 * On 64-bit targets the register is split into two i32 halves, operated
 * on, then re-concatenated. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi)                               \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                                   \
    TCGv_i64 t2 = tcg_temp_local_new_i64();                                   \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                      \
    tcg_opi(t0, t0, rB(ctx->opcode));                                         \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t1, t2);                                            \
    tcg_temp_free_i64(t2);                                                    \
    tcg_opi(t1, t1, rB(ctx->opcode));                                         \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);                 \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
#else
#define GEN_SPEOP_TCG_LOGIC_IMM2(name, tcg_opi)                               \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    tcg_opi(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],               \
            rB(ctx->opcode));                                                 \
    tcg_opi(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],             \
            rB(ctx->opcode));                                                 \
}
#endif
GEN_SPEOP_TCG_LOGIC_IMM2(evslwi, tcg_gen_shli_i32);
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwiu, tcg_gen_shri_i32);
GEN_SPEOP_TCG_LOGIC_IMM2(evsrwis, tcg_gen_sari_i32);
GEN_SPEOP_TCG_LOGIC_IMM2(evrlwi, tcg_gen_rotli_i32);
6095
/* SPE arithmetic */
/* Unary 32-bit element op applied to both halves of the 64-bit SPE GPR. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH1(name, tcg_op)                                        \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                                   \
    TCGv_i64 t2 = tcg_temp_local_new_i64();                                   \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                      \
    tcg_op(t0, t0);                                                           \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t1, t2);                                            \
    tcg_temp_free_i64(t2);                                                    \
    tcg_op(t1, t1);                                                           \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);                 \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
#else
#define GEN_SPEOP_ARITH1(name, tcg_op)                                        \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);               \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);             \
}
#endif
6130
/* ret = |arg1| per element.  Note: negating INT32_MIN yields INT32_MIN
 * (two's-complement wrap); no saturation is performed here. */
static always_inline void gen_op_evabs (TCGv_i32 ret, TCGv_i32 arg1)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    tcg_gen_brcondi_i32(TCG_COND_GE, arg1, 0, l1);
    tcg_gen_neg_i32(ret, arg1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_mov_i32(ret, arg1);
    gen_set_label(l2);
}
GEN_SPEOP_ARITH1(evabs, gen_op_evabs);
GEN_SPEOP_ARITH1(evneg, tcg_gen_neg_i32);
GEN_SPEOP_ARITH1(evextsb, tcg_gen_ext8s_i32);
GEN_SPEOP_ARITH1(evextsh, tcg_gen_ext16s_i32);
/* NOTE(review): this computes (arg1 + 0x8000) & 0xFFFF, i.e. it keeps
 * only the low 16 bits; the SPE PEM defines evrndw as
 * (rA + 0x00008000) & 0xFFFF0000.  Verify against the spec / a later
 * QEMU revision before relying on this. */
static always_inline void gen_op_evrndw (TCGv_i32 ret, TCGv_i32 arg1)
{
    tcg_gen_addi_i32(ret, arg1, 0x8000);
    tcg_gen_ext16u_i32(ret, ret);
}
GEN_SPEOP_ARITH1(evrndw, gen_op_evrndw);
GEN_SPEOP_ARITH1(evcntlsw, gen_helper_cntlsw32);
GEN_SPEOP_ARITH1(evcntlzw, gen_helper_cntlzw32);
6155
/* Binary 32-bit element op applied to both halves of the 64-bit SPE GPR.
 * On 64-bit targets the operands are split into i32 halves, combined,
 * then re-concatenated into rD. */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_ARITH2(name, tcg_op)                                        \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t2 = tcg_temp_local_new_i32();                                   \
    /* Fix: use the typed i64 allocator, like the sibling macros above;      \
     * the old tcg_temp_local_new(TCG_TYPE_I64) predates TCG variable        \
     * type checking and no longer matches the converted API. */              \
    TCGv_i64 t3 = tcg_temp_local_new_i64();                                   \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                      \
    tcg_gen_trunc_i64_i32(t2, cpu_gpr[rB(ctx->opcode)]);                      \
    tcg_op(t0, t0, t2);                                                       \
    tcg_gen_shri_i64(t3, cpu_gpr[rA(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t1, t3);                                            \
    tcg_gen_shri_i64(t3, cpu_gpr[rB(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t2, t3);                                            \
    tcg_temp_free_i64(t3);                                                    \
    tcg_op(t1, t1, t2);                                                       \
    tcg_temp_free_i32(t2);                                                    \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);                 \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
#else
#define GEN_SPEOP_ARITH2(name, tcg_op)                                        \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)],              \
           cpu_gprh[rB(ctx->opcode)]);                                        \
}
#endif
6196
6197 static always_inline void gen_op_evsrwu (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6198 {
6199 TCGv_i32 t0;
6200 int l1, l2;
6201
6202 l1 = gen_new_label();
6203 l2 = gen_new_label();
6204 t0 = tcg_temp_local_new_i32();
6205 /* No error here: 6 bits are used */
6206 tcg_gen_andi_i32(t0, arg2, 0x3F);
6207 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6208 tcg_gen_shr_i32(ret, arg1, t0);
6209 tcg_gen_br(l2);
6210 gen_set_label(l1);
6211 tcg_gen_movi_i32(ret, 0);
6212 tcg_gen_br(l2);
6213 tcg_temp_free_i32(t0);
6214 }
6215 GEN_SPEOP_ARITH2(evsrwu, gen_op_evsrwu);
6216 static always_inline void gen_op_evsrws (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6217 {
6218 TCGv_i32 t0;
6219 int l1, l2;
6220
6221 l1 = gen_new_label();
6222 l2 = gen_new_label();
6223 t0 = tcg_temp_local_new_i32();
6224 /* No error here: 6 bits are used */
6225 tcg_gen_andi_i32(t0, arg2, 0x3F);
6226 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6227 tcg_gen_sar_i32(ret, arg1, t0);
6228 tcg_gen_br(l2);
6229 gen_set_label(l1);
6230 tcg_gen_movi_i32(ret, 0);
6231 tcg_gen_br(l2);
6232 tcg_temp_free_i32(t0);
6233 }
6234 GEN_SPEOP_ARITH2(evsrws, gen_op_evsrws);
6235 static always_inline void gen_op_evslw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6236 {
6237 TCGv_i32 t0;
6238 int l1, l2;
6239
6240 l1 = gen_new_label();
6241 l2 = gen_new_label();
6242 t0 = tcg_temp_local_new_i32();
6243 /* No error here: 6 bits are used */
6244 tcg_gen_andi_i32(t0, arg2, 0x3F);
6245 tcg_gen_brcondi_i32(TCG_COND_GE, t0, 32, l1);
6246 tcg_gen_shl_i32(ret, arg1, t0);
6247 tcg_gen_br(l2);
6248 gen_set_label(l1);
6249 tcg_gen_movi_i32(ret, 0);
6250 tcg_gen_br(l2);
6251 tcg_temp_free_i32(t0);
6252 }
6253 GEN_SPEOP_ARITH2(evslw, gen_op_evslw);
6254 static always_inline void gen_op_evrlw (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
6255 {
6256 TCGv_i32 t0 = tcg_temp_new_i32();
6257 tcg_gen_andi_i32(t0, arg2, 0x1F);
6258 tcg_gen_rotl_i32(ret, arg1, t0);
6259 tcg_temp_free_i32(t0);
6260 }
6261 GEN_SPEOP_ARITH2(evrlw, gen_op_evrlw);
6262 static always_inline void gen_evmergehi (DisasContext *ctx)
6263 {
6264 if (unlikely(!ctx->spe_enabled)) {
6265 GEN_EXCP_NO_AP(ctx);
6266 return;
6267 }
6268 #if defined(TARGET_PPC64)
6269 TCGv t0 = tcg_temp_new();
6270 TCGv t1 = tcg_temp_new();
6271 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
6272 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF0000000ULL);
6273 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6274 tcg_temp_free(t0);
6275 tcg_temp_free(t1);
6276 #else
6277 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6278 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6279 #endif
6280 }
GEN_SPEOP_ARITH2(evaddw, tcg_gen_add_i32);
/* evsubfw computes arg2 - arg1 ("subtract from"), hence the swapped
 * operand order in the subtraction. */
static always_inline void gen_op_evsubf (TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    tcg_gen_sub_i32(ret, arg2, arg1);
}
GEN_SPEOP_ARITH2(evsubfw, gen_op_evsubf);
6287
/* SPE arithmetic immediate: rD = op(rB, imm) applied to each 32-bit
 * element; the rA opcode field is used directly as the immediate. */
#if defined(TARGET_PPC64)
/* 64-bit target: both elements live in one 64-bit GPR.  Split the
 * register, apply the immediate op to each half, then recombine with
 * tcg_gen_concat_i32_i64. */
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op)                                    \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                                   \
    TCGv_i64 t2 = tcg_temp_local_new_i64();                                   \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rB(ctx->opcode)]);                      \
    tcg_op(t0, t0, rA(ctx->opcode));                                          \
    tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t1, t2);                                            \
    tcg_temp_free_i64(t2);                                                    \
    tcg_op(t1, t1, rA(ctx->opcode));                                          \
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);                 \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
#else
/* 32-bit target: elements are stored in cpu_gpr (low) / cpu_gprh (high). */
#define GEN_SPEOP_ARITH_IMM2(name, tcg_op)                                    \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    tcg_op(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],                \
           rA(ctx->opcode));                                                  \
    tcg_op(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)],              \
           rA(ctx->opcode));                                                  \
}
#endif
GEN_SPEOP_ARITH_IMM2(evaddiw, tcg_gen_addi_i32);
GEN_SPEOP_ARITH_IMM2(evsubifw, tcg_gen_subi_i32);
6326
/* SPE comparison: compare the two 32-bit elements of rA and rB and set
 * the destination CR field bits (CL for the low element, CH for the high
 * element, plus their OR and AND combinations). */
#if defined(TARGET_PPC64)
#define GEN_SPEOP_COMP(name, tcg_cond)                                        \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    int l1 = gen_new_label();                                                 \
    int l2 = gen_new_label();                                                 \
    int l3 = gen_new_label();                                                 \
    int l4 = gen_new_label();                                                 \
    TCGv_i32 t0 = tcg_temp_local_new_i32();                                   \
    TCGv_i32 t1 = tcg_temp_local_new_i32();                                   \
    TCGv_i64 t2 = tcg_temp_local_new_i64();                                   \
    tcg_gen_trunc_i64_i32(t0, cpu_gpr[rA(ctx->opcode)]);                      \
    tcg_gen_trunc_i64_i32(t1, cpu_gpr[rB(ctx->opcode)]);                      \
    tcg_gen_brcond_i32(tcg_cond, t0, t1, l1);                                 \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0);                          \
    tcg_gen_br(l2);                                                           \
    gen_set_label(l1);                                                        \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)],                              \
                     CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL);                  \
    gen_set_label(l2);                                                        \
    tcg_gen_shri_i64(t2, cpu_gpr[rA(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t0, t2);                                            \
    tcg_gen_shri_i64(t2, cpu_gpr[rB(ctx->opcode)], 32);                       \
    tcg_gen_trunc_i64_i32(t1, t2);                                            \
    tcg_temp_free_i64(t2);                                                    \
    tcg_gen_brcond_i32(tcg_cond, t0, t1, l3);                                 \
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],  \
                     ~(CRF_CH | CRF_CH_AND_CL));                              \
    tcg_gen_br(l4);                                                           \
    gen_set_label(l3);                                                        \
    tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],   \
                    CRF_CH | CRF_CH_OR_CL);                                   \
    gen_set_label(l4);                                                        \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
#else
/* NOTE(review): the first store below uses tcg_gen_movi_tl on cpu_crf,
 * which is a TCGv_i32; this only type-checks because target_long is 32
 * bits on this path.  tcg_gen_movi_i32 (as used for the second store)
 * would be the consistent spelling. */
#define GEN_SPEOP_COMP(name, tcg_cond)                                        \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    int l1 = gen_new_label();                                                 \
    int l2 = gen_new_label();                                                 \
    int l3 = gen_new_label();                                                 \
    int l4 = gen_new_label();                                                 \
                                                                              \
    tcg_gen_brcond_i32(tcg_cond, cpu_gpr[rA(ctx->opcode)],                    \
                       cpu_gpr[rB(ctx->opcode)], l1);                         \
    tcg_gen_movi_tl(cpu_crf[crfD(ctx->opcode)], 0);                           \
    tcg_gen_br(l2);                                                           \
    gen_set_label(l1);                                                        \
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)],                              \
                     CRF_CL | CRF_CH_OR_CL | CRF_CH_AND_CL);                  \
    gen_set_label(l2);                                                        \
    tcg_gen_brcond_i32(tcg_cond, cpu_gprh[rA(ctx->opcode)],                   \
                       cpu_gprh[rB(ctx->opcode)], l3);                        \
    tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],  \
                     ~(CRF_CH | CRF_CH_AND_CL));                              \
    tcg_gen_br(l4);                                                           \
    gen_set_label(l3);                                                        \
    tcg_gen_ori_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfD(ctx->opcode)],   \
                    CRF_CH | CRF_CH_OR_CL);                                   \
    gen_set_label(l4);                                                        \
}
#endif
GEN_SPEOP_COMP(evcmpgtu, TCG_COND_GTU);
GEN_SPEOP_COMP(evcmpgts, TCG_COND_GT);
GEN_SPEOP_COMP(evcmpltu, TCG_COND_LTU);
GEN_SPEOP_COMP(evcmplts, TCG_COND_LT);
GEN_SPEOP_COMP(evcmpeq, TCG_COND_EQ);
6405
6406 /* SPE misc */
6407 static always_inline void gen_brinc (DisasContext *ctx)
6408 {
6409 /* Note: brinc is usable even if SPE is disabled */
6410 gen_helper_brinc(cpu_gpr[rD(ctx->opcode)],
6411 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6412 }
6413 static always_inline void gen_evmergelo (DisasContext *ctx)
6414 {
6415 if (unlikely(!ctx->spe_enabled)) {
6416 GEN_EXCP_NO_AP(ctx);
6417 return;
6418 }
6419 #if defined(TARGET_PPC64)
6420 TCGv t0 = tcg_temp_new();
6421 TCGv t1 = tcg_temp_new();
6422 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL);
6423 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
6424 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6425 tcg_temp_free(t0);
6426 tcg_temp_free(t1);
6427 #else
6428 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6429 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6430 #endif
6431 }
6432 static always_inline void gen_evmergehilo (DisasContext *ctx)
6433 {
6434 if (unlikely(!ctx->spe_enabled)) {
6435 GEN_EXCP_NO_AP(ctx);
6436 return;
6437 }
6438 #if defined(TARGET_PPC64)
6439 TCGv t0 = tcg_temp_new();
6440 TCGv t1 = tcg_temp_new();
6441 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFLL);
6442 tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF0000000ULL);
6443 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6444 tcg_temp_free(t0);
6445 tcg_temp_free(t1);
6446 #else
6447 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6448 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
6449 #endif
6450 }
6451 static always_inline void gen_evmergelohi (DisasContext *ctx)
6452 {
6453 if (unlikely(!ctx->spe_enabled)) {
6454 GEN_EXCP_NO_AP(ctx);
6455 return;
6456 }
6457 #if defined(TARGET_PPC64)
6458 TCGv t0 = tcg_temp_new();
6459 TCGv t1 = tcg_temp_new();
6460 tcg_gen_shri_tl(t0, cpu_gpr[rB(ctx->opcode)], 32);
6461 tcg_gen_shli_tl(t1, cpu_gpr[rA(ctx->opcode)], 32);
6462 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6463 tcg_temp_free(t0);
6464 tcg_temp_free(t1);
6465 #else
6466 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
6467 tcg_gen_mov_i32(cpu_gprh[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
6468 #endif
6469 }
6470 static always_inline void gen_evsplati (DisasContext *ctx)
6471 {
6472 int32_t imm = (int32_t)(rA(ctx->opcode) << 11) >> 27;
6473
6474 #if defined(TARGET_PPC64)
6475 TCGv t0 = tcg_temp_new();
6476 TCGv t1 = tcg_temp_new();
6477 tcg_gen_movi_tl(t0, imm);
6478 tcg_gen_shri_tl(t1, t0, 32);
6479 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6480 tcg_temp_free(t0);
6481 tcg_temp_free(t1);
6482 #else
6483 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
6484 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
6485 #endif
6486 }
6487 static always_inline void gen_evsplatfi (DisasContext *ctx)
6488 {
6489 uint32_t imm = rA(ctx->opcode) << 11;
6490
6491 #if defined(TARGET_PPC64)
6492 TCGv t0 = tcg_temp_new();
6493 TCGv t1 = tcg_temp_new();
6494 tcg_gen_movi_tl(t0, imm);
6495 tcg_gen_shri_tl(t1, t0, 32);
6496 tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t0, t1);
6497 tcg_temp_free(t0);
6498 tcg_temp_free(t1);
6499 #else
6500 tcg_gen_movi_i32(cpu_gpr[rD(ctx->opcode)], imm);
6501 tcg_gen_movi_i32(cpu_gprh[rD(ctx->opcode)], imm);
6502 #endif
6503 }
6504
/* evsel: select each 32-bit half of rD independently from rA or rB,
 * controlled by two bits (bit 3 for the high half, bit 2 for the low
 * half) of the CR field encoded in the low 3 opcode bits. */
static always_inline void gen_evsel (DisasContext *ctx)
{
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    int l3 = gen_new_label();
    int l4 = gen_new_label();
    TCGv_i32 t0 = tcg_temp_local_new_i32();
#if defined(TARGET_PPC64)
    /* On 64-bit targets the two halves are assembled in t1 (high) and
     * t2 (low) and combined at the end. */
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
#endif
    /* High half: test CR bit 3. */
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 3);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l1);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t1, cpu_gpr[rA(ctx->opcode)], 0xFFFFFFFF00000000ULL);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rA(ctx->opcode)]);
#endif
    tcg_gen_br(l2);
    gen_set_label(l1);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0xFFFFFFFF00000000ULL);
#else
    tcg_gen_mov_tl(cpu_gprh[rD(ctx->opcode)], cpu_gprh[rB(ctx->opcode)]);
#endif
    gen_set_label(l2);
    /* Low half: test CR bit 2. */
    tcg_gen_andi_i32(t0, cpu_crf[ctx->opcode & 0x07], 1 << 2);
    tcg_gen_brcondi_i32(TCG_COND_EQ, t0, 0, l3);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t2, cpu_gpr[rA(ctx->opcode)], 0x00000000FFFFFFFFULL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif
    tcg_gen_br(l4);
    gen_set_label(l3);
#if defined(TARGET_PPC64)
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x00000000FFFFFFFFULL);
#else
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
#endif
    gen_set_label(l4);
    tcg_temp_free_i32(t0);
#if defined(TARGET_PPC64)
    tcg_gen_or_tl(cpu_gpr[rD(ctx->opcode)], t1, t2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
#endif
}
/* Four decoder entries (xo 0x1c..0x1f) are needed because gen_evsel
 * reads the CR field selector from the low bits of the opcode. */
GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE)
{
    gen_evsel(ctx);
}
GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE)
{
    gen_evsel(ctx);
}
GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE)
{
    gen_evsel(ctx);
}
GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE)
{
    gen_evsel(ctx);
}
6569
/* Opcode registrations for the SPE integer instructions; each GEN_SPE
 * entry pairs two handler names under one opcode slot. */
GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////

/* Load and stores */
GEN_SPEOP_LDST(dd, 3);
GEN_SPEOP_LDST(dw, 3);
GEN_SPEOP_LDST(dh, 3);
GEN_SPEOP_LDST(whe, 2);
GEN_SPEOP_LD(whou, 2);
GEN_SPEOP_LD(whos, 2);
GEN_SPEOP_ST(who, 2);
6604
/* evstwwe ("store word of word, even") is derived from the evstwwo op:
 * shift T1 (gen_op_srli32_T1_64 — presumably a 32-bit right shift of the
 * 64-bit value; TODO confirm) so the even/high word lands where the
 * "odd" store expects it, then reuse that store. */
#define _GEN_OP_SPE_STWWE(suffix)                                             \
static always_inline void gen_op_spe_stwwe_##suffix (void)                    \
{                                                                             \
    gen_op_srli32_T1_64();                                                    \
    gen_op_spe_stwwo_##suffix();                                              \
}
#define _GEN_OP_SPE_STWWE_LE(suffix)                                          \
static always_inline void gen_op_spe_stwwe_le_##suffix (void)                 \
{                                                                             \
    gen_op_srli32_T1_64();                                                    \
    gen_op_spe_stwwo_le_##suffix();                                           \
}
#if defined(TARGET_PPC64)
/* 64-bit targets additionally instantiate the 64-bit address-space
 * variants of the same two ops. */
#define GEN_OP_SPE_STWWE(suffix)                                              \
_GEN_OP_SPE_STWWE(suffix);                                                    \
_GEN_OP_SPE_STWWE_LE(suffix);                                                 \
static always_inline void gen_op_spe_stwwe_64_##suffix (void)                 \
{                                                                             \
    gen_op_srli32_T1_64();                                                    \
    gen_op_spe_stwwo_64_##suffix();                                           \
}                                                                             \
static always_inline void gen_op_spe_stwwe_le_64_##suffix (void)              \
{                                                                             \
    gen_op_srli32_T1_64();                                                    \
    gen_op_spe_stwwo_le_64_##suffix();                                        \
}
#else
#define GEN_OP_SPE_STWWE(suffix)                                              \
_GEN_OP_SPE_STWWE(suffix);                                                    \
_GEN_OP_SPE_STWWE_LE(suffix)
#endif
#if defined(CONFIG_USER_ONLY)
GEN_OP_SPE_STWWE(raw);
#else /* defined(CONFIG_USER_ONLY) */
GEN_OP_SPE_STWWE(user);
GEN_OP_SPE_STWWE(kernel);
GEN_OP_SPE_STWWE(hypv);
#endif /* defined(CONFIG_USER_ONLY) */
GEN_SPEOP_ST(wwe, 2);
GEN_SPEOP_ST(wwo, 2);
6645
/* Half-word-splat loads are composed from simpler ops: perform the base
 * load, then splat the loaded word (gen_op_splatw_T1_64). */
#define GEN_SPE_LDSPLAT(name, op, suffix)                                     \
static always_inline void gen_op_spe_l##name##_##suffix (void)                \
{                                                                             \
    gen_op_##op##_##suffix();                                                 \
    gen_op_splatw_T1_64();                                                    \
}

/* "Even" half-word load: load then shift into the upper half
 * (gen_op_sli16_T1_64). */
#define GEN_OP_SPE_LHE(suffix)                                                \
static always_inline void gen_op_spe_lhe_##suffix (void)                      \
{                                                                             \
    gen_op_spe_lh_##suffix();                                                 \
    gen_op_sli16_T1_64();                                                     \
}

/* Sign-extending half-word load: load then extend (gen_op_extsh_T1_64). */
#define GEN_OP_SPE_LHX(suffix)                                                \
static always_inline void gen_op_spe_lhx_##suffix (void)                      \
{                                                                             \
    gen_op_spe_lh_##suffix();                                                 \
    gen_op_extsh_T1_64();                                                     \
}
6666
/* Instantiate the composed load ops for every access-mode suffix:
 * "raw" in user-only builds, user/kernel/hypv in system builds, each in
 * big- and little-endian ("le_") and, on 64-bit targets, 64-bit
 * address-space ("64_") variants. */
#if defined(CONFIG_USER_ONLY)
GEN_OP_SPE_LHE(raw);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, raw);
GEN_OP_SPE_LHE(le_raw);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_raw);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, raw);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_raw);
GEN_OP_SPE_LHX(raw);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, raw);
GEN_OP_SPE_LHX(le_raw);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_raw);
#if defined(TARGET_PPC64)
GEN_OP_SPE_LHE(64_raw);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_raw);
GEN_OP_SPE_LHE(le_64_raw);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_raw);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_raw);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_raw);
GEN_OP_SPE_LHX(64_raw);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_raw);
GEN_OP_SPE_LHX(le_64_raw);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_raw);
#endif
#else
GEN_OP_SPE_LHE(user);
GEN_OP_SPE_LHE(kernel);
GEN_OP_SPE_LHE(hypv);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, user);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, kernel);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, hypv);
GEN_OP_SPE_LHE(le_user);
GEN_OP_SPE_LHE(le_kernel);
GEN_OP_SPE_LHE(le_hypv);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_user);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_kernel);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_hypv);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, user);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, kernel);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, hypv);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_user);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_kernel);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_hypv);
GEN_OP_SPE_LHX(user);
GEN_OP_SPE_LHX(kernel);
GEN_OP_SPE_LHX(hypv);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, user);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, kernel);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, hypv);
GEN_OP_SPE_LHX(le_user);
GEN_OP_SPE_LHX(le_kernel);
GEN_OP_SPE_LHX(le_hypv);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_user);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_kernel);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_hypv);
#if defined(TARGET_PPC64)
GEN_OP_SPE_LHE(64_user);
GEN_OP_SPE_LHE(64_kernel);
GEN_OP_SPE_LHE(64_hypv);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_user);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_kernel);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_hypv);
GEN_OP_SPE_LHE(le_64_user);
GEN_OP_SPE_LHE(le_64_kernel);
GEN_OP_SPE_LHE(le_64_hypv);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_user);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_kernel);
GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_hypv);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_user);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_kernel);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_hypv);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_user);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_kernel);
GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_hypv);
GEN_OP_SPE_LHX(64_user);
GEN_OP_SPE_LHX(64_kernel);
GEN_OP_SPE_LHX(64_hypv);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_user);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_kernel);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_hypv);
GEN_OP_SPE_LHX(le_64_user);
GEN_OP_SPE_LHX(le_64_kernel);
GEN_OP_SPE_LHX(le_64_hypv);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_user);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_kernel);
GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_hypv);
#endif
#endif
GEN_SPEOP_LD(hhesplat, 1);
GEN_SPEOP_LD(hhousplat, 1);
GEN_SPEOP_LD(hhossplat, 1);
GEN_SPEOP_LD(wwsplat, 2);
GEN_SPEOP_LD(whsplat, 2);
6759
/* Opcode registrations for the SPE load/store instructions (indexed "x"
 * form paired with the displacement form). */
GEN_SPE(evlddx, evldd, 0x00, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evldwx, evldw, 0x01, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evldhx, evldh, 0x02, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evlhhesplatx, evlhhesplat, 0x04, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evlhhousplatx, evlhhousplat, 0x06, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evlhhossplatx, evlhhossplat, 0x07, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evlwhex, evlwhe, 0x08, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evlwhoux, evlwhou, 0x0A, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evlwhosx, evlwhos, 0x0B, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evlwwsplatx, evlwwsplat, 0x0C, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evlwhsplatx, evlwhsplat, 0x0E, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evstddx, evstdd, 0x10, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evstdwx, evstdw, 0x11, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evstdhx, evstdh, 0x12, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evstwhex, evstwhe, 0x18, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evstwhox, evstwho, 0x1A, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evstwwex, evstwwe, 0x1C, 0x0C, 0x00000000, PPC_SPE); //
GEN_SPE(evstwwox, evstwwo, 0x1E, 0x0C, 0x00000000, PPC_SPE); //
6778
/* Multiply and add - TODO: not yet implemented; the opcode table below
 * is kept for reference but compiled out with #if 0. */
#if 0
GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);

GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);

GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
GEN_SPE(evmra, speundef, 0x07, 0x13, 0x0000F800, PPC_SPE);

GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);

GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);

GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);

GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
#endif
6854
/*** SPE floating-point extension ***/
/* These generators still use the legacy cpu_T64[]/cpu_T[] temporaries:
 * operands are loaded into cpu_T64[0]/cpu_T64[1], the old-style
 * gen_op_##name() is invoked, and the result is read back from
 * cpu_T64[0] (or cpu_T[0] for comparisons).
 * NOTE(review): unlike ARITH1/ARITH2/COMP below, CONV does not check
 * ctx->spe_enabled. */
#define GEN_SPEFPUOP_CONV(name)                                               \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    gen_load_gpr64(cpu_T64[0], rB(ctx->opcode));                              \
    gen_op_##name();                                                          \
    gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);                             \
}

/* One-operand arithmetic: rD = op(rA). */
#define GEN_SPEFPUOP_ARITH1(name)                                             \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_load_gpr64(cpu_T64[0], rA(ctx->opcode));                              \
    gen_op_##name();                                                          \
    gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);                             \
}

/* Two-operand arithmetic: rD = op(rA, rB). */
#define GEN_SPEFPUOP_ARITH2(name)                                             \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_load_gpr64(cpu_T64[0], rA(ctx->opcode));                              \
    gen_load_gpr64(cpu_T64[1], rB(ctx->opcode));                              \
    gen_op_##name();                                                          \
    gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);                             \
}

/* Comparison: the op leaves its result in cpu_T[0]; it is truncated and
 * masked to 4 bits before being written to the destination CR field. */
#define GEN_SPEFPUOP_COMP(name)                                               \
static always_inline void gen_##name (DisasContext *ctx)                      \
{                                                                             \
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];                                \
    if (unlikely(!ctx->spe_enabled)) {                                        \
        GEN_EXCP_NO_AP(ctx);                                                  \
        return;                                                               \
    }                                                                         \
    gen_load_gpr64(cpu_T64[0], rA(ctx->opcode));                              \
    gen_load_gpr64(cpu_T64[1], rB(ctx->opcode));                              \
    gen_op_##name();                                                          \
    tcg_gen_trunc_tl_i32(crf, cpu_T[0]);                                      \
    tcg_gen_andi_i32(crf, crf, 0xf);                                          \
}
6903
/* Single precision floating-point vectors operations */
/* (vector = the operation is applied to both 32-bit elements) */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2(evfsadd);
GEN_SPEFPUOP_ARITH2(evfssub);
GEN_SPEFPUOP_ARITH2(evfsmul);
GEN_SPEFPUOP_ARITH2(evfsdiv);
GEN_SPEFPUOP_ARITH1(evfsabs);
GEN_SPEFPUOP_ARITH1(evfsnabs);
GEN_SPEFPUOP_ARITH1(evfsneg);
/* Conversion */
GEN_SPEFPUOP_CONV(evfscfui);
GEN_SPEFPUOP_CONV(evfscfsi);
GEN_SPEFPUOP_CONV(evfscfuf);
GEN_SPEFPUOP_CONV(evfscfsf);
GEN_SPEFPUOP_CONV(evfsctui);
GEN_SPEFPUOP_CONV(evfsctsi);
GEN_SPEFPUOP_CONV(evfsctuf);
GEN_SPEFPUOP_CONV(evfsctsf);
GEN_SPEFPUOP_CONV(evfsctuiz);
GEN_SPEFPUOP_CONV(evfsctsiz);
/* Comparison */
GEN_SPEFPUOP_COMP(evfscmpgt);
GEN_SPEFPUOP_COMP(evfscmplt);
GEN_SPEFPUOP_COMP(evfscmpeq);
GEN_SPEFPUOP_COMP(evfststgt);
GEN_SPEFPUOP_COMP(evfststlt);
GEN_SPEFPUOP_COMP(evfststeq);

/* Opcodes definitions */
GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPEFPU); //
GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPEFPU); //
GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPEFPU); //
GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPEFPU); //
GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPEFPU); //
GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPEFPU); //
GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPEFPU); //
GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPEFPU); //
GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPEFPU); //
GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPEFPU); //
GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPEFPU); //
GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPEFPU); //
GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPEFPU); //
GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPEFPU); //
6947
/* Single precision floating-point operations */
/* (scalar: only the low 32-bit element is operated on) */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2(efsadd);
GEN_SPEFPUOP_ARITH2(efssub);
GEN_SPEFPUOP_ARITH2(efsmul);
GEN_SPEFPUOP_ARITH2(efsdiv);
GEN_SPEFPUOP_ARITH1(efsabs);
GEN_SPEFPUOP_ARITH1(efsnabs);
GEN_SPEFPUOP_ARITH1(efsneg);
/* Conversion */
GEN_SPEFPUOP_CONV(efscfui);
GEN_SPEFPUOP_CONV(efscfsi);
GEN_SPEFPUOP_CONV(efscfuf);
GEN_SPEFPUOP_CONV(efscfsf);
GEN_SPEFPUOP_CONV(efsctui);
GEN_SPEFPUOP_CONV(efsctsi);
GEN_SPEFPUOP_CONV(efsctuf);
GEN_SPEFPUOP_CONV(efsctsf);
GEN_SPEFPUOP_CONV(efsctuiz);
GEN_SPEFPUOP_CONV(efsctsiz);
GEN_SPEFPUOP_CONV(efscfd);
/* Comparison */
GEN_SPEFPUOP_COMP(efscmpgt);
GEN_SPEFPUOP_COMP(efscmplt);
GEN_SPEFPUOP_COMP(efscmpeq);
GEN_SPEFPUOP_COMP(efststgt);
GEN_SPEFPUOP_COMP(efststlt);
GEN_SPEFPUOP_COMP(efststeq);

/* Opcodes definitions */
GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPEFPU); //
GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPEFPU); //
GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPEFPU); //
GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPEFPU); //
GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPEFPU); //
GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPEFPU); //
GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPEFPU); //
GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPEFPU); //
6992
/* Double precision floating-point operations */
/* Arithmetic */
GEN_SPEFPUOP_ARITH2(efdadd);
GEN_SPEFPUOP_ARITH2(efdsub);
GEN_SPEFPUOP_ARITH2(efdmul);
GEN_SPEFPUOP_ARITH2(efddiv);
GEN_SPEFPUOP_ARITH1(efdabs);
GEN_SPEFPUOP_ARITH1(efdnabs);
GEN_SPEFPUOP_ARITH1(efdneg);
/* Conversion */

GEN_SPEFPUOP_CONV(efdcfui);
GEN_SPEFPUOP_CONV(efdcfsi);
GEN_SPEFPUOP_CONV(efdcfuf);
GEN_SPEFPUOP_CONV(efdcfsf);
GEN_SPEFPUOP_CONV(efdctui);
GEN_SPEFPUOP_CONV(efdctsi);
GEN_SPEFPUOP_CONV(efdctuf);
GEN_SPEFPUOP_CONV(efdctsf);
GEN_SPEFPUOP_CONV(efdctuiz);
GEN_SPEFPUOP_CONV(efdctsiz);
GEN_SPEFPUOP_CONV(efdcfs);
GEN_SPEFPUOP_CONV(efdcfuid);
GEN_SPEFPUOP_CONV(efdcfsid);
GEN_SPEFPUOP_CONV(efdctuidz);
GEN_SPEFPUOP_CONV(efdctsidz);
/* Comparison */
GEN_SPEFPUOP_COMP(efdcmpgt);
GEN_SPEFPUOP_COMP(efdcmplt);
GEN_SPEFPUOP_COMP(efdcmpeq);
GEN_SPEFPUOP_COMP(efdtstgt);
GEN_SPEFPUOP_COMP(efdtstlt);
GEN_SPEFPUOP_COMP(efdtsteq);

/* Opcodes definitions */
GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPEFPU); //
GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPEFPU); //
GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPEFPU); //
GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPEFPU); //
GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPEFPU); //
GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPEFPU); //
GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPEFPU); //
GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPEFPU); //
GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPEFPU); //

/* End opcode list */
GEN_OPCODE_MARK(end);
7047
7048 #include "translate_init.c"
7049 #include "helper_regs.h"
7050
7051 /*****************************************************************************/
7052 /* Misc PowerPC helpers */
/* Dump the architectural CPU state to stream F via the fprintf-like
 * callback CPU_FPRINTF: NIP/LR/CTR/XER, MSR/HID0/hflags, time base and
 * decrementer (system mode), all 32 GPRs, the condition register (hex
 * nibbles plus a decoded L/G/E/O view), the reservation address, the 32
 * FPRs, and SRR0/SRR1/SDR1 (system mode).  FLAGS is currently unused.  */
void cpu_dump_state (CPUState *env, FILE *f,
                     int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
                     int flags)
{
/* Registers printed per output row.  */
#define RGPL  4
#define RFPL  4

    int i;

    cpu_fprintf(f, "NIP " ADDRX " LR " ADDRX " CTR " ADDRX " XER %08x\n",
                env->nip, env->lr, env->ctr, env->xer);
    cpu_fprintf(f, "MSR " ADDRX " HID0 " ADDRX "  HF " ADDRX " idx %d\n",
                env->msr, env->spr[SPR_HID0], env->hflags, env->mmu_idx);
#if !defined(NO_TIMER_DUMP)
    /* Decrementer only exists outside of user-mode emulation; the format
     * string and argument list are spliced accordingly.  */
    cpu_fprintf(f, "TB %08x %08x "
#if !defined(CONFIG_USER_ONLY)
                "DECR %08x"
#endif
                "\n",
                cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
#if !defined(CONFIG_USER_ONLY)
                , cpu_ppc_load_decr(env)
#endif
                );
#endif
    for (i = 0; i < 32; i++) {
        if ((i & (RGPL - 1)) == 0)
            cpu_fprintf(f, "GPR%02d", i);
        cpu_fprintf(f, " " REGX, ppc_dump_gpr(env, i));
        if ((i & (RGPL - 1)) == (RGPL - 1))
            cpu_fprintf(f, "\n");
    }
    cpu_fprintf(f, "CR ");
    for (i = 0; i < 8; i++)
        cpu_fprintf(f, "%01x", env->crf[i]);
    cpu_fprintf(f, "  [");
    /* Decode each CR field: L(ess)/G(reater)/E(qual) from bits 3..1,
     * 'O' if the summary-overflow bit (bit 0) is set.  */
    for (i = 0; i < 8; i++) {
        char a = '-';
        if (env->crf[i] & 0x08)
            a = 'L';
        else if (env->crf[i] & 0x04)
            a = 'G';
        else if (env->crf[i] & 0x02)
            a = 'E';
        cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
    }
    cpu_fprintf(f, " ]             RES " ADDRX "\n", env->reserve);
    for (i = 0; i < 32; i++) {
        if ((i & (RFPL - 1)) == 0)
            cpu_fprintf(f, "FPR%02d", i);
        /* Print raw FPR bits.  NOTE(review): pointer-cast type-pun of
         * float64 to uint64_t — relies on same size/representation.  */
        cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
        if ((i & (RFPL - 1)) == (RFPL - 1))
            cpu_fprintf(f, "\n");
    }
#if !defined(CONFIG_USER_ONLY)
    cpu_fprintf(f, "SRR0 " ADDRX " SRR1 " ADDRX " SDR1 " ADDRX "\n",
                env->spr[SPR_SRR0], env->spr[SPR_SRR1], env->sdr1);
#endif

#undef RGPL
#undef RFPL
}
7115
7116 void cpu_dump_statistics (CPUState *env, FILE*f,
7117 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
7118 int flags)
7119 {
7120 #if defined(DO_PPC_STATISTICS)
7121 opc_handler_t **t1, **t2, **t3, *handler;
7122 int op1, op2, op3;
7123
7124 t1 = env->opcodes;
7125 for (op1 = 0; op1 < 64; op1++) {
7126 handler = t1[op1];
7127 if (is_indirect_opcode(handler)) {
7128 t2 = ind_table(handler);
7129 for (op2 = 0; op2 < 32; op2++) {
7130 handler = t2[op2];
7131 if (is_indirect_opcode(handler)) {
7132 t3 = ind_table(handler);
7133 for (op3 = 0; op3 < 32; op3++) {
7134 handler = t3[op3];
7135 if (handler->count == 0)
7136 continue;
7137 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
7138 "%016llx %lld\n",
7139 op1, op2, op3, op1, (op3 << 5) | op2,
7140 handler->oname,
7141 handler->count, handler->count);
7142 }
7143 } else {
7144 if (handler->count == 0)
7145 continue;
7146 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
7147 "%016llx %lld\n",
7148 op1, op2, op1, op2, handler->oname,
7149 handler->count, handler->count);
7150 }
7151 }
7152 } else {
7153 if (handler->count == 0)
7154 continue;
7155 cpu_fprintf(f, "%02x (%02x ) %16s: %016llx %lld\n",
7156 op1, op1, handler->oname,
7157 handler->count, handler->count);
7158 }
7159 }
7160 #endif
7161 }
7162
7163 /*****************************************************************************/
/*****************************************************************************/
/* Translate a block of guest code starting at tb->pc into TCG ops.
 * When SEARCH_PC is non-zero, additionally record per-op metadata
 * (gen_opc_pc / gen_opc_instr_start / gen_opc_icount) so that a host PC
 * inside the generated code can later be mapped back to a guest PC
 * (see gen_pc_load).  Translation stops on an exception-raising
 * instruction, a page boundary, single-step, a full op buffer, or the
 * icount budget in tb->cflags.  */
static always_inline void gen_intermediate_code_internal (CPUState *env,
                                                          TranslationBlock *tb,
                                                          int search_pc)
{
    DisasContext ctx, *ctxp = &ctx;
    opc_handler_t **table, *handler;
    target_ulong pc_start;
    uint16_t *gen_opc_end;
    int supervisor, little_endian;
    int j, lj = -1;       /* lj: last gen_opc_* slot written (search_pc) */
    int num_insns;
    int max_insns;

    pc_start = tb->pc;
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
#if defined(OPTIMIZE_FPRF_UPDATE)
    gen_fprf_ptr = gen_fprf_buf;
#endif
    /* Seed the disassembly context from the current CPU state.  */
    ctx.nip = pc_start;
    ctx.tb = tb;
    ctx.exception = POWERPC_EXCP_NONE;
    ctx.spr_cb = env->spr_cb;
    supervisor = env->mmu_idx;
#if !defined(CONFIG_USER_ONLY)
    ctx.supervisor = supervisor;
#endif
    little_endian = env->hflags & (1 << MSR_LE) ? 1 : 0;
#if defined(TARGET_PPC64)
    ctx.sf_mode = msr_sf;
    /* mem_idx packs privilege level, 64-bit mode and endianness.  */
    ctx.mem_idx = (supervisor << 2) | (msr_sf << 1) | little_endian;
#else
    ctx.mem_idx = (supervisor << 1) | little_endian;
#endif
    ctx.dcache_line_size = env->dcache_line_size;
    ctx.fpu_enabled = msr_fp;
    /* Facility-enable bits only count when the CPU model has the
     * corresponding feature flag.  */
    if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
        ctx.spe_enabled = msr_spe;
    else
        ctx.spe_enabled = 0;
    if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
        ctx.altivec_enabled = msr_vr;
    else
        ctx.altivec_enabled = 0;
    if ((env->flags & POWERPC_FLAG_SE) && msr_se)
        ctx.singlestep_enabled = CPU_SINGLE_STEP;
    else
        ctx.singlestep_enabled = 0;
    if ((env->flags & POWERPC_FLAG_BE) && msr_be)
        ctx.singlestep_enabled |= CPU_BRANCH_STEP;
    if (unlikely(env->singlestep_enabled))
        ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
#if defined (DO_SINGLE_STEP) && 0
    /* Single step trace mode */
    msr_se = 1;
#endif
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;

    gen_icount_start();
    /* Set env in case of segfault during code fetch */
    /* Main translation loop: one guest instruction per iteration.  */
    while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
        /* Emit a debug trap when a breakpoint matches the next insn.  */
        if (unlikely(env->nb_breakpoints > 0)) {
            for (j = 0; j < env->nb_breakpoints; j++) {
                if (env->breakpoints[j] == ctx.nip) {
                    gen_update_nip(&ctx, ctx.nip);
                    gen_op_debug();
                    break;
                }
            }
        }
        if (unlikely(search_pc)) {
            /* Record guest PC / icount for the ops about to be emitted;
             * pad any gap left by the previous instruction with zeros.  */
            j = gen_opc_ptr - gen_opc_buf;
            if (lj < j) {
                lj++;
                while (lj < j)
                    gen_opc_instr_start[lj++] = 0;
                gen_opc_pc[lj] = ctx.nip;
                gen_opc_instr_start[lj] = 1;
                gen_opc_icount[lj] = num_insns;
            }
        }
#if defined PPC_DEBUG_DISAS
        if (loglevel & CPU_LOG_TB_IN_ASM) {
            fprintf(logfile, "----------------\n");
            fprintf(logfile, "nip=" ADDRX " super=%d ir=%d\n",
                    ctx.nip, supervisor, (int)msr_ir);
        }
#endif
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
            gen_io_start();
        /* Fetch the guest opcode, byte-swapping in little-endian mode.  */
        if (unlikely(little_endian)) {
            ctx.opcode = bswap32(ldl_code(ctx.nip));
        } else {
            ctx.opcode = ldl_code(ctx.nip);
        }
#if defined PPC_DEBUG_DISAS
        if (loglevel & CPU_LOG_TB_IN_ASM) {
            fprintf(logfile, "translate opcode %08x (%02x %02x %02x) (%s)\n",
                    ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
                    opc3(ctx.opcode), little_endian ? "little" : "big");
        }
#endif
        ctx.nip += 4;
        /* Decode: walk up to three levels of opcode dispatch tables.  */
        table = env->opcodes;
        num_insns++;
        handler = table[opc1(ctx.opcode)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc2(ctx.opcode)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc3(ctx.opcode)];
            }
        }
        /* Is opcode *REALLY* valid ? */
        if (unlikely(handler->handler == &gen_invalid)) {
            if (loglevel != 0) {
                fprintf(logfile, "invalid/unsupported opcode: "
                        "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
                        opc1(ctx.opcode), opc2(ctx.opcode),
                        opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
            } else {
                printf("invalid/unsupported opcode: "
                       "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
                       opc1(ctx.opcode), opc2(ctx.opcode),
                       opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
            }
        } else {
            /* Reject encodings that set bits the handler declares
             * invalid, raising a program-check exception.  */
            if (unlikely((ctx.opcode & handler->inval) != 0)) {
                if (loglevel != 0) {
                    fprintf(logfile, "invalid bits: %08x for opcode: "
                            "%02x - %02x - %02x (%08x) " ADDRX "\n",
                            ctx.opcode & handler->inval, opc1(ctx.opcode),
                            opc2(ctx.opcode), opc3(ctx.opcode),
                            ctx.opcode, ctx.nip - 4);
                } else {
                    printf("invalid bits: %08x for opcode: "
                           "%02x - %02x - %02x (%08x) " ADDRX "\n",
                           ctx.opcode & handler->inval, opc1(ctx.opcode),
                           opc2(ctx.opcode), opc3(ctx.opcode),
                           ctx.opcode, ctx.nip - 4);
                }
                GEN_EXCP_INVAL(ctxp);
                break;
            }
        }
        /* Emit TCG ops for this instruction.  */
        (*(handler->handler))(&ctx);
#if defined(DO_PPC_STATISTICS)
        handler->count++;
#endif
        /* Check trace mode exceptions */
        if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
                     (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
                     ctx.exception != POWERPC_SYSCALL &&
                     ctx.exception != POWERPC_EXCP_TRAP &&
                     ctx.exception != POWERPC_EXCP_BRANCH)) {
            GEN_EXCP(ctxp, POWERPC_EXCP_TRACE, 0);
        } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
                            (env->singlestep_enabled) ||
                            num_insns >= max_insns)) {
            /* if we reach a page boundary or are single stepping, stop
             * generation
             */
            break;
        }
#if defined (DO_SINGLE_STEP)
        break;
#endif
    }
    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    /* Close the TB: chain to the next block if translation ended
     * normally, otherwise emit a plain exit (the exception has already
     * been generated, except for branches which handle it themselves).  */
    if (ctx.exception == POWERPC_EXCP_NONE) {
        gen_goto_tb(&ctx, 0, ctx.nip);
    } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
        if (unlikely(env->singlestep_enabled)) {
            gen_update_nip(&ctx, ctx.nip);
            gen_op_debug();
        }
        /* Generate the return instruction */
        tcg_gen_exit_tb(0);
    }
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    if (unlikely(search_pc)) {
        /* Zero-fill the gen_opc_* tail so lookups past the last insn fail.  */
        j = gen_opc_ptr - gen_opc_buf;
        lj++;
        while (lj <= j)
            gen_opc_instr_start[lj++] = 0;
    } else {
        tb->size = ctx.nip - pc_start;
        tb->icount = num_insns;
    }
#if defined(DEBUG_DISAS)
    if (loglevel & CPU_LOG_TB_CPU) {
        fprintf(logfile, "---------------- excp: %04x\n", ctx.exception);
        cpu_dump_state(env, logfile, fprintf, 0);
    }
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        int flags;
        flags = env->bfd_mach;
        flags |= little_endian << 16;
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, ctx.nip - pc_start, flags);
        fprintf(logfile, "\n");
    }
#endif
}
7373
/* Public entry point: translate the TB without PC-search bookkeeping.  */
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}
7378
/* Public entry point: re-translate the TB while recording per-op guest
 * PCs, used when mapping a host fault address back to a guest PC.  */
void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
7383
/* Restore CPU state after an exception taken in the middle of a TB:
 * reset env->nip from the recorded guest PC at op index PC_POS, and
 * classify the access type of the micro-op found there (float /
 * reservation / external-control / plain integer) into
 * env->access_type.  SEARCHED_PC and PUC are unused here.  */
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                unsigned long searched_pc, int pc_pos, void *puc)
{
    int type, c;
    /* for PPC, we need to look at the micro operation to get the
     * access type */
    env->nip = gen_opc_pc[pc_pos];
    c = gen_opc_buf[pc_pos];
    switch(c) {
#if defined(CONFIG_USER_ONLY)
/* User mode has only the "raw" access variant of each op.  */
#define CASE3(op)\
case INDEX_op_ ## op ## _raw
#else
/* System mode: one case label per privilege level.  */
#define CASE3(op)\
case INDEX_op_ ## op ## _user:\
case INDEX_op_ ## op ## _kernel:\
case INDEX_op_ ## op ## _hypv
#endif

    /* Floating-point loads/stores.  */
    CASE3(stfd):
    CASE3(stfs):
    CASE3(lfd):
    CASE3(lfs):
        type = ACCESS_FLOAT;
        break;
    /* Load-and-reserve / store-conditional.  */
    CASE3(lwarx):
        type = ACCESS_RES;
        break;
    CASE3(stwcx):
        type = ACCESS_RES;
        break;
    /* External-control instructions.  */
    CASE3(eciwx):
    CASE3(ecowx):
        type = ACCESS_EXT;
        break;
    default:
        type = ACCESS_INT;
        break;
    }
    env->access_type = type;
}