1 /*
2 * PowerPC emulation for qemu: main translation routines.
3 *
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25
26 #include "cpu.h"
27 #include "exec-all.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31 #include "qemu-common.h"
32
33 #define CPU_SINGLE_STEP 0x1
34 #define CPU_BRANCH_STEP 0x2
35 #define GDBSTUB_SINGLE_STEP 0x4
36
37 /* Include definitions for instruction classes and implementation flags */
38 //#define DO_SINGLE_STEP
39 //#define PPC_DEBUG_DISAS
40 //#define DO_PPC_STATISTICS
41 //#define OPTIMIZE_FPRF_UPDATE
42
43 /*****************************************************************************/
44 /* Code translation helpers */
45
46 /* global register indexes */
47 static TCGv cpu_env;
48 static char cpu_reg_names[10*3 + 22*4 /* GPR */
49 #if !defined(TARGET_PPC64)
50 + 10*4 + 22*5 /* SPE GPRh */
51 #endif
52 + 10*4 + 22*5 /* FPR */
53 + 2*(10*6 + 22*7) /* AVRh, AVRl */
54 + 8*5 /* CRF */];
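/* The buffer above backs every register name generated in ppc_translate_init():
 * each sprintf() below writes one NUL-terminated name ("r0".."r31", "fp0"..,
 * "avr0H"/"avr0L".., "crf0".."crf7") and advances p by exactly the per-name
 * sizes that the array dimension adds up (3 or 4 bytes per GPR name, etc.). */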
55 static TCGv cpu_gpr[32];
56 #if !defined(TARGET_PPC64)
57 static TCGv cpu_gprh[32];
58 #endif
59 static TCGv cpu_fpr[32];
60 static TCGv cpu_avrh[32], cpu_avrl[32];
61 static TCGv cpu_crf[8];
62 static TCGv cpu_nip;
63 static TCGv cpu_ctr;
64 static TCGv cpu_lr;
65
66 /* dyngen register indexes */
67 static TCGv cpu_T[3];
68 #if defined(TARGET_PPC64)
69 #define cpu_T64 cpu_T
70 #else
71 static TCGv cpu_T64[3];
72 #endif
73 static TCGv cpu_FT[3];
74 static TCGv cpu_AVRh[3], cpu_AVRl[3];
75
76 #include "gen-icount.h"
77
78 void ppc_translate_init(void)
79 {
80 int i;
81 char* p;
82 static int done_init = 0;
83
84 if (done_init)
85 return;
86
87 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
88 #if TARGET_LONG_BITS > HOST_LONG_BITS
89 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
90 TCG_AREG0, offsetof(CPUState, t0), "T0");
91 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
92 TCG_AREG0, offsetof(CPUState, t1), "T1");
93 cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
94 TCG_AREG0, offsetof(CPUState, t2), "T2");
95 #else
96 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
97 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
98 cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
99 #endif
100 #if !defined(TARGET_PPC64)
101 cpu_T64[0] = tcg_global_mem_new(TCG_TYPE_I64,
102 TCG_AREG0, offsetof(CPUState, t0_64),
103 "T0_64");
104 cpu_T64[1] = tcg_global_mem_new(TCG_TYPE_I64,
105 TCG_AREG0, offsetof(CPUState, t1_64),
106 "T1_64");
107 cpu_T64[2] = tcg_global_mem_new(TCG_TYPE_I64,
108 TCG_AREG0, offsetof(CPUState, t2_64),
109 "T2_64");
110 #endif
111
112 cpu_FT[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
113 offsetof(CPUState, ft0), "FT0");
114 cpu_FT[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
115 offsetof(CPUState, ft1), "FT1");
116 cpu_FT[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
117 offsetof(CPUState, ft2), "FT2");
118
119 cpu_AVRh[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
120 offsetof(CPUState, avr0.u64[0]), "AVR0H");
121 cpu_AVRl[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
122 offsetof(CPUState, avr0.u64[1]), "AVR0L");
123 cpu_AVRh[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
124 offsetof(CPUState, avr1.u64[0]), "AVR1H");
125 cpu_AVRl[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
126 offsetof(CPUState, avr1.u64[1]), "AVR1L");
127 cpu_AVRh[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
128 offsetof(CPUState, avr2.u64[0]), "AVR2H");
129 cpu_AVRl[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
130 offsetof(CPUState, avr2.u64[1]), "AVR2L");
131
132 p = cpu_reg_names;
133
134 for (i = 0; i < 8; i++) {
135 sprintf(p, "crf%d", i);
136 cpu_crf[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
137 offsetof(CPUState, crf[i]), p);
138 p += 5;
139 }
140
141 for (i = 0; i < 32; i++) {
142 sprintf(p, "r%d", i);
143 cpu_gpr[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
144 offsetof(CPUState, gpr[i]), p);
145 p += (i < 10) ? 3 : 4;
146 #if !defined(TARGET_PPC64)
147 sprintf(p, "r%dH", i);
148 cpu_gprh[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
149 offsetof(CPUState, gprh[i]), p);
150 p += (i < 10) ? 4 : 5;
151 #endif
152
153 sprintf(p, "fp%d", i);
154 cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
155 offsetof(CPUState, fpr[i]), p);
156 p += (i < 10) ? 4 : 5;
157
158 sprintf(p, "avr%dH", i);
159 cpu_avrh[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
160 offsetof(CPUState, avr[i].u64[0]), p);
161 p += (i < 10) ? 6 : 7;
162
163 sprintf(p, "avr%dL", i);
164 cpu_avrl[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
165 offsetof(CPUState, avr[i].u64[1]), p);
166 p += (i < 10) ? 6 : 7;
167 }
168
169 cpu_nip = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
170 offsetof(CPUState, nip), "nip");
171
172 cpu_ctr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
173 offsetof(CPUState, ctr), "ctr");
174
175 cpu_lr = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
176 offsetof(CPUState, lr), "lr");
177
178 /* register helpers */
179 #undef DEF_HELPER
180 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
181 #include "helper.h"
182
183 done_init = 1;
184 }
185
186 #if defined(OPTIMIZE_FPRF_UPDATE)
187 static uint16_t *gen_fprf_buf[OPC_BUF_SIZE];
188 static uint16_t **gen_fprf_ptr;
189 #endif
190
191 /* internal defines */
192 typedef struct DisasContext {
193 struct TranslationBlock *tb;
194 target_ulong nip;
195 uint32_t opcode;
196 uint32_t exception;
197 /* Routine used to access memory */
198 int mem_idx;
199 /* Translation flags */
200 #if !defined(CONFIG_USER_ONLY)
201 int supervisor;
202 #endif
203 #if defined(TARGET_PPC64)
204 int sf_mode;
205 #endif
206 int fpu_enabled;
207 int altivec_enabled;
208 int spe_enabled;
209 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
210 int singlestep_enabled;
211 int dcache_line_size;
212 } DisasContext;
213
214 struct opc_handler_t {
215 /* invalid bits */
216 uint32_t inval;
217 /* instruction type */
218 uint64_t type;
219 /* handler */
220 void (*handler)(DisasContext *ctx);
221 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
222 const char *oname;
223 #endif
224 #if defined(DO_PPC_STATISTICS)
225 uint64_t count;
226 #endif
227 };
228
229 static always_inline void gen_set_Rc0 (DisasContext *ctx)
230 {
231 #if defined(TARGET_PPC64)
232 if (ctx->sf_mode)
233 gen_op_cmpi_64(0);
234 else
235 #endif
236 gen_op_cmpi(0);
237 gen_op_set_Rc0();
238 }
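/* gen_set_Rc0() compares the result currently held in T0 against zero (using the
 * 64-bit comparison when running in 64-bit mode) and stores the resulting
 * condition into CR0, as required by the Rc=1 forms of the integer instructions. */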
239
240 static always_inline void gen_reset_fpstatus (void)
241 {
242 #ifdef CONFIG_SOFTFLOAT
243 gen_op_reset_fpstatus();
244 #endif
245 }
246
247 static always_inline void gen_compute_fprf (int set_fprf, int set_rc)
248 {
249 if (set_fprf != 0) {
250 /* This case might be optimized later */
251 #if defined(OPTIMIZE_FPRF_UPDATE)
252 *gen_fprf_ptr++ = gen_opc_ptr;
253 #endif
254 gen_op_compute_fprf(1);
255 if (unlikely(set_rc))
256 tcg_gen_andi_i32(cpu_crf[1], cpu_T[0], 0xf);
257 gen_op_float_check_status();
258 } else if (unlikely(set_rc)) {
259 /* We always need to compute fpcc */
260 gen_op_compute_fprf(0);
261 tcg_gen_andi_i32(cpu_crf[1], cpu_T[0], 0xf);
262 if (set_fprf)
263 gen_op_float_check_status();
264 }
265 }
266
267 static always_inline void gen_optimize_fprf (void)
268 {
269 #if defined(OPTIMIZE_FPRF_UPDATE)
270 uint16_t **ptr;
271
272 for (ptr = gen_fprf_buf; ptr != (gen_fprf_ptr - 1); ptr++)
273 *ptr = INDEX_op_nop1;
274 gen_fprf_ptr = gen_fprf_buf;
275 #endif
276 }
277
278 static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip)
279 {
280 #if defined(TARGET_PPC64)
281 if (ctx->sf_mode)
282 tcg_gen_movi_tl(cpu_nip, nip);
283 else
284 #endif
285 tcg_gen_movi_tl(cpu_nip, (uint32_t)nip);
286 }
287
288 #define GEN_EXCP(ctx, excp, error) \
289 do { \
290 if ((ctx)->exception == POWERPC_EXCP_NONE) { \
291 gen_update_nip(ctx, (ctx)->nip); \
292 } \
293 gen_op_raise_exception_err((excp), (error)); \
294 ctx->exception = (excp); \
295 } while (0)
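/* GEN_EXCP synchronises nip with the translation-time PC (only when no exception
 * has been flagged yet, so the first exception wins), raises the exception, and
 * records it in ctx->exception so that the translation loop can end the block. */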
296
297 #define GEN_EXCP_INVAL(ctx) \
298 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
299 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_INVAL)
300
301 #define GEN_EXCP_PRIVOPC(ctx) \
302 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
303 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_OPC)
304
305 #define GEN_EXCP_PRIVREG(ctx) \
306 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
307 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_REG)
308
309 #define GEN_EXCP_NO_FP(ctx) \
310 GEN_EXCP(ctx, POWERPC_EXCP_FPU, 0)
311
312 #define GEN_EXCP_NO_AP(ctx) \
313 GEN_EXCP(ctx, POWERPC_EXCP_APU, 0)
314
315 #define GEN_EXCP_NO_VR(ctx) \
316 GEN_EXCP(ctx, POWERPC_EXCP_VPU, 0)
317
318 /* Stop translation */
319 static always_inline void GEN_STOP (DisasContext *ctx)
320 {
321 gen_update_nip(ctx, ctx->nip);
322 ctx->exception = POWERPC_EXCP_STOP;
323 }
324
325 /* No need to update nip here, as execution flow will change */
326 static always_inline void GEN_SYNC (DisasContext *ctx)
327 {
328 ctx->exception = POWERPC_EXCP_SYNC;
329 }
330
331 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
332 static void gen_##name (DisasContext *ctx); \
333 GEN_OPCODE(name, opc1, opc2, opc3, inval, type); \
334 static void gen_##name (DisasContext *ctx)
335
336 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
337 static void gen_##name (DisasContext *ctx); \
338 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type); \
339 static void gen_##name (DisasContext *ctx)
340
341 typedef struct opcode_t {
342 unsigned char opc1, opc2, opc3;
343 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
344 unsigned char pad[5];
345 #else
346 unsigned char pad[1];
347 #endif
348 opc_handler_t handler;
349 const char *oname;
350 } opcode_t;
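/* The explicit pad[] rounds the three opcode bytes up to the host word size, so the
 * embedded opc_handler_t stays pointer-aligned and the opcode_t entries emitted into
 * the dedicated linker section (see OPCODES_SECTION below) have a fixed stride. */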
351
352 /*****************************************************************************/
353 /*** Instruction decoding ***/
354 #define EXTRACT_HELPER(name, shift, nb) \
355 static always_inline uint32_t name (uint32_t opcode) \
356 { \
357 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
358 }
359
360 #define EXTRACT_SHELPER(name, shift, nb) \
361 static always_inline int32_t name (uint32_t opcode) \
362 { \
363 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
364 }
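/* EXTRACT_HELPER defines an accessor that returns an unsigned bit-field of the opcode
 * (nb bits starting at bit <shift>); EXTRACT_SHELPER does the same but sign-extends
 * the 16-bit field through the int16_t cast, e.g. for signed displacements and
 * immediates. */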
365
366 /* Opcode part 1 */
367 EXTRACT_HELPER(opc1, 26, 6);
368 /* Opcode part 2 */
369 EXTRACT_HELPER(opc2, 1, 5);
370 /* Opcode part 3 */
371 EXTRACT_HELPER(opc3, 6, 5);
372 /* Update Cr0 flags */
373 EXTRACT_HELPER(Rc, 0, 1);
374 /* Destination */
375 EXTRACT_HELPER(rD, 21, 5);
376 /* Source */
377 EXTRACT_HELPER(rS, 21, 5);
378 /* First operand */
379 EXTRACT_HELPER(rA, 16, 5);
380 /* Second operand */
381 EXTRACT_HELPER(rB, 11, 5);
382 /* Third operand */
383 EXTRACT_HELPER(rC, 6, 5);
384 /*** Get CRn ***/
385 EXTRACT_HELPER(crfD, 23, 3);
386 EXTRACT_HELPER(crfS, 18, 3);
387 EXTRACT_HELPER(crbD, 21, 5);
388 EXTRACT_HELPER(crbA, 16, 5);
389 EXTRACT_HELPER(crbB, 11, 5);
390 /* SPR / TBL */
391 EXTRACT_HELPER(_SPR, 11, 10);
392 static always_inline uint32_t SPR (uint32_t opcode)
393 {
394 uint32_t sprn = _SPR(opcode);
395
396 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
397 }
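/* In the mfspr/mtspr encoding the 10-bit SPR number is stored with its two 5-bit
 * halves swapped; SPR() swaps them back to recover the architected number, e.g. the
 * link register (SPR 8) appears as 0x100 in the instruction field. */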
398 /*** Get constants ***/
399 EXTRACT_HELPER(IMM, 12, 8);
400 /* 16-bit signed immediate value */
401 EXTRACT_SHELPER(SIMM, 0, 16);
402 /* 16-bit unsigned immediate value */
403 EXTRACT_HELPER(UIMM, 0, 16);
404 /* Bit count */
405 EXTRACT_HELPER(NB, 11, 5);
406 /* Shift count */
407 EXTRACT_HELPER(SH, 11, 5);
408 /* Mask start */
409 EXTRACT_HELPER(MB, 6, 5);
410 /* Mask end */
411 EXTRACT_HELPER(ME, 1, 5);
412 /* Trap operand */
413 EXTRACT_HELPER(TO, 21, 5);
414
415 EXTRACT_HELPER(CRM, 12, 8);
416 EXTRACT_HELPER(FM, 17, 8);
417 EXTRACT_HELPER(SR, 16, 4);
418 EXTRACT_HELPER(FPIMM, 12, 4);
419
420 /*** Jump target decoding ***/
421 /* Displacement */
422 EXTRACT_SHELPER(d, 0, 16);
423 /* Immediate address */
424 static always_inline target_ulong LI (uint32_t opcode)
425 {
426 return (opcode >> 0) & 0x03FFFFFC;
427 }
428
429 static always_inline uint32_t BD (uint32_t opcode)
430 {
431 return (opcode >> 0) & 0xFFFC;
432 }
433
434 EXTRACT_HELPER(BO, 21, 5);
435 EXTRACT_HELPER(BI, 16, 5);
436 /* Absolute/relative address */
437 EXTRACT_HELPER(AA, 1, 1);
438 /* Link */
439 EXTRACT_HELPER(LK, 0, 1);
440
441 /* Create a mask between <start> and <end> bits */
442 static always_inline target_ulong MASK (uint32_t start, uint32_t end)
443 {
444 target_ulong ret;
445
446 #if defined(TARGET_PPC64)
447 if (likely(start == 0)) {
448 ret = UINT64_MAX << (63 - end);
449 } else if (likely(end == 63)) {
450 ret = UINT64_MAX >> start;
451 }
452 #else
453 if (likely(start == 0)) {
454 ret = UINT32_MAX << (31 - end);
455 } else if (likely(end == 31)) {
456 ret = UINT32_MAX >> start;
457 }
458 #endif
459 else {
460 ret = (((target_ulong)(-1ULL)) >> (start)) ^
461 (((target_ulong)(-1ULL) >> (end)) >> 1);
462 if (unlikely(start > end))
463 return ~ret;
464 }
465
466 return ret;
467 }
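/* MASK() uses the PowerPC bit-numbering convention where bit 0 is the most-significant
 * bit. 32-bit examples: MASK(0, 15) = 0xFFFF0000, MASK(16, 31) = 0x0000FFFF, and the
 * wrapping case MASK(31, 0) = 0x80000001 (start > end selects the complement). */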
468
469 /*****************************************************************************/
470 /* PowerPC Instructions types definitions */
471 enum {
472 PPC_NONE = 0x0000000000000000ULL,
473 /* PowerPC base instructions set */
474 PPC_INSNS_BASE = 0x0000000000000001ULL,
475 /* integer operations instructions */
476 #define PPC_INTEGER PPC_INSNS_BASE
477 /* flow control instructions */
478 #define PPC_FLOW PPC_INSNS_BASE
479 /* virtual memory instructions */
480 #define PPC_MEM PPC_INSNS_BASE
481 /* ld/st with reservation instructions */
482 #define PPC_RES PPC_INSNS_BASE
483 /* spr/msr access instructions */
484 #define PPC_MISC PPC_INSNS_BASE
485 /* Deprecated instruction sets */
486 /* Original POWER instruction set */
487 PPC_POWER = 0x0000000000000002ULL,
488 /* POWER2 instruction set extension */
489 PPC_POWER2 = 0x0000000000000004ULL,
490 /* Power RTC support */
491 PPC_POWER_RTC = 0x0000000000000008ULL,
492 /* Power-to-PowerPC bridge (601) */
493 PPC_POWER_BR = 0x0000000000000010ULL,
494 /* 64-bit PowerPC instruction set */
495 PPC_64B = 0x0000000000000020ULL,
496 /* New 64-bit extensions (PowerPC 2.0x) */
497 PPC_64BX = 0x0000000000000040ULL,
498 /* 64-bit hypervisor extensions */
499 PPC_64H = 0x0000000000000080ULL,
500 /* New wait instruction (PowerPC 2.0x) */
501 PPC_WAIT = 0x0000000000000100ULL,
502 /* Time base mftb instruction */
503 PPC_MFTB = 0x0000000000000200ULL,
504
505 /* Fixed-point unit extensions */
506 /* PowerPC 602 specific */
507 PPC_602_SPEC = 0x0000000000000400ULL,
508 /* isel instruction */
509 PPC_ISEL = 0x0000000000000800ULL,
510 /* popcntb instruction */
511 PPC_POPCNTB = 0x0000000000001000ULL,
512 /* string load / store */
513 PPC_STRING = 0x0000000000002000ULL,
514
515 /* Floating-point unit extensions */
516 /* Optional floating point instructions */
517 PPC_FLOAT = 0x0000000000010000ULL,
518 /* New floating-point extensions (PowerPC 2.0x) */
519 PPC_FLOAT_EXT = 0x0000000000020000ULL,
520 PPC_FLOAT_FSQRT = 0x0000000000040000ULL,
521 PPC_FLOAT_FRES = 0x0000000000080000ULL,
522 PPC_FLOAT_FRSQRTE = 0x0000000000100000ULL,
523 PPC_FLOAT_FRSQRTES = 0x0000000000200000ULL,
524 PPC_FLOAT_FSEL = 0x0000000000400000ULL,
525 PPC_FLOAT_STFIWX = 0x0000000000800000ULL,
526
527 /* Vector/SIMD extensions */
528 /* Altivec support */
529 PPC_ALTIVEC = 0x0000000001000000ULL,
530 /* PowerPC 2.03 SPE extension */
531 PPC_SPE = 0x0000000002000000ULL,
532 /* PowerPC 2.03 SPE floating-point extension */
533 PPC_SPEFPU = 0x0000000004000000ULL,
534
535 /* Optional memory control instructions */
536 PPC_MEM_TLBIA = 0x0000000010000000ULL,
537 PPC_MEM_TLBIE = 0x0000000020000000ULL,
538 PPC_MEM_TLBSYNC = 0x0000000040000000ULL,
539 /* sync instruction */
540 PPC_MEM_SYNC = 0x0000000080000000ULL,
541 /* eieio instruction */
542 PPC_MEM_EIEIO = 0x0000000100000000ULL,
543
544 /* Cache control instructions */
545 PPC_CACHE = 0x0000000200000000ULL,
546 /* icbi instruction */
547 PPC_CACHE_ICBI = 0x0000000400000000ULL,
548 /* dcbz instruction with fixed cache line size */
549 PPC_CACHE_DCBZ = 0x0000000800000000ULL,
550 /* dcbz instruction with tunable cache line size */
551 PPC_CACHE_DCBZT = 0x0000001000000000ULL,
552 /* dcba instruction */
553 PPC_CACHE_DCBA = 0x0000002000000000ULL,
554 /* Freescale cache locking instructions */
555 PPC_CACHE_LOCK = 0x0000004000000000ULL,
556
557 /* MMU related extensions */
558 /* external control instructions */
559 PPC_EXTERN = 0x0000010000000000ULL,
560 /* segment register access instructions */
561 PPC_SEGMENT = 0x0000020000000000ULL,
562 /* PowerPC 6xx TLB management instructions */
563 PPC_6xx_TLB = 0x0000040000000000ULL,
564 /* PowerPC 74xx TLB management instructions */
565 PPC_74xx_TLB = 0x0000080000000000ULL,
566 /* PowerPC 40x TLB management instructions */
567 PPC_40x_TLB = 0x0000100000000000ULL,
568 /* segment register access instructions for PowerPC 64 "bridge" */
569 PPC_SEGMENT_64B = 0x0000200000000000ULL,
570 /* SLB management */
571 PPC_SLBI = 0x0000400000000000ULL,
572
573 /* Embedded PowerPC dedicated instructions */
574 PPC_WRTEE = 0x0001000000000000ULL,
575 /* PowerPC 40x exception model */
576 PPC_40x_EXCP = 0x0002000000000000ULL,
577 /* PowerPC 405 MAC instructions */
578 PPC_405_MAC = 0x0004000000000000ULL,
579 /* PowerPC 440 specific instructions */
580 PPC_440_SPEC = 0x0008000000000000ULL,
581 /* BookE (embedded) PowerPC specification */
582 PPC_BOOKE = 0x0010000000000000ULL,
583 /* mfapidi instruction */
584 PPC_MFAPIDI = 0x0020000000000000ULL,
585 /* tlbiva instruction */
586 PPC_TLBIVA = 0x0040000000000000ULL,
587 /* tlbivax instruction */
588 PPC_TLBIVAX = 0x0080000000000000ULL,
589 /* PowerPC 4xx dedicated instructions */
590 PPC_4xx_COMMON = 0x0100000000000000ULL,
591 /* PowerPC 40x icbt instructions */
592 PPC_40x_ICBT = 0x0200000000000000ULL,
593 /* rfmci is not implemented on all BookE PowerPC cores */
594 PPC_RFMCI = 0x0400000000000000ULL,
595 /* rfdi instruction */
596 PPC_RFDI = 0x0800000000000000ULL,
597 /* DCR accesses */
598 PPC_DCR = 0x1000000000000000ULL,
599 /* DCR extended accesses */
600 PPC_DCRX = 0x2000000000000000ULL,
601 /* user-mode DCR access, implemented in PowerPC 460 */
602 PPC_DCRUX = 0x4000000000000000ULL,
603 };
604
605 /*****************************************************************************/
606 /* PowerPC instructions table */
607 #if HOST_LONG_BITS == 64
608 #define OPC_ALIGN 8
609 #else
610 #define OPC_ALIGN 4
611 #endif
612 #if defined(__APPLE__)
613 #define OPCODES_SECTION \
614 __attribute__ ((section("__TEXT,__opcodes"), unused, aligned (OPC_ALIGN) ))
615 #else
616 #define OPCODES_SECTION \
617 __attribute__ ((section(".opcodes"), unused, aligned (OPC_ALIGN) ))
618 #endif
619
620 #if defined(DO_PPC_STATISTICS)
621 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
622 OPCODES_SECTION opcode_t opc_##name = { \
623 .opc1 = op1, \
624 .opc2 = op2, \
625 .opc3 = op3, \
626 .pad = { 0, }, \
627 .handler = { \
628 .inval = invl, \
629 .type = _typ, \
630 .handler = &gen_##name, \
631 .oname = stringify(name), \
632 }, \
633 .oname = stringify(name), \
634 }
635 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
636 OPCODES_SECTION opcode_t opc_##name = { \
637 .opc1 = op1, \
638 .opc2 = op2, \
639 .opc3 = op3, \
640 .pad = { 0, }, \
641 .handler = { \
642 .inval = invl, \
643 .type = _typ, \
644 .handler = &gen_##name, \
645 .oname = onam, \
646 }, \
647 .oname = onam, \
648 }
649 #else
650 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
651 OPCODES_SECTION opcode_t opc_##name = { \
652 .opc1 = op1, \
653 .opc2 = op2, \
654 .opc3 = op3, \
655 .pad = { 0, }, \
656 .handler = { \
657 .inval = invl, \
658 .type = _typ, \
659 .handler = &gen_##name, \
660 }, \
661 .oname = stringify(name), \
662 }
663 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
664 OPCODES_SECTION opcode_t opc_##name = { \
665 .opc1 = op1, \
666 .opc2 = op2, \
667 .opc3 = op3, \
668 .pad = { 0, }, \
669 .handler = { \
670 .inval = invl, \
671 .type = _typ, \
672 .handler = &gen_##name, \
673 }, \
674 .oname = onam, \
675 }
676 #endif
677
678 #define GEN_OPCODE_MARK(name) \
679 OPCODES_SECTION opcode_t opc_##name = { \
680 .opc1 = 0xFF, \
681 .opc2 = 0xFF, \
682 .opc3 = 0xFF, \
683 .pad = { 0, }, \
684 .handler = { \
685 .inval = 0x00000000, \
686 .type = 0x00, \
687 .handler = NULL, \
688 }, \
689 .oname = stringify(name), \
690 }
691
692 /* Start opcode list */
693 GEN_OPCODE_MARK(start);
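/* GEN_OPCODE_MARK emits a sentinel entry (opc1/opc2/opc3 all 0xFF, NULL handler). The
 * "start" marker above delimits the beginning of the opcode table accumulated in the
 * opcodes section; the table is walked at CPU initialisation time, in code outside
 * this excerpt, to build the per-CPU decode tables. */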
694
695 /* Invalid instruction */
696 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE)
697 {
698 GEN_EXCP_INVAL(ctx);
699 }
700
701 static opc_handler_t invalid_handler = {
702 .inval = 0xFFFFFFFF,
703 .type = PPC_NONE,
704 .handler = gen_invalid,
705 };
706
707 /*** Integer arithmetic ***/
708 #define __GEN_INT_ARITH2(name, opc1, opc2, opc3, inval, type) \
709 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
710 { \
711 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
712 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
713 gen_op_##name(); \
714 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
715 if (unlikely(Rc(ctx->opcode) != 0)) \
716 gen_set_Rc0(ctx); \
717 }
718
719 #define __GEN_INT_ARITH2_O(name, opc1, opc2, opc3, inval, type) \
720 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
721 { \
722 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
723 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
724 gen_op_##name(); \
725 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
726 if (unlikely(Rc(ctx->opcode) != 0)) \
727 gen_set_Rc0(ctx); \
728 }
729
730 #define __GEN_INT_ARITH1(name, opc1, opc2, opc3, type) \
731 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
732 { \
733 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
734 gen_op_##name(); \
735 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
736 if (unlikely(Rc(ctx->opcode) != 0)) \
737 gen_set_Rc0(ctx); \
738 }
739 #define __GEN_INT_ARITH1_O(name, opc1, opc2, opc3, type) \
740 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
741 { \
742 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
743 gen_op_##name(); \
744 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
745 if (unlikely(Rc(ctx->opcode) != 0)) \
746 gen_set_Rc0(ctx); \
747 }
748
749 /* Two-operand arithmetic functions */
750 #define GEN_INT_ARITH2(name, opc1, opc2, opc3, type) \
751 __GEN_INT_ARITH2(name, opc1, opc2, opc3, 0x00000000, type) \
752 __GEN_INT_ARITH2_O(name##o, opc1, opc2, opc3 | 0x10, 0x00000000, type)
753
754 /* Two-operand arithmetic functions without an overflow (OE) form */
755 #define GEN_INT_ARITHN(name, opc1, opc2, opc3, type) \
756 __GEN_INT_ARITH2(name, opc1, opc2, opc3, 0x00000400, type)
757
758 /* One-operand arithmetic functions */
759 #define GEN_INT_ARITH1(name, opc1, opc2, opc3, type) \
760 __GEN_INT_ARITH1(name, opc1, opc2, opc3, type) \
761 __GEN_INT_ARITH1_O(name##o, opc1, opc2, opc3 | 0x10, type)
762
763 #if defined(TARGET_PPC64)
764 #define __GEN_INT_ARITH2_64(name, opc1, opc2, opc3, inval, type) \
765 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
766 { \
767 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
768 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
769 if (ctx->sf_mode) \
770 gen_op_##name##_64(); \
771 else \
772 gen_op_##name(); \
773 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
774 if (unlikely(Rc(ctx->opcode) != 0)) \
775 gen_set_Rc0(ctx); \
776 }
777
778 #define __GEN_INT_ARITH2_O_64(name, opc1, opc2, opc3, inval, type) \
779 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
780 { \
781 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
782 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
783 if (ctx->sf_mode) \
784 gen_op_##name##_64(); \
785 else \
786 gen_op_##name(); \
787 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
788 if (unlikely(Rc(ctx->opcode) != 0)) \
789 gen_set_Rc0(ctx); \
790 }
791
792 #define __GEN_INT_ARITH1_64(name, opc1, opc2, opc3, type) \
793 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
794 { \
795 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
796 if (ctx->sf_mode) \
797 gen_op_##name##_64(); \
798 else \
799 gen_op_##name(); \
800 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
801 if (unlikely(Rc(ctx->opcode) != 0)) \
802 gen_set_Rc0(ctx); \
803 }
804 #define __GEN_INT_ARITH1_O_64(name, opc1, opc2, opc3, type) \
805 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
806 { \
807 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
808 if (ctx->sf_mode) \
809 gen_op_##name##_64(); \
810 else \
811 gen_op_##name(); \
812 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
813 if (unlikely(Rc(ctx->opcode) != 0)) \
814 gen_set_Rc0(ctx); \
815 }
816
817 /* Two-operand arithmetic functions */
818 #define GEN_INT_ARITH2_64(name, opc1, opc2, opc3, type) \
819 __GEN_INT_ARITH2_64(name, opc1, opc2, opc3, 0x00000000, type) \
820 __GEN_INT_ARITH2_O_64(name##o, opc1, opc2, opc3 | 0x10, 0x00000000, type)
821
822 /* Two-operand arithmetic functions without an overflow (OE) form */
823 #define GEN_INT_ARITHN_64(name, opc1, opc2, opc3, type) \
824 __GEN_INT_ARITH2_64(name, opc1, opc2, opc3, 0x00000400, type)
825
826 /* One-operand arithmetic functions */
827 #define GEN_INT_ARITH1_64(name, opc1, opc2, opc3, type) \
828 __GEN_INT_ARITH1_64(name, opc1, opc2, opc3, type) \
829 __GEN_INT_ARITH1_O_64(name##o, opc1, opc2, opc3 | 0x10, type)
830 #else
831 #define GEN_INT_ARITH2_64 GEN_INT_ARITH2
832 #define GEN_INT_ARITHN_64 GEN_INT_ARITHN
833 #define GEN_INT_ARITH1_64 GEN_INT_ARITH1
834 #endif
835
836 /* add add. addo addo. */
837 static always_inline void gen_op_add (void)
838 {
839 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
840 }
841 static always_inline void gen_op_addo (void)
842 {
843 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
844 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
845 gen_op_check_addo();
846 }
847 #if defined(TARGET_PPC64)
848 #define gen_op_add_64 gen_op_add
849 static always_inline void gen_op_addo_64 (void)
850 {
851 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
852 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
853 gen_op_check_addo_64();
854 }
855 #endif
856 GEN_INT_ARITH2_64 (add, 0x1F, 0x0A, 0x08, PPC_INTEGER);
857 /* addc addc. addco addco. */
858 static always_inline void gen_op_addc (void)
859 {
860 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
861 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
862 gen_op_check_addc();
863 }
864 static always_inline void gen_op_addco (void)
865 {
866 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
867 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
868 gen_op_check_addc();
869 gen_op_check_addo();
870 }
871 #if defined(TARGET_PPC64)
872 static always_inline void gen_op_addc_64 (void)
873 {
874 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
875 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
876 gen_op_check_addc_64();
877 }
878 static always_inline void gen_op_addco_64 (void)
879 {
880 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
881 tcg_gen_add_tl(cpu_T[0], cpu_T[0], cpu_T[1]);
882 gen_op_check_addc_64();
883 gen_op_check_addo_64();
884 }
885 #endif
886 GEN_INT_ARITH2_64 (addc, 0x1F, 0x0A, 0x00, PPC_INTEGER);
887 /* adde adde. addeo addeo. */
888 static always_inline void gen_op_addeo (void)
889 {
890 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
891 gen_op_adde();
892 gen_op_check_addo();
893 }
894 #if defined(TARGET_PPC64)
895 static always_inline void gen_op_addeo_64 (void)
896 {
897 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
898 gen_op_adde_64();
899 gen_op_check_addo_64();
900 }
901 #endif
902 GEN_INT_ARITH2_64 (adde, 0x1F, 0x0A, 0x04, PPC_INTEGER);
903 /* addme addme. addmeo addmeo. */
904 static always_inline void gen_op_addme (void)
905 {
906 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
907 gen_op_add_me();
908 }
909 #if defined(TARGET_PPC64)
910 static always_inline void gen_op_addme_64 (void)
911 {
912 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
913 gen_op_add_me_64();
914 }
915 #endif
916 GEN_INT_ARITH1_64 (addme, 0x1F, 0x0A, 0x07, PPC_INTEGER);
917 /* addze addze. addzeo addzeo. */
918 static always_inline void gen_op_addze (void)
919 {
920 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
921 gen_op_add_ze();
922 gen_op_check_addc();
923 }
924 static always_inline void gen_op_addzeo (void)
925 {
926 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
927 gen_op_add_ze();
928 gen_op_check_addc();
929 gen_op_check_addo();
930 }
931 #if defined(TARGET_PPC64)
932 static always_inline void gen_op_addze_64 (void)
933 {
934 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
935 gen_op_add_ze();
936 gen_op_check_addc_64();
937 }
938 static always_inline void gen_op_addzeo_64 (void)
939 {
940 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
941 gen_op_add_ze();
942 gen_op_check_addc_64();
943 gen_op_check_addo_64();
944 }
945 #endif
946 GEN_INT_ARITH1_64 (addze, 0x1F, 0x0A, 0x06, PPC_INTEGER);
947 /* divw divw. divwo divwo. */
948 GEN_INT_ARITH2 (divw, 0x1F, 0x0B, 0x0F, PPC_INTEGER);
949 /* divwu divwu. divwuo divwuo. */
950 GEN_INT_ARITH2 (divwu, 0x1F, 0x0B, 0x0E, PPC_INTEGER);
951 /* mulhw mulhw. */
952 GEN_INT_ARITHN (mulhw, 0x1F, 0x0B, 0x02, PPC_INTEGER);
953 /* mulhwu mulhwu. */
954 GEN_INT_ARITHN (mulhwu, 0x1F, 0x0B, 0x00, PPC_INTEGER);
955 /* mullw mullw. mullwo mullwo. */
956 GEN_INT_ARITH2 (mullw, 0x1F, 0x0B, 0x07, PPC_INTEGER);
957 /* neg neg. nego nego. */
958 GEN_INT_ARITH1_64 (neg, 0x1F, 0x08, 0x03, PPC_INTEGER);
959 /* subf subf. subfo subfo. */
960 static always_inline void gen_op_subf (void)
961 {
962 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
963 }
964 static always_inline void gen_op_subfo (void)
965 {
966 tcg_gen_not_tl(cpu_T[2], cpu_T[0]);
967 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
968 gen_op_check_addo();
969 }
970 #if defined(TARGET_PPC64)
971 #define gen_op_subf_64 gen_op_subf
972 static always_inline void gen_op_subfo_64 (void)
973 {
974 tcg_gen_not_i64(cpu_T[2], cpu_T[0]);
975 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
976 gen_op_check_addo_64();
977 }
978 #endif
979 GEN_INT_ARITH2_64 (subf, 0x1F, 0x08, 0x01, PPC_INTEGER);
980 /* subfc subfc. subfco subfco. */
981 static always_inline void gen_op_subfc (void)
982 {
983 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
984 gen_op_check_subfc();
985 }
986 static always_inline void gen_op_subfco (void)
987 {
988 tcg_gen_not_tl(cpu_T[2], cpu_T[0]);
989 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
990 gen_op_check_subfc();
991 gen_op_check_addo();
992 }
993 #if defined(TARGET_PPC64)
994 static always_inline void gen_op_subfc_64 (void)
995 {
996 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
997 gen_op_check_subfc_64();
998 }
999 static always_inline void gen_op_subfco_64 (void)
1000 {
1001 tcg_gen_not_i64(cpu_T[2], cpu_T[0]);
1002 tcg_gen_sub_tl(cpu_T[0], cpu_T[1], cpu_T[0]);
1003 gen_op_check_subfc_64();
1004 gen_op_check_addo_64();
1005 }
1006 #endif
1007 GEN_INT_ARITH2_64 (subfc, 0x1F, 0x08, 0x00, PPC_INTEGER);
1008 /* subfe subfe. subfeo subfeo. */
1009 static always_inline void gen_op_subfeo (void)
1010 {
1011 tcg_gen_not_tl(cpu_T[2], cpu_T[0]);
1012 gen_op_subfe();
1013 gen_op_check_addo();
1014 }
1015 #if defined(TARGET_PPC64)
1016 #define gen_op_subfe_64 gen_op_subfe
1017 static always_inline void gen_op_subfeo_64 (void)
1018 {
1019 tcg_gen_not_i64(cpu_T[2], cpu_T[0]);
1020 gen_op_subfe_64();
1021 gen_op_check_addo_64();
1022 }
1023 #endif
1024 GEN_INT_ARITH2_64 (subfe, 0x1F, 0x08, 0x04, PPC_INTEGER);
1025 /* subfme subfme. subfmeo subfmeo. */
1026 GEN_INT_ARITH1_64 (subfme, 0x1F, 0x08, 0x07, PPC_INTEGER);
1027 /* subfze subfze. subfzeo subfzeo. */
1028 GEN_INT_ARITH1_64 (subfze, 0x1F, 0x08, 0x06, PPC_INTEGER);
1029 /* addi */
1030 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1031 {
1032 target_long simm = SIMM(ctx->opcode);
1033
1034 if (rA(ctx->opcode) == 0) {
1035 /* li case */
1036 tcg_gen_movi_tl(cpu_T[0], simm);
1037 } else {
1038 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1039 if (likely(simm != 0))
1040 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm);
1041 }
1042 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1043 }
1044 /* addic */
1045 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1046 {
1047 target_long simm = SIMM(ctx->opcode);
1048
1049 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1050 if (likely(simm != 0)) {
1051 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
1052 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm);
1053 #if defined(TARGET_PPC64)
1054 if (ctx->sf_mode)
1055 gen_op_check_addc_64();
1056 else
1057 #endif
1058 gen_op_check_addc();
1059 } else {
1060 gen_op_clear_xer_ca();
1061 }
1062 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1063 }
1064 /* addic. */
1065 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1066 {
1067 target_long simm = SIMM(ctx->opcode);
1068
1069 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1070 if (likely(simm != 0)) {
1071 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
1072 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm);
1073 #if defined(TARGET_PPC64)
1074 if (ctx->sf_mode)
1075 gen_op_check_addc_64();
1076 else
1077 #endif
1078 gen_op_check_addc();
1079 } else {
1080 gen_op_clear_xer_ca();
1081 }
1082 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1083 gen_set_Rc0(ctx);
1084 }
1085 /* addis */
1086 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1087 {
1088 target_long simm = SIMM(ctx->opcode);
1089
1090 if (rA(ctx->opcode) == 0) {
1091 /* lis case */
1092 tcg_gen_movi_tl(cpu_T[0], simm << 16);
1093 } else {
1094 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1095 if (likely(simm != 0))
1096 tcg_gen_addi_tl(cpu_T[0], cpu_T[0], simm << 16);
1097 }
1098 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1099 }
1100 /* mulli */
1101 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1102 {
1103 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1104 gen_op_mulli(SIMM(ctx->opcode));
1105 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1106 }
1107 /* subfic */
1108 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1109 {
1110 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1111 #if defined(TARGET_PPC64)
1112 if (ctx->sf_mode)
1113 gen_op_subfic_64(SIMM(ctx->opcode));
1114 else
1115 #endif
1116 gen_op_subfic(SIMM(ctx->opcode));
1117 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1118 }
1119
1120 #if defined(TARGET_PPC64)
1121 /* mulhd mulhd. */
1122 GEN_INT_ARITHN (mulhd, 0x1F, 0x09, 0x02, PPC_64B);
1123 /* mulhdu mulhdu. */
1124 GEN_INT_ARITHN (mulhdu, 0x1F, 0x09, 0x00, PPC_64B);
1125 /* mulld mulld. mulldo mulldo. */
1126 GEN_INT_ARITH2 (mulld, 0x1F, 0x09, 0x07, PPC_64B);
1127 /* divd divd. divdo divdo. */
1128 GEN_INT_ARITH2 (divd, 0x1F, 0x09, 0x0F, PPC_64B);
1129 /* divdu divdu. divduo divduo. */
1130 GEN_INT_ARITH2 (divdu, 0x1F, 0x09, 0x0E, PPC_64B);
1131 #endif
1132
1133 /*** Integer comparison ***/
1134 #if defined(TARGET_PPC64)
1135 #define GEN_CMP(name, opc, type) \
1136 GEN_HANDLER(name, 0x1F, 0x00, opc, 0x00400000, type) \
1137 { \
1138 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
1139 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
1140 if (ctx->sf_mode && (ctx->opcode & 0x00200000)) \
1141 gen_op_##name##_64(); \
1142 else \
1143 gen_op_##name(); \
1144 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf); \
1145 }
1146 #else
1147 #define GEN_CMP(name, opc, type) \
1148 GEN_HANDLER(name, 0x1F, 0x00, opc, 0x00400000, type) \
1149 { \
1150 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
1151 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
1152 gen_op_##name(); \
1153 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf); \
1154 }
1155 #endif
1156
1157 /* cmp */
1158 GEN_CMP(cmp, 0x00, PPC_INTEGER);
1159 /* cmpi */
1160 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
1161 {
1162 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1163 #if defined(TARGET_PPC64)
1164 if (ctx->sf_mode && (ctx->opcode & 0x00200000))
1165 gen_op_cmpi_64(SIMM(ctx->opcode));
1166 else
1167 #endif
1168 gen_op_cmpi(SIMM(ctx->opcode));
1169 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
1170 }
1171 /* cmpl */
1172 GEN_CMP(cmpl, 0x01, PPC_INTEGER);
1173 /* cmpli */
1174 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
1175 {
1176 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1177 #if defined(TARGET_PPC64)
1178 if (ctx->sf_mode && (ctx->opcode & 0x00200000))
1179 gen_op_cmpli_64(UIMM(ctx->opcode));
1180 else
1181 #endif
1182 gen_op_cmpli(UIMM(ctx->opcode));
1183 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
1184 }
1185
1186 /* isel (PowerPC 2.03 specification) */
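/* isel rD,rA,rB,BC: if CR bit BC is set, rD receives rA (or 0 when rA is r0),
 * otherwise rD receives rB. The mask computed below selects the requested bit
 * inside the 4-bit CR field cpu_crf[BC >> 2]. */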
1187 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL)
1188 {
1189 uint32_t bi = rC(ctx->opcode);
1190 uint32_t mask;
1191
1192 if (rA(ctx->opcode) == 0) {
1193 tcg_gen_movi_tl(cpu_T[0], 0);
1194 } else {
1195 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1196 }
1197 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
1198 mask = 1 << (3 - (bi & 0x03));
1199 tcg_gen_mov_i32(cpu_T[0], cpu_crf[bi >> 2]);
1200 gen_op_test_true(mask);
1201 gen_op_isel();
1202 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1203 }
1204
1205 /*** Integer logical ***/
1206 #define __GEN_LOGICAL2(name, opc2, opc3, type) \
1207 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000000, type) \
1208 { \
1209 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]); \
1210 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
1211 gen_op_##name(); \
1212 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
1213 if (unlikely(Rc(ctx->opcode) != 0)) \
1214 gen_set_Rc0(ctx); \
1215 }
1216 #define GEN_LOGICAL2(name, opc, type) \
1217 __GEN_LOGICAL2(name, 0x1C, opc, type)
1218
1219 #define GEN_LOGICAL1(name, opc, type) \
1220 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) \
1221 { \
1222 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]); \
1223 gen_op_##name(); \
1224 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
1225 if (unlikely(Rc(ctx->opcode) != 0)) \
1226 gen_set_Rc0(ctx); \
1227 }
1228
1229 /* and & and. */
1230 GEN_LOGICAL2(and, 0x00, PPC_INTEGER);
1231 /* andc & andc. */
1232 GEN_LOGICAL2(andc, 0x01, PPC_INTEGER);
1233 /* andi. */
1234 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1235 {
1236 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1237 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], UIMM(ctx->opcode));
1238 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1239 gen_set_Rc0(ctx);
1240 }
1241 /* andis. */
1242 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1243 {
1244 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1245 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], UIMM(ctx->opcode) << 16);
1246 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1247 gen_set_Rc0(ctx);
1248 }
1249
1250 /* cntlzw */
1251 GEN_LOGICAL1(cntlzw, 0x00, PPC_INTEGER);
1252 /* eqv & eqv. */
1253 GEN_LOGICAL2(eqv, 0x08, PPC_INTEGER);
1254 /* extsb & extsb. */
1255 GEN_LOGICAL1(extsb, 0x1D, PPC_INTEGER);
1256 /* extsh & extsh. */
1257 GEN_LOGICAL1(extsh, 0x1C, PPC_INTEGER);
1258 /* nand & nand. */
1259 GEN_LOGICAL2(nand, 0x0E, PPC_INTEGER);
1260 /* nor & nor. */
1261 GEN_LOGICAL2(nor, 0x03, PPC_INTEGER);
1262
1263 /* or & or. */
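/* Besides the plain bitwise OR (with mr, rS == rB, reduced to a simple move), the
 * rS == rA == rB, Rc=0 encodings are the "or rx,rx,rx" priority-hint nops defined
 * for 64-bit implementations (e.g. "or 1,1,1" selects low priority); the supervisor
 * checks below restrict the privileged priority levels. */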
1264 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER)
1265 {
1266 int rs, ra, rb;
1267
1268 rs = rS(ctx->opcode);
1269 ra = rA(ctx->opcode);
1270 rb = rB(ctx->opcode);
1271 /* Optimisation for mr. ri case */
1272 if (rs != ra || rs != rb) {
1273 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rs]);
1274 if (rs != rb) {
1275 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rb]);
1276 gen_op_or();
1277 }
1278 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
1279 if (unlikely(Rc(ctx->opcode) != 0))
1280 gen_set_Rc0(ctx);
1281 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1282 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rs]);
1283 gen_set_Rc0(ctx);
1284 #if defined(TARGET_PPC64)
1285 } else {
1286 switch (rs) {
1287 case 1:
1288 /* Set process priority to low */
1289 gen_op_store_pri(2);
1290 break;
1291 case 6:
1292 /* Set process priority to medium-low */
1293 gen_op_store_pri(3);
1294 break;
1295 case 2:
1296 /* Set process priority to normal */
1297 gen_op_store_pri(4);
1298 break;
1299 #if !defined(CONFIG_USER_ONLY)
1300 case 31:
1301 if (ctx->supervisor > 0) {
1302 /* Set process priority to very low */
1303 gen_op_store_pri(1);
1304 }
1305 break;
1306 case 5:
1307 if (ctx->supervisor > 0) {
1308 /* Set process priority to medium-high */
1309 gen_op_store_pri(5);
1310 }
1311 break;
1312 case 3:
1313 if (ctx->supervisor > 0) {
1314 /* Set process priority to high */
1315 gen_op_store_pri(6);
1316 }
1317 break;
1318 case 7:
1319 if (ctx->supervisor > 1) {
1320 /* Set process priority to very high */
1321 gen_op_store_pri(7);
1322 }
1323 break;
1324 #endif
1325 default:
1326 /* nop */
1327 break;
1328 }
1329 #endif
1330 }
1331 }
1332
1333 /* orc & orc. */
1334 GEN_LOGICAL2(orc, 0x0C, PPC_INTEGER);
1335 /* xor & xor. */
1336 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER)
1337 {
1338 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1339 /* Optimisation for "set to zero" case */
1340 if (rS(ctx->opcode) != rB(ctx->opcode)) {
1341 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
1342 gen_op_xor();
1343 } else {
1344 tcg_gen_movi_tl(cpu_T[0], 0);
1345 }
1346 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1347 if (unlikely(Rc(ctx->opcode) != 0))
1348 gen_set_Rc0(ctx);
1349 }
1350 /* ori */
1351 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1352 {
1353 target_ulong uimm = UIMM(ctx->opcode);
1354
1355 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1356 /* NOP */
1357 /* XXX: should handle special NOPs for POWER series */
1358 return;
1359 }
1360 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1361 if (likely(uimm != 0))
1362 gen_op_ori(uimm);
1363 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1364 }
1365 /* oris */
1366 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1367 {
1368 target_ulong uimm = UIMM(ctx->opcode);
1369
1370 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1371 /* NOP */
1372 return;
1373 }
1374 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1375 if (likely(uimm != 0))
1376 gen_op_ori(uimm << 16);
1377 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1378 }
1379 /* xori */
1380 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1381 {
1382 target_ulong uimm = UIMM(ctx->opcode);
1383
1384 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1385 /* NOP */
1386 return;
1387 }
1388 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1389 if (likely(uimm != 0))
1390 gen_op_xori(uimm);
1391 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1392 }
1393
1394 /* xoris */
1395 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1396 {
1397 target_ulong uimm = UIMM(ctx->opcode);
1398
1399 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1400 /* NOP */
1401 return;
1402 }
1403 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1404 if (likely(uimm != 0))
1405 gen_op_xori(uimm << 16);
1406 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1407 }
1408
1409 /* popcntb : PowerPC 2.03 specification */
1410 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB)
1411 {
1412 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1413 #if defined(TARGET_PPC64)
1414 if (ctx->sf_mode)
1415 gen_op_popcntb_64();
1416 else
1417 #endif
1418 gen_op_popcntb();
1419 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1420 }
1421
1422 #if defined(TARGET_PPC64)
1423 /* extsw & extsw. */
1424 GEN_LOGICAL1(extsw, 0x1E, PPC_64B);
1425 /* cntlzd */
1426 GEN_LOGICAL1(cntlzd, 0x01, PPC_64B);
1427 #endif
1428
1429 /*** Integer rotate ***/
1430 /* rlwimi & rlwimi. */
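/* rlwimi rA,rS,SH,MB,ME: rotate rS left by SH and insert the result into rA under
 * the mask MB..ME, leaving the other bits of rA unchanged. On 64-bit targets mb/me
 * are shifted up by 32 so that MASK() selects the low 32-bit word. */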
1431 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1432 {
1433 target_ulong mask;
1434 uint32_t mb, me, sh;
1435
1436 mb = MB(ctx->opcode);
1437 me = ME(ctx->opcode);
1438 sh = SH(ctx->opcode);
1439 if (likely(sh == 0)) {
1440 if (likely(mb == 0 && me == 31)) {
1441 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1442 goto do_store;
1443 } else if (likely(mb == 31 && me == 0)) {
1444 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1445 goto do_store;
1446 }
1447 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1448 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1449 goto do_mask;
1450 }
1451 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1452 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1453 gen_op_rotli32_T0(SH(ctx->opcode));
1454 do_mask:
1455 #if defined(TARGET_PPC64)
1456 mb += 32;
1457 me += 32;
1458 #endif
1459 mask = MASK(mb, me);
1460 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], mask);
1461 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], ~mask);
1462 gen_op_or();
1463 do_store:
1464 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1465 if (unlikely(Rc(ctx->opcode) != 0))
1466 gen_set_Rc0(ctx);
1467 }
1468 /* rlwinm & rlwinm. */
1469 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1470 {
1471 uint32_t mb, me, sh;
1472
1473 sh = SH(ctx->opcode);
1474 mb = MB(ctx->opcode);
1475 me = ME(ctx->opcode);
1476 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1477 if (likely(sh == 0)) {
1478 goto do_mask;
1479 }
1480 if (likely(mb == 0)) {
1481 if (likely(me == 31)) {
1482 gen_op_rotli32_T0(sh);
1483 goto do_store;
1484 } else if (likely(me == (31 - sh))) {
1485 gen_op_sli_T0(sh);
1486 goto do_store;
1487 }
1488 } else if (likely(me == 31)) {
1489 if (likely(sh == (32 - mb))) {
1490 gen_op_srli_T0(mb);
1491 goto do_store;
1492 }
1493 }
1494 gen_op_rotli32_T0(sh);
1495 do_mask:
1496 #if defined(TARGET_PPC64)
1497 mb += 32;
1498 me += 32;
1499 #endif
1500 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], MASK(mb, me));
1501 do_store:
1502 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1503 if (unlikely(Rc(ctx->opcode) != 0))
1504 gen_set_Rc0(ctx);
1505 }
1506 /* rlwnm & rlwnm. */
1507 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1508 {
1509 uint32_t mb, me;
1510
1511 mb = MB(ctx->opcode);
1512 me = ME(ctx->opcode);
1513 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1514 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
1515 gen_op_rotl32_T0_T1();
1516 if (unlikely(mb != 0 || me != 31)) {
1517 #if defined(TARGET_PPC64)
1518 mb += 32;
1519 me += 32;
1520 #endif
1521 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], MASK(mb, me));
1522 }
1523 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1524 if (unlikely(Rc(ctx->opcode) != 0))
1525 gen_set_Rc0(ctx);
1526 }
1527
1528 #if defined(TARGET_PPC64)
1529 #define GEN_PPC64_R2(name, opc1, opc2) \
1530 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1531 { \
1532 gen_##name(ctx, 0); \
1533 } \
1534 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1535 PPC_64B) \
1536 { \
1537 gen_##name(ctx, 1); \
1538 }
1539 #define GEN_PPC64_R4(name, opc1, opc2) \
1540 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1541 { \
1542 gen_##name(ctx, 0, 0); \
1543 } \
1544 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
1545 PPC_64B) \
1546 { \
1547 gen_##name(ctx, 0, 1); \
1548 } \
1549 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1550 PPC_64B) \
1551 { \
1552 gen_##name(ctx, 1, 0); \
1553 } \
1554 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
1555 PPC_64B) \
1556 { \
1557 gen_##name(ctx, 1, 1); \
1558 }
1559
1560 static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb,
1561 uint32_t me, uint32_t sh)
1562 {
1563 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1564 if (likely(sh == 0)) {
1565 goto do_mask;
1566 }
1567 if (likely(mb == 0)) {
1568 if (likely(me == 63)) {
1569 gen_op_rotli64_T0(sh);
1570 goto do_store;
1571 } else if (likely(me == (63 - sh))) {
1572 gen_op_sli_T0(sh);
1573 goto do_store;
1574 }
1575 } else if (likely(me == 63)) {
1576 if (likely(sh == (64 - mb))) {
1577 gen_op_srli_T0_64(mb);
1578 goto do_store;
1579 }
1580 }
1581 gen_op_rotli64_T0(sh);
1582 do_mask:
1583 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], MASK(mb, me));
1584 do_store:
1585 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1586 if (unlikely(Rc(ctx->opcode) != 0))
1587 gen_set_Rc0(ctx);
1588 }
1589 /* rldicl - rldicl. */
1590 static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
1591 {
1592 uint32_t sh, mb;
1593
1594 sh = SH(ctx->opcode) | (shn << 5);
1595 mb = MB(ctx->opcode) | (mbn << 5);
1596 gen_rldinm(ctx, mb, 63, sh);
1597 }
1598 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1599 /* rldicr - rldicr. */
1600 static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn)
1601 {
1602 uint32_t sh, me;
1603
1604 sh = SH(ctx->opcode) | (shn << 5);
1605 me = MB(ctx->opcode) | (men << 5);
1606 gen_rldinm(ctx, 0, me, sh);
1607 }
1608 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1609 /* rldic - rldic. */
1610 static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
1611 {
1612 uint32_t sh, mb;
1613
1614 sh = SH(ctx->opcode) | (shn << 5);
1615 mb = MB(ctx->opcode) | (mbn << 5);
1616 gen_rldinm(ctx, mb, 63 - sh, sh);
1617 }
1618 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1619
1620 static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb,
1621 uint32_t me)
1622 {
1623 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1624 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
1625 gen_op_rotl64_T0_T1();
1626 if (unlikely(mb != 0 || me != 63)) {
1627 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], MASK(mb, me));
1628 }
1629 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1630 if (unlikely(Rc(ctx->opcode) != 0))
1631 gen_set_Rc0(ctx);
1632 }
1633
1634 /* rldcl - rldcl. */
1635 static always_inline void gen_rldcl (DisasContext *ctx, int mbn)
1636 {
1637 uint32_t mb;
1638
1639 mb = MB(ctx->opcode) | (mbn << 5);
1640 gen_rldnm(ctx, mb, 63);
1641 }
1642 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1643 /* rldcr - rldcr. */
1644 static always_inline void gen_rldcr (DisasContext *ctx, int men)
1645 {
1646 uint32_t me;
1647
1648 me = MB(ctx->opcode) | (men << 5);
1649 gen_rldnm(ctx, 0, me);
1650 }
1651 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1652 /* rldimi - rldimi. */
1653 static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
1654 {
1655 uint64_t mask;
1656 uint32_t sh, mb, me;
1657
1658 sh = SH(ctx->opcode) | (shn << 5);
1659 mb = MB(ctx->opcode) | (mbn << 5);
1660 me = 63 - sh;
1661 if (likely(sh == 0)) {
1662 if (likely(mb == 0)) {
1663 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1664 goto do_store;
1665 }
1666 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1667 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1668 goto do_mask;
1669 }
1670 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1671 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1672 gen_op_rotli64_T0(sh);
1673 do_mask:
1674 mask = MASK(mb, me);
1675 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], mask);
1676 tcg_gen_andi_tl(cpu_T[1], cpu_T[1], ~mask);
1677 gen_op_or();
1678 do_store:
1679 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1680 if (unlikely(Rc(ctx->opcode) != 0))
1681 gen_set_Rc0(ctx);
1682 }
1683 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1684 #endif
1685
1686 /*** Integer shift ***/
1687 /* slw & slw. */
1688 __GEN_LOGICAL2(slw, 0x18, 0x00, PPC_INTEGER);
1689 /* sraw & sraw. */
1690 __GEN_LOGICAL2(sraw, 0x18, 0x18, PPC_INTEGER);
1691 /* srawi & srawi. */
1692 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER)
1693 {
1694 int mb, me;
1695 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1696 if (SH(ctx->opcode) != 0) {
1697 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
1698 mb = 32 - SH(ctx->opcode);
1699 me = 31;
1700 #if defined(TARGET_PPC64)
1701 mb += 32;
1702 me += 32;
1703 #endif
1704 gen_op_srawi(SH(ctx->opcode), MASK(mb, me));
1705 }
1706 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1707 if (unlikely(Rc(ctx->opcode) != 0))
1708 gen_set_Rc0(ctx);
1709 }
1710 /* srw & srw. */
1711 __GEN_LOGICAL2(srw, 0x18, 0x10, PPC_INTEGER);
1712
1713 #if defined(TARGET_PPC64)
1714 /* sld & sld. */
1715 __GEN_LOGICAL2(sld, 0x1B, 0x00, PPC_64B);
1716 /* srad & srad. */
1717 __GEN_LOGICAL2(srad, 0x1A, 0x18, PPC_64B);
1718 /* sradi & sradi. */
1719 static always_inline void gen_sradi (DisasContext *ctx, int n)
1720 {
1721 uint64_t mask;
1722 int sh, mb, me;
1723
1724 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1725 sh = SH(ctx->opcode) + (n << 5);
1726 if (sh != 0) {
1727 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
1728 mb = 64 - SH(ctx->opcode);
1729 me = 63;
1730 mask = MASK(mb, me);
1731 gen_op_sradi(sh, mask >> 32, mask);
1732 }
1733 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1734 if (unlikely(Rc(ctx->opcode) != 0))
1735 gen_set_Rc0(ctx);
1736 }
1737 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B)
1738 {
1739 gen_sradi(ctx, 0);
1740 }
1741 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B)
1742 {
1743 gen_sradi(ctx, 1);
1744 }
1745 /* srd & srd. */
1746 __GEN_LOGICAL2(srd, 0x1B, 0x10, PPC_64B);
1747 #endif
1748
1749 /*** Floating-Point arithmetic ***/
1750 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
1751 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type) \
1752 { \
1753 if (unlikely(!ctx->fpu_enabled)) { \
1754 GEN_EXCP_NO_FP(ctx); \
1755 return; \
1756 } \
1757 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]); \
1758 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rC(ctx->opcode)]); \
1759 tcg_gen_mov_i64(cpu_FT[2], cpu_fpr[rB(ctx->opcode)]); \
1760 gen_reset_fpstatus(); \
1761 gen_op_f##op(); \
1762 if (isfloat) { \
1763 gen_op_frsp(); \
1764 } \
1765 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1766 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1767 }
1768
1769 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
1770 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
1771 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
1772
1773 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
1774 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
1775 { \
1776 if (unlikely(!ctx->fpu_enabled)) { \
1777 GEN_EXCP_NO_FP(ctx); \
1778 return; \
1779 } \
1780 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]); \
1781 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]); \
1782 gen_reset_fpstatus(); \
1783 gen_op_f##op(); \
1784 if (isfloat) { \
1785 gen_op_frsp(); \
1786 } \
1787 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1788 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1789 }
1790 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
1791 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
1792 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
1793
1794 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
1795 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
1796 { \
1797 if (unlikely(!ctx->fpu_enabled)) { \
1798 GEN_EXCP_NO_FP(ctx); \
1799 return; \
1800 } \
1801 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]); \
1802 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rC(ctx->opcode)]); \
1803 gen_reset_fpstatus(); \
1804 gen_op_f##op(); \
1805 if (isfloat) { \
1806 gen_op_frsp(); \
1807 } \
1808 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1809 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1810 }
1811 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
1812 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
1813 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
1814
1815 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
1816 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type) \
1817 { \
1818 if (unlikely(!ctx->fpu_enabled)) { \
1819 GEN_EXCP_NO_FP(ctx); \
1820 return; \
1821 } \
1822 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]); \
1823 gen_reset_fpstatus(); \
1824 gen_op_f##name(); \
1825 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1826 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1827 }
1828
1829 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
1830 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type) \
1831 { \
1832 if (unlikely(!ctx->fpu_enabled)) { \
1833 GEN_EXCP_NO_FP(ctx); \
1834 return; \
1835 } \
1836 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]); \
1837 gen_reset_fpstatus(); \
1838 gen_op_f##name(); \
1839 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1840 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1841 }
1842
1843 /* fadd - fadds */
1844 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
1845 /* fdiv - fdivs */
1846 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
1847 /* fmul - fmuls */
1848 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
1849
1850 /* fre */
1851 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
1852
1853 /* fres */
1854 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
1855
1856 /* frsqrte */
1857 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
1858
1859 /* frsqrtes */
1860 static always_inline void gen_op_frsqrtes (void)
1861 {
1862 gen_op_frsqrte();
1863 gen_op_frsp();
1864 }
1865 GEN_FLOAT_BS(rsqrtes, 0x3B, 0x1A, 1, PPC_FLOAT_FRSQRTES);
1866
1867 /* fsel */
1868 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
1869 /* fsub - fsubs */
1870 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
1871 /* Optional: */
1872 /* fsqrt */
1873 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
1874 {
1875 if (unlikely(!ctx->fpu_enabled)) {
1876 GEN_EXCP_NO_FP(ctx);
1877 return;
1878 }
1879 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
1880 gen_reset_fpstatus();
1881 gen_op_fsqrt();
1882 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
1883 gen_compute_fprf(1, Rc(ctx->opcode) != 0);
1884 }
1885
1886 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
1887 {
1888 if (unlikely(!ctx->fpu_enabled)) {
1889 GEN_EXCP_NO_FP(ctx);
1890 return;
1891 }
1892 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
1893 gen_reset_fpstatus();
1894 gen_op_fsqrt();
1895 gen_op_frsp();
1896 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
1897 gen_compute_fprf(1, Rc(ctx->opcode) != 0);
1898 }
1899
1900 /*** Floating-Point multiply-and-add ***/
1901 /* fmadd - fmadds */
1902 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
1903 /* fmsub - fmsubs */
1904 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
1905 /* fnmadd - fnmadds */
1906 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
1907 /* fnmsub - fnmsubs */
1908 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
1909
1910 /*** Floating-Point round & convert ***/
1911 /* fctiw */
1912 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
1913 /* fctiwz */
1914 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
1915 /* frsp */
1916 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
1917 #if defined(TARGET_PPC64)
1918 /* fcfid */
1919 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
1920 /* fctid */
1921 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
1922 /* fctidz */
1923 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
1924 #endif
1925
1926 /* frin */
1927 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
1928 /* friz */
1929 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
1930 /* frip */
1931 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
1932 /* frim */
1933 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
1934
1935 /*** Floating-Point compare ***/
1936 /* fcmpo */
1937 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT)
1938 {
1939 if (unlikely(!ctx->fpu_enabled)) {
1940 GEN_EXCP_NO_FP(ctx);
1941 return;
1942 }
1943 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
1944 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
1945 gen_reset_fpstatus();
1946 gen_op_fcmpo();
1947 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
1948 gen_op_float_check_status();
1949 }
1950
1951 /* fcmpu */
1952 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT)
1953 {
1954 if (unlikely(!ctx->fpu_enabled)) {
1955 GEN_EXCP_NO_FP(ctx);
1956 return;
1957 }
1958 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
1959 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
1960 gen_reset_fpstatus();
1961 gen_op_fcmpu();
1962 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
1963 gen_op_float_check_status();
1964 }
1965
1966 /*** Floating-point move ***/
1967 /* fabs */
1968 /* XXX: beware that fabs never checks for NaNs nor updates the FPSCR */
1969 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
1970
1971 /* fmr - fmr. */
1972 /* XXX: beware that fmr never checks for NaNs nor updates the FPSCR */
1973 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT)
1974 {
1975 if (unlikely(!ctx->fpu_enabled)) {
1976 GEN_EXCP_NO_FP(ctx);
1977 return;
1978 }
1979 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
1980 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
1981 gen_compute_fprf(0, Rc(ctx->opcode) != 0);
1982 }
1983
1984 /* fnabs */
1985 /* XXX: beware that fnabs never checks for NaNs nor updates the FPSCR */
1986 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
1987 /* fneg */
1988 /* XXX: beware that fneg never checks for NaNs nor updates the FPSCR */
1989 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
1990
1991 /*** Floating-Point status & ctrl register ***/
1992 /* mcrfs */
1993 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT)
1994 {
1995 int bfa;
1996
1997 if (unlikely(!ctx->fpu_enabled)) {
1998 GEN_EXCP_NO_FP(ctx);
1999 return;
2000 }
2001 gen_optimize_fprf();
2002 bfa = 4 * (7 - crfS(ctx->opcode));
2003 gen_op_load_fpscr_T0(bfa);
2004 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
2005 gen_op_fpscr_resetbit(~(0xF << bfa));
2006 }
2007
2008 /* mffs */
2009 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT)
2010 {
2011 if (unlikely(!ctx->fpu_enabled)) {
2012 GEN_EXCP_NO_FP(ctx);
2013 return;
2014 }
2015 gen_optimize_fprf();
2016 gen_reset_fpstatus();
2017 gen_op_load_fpscr_FT0();
2018 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
2019 gen_compute_fprf(0, Rc(ctx->opcode) != 0);
2020 }
2021
2022 /* mtfsb0 */
2023 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT)
2024 {
2025 uint8_t crb;
2026
2027 if (unlikely(!ctx->fpu_enabled)) {
2028 GEN_EXCP_NO_FP(ctx);
2029 return;
2030 }
2031 crb = 32 - (crbD(ctx->opcode) >> 2);
2032 gen_optimize_fprf();
2033 gen_reset_fpstatus();
2034 if (likely(crb != 30 && crb != 29))
2035 gen_op_fpscr_resetbit(~(1 << crb));
2036 if (unlikely(Rc(ctx->opcode) != 0)) {
2037 gen_op_load_fpcc();
2038 gen_op_set_Rc0();
2039 }
2040 }
2041
2042 /* mtfsb1 */
2043 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT)
2044 {
2045 uint8_t crb;
2046
2047 if (unlikely(!ctx->fpu_enabled)) {
2048 GEN_EXCP_NO_FP(ctx);
2049 return;
2050 }
2051 crb = 32 - (crbD(ctx->opcode) >> 2);
2052 gen_optimize_fprf();
2053 gen_reset_fpstatus();
2054 /* XXX: we pretend we can only do IEEE floating-point computations */
2055 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI))
2056 gen_op_fpscr_setbit(crb);
2057 if (unlikely(Rc(ctx->opcode) != 0)) {
2058 gen_op_load_fpcc();
2059 gen_op_set_Rc0();
2060 }
2061 /* We can raise a deferred exception */
2062 gen_op_float_check_status();
2063 }
2064
2065 /* mtfsf */
2066 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x02010000, PPC_FLOAT)
2067 {
2068 if (unlikely(!ctx->fpu_enabled)) {
2069 GEN_EXCP_NO_FP(ctx);
2070 return;
2071 }
2072 gen_optimize_fprf();
2073 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
2074 gen_reset_fpstatus();
2075 gen_op_store_fpscr(FM(ctx->opcode));
2076 if (unlikely(Rc(ctx->opcode) != 0)) {
2077 gen_op_load_fpcc();
2078 gen_op_set_Rc0();
2079 }
2080 /* We can raise a deferred exception */
2081 gen_op_float_check_status();
2082 }
2083
2084 /* mtfsfi */
2085 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
2086 {
2087 int bf, sh;
2088
2089 if (unlikely(!ctx->fpu_enabled)) {
2090 GEN_EXCP_NO_FP(ctx);
2091 return;
2092 }
2093 bf = crbD(ctx->opcode) >> 2;
2094 sh = 7 - bf;
2095 gen_optimize_fprf();
2096 tcg_gen_movi_i64(cpu_FT[0], FPIMM(ctx->opcode) << (4 * sh));
2097 gen_reset_fpstatus();
2098 gen_op_store_fpscr(1 << sh);
2099 if (unlikely(Rc(ctx->opcode) != 0)) {
2100 gen_op_load_fpcc();
2101 gen_op_set_Rc0();
2102 }
2103 /* We can raise a deferred exception */
2104 gen_op_float_check_status();
2105 }
2106
2107 /*** Addressing modes ***/
2108 /* Register indirect with immediate index: EA = (rA|0) + SIMM */
2109 static always_inline void gen_addr_imm_index (TCGv EA,
2110 DisasContext *ctx,
2111 target_long maskl)
2112 {
2113 target_long simm = SIMM(ctx->opcode);
2114
2115 simm &= ~maskl;
2116 if (rA(ctx->opcode) == 0)
2117 tcg_gen_movi_tl(EA, simm);
2118 else if (likely(simm != 0))
2119 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2120 else
2121 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2122 }
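/* Worked example: for "lwz r3, 8(r4)" this computes EA = GPR[4] + 8;
 * with rA == 0 the base register is suppressed, so "lwz r3, 8(0)" uses
 * the absolute address 8. maskl clears the low offset bits that DS-form
 * (and lq) encodings reuse as opcode bits.
 */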
2123
2124 static always_inline void gen_addr_reg_index (TCGv EA,
2125 DisasContext *ctx)
2126 {
2127 if (rA(ctx->opcode) == 0)
2128 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2129 else
2130 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2131 }
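/* Register indirect with index: EA = (rA|0) + rB, e.g. "lwzx r3,r4,r5"
 * computes EA = GPR[4] + GPR[5], while rA == 0 simply gives EA = GPR[rB].
 */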
2132
2133 static always_inline void gen_addr_register (TCGv EA,
2134 DisasContext *ctx)
2135 {
2136 if (rA(ctx->opcode) == 0)
2137 tcg_gen_movi_tl(EA, 0);
2138 else
2139 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2140 }
2141
2142 #if defined(TARGET_PPC64)
2143 #define _GEN_MEM_FUNCS(name, mode) \
2144 &gen_op_##name##_##mode, \
2145 &gen_op_##name##_le_##mode, \
2146 &gen_op_##name##_64_##mode, \
2147 &gen_op_##name##_le_64_##mode
2148 #else
2149 #define _GEN_MEM_FUNCS(name, mode) \
2150 &gen_op_##name##_##mode, \
2151 &gen_op_##name##_le_##mode
2152 #endif
2153 #if defined(CONFIG_USER_ONLY)
2154 #if defined(TARGET_PPC64)
2155 #define NB_MEM_FUNCS 4
2156 #else
2157 #define NB_MEM_FUNCS 2
2158 #endif
2159 #define GEN_MEM_FUNCS(name) \
2160 _GEN_MEM_FUNCS(name, raw)
2161 #else
2162 #if defined(TARGET_PPC64)
2163 #define NB_MEM_FUNCS 12
2164 #else
2165 #define NB_MEM_FUNCS 6
2166 #endif
2167 #define GEN_MEM_FUNCS(name) \
2168 _GEN_MEM_FUNCS(name, user), \
2169 _GEN_MEM_FUNCS(name, kernel), \
2170 _GEN_MEM_FUNCS(name, hypv)
2171 #endif
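/* The tables built with GEN_MEM_FUNCS are indexed by ctx->mem_idx, which
 * encodes the access context: a big- and a little-endian accessor for each
 * privilege level (user, kernel, hypv), plus 64-bit-address forms on
 * 64-bit targets; user-only builds collapse to the "raw" accessors.
 */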
2172
2173 /*** Integer load ***/
2174 #define op_ldst(name) (*gen_op_##name[ctx->mem_idx])()
2175 #define OP_LD_TABLE(width) \
2176 static GenOpFunc *gen_op_l##width[NB_MEM_FUNCS] = { \
2177 GEN_MEM_FUNCS(l##width), \
2178 };
2179 #define OP_ST_TABLE(width) \
2180 static GenOpFunc *gen_op_st##width[NB_MEM_FUNCS] = { \
2181 GEN_MEM_FUNCS(st##width), \
2182 };
2183
2184
2185 #if defined(TARGET_PPC64)
2186 #define GEN_QEMU_LD_PPC64(width) \
2187 static always_inline void gen_qemu_ld##width##_ppc64(TCGv t0, TCGv t1, int flags)\
2188 { \
2189 if (likely(flags & 2)) \
2190 tcg_gen_qemu_ld##width(t0, t1, flags >> 2); \
2191 else { \
2192 TCGv addr = tcg_temp_new(TCG_TYPE_TL); \
2193 tcg_gen_ext32u_tl(addr, t1); \
2194 tcg_gen_qemu_ld##width(t0, addr, flags >> 2); \
2195 tcg_temp_free(addr); \
2196 } \
2197 }
2198 GEN_QEMU_LD_PPC64(8u)
2199 GEN_QEMU_LD_PPC64(8s)
2200 GEN_QEMU_LD_PPC64(16u)
2201 GEN_QEMU_LD_PPC64(16s)
2202 GEN_QEMU_LD_PPC64(32u)
2203 GEN_QEMU_LD_PPC64(32s)
2204 GEN_QEMU_LD_PPC64(64)
2205
2206 #define GEN_QEMU_ST_PPC64(width) \
2207 static always_inline void gen_qemu_st##width##_ppc64(TCGv t0, TCGv t1, int flags)\
2208 { \
2209 if (likely(flags & 2)) \
2210 tcg_gen_qemu_st##width(t0, t1, flags >> 2); \
2211 else { \
2212 TCGv addr = tcg_temp_new(TCG_TYPE_TL); \
2213 tcg_gen_ext32u_tl(addr, t1); \
2214 tcg_gen_qemu_st##width(t0, addr, flags >> 2); \
2215 tcg_temp_free(addr); \
2216 } \
2217 }
2218 GEN_QEMU_ST_PPC64(8)
2219 GEN_QEMU_ST_PPC64(16)
2220 GEN_QEMU_ST_PPC64(32)
2221 GEN_QEMU_ST_PPC64(64)
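/* In these 64-bit load/store helpers the "flags" argument packs the access
 * context: bit 0 selects byte reversal (little-endian mode), bit 1 selects
 * full 64-bit effective addresses (otherwise the address is zero-extended
 * to 32 bits), and flags >> 2 is the softmmu memory index.
 */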
2222
2223 static always_inline void gen_qemu_ld8u(TCGv t0, TCGv t1, int flags)
2224 {
2225 gen_qemu_ld8u_ppc64(t0, t1, flags);
2226 }
2227
2228 static always_inline void gen_qemu_ld8s(TCGv t0, TCGv t1, int flags)
2229 {
2230 gen_qemu_ld8s_ppc64(t0, t1, flags);
2231 }
2232
2233 static always_inline void gen_qemu_ld16u(TCGv t0, TCGv t1, int flags)
2234 {
2235 if (unlikely(flags & 1)) {
2236 TCGv t0_32;
2237 gen_qemu_ld16u_ppc64(t0, t1, flags);
2238 t0_32 = tcg_temp_new(TCG_TYPE_I32);
2239 tcg_gen_trunc_tl_i32(t0_32, t0);
2240 tcg_gen_bswap16_i32(t0_32, t0_32);
2241 tcg_gen_extu_i32_tl(t0, t0_32);
2242 tcg_temp_free(t0_32);
2243 } else
2244 gen_qemu_ld16u_ppc64(t0, t1, flags);
2245 }
2246
2247 static always_inline void gen_qemu_ld16s(TCGv t0, TCGv t1, int flags)
2248 {
2249 if (unlikely(flags & 1)) {
2250 TCGv t0_32;
2251 gen_qemu_ld16u_ppc64(t0, t1, flags);
2252 t0_32 = tcg_temp_new(TCG_TYPE_I32);
2253 tcg_gen_trunc_tl_i32(t0_32, t0);
2254 tcg_gen_bswap16_i32(t0_32, t0_32);
2255 tcg_gen_extu_i32_tl(t0, t0_32);
2256 tcg_gen_ext16s_tl(t0, t0);
2257 tcg_temp_free(t0_32);
2258 } else
2259 gen_qemu_ld16s_ppc64(t0, t1, flags);
2260 }
2261
2262 static always_inline void gen_qemu_ld32u(TCGv t0, TCGv t1, int flags)
2263 {
2264 if (unlikely(flags & 1)) {
2265 TCGv t0_32;
2266 gen_qemu_ld32u_ppc64(t0, t1, flags);
2267 t0_32 = tcg_temp_new(TCG_TYPE_I32);
2268 tcg_gen_trunc_tl_i32(t0_32, t0);
2269 tcg_gen_bswap_i32(t0_32, t0_32);
2270 tcg_gen_extu_i32_tl(t0, t0_32);
2271 tcg_temp_free(t0_32);
2272 } else
2273 gen_qemu_ld32u_ppc64(t0, t1, flags);
2274 }
2275
2276 static always_inline void gen_qemu_ld32s(TCGv t0, TCGv t1, int flags)
2277 {
2278 if (unlikely(flags & 1)) {
2279 TCGv t0_32;
2280 gen_qemu_ld32u_ppc64(t0, t1, flags);
2281 t0_32 = tcg_temp_new(TCG_TYPE_I32);
2282 tcg_gen_trunc_tl_i32(t0_32, t0);
2283 tcg_gen_bswap_i32(t0_32, t0_32);
2284 tcg_gen_ext_i32_tl(t0, t0_32);
2285 tcg_temp_free(t0_32);
2286 } else
2287 gen_qemu_ld32s_ppc64(t0, t1, flags);
2288 }
2289
2290 static always_inline void gen_qemu_ld64(TCGv t0, TCGv t1, int flags)
2291 {
2292 gen_qemu_ld64_ppc64(t0, t1, flags);
2293 if (unlikely(flags & 1))
2294 tcg_gen_bswap_i64(t0, t0);
2295 }
2296
2297 static always_inline void gen_qemu_st8(TCGv t0, TCGv t1, int flags)
2298 {
2299 gen_qemu_st8_ppc64(t0, t1, flags);
2300 }
2301
2302 static always_inline void gen_qemu_st16(TCGv t0, TCGv t1, int flags)
2303 {
2304 if (unlikely(flags & 1)) {
2305 TCGv temp1, temp2;
2306 temp1 = tcg_temp_new(TCG_TYPE_I32);
2307 tcg_gen_trunc_tl_i32(temp1, t0);
2308 tcg_gen_ext16u_i32(temp1, temp1);
2309 tcg_gen_bswap16_i32(temp1, temp1);
2310 temp2 = tcg_temp_new(TCG_TYPE_I64);
2311 tcg_gen_extu_i32_tl(temp2, temp1);
2312 tcg_temp_free(temp1);
2313 gen_qemu_st16_ppc64(temp2, t1, flags);
2314 tcg_temp_free(temp2);
2315 } else
2316 gen_qemu_st16_ppc64(t0, t1, flags);
2317 }
2318
2319 static always_inline void gen_qemu_st32(TCGv t0, TCGv t1, int flags)
2320 {
2321 if (unlikely(flags & 1)) {
2322 TCGv temp1, temp2;
2323 temp1 = tcg_temp_new(TCG_TYPE_I32);
2324 tcg_gen_trunc_tl_i32(temp1, t0);
2325 tcg_gen_bswap_i32(temp1, temp1);
2326 temp2 = tcg_temp_new(TCG_TYPE_I64);
2327 tcg_gen_extu_i32_tl(temp2, temp1);
2328 tcg_temp_free(temp1);
2329 gen_qemu_st32_ppc64(temp2, t1, flags);
2330 tcg_temp_free(temp2);
2331 } else
2332 gen_qemu_st32_ppc64(t0, t1, flags);
2333 }
2334
2335 static always_inline void gen_qemu_st64(TCGv t0, TCGv t1, int flags)
2336 {
2337 if (unlikely(flags & 1)) {
2338 TCGv temp = tcg_temp_new(TCG_TYPE_I64);
2339 tcg_gen_bswap_i64(temp, t0);
2340 gen_qemu_st64_ppc64(temp, t1, flags);
2341 tcg_temp_free(temp);
2342 } else
2343 gen_qemu_st64_ppc64(t0, t1, flags);
2344 }
2345
2346
2347 #else /* defined(TARGET_PPC64) */
2348 #define GEN_QEMU_LD_PPC32(width) \
2349 static always_inline void gen_qemu_ld##width##_ppc32(TCGv t0, TCGv t1, int flags)\
2350 { \
2351 tcg_gen_qemu_ld##width(t0, t1, flags >> 1); \
2352 }
2353 GEN_QEMU_LD_PPC32(8u)
2354 GEN_QEMU_LD_PPC32(8s)
2355 GEN_QEMU_LD_PPC32(16u)
2356 GEN_QEMU_LD_PPC32(16s)
2357 GEN_QEMU_LD_PPC32(32u)
2358 GEN_QEMU_LD_PPC32(32s)
2359 GEN_QEMU_LD_PPC32(64)
2360
2361 #define GEN_QEMU_ST_PPC32(width) \
2362 static always_inline void gen_qemu_st##width##_ppc32(TCGv t0, TCGv t1, int flags)\
2363 { \
2364 tcg_gen_qemu_st##width(t0, t1, flags >> 1); \
2365 }
2366 GEN_QEMU_ST_PPC32(8)
2367 GEN_QEMU_ST_PPC32(16)
2368 GEN_QEMU_ST_PPC32(32)
2369 GEN_QEMU_ST_PPC32(64)
2370
2371 static always_inline void gen_qemu_ld8u(TCGv t0, TCGv t1, int flags)
2372 {
2373 gen_qemu_ld8u_ppc32(t0, t1, flags >> 1);
2374 }
2375
2376 static always_inline void gen_qemu_ld8s(TCGv t0, TCGv t1, int flags)
2377 {
2378 gen_qemu_ld8s_ppc32(t0, t1, flags >> 1);
2379 }
2380
2381 static always_inline void gen_qemu_ld16u(TCGv t0, TCGv t1, int flags)
2382 {
2383 gen_qemu_ld16u_ppc32(t0, t1, flags >> 1);
2384 if (unlikely(flags & 1))
2385 tcg_gen_bswap16_i32(t0, t0);
2386 }
2387
2388 static always_inline void gen_qemu_ld16s(TCGv t0, TCGv t1, int flags)
2389 {
2390 if (unlikely(flags & 1)) {
2391 gen_qemu_ld16u_ppc32(t0, t1, flags);
2392 tcg_gen_bswap16_i32(t0, t0);
2393 tcg_gen_ext16s_i32(t0, t0);
2394 } else
2395 gen_qemu_ld16s_ppc32(t0, t1, flags);
2396 }
2397
2398 static always_inline void gen_qemu_ld32u(TCGv t0, TCGv t1, int flags)
2399 {
2400 gen_qemu_ld32u_ppc32(t0, t1, flags);
2401 if (unlikely(flags & 1))
2402 tcg_gen_bswap_i32(t0, t0);
2403 }
2404
2405 static always_inline void gen_qemu_ld64(TCGv t0, TCGv t1, int flags)
2406 {
2407 gen_qemu_ld64_ppc32(t0, t1, flags);
2408 if (unlikely(flags & 1))
2409 tcg_gen_bswap_i64(t0, t0);
2410 }
2411
2412 static always_inline void gen_qemu_st8(TCGv t0, TCGv t1, int flags)
2413 {
2414 gen_qemu_st8_ppc32(t0, t1, flags >> 1);
2415 }
2416
2417 static always_inline void gen_qemu_st16(TCGv t0, TCGv t1, int flags)
2418 {
2419 if (unlikely(flags & 1)) {
2420 TCGv temp = tcg_temp_new(TCG_TYPE_I32);
2421 tcg_gen_ext16u_i32(temp, t0);
2422 tcg_gen_bswap16_i32(temp, temp);
2423 gen_qemu_st16_ppc32(temp, t1, flags >> 1);
2424 tcg_temp_free(temp);
2425 } else
2426 gen_qemu_st16_ppc32(t0, t1, flags >> 1);
2427 }
2428
2429 static always_inline void gen_qemu_st32(TCGv t0, TCGv t1, int flags)
2430 {
2431 if (unlikely(flags & 1)) {
2432 TCGv temp = tcg_temp_new(TCG_TYPE_I32);
2433 tcg_gen_bswap_i32(temp, t0);
2434 gen_qemu_st32_ppc32(temp, t1, flags >> 1);
2435 tcg_temp_free(temp);
2436 } else
2437 gen_qemu_st32_ppc32(t0, t1, flags >> 1);
2438 }
2439
2440 static always_inline void gen_qemu_st64(TCGv t0, TCGv t1, int flags)
2441 {
2442 if (unlikely(flags & 1)) {
2443 TCGv temp = tcg_temp_new(TCG_TYPE_I64);
2444 tcg_gen_bswap_i64(temp, t0);
2445 gen_qemu_st64_ppc32(temp, t1, flags >> 1);
2446 tcg_temp_free(temp);
2447 } else
2448 gen_qemu_st64_ppc32(t0, t1, flags >> 1);
2449 }
2450
2451 #endif
2452
2453 #define GEN_LD(width, opc, type) \
2454 GEN_HANDLER(l##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2455 { \
2456 TCGv EA = tcg_temp_new(TCG_TYPE_TL); \
2457 gen_addr_imm_index(EA, ctx, 0); \
2458 gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2459 tcg_temp_free(EA); \
2460 }
2461
2462 #define GEN_LDU(width, opc, type) \
2463 GEN_HANDLER(l##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2464 { \
2465 TCGv EA; \
2466 if (unlikely(rA(ctx->opcode) == 0 || \
2467 rA(ctx->opcode) == rD(ctx->opcode))) { \
2468 GEN_EXCP_INVAL(ctx); \
2469 return; \
2470 } \
2471 EA = tcg_temp_new(TCG_TYPE_TL); \
2472 if (type == PPC_64B) \
2473 gen_addr_imm_index(EA, ctx, 0x03); \
2474 else \
2475 gen_addr_imm_index(EA, ctx, 0); \
2476 gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2477 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2478 tcg_temp_free(EA); \
2479 }
2480
2481 #define GEN_LDUX(width, opc2, opc3, type) \
2482 GEN_HANDLER(l##width##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2483 { \
2484 TCGv EA; \
2485 if (unlikely(rA(ctx->opcode) == 0 || \
2486 rA(ctx->opcode) == rD(ctx->opcode))) { \
2487 GEN_EXCP_INVAL(ctx); \
2488 return; \
2489 } \
2490 EA = tcg_temp_new(TCG_TYPE_TL); \
2491 gen_addr_reg_index(EA, ctx); \
2492 gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2493 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2494 tcg_temp_free(EA); \
2495 }
2496
2497 #define GEN_LDX(width, opc2, opc3, type) \
2498 GEN_HANDLER(l##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2499 { \
2500 TCGv EA = tcg_temp_new(TCG_TYPE_TL); \
2501 gen_addr_reg_index(EA, ctx); \
2502 gen_qemu_ld##width(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx); \
2503 tcg_temp_free(EA); \
2504 }
2505
2506 #define GEN_LDS(width, op, type) \
2507 GEN_LD(width, op | 0x20, type); \
2508 GEN_LDU(width, op | 0x21, type); \
2509 GEN_LDUX(width, 0x17, op | 0x01, type); \
2510 GEN_LDX(width, 0x17, op | 0x00, type)
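/* Illustrative expansion: GEN_LDS(8u, 0x02, PPC_INTEGER) generates the four
 * lbz forms: lbz (primary opcode 0x22), lbzu (0x23), and the indexed forms
 * lbzux/lbzx under primary opcode 0x1F with opc2 0x17.
 */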
2511
2512 /* lbz lbzu lbzux lbzx */
2513 GEN_LDS(8u, 0x02, PPC_INTEGER);
2514 /* lha lhau lhaux lhax */
2515 GEN_LDS(16s, 0x0A, PPC_INTEGER);
2516 /* lhz lhzu lhzux lhzx */
2517 GEN_LDS(16u, 0x08, PPC_INTEGER);
2518 /* lwz lwzu lwzux lwzx */
2519 GEN_LDS(32u, 0x00, PPC_INTEGER);
2520 #if defined(TARGET_PPC64)
2521 /* lwaux */
2522 GEN_LDUX(32s, 0x15, 0x0B, PPC_64B);
2523 /* lwax */
2524 GEN_LDX(32s, 0x15, 0x0A, PPC_64B);
2525 /* ldux */
2526 GEN_LDUX(64, 0x15, 0x01, PPC_64B);
2527 /* ldx */
2528 GEN_LDX(64, 0x15, 0x00, PPC_64B);
2529 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B)
2530 {
2531 TCGv EA;
2532 if (Rc(ctx->opcode)) {
2533 if (unlikely(rA(ctx->opcode) == 0 ||
2534 rA(ctx->opcode) == rD(ctx->opcode))) {
2535 GEN_EXCP_INVAL(ctx);
2536 return;
2537 }
2538 }
2539 EA = tcg_temp_new(TCG_TYPE_TL);
2540 gen_addr_imm_index(EA, ctx, 0x03);
2541 if (ctx->opcode & 0x02) {
2542 /* lwa (lwau is undefined) */
2543 gen_qemu_ld32s(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);
2544 } else {
2545 /* ld - ldu */
2546 gen_qemu_ld64(cpu_gpr[rD(ctx->opcode)], EA, ctx->mem_idx);
2547 }
2548 if (Rc(ctx->opcode))
2549 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2550 tcg_temp_free(EA);
2551 }
2552 /* lq */
2553 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX)
2554 {
2555 #if defined(CONFIG_USER_ONLY)
2556 GEN_EXCP_PRIVOPC(ctx);
2557 #else
2558 int ra, rd;
2559 TCGv EA;
2560
2561 /* Restore CPU state */
2562 if (unlikely(ctx->supervisor == 0)) {
2563 GEN_EXCP_PRIVOPC(ctx);
2564 return;
2565 }
2566 ra = rA(ctx->opcode);
2567 rd = rD(ctx->opcode);
2568 if (unlikely((rd & 1) || rd == ra)) {
2569 GEN_EXCP_INVAL(ctx);
2570 return;
2571 }
2572 if (unlikely(ctx->mem_idx & 1)) {
2573 /* Little-endian mode is not handled */
2574 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2575 return;
2576 }
2577 EA = tcg_temp_new(TCG_TYPE_TL);
2578 gen_addr_imm_index(EA, ctx, 0x0F);
2579 gen_qemu_ld64(cpu_gpr[rd], EA, ctx->mem_idx);
2580 tcg_gen_addi_tl(EA, EA, 8);
2581 gen_qemu_ld64(cpu_gpr[rd+1], EA, ctx->mem_idx);
2582 tcg_temp_free(EA);
2583 #endif
2584 }
2585 #endif
2586
2587 /*** Integer store ***/
2588 #define GEN_ST(width, opc, type) \
2589 GEN_HANDLER(st##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2590 { \
2591 TCGv EA = tcg_temp_new(TCG_TYPE_TL); \
2592 gen_addr_imm_index(EA, ctx, 0); \
2593 gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2594 tcg_temp_free(EA); \
2595 }
2596
2597 #define GEN_STU(width, opc, type) \
2598 GEN_HANDLER(st##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2599 { \
2600 TCGv EA; \
2601 if (unlikely(rA(ctx->opcode) == 0)) { \
2602 GEN_EXCP_INVAL(ctx); \
2603 return; \
2604 } \
2605 EA = tcg_temp_new(TCG_TYPE_TL); \
2606 if (type == PPC_64B) \
2607 gen_addr_imm_index(EA, ctx, 0x03); \
2608 else \
2609 gen_addr_imm_index(EA, ctx, 0); \
2610 gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2611 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2612 tcg_temp_free(EA); \
2613 }
2614
2615 #define GEN_STUX(width, opc2, opc3, type) \
2616 GEN_HANDLER(st##width##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2617 { \
2618 TCGv EA; \
2619 if (unlikely(rA(ctx->opcode) == 0)) { \
2620 GEN_EXCP_INVAL(ctx); \
2621 return; \
2622 } \
2623 EA = tcg_temp_new(TCG_TYPE_TL); \
2624 gen_addr_reg_index(EA, ctx); \
2625 gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2626 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA); \
2627 tcg_temp_free(EA); \
2628 }
2629
2630 #define GEN_STX(width, opc2, opc3, type) \
2631 GEN_HANDLER(st##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2632 { \
2633 TCGv EA = tcg_temp_new(TCG_TYPE_TL); \
2634 gen_addr_reg_index(EA, ctx); \
2635 gen_qemu_st##width(cpu_gpr[rS(ctx->opcode)], EA, ctx->mem_idx); \
2636 tcg_temp_free(EA); \
2637 }
2638
2639 #define GEN_STS(width, op, type) \
2640 GEN_ST(width, op | 0x20, type); \
2641 GEN_STU(width, op | 0x21, type); \
2642 GEN_STUX(width, 0x17, op | 0x01, type); \
2643 GEN_STX(width, 0x17, op | 0x00, type)
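/* GEN_STS mirrors GEN_LDS for stores, e.g. GEN_STS(8, 0x06, PPC_INTEGER)
 * yields stb (0x26), stbu (0x27) and the indexed forms stbux/stbx.
 */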
2644
2645 /* stb stbu stbux stbx */
2646 GEN_STS(8, 0x06, PPC_INTEGER);
2647 /* sth sthu sthux sthx */
2648 GEN_STS(16, 0x0C, PPC_INTEGER);
2649 /* stw stwu stwux stwx */
2650 GEN_STS(32, 0x04, PPC_INTEGER);
2651 #if defined(TARGET_PPC64)
2652 GEN_STUX(64, 0x15, 0x05, PPC_64B);
2653 GEN_STX(64, 0x15, 0x04, PPC_64B);
2654 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B)
2655 {
2656 int rs;
2657 TCGv EA;
2658
2659 rs = rS(ctx->opcode);
2660 if ((ctx->opcode & 0x3) == 0x2) {
2661 #if defined(CONFIG_USER_ONLY)
2662 GEN_EXCP_PRIVOPC(ctx);
2663 #else
2664 /* stq */
2665 if (unlikely(ctx->supervisor == 0)) {
2666 GEN_EXCP_PRIVOPC(ctx);
2667 return;
2668 }
2669 if (unlikely(rs & 1)) {
2670 GEN_EXCP_INVAL(ctx);
2671 return;
2672 }
2673 if (unlikely(ctx->mem_idx & 1)) {
2674 /* Little-endian mode is not handled */
2675 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2676 return;
2677 }
2678 EA = tcg_temp_new(TCG_TYPE_TL);
2679 gen_addr_imm_index(EA, ctx, 0x03);
2680 gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
2681 tcg_gen_addi_tl(EA, EA, 8);
2682 gen_qemu_st64(cpu_gpr[rs+1], EA, ctx->mem_idx);
2683 tcg_temp_free(EA);
2684 #endif
2685 } else {
2686 /* std / stdu */
2687 if (Rc(ctx->opcode)) {
2688 if (unlikely(rA(ctx->opcode) == 0)) {
2689 GEN_EXCP_INVAL(ctx);
2690 return;
2691 }
2692 }
2693 EA = tcg_temp_new(TCG_TYPE_TL);
2694 gen_addr_imm_index(EA, ctx, 0x03);
2695 gen_qemu_st64(cpu_gpr[rs], EA, ctx->mem_idx);
2696 if (Rc(ctx->opcode))
2697 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], EA);
2698 tcg_temp_free(EA);
2699 }
2700 }
2701 #endif
2702 /*** Integer load and store with byte reverse ***/
2703 /* lhbrx */
2704 static always_inline void gen_qemu_ld16ur(TCGv t0, TCGv t1, int flags)
2705 {
2706 TCGv temp = tcg_temp_new(TCG_TYPE_I32);
2707 gen_qemu_ld16u(temp, t1, flags);
2708 tcg_gen_bswap16_i32(temp, temp);
2709 tcg_gen_extu_i32_tl(t0, temp);
2710 tcg_temp_free(temp);
2711 }
2712 GEN_LDX(16ur, 0x16, 0x18, PPC_INTEGER);
2713
2714 /* lwbrx */
2715 static always_inline void gen_qemu_ld32ur(TCGv t0, TCGv t1, int flags)
2716 {
2717 TCGv temp = tcg_temp_new(TCG_TYPE_I32);
2718 gen_qemu_ld32u(temp, t1, flags);
2719 tcg_gen_bswap_i32(temp, temp);
2720 tcg_gen_extu_i32_tl(t0, temp);
2721 tcg_temp_free(temp);
2722 }
2723 GEN_LDX(32ur, 0x16, 0x10, PPC_INTEGER);
2724
2725 /* sthbrx */
2726 static always_inline void gen_qemu_st16r(TCGv t0, TCGv t1, int flags)
2727 {
2728 TCGv temp = tcg_temp_new(TCG_TYPE_I32);
2729 tcg_gen_trunc_tl_i32(temp, t0);
2730 tcg_gen_ext16u_i32(temp, temp);
2731 tcg_gen_bswap16_i32(temp, temp);
2732 gen_qemu_st16(temp, t1, flags);
2733 tcg_temp_free(temp);
2734 }
2735 GEN_STX(16r, 0x16, 0x1C, PPC_INTEGER);
2736
2737 /* stwbrx */
2738 static always_inline void gen_qemu_st32r(TCGv t0, TCGv t1, int flags)
2739 {
2740 TCGv temp = tcg_temp_new(TCG_TYPE_I32);
2741 tcg_gen_trunc_tl_i32(temp, t0);
2742 tcg_gen_bswap_i32(temp, temp);
2743 gen_qemu_st32(temp, t1, flags);
2744 tcg_temp_free(temp);
2745 }
2746 GEN_STX(32r, 0x16, 0x14, PPC_INTEGER);
2747
2748 /*** Integer load and store multiple ***/
2749 #define op_ldstm(name, reg) (*gen_op_##name[ctx->mem_idx])(reg)
2750 static GenOpFunc1 *gen_op_lmw[NB_MEM_FUNCS] = {
2751 GEN_MEM_FUNCS(lmw),
2752 };
2753 static GenOpFunc1 *gen_op_stmw[NB_MEM_FUNCS] = {
2754 GEN_MEM_FUNCS(stmw),
2755 };
2756
2757 /* lmw */
2758 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
2759 {
2760 /* NIP cannot be restored if the memory exception comes from a helper */
2761 gen_update_nip(ctx, ctx->nip - 4);
2762 gen_addr_imm_index(cpu_T[0], ctx, 0);
2763 op_ldstm(lmw, rD(ctx->opcode));
2764 }
2765
2766 /* stmw */
2767 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
2768 {
2769 /* NIP cannot be restored if the memory exception comes from a helper */
2770 gen_update_nip(ctx, ctx->nip - 4);
2771 gen_addr_imm_index(cpu_T[0], ctx, 0);
2772 op_ldstm(stmw, rS(ctx->opcode));
2773 }
2774
2775 /*** Integer load and store strings ***/
2776 #define op_ldsts(name, start) (*gen_op_##name[ctx->mem_idx])(start)
2777 #define op_ldstsx(name, rd, ra, rb) (*gen_op_##name[ctx->mem_idx])(rd, ra, rb)
2778 /* string loads & stores are by definition endian-safe */
2779 #define gen_op_lswi_le_raw gen_op_lswi_raw
2780 #define gen_op_lswi_le_user gen_op_lswi_user
2781 #define gen_op_lswi_le_kernel gen_op_lswi_kernel
2782 #define gen_op_lswi_le_hypv gen_op_lswi_hypv
2783 #define gen_op_lswi_le_64_raw gen_op_lswi_raw
2784 #define gen_op_lswi_le_64_user gen_op_lswi_user
2785 #define gen_op_lswi_le_64_kernel gen_op_lswi_kernel
2786 #define gen_op_lswi_le_64_hypv gen_op_lswi_hypv
2787 static GenOpFunc1 *gen_op_lswi[NB_MEM_FUNCS] = {
2788 GEN_MEM_FUNCS(lswi),
2789 };
2790 #define gen_op_lswx_le_raw gen_op_lswx_raw
2791 #define gen_op_lswx_le_user gen_op_lswx_user
2792 #define gen_op_lswx_le_kernel gen_op_lswx_kernel
2793 #define gen_op_lswx_le_hypv gen_op_lswx_hypv
2794 #define gen_op_lswx_le_64_raw gen_op_lswx_raw
2795 #define gen_op_lswx_le_64_user gen_op_lswx_user
2796 #define gen_op_lswx_le_64_kernel gen_op_lswx_kernel
2797 #define gen_op_lswx_le_64_hypv gen_op_lswx_hypv
2798 static GenOpFunc3 *gen_op_lswx[NB_MEM_FUNCS] = {
2799 GEN_MEM_FUNCS(lswx),
2800 };
2801 #define gen_op_stsw_le_raw gen_op_stsw_raw
2802 #define gen_op_stsw_le_user gen_op_stsw_user
2803 #define gen_op_stsw_le_kernel gen_op_stsw_kernel
2804 #define gen_op_stsw_le_hypv gen_op_stsw_hypv
2805 #define gen_op_stsw_le_64_raw gen_op_stsw_raw
2806 #define gen_op_stsw_le_64_user gen_op_stsw_user
2807 #define gen_op_stsw_le_64_kernel gen_op_stsw_kernel
2808 #define gen_op_stsw_le_64_hypv gen_op_stsw_hypv
2809 static GenOpFunc1 *gen_op_stsw[NB_MEM_FUNCS] = {
2810 GEN_MEM_FUNCS(stsw),
2811 };
2812
2813 /* lswi */
2814 /* The PowerPC32 specification says we must generate an exception if
2815 * rA is in the range of registers to be loaded.
2816 * On the other hand, IBM says this is valid, but rA won't be loaded.
2817 * For now, I'll follow the spec...
2818 */
2819 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING)
2820 {
2821 int nb = NB(ctx->opcode);
2822 int start = rD(ctx->opcode);
2823 int ra = rA(ctx->opcode);
2824 int nr;
2825
2826 if (nb == 0)
2827 nb = 32;
2828 nr = nb / 4;
2829 if (unlikely(((start + nr) > 32 &&
2830 start <= ra && (start + nr - 32) > ra) ||
2831 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
2832 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
2833 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_LSWX);
2834 return;
2835 }
2836 /* NIP cannot be restored if the memory exception comes from a helper */
2837 gen_update_nip(ctx, ctx->nip - 4);
2838 gen_addr_register(cpu_T[0], ctx);
2839 tcg_gen_movi_tl(cpu_T[1], nb);
2840 op_ldsts(lswi, start);
2841 }
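/* Example of the invalid-form check above: "lswi r30,r5,12" loads three
 * words into r30, r31 and r0 (the register range wraps from r31 back to
 * r0) and passes the check, whereas "lswi r3,r5,12" would load into
 * r3..r5, overwriting rA (r5), and takes the program exception.
 */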
2842
2843 /* lswx */
2844 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING)
2845 {
2846 int ra = rA(ctx->opcode);
2847 int rb = rB(ctx->opcode);
2848
2849 /* NIP cannot be restored if the memory exception comes from a helper */
2850 gen_update_nip(ctx, ctx->nip - 4);
2851 gen_addr_reg_index(cpu_T[0], ctx);
2852 if (ra == 0) {
2853 ra = rb;
2854 }
2855 gen_op_load_xer_bc();
2856 op_ldstsx(lswx, rD(ctx->opcode), ra, rb);
2857 }
2858
2859 /* stswi */
2860 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING)
2861 {
2862 int nb = NB(ctx->opcode);
2863
2864 /* NIP cannot be restored if the memory exception comes from a helper */
2865 gen_update_nip(ctx, ctx->nip - 4);
2866 gen_addr_register(cpu_T[0], ctx);
2867 if (nb == 0)
2868 nb = 32;
2869 tcg_gen_movi_tl(cpu_T[1], nb);
2870 op_ldsts(stsw, rS(ctx->opcode));
2871 }
2872
2873 /* stswx */
2874 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING)
2875 {
2876 /* NIP cannot be restored if the memory exception comes from a helper */
2877 gen_update_nip(ctx, ctx->nip - 4);
2878 gen_addr_reg_index(cpu_T[0], ctx);
2879 gen_op_load_xer_bc();
2880 op_ldsts(stsw, rS(ctx->opcode));
2881 }
2882
2883 /*** Memory synchronisation ***/
2884 /* eieio */
2885 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO)
2886 {
2887 }
2888
2889 /* isync */
2890 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM)
2891 {
2892 GEN_STOP(ctx);
2893 }
2894
2895 #define op_lwarx() (*gen_op_lwarx[ctx->mem_idx])()
2896 #define op_stwcx() (*gen_op_stwcx[ctx->mem_idx])()
2897 static GenOpFunc *gen_op_lwarx[NB_MEM_FUNCS] = {
2898 GEN_MEM_FUNCS(lwarx),
2899 };
2900 static GenOpFunc *gen_op_stwcx[NB_MEM_FUNCS] = {
2901 GEN_MEM_FUNCS(stwcx),
2902 };
2903
2904 /* lwarx */
2905 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES)
2906 {
2907 /* NIP cannot be restored if the memory exception comes from a helper */
2908 gen_update_nip(ctx, ctx->nip - 4);
2909 gen_addr_reg_index(cpu_T[0], ctx);
2910 op_lwarx();
2911 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
2912 }
2913
2914 /* stwcx. */
2915 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES)
2916 {
2917 /* NIP cannot be restored if the memory exception comes from a helper */
2918 gen_update_nip(ctx, ctx->nip - 4);
2919 gen_addr_reg_index(cpu_T[0], ctx);
2920 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
2921 op_stwcx();
2922 }
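/* lwarx and stwcx. implement load-reserve / store-conditional: lwarx
 * records a reservation on the accessed address, and stwcx. performs the
 * store (and sets CR0[EQ]) only if that reservation is still held.
 */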
2923
2924 #if defined(TARGET_PPC64)
2925 #define op_ldarx() (*gen_op_ldarx[ctx->mem_idx])()
2926 #define op_stdcx() (*gen_op_stdcx[ctx->mem_idx])()
2927 static GenOpFunc *gen_op_ldarx[NB_MEM_FUNCS] = {
2928 GEN_MEM_FUNCS(ldarx),
2929 };
2930 static GenOpFunc *gen_op_stdcx[NB_MEM_FUNCS] = {
2931 GEN_MEM_FUNCS(stdcx),
2932 };
2933
2934 /* ldarx */
2935 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B)
2936 {
2937 /* NIP cannot be restored if the memory exception comes from a helper */
2938 gen_update_nip(ctx, ctx->nip - 4);
2939 gen_addr_reg_index(cpu_T[0], ctx);
2940 op_ldarx();
2941 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
2942 }
2943
2944 /* stdcx. */
2945 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B)
2946 {
2947 /* NIP cannot be restored if the memory exception comes from a helper */
2948 gen_update_nip(ctx, ctx->nip - 4);
2949 gen_addr_reg_index(cpu_T[0], ctx);
2950 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
2951 op_stdcx();
2952 }
2953 #endif /* defined(TARGET_PPC64) */
2954
2955 /* sync */
2956 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC)
2957 {
2958 }
2959
2960 /* wait */
2961 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT)
2962 {
2963 /* Stop translation, as the CPU is supposed to sleep from now on */
2964 gen_op_wait();
2965 GEN_EXCP(ctx, EXCP_HLT, 1);
2966 }
2967
2968 /*** Floating-point load ***/
2969 #define GEN_LDF(width, opc, type) \
2970 GEN_HANDLER(l##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2971 { \
2972 if (unlikely(!ctx->fpu_enabled)) { \
2973 GEN_EXCP_NO_FP(ctx); \
2974 return; \
2975 } \
2976 gen_addr_imm_index(cpu_T[0], ctx, 0); \
2977 op_ldst(l##width); \
2978 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2979 }
2980
2981 #define GEN_LDUF(width, opc, type) \
2982 GEN_HANDLER(l##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2983 { \
2984 if (unlikely(!ctx->fpu_enabled)) { \
2985 GEN_EXCP_NO_FP(ctx); \
2986 return; \
2987 } \
2988 if (unlikely(rA(ctx->opcode) == 0)) { \
2989 GEN_EXCP_INVAL(ctx); \
2990 return; \
2991 } \
2992 gen_addr_imm_index(cpu_T[0], ctx, 0); \
2993 op_ldst(l##width); \
2994 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2995 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2996 }
2997
2998 #define GEN_LDUXF(width, opc, type) \
2999 GEN_HANDLER(l##width##ux, 0x1F, 0x17, opc, 0x00000001, type) \
3000 { \
3001 if (unlikely(!ctx->fpu_enabled)) { \
3002 GEN_EXCP_NO_FP(ctx); \
3003 return; \
3004 } \
3005 if (unlikely(rA(ctx->opcode) == 0)) { \
3006 GEN_EXCP_INVAL(ctx); \
3007 return; \
3008 } \
3009 gen_addr_reg_index(cpu_T[0], ctx); \
3010 op_ldst(l##width); \
3011 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
3012 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
3013 }
3014
3015 #define GEN_LDXF(width, opc2, opc3, type) \
3016 GEN_HANDLER(l##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
3017 { \
3018 if (unlikely(!ctx->fpu_enabled)) { \
3019 GEN_EXCP_NO_FP(ctx); \
3020 return; \
3021 } \
3022 gen_addr_reg_index(cpu_T[0], ctx); \
3023 op_ldst(l##width); \
3024 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
3025 }
3026
3027 #define GEN_LDFS(width, op, type) \
3028 OP_LD_TABLE(width); \
3029 GEN_LDF(width, op | 0x20, type); \
3030 GEN_LDUF(width, op | 0x21, type); \
3031 GEN_LDUXF(width, op | 0x01, type); \
3032 GEN_LDXF(width, 0x17, op | 0x00, type)
3033
3034 /* lfd lfdu lfdux lfdx */
3035 GEN_LDFS(fd, 0x12, PPC_FLOAT);
3036 /* lfs lfsu lfsux lfsx */
3037 GEN_LDFS(fs, 0x10, PPC_FLOAT);
3038
3039 /*** Floating-point store ***/
3040 #define GEN_STF(width, opc, type) \
3041 GEN_HANDLER(st##width, opc, 0xFF, 0xFF, 0x00000000, type) \
3042 { \
3043 if (unlikely(!ctx->fpu_enabled)) { \
3044 GEN_EXCP_NO_FP(ctx); \
3045 return; \
3046 } \
3047 gen_addr_imm_index(cpu_T[0], ctx, 0); \
3048 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
3049 op_ldst(st##width); \
3050 }
3051
3052 #define GEN_STUF(width, opc, type) \
3053 GEN_HANDLER(st##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
3054 { \
3055 if (unlikely(!ctx->fpu_enabled)) { \
3056 GEN_EXCP_NO_FP(ctx); \
3057 return; \
3058 } \
3059 if (unlikely(rA(ctx->opcode) == 0)) { \
3060 GEN_EXCP_INVAL(ctx); \
3061 return; \
3062 } \
3063 gen_addr_imm_index(cpu_T[0], ctx, 0); \
3064 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
3065 op_ldst(st##width); \
3066 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
3067 }
3068
3069 #define GEN_STUXF(width, opc, type) \
3070 GEN_HANDLER(st##width##ux, 0x1F, 0x17, opc, 0x00000001, type) \
3071 { \
3072 if (unlikely(!ctx->fpu_enabled)) { \
3073 GEN_EXCP_NO_FP(ctx); \
3074 return; \
3075 } \
3076 if (unlikely(rA(ctx->opcode) == 0)) { \
3077 GEN_EXCP_INVAL(ctx); \
3078 return; \
3079 } \
3080 gen_addr_reg_index(cpu_T[0], ctx); \
3081 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
3082 op_ldst(st##width); \
3083 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
3084 }
3085
3086 #define GEN_STXF(width, opc2, opc3, type) \
3087 GEN_HANDLER(st##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
3088 { \
3089 if (unlikely(!ctx->fpu_enabled)) { \
3090 GEN_EXCP_NO_FP(ctx); \
3091 return; \
3092 } \
3093 gen_addr_reg_index(cpu_T[0], ctx); \
3094 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
3095 op_ldst(st##width); \
3096 }
3097
3098 #define GEN_STFS(width, op, type) \
3099 OP_ST_TABLE(width); \
3100 GEN_STF(width, op | 0x20, type); \
3101 GEN_STUF(width, op | 0x21, type); \
3102 GEN_STUXF(width, op | 0x01, type); \
3103 GEN_STXF(width, 0x17, op | 0x00, type)
3104
3105 /* stfd stfdu stfdux stfdx */
3106 GEN_STFS(fd, 0x16, PPC_FLOAT);
3107 /* stfs stfsu stfsux stfsx */
3108 GEN_STFS(fs, 0x14, PPC_FLOAT);
3109
3110 /* Optional: */
3111 /* stfiwx */
3112 OP_ST_TABLE(fiw);
3113 GEN_STXF(fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
3114
3115 /*** Branch ***/
3116 static always_inline void gen_goto_tb (DisasContext *ctx, int n,
3117 target_ulong dest)
3118 {
3119 TranslationBlock *tb;
3120 tb = ctx->tb;
3121 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
3122 likely(!ctx->singlestep_enabled)) {
3123 tcg_gen_goto_tb(n);
3124 tcg_gen_movi_tl(cpu_T[1], dest);
3125 #if defined(TARGET_PPC64)
3126 if (ctx->sf_mode)
3127 tcg_gen_andi_tl(cpu_nip, cpu_T[1], ~3);
3128 else
3129 #endif
3130 tcg_gen_andi_tl(cpu_nip, cpu_T[1], (uint32_t)~3);
3131 tcg_gen_exit_tb((long)tb + n);
3132 } else {
3133 tcg_gen_movi_tl(cpu_T[1], dest);
3134 #if defined(TARGET_PPC64)
3135 if (ctx->sf_mode)
3136 tcg_gen_andi_tl(cpu_nip, cpu_T[1], ~3);
3137 else
3138 #endif
3139 tcg_gen_andi_tl(cpu_nip, cpu_T[1], (uint32_t)~3);
3140 if (unlikely(ctx->singlestep_enabled)) {
3141 if ((ctx->singlestep_enabled &
3142 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
3143 ctx->exception == POWERPC_EXCP_BRANCH) {
3144 target_ulong tmp = ctx->nip;
3145 ctx->nip = dest;
3146 GEN_EXCP(ctx, POWERPC_EXCP_TRACE, 0);
3147 ctx->nip = tmp;
3148 }
3149 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3150 gen_update_nip(ctx, dest);
3151 gen_op_debug();
3152 }
3153 }
3154 tcg_gen_exit_tb(0);
3155 }
3156 }
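/* gen_goto_tb chains directly to the next translation block only when the
 * destination lies in the same guest page as the current TB and no
 * single-stepping is active; otherwise it updates NIP and exits to the
 * main loop so trace exceptions and gdbstub breakpoints can be honoured.
 */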
3157
3158 static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip)
3159 {
3160 #if defined(TARGET_PPC64)
3161 if (ctx->sf_mode != 0 && (nip >> 32))
3162 gen_op_setlr_64(ctx->nip >> 32, ctx->nip);
3163 else
3164 #endif
3165 gen_op_setlr(ctx->nip);
3166 }
3167
3168 /* b ba bl bla */
3169 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3170 {
3171 target_ulong li, target;
3172
3173 ctx->exception = POWERPC_EXCP_BRANCH;
3174 /* sign extend LI */
3175 #if defined(TARGET_PPC64)
3176 if (ctx->sf_mode)
3177 li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
3178 else
3179 #endif
3180 li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
3181 if (likely(AA(ctx->opcode) == 0))
3182 target = ctx->nip + li - 4;
3183 else
3184 target = li;
3185 #if defined(TARGET_PPC64)
3186 if (!ctx->sf_mode)
3187 target = (uint32_t)target;
3188 #endif
3189 if (LK(ctx->opcode))
3190 gen_setlr(ctx, ctx->nip);
3191 gen_goto_tb(ctx, 0, target);
3192 }
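/* Example of the sign extension above: LI is extracted as a byte offset
 * with its two low bits zero (26 significant bits), so "<< 6 >> 6" on an
 * int32_t propagates bit 25 as the sign, e.g. LI = 0x03FFFFFC yields -4;
 * the "<< 38 >> 38" pair does the same in the 64-bit case.
 */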
3193
3194 #define BCOND_IM 0
3195 #define BCOND_LR 1
3196 #define BCOND_CTR 2
3197
3198 static always_inline void gen_bcond (DisasContext *ctx, int type)
3199 {
3200 target_ulong target = 0;
3201 target_ulong li;
3202 uint32_t bo = BO(ctx->opcode);
3203 uint32_t bi = BI(ctx->opcode);
3204 uint32_t mask;
3205
3206 ctx->exception = POWERPC_EXCP_BRANCH;
3207 if ((bo & 0x4) == 0)
3208 gen_op_dec_ctr();
3209 switch(type) {
3210 case BCOND_IM:
3211 li = (target_long)((int16_t)(BD(ctx->opcode)));
3212 if (likely(AA(ctx->opcode) == 0)) {
3213 target = ctx->nip + li - 4;
3214 } else {
3215 target = li;
3216 }
3217 #if defined(TARGET_PPC64)
3218 if (!ctx->sf_mode)
3219 target = (uint32_t)target;
3220 #endif
3221 break;
3222 case BCOND_CTR:
3223 gen_op_movl_T1_ctr();
3224 break;
3225 default:
3226 case BCOND_LR:
3227 gen_op_movl_T1_lr();
3228 break;
3229 }
3230 if (LK(ctx->opcode))
3231 gen_setlr(ctx, ctx->nip);
3232 if (bo & 0x10) {
3233 /* No CR condition */
3234 switch (bo & 0x6) {
3235 case 0:
3236 #if defined(TARGET_PPC64)
3237 if (ctx->sf_mode)
3238 gen_op_test_ctr_64();
3239 else
3240 #endif
3241 gen_op_test_ctr();
3242 break;
3243 case 2:
3244 #if defined(TARGET_PPC64)
3245 if (ctx->sf_mode)
3246 gen_op_test_ctrz_64();
3247 else
3248 #endif
3249 gen_op_test_ctrz();
3250 break;
3251 default:
3252 case 4:
3253 case 6:
3254 if (type == BCOND_IM) {
3255 gen_goto_tb(ctx, 0, target);
3256 return;
3257 } else {
3258 #if defined(TARGET_PPC64)
3259 if (ctx->sf_mode)
3260 tcg_gen_andi_tl(cpu_nip, cpu_T[1], ~3);
3261 else
3262 #endif
3263 tcg_gen_andi_tl(cpu_nip, cpu_T[1], (uint32_t)~3);
3264 goto no_test;
3265 }
3266 break;
3267 }
3268 } else {
3269 mask = 1 << (3 - (bi & 0x03));
3270 tcg_gen_mov_i32(cpu_T[0], cpu_crf[bi >> 2]);
3271 if (bo & 0x8) {
3272 switch (bo & 0x6) {
3273 case 0:
3274 #if defined(TARGET_PPC64)
3275 if (ctx->sf_mode)
3276 gen_op_test_ctr_true_64(mask);
3277 else
3278 #endif
3279 gen_op_test_ctr_true(mask);
3280 break;
3281 case 2:
3282 #if defined(TARGET_PPC64)
3283 if (ctx->sf_mode)
3284 gen_op_test_ctrz_true_64(mask);
3285 else
3286 #endif
3287 gen_op_test_ctrz_true(mask);
3288 break;
3289 default:
3290 case 4:
3291 case 6:
3292 gen_op_test_true(mask);
3293 break;
3294 }
3295 } else {
3296 switch (bo & 0x6) {
3297 case 0:
3298 #if defined(TARGET_PPC64)
3299 if (ctx->sf_mode)
3300 gen_op_test_ctr_false_64(mask);
3301 else
3302 #endif
3303 gen_op_test_ctr_false(mask);
3304 break;
3305 case 2:
3306 #if defined(TARGET_PPC64)
3307 if (ctx->sf_mode)
3308 gen_op_test_ctrz_false_64(mask);
3309 else
3310 #endif
3311 gen_op_test_ctrz_false(mask);
3312 break;
3313 default:
3314 case 4:
3315 case 6:
3316 gen_op_test_false(mask);
3317 break;
3318 }
3319 }
3320 }
3321 if (type == BCOND_IM) {
3322 int l1 = gen_new_label();
3323 gen_op_jz_T0(l1);
3324 gen_goto_tb(ctx, 0, target);
3325 gen_set_label(l1);
3326 gen_goto_tb(ctx, 1, ctx->nip);
3327 } else {
3328 #if defined(TARGET_PPC64)
3329 if (ctx->sf_mode)
3330 gen_op_btest_T1_64(ctx->nip >> 32, ctx->nip);
3331 else
3332 #endif
3333 gen_op_btest_T1(ctx->nip);
3334 no_test:
3335 tcg_gen_exit_tb(0);
3336 }
3337 }
3338
3339 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3340 {
3341 gen_bcond(ctx, BCOND_IM);
3342 }
3343
3344 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW)
3345 {
3346 gen_bcond(ctx, BCOND_CTR);
3347 }
3348
3349 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW)
3350 {
3351 gen_bcond(ctx, BCOND_LR);
3352 }
3353
3354 /*** Condition register logical ***/
3355 #define GEN_CRLOGIC(op, opc) \
3356 GEN_HANDLER(cr##op, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) \
3357 { \
3358 uint8_t bitmask; \
3359 int sh; \
3360 tcg_gen_mov_i32(cpu_T[0], cpu_crf[crbA(ctx->opcode) >> 2]); \
3361 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3362 if (sh > 0) \
3363 gen_op_srli_T0(sh); \
3364 else if (sh < 0) \
3365 gen_op_sli_T0(-sh); \
3366 tcg_gen_mov_i32(cpu_T[1], cpu_crf[crbB(ctx->opcode) >> 2]); \
3367 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3368 if (sh > 0) \
3369 gen_op_srli_T1(sh); \
3370 else if (sh < 0) \
3371 gen_op_sli_T1(-sh); \
3372 gen_op_##op(); \
3373 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3374 tcg_gen_andi_tl(cpu_T[0], cpu_T[0], bitmask); \
3375 tcg_gen_andi_i32(cpu_T[1], cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \
3376 gen_op_or(); \
3377 tcg_gen_andi_i32(cpu_crf[crbD(ctx->opcode) >> 2], cpu_T[0], 0xf); \
3378 }
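/* Each CR logical op works on single CR bits: the source fields for crbA
 * and crbB are shifted so the selected bits line up with the destination
 * bit position, the boolean op is applied, and only the target bit of the
 * destination CR field is replaced (the other three bits are preserved by
 * the ~bitmask AND).
 */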
3379
3380 /* crand */
3381 GEN_CRLOGIC(and, 0x08);
3382 /* crandc */
3383 GEN_CRLOGIC(andc, 0x04);
3384 /* creqv */
3385 GEN_CRLOGIC(eqv, 0x09);
3386 /* crnand */
3387 GEN_CRLOGIC(nand, 0x07);
3388 /* crnor */
3389 GEN_CRLOGIC(nor, 0x01);
3390 /* cror */
3391 GEN_CRLOGIC(or, 0x0E);
3392 /* crorc */
3393 GEN_CRLOGIC(orc, 0x0D);
3394 /* crxor */
3395 GEN_CRLOGIC(xor, 0x06);
3396 /* mcrf */
3397 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER)
3398 {
3399 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3400 }
3401
3402 /*** System linkage ***/
3403 /* rfi (supervisor only) */
3404 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW)
3405 {
3406 #if defined(CONFIG_USER_ONLY)
3407 GEN_EXCP_PRIVOPC(ctx);
3408 #else
3409 /* Restore CPU state */
3410 if (unlikely(!ctx->supervisor)) {
3411 GEN_EXCP_PRIVOPC(ctx);
3412 return;
3413 }
3414 gen_op_rfi();
3415 GEN_SYNC(ctx);
3416 #endif
3417 }
3418
3419 #if defined(TARGET_PPC64)
3420 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B)
3421 {
3422 #if defined(CONFIG_USER_ONLY)
3423 GEN_EXCP_PRIVOPC(ctx);
3424 #else
3425 /* Restore CPU state */
3426 if (unlikely(!ctx->supervisor)) {
3427 GEN_EXCP_PRIVOPC(ctx);
3428 return;
3429 }
3430 gen_op_rfid();
3431 GEN_SYNC(ctx);
3432 #endif
3433 }
3434
3435 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H)
3436 {
3437 #if defined(CONFIG_USER_ONLY)
3438 GEN_EXCP_PRIVOPC(ctx);
3439 #else
3440 /* Restore CPU state */
3441 if (unlikely(ctx->supervisor <= 1)) {
3442 GEN_EXCP_PRIVOPC(ctx);
3443 return;
3444 }
3445 gen_op_hrfid();
3446 GEN_SYNC(ctx);
3447 #endif
3448 }
3449 #endif
3450
3451 /* sc */
3452 #if defined(CONFIG_USER_ONLY)
3453 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3454 #else
3455 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3456 #endif
3457 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW)
3458 {
3459 uint32_t lev;
3460
3461 lev = (ctx->opcode >> 5) & 0x7F;
3462 GEN_EXCP(ctx, POWERPC_SYSCALL, lev);
3463 }
3464
3465 /*** Trap ***/
3466 /* tw */
3467 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW)
3468 {
3469 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3470 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3471 /* Update the nip since this might generate a trap exception */
3472 gen_update_nip(ctx, ctx->nip);
3473 gen_op_tw(TO(ctx->opcode));
3474 }
3475
3476 /* twi */
3477 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3478 {
3479 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3480 tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
3481 /* Update the nip since this might generate a trap exception */
3482 gen_update_nip(ctx, ctx->nip);
3483 gen_op_tw(TO(ctx->opcode));
3484 }
3485
3486 #if defined(TARGET_PPC64)
3487 /* td */
3488 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B)
3489 {
3490 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3491 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3492 /* Update the nip since this might generate a trap exception */
3493 gen_update_nip(ctx, ctx->nip);
3494 gen_op_td(TO(ctx->opcode));
3495 }
3496
3497 /* tdi */
3498 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B)
3499 {
3500 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3501 tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
3502 /* Update the nip since this might generate a trap exception */
3503 gen_update_nip(ctx, ctx->nip);
3504 gen_op_td(TO(ctx->opcode));
3505 }
3506 #endif
3507
3508 /*** Processor control ***/
3509 /* mcrxr */
3510 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC)
3511 {
3512 gen_op_load_xer_cr();
3513 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf);
3514 gen_op_clear_xer_ov();
3515 gen_op_clear_xer_ca();
3516 }
3517
3518 /* mfcr */
3519 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC)
3520 {
3521 uint32_t crm, crn;
3522
3523 if (likely(ctx->opcode & 0x00100000)) {
3524 crm = CRM(ctx->opcode);
3525 if (likely((crm ^ (crm - 1)) == 0)) {
3526 crn = ffs(crm);
3527 tcg_gen_mov_i32(cpu_T[0], cpu_crf[7 - crn]);
3528 }
3529 } else {
3530 gen_op_load_cr();
3531 }
3532 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3533 }
3534
3535 /* mfmsr */
3536 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC)
3537 {
3538 #if defined(CONFIG_USER_ONLY)
3539 GEN_EXCP_PRIVREG(ctx);
3540 #else
3541 if (unlikely(!ctx->supervisor)) {
3542 GEN_EXCP_PRIVREG(ctx);
3543 return;
3544 }
3545 gen_op_load_msr();
3546 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3547 #endif
3548 }
3549
3550 #if 1
3551 #define SPR_NOACCESS ((void *)(-1UL))
3552 #else
3553 static void spr_noaccess (void *opaque, int sprn)
3554 {
3555 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3556 printf("ERROR: try to access SPR %d !\n", sprn);
3557 }
3558 #define SPR_NOACCESS (&spr_noaccess)
3559 #endif
3560
3561 /* mfspr */
3562 static always_inline void gen_op_mfspr (DisasContext *ctx)
3563 {
3564 void (*read_cb)(void *opaque, int sprn);
3565 uint32_t sprn = SPR(ctx->opcode);
3566
3567 #if !defined(CONFIG_USER_ONLY)
3568 if (ctx->supervisor == 2)
3569 read_cb = ctx->spr_cb[sprn].hea_read;
3570 else if (ctx->supervisor)
3571 read_cb = ctx->spr_cb[sprn].oea_read;
3572 else
3573 #endif
3574 read_cb = ctx->spr_cb[sprn].uea_read;
3575 if (likely(read_cb != NULL)) {
3576 if (likely(read_cb != SPR_NOACCESS)) {
3577 (*read_cb)(ctx, sprn);
3578 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3579 } else {
3580 /* Privilege exception */
3581 /* This is a hack to avoid warnings when running Linux:
3582 * this OS breaks the PowerPC virtualisation model,
3583 * allowing userland applications to read the PVR
3584 */
3585 if (sprn != SPR_PVR) {
3586 if (loglevel != 0) {
3587 fprintf(logfile, "Trying to read privileged spr %d %03x at "
3588 ADDRX "\n", sprn, sprn, ctx->nip);
3589 }
3590 printf("Trying to read privileged spr %d %03x at " ADDRX "\n",
3591 sprn, sprn, ctx->nip);
3592 }
3593 GEN_EXCP_PRIVREG(ctx);
3594 }
3595 } else {
3596 /* Not defined */
3597 if (loglevel != 0) {
3598 fprintf(logfile, "Trying to read invalid spr %d %03x at "
3599 ADDRX "\n", sprn, sprn, ctx->nip);
3600 }
3601 printf("Trying to read invalid spr %d %03x at " ADDRX "\n",
3602 sprn, sprn, ctx->nip);
3603 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3604 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
3605 }
3606 }
3607
3608 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC)
3609 {
3610 gen_op_mfspr(ctx);
3611 }
3612
3613 /* mftb */
3614 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB)
3615 {
3616 gen_op_mfspr(ctx);
3617 }
3618
3619 /* mtcrf */
3620 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC)
3621 {
3622 uint32_t crm, crn;
3623
3624 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3625 crm = CRM(ctx->opcode);
3626 if (likely((ctx->opcode & 0x00100000) || (crm ^ (crm - 1)) == 0)) {
3627 crn = ffs(crm);
3628 gen_op_srli_T0(crn * 4);
3629 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_T[0], 0xf);
3630 } else {
3631 gen_op_store_cr(crm);
3632 }
3633 }
3634
3635 /* mtmsr */
3636 #if defined(TARGET_PPC64)
3637 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B)
3638 {
3639 #if defined(CONFIG_USER_ONLY)
3640 GEN_EXCP_PRIVREG(ctx);
3641 #else
3642 if (unlikely(!ctx->supervisor)) {
3643 GEN_EXCP_PRIVREG(ctx);
3644 return;
3645 }
3646 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3647 if (ctx->opcode & 0x00010000) {
3648 /* Special form that does not need any synchronisation */
3649 gen_op_update_riee();
3650 } else {
3651 /* XXX: we need to update nip before the store:
3652 * if we enter power-saving mode, we will exit the loop
3653 * directly from ppc_store_msr
3654 */
3655 gen_update_nip(ctx, ctx->nip);
3656 gen_op_store_msr();
3657 /* Must stop the translation as machine state (may have) changed */
3658 /* Note that mtmsrd is not always defined as context-synchronizing */
3659 ctx->exception = POWERPC_EXCP_STOP;
3660 }
3661 #endif
3662 }
3663 #endif
3664
3665 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC)
3666 {
3667 #if defined(CONFIG_USER_ONLY)
3668 GEN_EXCP_PRIVREG(ctx);
3669 #else
3670 if (unlikely(!ctx->supervisor)) {
3671 GEN_EXCP_PRIVREG(ctx);
3672 return;
3673 }
3674 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3675 if (ctx->opcode & 0x00010000) {
3676 /* Special form that does not need any synchronisation */
3677 gen_op_update_riee();
3678 } else {
3679 /* XXX: we need to update nip before the store:
3680 * if we enter power-saving mode, we will exit the loop
3681 * directly from ppc_store_msr
3682 */
3683 gen_update_nip(ctx, ctx->nip);
3684 #if defined(TARGET_PPC64)
3685 if (!ctx->sf_mode)
3686 gen_op_store_msr_32();
3687 else
3688 #endif
3689 gen_op_store_msr();
3690 /* Must stop the translation as machine state (may have) changed */
3691 /* Note that mtmsr is not always defined as context-synchronizing */
3692 ctx->exception = POWERPC_EXCP_STOP;
3693 }
3694 #endif
3695 }
3696
3697 /* mtspr */
3698 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC)
3699 {
3700 void (*write_cb)(void *opaque, int sprn);
3701 uint32_t sprn = SPR(ctx->opcode);
3702
3703 #if !defined(CONFIG_USER_ONLY)
3704 if (ctx->supervisor == 2)
3705 write_cb = ctx->spr_cb[sprn].hea_write;
3706 else if (ctx->supervisor)
3707 write_cb = ctx->spr_cb[sprn].oea_write;
3708 else
3709 #endif
3710 write_cb = ctx->spr_cb[sprn].uea_write;
3711 if (likely(write_cb != NULL)) {
3712 if (likely(write_cb != SPR_NOACCESS)) {
3713 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3714 (*write_cb)(ctx, sprn);
3715 } else {
3716 /* Privilege exception */
3717 if (loglevel != 0) {
3718 fprintf(logfile, "Trying to write privileged spr %d %03x at "
3719 ADDRX "\n", sprn, sprn, ctx->nip);
3720 }
3721 printf("Trying to write privileged spr %d %03x at " ADDRX "\n",
3722 sprn, sprn, ctx->nip);
3723 GEN_EXCP_PRIVREG(ctx);
3724 }
3725 } else {
3726 /* Not defined */
3727 if (loglevel != 0) {
3728 fprintf(logfile, "Trying to write invalid spr %d %03x at "
3729 ADDRX "\n", sprn, sprn, ctx->nip);
3730 }
3731 printf("Trying to write invalid spr %d %03x at " ADDRX "\n",
3732 sprn, sprn, ctx->nip);
3733 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3734 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
3735 }
3736 }
3737
3738 /*** Cache management ***/
3739 /* dcbf */
3740 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE)
3741 {
3742 /* XXX: specification says this is treated as a load by the MMU */
3743 TCGv temp = tcg_temp_new(TCG_TYPE_TL);
3744 gen_addr_reg_index(temp, ctx);
3745 gen_qemu_ld8u(temp, temp, ctx->mem_idx);
3746 tcg_temp_free(temp);
3747 }
3748
3749 /* dcbi (Supervisor only) */
3750 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE)
3751 {
3752 #if defined(CONFIG_USER_ONLY)
3753 GEN_EXCP_PRIVOPC(ctx);
3754 #else
3755 TCGv EA, val;
3756 if (unlikely(!ctx->supervisor)) {
3757 GEN_EXCP_PRIVOPC(ctx);
3758 return;
3759 }
3760 EA = tcg_temp_new(TCG_TYPE_TL);
3761 gen_addr_reg_index(EA, ctx);
3762 val = tcg_temp_new(TCG_TYPE_TL);
3763 /* XXX: specification says this should be treated as a store by the MMU */
3764 gen_qemu_ld8u(val, EA, ctx->mem_idx);
3765 gen_qemu_st8(val, EA, ctx->mem_idx);
3766 tcg_temp_free(val);
3767 tcg_temp_free(EA);
3768 #endif
3769 }
3770
3771 /* dcbst */
3772 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE)
3773 {
3774 /* XXX: specification says this is treated as a load by the MMU */
3775 TCGv temp = tcg_temp_new(TCG_TYPE_TL);
3776 gen_addr_reg_index(temp, ctx);
3777 gen_qemu_ld8u(temp, temp, ctx->mem_idx);
3778 tcg_temp_free(temp);
3779 }
3780
3781 /* dcbt */
3782 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE)
3783 {
3784 /* interpreted as no-op */
3785 /* XXX: specification says this is treated as a load by the MMU
3786 * but does not generate any exception
3787 */
3788 }
3789
3790 /* dcbtst */
3791 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE)
3792 {
3793 /* interpreted as no-op */
3794 /* XXX: specification says this is treated as a load by the MMU
3795 * but does not generate any exception
3796 */
3797 }
3798
3799 /* dcbz */
3800 #define op_dcbz(n) (*gen_op_dcbz[n][ctx->mem_idx])()
3801 static GenOpFunc *gen_op_dcbz[4][NB_MEM_FUNCS] = {
3802 /* 32 bytes cache line size */
3803 {
3804 #define gen_op_dcbz_l32_le_raw gen_op_dcbz_l32_raw
3805 #define gen_op_dcbz_l32_le_user gen_op_dcbz_l32_user
3806 #define gen_op_dcbz_l32_le_kernel gen_op_dcbz_l32_kernel
3807 #define gen_op_dcbz_l32_le_hypv gen_op_dcbz_l32_hypv
3808 #define gen_op_dcbz_l32_le_64_raw gen_op_dcbz_l32_64_raw
3809 #define gen_op_dcbz_l32_le_64_user gen_op_dcbz_l32_64_user
3810 #define gen_op_dcbz_l32_le_64_kernel gen_op_dcbz_l32_64_kernel
3811 #define gen_op_dcbz_l32_le_64_hypv gen_op_dcbz_l32_64_hypv
3812 GEN_MEM_FUNCS(dcbz_l32),
3813 },
3814 /* 64 bytes cache line size */
3815 {
3816 #define gen_op_dcbz_l64_le_raw gen_op_dcbz_l64_raw
3817 #define gen_op_dcbz_l64_le_user gen_op_dcbz_l64_user
3818 #define gen_op_dcbz_l64_le_kernel gen_op_dcbz_l64_kernel
3819 #define gen_op_dcbz_l64_le_hypv gen_op_dcbz_l64_hypv
3820 #define gen_op_dcbz_l64_le_64_raw gen_op_dcbz_l64_64_raw
3821 #define gen_op_dcbz_l64_le_64_user gen_op_dcbz_l64_64_user
3822 #define gen_op_dcbz_l64_le_64_kernel gen_op_dcbz_l64_64_kernel
3823 #define gen_op_dcbz_l64_le_64_hypv gen_op_dcbz_l64_64_hypv
3824 GEN_MEM_FUNCS(dcbz_l64),
3825 },
3826 /* 128 bytes cache line size */
3827 {
3828 #define gen_op_dcbz_l128_le_raw gen_op_dcbz_l128_raw
3829 #define gen_op_dcbz_l128_le_user gen_op_dcbz_l128_user
3830 #define gen_op_dcbz_l128_le_kernel gen_op_dcbz_l128_kernel
3831 #define gen_op_dcbz_l128_le_hypv gen_op_dcbz_l128_hypv
3832 #define gen_op_dcbz_l128_le_64_raw gen_op_dcbz_l128_64_raw
3833 #define gen_op_dcbz_l128_le_64_user gen_op_dcbz_l128_64_user
3834 #define gen_op_dcbz_l128_le_64_kernel gen_op_dcbz_l128_64_kernel
3835 #define gen_op_dcbz_l128_le_64_hypv gen_op_dcbz_l128_64_hypv
3836 GEN_MEM_FUNCS(dcbz_l128),
3837 },
3838 /* tunable cache line size */
3839 {
3840 #define gen_op_dcbz_le_raw gen_op_dcbz_raw
3841 #define gen_op_dcbz_le_user gen_op_dcbz_user
3842 #define gen_op_dcbz_le_kernel gen_op_dcbz_kernel
3843 #define gen_op_dcbz_le_hypv gen_op_dcbz_hypv
3844 #define gen_op_dcbz_le_64_raw gen_op_dcbz_64_raw
3845 #define gen_op_dcbz_le_64_user gen_op_dcbz_64_user
3846 #define gen_op_dcbz_le_64_kernel gen_op_dcbz_64_kernel
3847 #define gen_op_dcbz_le_64_hypv gen_op_dcbz_64_hypv
3848 GEN_MEM_FUNCS(dcbz),
3849 },
3850 };
3851
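/* Map the current data cache line size to an index in the dcbz op table:
 * 32, 64 and 128 bytes have dedicated ops, any other size uses the tunable
 * entry. */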
3852 static always_inline void handler_dcbz (DisasContext *ctx,
3853 int dcache_line_size)
3854 {
3855 int n;
3856
3857 switch (dcache_line_size) {
3858 case 32:
3859 n = 0;
3860 break;
3861 case 64:
3862 n = 1;
3863 break;
3864 case 128:
3865 n = 2;
3866 break;
3867 default:
3868 n = 3;
3869 break;
3870 }
3871 op_dcbz(n);
3872 }
3873
3874 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ)
3875 {
3876 gen_addr_reg_index(cpu_T[0], ctx);
3877 handler_dcbz(ctx, ctx->dcache_line_size);
3878 gen_op_check_reservation();
3879 }
3880
3881 GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT)
3882 {
3883 gen_addr_reg_index(cpu_T[0], ctx);
3884 if (ctx->opcode & 0x00200000)
3885 handler_dcbz(ctx, ctx->dcache_line_size);
3886 else
3887 handler_dcbz(ctx, -1);
3888 gen_op_check_reservation();
3889 }
3890
3891 /* icbi */
3892 #define op_icbi() (*gen_op_icbi[ctx->mem_idx])()
3893 #define gen_op_icbi_le_raw gen_op_icbi_raw
3894 #define gen_op_icbi_le_user gen_op_icbi_user
3895 #define gen_op_icbi_le_kernel gen_op_icbi_kernel
3896 #define gen_op_icbi_le_hypv gen_op_icbi_hypv
3897 #define gen_op_icbi_le_64_raw gen_op_icbi_64_raw
3898 #define gen_op_icbi_le_64_user gen_op_icbi_64_user
3899 #define gen_op_icbi_le_64_kernel gen_op_icbi_64_kernel
3900 #define gen_op_icbi_le_64_hypv gen_op_icbi_64_hypv
3901 static GenOpFunc *gen_op_icbi[NB_MEM_FUNCS] = {
3902 GEN_MEM_FUNCS(icbi),
3903 };
3904
3905 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI)
3906 {
3907 /* NIP cannot be restored if the memory exception comes from a helper */
3908 gen_update_nip(ctx, ctx->nip - 4);
3909 gen_addr_reg_index(cpu_T[0], ctx);
3910 op_icbi();
3911 }
3912
3913 /* Optional: */
3914 /* dcba */
3915 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA)
3916 {
3917 /* interpreted as no-op */
3918 /* XXX: specification says this is treated as a store by the MMU
3919 * but does not generate any exception
3920 */
3921 }
3922
3923 /*** Segment register manipulation ***/
3924 /* Supervisor only: */
3925 /* mfsr */
3926 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT)
3927 {
3928 #if defined(CONFIG_USER_ONLY)
3929 GEN_EXCP_PRIVREG(ctx);
3930 #else
3931 if (unlikely(!ctx->supervisor)) {
3932 GEN_EXCP_PRIVREG(ctx);
3933 return;
3934 }
3935 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3936 gen_op_load_sr();
3937 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3938 #endif
3939 }
3940
3941 /* mfsrin */
3942 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT)
3943 {
3944 #if defined(CONFIG_USER_ONLY)
3945 GEN_EXCP_PRIVREG(ctx);
3946 #else
3947 if (unlikely(!ctx->supervisor)) {
3948 GEN_EXCP_PRIVREG(ctx);
3949 return;
3950 }
3951 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
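/* The segment register number is taken from the top 4 bits of rB */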
3952 gen_op_srli_T1(28);
3953 gen_op_load_sr();
3954 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3955 #endif
3956 }
3957
3958 /* mtsr */
3959 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT)
3960 {
3961 #if defined(CONFIG_USER_ONLY)
3962 GEN_EXCP_PRIVREG(ctx);
3963 #else
3964 if (unlikely(!ctx->supervisor)) {
3965 GEN_EXCP_PRIVREG(ctx);
3966 return;
3967 }
3968 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3969 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3970 gen_op_store_sr();
3971 #endif
3972 }
3973
3974 /* mtsrin */
3975 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT)
3976 {
3977 #if defined(CONFIG_USER_ONLY)
3978 GEN_EXCP_PRIVREG(ctx);
3979 #else
3980 if (unlikely(!ctx->supervisor)) {
3981 GEN_EXCP_PRIVREG(ctx);
3982 return;
3983 }
3984 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3985 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3986 gen_op_srli_T1(28);
3987 gen_op_store_sr();
3988 #endif
3989 }
3990
3991 #if defined(TARGET_PPC64)
3992 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
3993 /* mfsr */
3994 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B)
3995 {
3996 #if defined(CONFIG_USER_ONLY)
3997 GEN_EXCP_PRIVREG(ctx);
3998 #else
3999 if (unlikely(!ctx->supervisor)) {
4000 GEN_EXCP_PRIVREG(ctx);
4001 return;
4002 }
4003 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
4004 gen_op_load_slb();
4005 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4006 #endif
4007 }
4008
4009 /* mfsrin */
4010 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
4011 PPC_SEGMENT_64B)
4012 {
4013 #if defined(CONFIG_USER_ONLY)
4014 GEN_EXCP_PRIVREG(ctx);
4015 #else
4016 if (unlikely(!ctx->supervisor)) {
4017 GEN_EXCP_PRIVREG(ctx);
4018 return;
4019 }
4020 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4021 gen_op_srli_T1(28);
4022 gen_op_load_slb();
4023 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4024 #endif
4025 }
4026
4027 /* mtsr */
4028 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B)
4029 {
4030 #if defined(CONFIG_USER_ONLY)
4031 GEN_EXCP_PRIVREG(ctx);
4032 #else
4033 if (unlikely(!ctx->supervisor)) {
4034 GEN_EXCP_PRIVREG(ctx);
4035 return;
4036 }
4037 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4038 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
4039 gen_op_store_slb();
4040 #endif
4041 }
4042
4043 /* mtsrin */
4044 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
4045 PPC_SEGMENT_64B)
4046 {
4047 #if defined(CONFIG_USER_ONLY)
4048 GEN_EXCP_PRIVREG(ctx);
4049 #else
4050 if (unlikely(!ctx->supervisor)) {
4051 GEN_EXCP_PRIVREG(ctx);
4052 return;
4053 }
4054 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4055 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4056 gen_op_srli_T1(28);
4057 gen_op_store_slb();
4058 #endif
4059 }
4060 #endif /* defined(TARGET_PPC64) */
4061
4062 /*** Lookaside buffer management ***/
4063 /* Optional & supervisor only: */
4064 /* tlbia */
4065 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA)
4066 {
4067 #if defined(CONFIG_USER_ONLY)
4068 GEN_EXCP_PRIVOPC(ctx);
4069 #else
4070 if (unlikely(!ctx->supervisor)) {
4071 GEN_EXCP_PRIVOPC(ctx);
4072 return;
4073 }
4074 gen_op_tlbia();
4075 #endif
4076 }
4077
4078 /* tlbie */
4079 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE)
4080 {
4081 #if defined(CONFIG_USER_ONLY)
4082 GEN_EXCP_PRIVOPC(ctx);
4083 #else
4084 if (unlikely(!ctx->supervisor)) {
4085 GEN_EXCP_PRIVOPC(ctx);
4086 return;
4087 }
4088 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4089 #if defined(TARGET_PPC64)
4090 if (ctx->sf_mode)
4091 gen_op_tlbie_64();
4092 else
4093 #endif
4094 gen_op_tlbie();
4095 #endif
4096 }
4097
4098 /* tlbsync */
4099 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC)
4100 {
4101 #if defined(CONFIG_USER_ONLY)
4102 GEN_EXCP_PRIVOPC(ctx);
4103 #else
4104 if (unlikely(!ctx->supervisor)) {
4105 GEN_EXCP_PRIVOPC(ctx);
4106 return;
4107 }
4108 /* This is a no-op in the emulation; architecturally it ensures that
4109 * all previous tlbie instructions have completed
4110 */
4111 GEN_STOP(ctx);
4112 #endif
4113 }
4114
4115 #if defined(TARGET_PPC64)
4116 /* slbia */
4117 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI)
4118 {
4119 #if defined(CONFIG_USER_ONLY)
4120 GEN_EXCP_PRIVOPC(ctx);
4121 #else
4122 if (unlikely(!ctx->supervisor)) {
4123 GEN_EXCP_PRIVOPC(ctx);
4124 return;
4125 }
4126 gen_op_slbia();
4127 #endif
4128 }
4129
4130 /* slbie */
4131 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI)
4132 {
4133 #if defined(CONFIG_USER_ONLY)
4134 GEN_EXCP_PRIVOPC(ctx);
4135 #else
4136 if (unlikely(!ctx->supervisor)) {
4137 GEN_EXCP_PRIVOPC(ctx);
4138 return;
4139 }
4140 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4141 gen_op_slbie();
4142 #endif
4143 }
4144 #endif
4145
4146 /*** External control ***/
4147 /* Optional: */
4148 #define op_eciwx() (*gen_op_eciwx[ctx->mem_idx])()
4149 #define op_ecowx() (*gen_op_ecowx[ctx->mem_idx])()
4150 static GenOpFunc *gen_op_eciwx[NB_MEM_FUNCS] = {
4151 GEN_MEM_FUNCS(eciwx),
4152 };
4153 static GenOpFunc *gen_op_ecowx[NB_MEM_FUNCS] = {
4154 GEN_MEM_FUNCS(ecowx),
4155 };
4156
4157 /* eciwx */
4158 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN)
4159 {
4160 /* Should check EAR[E] & alignment ! */
4161 gen_addr_reg_index(cpu_T[0], ctx);
4162 op_eciwx();
4163 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4164 }
4165
4166 /* ecowx */
4167 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN)
4168 {
4169 /* Should check EAR[E] & alignment ! */
4170 gen_addr_reg_index(cpu_T[0], ctx);
4171 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4172 op_ecowx();
4173 }
4174
4175 /* PowerPC 601 specific instructions */
4176 /* abs - abs. */
4177 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR)
4178 {
4179 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4180 gen_op_POWER_abs();
4181 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4182 if (unlikely(Rc(ctx->opcode) != 0))
4183 gen_set_Rc0(ctx);
4184 }
4185
4186 /* abso - abso. */
4187 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR)
4188 {
4189 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4190 gen_op_POWER_abso();
4191 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4192 if (unlikely(Rc(ctx->opcode) != 0))
4193 gen_set_Rc0(ctx);
4194 }
4195
4196 /* clcs */
4197 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR)
4198 {
4199 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4200 gen_op_POWER_clcs();
4201 /* Rc=1 sets CR0 to an undefined state */
4202 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4203 }
4204
4205 /* div - div. */
4206 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR)
4207 {
4208 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4209 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4210 gen_op_POWER_div();
4211 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4212 if (unlikely(Rc(ctx->opcode) != 0))
4213 gen_set_Rc0(ctx);
4214 }
4215
4216 /* divo - divo. */
4217 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR)
4218 {
4219 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4220 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4221 gen_op_POWER_divo();
4222 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4223 if (unlikely(Rc(ctx->opcode) != 0))
4224 gen_set_Rc0(ctx);
4225 }
4226
4227 /* divs - divs. */
4228 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR)
4229 {
4230 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4231 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4232 gen_op_POWER_divs();
4233 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4234 if (unlikely(Rc(ctx->opcode) != 0))
4235 gen_set_Rc0(ctx);
4236 }
4237
4238 /* divso - divso. */
4239 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR)
4240 {
4241 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4242 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4243 gen_op_POWER_divso();
4244 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4245 if (unlikely(Rc(ctx->opcode) != 0))
4246 gen_set_Rc0(ctx);
4247 }
4248
4249 /* doz - doz. */
4250 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR)
4251 {
4252 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4253 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4254 gen_op_POWER_doz();
4255 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4256 if (unlikely(Rc(ctx->opcode) != 0))
4257 gen_set_Rc0(ctx);
4258 }
4259
4260 /* dozo - dozo. */
4261 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR)
4262 {
4263 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4264 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4265 gen_op_POWER_dozo();
4266 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4267 if (unlikely(Rc(ctx->opcode) != 0))
4268 gen_set_Rc0(ctx);
4269 }
4270
4271 /* dozi */
4272 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
4273 {
4274 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4275 tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
4276 gen_op_POWER_doz();
4277 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4278 }
4279
4280 /* As lscbx loads from memory byte by byte, it is always endian-safe.
4281 * Original POWER is 32 bits only, so the 64-bit ops are defined as the 32-bit ones
4282 */
4283 #define op_POWER_lscbx(start, ra, rb) \
4284 (*gen_op_POWER_lscbx[ctx->mem_idx])(start, ra, rb)
4285 #define gen_op_POWER_lscbx_64_raw gen_op_POWER_lscbx_raw
4286 #define gen_op_POWER_lscbx_64_user gen_op_POWER_lscbx_user
4287 #define gen_op_POWER_lscbx_64_kernel gen_op_POWER_lscbx_kernel
4288 #define gen_op_POWER_lscbx_64_hypv gen_op_POWER_lscbx_hypv
4289 #define gen_op_POWER_lscbx_le_raw gen_op_POWER_lscbx_raw
4290 #define gen_op_POWER_lscbx_le_user gen_op_POWER_lscbx_user
4291 #define gen_op_POWER_lscbx_le_kernel gen_op_POWER_lscbx_kernel
4292 #define gen_op_POWER_lscbx_le_hypv gen_op_POWER_lscbx_hypv
4293 #define gen_op_POWER_lscbx_le_64_raw gen_op_POWER_lscbx_raw
4294 #define gen_op_POWER_lscbx_le_64_user gen_op_POWER_lscbx_user
4295 #define gen_op_POWER_lscbx_le_64_kernel gen_op_POWER_lscbx_kernel
4296 #define gen_op_POWER_lscbx_le_64_hypv gen_op_POWER_lscbx_hypv
4297 static GenOpFunc3 *gen_op_POWER_lscbx[NB_MEM_FUNCS] = {
4298 GEN_MEM_FUNCS(POWER_lscbx),
4299 };
4300
4301 /* lscbx - lscbx. */
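/* lscbx loads the XER byte count and compare byte before the micro-op and
 * writes the (possibly reduced) byte count back afterwards; architecturally
 * the transfer stops once a loaded byte matches the compare byte. */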
4302 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR)
4303 {
4304 int ra = rA(ctx->opcode);
4305 int rb = rB(ctx->opcode);
4306
4307 gen_addr_reg_index(cpu_T[0], ctx);
4308 if (ra == 0) {
4309 ra = rb;
4310 }
4311 /* NIP cannot be restored if the memory exception comes from a helper */
4312 gen_update_nip(ctx, ctx->nip - 4);
4313 gen_op_load_xer_bc();
4314 gen_op_load_xer_cmp();
4315 op_POWER_lscbx(rD(ctx->opcode), ra, rb);
4316 gen_op_store_xer_bc();
4317 if (unlikely(Rc(ctx->opcode) != 0))
4318 gen_set_Rc0(ctx);
4319 }
4320
4321 /* maskg - maskg. */
4322 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR)
4323 {
4324 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4325 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4326 gen_op_POWER_maskg();
4327 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4328 if (unlikely(Rc(ctx->opcode) != 0))
4329 gen_set_Rc0(ctx);
4330 }
4331
4332 /* maskir - maskir. */
4333 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR)
4334 {
4335 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4336 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4337 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
4338 gen_op_POWER_maskir();
4339 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4340 if (unlikely(Rc(ctx->opcode) != 0))
4341 gen_set_Rc0(ctx);
4342 }
4343
4344 /* mul - mul. */
4345 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR)
4346 {
4347 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4348 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4349 gen_op_POWER_mul();
4350 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4351 if (unlikely(Rc(ctx->opcode) != 0))
4352 gen_set_Rc0(ctx);
4353 }
4354
4355 /* mulo - mulo. */
4356 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR)
4357 {
4358 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4359 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4360 gen_op_POWER_mulo();
4361 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4362 if (unlikely(Rc(ctx->opcode) != 0))
4363 gen_set_Rc0(ctx);
4364 }
4365
4366 /* nabs - nabs. */
4367 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR)
4368 {
4369 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4370 gen_op_POWER_nabs();
4371 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4372 if (unlikely(Rc(ctx->opcode) != 0))
4373 gen_set_Rc0(ctx);
4374 }
4375
4376 /* nabso - nabso. */
4377 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR)
4378 {
4379 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4380 gen_op_POWER_nabso();
4381 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4382 if (unlikely(Rc(ctx->opcode) != 0))
4383 gen_set_Rc0(ctx);
4384 }
4385
4386 /* rlmi - rlmi. */
4387 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
4388 {
4389 uint32_t mb, me;
4390
4391 mb = MB(ctx->opcode);
4392 me = ME(ctx->opcode);
4393 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4394 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
4395 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
4396 gen_op_POWER_rlmi(MASK(mb, me), ~MASK(mb, me));
4397 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4398 if (unlikely(Rc(ctx->opcode) != 0))
4399 gen_set_Rc0(ctx);
4400 }
4401
4402 /* rrib - rrib. */
4403 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR)
4404 {
4405 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4406 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
4407 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
4408 gen_op_POWER_rrib();
4409 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4410 if (unlikely(Rc(ctx->opcode) != 0))
4411 gen_set_Rc0(ctx);
4412 }
4413
4414 /* sle - sle. */
4415 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR)
4416 {
4417 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4418 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4419 gen_op_POWER_sle();
4420 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4421 if (unlikely(Rc(ctx->opcode) != 0))
4422 gen_set_Rc0(ctx);
4423 }
4424
4425 /* sleq - sleq. */
4426 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR)
4427 {
4428 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4429 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4430 gen_op_POWER_sleq();
4431 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4432 if (unlikely(Rc(ctx->opcode) != 0))
4433 gen_set_Rc0(ctx);
4434 }
4435
4436 /* sliq - sliq. */
4437 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR)
4438 {
4439 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4440 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4441 gen_op_POWER_sle();
4442 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4443 if (unlikely(Rc(ctx->opcode) != 0))
4444 gen_set_Rc0(ctx);
4445 }
4446
4447 /* slliq - slliq. */
4448 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR)
4449 {
4450 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4451 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4452 gen_op_POWER_sleq();
4453 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4454 if (unlikely(Rc(ctx->opcode) != 0))
4455 gen_set_Rc0(ctx);
4456 }
4457
4458 /* sllq - sllq. */
4459 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR)
4460 {
4461 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4462 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4463 gen_op_POWER_sllq();
4464 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4465 if (unlikely(Rc(ctx->opcode) != 0))
4466 gen_set_Rc0(ctx);
4467 }
4468
4469 /* slq - slq. */
4470 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR)
4471 {
4472 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4473 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4474 gen_op_POWER_slq();
4475 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4476 if (unlikely(Rc(ctx->opcode) != 0))
4477 gen_set_Rc0(ctx);
4478 }
4479
4480 /* sraiq - sraiq. */
4481 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR)
4482 {
4483 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4484 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4485 gen_op_POWER_sraq();
4486 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4487 if (unlikely(Rc(ctx->opcode) != 0))
4488 gen_set_Rc0(ctx);
4489 }
4490
4491 /* sraq - sraq. */
4492 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR)
4493 {
4494 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4495 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4496 gen_op_POWER_sraq();
4497 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4498 if (unlikely(Rc(ctx->opcode) != 0))
4499 gen_set_Rc0(ctx);
4500 }
4501
4502 /* sre - sre. */
4503 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR)
4504 {
4505 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4506 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4507 gen_op_POWER_sre();
4508 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4509 if (unlikely(Rc(ctx->opcode) != 0))
4510 gen_set_Rc0(ctx);
4511 }
4512
4513 /* srea - srea. */
4514 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR)
4515 {
4516 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4517 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4518 gen_op_POWER_srea();
4519 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4520 if (unlikely(Rc(ctx->opcode) != 0))
4521 gen_set_Rc0(ctx);
4522 }
4523
4524 /* sreq */
4525 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR)
4526 {
4527 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4528 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4529 gen_op_POWER_sreq();
4530 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4531 if (unlikely(Rc(ctx->opcode) != 0))
4532 gen_set_Rc0(ctx);
4533 }
4534
4535 /* sriq */
4536 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR)
4537 {
4538 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4539 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4540 gen_op_POWER_srq();
4541 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4542 if (unlikely(Rc(ctx->opcode) != 0))
4543 gen_set_Rc0(ctx);
4544 }
4545
4546 /* srliq */
4547 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR)
4548 {
4549 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4551 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4552 gen_op_POWER_srlq();
4553 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4554 if (unlikely(Rc(ctx->opcode) != 0))
4555 gen_set_Rc0(ctx);
4556 }
4557
4558 /* srlq */
4559 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR)
4560 {
4561 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4562 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4563 gen_op_POWER_srlq();
4564 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4565 if (unlikely(Rc(ctx->opcode) != 0))
4566 gen_set_Rc0(ctx);
4567 }
4568
4569 /* srq */
4570 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR)
4571 {
4572 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4573 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4574 gen_op_POWER_srq();
4575 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4576 if (unlikely(Rc(ctx->opcode) != 0))
4577 gen_set_Rc0(ctx);
4578 }
4579
4580 /* PowerPC 602 specific instructions */
4581 /* dsa */
4582 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC)
4583 {
4584 /* XXX: TODO */
4585 GEN_EXCP_INVAL(ctx);
4586 }
4587
4588 /* esa */
4589 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC)
4590 {
4591 /* XXX: TODO */
4592 GEN_EXCP_INVAL(ctx);
4593 }
4594
4595 /* mfrom */
4596 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC)
4597 {
4598 #if defined(CONFIG_USER_ONLY)
4599 GEN_EXCP_PRIVOPC(ctx);
4600 #else
4601 if (unlikely(!ctx->supervisor)) {
4602 GEN_EXCP_PRIVOPC(ctx);
4603 return;
4604 }
4605 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4606 gen_op_602_mfrom();
4607 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4608 #endif
4609 }
4610
4611 /* 602 - 603 - G2 TLB management */
4612 /* tlbld */
4613 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB)
4614 {
4615 #if defined(CONFIG_USER_ONLY)
4616 GEN_EXCP_PRIVOPC(ctx);
4617 #else
4618 if (unlikely(!ctx->supervisor)) {
4619 GEN_EXCP_PRIVOPC(ctx);
4620 return;
4621 }
4622 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4623 gen_op_6xx_tlbld();
4624 #endif
4625 }
4626
4627 /* tlbli */
4628 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB)
4629 {
4630 #if defined(CONFIG_USER_ONLY)
4631 GEN_EXCP_PRIVOPC(ctx);
4632 #else
4633 if (unlikely(!ctx->supervisor)) {
4634 GEN_EXCP_PRIVOPC(ctx);
4635 return;
4636 }
4637 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4638 gen_op_6xx_tlbli();
4639 #endif
4640 }
4641
4642 /* 74xx TLB management */
4643 /* tlbld */
4644 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB)
4645 {
4646 #if defined(CONFIG_USER_ONLY)
4647 GEN_EXCP_PRIVOPC(ctx);
4648 #else
4649 if (unlikely(!ctx->supervisor)) {
4650 GEN_EXCP_PRIVOPC(ctx);
4651 return;
4652 }
4653 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4654 gen_op_74xx_tlbld();
4655 #endif
4656 }
4657
4658 /* tlbli */
4659 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB)
4660 {
4661 #if defined(CONFIG_USER_ONLY)
4662 GEN_EXCP_PRIVOPC(ctx);
4663 #else
4664 if (unlikely(!ctx->supervisor)) {
4665 GEN_EXCP_PRIVOPC(ctx);
4666 return;
4667 }
4668 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4669 gen_op_74xx_tlbli();
4670 #endif
4671 }
4672
4673 /* POWER instructions not in PowerPC 601 */
4674 /* clf */
4675 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER)
4676 {
4677 /* Cache line flush: implemented as no-op */
4678 }
4679
4680 /* cli */
4681 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER)
4682 {
4683 /* Cache line invalidate: privileged and treated as no-op */
4684 #if defined(CONFIG_USER_ONLY)
4685 GEN_EXCP_PRIVOPC(ctx);
4686 #else
4687 if (unlikely(!ctx->supervisor)) {
4688 GEN_EXCP_PRIVOPC(ctx);
4689 return;
4690 }
4691 #endif
4692 }
4693
4694 /* dclst */
4695 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER)
4696 {
4697 /* Data cache line store: treated as no-op */
4698 }
4699
4700 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER)
4701 {
4702 #if defined(CONFIG_USER_ONLY)
4703 GEN_EXCP_PRIVOPC(ctx);
4704 #else
4705 if (unlikely(!ctx->supervisor)) {
4706 GEN_EXCP_PRIVOPC(ctx);
4707 return;
4708 }
4709 int ra = rA(ctx->opcode);
4710 int rd = rD(ctx->opcode);
4711
4712 gen_addr_reg_index(cpu_T[0], ctx);
4713 gen_op_POWER_mfsri();
4714 tcg_gen_mov_tl(cpu_gpr[rd], cpu_T[0]);
4715 if (ra != 0 && ra != rd)
4716 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[1]);
4717 #endif
4718 }
4719
4720 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER)
4721 {
4722 #if defined(CONFIG_USER_ONLY)
4723 GEN_EXCP_PRIVOPC(ctx);
4724 #else
4725 if (unlikely(!ctx->supervisor)) {
4726 GEN_EXCP_PRIVOPC(ctx);
4727 return;
4728 }
4729 gen_addr_reg_index(cpu_T[0], ctx);
4730 gen_op_POWER_rac();
4731 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4732 #endif
4733 }
4734
4735 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER)
4736 {
4737 #if defined(CONFIG_USER_ONLY)
4738 GEN_EXCP_PRIVOPC(ctx);
4739 #else
4740 if (unlikely(!ctx->supervisor)) {
4741 GEN_EXCP_PRIVOPC(ctx);
4742 return;
4743 }
4744 gen_op_POWER_rfsvc();
4745 GEN_SYNC(ctx);
4746 #endif
4747 }
4748
4749 /* svc is not implemented for now */
4750
4751 /* POWER2 specific instructions */
4752 /* Quad manipulation (load/store two floats at a time) */
4753 /* Original POWER2 is 32 bits only, define 64 bits ops as 32 bits ones */
4754 #define op_POWER2_lfq() (*gen_op_POWER2_lfq[ctx->mem_idx])()
4755 #define op_POWER2_stfq() (*gen_op_POWER2_stfq[ctx->mem_idx])()
4756 #define gen_op_POWER2_lfq_64_raw gen_op_POWER2_lfq_raw
4757 #define gen_op_POWER2_lfq_64_user gen_op_POWER2_lfq_user
4758 #define gen_op_POWER2_lfq_64_kernel gen_op_POWER2_lfq_kernel
4759 #define gen_op_POWER2_lfq_64_hypv gen_op_POWER2_lfq_hypv
4760 #define gen_op_POWER2_lfq_le_64_raw gen_op_POWER2_lfq_le_raw
4761 #define gen_op_POWER2_lfq_le_64_user gen_op_POWER2_lfq_le_user
4762 #define gen_op_POWER2_lfq_le_64_kernel gen_op_POWER2_lfq_le_kernel
4763 #define gen_op_POWER2_lfq_le_64_hypv gen_op_POWER2_lfq_le_hypv
4764 #define gen_op_POWER2_stfq_64_raw gen_op_POWER2_stfq_raw
4765 #define gen_op_POWER2_stfq_64_user gen_op_POWER2_stfq_user
4766 #define gen_op_POWER2_stfq_64_kernel gen_op_POWER2_stfq_kernel
4767 #define gen_op_POWER2_stfq_64_hypv gen_op_POWER2_stfq_hypv
4768 #define gen_op_POWER2_stfq_le_64_raw gen_op_POWER2_stfq_le_raw
4769 #define gen_op_POWER2_stfq_le_64_user gen_op_POWER2_stfq_le_user
4770 #define gen_op_POWER2_stfq_le_64_kernel gen_op_POWER2_stfq_le_kernel
4771 #define gen_op_POWER2_stfq_le_64_hypv gen_op_POWER2_stfq_le_hypv
4772 static GenOpFunc *gen_op_POWER2_lfq[NB_MEM_FUNCS] = {
4773 GEN_MEM_FUNCS(POWER2_lfq),
4774 };
4775 static GenOpFunc *gen_op_POWER2_stfq[NB_MEM_FUNCS] = {
4776 GEN_MEM_FUNCS(POWER2_stfq),
4777 };
4778
4779 /* lfq */
4780 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4781 {
4782 /* NIP cannot be restored if the memory exception comes from a helper */
4783 gen_update_nip(ctx, ctx->nip - 4);
4784 gen_addr_imm_index(cpu_T[0], ctx, 0);
4785 op_POWER2_lfq();
4786 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4787 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4788 }
4789
4790 /* lfqu */
4791 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4792 {
4793 int ra = rA(ctx->opcode);
4794
4795 /* NIP cannot be restored if the memory exception comes from a helper */
4796 gen_update_nip(ctx, ctx->nip - 4);
4797 gen_addr_imm_index(cpu_T[0], ctx, 0);
4798 op_POWER2_lfq();
4799 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4800 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4801 if (ra != 0)
4802 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4803 }
4804
4805 /* lfqux */
4806 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2)
4807 {
4808 int ra = rA(ctx->opcode);
4809
4810 /* NIP cannot be restored if the memory exception comes from a helper */
4811 gen_update_nip(ctx, ctx->nip - 4);
4812 gen_addr_reg_index(cpu_T[0], ctx);
4813 op_POWER2_lfq();
4814 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4815 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4816 if (ra != 0)
4817 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4818 }
4819
4820 /* lfqx */
4821 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2)
4822 {
4823 /* NIP cannot be restored if the memory exception comes from a helper */
4824 gen_update_nip(ctx, ctx->nip - 4);
4825 gen_addr_reg_index(cpu_T[0], ctx);
4826 op_POWER2_lfq();
4827 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4828 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4829 }
4830
4831 /* stfq */
4832 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4833 {
4834 /* NIP cannot be restored if the memory exception comes from a helper */
4835 gen_update_nip(ctx, ctx->nip - 4);
4836 gen_addr_imm_index(cpu_T[0], ctx, 0);
4837 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4838 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4839 op_POWER2_stfq();
4840 }
4841
4842 /* stfqu */
4843 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4844 {
4845 int ra = rA(ctx->opcode);
4846
4847 /* NIP cannot be restored if the memory exception comes from a helper */
4848 gen_update_nip(ctx, ctx->nip - 4);
4849 gen_addr_imm_index(cpu_T[0], ctx, 0);
4850 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4851 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4852 op_POWER2_stfq();
4853 if (ra != 0)
4854 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4855 }
4856
4857 /* stfqux */
4858 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2)
4859 {
4860 int ra = rA(ctx->opcode);
4861
4862 /* NIP cannot be restored if the memory exception comes from a helper */
4863 gen_update_nip(ctx, ctx->nip - 4);
4864 gen_addr_reg_index(cpu_T[0], ctx);
4865 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4866 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4867 op_POWER2_stfq();
4868 if (ra != 0)
4869 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4870 }
4871
4872 /* stfqx */
4873 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2)
4874 {
4875 /* NIP cannot be restored if the memory exception comes from a helper */
4876 gen_update_nip(ctx, ctx->nip - 4);
4877 gen_addr_reg_index(cpu_T[0], ctx);
4878 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4879 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4880 op_POWER2_stfq();
4881 }
4882
4883 /* BookE specific instructions */
4884 /* XXX: not implemented on 440 ? */
4885 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI)
4886 {
4887 /* XXX: TODO */
4888 GEN_EXCP_INVAL(ctx);
4889 }
4890
4891 /* XXX: not implemented on 440 ? */
4892 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA)
4893 {
4894 #if defined(CONFIG_USER_ONLY)
4895 GEN_EXCP_PRIVOPC(ctx);
4896 #else
4897 if (unlikely(!ctx->supervisor)) {
4898 GEN_EXCP_PRIVOPC(ctx);
4899 return;
4900 }
4901 gen_addr_reg_index(cpu_T[0], ctx);
4902 /* Use the same micro-ops as for tlbie */
4903 #if defined(TARGET_PPC64)
4904 if (ctx->sf_mode)
4905 gen_op_tlbie_64();
4906 else
4907 #endif
4908 gen_op_tlbie();
4909 #endif
4910 }
4911
4912 /* All 405 MAC instructions are translated here */
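/* Decoding sketch: (opc3 & 0x0D) picks the multiply flavour (which half-words
 * and their signedness), (opc2 & 0x02) negates the product, (opc2 & 0x04) adds
 * the accumulator, (opc3 & 0x10) enables overflow checking and (opc3 & 0x02)
 * saturation, with (opc3 & 0x01) selecting the signed variants. */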
4913 static always_inline void gen_405_mulladd_insn (DisasContext *ctx,
4914 int opc2, int opc3,
4915 int ra, int rb, int rt, int Rc)
4916 {
4917 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[ra]);
4918 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rb]);
4919 switch (opc3 & 0x0D) {
4920 case 0x05:
4921 /* macchw - macchw. - macchwo - macchwo. */
4922 /* macchws - macchws. - macchwso - macchwso. */
4923 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
4924 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
4925 /* mulchw - mulchw. */
4926 gen_op_405_mulchw();
4927 break;
4928 case 0x04:
4929 /* macchwu - macchwu. - macchwuo - macchwuo. */
4930 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
4931 /* mulchwu - mulchwu. */
4932 gen_op_405_mulchwu();
4933 break;
4934 case 0x01:
4935 /* machhw - machhw. - machhwo - machhwo. */
4936 /* machhws - machhws. - machhwso - machhwso. */
4937 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
4938 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
4939 /* mulhhw - mulhhw. */
4940 gen_op_405_mulhhw();
4941 break;
4942 case 0x00:
4943 /* machhwu - machhwu. - machhwuo - machhwuo. */
4944 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
4945 /* mulhhwu - mulhhwu. */
4946 gen_op_405_mulhhwu();
4947 break;
4948 case 0x0D:
4949 /* maclhw - maclhw. - maclhwo - maclhwo. */
4950 /* maclhws - maclhws. - maclhwso - maclhwso. */
4951 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
4952 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
4953 /* mullhw - mullhw. */
4954 gen_op_405_mullhw();
4955 break;
4956 case 0x0C:
4957 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
4958 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
4959 /* mullhwu - mullhwu. */
4960 gen_op_405_mullhwu();
4961 break;
4962 }
4963 if (opc2 & 0x02) {
4964 /* nmultiply-and-accumulate (0x0E) */
4965 gen_op_neg();
4966 }
4967 if (opc2 & 0x04) {
4968 /* (n)multiply-and-accumulate (0x0C - 0x0E) */
4969 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rt]);
4970 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4971 gen_op_405_add_T0_T2();
4972 }
4973 if (opc3 & 0x10) {
4974 /* Check overflow */
4975 if (opc3 & 0x01)
4976 gen_op_check_addo();
4977 else
4978 gen_op_405_check_ovu();
4979 }
4980 if (opc3 & 0x02) {
4981 /* Saturate */
4982 if (opc3 & 0x01)
4983 gen_op_405_check_sat();
4984 else
4985 gen_op_405_check_satu();
4986 }
4987 tcg_gen_mov_tl(cpu_gpr[rt], cpu_T[0]);
4988 if (unlikely(Rc != 0)) {
4989 /* Update Rc0 */
4990 gen_set_Rc0(ctx);
4991 }
4992 }
4993
4994 #define GEN_MAC_HANDLER(name, opc2, opc3) \
4995 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) \
4996 { \
4997 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
4998 rD(ctx->opcode), Rc(ctx->opcode)); \
4999 }
5000
5001 /* macchw - macchw. */
5002 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5003 /* macchwo - macchwo. */
5004 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5005 /* macchws - macchws. */
5006 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5007 /* macchwso - macchwso. */
5008 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5009 /* macchwsu - macchwsu. */
5010 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5011 /* macchwsuo - macchwsuo. */
5012 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5013 /* macchwu - macchwu. */
5014 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5015 /* macchwuo - macchwuo. */
5016 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5017 /* machhw - machhw. */
5018 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5019 /* machhwo - machhwo. */
5020 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5021 /* machhws - machhws. */
5022 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5023 /* machhwso - machhwso. */
5024 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5025 /* machhwsu - machhwsu. */
5026 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5027 /* machhwsuo - machhwsuo. */
5028 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5029 /* machhwu - machhwu. */
5030 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5031 /* machhwuo - machhwuo. */
5032 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5033 /* maclhw - maclhw. */
5034 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5035 /* maclhwo - maclhwo. */
5036 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5037 /* maclhws - maclhws. */
5038 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5039 /* maclhwso - maclhwso. */
5040 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5041 /* maclhwu - maclhwu. */
5042 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5043 /* maclhwuo - maclhwuo. */
5044 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5045 /* maclhwsu - maclhwsu. */
5046 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5047 /* maclhwsuo - maclhwsuo. */
5048 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5049 /* nmacchw - nmacchw. */
5050 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5051 /* nmacchwo - nmacchwo. */
5052 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5053 /* nmacchws - nmacchws. */
5054 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5055 /* nmacchwso - nmacchwso. */
5056 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5057 /* nmachhw - nmachhw. */
5058 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5059 /* nmachhwo - nmachhwo. */
5060 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5061 /* nmachhws - nmachhws. */
5062 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5063 /* nmachhwso - nmachhwso. */
5064 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5065 /* nmaclhw - nmaclhw. */
5066 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5067 /* nmaclhwo - nmaclhwo. */
5068 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5069 /* nmaclhws - nmaclhws. */
5070 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5071 /* nmaclhwso - nmaclhwso. */
5072 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5073
5074 /* mulchw - mulchw. */
5075 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5076 /* mulchwu - mulchwu. */
5077 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5078 /* mulhhw - mulhhw. */
5079 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5080 /* mulhhwu - mulhhwu. */
5081 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5082 /* mullhw - mullhw. */
5083 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5084 /* mullhwu - mullhwu. */
5085 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5086
5087 /* mfdcr */
5088 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR)
5089 {
5090 #if defined(CONFIG_USER_ONLY)
5091 GEN_EXCP_PRIVREG(ctx);
5092 #else
5093 uint32_t dcrn = SPR(ctx->opcode);
5094
5095 if (unlikely(!ctx->supervisor)) {
5096 GEN_EXCP_PRIVREG(ctx);
5097 return;
5098 }
5099 tcg_gen_movi_tl(cpu_T[0], dcrn);
5100 gen_op_load_dcr();
5101 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5102 #endif
5103 }
5104
5105 /* mtdcr */
5106 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR)
5107 {
5108 #if defined(CONFIG_USER_ONLY)
5109 GEN_EXCP_PRIVREG(ctx);
5110 #else
5111 uint32_t dcrn = SPR(ctx->opcode);
5112
5113 if (unlikely(!ctx->supervisor)) {
5114 GEN_EXCP_PRIVREG(ctx);
5115 return;
5116 }
5117 tcg_gen_movi_tl(cpu_T[0], dcrn);
5118 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5119 gen_op_store_dcr();
5120 #endif
5121 }
5122
5123 /* mfdcrx */
5124 /* XXX: not implemented on 440 ? */
5125 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX)
5126 {
5127 #if defined(CONFIG_USER_ONLY)
5128 GEN_EXCP_PRIVREG(ctx);
5129 #else
5130 if (unlikely(!ctx->supervisor)) {
5131 GEN_EXCP_PRIVREG(ctx);
5132 return;
5133 }
5134 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5135 gen_op_load_dcr();
5136 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5137 /* Note: setting the Rc bit leaves Rc0 in an undefined state */
5138 #endif
5139 }
5140
5141 /* mtdcrx */
5142 /* XXX: not implemented on 440 ? */
5143 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX)
5144 {
5145 #if defined(CONFIG_USER_ONLY)
5146 GEN_EXCP_PRIVREG(ctx);
5147 #else
5148 if (unlikely(!ctx->supervisor)) {
5149 GEN_EXCP_PRIVREG(ctx);
5150 return;
5151 }
5152 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5153 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5154 gen_op_store_dcr();
5155 /* Note: setting the Rc bit leaves Rc0 in an undefined state */
5156 #endif
5157 }
5158
5159 /* mfdcrux (PPC 460) : user-mode access to DCR */
5160 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX)
5161 {
5162 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5163 gen_op_load_dcr();
5164 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5165 /* Note: setting the Rc bit leaves Rc0 in an undefined state */
5166 }
5167
5168 /* mtdcrux (PPC 460) : user-mode access to DCR */
5169 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX)
5170 {
5171 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5172 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5173 gen_op_store_dcr();
5174 /* Note: setting the Rc bit leaves Rc0 in an undefined state */
5175 }
5176
5177 /* dccci */
5178 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON)
5179 {
5180 #if defined(CONFIG_USER_ONLY)
5181 GEN_EXCP_PRIVOPC(ctx);
5182 #else
5183 if (unlikely(!ctx->supervisor)) {
5184 GEN_EXCP_PRIVOPC(ctx);
5185 return;
5186 }
5187 /* interpreted as no-op */
5188 #endif
5189 }
5190
5191 /* dcread */
5192 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON)
5193 {
5194 #if defined(CONFIG_USER_ONLY)
5195 GEN_EXCP_PRIVOPC(ctx);
5196 #else
5197 TCGv EA, val;
5198 if (unlikely(!ctx->supervisor)) {
5199 GEN_EXCP_PRIVOPC(ctx);
5200 return;
5201 }
5202 EA = tcg_temp_new(TCG_TYPE_TL);
5203 gen_addr_reg_index(EA, ctx);
5204 val = tcg_temp_new(TCG_TYPE_TL);
5205 gen_qemu_ld32u(val, EA, ctx->mem_idx);
5206 tcg_temp_free(val);
5207 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5208 tcg_temp_free(EA);
5209 #endif
5210 }
5211
5212 /* icbt */
5213 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT)
5214 {
5215 /* interpreted as no-op */
5216 /* XXX: specification says this is treated as a load by the MMU
5217 * but does not generate any exception
5218 */
5219 }
5220
5221 /* iccci */
5222 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON)
5223 {
5224 #if defined(CONFIG_USER_ONLY)
5225 GEN_EXCP_PRIVOPC(ctx);
5226 #else
5227 if (unlikely(!ctx->supervisor)) {
5228 GEN_EXCP_PRIVOPC(ctx);
5229 return;
5230 }
5231 /* interpreted as no-op */
5232 #endif
5233 }
5234
5235 /* icread */
5236 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON)
5237 {
5238 #if defined(CONFIG_USER_ONLY)
5239 GEN_EXCP_PRIVOPC(ctx);
5240 #else
5241 if (unlikely(!ctx->supervisor)) {
5242 GEN_EXCP_PRIVOPC(ctx);
5243 return;
5244 }
5245 /* interpreted as no-op */
5246 #endif
5247 }
5248
5249 /* rfci (supervisor only) */
5250 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP)
5251 {
5252 #if defined(CONFIG_USER_ONLY)
5253 GEN_EXCP_PRIVOPC(ctx);
5254 #else
5255 if (unlikely(!ctx->supervisor)) {
5256 GEN_EXCP_PRIVOPC(ctx);
5257 return;
5258 }
5259 /* Restore CPU state */
5260 gen_op_40x_rfci();
5261 GEN_SYNC(ctx);
5262 #endif
5263 }
5264
5265 GEN_HANDLER(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE)
5266 {
5267 #if defined(CONFIG_USER_ONLY)
5268 GEN_EXCP_PRIVOPC(ctx);
5269 #else
5270 if (unlikely(!ctx->supervisor)) {
5271 GEN_EXCP_PRIVOPC(ctx);
5272 return;
5273 }
5274 /* Restore CPU state */
5275 gen_op_rfci();
5276 GEN_SYNC(ctx);
5277 #endif
5278 }
5279
5280 /* BookE specific */
5281 /* XXX: not implemented on 440 ? */
5282 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI)
5283 {
5284 #if defined(CONFIG_USER_ONLY)
5285 GEN_EXCP_PRIVOPC(ctx);
5286 #else
5287 if (unlikely(!ctx->supervisor)) {
5288 GEN_EXCP_PRIVOPC(ctx);
5289 return;
5290 }
5291 /* Restore CPU state */
5292 gen_op_rfdi();
5293 GEN_SYNC(ctx);
5294 #endif
5295 }
5296
5297 /* XXX: not implemented on 440 ? */
5298 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI)
5299 {
5300 #if defined(CONFIG_USER_ONLY)
5301 GEN_EXCP_PRIVOPC(ctx);
5302 #else
5303 if (unlikely(!ctx->supervisor)) {
5304 GEN_EXCP_PRIVOPC(ctx);
5305 return;
5306 }
5307 /* Restore CPU state */
5308 gen_op_rfmci();
5309 GEN_SYNC(ctx);
5310 #endif
5311 }
5312
5313 /* TLB management - PowerPC 405 implementation */
5314 /* tlbre */
5315 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB)
5316 {
5317 #if defined(CONFIG_USER_ONLY)
5318 GEN_EXCP_PRIVOPC(ctx);
5319 #else
5320 if (unlikely(!ctx->supervisor)) {
5321 GEN_EXCP_PRIVOPC(ctx);
5322 return;
5323 }
5324 switch (rB(ctx->opcode)) {
5325 case 0:
5326 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5327 gen_op_4xx_tlbre_hi();
5328 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5329 break;
5330 case 1:
5331 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5332 gen_op_4xx_tlbre_lo();
5333 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5334 break;
5335 default:
5336 GEN_EXCP_INVAL(ctx);
5337 break;
5338 }
5339 #endif
5340 }
5341
5342 /* tlbsx - tlbsx. */
5343 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB)
5344 {
5345 #if defined(CONFIG_USER_ONLY)
5346 GEN_EXCP_PRIVOPC(ctx);
5347 #else
5348 if (unlikely(!ctx->supervisor)) {
5349 GEN_EXCP_PRIVOPC(ctx);
5350 return;
5351 }
5352 gen_addr_reg_index(cpu_T[0], ctx);
5353 gen_op_4xx_tlbsx();
5354 if (Rc(ctx->opcode))
5355 gen_op_4xx_tlbsx_check();
5356 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5357 #endif
5358 }
5359
5360 /* tlbwe */
5361 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB)
5362 {
5363 #if defined(CONFIG_USER_ONLY)
5364 GEN_EXCP_PRIVOPC(ctx);
5365 #else
5366 if (unlikely(!ctx->supervisor)) {
5367 GEN_EXCP_PRIVOPC(ctx);
5368 return;
5369 }
5370 switch (rB(ctx->opcode)) {
5371 case 0:
5372 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5373 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5374 gen_op_4xx_tlbwe_hi();
5375 break;
5376 case 1:
5377 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5378 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5379 gen_op_4xx_tlbwe_lo();
5380 break;
5381 default:
5382 GEN_EXCP_INVAL(ctx);
5383 break;
5384 }
5385 #endif
5386 }
5387
5388 /* TLB management - PowerPC 440 implementation */
5389 /* tlbre */
5390 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE)
5391 {
5392 #if defined(CONFIG_USER_ONLY)
5393 GEN_EXCP_PRIVOPC(ctx);
5394 #else
5395 if (unlikely(!ctx->supervisor)) {
5396 GEN_EXCP_PRIVOPC(ctx);
5397 return;
5398 }
5399 switch (rB(ctx->opcode)) {
5400 case 0:
5401 case 1:
5402 case 2:
5403 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5404 gen_op_440_tlbre(rB(ctx->opcode));
5405 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5406 break;
5407 default:
5408 GEN_EXCP_INVAL(ctx);
5409 break;
5410 }
5411 #endif
5412 }
5413
5414 /* tlbsx - tlbsx. */
5415 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE)
5416 {
5417 #if defined(CONFIG_USER_ONLY)
5418 GEN_EXCP_PRIVOPC(ctx);
5419 #else
5420 if (unlikely(!ctx->supervisor)) {
5421 GEN_EXCP_PRIVOPC(ctx);
5422 return;
5423 }
5424 gen_addr_reg_index(cpu_T[0], ctx);
5425 gen_op_440_tlbsx();
5426 if (Rc(ctx->opcode))
5427 gen_op_4xx_tlbsx_check();
5428 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5429 #endif
5430 }
5431
5432 /* tlbwe */
5433 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE)
5434 {
5435 #if defined(CONFIG_USER_ONLY)
5436 GEN_EXCP_PRIVOPC(ctx);
5437 #else
5438 if (unlikely(!ctx->supervisor)) {
5439 GEN_EXCP_PRIVOPC(ctx);
5440 return;
5441 }
5442 switch (rB(ctx->opcode)) {
5443 case 0:
5444 case 1:
5445 case 2:
5446 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5447 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5448 gen_op_440_tlbwe(rB(ctx->opcode));
5449 break;
5450 default:
5451 GEN_EXCP_INVAL(ctx);
5452 break;
5453 }
5454 #endif
5455 }
5456
5457 /* wrtee */
5458 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE)
5459 {
5460 #if defined(CONFIG_USER_ONLY)
5461 GEN_EXCP_PRIVOPC(ctx);
5462 #else
5463 if (unlikely(!ctx->supervisor)) {
5464 GEN_EXCP_PRIVOPC(ctx);
5465 return;
5466 }
5467 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rD(ctx->opcode)]);
5468 gen_op_wrte();
5469 /* Stop translation to have a chance to raise an exception
5470 * if we just set msr_ee to 1
5471 */
5472 GEN_STOP(ctx);
5473 #endif
5474 }
5475
5476 /* wrteei */
5477 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000EFC01, PPC_WRTEE)
5478 {
5479 #if defined(CONFIG_USER_ONLY)
5480 GEN_EXCP_PRIVOPC(ctx);
5481 #else
5482 if (unlikely(!ctx->supervisor)) {
5483 GEN_EXCP_PRIVOPC(ctx);
5484 return;
5485 }
5486 tcg_gen_movi_tl(cpu_T[0], ctx->opcode & 0x00010000);
5487 gen_op_wrte();
5488 /* Stop translation to have a chance to raise an exception
5489 * if we just set msr_ee to 1
5490 */
5491 GEN_STOP(ctx);
5492 #endif
5493 }
5494
5495 /* PowerPC 440 specific instructions */
5496 /* dlmzb */
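/* dlmzb feeds rS and rB to the 440 micro-op, stores the resulting byte count
 * in rA and in XER, and derives CR0 from it when Rc is set; architecturally it
 * locates the leftmost zero byte in the rS:rB byte string. */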
5497 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC)
5498 {
5499 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
5500 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
5501 gen_op_440_dlmzb();
5502 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
5503 gen_op_store_xer_bc();
5504 if (Rc(ctx->opcode)) {
5505 gen_op_440_dlmzb_update_Rc();
5506 tcg_gen_andi_i32(cpu_crf[0], cpu_T[0], 0xf);
5507 }
5508 }
5509
5510 /* mbar replaces eieio on 440 */
5511 GEN_HANDLER(mbar, 0x1F, 0x16, 0x13, 0x001FF801, PPC_BOOKE)
5512 {
5513 /* interpreted as no-op */
5514 }
5515
5516 /* msync replaces sync on 440 */
5517 GEN_HANDLER(msync, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE)
5518 {
5519 /* interpreted as no-op */
5520 }
5521
5522 /* icbt */
5523 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, PPC_BOOKE)
5524 {
5525 /* interpreted as no-op */
5526 /* XXX: specification says this is treated as a load by the MMU
5527 * but does not generate any exception
5528 */
5529 }
5530
5531 /*** Altivec vector extension ***/
5532 /* Altivec registers moves */
5533
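/* Altivec registers are 128 bits wide and are moved as two 64-bit halves
 * (avrh/avrl). */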
5534 static always_inline void gen_load_avr(int t, int reg) {
5535 tcg_gen_mov_i64(cpu_AVRh[t], cpu_avrh[reg]);
5536 tcg_gen_mov_i64(cpu_AVRl[t], cpu_avrl[reg]);
5537 }
5538
5539 static always_inline void gen_store_avr(int reg, int t) {
5540 tcg_gen_mov_i64(cpu_avrh[reg], cpu_AVRh[t]);
5541 tcg_gen_mov_i64(cpu_avrl[reg], cpu_AVRl[t]);
5542 }
5543
5544 #define op_vr_ldst(name) (*gen_op_##name[ctx->mem_idx])()
5545 #define OP_VR_LD_TABLE(name) \
5546 static GenOpFunc *gen_op_vr_l##name[NB_MEM_FUNCS] = { \
5547 GEN_MEM_FUNCS(vr_l##name), \
5548 };
5549 #define OP_VR_ST_TABLE(name) \
5550 static GenOpFunc *gen_op_vr_st##name[NB_MEM_FUNCS] = { \
5551 GEN_MEM_FUNCS(vr_st##name), \
5552 };
5553
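/* Vector load/store helpers: the effective address is computed into cpu_T[0]
 * and the 128-bit data goes through the AVR scratch pair at index 0. */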
5554 #define GEN_VR_LDX(name, opc2, opc3) \
5555 GEN_HANDLER(l##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
5556 { \
5557 if (unlikely(!ctx->altivec_enabled)) { \
5558 GEN_EXCP_NO_VR(ctx); \
5559 return; \
5560 } \
5561 gen_addr_reg_index(cpu_T[0], ctx); \
5562 op_vr_ldst(vr_l##name); \
5563 gen_store_avr(rD(ctx->opcode), 0); \
5564 }
5565
5566 #define GEN_VR_STX(name, opc2, opc3) \
5567 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
5568 { \
5569 if (unlikely(!ctx->altivec_enabled)) { \
5570 GEN_EXCP_NO_VR(ctx); \
5571 return; \
5572 } \
5573 gen_addr_reg_index(cpu_T[0], ctx); \
5574 gen_load_avr(0, rS(ctx->opcode)); \
5575 op_vr_ldst(vr_st##name); \
5576 }
5577
5578 OP_VR_LD_TABLE(vx);
5579 GEN_VR_LDX(vx, 0x07, 0x03);
5580 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
5581 #define gen_op_vr_lvxl gen_op_vr_lvx
5582 GEN_VR_LDX(vxl, 0x07, 0x0B);
5583
5584 OP_VR_ST_TABLE(vx);
5585 GEN_VR_STX(vx, 0x07, 0x07);
5586 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
5587 #define gen_op_vr_stvxl gen_op_vr_stvx
5588 GEN_VR_STX(vxl, 0x07, 0x0F);
5589
5590 /*** SPE extension ***/
5591 /* Register moves */
5592
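/* On 32-bit targets a 64-bit SPE GPR is split between cpu_gpr (low half) and
 * cpu_gprh (high half); on 64-bit targets the GPR itself holds the value. */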
5593 static always_inline void gen_load_gpr64(TCGv t, int reg) {
5594 #if defined(TARGET_PPC64)
5595 tcg_gen_mov_i64(t, cpu_gpr[reg]);
5596 #else
5597 tcg_gen_concat_i32_i64(t, cpu_gpr[reg], cpu_gprh[reg]);
5598 #endif
5599 }
5600
5601 static always_inline void gen_store_gpr64(int reg, TCGv t) {
5602 #if defined(TARGET_PPC64)
5603 tcg_gen_mov_i64(cpu_gpr[reg], t);
5604 #else
5605 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
5606 TCGv tmp = tcg_temp_new(TCG_TYPE_I64);
5607 tcg_gen_shri_i64(tmp, t, 32);
5608 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
5609 tcg_temp_free(tmp);
5610 #endif
5611 }
5612
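/* Each GEN_SPE entry covers a pair of SPE instructions that differ only in the
 * Rc bit of the encoding: Rc=0 dispatches to name0, Rc=1 to name1. */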
5613 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
5614 GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type) \
5615 { \
5616 if (Rc(ctx->opcode)) \
5617 gen_##name1(ctx); \
5618 else \
5619 gen_##name0(ctx); \
5620 }
5621
5622 /* Handler for undefined SPE opcodes */
5623 static always_inline void gen_speundef (DisasContext *ctx)
5624 {
5625 GEN_EXCP_INVAL(ctx);
5626 }
5627
5628 /* SPE load and stores */
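/* Effective address for SPE immediate-form accesses: the rB field is reused as
 * an unsigned offset, scaled by the access size (sh) and added to rA (a zero
 * rA means a zero base). */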
5629 static always_inline void gen_addr_spe_imm_index (TCGv EA, DisasContext *ctx, int sh)
5630 {
5631 target_long simm = rB(ctx->opcode);
5632
5633 if (rA(ctx->opcode) == 0)
5634 tcg_gen_movi_tl(EA, simm << sh);
5635 else if (likely(simm != 0))
5636 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm << sh);
5637 else
5638 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
5639 }
5640
5641 #define op_spe_ldst(name) (*gen_op_##name[ctx->mem_idx])()
5642 #define OP_SPE_LD_TABLE(name) \
5643 static GenOpFunc *gen_op_spe_l##name[NB_MEM_FUNCS] = { \
5644 GEN_MEM_FUNCS(spe_l##name), \
5645 };
5646 #define OP_SPE_ST_TABLE(name) \
5647 static GenOpFunc *gen_op_spe_st##name[NB_MEM_FUNCS] = { \
5648 GEN_MEM_FUNCS(spe_st##name), \
5649 };
5650
5651 #define GEN_SPE_LD(name, sh) \
5652 static always_inline void gen_evl##name (DisasContext *ctx) \
5653 { \
5654 if (unlikely(!ctx->spe_enabled)) { \
5655 GEN_EXCP_NO_AP(ctx); \
5656 return; \
5657 } \
5658 gen_addr_spe_imm_index(cpu_T[0], ctx, sh); \
5659 op_spe_ldst(spe_l##name); \
5660 gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]); \
5661 }
5662
5663 #define GEN_SPE_LDX(name) \
5664 static always_inline void gen_evl##name##x (DisasContext *ctx) \
5665 { \
5666 if (unlikely(!ctx->spe_enabled)) { \
5667 GEN_EXCP_NO_AP(ctx); \
5668 return; \
5669 } \
5670 gen_addr_reg_index(cpu_T[0], ctx); \
5671 op_spe_ldst(spe_l##name); \
5672 gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]); \
5673 }
5674
5675 #define GEN_SPEOP_LD(name, sh) \
5676 OP_SPE_LD_TABLE(name); \
5677 GEN_SPE_LD(name, sh); \
5678 GEN_SPE_LDX(name)
5679
5680 #define GEN_SPE_ST(name, sh) \
5681 static always_inline void gen_evst##name (DisasContext *ctx) \
5682 { \
5683 if (unlikely(!ctx->spe_enabled)) { \
5684 GEN_EXCP_NO_AP(ctx); \
5685 return; \
5686 } \
5687 gen_addr_spe_imm_index(cpu_T[0], ctx, sh); \
5688 gen_load_gpr64(cpu_T64[1], rS(ctx->opcode)); \
5689 op_spe_ldst(spe_st##name); \
5690 }
5691
5692 #define GEN_SPE_STX(name) \
5693 static always_inline void gen_evst##name##x (DisasContext *ctx) \
5694 { \
5695 if (unlikely(!ctx->spe_enabled)) { \
5696 GEN_EXCP_NO_AP(ctx); \
5697 return; \
5698 } \
5699 gen_addr_reg_index(cpu_T[0], ctx); \
5700 gen_load_gpr64(cpu_T64[1], rS(ctx->opcode)); \
5701 op_spe_ldst(spe_st##name); \
5702 }
5703
5704 #define GEN_SPEOP_ST(name, sh) \
5705 OP_SPE_ST_TABLE(name); \
5706 GEN_SPE_ST(name, sh); \
5707 GEN_SPE_STX(name)
5708
5709 #define GEN_SPEOP_LDST(name, sh) \
5710 GEN_SPEOP_LD(name, sh); \
5711 GEN_SPEOP_ST(name, sh)
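/*
 * Illustration (added): GEN_SPEOP_LDST(dd, 3) expands into the
 * gen_op_spe_ldd[]/gen_op_spe_stdd[] dispatch tables plus four handlers,
 * gen_evldd/gen_evlddx and gen_evstdd/gen_evstddx, covering the
 * immediate-offset and register-indexed addressing forms.
 */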
5712
5713 /* SPE arithmetic and logic */
5714 #define GEN_SPEOP_ARITH2(name) \
5715 static always_inline void gen_##name (DisasContext *ctx) \
5716 { \
5717 if (unlikely(!ctx->spe_enabled)) { \
5718 GEN_EXCP_NO_AP(ctx); \
5719 return; \
5720 } \
5721 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5722 gen_load_gpr64(cpu_T64[1], rB(ctx->opcode)); \
5723 gen_op_##name(); \
5724 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5725 }
5726
5727 #define GEN_SPEOP_TCG_ARITH2(name) \
5728 static always_inline void gen_##name (DisasContext *ctx) \
5729 { \
5730 if (unlikely(!ctx->spe_enabled)) { \
5731 GEN_EXCP_NO_AP(ctx); \
5732 return; \
5733 } \
5734 TCGv t0 = tcg_temp_new(TCG_TYPE_I64); \
5735 TCGv t1 = tcg_temp_new(TCG_TYPE_I64); \
5736 gen_load_gpr64(t0, rA(ctx->opcode)); \
5737 gen_load_gpr64(t1, rB(ctx->opcode)); \
5738 gen_op_##name(t0, t1); \
5739 gen_store_gpr64(rD(ctx->opcode), t0); \
5740 tcg_temp_free(t0); \
5741 tcg_temp_free(t1); \
5742 }
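/*
 * Note (added): unlike GEN_SPEOP_ARITH2 above, which goes through the fixed
 * cpu_T64[] dyngen temporaries and a gen_op_* micro-op, this variant
 * allocates local TCG temporaries and calls an inline gen_op_##name(t0, t1)
 * helper such as gen_op_evand() further down.
 */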
5743
5744 #define GEN_SPEOP_ARITH1(name) \
5745 static always_inline void gen_##name (DisasContext *ctx) \
5746 { \
5747 if (unlikely(!ctx->spe_enabled)) { \
5748 GEN_EXCP_NO_AP(ctx); \
5749 return; \
5750 } \
5751 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5752 gen_op_##name(); \
5753 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5754 }
5755
5756 #define GEN_SPEOP_COMP(name) \
5757 static always_inline void gen_##name (DisasContext *ctx) \
5758 { \
5759 if (unlikely(!ctx->spe_enabled)) { \
5760 GEN_EXCP_NO_AP(ctx); \
5761 return; \
5762 } \
5763 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5764 gen_load_gpr64(cpu_T64[1], rB(ctx->opcode)); \
5765 gen_op_##name(); \
5766 tcg_gen_andi_i32(cpu_crf[crfD(ctx->opcode)], cpu_T[0], 0xf); \
5767 }
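/*
 * Note (added): the comparison micro-op is expected to leave its 4-bit result
 * in cpu_T[0]; masking with 0xf and copying into cpu_crf[crfD] updates the CR
 * field named by the instruction.
 */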
5768
5769 /* Logical */
5770 static always_inline void gen_op_evand (TCGv t0, TCGv t1)
5771 {
5772 tcg_gen_and_i64(t0, t0, t1);
5773 }
5774
5775 static always_inline void gen_op_evandc (TCGv t0, TCGv t1)
5776 {
5777 tcg_gen_not_i64(t1, t1);
5778 tcg_gen_and_i64(t0, t0, t1);
5779 }
5780
5781 static always_inline void gen_op_evxor (TCGv t0, TCGv t1)
5782 {
5783 tcg_gen_xor_i64(t0, t0, t1);
5784 }
5785
5786 static always_inline void gen_op_evor (TCGv t0, TCGv t1)
5787 {
5788 tcg_gen_or_i64(t0, t0, t1);
5789 }
5790
5791 static always_inline void gen_op_evnor (TCGv t0, TCGv t1)
5792 {
5793 tcg_gen_or_i64(t0, t0, t1);
5794 tcg_gen_not_i64(t0, t0);
5795 }
5796
5797 static always_inline void gen_op_eveqv (TCGv t0, TCGv t1)
5798 {
5799 tcg_gen_xor_i64(t0, t0, t1);
5800 tcg_gen_not_i64(t0, t0);
5801 }
5802
5803 static always_inline void gen_op_evorc (TCGv t0, TCGv t1)
5804 {
5805 tcg_gen_not_i64(t1, t1);
5806 tcg_gen_or_i64(t0, t0, t1);
5807 }
5808
5809 static always_inline void gen_op_evnand (TCGv t0, TCGv t1)
5810 {
5811 tcg_gen_and_i64(t0, t0, t1);
5812 tcg_gen_not_i64(t0, t0);
5813 }
5814
5815 GEN_SPEOP_TCG_ARITH2(evand);
5816 GEN_SPEOP_TCG_ARITH2(evandc);
5817 GEN_SPEOP_TCG_ARITH2(evxor);
5818 GEN_SPEOP_TCG_ARITH2(evor);
5819 GEN_SPEOP_TCG_ARITH2(evnor);
5820 GEN_SPEOP_TCG_ARITH2(eveqv);
5821 GEN_SPEOP_TCG_ARITH2(evorc);
5822 GEN_SPEOP_TCG_ARITH2(evnand);
5823 GEN_SPEOP_ARITH2(evsrwu);
5824 GEN_SPEOP_ARITH2(evsrws);
5825 GEN_SPEOP_ARITH2(evslw);
5826 GEN_SPEOP_ARITH2(evrlw);
5827 GEN_SPEOP_ARITH2(evmergehi);
5828 GEN_SPEOP_ARITH2(evmergelo);
5829 GEN_SPEOP_ARITH2(evmergehilo);
5830 GEN_SPEOP_ARITH2(evmergelohi);
5831
5832 /* Arithmetic */
5833 GEN_SPEOP_ARITH2(evaddw);
5834 GEN_SPEOP_ARITH2(evsubfw);
5835 GEN_SPEOP_ARITH1(evabs);
5836 GEN_SPEOP_ARITH1(evneg);
5837 GEN_SPEOP_ARITH1(evextsb);
5838 GEN_SPEOP_ARITH1(evextsh);
5839 GEN_SPEOP_ARITH1(evrndw);
5840 GEN_SPEOP_ARITH1(evcntlzw);
5841 GEN_SPEOP_ARITH1(evcntlsw);
5842 static always_inline void gen_brinc (DisasContext *ctx)
5843 {
5844 /* Note: brinc is usable even if SPE is disabled */
5845 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5846 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
5847 gen_op_brinc();
5848 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5849 }
5850
5851 #define GEN_SPEOP_ARITH_IMM2(name) \
5852 static always_inline void gen_##name##i (DisasContext *ctx) \
5853 { \
5854 if (unlikely(!ctx->spe_enabled)) { \
5855 GEN_EXCP_NO_AP(ctx); \
5856 return; \
5857 } \
5858 gen_load_gpr64(cpu_T64[0], rB(ctx->opcode)); \
5859 gen_op_splatwi_T1_64(rA(ctx->opcode)); \
5860 gen_op_##name(); \
5861 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5862 }
5863
5864 #define GEN_SPEOP_LOGIC_IMM2(name) \
5865 static always_inline void gen_##name##i (DisasContext *ctx) \
5866 { \
5867 if (unlikely(!ctx->spe_enabled)) { \
5868 GEN_EXCP_NO_AP(ctx); \
5869 return; \
5870 } \
5871 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5872 gen_op_splatwi_T1_64(rB(ctx->opcode)); \
5873 gen_op_##name(); \
5874 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5875 }
5876
5877 GEN_SPEOP_ARITH_IMM2(evaddw);
5878 #define gen_evaddiw gen_evaddwi
5879 GEN_SPEOP_ARITH_IMM2(evsubfw);
5880 #define gen_evsubifw gen_evsubfwi
5881 GEN_SPEOP_LOGIC_IMM2(evslw);
5882 GEN_SPEOP_LOGIC_IMM2(evsrwu);
5883 #define gen_evsrwis gen_evsrwsi
5884 GEN_SPEOP_LOGIC_IMM2(evsrws);
5885 #define gen_evsrwiu gen_evsrwui
5886 GEN_SPEOP_LOGIC_IMM2(evrlw);
5887
5888 static always_inline void gen_evsplati (DisasContext *ctx)
5889 {
5890 int32_t imm = (int32_t)(rA(ctx->opcode) << 27) >> 27;
5891
5892 gen_op_splatwi_T0_64(imm);
5893 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
5894 }
5895
5896 static always_inline void gen_evsplatfi (DisasContext *ctx)
5897 {
5898 uint32_t imm = rA(ctx->opcode) << 27;
5899
5900 gen_op_splatwi_T0_64(imm);
5901 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
5902 }
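/*
 * Note (added): evsplati sign-extends the 5-bit rA field (shift up, then
 * arithmetic shift back down), while evsplatfi keeps the field in the top
 * five bits of the word (shift up only) before the splat micro-op builds the
 * 64-bit result.
 */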
5903
5904 /* Comparison */
5905 GEN_SPEOP_COMP(evcmpgtu);
5906 GEN_SPEOP_COMP(evcmpgts);
5907 GEN_SPEOP_COMP(evcmpltu);
5908 GEN_SPEOP_COMP(evcmplts);
5909 GEN_SPEOP_COMP(evcmpeq);
5910
5911 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
5912 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
5913 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
5914 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
5915 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
5916 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
5917 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
5918 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
5919 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
5920 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
5921 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
5922 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
5923 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
5924 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
5925 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
5926 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
5927 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
5928 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
5929 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
5930 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
5931 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
5932 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
5933 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
5934 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
5935 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
5936
5937 static always_inline void gen_evsel (DisasContext *ctx)
5938 {
5939 if (unlikely(!ctx->spe_enabled)) {
5940 GEN_EXCP_NO_AP(ctx);
5941 return;
5942 }
5943 tcg_gen_mov_i32(cpu_T[0], cpu_crf[ctx->opcode & 0x7]);
5944 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode));
5945 gen_load_gpr64(cpu_T64[1], rB(ctx->opcode));
5946 gen_op_evsel();
5947 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
5948 }
5949
5950 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE)
5951 {
5952 gen_evsel(ctx);
5953 }
5954 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE)
5955 {
5956 gen_evsel(ctx);
5957 }
5958 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE)
5959 {
5960 gen_evsel(ctx);
5961 }
5962 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE)
5963 {
5964 gen_evsel(ctx);
5965 }
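/*
 * Note (added): four table entries are registered for evsel, presumably
 * because the condition-register selector overlaps the secondary opcode
 * bits; all of them dispatch to the same gen_evsel(), which reads the
 * selector from the low three bits of the opcode.
 */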
5966
5967 /* Loads and stores */
5968 GEN_SPEOP_LDST(dd, 3);
5969 GEN_SPEOP_LDST(dw, 3);
5970 GEN_SPEOP_LDST(dh, 3);
5971 GEN_SPEOP_LDST(whe, 2);
5972 GEN_SPEOP_LD(whou, 2);
5973 GEN_SPEOP_LD(whos, 2);
5974 GEN_SPEOP_ST(who, 2);
5975
5976 #define _GEN_OP_SPE_STWWE(suffix) \
5977 static always_inline void gen_op_spe_stwwe_##suffix (void) \
5978 { \
5979 gen_op_srli32_T1_64(); \
5980 gen_op_spe_stwwo_##suffix(); \
5981 }
5982 #define _GEN_OP_SPE_STWWE_LE(suffix) \
5983 static always_inline void gen_op_spe_stwwe_le_##suffix (void) \
5984 { \
5985 gen_op_srli32_T1_64(); \
5986 gen_op_spe_stwwo_le_##suffix(); \
5987 }
5988 #if defined(TARGET_PPC64)
5989 #define GEN_OP_SPE_STWWE(suffix) \
5990 _GEN_OP_SPE_STWWE(suffix); \
5991 _GEN_OP_SPE_STWWE_LE(suffix); \
5992 static always_inline void gen_op_spe_stwwe_64_##suffix (void) \
5993 { \
5994 gen_op_srli32_T1_64(); \
5995 gen_op_spe_stwwo_64_##suffix(); \
5996 } \
5997 static always_inline void gen_op_spe_stwwe_le_64_##suffix (void) \
5998 { \
5999 gen_op_srli32_T1_64(); \
6000 gen_op_spe_stwwo_le_64_##suffix(); \
6001 }
6002 #else
6003 #define GEN_OP_SPE_STWWE(suffix) \
6004 _GEN_OP_SPE_STWWE(suffix); \
6005 _GEN_OP_SPE_STWWE_LE(suffix)
6006 #endif
6007 #if defined(CONFIG_USER_ONLY)
6008 GEN_OP_SPE_STWWE(raw);
6009 #else /* defined(CONFIG_USER_ONLY) */
6010 GEN_OP_SPE_STWWE(user);
6011 GEN_OP_SPE_STWWE(kernel);
6012 GEN_OP_SPE_STWWE(hypv);
6013 #endif /* defined(CONFIG_USER_ONLY) */
6014 GEN_SPEOP_ST(wwe, 2);
6015 GEN_SPEOP_ST(wwo, 2);
6016
6017 #define GEN_SPE_LDSPLAT(name, op, suffix) \
6018 static always_inline void gen_op_spe_l##name##_##suffix (void) \
6019 { \
6020 gen_op_##op##_##suffix(); \
6021 gen_op_splatw_T1_64(); \
6022 }
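/*
 * Note (added): the *splat load forms reuse the plain load micro-op and then
 * call gen_op_splatw_T1_64() to splat (replicate) the loaded value into the
 * 64-bit destination temporary.
 */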
6023
6024 #define GEN_OP_SPE_LHE(suffix) \
6025 static always_inline void gen_op_spe_lhe_##suffix (void) \
6026 { \
6027 gen_op_spe_lh_##suffix(); \
6028 gen_op_sli16_T1_64(); \
6029 }
6030
6031 #define GEN_OP_SPE_LHX(suffix) \
6032 static always_inline void gen_op_spe_lhx_##suffix (void) \
6033 { \
6034 gen_op_spe_lh_##suffix(); \
6035 gen_op_extsh_T1_64(); \
6036 }
6037
6038 #if defined(CONFIG_USER_ONLY)
6039 GEN_OP_SPE_LHE(raw);
6040 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, raw);
6041 GEN_OP_SPE_LHE(le_raw);
6042 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_raw);
6043 GEN_SPE_LDSPLAT(hhousplat, spe_lh, raw);
6044 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_raw);
6045 GEN_OP_SPE_LHX(raw);
6046 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, raw);
6047 GEN_OP_SPE_LHX(le_raw);
6048 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_raw);
6049 #if defined(TARGET_PPC64)
6050 GEN_OP_SPE_LHE(64_raw);
6051 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_raw);
6052 GEN_OP_SPE_LHE(le_64_raw);
6053 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_raw);
6054 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_raw);
6055 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_raw);
6056 GEN_OP_SPE_LHX(64_raw);
6057 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_raw);
6058 GEN_OP_SPE_LHX(le_64_raw);
6059 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_raw);
6060 #endif
6061 #else
6062 GEN_OP_SPE_LHE(user);
6063 GEN_OP_SPE_LHE(kernel);
6064 GEN_OP_SPE_LHE(hypv);
6065 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, user);
6066 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, kernel);
6067 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, hypv);
6068 GEN_OP_SPE_LHE(le_user);
6069 GEN_OP_SPE_LHE(le_kernel);
6070 GEN_OP_SPE_LHE(le_hypv);
6071 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_user);
6072 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_kernel);
6073 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_hypv);
6074 GEN_SPE_LDSPLAT(hhousplat, spe_lh, user);
6075 GEN_SPE_LDSPLAT(hhousplat, spe_lh, kernel);
6076 GEN_SPE_LDSPLAT(hhousplat, spe_lh, hypv);
6077 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_user);
6078 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_kernel);
6079 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_hypv);
6080 GEN_OP_SPE_LHX(user);
6081 GEN_OP_SPE_LHX(kernel);
6082 GEN_OP_SPE_LHX(hypv);
6083 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, user);
6084 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, kernel);
6085 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, hypv);
6086 GEN_OP_SPE_LHX(le_user);
6087 GEN_OP_SPE_LHX(le_kernel);
6088 GEN_OP_SPE_LHX(le_hypv);
6089 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_user);
6090 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_kernel);
6091 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_hypv);
6092 #if defined(TARGET_PPC64)
6093 GEN_OP_SPE_LHE(64_user);
6094 GEN_OP_SPE_LHE(64_kernel);
6095 GEN_OP_SPE_LHE(64_hypv);
6096 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_user);
6097 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_kernel);
6098 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_hypv);
6099 GEN_OP_SPE_LHE(le_64_user);
6100 GEN_OP_SPE_LHE(le_64_kernel);
6101 GEN_OP_SPE_LHE(le_64_hypv);
6102 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_user);
6103 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_kernel);
6104 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_hypv);
6105 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_user);
6106 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_kernel);
6107 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_hypv);
6108 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_user);
6109 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_kernel);
6110 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_hypv);
6111 GEN_OP_SPE_LHX(64_user);
6112 GEN_OP_SPE_LHX(64_kernel);
6113 GEN_OP_SPE_LHX(64_hypv);
6114 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_user);
6115 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_kernel);
6116 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_hypv);
6117 GEN_OP_SPE_LHX(le_64_user);
6118 GEN_OP_SPE_LHX(le_64_kernel);
6119 GEN_OP_SPE_LHX(le_64_hypv);
6120 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_user);
6121 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_kernel);
6122 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_hypv);
6123 #endif
6124 #endif
6125 GEN_SPEOP_LD(hhesplat, 1);
6126 GEN_SPEOP_LD(hhousplat, 1);
6127 GEN_SPEOP_LD(hhossplat, 1);
6128 GEN_SPEOP_LD(wwsplat, 2);
6129 GEN_SPEOP_LD(whsplat, 2);
6130
6131 GEN_SPE(evlddx, evldd, 0x00, 0x0C, 0x00000000, PPC_SPE); //
6132 GEN_SPE(evldwx, evldw, 0x01, 0x0C, 0x00000000, PPC_SPE); //
6133 GEN_SPE(evldhx, evldh, 0x02, 0x0C, 0x00000000, PPC_SPE); //
6134 GEN_SPE(evlhhesplatx, evlhhesplat, 0x04, 0x0C, 0x00000000, PPC_SPE); //
6135 GEN_SPE(evlhhousplatx, evlhhousplat, 0x06, 0x0C, 0x00000000, PPC_SPE); //
6136 GEN_SPE(evlhhossplatx, evlhhossplat, 0x07, 0x0C, 0x00000000, PPC_SPE); //
6137 GEN_SPE(evlwhex, evlwhe, 0x08, 0x0C, 0x00000000, PPC_SPE); //
6138 GEN_SPE(evlwhoux, evlwhou, 0x0A, 0x0C, 0x00000000, PPC_SPE); //
6139 GEN_SPE(evlwhosx, evlwhos, 0x0B, 0x0C, 0x00000000, PPC_SPE); //
6140 GEN_SPE(evlwwsplatx, evlwwsplat, 0x0C, 0x0C, 0x00000000, PPC_SPE); //
6141 GEN_SPE(evlwhsplatx, evlwhsplat, 0x0E, 0x0C, 0x00000000, PPC_SPE); //
6142 GEN_SPE(evstddx, evstdd, 0x10, 0x0C, 0x00000000, PPC_SPE); //
6143 GEN_SPE(evstdwx, evstdw, 0x11, 0x0C, 0x00000000, PPC_SPE); //
6144 GEN_SPE(evstdhx, evstdh, 0x12, 0x0C, 0x00000000, PPC_SPE); //
6145 GEN_SPE(evstwhex, evstwhe, 0x18, 0x0C, 0x00000000, PPC_SPE); //
6146 GEN_SPE(evstwhox, evstwho, 0x1A, 0x0C, 0x00000000, PPC_SPE); //
6147 GEN_SPE(evstwwex, evstwwe, 0x1C, 0x0C, 0x00000000, PPC_SPE); //
6148 GEN_SPE(evstwwox, evstwwo, 0x1E, 0x0C, 0x00000000, PPC_SPE); //
6149
6150 /* Multiply and add - TODO */
6151 #if 0
6152 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
6153 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
6154 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
6155 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
6156 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
6157 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
6158 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
6159 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
6160 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
6161 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
6162 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
6163 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);
6164
6165 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
6166 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
6167 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
6168 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
6169 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
6170 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
6171 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
6172 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
6173 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
6174 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
6175 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
6176 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
6177 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
6178 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);
6179
6180 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
6181 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
6182 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
6183 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
6184 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
6185 GEN_SPE(evmra, speundef, 0x07, 0x13, 0x0000F800, PPC_SPE);
6186
6187 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
6188 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
6189 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
6190 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
6191 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
6192 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
6193 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
6194 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
6195 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
6196 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
6197 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
6198 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);
6199
6200 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
6201 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
6202 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
6203 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
6204 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);
6205
6206 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
6207 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
6208 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
6209 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
6210 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
6211 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
6212 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
6213 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
6214 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
6215 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
6216 GEN_SPE(evmhigumian, evmhigsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
6217 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);
6218
6219 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
6220 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
6221 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
6222 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
6223 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
6224 #endif
6225
6226 /*** SPE floating-point extension ***/
6227 #define GEN_SPEFPUOP_CONV(name) \
6228 static always_inline void gen_##name (DisasContext *ctx) \
6229 { \
6230 gen_load_gpr64(cpu_T64[0], rB(ctx->opcode)); \
6231 gen_op_##name(); \
6232 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
6233 }
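/*
 * Note (added): every SPE floating-point conversion handler has the same
 * shape: fetch the 64-bit source from rB into cpu_T64[0], run the matching
 * gen_op_* conversion micro-op on it, and store the result to rD.
 */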
6234
6235 /* Single precision floating-point vector operations */
6236 /* Arithmetic */
6237 GEN_SPEOP_ARITH2(evfsadd);
6238 GEN_SPEOP_ARITH2(evfssub);
6239 GEN_SPEOP_ARITH2(evfsmul);
6240 GEN_SPEOP_ARITH2(evfsdiv);
6241 GEN_SPEOP_ARITH1(evfsabs);
6242 GEN_SPEOP_ARITH1(evfsnabs);
6243 GEN_SPEOP_ARITH1(evfsneg);
6244 /* Conversion */
6245 GEN_SPEFPUOP_CONV(evfscfui);
6246 GEN_SPEFPUOP_CONV(evfscfsi);
6247 GEN_SPEFPUOP_CONV(evfscfuf);
6248 GEN_SPEFPUOP_CONV(evfscfsf);
6249 GEN_SPEFPUOP_CONV(evfsctui);
6250 GEN_SPEFPUOP_CONV(evfsctsi);
6251 GEN_SPEFPUOP_CONV(evfsctuf);
6252 GEN_SPEFPUOP_CONV(evfsctsf);
6253 GEN_SPEFPUOP_CONV(evfsctuiz);
6254 GEN_SPEFPUOP_CONV(evfsctsiz);
6255 /* Comparison */
6256 GEN_SPEOP_COMP(evfscmpgt);
6257 GEN_SPEOP_COMP(evfscmplt);
6258 GEN_SPEOP_COMP(evfscmpeq);
6259 GEN_SPEOP_COMP(evfststgt);
6260 GEN_SPEOP_COMP(evfststlt);
6261 GEN_SPEOP_COMP(evfststeq);
6262
6263 /* Opcode definitions */
6264 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPEFPU); //
6265 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPEFPU); //
6266 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPEFPU); //
6267 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPEFPU); //
6268 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPEFPU); //
6269 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPEFPU); //
6270 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPEFPU); //
6271 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPEFPU); //
6272 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPEFPU); //
6273 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPEFPU); //
6274 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPEFPU); //
6275 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPEFPU); //
6276 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPEFPU); //
6277 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPEFPU); //
6278
6279 /* Single precision floating-point operations */
6280 /* Arithmetic */
6281 GEN_SPEOP_ARITH2(efsadd);
6282 GEN_SPEOP_ARITH2(efssub);
6283 GEN_SPEOP_ARITH2(efsmul);
6284 GEN_SPEOP_ARITH2(efsdiv);
6285 GEN_SPEOP_ARITH1(efsabs);
6286 GEN_SPEOP_ARITH1(efsnabs);
6287 GEN_SPEOP_ARITH1(efsneg);
6288 /* Conversion */
6289 GEN_SPEFPUOP_CONV(efscfui);
6290 GEN_SPEFPUOP_CONV(efscfsi);
6291 GEN_SPEFPUOP_CONV(efscfuf);
6292 GEN_SPEFPUOP_CONV(efscfsf);
6293 GEN_SPEFPUOP_CONV(efsctui);
6294 GEN_SPEFPUOP_CONV(efsctsi);
6295 GEN_SPEFPUOP_CONV(efsctuf);
6296 GEN_SPEFPUOP_CONV(efsctsf);
6297 GEN_SPEFPUOP_CONV(efsctuiz);
6298 GEN_SPEFPUOP_CONV(efsctsiz);
6299 GEN_SPEFPUOP_CONV(efscfd);
6300 /* Comparison */
6301 GEN_SPEOP_COMP(efscmpgt);
6302 GEN_SPEOP_COMP(efscmplt);
6303 GEN_SPEOP_COMP(efscmpeq);
6304 GEN_SPEOP_COMP(efststgt);
6305 GEN_SPEOP_COMP(efststlt);
6306 GEN_SPEOP_COMP(efststeq);
6307
6308 /* Opcode definitions */
6309 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPEFPU); //
6310 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPEFPU); //
6311 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPEFPU); //
6312 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPEFPU); //
6313 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPEFPU); //
6314 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPEFPU); //
6315 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPEFPU); //
6316 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPEFPU); //
6317 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPEFPU); //
6318 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPEFPU); //
6319 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPEFPU); //
6320 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPEFPU); //
6321 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPEFPU); //
6322 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPEFPU); //
6323
6324 /* Double precision floating-point operations */
6325 /* Arithmetic */
6326 GEN_SPEOP_ARITH2(efdadd);
6327 GEN_SPEOP_ARITH2(efdsub);
6328 GEN_SPEOP_ARITH2(efdmul);
6329 GEN_SPEOP_ARITH2(efddiv);
6330 GEN_SPEOP_ARITH1(efdabs);
6331 GEN_SPEOP_ARITH1(efdnabs);
6332 GEN_SPEOP_ARITH1(efdneg);
6333 /* Conversion */
6334
6335 GEN_SPEFPUOP_CONV(efdcfui);
6336 GEN_SPEFPUOP_CONV(efdcfsi);
6337 GEN_SPEFPUOP_CONV(efdcfuf);
6338 GEN_SPEFPUOP_CONV(efdcfsf);
6339 GEN_SPEFPUOP_CONV(efdctui);
6340 GEN_SPEFPUOP_CONV(efdctsi);
6341 GEN_SPEFPUOP_CONV(efdctuf);
6342 GEN_SPEFPUOP_CONV(efdctsf);
6343 GEN_SPEFPUOP_CONV(efdctuiz);
6344 GEN_SPEFPUOP_CONV(efdctsiz);
6345 GEN_SPEFPUOP_CONV(efdcfs);
6346 GEN_SPEFPUOP_CONV(efdcfuid);
6347 GEN_SPEFPUOP_CONV(efdcfsid);
6348 GEN_SPEFPUOP_CONV(efdctuidz);
6349 GEN_SPEFPUOP_CONV(efdctsidz);
6350 /* Comparison */
6351 GEN_SPEOP_COMP(efdcmpgt);
6352 GEN_SPEOP_COMP(efdcmplt);
6353 GEN_SPEOP_COMP(efdcmpeq);
6354 GEN_SPEOP_COMP(efdtstgt);
6355 GEN_SPEOP_COMP(efdtstlt);
6356 GEN_SPEOP_COMP(efdtsteq);
6357
6358 /* Opcode definitions */
6359 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPEFPU); //
6360 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPEFPU); //
6361 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPEFPU); //
6362 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPEFPU); //
6363 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPEFPU); //
6364 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPEFPU); //
6365 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPEFPU); //
6366 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPEFPU); //
6367 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPEFPU); //
6368 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPEFPU); //
6369 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPEFPU); //
6370 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPEFPU); //
6371 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPEFPU); //
6372 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPEFPU); //
6373 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPEFPU); //
6374 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPEFPU); //
6375
6376 /* End opcode list */
6377 GEN_OPCODE_MARK(end);
6378
6379 #include "translate_init.c"
6380 #include "helper_regs.h"
6381
6382 /*****************************************************************************/
6383 /* Misc PowerPC helpers */
6384 void cpu_dump_state (CPUState *env, FILE *f,
6385 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
6386 int flags)
6387 {
6388 #define RGPL 4
6389 #define RFPL 4
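/* Note (added): RGPL/RFPL are the number of GPRs/FPRs printed per line. */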
6390
6391 int i;
6392
6393 cpu_fprintf(f, "NIP " ADDRX " LR " ADDRX " CTR " ADDRX " XER %08x\n",
6394 env->nip, env->lr, env->ctr, hreg_load_xer(env));
6395 cpu_fprintf(f, "MSR " ADDRX " HID0 " ADDRX " HF " ADDRX " idx %d\n",
6396 env->msr, env->spr[SPR_HID0], env->hflags, env->mmu_idx);
6397 #if !defined(NO_TIMER_DUMP)
6398 cpu_fprintf(f, "TB %08x %08x "
6399 #if !defined(CONFIG_USER_ONLY)
6400 "DECR %08x"
6401 #endif
6402 "\n",
6403 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
6404 #if !defined(CONFIG_USER_ONLY)
6405 , cpu_ppc_load_decr(env)
6406 #endif
6407 );
6408 #endif
6409 for (i = 0; i < 32; i++) {
6410 if ((i & (RGPL - 1)) == 0)
6411 cpu_fprintf(f, "GPR%02d", i);
6412 cpu_fprintf(f, " " REGX, ppc_dump_gpr(env, i));
6413 if ((i & (RGPL - 1)) == (RGPL - 1))
6414 cpu_fprintf(f, "\n");
6415 }
6416 cpu_fprintf(f, "CR ");
6417 for (i = 0; i < 8; i++)
6418 cpu_fprintf(f, "%01x", env->crf[i]);
6419 cpu_fprintf(f, " [");
6420 for (i = 0; i < 8; i++) {
6421 char a = '-';
6422 if (env->crf[i] & 0x08)
6423 a = 'L';
6424 else if (env->crf[i] & 0x04)
6425 a = 'G';
6426 else if (env->crf[i] & 0x02)
6427 a = 'E';
6428 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
6429 }
6430 cpu_fprintf(f, " ] RES " ADDRX "\n", env->reserve);
6431 for (i = 0; i < 32; i++) {
6432 if ((i & (RFPL - 1)) == 0)
6433 cpu_fprintf(f, "FPR%02d", i);
6434 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
6435 if ((i & (RFPL - 1)) == (RFPL - 1))
6436 cpu_fprintf(f, "\n");
6437 }
6438 #if !defined(CONFIG_USER_ONLY)
6439 cpu_fprintf(f, "SRR0 " ADDRX " SRR1 " ADDRX " SDR1 " ADDRX "\n",
6440 env->spr[SPR_SRR0], env->spr[SPR_SRR1], env->sdr1);
6441 #endif
6442
6443 #undef RGPL
6444 #undef RFPL
6445 }
6446
6447 void cpu_dump_statistics (CPUState *env, FILE *f,
6448 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
6449 int flags)
6450 {
6451 #if defined(DO_PPC_STATISTICS)
6452 opc_handler_t **t1, **t2, **t3, *handler;
6453 int op1, op2, op3;
6454
6455 t1 = env->opcodes;
6456 for (op1 = 0; op1 < 64; op1++) {
6457 handler = t1[op1];
6458 if (is_indirect_opcode(handler)) {
6459 t2 = ind_table(handler);
6460 for (op2 = 0; op2 < 32; op2++) {
6461 handler = t2[op2];
6462 if (is_indirect_opcode(handler)) {
6463 t3 = ind_table(handler);
6464 for (op3 = 0; op3 < 32; op3++) {
6465 handler = t3[op3];
6466 if (handler->count == 0)
6467 continue;
6468 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
6469 "%016llx %lld\n",
6470 op1, op2, op3, op1, (op3 << 5) | op2,
6471 handler->oname,
6472 handler->count, handler->count);
6473 }
6474 } else {
6475 if (handler->count == 0)
6476 continue;
6477 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
6478 "%016llx %lld\n",
6479 op1, op2, op1, op2, handler->oname,
6480 handler->count, handler->count);
6481 }
6482 }
6483 } else {
6484 if (handler->count == 0)
6485 continue;
6486 cpu_fprintf(f, "%02x (%02x ) %16s: %016llx %lld\n",
6487 op1, op1, handler->oname,
6488 handler->count, handler->count);
6489 }
6490 }
6491 #endif
6492 }
6493
6494 /*****************************************************************************/
6495 static always_inline void gen_intermediate_code_internal (CPUState *env,
6496 TranslationBlock *tb,
6497 int search_pc)
6498 {
6499 DisasContext ctx, *ctxp = &ctx;
6500 opc_handler_t **table, *handler;
6501 target_ulong pc_start;
6502 uint16_t *gen_opc_end;
6503 int supervisor, little_endian;
6504 int j, lj = -1;
6505 int num_insns;
6506 int max_insns;
6507
6508 pc_start = tb->pc;
6509 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6510 #if defined(OPTIMIZE_FPRF_UPDATE)
6511 gen_fprf_ptr = gen_fprf_buf;
6512 #endif
6513 ctx.nip = pc_start;
6514 ctx.tb = tb;
6515 ctx.exception = POWERPC_EXCP_NONE;
6516 ctx.spr_cb = env->spr_cb;
6517 supervisor = env->mmu_idx;
6518 #if !defined(CONFIG_USER_ONLY)
6519 ctx.supervisor = supervisor;
6520 #endif
6521 little_endian = env->hflags & (1 << MSR_LE) ? 1 : 0;
6522 #if defined(TARGET_PPC64)
6523 ctx.sf_mode = msr_sf;
6524 ctx.mem_idx = (supervisor << 2) | (msr_sf << 1) | little_endian;
6525 #else
6526 ctx.mem_idx = (supervisor << 1) | little_endian;
6527 #endif
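/*
 * Note (added): mem_idx packs the access-mode bits used to index the
 * per-mode micro-op tables: (supervisor << 2) | (sf << 1) | little_endian
 * on 64-bit targets, (supervisor << 1) | little_endian otherwise; e.g. on a
 * 32-bit target, mmu_idx 0 with big-endian code gives mem_idx 0.
 */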
6528 ctx.dcache_line_size = env->dcache_line_size;
6529 ctx.fpu_enabled = msr_fp;
6530 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
6531 ctx.spe_enabled = msr_spe;
6532 else
6533 ctx.spe_enabled = 0;
6534 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
6535 ctx.altivec_enabled = msr_vr;
6536 else
6537 ctx.altivec_enabled = 0;
6538 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
6539 ctx.singlestep_enabled = CPU_SINGLE_STEP;
6540 else
6541 ctx.singlestep_enabled = 0;
6542 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
6543 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
6544 if (unlikely(env->singlestep_enabled))
6545 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
6546 #if defined (DO_SINGLE_STEP) && 0
6547 /* Single step trace mode */
6548 msr_se = 1;
6549 #endif
6550 num_insns = 0;
6551 max_insns = tb->cflags & CF_COUNT_MASK;
6552 if (max_insns == 0)
6553 max_insns = CF_COUNT_MASK;
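/*
 * Note (added): max_insns bounds the number of guest instructions in this
 * TB; the icount budget is carried in tb->cflags under CF_COUNT_MASK, and a
 * value of zero means "no explicit limit", hence the CF_COUNT_MASK fallback.
 */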
6554
6555 gen_icount_start();
6556 /* Set env in case of segfault during code fetch */
6557 while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
6558 if (unlikely(env->nb_breakpoints > 0)) {
6559 for (j = 0; j < env->nb_breakpoints; j++) {
6560 if (env->breakpoints[j] == ctx.nip) {
6561 gen_update_nip(&ctx, ctx.nip);
6562 gen_op_debug();
6563 break;
6564 }
6565 }
6566 }
6567 if (unlikely(search_pc)) {
6568 j = gen_opc_ptr - gen_opc_buf;
6569 if (lj < j) {
6570 lj++;
6571 while (lj < j)
6572 gen_opc_instr_start[lj++] = 0;
6573 gen_opc_pc[lj] = ctx.nip;
6574 gen_opc_instr_start[lj] = 1;
6575 gen_opc_icount[lj] = num_insns;
6576 }
6577 }
6578 #if defined PPC_DEBUG_DISAS
6579 if (loglevel & CPU_LOG_TB_IN_ASM) {
6580 fprintf(logfile, "----------------\n");
6581 fprintf(logfile, "nip=" ADDRX " super=%d ir=%d\n",
6582 ctx.nip, supervisor, (int)msr_ir);
6583 }
6584 #endif
6585 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
6586 gen_io_start();
6587 if (unlikely(little_endian)) {
6588 ctx.opcode = bswap32(ldl_code(ctx.nip));
6589 } else {
6590 ctx.opcode = ldl_code(ctx.nip);
6591 }
6592 #if defined PPC_DEBUG_DISAS
6593 if (loglevel & CPU_LOG_TB_IN_ASM) {
6594 fprintf(logfile, "translate opcode %08x (%02x %02x %02x) (%s)\n",
6595 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
6596 opc3(ctx.opcode), little_endian ? "little" : "big");
6597 }
6598 #endif
6599 ctx.nip += 4;
6600 table = env->opcodes;
6601 num_insns++;
6602 handler = table[opc1(ctx.opcode)];
6603 if (is_indirect_opcode(handler)) {
6604 table = ind_table(handler);
6605 handler = table[opc2(ctx.opcode)];
6606 if (is_indirect_opcode(handler)) {
6607 table = ind_table(handler);
6608 handler = table[opc3(ctx.opcode)];
6609 }
6610 }
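/*
 * Note (added): opcodes are decoded through up to three table levels
 * (opc1, then opc2, then opc3); intermediate entries are "indirect"
 * tables while leaf entries hold the actual handler.
 */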
6611 /* Is opcode *REALLY* valid ? */
6612 if (unlikely(handler->handler == &gen_invalid)) {
6613 if (loglevel != 0) {
6614 fprintf(logfile, "invalid/unsupported opcode: "
6615 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
6616 opc1(ctx.opcode), opc2(ctx.opcode),
6617 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
6618 } else {
6619 printf("invalid/unsupported opcode: "
6620 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
6621 opc1(ctx.opcode), opc2(ctx.opcode),
6622 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
6623 }
6624 } else {
6625 if (unlikely((ctx.opcode & handler->inval) != 0)) {
6626 if (loglevel != 0) {
6627 fprintf(logfile, "invalid bits: %08x for opcode: "
6628 "%02x - %02x - %02x (%08x) " ADDRX "\n",
6629 ctx.opcode & handler->inval, opc1(ctx.opcode),
6630 opc2(ctx.opcode), opc3(ctx.opcode),
6631 ctx.opcode, ctx.nip - 4);
6632 } else {
6633 printf("invalid bits: %08x for opcode: "
6634 "%02x - %02x - %02x (%08x) " ADDRX "\n",
6635 ctx.opcode & handler->inval, opc1(ctx.opcode),
6636 opc2(ctx.opcode), opc3(ctx.opcode),
6637 ctx.opcode, ctx.nip - 4);
6638 }
6639 GEN_EXCP_INVAL(ctxp);
6640 break;
6641 }
6642 }
6643 (*(handler->handler))(&ctx);
6644 #if defined(DO_PPC_STATISTICS)
6645 handler->count++;
6646 #endif
6647 /* Check trace mode exceptions */
6648 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
6649 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
6650 ctx.exception != POWERPC_SYSCALL &&
6651 ctx.exception != POWERPC_EXCP_TRAP &&
6652 ctx.exception != POWERPC_EXCP_BRANCH)) {
6653 GEN_EXCP(ctxp, POWERPC_EXCP_TRACE, 0);
6654 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
6655 (env->singlestep_enabled) ||
6656 num_insns >= max_insns)) {
6657 /* if we reach a page boundary or are single stepping, stop
6658 * generation
6659 */
6660 break;
6661 }
6662 #if defined (DO_SINGLE_STEP)
6663 break;
6664 #endif
6665 }
6666 if (tb->cflags & CF_LAST_IO)
6667 gen_io_end();
6668 if (ctx.exception == POWERPC_EXCP_NONE) {
6669 gen_goto_tb(&ctx, 0, ctx.nip);
6670 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
6671 if (unlikely(env->singlestep_enabled)) {
6672 gen_update_nip(&ctx, ctx.nip);
6673 gen_op_debug();
6674 }
6675 /* Generate the return instruction */
6676 tcg_gen_exit_tb(0);
6677 }
6678 gen_icount_end(tb, num_insns);
6679 *gen_opc_ptr = INDEX_op_end;
6680 if (unlikely(search_pc)) {
6681 j = gen_opc_ptr - gen_opc_buf;
6682 lj++;
6683 while (lj <= j)
6684 gen_opc_instr_start[lj++] = 0;
6685 } else {
6686 tb->size = ctx.nip - pc_start;
6687 tb->icount = num_insns;
6688 }
6689 #if defined(DEBUG_DISAS)
6690 if (loglevel & CPU_LOG_TB_CPU) {
6691 fprintf(logfile, "---------------- excp: %04x\n", ctx.exception);
6692 cpu_dump_state(env, logfile, fprintf, 0);
6693 }
6694 if (loglevel & CPU_LOG_TB_IN_ASM) {
6695 int flags;
6696 flags = env->bfd_mach;
6697 flags |= little_endian << 16;
6698 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6699 target_disas(logfile, pc_start, ctx.nip - pc_start, flags);
6700 fprintf(logfile, "\n");
6701 }
6702 #endif
6703 }
6704
6705 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
6706 {
6707 gen_intermediate_code_internal(env, tb, 0);
6708 }
6709
6710 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
6711 {
6712 gen_intermediate_code_internal(env, tb, 1);
6713 }
6714
6715 void gen_pc_load(CPUState *env, TranslationBlock *tb,
6716 unsigned long searched_pc, int pc_pos, void *puc)
6717 {
6718 int type, c;
6719 /* for PPC, we need to look at the micro operation to get the
6720 * access type */
6721 env->nip = gen_opc_pc[pc_pos];
6722 c = gen_opc_buf[pc_pos];
6723 switch(c) {
6724 #if defined(CONFIG_USER_ONLY)
6725 #define CASE3(op)\
6726 case INDEX_op_ ## op ## _raw
6727 #else
6728 #define CASE3(op)\
6729 case INDEX_op_ ## op ## _user:\
6730 case INDEX_op_ ## op ## _kernel:\
6731 case INDEX_op_ ## op ## _hypv
6732 #endif
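/*
 * Note (added): CASE3 expands one logical micro-op name into its
 * per-access-mode case labels: only the _raw variant for user-only builds,
 * or the _user/_kernel/_hypv variants for full-system builds.
 */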
6733
6734 CASE3(stfd):
6735 CASE3(stfs):
6736 CASE3(lfd):
6737 CASE3(lfs):
6738 type = ACCESS_FLOAT;
6739 break;
6740 CASE3(lwarx):
6741 type = ACCESS_RES;
6742 break;
6743 CASE3(stwcx):
6744 type = ACCESS_RES;
6745 break;
6746 CASE3(eciwx):
6747 CASE3(ecowx):
6748 type = ACCESS_EXT;
6749 break;
6750 default:
6751 type = ACCESS_INT;
6752 break;
6753 }
6754 env->access_type = type;
6755 }