git.proxmox.com Git: qemu.git / target-ppc / translate.c
Commit: ppc: Convert FPR moves to TCG
1 /*
2 * PowerPC emulation for qemu: main translation routines.
3 *
4 * Copyright (c) 2003-2007 Jocelyn Mayer
5 *
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
10 *
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
15 *
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 */
20 #include <stdarg.h>
21 #include <stdlib.h>
22 #include <stdio.h>
23 #include <string.h>
24 #include <inttypes.h>
25
26 #include "cpu.h"
27 #include "exec-all.h"
28 #include "disas.h"
29 #include "helper.h"
30 #include "tcg-op.h"
31 #include "qemu-common.h"
32
33 #define CPU_SINGLE_STEP 0x1
34 #define CPU_BRANCH_STEP 0x2
35 #define GDBSTUB_SINGLE_STEP 0x4
36
37 /* Include definitions for instruction classes and implementation flags */
38 //#define DO_SINGLE_STEP
39 //#define PPC_DEBUG_DISAS
40 //#define DEBUG_MEMORY_ACCESSES
41 //#define DO_PPC_STATISTICS
42 //#define OPTIMIZE_FPRF_UPDATE
43
44 /*****************************************************************************/
45 /* Code translation helpers */
46
47 /* global register indexes */
48 static TCGv cpu_env;
49 static char cpu_reg_names[10*3 + 22*4 /* GPR */
50 #if !defined(TARGET_PPC64)
51 + 10*4 + 22*5 /* SPE GPRh */
52 #endif
53 + 10*4 + 22*5 /* FPR */
54 + 2*(10*6 + 22*7) /* AVRh, AVRl */];
55 static TCGv cpu_gpr[32];
56 #if !defined(TARGET_PPC64)
57 static TCGv cpu_gprh[32];
58 #endif
59 static TCGv cpu_fpr[32];
60 static TCGv cpu_avrh[32], cpu_avrl[32];
61
62 /* dyngen register indexes */
63 static TCGv cpu_T[3];
64 #if defined(TARGET_PPC64)
65 #define cpu_T64 cpu_T
66 #else
67 static TCGv cpu_T64[3];
68 #endif
69 static TCGv cpu_FT[3];
70 static TCGv cpu_AVRh[3], cpu_AVRl[3];
71
72 #include "gen-icount.h"
73
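/* ppc_translate_init() runs once and binds the translator's fixed TCG
 * globals: "env" for the CPU state pointer, the dyngen-style temporaries
 * T0-T2 (mapped to fixed host registers or to CPUState slots depending on
 * the host word size), their 64-bit shadows on 32-bit targets, FT0-FT2 for
 * floating point, the AVR0-AVR2 halves for Altivec, and one named global
 * per architected GPR/FPR/AVR backed by the corresponding CPUState field. */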
74 void ppc_translate_init(void)
75 {
76 int i;
77 char* p;
78 static int done_init = 0;
79
80 if (done_init)
81 return;
82
83 cpu_env = tcg_global_reg_new(TCG_TYPE_PTR, TCG_AREG0, "env");
84 #if TARGET_LONG_BITS > HOST_LONG_BITS
85 cpu_T[0] = tcg_global_mem_new(TCG_TYPE_TL,
86 TCG_AREG0, offsetof(CPUState, t0), "T0");
87 cpu_T[1] = tcg_global_mem_new(TCG_TYPE_TL,
88 TCG_AREG0, offsetof(CPUState, t1), "T1");
89 cpu_T[2] = tcg_global_mem_new(TCG_TYPE_TL,
90 TCG_AREG0, offsetof(CPUState, t2), "T2");
91 #else
92 cpu_T[0] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG1, "T0");
93 cpu_T[1] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG2, "T1");
94 cpu_T[2] = tcg_global_reg_new(TCG_TYPE_TL, TCG_AREG3, "T2");
95 #endif
96 #if !defined(TARGET_PPC64)
97 cpu_T64[0] = tcg_global_mem_new(TCG_TYPE_I64,
98 TCG_AREG0, offsetof(CPUState, t0_64),
99 "T0_64");
100 cpu_T64[1] = tcg_global_mem_new(TCG_TYPE_I64,
101 TCG_AREG0, offsetof(CPUState, t1_64),
102 "T1_64");
103 cpu_T64[2] = tcg_global_mem_new(TCG_TYPE_I64,
104 TCG_AREG0, offsetof(CPUState, t2_64),
105 "T2_64");
106 #endif
107
108 cpu_FT[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
109 offsetof(CPUState, ft0), "FT0");
110 cpu_FT[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
111 offsetof(CPUState, ft1), "FT1");
112 cpu_FT[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
113 offsetof(CPUState, ft2), "FT2");
114
115 cpu_AVRh[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
116 offsetof(CPUState, avr0.u64[0]), "AVR0H");
117 cpu_AVRl[0] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
118 offsetof(CPUState, avr0.u64[1]), "AVR0L");
119 cpu_AVRh[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
120 offsetof(CPUState, avr1.u64[0]), "AVR1H");
121 cpu_AVRl[1] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
122 offsetof(CPUState, avr1.u64[1]), "AVR1L");
123 cpu_AVRh[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
124 offsetof(CPUState, avr2.u64[0]), "AVR2H");
125 cpu_AVRl[2] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
126 offsetof(CPUState, avr2.u64[1]), "AVR2L");
127
128 p = cpu_reg_names;
129 for (i = 0; i < 32; i++) {
130 sprintf(p, "r%d", i);
131 cpu_gpr[i] = tcg_global_mem_new(TCG_TYPE_TL, TCG_AREG0,
132 offsetof(CPUState, gpr[i]), p);
133 p += (i < 10) ? 3 : 4;
134 #if !defined(TARGET_PPC64)
135 sprintf(p, "r%dH", i);
136 cpu_gprh[i] = tcg_global_mem_new(TCG_TYPE_I32, TCG_AREG0,
137 offsetof(CPUState, gprh[i]), p);
138 p += (i < 10) ? 4 : 5;
139 #endif
140
141 sprintf(p, "fp%d", i);
142 cpu_fpr[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
143 offsetof(CPUState, fpr[i]), p);
144
145 sprintf(p, "avr%dH", i);
146 cpu_avrh[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
147 offsetof(CPUState, avr[i].u64[0]), p);
148 p += (i < 10) ? 6 : 7;
149 sprintf(p, "avr%dL", i);
150 cpu_avrl[i] = tcg_global_mem_new(TCG_TYPE_I64, TCG_AREG0,
151 offsetof(CPUState, avr[i].u64[1]), p);
152 p += (i < 10) ? 6 : 7;
153 }
154
155 /* register helpers */
156 #undef DEF_HELPER
157 #define DEF_HELPER(ret, name, params) tcg_register_helper(name, #name);
158 #include "helper.h"
159
160 done_init = 1;
161 }
162
163 #if defined(OPTIMIZE_FPRF_UPDATE)
164 static uint16_t *gen_fprf_buf[OPC_BUF_SIZE];
165 static uint16_t **gen_fprf_ptr;
166 #endif
167
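/* GEN8/GEN16/GEN32 wrap families of numbered dyngen micro-ops behind a
 * small dispatch table indexed at translation time.  For illustration,
 * GEN8(gen_op_load_crf_T0, gen_op_load_crf_T0_crf) expands roughly to:
 *
 *   static GenOpFunc *gen_op_load_crf_T0_crf_table[8] = {
 *       gen_op_load_crf_T0_crf0, ... , gen_op_load_crf_T0_crf7,
 *   };
 *   static always_inline void gen_op_load_crf_T0 (int n)
 *   {
 *       gen_op_load_crf_T0_crf_table[n]();
 *   }
 */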
168 #define GEN8(func, NAME) \
169 static GenOpFunc *NAME ## _table [8] = { \
170 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
171 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
172 }; \
173 static always_inline void func (int n) \
174 { \
175 NAME ## _table[n](); \
176 }
177
178 #define GEN16(func, NAME) \
179 static GenOpFunc *NAME ## _table [16] = { \
180 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
181 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
182 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
183 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
184 }; \
185 static always_inline void func (int n) \
186 { \
187 NAME ## _table[n](); \
188 }
189
190 #define GEN32(func, NAME) \
191 static GenOpFunc *NAME ## _table [32] = { \
192 NAME ## 0, NAME ## 1, NAME ## 2, NAME ## 3, \
193 NAME ## 4, NAME ## 5, NAME ## 6, NAME ## 7, \
194 NAME ## 8, NAME ## 9, NAME ## 10, NAME ## 11, \
195 NAME ## 12, NAME ## 13, NAME ## 14, NAME ## 15, \
196 NAME ## 16, NAME ## 17, NAME ## 18, NAME ## 19, \
197 NAME ## 20, NAME ## 21, NAME ## 22, NAME ## 23, \
198 NAME ## 24, NAME ## 25, NAME ## 26, NAME ## 27, \
199 NAME ## 28, NAME ## 29, NAME ## 30, NAME ## 31, \
200 }; \
201 static always_inline void func (int n) \
202 { \
203 NAME ## _table[n](); \
204 }
205
206 /* Condition register moves */
207 GEN8(gen_op_load_crf_T0, gen_op_load_crf_T0_crf);
208 GEN8(gen_op_load_crf_T1, gen_op_load_crf_T1_crf);
209 GEN8(gen_op_store_T0_crf, gen_op_store_T0_crf_crf);
210 #if 0 // Unused
211 GEN8(gen_op_store_T1_crf, gen_op_store_T1_crf_crf);
212 #endif
213
214 /* internal defines */
215 typedef struct DisasContext {
216 struct TranslationBlock *tb;
217 target_ulong nip;
218 uint32_t opcode;
219 uint32_t exception;
220 /* Routine used to access memory */
221 int mem_idx;
222 /* Translation flags */
223 #if !defined(CONFIG_USER_ONLY)
224 int supervisor;
225 #endif
226 #if defined(TARGET_PPC64)
227 int sf_mode;
228 #endif
229 int fpu_enabled;
230 int altivec_enabled;
231 int spe_enabled;
232 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
233 int singlestep_enabled;
234 int dcache_line_size;
235 } DisasContext;
236
237 struct opc_handler_t {
238 /* invalid bits */
239 uint32_t inval;
240 /* instruction type */
241 uint64_t type;
242 /* handler */
243 void (*handler)(DisasContext *ctx);
244 #if defined(DO_PPC_STATISTICS) || defined(PPC_DUMP_CPU)
245 const unsigned char *oname;
246 #endif
247 #if defined(DO_PPC_STATISTICS)
248 uint64_t count;
249 #endif
250 };
251
252 static always_inline void gen_set_Rc0 (DisasContext *ctx)
253 {
254 #if defined(TARGET_PPC64)
255 if (ctx->sf_mode)
256 gen_op_cmpi_64(0);
257 else
258 #endif
259 gen_op_cmpi(0);
260 gen_op_set_Rc0();
261 }
262
263 static always_inline void gen_reset_fpstatus (void)
264 {
265 #ifdef CONFIG_SOFTFLOAT
266 gen_op_reset_fpstatus();
267 #endif
268 }
269
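/* gen_compute_fprf() emits the FPSCR[FPRF] update after a floating-point
 * operation: set_fprf selects the full FPRF computation plus the deferred
 * FP status check, while set_rc (driven by the instruction's Rc bit)
 * stores the computed condition bits into CR field 1.  Even when set_fprf
 * is 0, the FPCC part is still computed whenever Rc is set. */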
270 static always_inline void gen_compute_fprf (int set_fprf, int set_rc)
271 {
272 if (set_fprf != 0) {
273 /* This case might be optimized later */
274 #if defined(OPTIMIZE_FPRF_UPDATE)
275 *gen_fprf_ptr++ = gen_opc_ptr;
276 #endif
277 gen_op_compute_fprf(1);
278 if (unlikely(set_rc))
279 gen_op_store_T0_crf(1);
280 gen_op_float_check_status();
281 } else if (unlikely(set_rc)) {
282 /* We always need to compute fpcc */
283 gen_op_compute_fprf(0);
284 gen_op_store_T0_crf(1);
285 if (set_fprf)
286 gen_op_float_check_status();
287 }
288 }
289
290 static always_inline void gen_optimize_fprf (void)
291 {
292 #if defined(OPTIMIZE_FPRF_UPDATE)
293 uint16_t **ptr;
294
295 for (ptr = gen_fprf_buf; ptr != (gen_fprf_ptr - 1); ptr++)
296 *ptr = INDEX_op_nop1;
297 gen_fprf_ptr = gen_fprf_buf;
298 #endif
299 }
300
301 static always_inline void gen_update_nip (DisasContext *ctx, target_ulong nip)
302 {
303 #if defined(TARGET_PPC64)
304 if (ctx->sf_mode)
305 gen_op_update_nip_64(nip >> 32, nip);
306 else
307 #endif
308 gen_op_update_nip(nip);
309 }
310
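/* The GEN_EXCP* helpers below first synchronize the guest nip (unless an
 * exception is already pending for this instruction) and then emit the
 * call that raises the exception, recording it in ctx->exception so that
 * translation of the current block can stop at this point. */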
311 #define GEN_EXCP(ctx, excp, error) \
312 do { \
313 if ((ctx)->exception == POWERPC_EXCP_NONE) { \
314 gen_update_nip(ctx, (ctx)->nip); \
315 } \
316 gen_op_raise_exception_err((excp), (error)); \
317 ctx->exception = (excp); \
318 } while (0)
319
320 #define GEN_EXCP_INVAL(ctx) \
321 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
322 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_INVAL)
323
324 #define GEN_EXCP_PRIVOPC(ctx) \
325 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
326 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_OPC)
327
328 #define GEN_EXCP_PRIVREG(ctx) \
329 GEN_EXCP((ctx), POWERPC_EXCP_PROGRAM, \
330 POWERPC_EXCP_INVAL | POWERPC_EXCP_PRIV_REG)
331
332 #define GEN_EXCP_NO_FP(ctx) \
333 GEN_EXCP(ctx, POWERPC_EXCP_FPU, 0)
334
335 #define GEN_EXCP_NO_AP(ctx) \
336 GEN_EXCP(ctx, POWERPC_EXCP_APU, 0)
337
338 #define GEN_EXCP_NO_VR(ctx) \
339 GEN_EXCP(ctx, POWERPC_EXCP_VPU, 0)
340
341 /* Stop translation */
342 static always_inline void GEN_STOP (DisasContext *ctx)
343 {
344 gen_update_nip(ctx, ctx->nip);
345 ctx->exception = POWERPC_EXCP_STOP;
346 }
347
348 /* No need to update nip here, as execution flow will change */
349 static always_inline void GEN_SYNC (DisasContext *ctx)
350 {
351 ctx->exception = POWERPC_EXCP_SYNC;
352 }
353
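/* GEN_HANDLER(name, ...) both declares the gen_name() translation routine
 * and, through GEN_OPCODE, emits the matching opcode_t descriptor into the
 * opcode table section; GEN_HANDLER2 does the same but lets the listed
 * opcode name differ from the C identifier (e.g. "addic." vs addic_). */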
354 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
355 static void gen_##name (DisasContext *ctx); \
356 GEN_OPCODE(name, opc1, opc2, opc3, inval, type); \
357 static void gen_##name (DisasContext *ctx)
358
359 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
360 static void gen_##name (DisasContext *ctx); \
361 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type); \
362 static void gen_##name (DisasContext *ctx)
363
364 typedef struct opcode_t {
365 unsigned char opc1, opc2, opc3;
366 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
367 unsigned char pad[5];
368 #else
369 unsigned char pad[1];
370 #endif
371 opc_handler_t handler;
372 const unsigned char *oname;
373 } opcode_t;
374
375 /*****************************************************************************/
376 /*** Instruction decoding ***/
377 #define EXTRACT_HELPER(name, shift, nb) \
378 static always_inline uint32_t name (uint32_t opcode) \
379 { \
380 return (opcode >> (shift)) & ((1 << (nb)) - 1); \
381 }
382
383 #define EXTRACT_SHELPER(name, shift, nb) \
384 static always_inline int32_t name (uint32_t opcode) \
385 { \
386 return (int16_t)((opcode >> (shift)) & ((1 << (nb)) - 1)); \
387 }
388
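/* Each helper below extracts one instruction field as (opcode >> shift)
 * masked to nb bits.  For illustration, EXTRACT_HELPER(rA, 16, 5) expands
 * roughly to:
 *
 *   static always_inline uint32_t rA (uint32_t opcode)
 *   {
 *       return (opcode >> 16) & 0x1F;
 *   }
 */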
389 /* Opcode part 1 */
390 EXTRACT_HELPER(opc1, 26, 6);
391 /* Opcode part 2 */
392 EXTRACT_HELPER(opc2, 1, 5);
393 /* Opcode part 3 */
394 EXTRACT_HELPER(opc3, 6, 5);
395 /* Update Cr0 flags */
396 EXTRACT_HELPER(Rc, 0, 1);
397 /* Destination */
398 EXTRACT_HELPER(rD, 21, 5);
399 /* Source */
400 EXTRACT_HELPER(rS, 21, 5);
401 /* First operand */
402 EXTRACT_HELPER(rA, 16, 5);
403 /* Second operand */
404 EXTRACT_HELPER(rB, 11, 5);
405 /* Third operand */
406 EXTRACT_HELPER(rC, 6, 5);
407 /*** Get CRn ***/
408 EXTRACT_HELPER(crfD, 23, 3);
409 EXTRACT_HELPER(crfS, 18, 3);
410 EXTRACT_HELPER(crbD, 21, 5);
411 EXTRACT_HELPER(crbA, 16, 5);
412 EXTRACT_HELPER(crbB, 11, 5);
413 /* SPR / TBL */
414 EXTRACT_HELPER(_SPR, 11, 10);
415 static always_inline uint32_t SPR (uint32_t opcode)
416 {
417 uint32_t sprn = _SPR(opcode);
418
419 return ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
420 }
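/* The 10-bit SPR field is encoded in the instruction with its two 5-bit
 * halves swapped; SPR() swaps them back to recover the architected SPR
 * number used by the mfspr/mtspr handlers. */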
421 /*** Get constants ***/
422 EXTRACT_HELPER(IMM, 12, 8);
423 /* 16-bit signed immediate value */
424 EXTRACT_SHELPER(SIMM, 0, 16);
425 /* 16-bit unsigned immediate value */
426 EXTRACT_HELPER(UIMM, 0, 16);
427 /* Bit count */
428 EXTRACT_HELPER(NB, 11, 5);
429 /* Shift count */
430 EXTRACT_HELPER(SH, 11, 5);
431 /* Mask start */
432 EXTRACT_HELPER(MB, 6, 5);
433 /* Mask end */
434 EXTRACT_HELPER(ME, 1, 5);
435 /* Trap operand */
436 EXTRACT_HELPER(TO, 21, 5);
437
438 EXTRACT_HELPER(CRM, 12, 8);
439 EXTRACT_HELPER(FM, 17, 8);
440 EXTRACT_HELPER(SR, 16, 4);
441 EXTRACT_HELPER(FPIMM, 12, 4);
442
443 /*** Jump target decoding ***/
444 /* Displacement */
445 EXTRACT_SHELPER(d, 0, 16);
446 /* Immediate address */
447 static always_inline target_ulong LI (uint32_t opcode)
448 {
449 return (opcode >> 0) & 0x03FFFFFC;
450 }
451
452 static always_inline uint32_t BD (uint32_t opcode)
453 {
454 return (opcode >> 0) & 0xFFFC;
455 }
456
457 EXTRACT_HELPER(BO, 21, 5);
458 EXTRACT_HELPER(BI, 16, 5);
459 /* Absolute/relative address */
460 EXTRACT_HELPER(AA, 1, 1);
461 /* Link */
462 EXTRACT_HELPER(LK, 0, 1);
463
464 /* Create a mask between <start> and <end> bits */
465 static always_inline target_ulong MASK (uint32_t start, uint32_t end)
466 {
467 target_ulong ret;
468
469 #if defined(TARGET_PPC64)
470 if (likely(start == 0)) {
471 ret = UINT64_MAX << (63 - end);
472 } else if (likely(end == 63)) {
473 ret = UINT64_MAX >> start;
474 }
475 #else
476 if (likely(start == 0)) {
477 ret = UINT32_MAX << (31 - end);
478 } else if (likely(end == 31)) {
479 ret = UINT32_MAX >> start;
480 }
481 #endif
482 else {
483 ret = (((target_ulong)(-1ULL)) >> (start)) ^
484 (((target_ulong)(-1ULL) >> (end)) >> 1);
485 if (unlikely(start > end))
486 return ~ret;
487 }
488
489 return ret;
490 }
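/* MASK(start, end) builds a PowerPC-style mask with bits start..end set,
 * using IBM bit numbering (bit 0 is the MSB).  For illustration, on a
 * 32-bit target MASK(0, 7) is 0xFF000000 and MASK(28, 31) is 0x0000000F;
 * when start > end the wrapped (complemented) mask is returned. */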
491
492 /*****************************************************************************/
493 /* PowerPC Instructions types definitions */
494 enum {
495 PPC_NONE = 0x0000000000000000ULL,
496 /* PowerPC base instructions set */
497 PPC_INSNS_BASE = 0x0000000000000001ULL,
498 /* integer operations instructions */
499 #define PPC_INTEGER PPC_INSNS_BASE
500 /* flow control instructions */
501 #define PPC_FLOW PPC_INSNS_BASE
502 /* virtual memory instructions */
503 #define PPC_MEM PPC_INSNS_BASE
504 /* ld/st with reservation instructions */
505 #define PPC_RES PPC_INSNS_BASE
506 /* spr/msr access instructions */
507 #define PPC_MISC PPC_INSNS_BASE
508 /* Deprecated instruction sets */
509 /* Original POWER instruction set */
510 PPC_POWER = 0x0000000000000002ULL,
511 /* POWER2 instruction set extension */
512 PPC_POWER2 = 0x0000000000000004ULL,
513 /* Power RTC support */
514 PPC_POWER_RTC = 0x0000000000000008ULL,
515 /* Power-to-PowerPC bridge (601) */
516 PPC_POWER_BR = 0x0000000000000010ULL,
517 /* 64-bit PowerPC instruction set */
518 PPC_64B = 0x0000000000000020ULL,
519 /* New 64-bit extensions (PowerPC 2.0x) */
520 PPC_64BX = 0x0000000000000040ULL,
521 /* 64-bit hypervisor extensions */
522 PPC_64H = 0x0000000000000080ULL,
523 /* New wait instruction (PowerPC 2.0x) */
524 PPC_WAIT = 0x0000000000000100ULL,
525 /* Time base mftb instruction */
526 PPC_MFTB = 0x0000000000000200ULL,
527
528 /* Fixed-point unit extensions */
529 /* PowerPC 602 specific */
530 PPC_602_SPEC = 0x0000000000000400ULL,
531 /* isel instruction */
532 PPC_ISEL = 0x0000000000000800ULL,
533 /* popcntb instruction */
534 PPC_POPCNTB = 0x0000000000001000ULL,
535 /* string load / store */
536 PPC_STRING = 0x0000000000002000ULL,
537
538 /* Floating-point unit extensions */
539 /* Optional floating point instructions */
540 PPC_FLOAT = 0x0000000000010000ULL,
541 /* New floating-point extensions (PowerPC 2.0x) */
542 PPC_FLOAT_EXT = 0x0000000000020000ULL,
543 PPC_FLOAT_FSQRT = 0x0000000000040000ULL,
544 PPC_FLOAT_FRES = 0x0000000000080000ULL,
545 PPC_FLOAT_FRSQRTE = 0x0000000000100000ULL,
546 PPC_FLOAT_FRSQRTES = 0x0000000000200000ULL,
547 PPC_FLOAT_FSEL = 0x0000000000400000ULL,
548 PPC_FLOAT_STFIWX = 0x0000000000800000ULL,
549
550 /* Vector/SIMD extensions */
551 /* Altivec support */
552 PPC_ALTIVEC = 0x0000000001000000ULL,
553 /* PowerPC 2.03 SPE extension */
554 PPC_SPE = 0x0000000002000000ULL,
555 /* PowerPC 2.03 SPE floating-point extension */
556 PPC_SPEFPU = 0x0000000004000000ULL,
557
558 /* Optional memory control instructions */
559 PPC_MEM_TLBIA = 0x0000000010000000ULL,
560 PPC_MEM_TLBIE = 0x0000000020000000ULL,
561 PPC_MEM_TLBSYNC = 0x0000000040000000ULL,
562 /* sync instruction */
563 PPC_MEM_SYNC = 0x0000000080000000ULL,
564 /* eieio instruction */
565 PPC_MEM_EIEIO = 0x0000000100000000ULL,
566
567 /* Cache control instructions */
568 PPC_CACHE = 0x0000000200000000ULL,
569 /* icbi instruction */
570 PPC_CACHE_ICBI = 0x0000000400000000ULL,
571 /* dcbz instruction with fixed cache line size */
572 PPC_CACHE_DCBZ = 0x0000000800000000ULL,
573 /* dcbz instruction with tunable cache line size */
574 PPC_CACHE_DCBZT = 0x0000001000000000ULL,
575 /* dcba instruction */
576 PPC_CACHE_DCBA = 0x0000002000000000ULL,
577 /* Freescale cache locking instructions */
578 PPC_CACHE_LOCK = 0x0000004000000000ULL,
579
580 /* MMU related extensions */
581 /* external control instructions */
582 PPC_EXTERN = 0x0000010000000000ULL,
583 /* segment register access instructions */
584 PPC_SEGMENT = 0x0000020000000000ULL,
585 /* PowerPC 6xx TLB management instructions */
586 PPC_6xx_TLB = 0x0000040000000000ULL,
587 /* PowerPC 74xx TLB management instructions */
588 PPC_74xx_TLB = 0x0000080000000000ULL,
589 /* PowerPC 40x TLB management instructions */
590 PPC_40x_TLB = 0x0000100000000000ULL,
591 /* segment register access instructions for PowerPC 64 "bridge" */
592 PPC_SEGMENT_64B = 0x0000200000000000ULL,
593 /* SLB management */
594 PPC_SLBI = 0x0000400000000000ULL,
595
596 /* Embedded PowerPC dedicated instructions */
597 PPC_WRTEE = 0x0001000000000000ULL,
598 /* PowerPC 40x exception model */
599 PPC_40x_EXCP = 0x0002000000000000ULL,
600 /* PowerPC 405 Mac instructions */
601 PPC_405_MAC = 0x0004000000000000ULL,
602 /* PowerPC 440 specific instructions */
603 PPC_440_SPEC = 0x0008000000000000ULL,
604 /* BookE (embedded) PowerPC specification */
605 PPC_BOOKE = 0x0010000000000000ULL,
606 /* mfapidi instruction */
607 PPC_MFAPIDI = 0x0020000000000000ULL,
608 /* tlbiva instruction */
609 PPC_TLBIVA = 0x0040000000000000ULL,
610 /* tlbivax instruction */
611 PPC_TLBIVAX = 0x0080000000000000ULL,
612 /* PowerPC 4xx dedicated instructions */
613 PPC_4xx_COMMON = 0x0100000000000000ULL,
614 /* PowerPC 40x icbt instructions */
615 PPC_40x_ICBT = 0x0200000000000000ULL,
616 /* rfmci is not implemented in all BookE PowerPC */
617 PPC_RFMCI = 0x0400000000000000ULL,
618 /* rfdi instruction */
619 PPC_RFDI = 0x0800000000000000ULL,
620 /* DCR accesses */
621 PPC_DCR = 0x1000000000000000ULL,
622 /* DCR extended accesses */
623 PPC_DCRX = 0x2000000000000000ULL,
624 /* user-mode DCR access, implemented in PowerPC 460 */
625 PPC_DCRUX = 0x4000000000000000ULL,
626 };
627
628 /*****************************************************************************/
629 /* PowerPC instructions table */
630 #if HOST_LONG_BITS == 64
631 #define OPC_ALIGN 8
632 #else
633 #define OPC_ALIGN 4
634 #endif
635 #if defined(__APPLE__)
636 #define OPCODES_SECTION \
637 __attribute__ ((section("__TEXT,__opcodes"), unused, aligned (OPC_ALIGN) ))
638 #else
639 #define OPCODES_SECTION \
640 __attribute__ ((section(".opcodes"), unused, aligned (OPC_ALIGN) ))
641 #endif
642
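/* Every GEN_OPCODE/GEN_OPCODE2 use below drops its opcode_t descriptor
 * into the dedicated ".opcodes" (or "__TEXT,__opcodes") linker section,
 * aligned to OPC_ALIGN, so the complete instruction table ends up
 * contiguous in the binary; GEN_OPCODE_MARK entries such as opc_start
 * below appear to delimit it for registration at startup. */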
643 #if defined(DO_PPC_STATISTICS)
644 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
645 OPCODES_SECTION opcode_t opc_##name = { \
646 .opc1 = op1, \
647 .opc2 = op2, \
648 .opc3 = op3, \
649 .pad = { 0, }, \
650 .handler = { \
651 .inval = invl, \
652 .type = _typ, \
653 .handler = &gen_##name, \
654 .oname = stringify(name), \
655 }, \
656 .oname = stringify(name), \
657 }
658 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
659 OPCODES_SECTION opcode_t opc_##name = { \
660 .opc1 = op1, \
661 .opc2 = op2, \
662 .opc3 = op3, \
663 .pad = { 0, }, \
664 .handler = { \
665 .inval = invl, \
666 .type = _typ, \
667 .handler = &gen_##name, \
668 .oname = onam, \
669 }, \
670 .oname = onam, \
671 }
672 #else
673 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ) \
674 OPCODES_SECTION opcode_t opc_##name = { \
675 .opc1 = op1, \
676 .opc2 = op2, \
677 .opc3 = op3, \
678 .pad = { 0, }, \
679 .handler = { \
680 .inval = invl, \
681 .type = _typ, \
682 .handler = &gen_##name, \
683 }, \
684 .oname = stringify(name), \
685 }
686 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ) \
687 OPCODES_SECTION opcode_t opc_##name = { \
688 .opc1 = op1, \
689 .opc2 = op2, \
690 .opc3 = op3, \
691 .pad = { 0, }, \
692 .handler = { \
693 .inval = invl, \
694 .type = _typ, \
695 .handler = &gen_##name, \
696 }, \
697 .oname = onam, \
698 }
699 #endif
700
701 #define GEN_OPCODE_MARK(name) \
702 OPCODES_SECTION opcode_t opc_##name = { \
703 .opc1 = 0xFF, \
704 .opc2 = 0xFF, \
705 .opc3 = 0xFF, \
706 .pad = { 0, }, \
707 .handler = { \
708 .inval = 0x00000000, \
709 .type = 0x00, \
710 .handler = NULL, \
711 }, \
712 .oname = stringify(name), \
713 }
714
715 /* Start opcode list */
716 GEN_OPCODE_MARK(start);
717
718 /* Invalid instruction */
719 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE)
720 {
721 GEN_EXCP_INVAL(ctx);
722 }
723
724 static opc_handler_t invalid_handler = {
725 .inval = 0xFFFFFFFF,
726 .type = PPC_NONE,
727 .handler = gen_invalid,
728 };
729
730 /*** Integer arithmetic ***/
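/* The integer arithmetic macros below all follow the same pattern: copy
 * rA (and rB for two-operand forms) into the cpu_T[] temporaries with
 * tcg_gen_mov_tl, invoke the matching gen_op_* micro-op, write cpu_T[0]
 * back to rD, and set CR0 via gen_set_Rc0() when the Rc bit is set.  The
 * "o" variants (registered with opc3 | 0x10) chain an overflow check such
 * as gen_op_check_addo after the base operation. */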
731 #define __GEN_INT_ARITH2(name, opc1, opc2, opc3, inval, type) \
732 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
733 { \
734 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
735 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
736 gen_op_##name(); \
737 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
738 if (unlikely(Rc(ctx->opcode) != 0)) \
739 gen_set_Rc0(ctx); \
740 }
741
742 #define __GEN_INT_ARITH2_O(name, opc1, opc2, opc3, inval, type) \
743 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
744 { \
745 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
746 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
747 gen_op_##name(); \
748 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
749 if (unlikely(Rc(ctx->opcode) != 0)) \
750 gen_set_Rc0(ctx); \
751 }
752
753 #define __GEN_INT_ARITH1(name, opc1, opc2, opc3, type) \
754 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
755 { \
756 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
757 gen_op_##name(); \
758 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
759 if (unlikely(Rc(ctx->opcode) != 0)) \
760 gen_set_Rc0(ctx); \
761 }
762 #define __GEN_INT_ARITH1_O(name, opc1, opc2, opc3, type) \
763 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
764 { \
765 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
766 gen_op_##name(); \
767 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
768 if (unlikely(Rc(ctx->opcode) != 0)) \
769 gen_set_Rc0(ctx); \
770 }
771
772 /* Two-operand arithmetic functions */
773 #define GEN_INT_ARITH2(name, opc1, opc2, opc3, type) \
774 __GEN_INT_ARITH2(name, opc1, opc2, opc3, 0x00000000, type) \
775 __GEN_INT_ARITH2_O(name##o, opc1, opc2, opc3 | 0x10, 0x00000000, type)
776
777 /* Two-operand arithmetic functions with no overflow allowed */
778 #define GEN_INT_ARITHN(name, opc1, opc2, opc3, type) \
779 __GEN_INT_ARITH2(name, opc1, opc2, opc3, 0x00000400, type)
780
781 /* One-operand arithmetic functions */
782 #define GEN_INT_ARITH1(name, opc1, opc2, opc3, type) \
783 __GEN_INT_ARITH1(name, opc1, opc2, opc3, type) \
784 __GEN_INT_ARITH1_O(name##o, opc1, opc2, opc3 | 0x10, type)
785
786 #if defined(TARGET_PPC64)
787 #define __GEN_INT_ARITH2_64(name, opc1, opc2, opc3, inval, type) \
788 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
789 { \
790 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
791 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
792 if (ctx->sf_mode) \
793 gen_op_##name##_64(); \
794 else \
795 gen_op_##name(); \
796 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
797 if (unlikely(Rc(ctx->opcode) != 0)) \
798 gen_set_Rc0(ctx); \
799 }
800
801 #define __GEN_INT_ARITH2_O_64(name, opc1, opc2, opc3, inval, type) \
802 GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
803 { \
804 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
805 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
806 if (ctx->sf_mode) \
807 gen_op_##name##_64(); \
808 else \
809 gen_op_##name(); \
810 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
811 if (unlikely(Rc(ctx->opcode) != 0)) \
812 gen_set_Rc0(ctx); \
813 }
814
815 #define __GEN_INT_ARITH1_64(name, opc1, opc2, opc3, type) \
816 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
817 { \
818 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
819 if (ctx->sf_mode) \
820 gen_op_##name##_64(); \
821 else \
822 gen_op_##name(); \
823 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
824 if (unlikely(Rc(ctx->opcode) != 0)) \
825 gen_set_Rc0(ctx); \
826 }
827 #define __GEN_INT_ARITH1_O_64(name, opc1, opc2, opc3, type) \
828 GEN_HANDLER(name, opc1, opc2, opc3, 0x0000F800, type) \
829 { \
830 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
831 if (ctx->sf_mode) \
832 gen_op_##name##_64(); \
833 else \
834 gen_op_##name(); \
835 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]); \
836 if (unlikely(Rc(ctx->opcode) != 0)) \
837 gen_set_Rc0(ctx); \
838 }
839
840 /* Two-operand arithmetic functions */
841 #define GEN_INT_ARITH2_64(name, opc1, opc2, opc3, type) \
842 __GEN_INT_ARITH2_64(name, opc1, opc2, opc3, 0x00000000, type) \
843 __GEN_INT_ARITH2_O_64(name##o, opc1, opc2, opc3 | 0x10, 0x00000000, type)
844
845 /* Two-operand arithmetic functions with no overflow allowed */
846 #define GEN_INT_ARITHN_64(name, opc1, opc2, opc3, type) \
847 __GEN_INT_ARITH2_64(name, opc1, opc2, opc3, 0x00000400, type)
848
849 /* One-operand arithmetic functions */
850 #define GEN_INT_ARITH1_64(name, opc1, opc2, opc3, type) \
851 __GEN_INT_ARITH1_64(name, opc1, opc2, opc3, type) \
852 __GEN_INT_ARITH1_O_64(name##o, opc1, opc2, opc3 | 0x10, type)
853 #else
854 #define GEN_INT_ARITH2_64 GEN_INT_ARITH2
855 #define GEN_INT_ARITHN_64 GEN_INT_ARITHN
856 #define GEN_INT_ARITH1_64 GEN_INT_ARITH1
857 #endif
858
859 /* add add. addo addo. */
860 static always_inline void gen_op_addo (void)
861 {
862 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
863 gen_op_add();
864 gen_op_check_addo();
865 }
866 #if defined(TARGET_PPC64)
867 #define gen_op_add_64 gen_op_add
868 static always_inline void gen_op_addo_64 (void)
869 {
870 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
871 gen_op_add();
872 gen_op_check_addo_64();
873 }
874 #endif
875 GEN_INT_ARITH2_64 (add, 0x1F, 0x0A, 0x08, PPC_INTEGER);
876 /* addc addc. addco addco. */
877 static always_inline void gen_op_addc (void)
878 {
879 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
880 gen_op_add();
881 gen_op_check_addc();
882 }
883 static always_inline void gen_op_addco (void)
884 {
885 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
886 gen_op_add();
887 gen_op_check_addc();
888 gen_op_check_addo();
889 }
890 #if defined(TARGET_PPC64)
891 static always_inline void gen_op_addc_64 (void)
892 {
893 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
894 gen_op_add();
895 gen_op_check_addc_64();
896 }
897 static always_inline void gen_op_addco_64 (void)
898 {
899 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
900 gen_op_add();
901 gen_op_check_addc_64();
902 gen_op_check_addo_64();
903 }
904 #endif
905 GEN_INT_ARITH2_64 (addc, 0x1F, 0x0A, 0x00, PPC_INTEGER);
906 /* adde adde. addeo addeo. */
907 static always_inline void gen_op_addeo (void)
908 {
909 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
910 gen_op_adde();
911 gen_op_check_addo();
912 }
913 #if defined(TARGET_PPC64)
914 static always_inline void gen_op_addeo_64 (void)
915 {
916 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
917 gen_op_adde_64();
918 gen_op_check_addo_64();
919 }
920 #endif
921 GEN_INT_ARITH2_64 (adde, 0x1F, 0x0A, 0x04, PPC_INTEGER);
922 /* addme addme. addmeo addmeo. */
923 static always_inline void gen_op_addme (void)
924 {
925 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
926 gen_op_add_me();
927 }
928 #if defined(TARGET_PPC64)
929 static always_inline void gen_op_addme_64 (void)
930 {
931 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
932 gen_op_add_me_64();
933 }
934 #endif
935 GEN_INT_ARITH1_64 (addme, 0x1F, 0x0A, 0x07, PPC_INTEGER);
936 /* addze addze. addzeo addzeo. */
937 static always_inline void gen_op_addze (void)
938 {
939 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
940 gen_op_add_ze();
941 gen_op_check_addc();
942 }
943 static always_inline void gen_op_addzeo (void)
944 {
945 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
946 gen_op_add_ze();
947 gen_op_check_addc();
948 gen_op_check_addo();
949 }
950 #if defined(TARGET_PPC64)
951 static always_inline void gen_op_addze_64 (void)
952 {
953 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
954 gen_op_add_ze();
955 gen_op_check_addc_64();
956 }
957 static always_inline void gen_op_addzeo_64 (void)
958 {
959 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
960 gen_op_add_ze();
961 gen_op_check_addc_64();
962 gen_op_check_addo_64();
963 }
964 #endif
965 GEN_INT_ARITH1_64 (addze, 0x1F, 0x0A, 0x06, PPC_INTEGER);
966 /* divw divw. divwo divwo. */
967 GEN_INT_ARITH2 (divw, 0x1F, 0x0B, 0x0F, PPC_INTEGER);
968 /* divwu divwu. divwuo divwuo. */
969 GEN_INT_ARITH2 (divwu, 0x1F, 0x0B, 0x0E, PPC_INTEGER);
970 /* mulhw mulhw. */
971 GEN_INT_ARITHN (mulhw, 0x1F, 0x0B, 0x02, PPC_INTEGER);
972 /* mulhwu mulhwu. */
973 GEN_INT_ARITHN (mulhwu, 0x1F, 0x0B, 0x00, PPC_INTEGER);
974 /* mullw mullw. mullwo mullwo. */
975 GEN_INT_ARITH2 (mullw, 0x1F, 0x0B, 0x07, PPC_INTEGER);
976 /* neg neg. nego nego. */
977 GEN_INT_ARITH1_64 (neg, 0x1F, 0x08, 0x03, PPC_INTEGER);
978 /* subf subf. subfo subfo. */
979 static always_inline void gen_op_subfo (void)
980 {
981 tcg_gen_not_tl(cpu_T[2], cpu_T[0]);
982 gen_op_subf();
983 gen_op_check_addo();
984 }
985 #if defined(TARGET_PPC64)
986 #define gen_op_subf_64 gen_op_subf
987 static always_inline void gen_op_subfo_64 (void)
988 {
989 tcg_gen_not_i64(cpu_T[2], cpu_T[0]);
990 gen_op_subf();
991 gen_op_check_addo_64();
992 }
993 #endif
994 GEN_INT_ARITH2_64 (subf, 0x1F, 0x08, 0x01, PPC_INTEGER);
995 /* subfc subfc. subfco subfco. */
996 static always_inline void gen_op_subfc (void)
997 {
998 gen_op_subf();
999 gen_op_check_subfc();
1000 }
1001 static always_inline void gen_op_subfco (void)
1002 {
1003 tcg_gen_not_tl(cpu_T[2], cpu_T[0]);
1004 gen_op_subf();
1005 gen_op_check_subfc();
1006 gen_op_check_addo();
1007 }
1008 #if defined(TARGET_PPC64)
1009 static always_inline void gen_op_subfc_64 (void)
1010 {
1011 gen_op_subf();
1012 gen_op_check_subfc_64();
1013 }
1014 static always_inline void gen_op_subfco_64 (void)
1015 {
1016 tcg_gen_not_i64(cpu_T[2], cpu_T[0]);
1017 gen_op_subf();
1018 gen_op_check_subfc_64();
1019 gen_op_check_addo_64();
1020 }
1021 #endif
1022 GEN_INT_ARITH2_64 (subfc, 0x1F, 0x08, 0x00, PPC_INTEGER);
1023 /* subfe subfe. subfeo subfeo. */
1024 static always_inline void gen_op_subfeo (void)
1025 {
1026 tcg_gen_not_tl(cpu_T[2], cpu_T[0]);
1027 gen_op_subfe();
1028 gen_op_check_addo();
1029 }
1030 #if defined(TARGET_PPC64)
1031 #define gen_op_subfe_64 gen_op_subfe
1032 static always_inline void gen_op_subfeo_64 (void)
1033 {
1034 tcg_gen_not_i64(cpu_T[2], cpu_T[0]);
1035 gen_op_subfe_64();
1036 gen_op_check_addo_64();
1037 }
1038 #endif
1039 GEN_INT_ARITH2_64 (subfe, 0x1F, 0x08, 0x04, PPC_INTEGER);
1040 /* subfme subfme. subfmeo subfmeo. */
1041 GEN_INT_ARITH1_64 (subfme, 0x1F, 0x08, 0x07, PPC_INTEGER);
1042 /* subfze subfze. subfzeo subfzeo. */
1043 GEN_INT_ARITH1_64 (subfze, 0x1F, 0x08, 0x06, PPC_INTEGER);
1044 /* addi */
1045 GEN_HANDLER(addi, 0x0E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1046 {
1047 target_long simm = SIMM(ctx->opcode);
1048
1049 if (rA(ctx->opcode) == 0) {
1050 /* li case */
1051 tcg_gen_movi_tl(cpu_T[0], simm);
1052 } else {
1053 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1054 if (likely(simm != 0))
1055 gen_op_addi(simm);
1056 }
1057 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1058 }
1059 /* addic */
1060 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1061 {
1062 target_long simm = SIMM(ctx->opcode);
1063
1064 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1065 if (likely(simm != 0)) {
1066 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
1067 gen_op_addi(simm);
1068 #if defined(TARGET_PPC64)
1069 if (ctx->sf_mode)
1070 gen_op_check_addc_64();
1071 else
1072 #endif
1073 gen_op_check_addc();
1074 } else {
1075 gen_op_clear_xer_ca();
1076 }
1077 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1078 }
1079 /* addic. */
1080 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1081 {
1082 target_long simm = SIMM(ctx->opcode);
1083
1084 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1085 if (likely(simm != 0)) {
1086 tcg_gen_mov_tl(cpu_T[2], cpu_T[0]);
1087 gen_op_addi(simm);
1088 #if defined(TARGET_PPC64)
1089 if (ctx->sf_mode)
1090 gen_op_check_addc_64();
1091 else
1092 #endif
1093 gen_op_check_addc();
1094 } else {
1095 gen_op_clear_xer_ca();
1096 }
1097 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1098 gen_set_Rc0(ctx);
1099 }
1100 /* addis */
1101 GEN_HANDLER(addis, 0x0F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1102 {
1103 target_long simm = SIMM(ctx->opcode);
1104
1105 if (rA(ctx->opcode) == 0) {
1106 /* lis case */
1107 tcg_gen_movi_tl(cpu_T[0], simm << 16);
1108 } else {
1109 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1110 if (likely(simm != 0))
1111 gen_op_addi(simm << 16);
1112 }
1113 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1114 }
1115 /* mulli */
1116 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1117 {
1118 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1119 gen_op_mulli(SIMM(ctx->opcode));
1120 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1121 }
1122 /* subfic */
1123 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1124 {
1125 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1126 #if defined(TARGET_PPC64)
1127 if (ctx->sf_mode)
1128 gen_op_subfic_64(SIMM(ctx->opcode));
1129 else
1130 #endif
1131 gen_op_subfic(SIMM(ctx->opcode));
1132 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1133 }
1134
1135 #if defined(TARGET_PPC64)
1136 /* mulhd mulhd. */
1137 GEN_INT_ARITHN (mulhd, 0x1F, 0x09, 0x02, PPC_64B);
1138 /* mulhdu mulhdu. */
1139 GEN_INT_ARITHN (mulhdu, 0x1F, 0x09, 0x00, PPC_64B);
1140 /* mulld mulld. mulldo mulldo. */
1141 GEN_INT_ARITH2 (mulld, 0x1F, 0x09, 0x07, PPC_64B);
1142 /* divd divd. divdo divdo. */
1143 GEN_INT_ARITH2 (divd, 0x1F, 0x09, 0x0F, PPC_64B);
1144 /* divdu divdu. divduo divduo. */
1145 GEN_INT_ARITH2 (divdu, 0x1F, 0x09, 0x0E, PPC_64B);
1146 #endif
1147
1148 /*** Integer comparison ***/
1149 #if defined(TARGET_PPC64)
1150 #define GEN_CMP(name, opc, type) \
1151 GEN_HANDLER(name, 0x1F, 0x00, opc, 0x00400000, type) \
1152 { \
1153 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
1154 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
1155 if (ctx->sf_mode && (ctx->opcode & 0x00200000)) \
1156 gen_op_##name##_64(); \
1157 else \
1158 gen_op_##name(); \
1159 gen_op_store_T0_crf(crfD(ctx->opcode)); \
1160 }
1161 #else
1162 #define GEN_CMP(name, opc, type) \
1163 GEN_HANDLER(name, 0x1F, 0x00, opc, 0x00400000, type) \
1164 { \
1165 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]); \
1166 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
1167 gen_op_##name(); \
1168 gen_op_store_T0_crf(crfD(ctx->opcode)); \
1169 }
1170 #endif
1171
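/* The compare handlers store the computed condition bits into CR field
 * crfD through gen_op_store_T0_crf().  On 64-bit implementations, opcode
 * bit 0x00200000 (the L field) selects the 64-bit comparison when the CPU
 * is in SF mode. */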
1172 /* cmp */
1173 GEN_CMP(cmp, 0x00, PPC_INTEGER);
1174 /* cmpi */
1175 GEN_HANDLER(cmpi, 0x0B, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
1176 {
1177 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1178 #if defined(TARGET_PPC64)
1179 if (ctx->sf_mode && (ctx->opcode & 0x00200000))
1180 gen_op_cmpi_64(SIMM(ctx->opcode));
1181 else
1182 #endif
1183 gen_op_cmpi(SIMM(ctx->opcode));
1184 gen_op_store_T0_crf(crfD(ctx->opcode));
1185 }
1186 /* cmpl */
1187 GEN_CMP(cmpl, 0x01, PPC_INTEGER);
1188 /* cmpli */
1189 GEN_HANDLER(cmpli, 0x0A, 0xFF, 0xFF, 0x00400000, PPC_INTEGER)
1190 {
1191 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1192 #if defined(TARGET_PPC64)
1193 if (ctx->sf_mode && (ctx->opcode & 0x00200000))
1194 gen_op_cmpli_64(UIMM(ctx->opcode));
1195 else
1196 #endif
1197 gen_op_cmpli(UIMM(ctx->opcode));
1198 gen_op_store_T0_crf(crfD(ctx->opcode));
1199 }
1200
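/* isel rD,rA,rB,BC (PowerPC 2.03): rD receives rA (or 0 when rA is r0) if
 * CR bit BC is set, and rB otherwise.  The handler below loads the CR
 * field holding BC, tests the selected bit and lets gen_op_isel pick the
 * value. */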
1201 /* isel (PowerPC 2.03 specification) */
1202 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL)
1203 {
1204 uint32_t bi = rC(ctx->opcode);
1205 uint32_t mask;
1206
1207 if (rA(ctx->opcode) == 0) {
1208 tcg_gen_movi_tl(cpu_T[0], 0);
1209 } else {
1210 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1211 }
1212 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
1213 mask = 1 << (3 - (bi & 0x03));
1214 gen_op_load_crf_T0(bi >> 2);
1215 gen_op_test_true(mask);
1216 gen_op_isel();
1217 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
1218 }
1219
1220 /*** Integer logical ***/
1221 #define __GEN_LOGICAL2(name, opc2, opc3, type) \
1222 GEN_HANDLER(name, 0x1F, opc2, opc3, 0x00000000, type) \
1223 { \
1224 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]); \
1225 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]); \
1226 gen_op_##name(); \
1227 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
1228 if (unlikely(Rc(ctx->opcode) != 0)) \
1229 gen_set_Rc0(ctx); \
1230 }
1231 #define GEN_LOGICAL2(name, opc, type) \
1232 __GEN_LOGICAL2(name, 0x1C, opc, type)
1233
1234 #define GEN_LOGICAL1(name, opc, type) \
1235 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) \
1236 { \
1237 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]); \
1238 gen_op_##name(); \
1239 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
1240 if (unlikely(Rc(ctx->opcode) != 0)) \
1241 gen_set_Rc0(ctx); \
1242 }
1243
1244 /* and & and. */
1245 GEN_LOGICAL2(and, 0x00, PPC_INTEGER);
1246 /* andc & andc. */
1247 GEN_LOGICAL2(andc, 0x01, PPC_INTEGER);
1248 /* andi. */
1249 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1250 {
1251 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1252 gen_op_andi_T0(UIMM(ctx->opcode));
1253 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1254 gen_set_Rc0(ctx);
1255 }
1256 /* andis. */
1257 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1258 {
1259 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1260 gen_op_andi_T0(UIMM(ctx->opcode) << 16);
1261 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1262 gen_set_Rc0(ctx);
1263 }
1264
1265 /* cntlzw */
1266 GEN_LOGICAL1(cntlzw, 0x00, PPC_INTEGER);
1267 /* eqv & eqv. */
1268 GEN_LOGICAL2(eqv, 0x08, PPC_INTEGER);
1269 /* extsb & extsb. */
1270 GEN_LOGICAL1(extsb, 0x1D, PPC_INTEGER);
1271 /* extsh & extsh. */
1272 GEN_LOGICAL1(extsh, 0x1C, PPC_INTEGER);
1273 /* nand & nand. */
1274 GEN_LOGICAL2(nand, 0x0E, PPC_INTEGER);
1275 /* nor & nor. */
1276 GEN_LOGICAL2(nor, 0x03, PPC_INTEGER);
1277
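/* Besides the plain or/or. forms, "or rx,rx,rx" encodings double as
 * process-priority hints on 64-bit CPUs (e.g. "or 1,1,1" requests low
 * priority); the handler below maps them onto gen_op_store_pri, with the
 * privileged levels gated on ctx->supervisor. */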
1278 /* or & or. */
1279 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER)
1280 {
1281 int rs, ra, rb;
1282
1283 rs = rS(ctx->opcode);
1284 ra = rA(ctx->opcode);
1285 rb = rB(ctx->opcode);
1286 /* Optimisation for mr. ri case */
1287 if (rs != ra || rs != rb) {
1288 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rs]);
1289 if (rs != rb) {
1290 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rb]);
1291 gen_op_or();
1292 }
1293 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
1294 if (unlikely(Rc(ctx->opcode) != 0))
1295 gen_set_Rc0(ctx);
1296 } else if (unlikely(Rc(ctx->opcode) != 0)) {
1297 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rs]);
1298 gen_set_Rc0(ctx);
1299 #if defined(TARGET_PPC64)
1300 } else {
1301 switch (rs) {
1302 case 1:
1303 /* Set process priority to low */
1304 gen_op_store_pri(2);
1305 break;
1306 case 6:
1307 /* Set process priority to medium-low */
1308 gen_op_store_pri(3);
1309 break;
1310 case 2:
1311 /* Set process priority to normal */
1312 gen_op_store_pri(4);
1313 break;
1314 #if !defined(CONFIG_USER_ONLY)
1315 case 31:
1316 if (ctx->supervisor > 0) {
1317 /* Set process priority to very low */
1318 gen_op_store_pri(1);
1319 }
1320 break;
1321 case 5:
1322 if (ctx->supervisor > 0) {
1323 /* Set process priority to medium-high */
1324 gen_op_store_pri(5);
1325 }
1326 break;
1327 case 3:
1328 if (ctx->supervisor > 0) {
1329 /* Set process priority to high */
1330 gen_op_store_pri(6);
1331 }
1332 break;
1333 case 7:
1334 if (ctx->supervisor > 1) {
1335 /* Set process priority to very high */
1336 gen_op_store_pri(7);
1337 }
1338 break;
1339 #endif
1340 default:
1341 /* nop */
1342 break;
1343 }
1344 #endif
1345 }
1346 }
1347
1348 /* orc & orc. */
1349 GEN_LOGICAL2(orc, 0x0C, PPC_INTEGER);
1350 /* xor & xor. */
1351 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER)
1352 {
1353 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1354 /* Optimisation for "set to zero" case */
1355 if (rS(ctx->opcode) != rB(ctx->opcode)) {
1356 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
1357 gen_op_xor();
1358 } else {
1359 tcg_gen_movi_tl(cpu_T[0], 0);
1360 }
1361 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1362 if (unlikely(Rc(ctx->opcode) != 0))
1363 gen_set_Rc0(ctx);
1364 }
1365 /* ori */
1366 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1367 {
1368 target_ulong uimm = UIMM(ctx->opcode);
1369
1370 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1371 /* NOP */
1372 /* XXX: should handle special NOPs for POWER series */
1373 return;
1374 }
1375 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1376 if (likely(uimm != 0))
1377 gen_op_ori(uimm);
1378 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1379 }
1380 /* oris */
1381 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1382 {
1383 target_ulong uimm = UIMM(ctx->opcode);
1384
1385 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1386 /* NOP */
1387 return;
1388 }
1389 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1390 if (likely(uimm != 0))
1391 gen_op_ori(uimm << 16);
1392 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1393 }
1394 /* xori */
1395 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1396 {
1397 target_ulong uimm = UIMM(ctx->opcode);
1398
1399 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1400 /* NOP */
1401 return;
1402 }
1403 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1404 if (likely(uimm != 0))
1405 gen_op_xori(uimm);
1406 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1407 }
1408
1409 /* xoris */
1410 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1411 {
1412 target_ulong uimm = UIMM(ctx->opcode);
1413
1414 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
1415 /* NOP */
1416 return;
1417 }
1418 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1419 if (likely(uimm != 0))
1420 gen_op_xori(uimm << 16);
1421 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1422 }
1423
1424 /* popcntb : PowerPC 2.03 specification */
1425 GEN_HANDLER(popcntb, 0x1F, 0x03, 0x03, 0x0000F801, PPC_POPCNTB)
1426 {
1427 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1428 #if defined(TARGET_PPC64)
1429 if (ctx->sf_mode)
1430 gen_op_popcntb_64();
1431 else
1432 #endif
1433 gen_op_popcntb();
1434 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1435 }
1436
1437 #if defined(TARGET_PPC64)
1438 /* extsw & extsw. */
1439 GEN_LOGICAL1(extsw, 0x1E, PPC_64B);
1440 /* cntlzd */
1441 GEN_LOGICAL1(cntlzd, 0x01, PPC_64B);
1442 #endif
1443
1444 /*** Integer rotate ***/
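/* The rlw* handlers implement the rotate-and-mask family: rotate rS left
 * by SH (or by rB for rlwnm), then AND with MASK(MB, ME); rlwimi also
 * keeps the bits of rA outside the mask by combining both values with
 * gen_op_or.  Common cases (no rotation, full mask) are special-cased to
 * avoid redundant ops. */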
1445 /* rlwimi & rlwimi. */
1446 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1447 {
1448 target_ulong mask;
1449 uint32_t mb, me, sh;
1450
1451 mb = MB(ctx->opcode);
1452 me = ME(ctx->opcode);
1453 sh = SH(ctx->opcode);
1454 if (likely(sh == 0)) {
1455 if (likely(mb == 0 && me == 31)) {
1456 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1457 goto do_store;
1458 } else if (likely(mb == 31 && me == 0)) {
1459 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
1460 goto do_store;
1461 }
1462 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1463 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1464 goto do_mask;
1465 }
1466 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1467 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1468 gen_op_rotli32_T0(SH(ctx->opcode));
1469 do_mask:
1470 #if defined(TARGET_PPC64)
1471 mb += 32;
1472 me += 32;
1473 #endif
1474 mask = MASK(mb, me);
1475 gen_op_andi_T0(mask);
1476 gen_op_andi_T1(~mask);
1477 gen_op_or();
1478 do_store:
1479 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1480 if (unlikely(Rc(ctx->opcode) != 0))
1481 gen_set_Rc0(ctx);
1482 }
1483 /* rlwinm & rlwinm. */
1484 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1485 {
1486 uint32_t mb, me, sh;
1487
1488 sh = SH(ctx->opcode);
1489 mb = MB(ctx->opcode);
1490 me = ME(ctx->opcode);
1491 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1492 if (likely(sh == 0)) {
1493 goto do_mask;
1494 }
1495 if (likely(mb == 0)) {
1496 if (likely(me == 31)) {
1497 gen_op_rotli32_T0(sh);
1498 goto do_store;
1499 } else if (likely(me == (31 - sh))) {
1500 gen_op_sli_T0(sh);
1501 goto do_store;
1502 }
1503 } else if (likely(me == 31)) {
1504 if (likely(sh == (32 - mb))) {
1505 gen_op_srli_T0(mb);
1506 goto do_store;
1507 }
1508 }
1509 gen_op_rotli32_T0(sh);
1510 do_mask:
1511 #if defined(TARGET_PPC64)
1512 mb += 32;
1513 me += 32;
1514 #endif
1515 gen_op_andi_T0(MASK(mb, me));
1516 do_store:
1517 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1518 if (unlikely(Rc(ctx->opcode) != 0))
1519 gen_set_Rc0(ctx);
1520 }
1521 /* rlwnm & rlwnm. */
1522 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
1523 {
1524 uint32_t mb, me;
1525
1526 mb = MB(ctx->opcode);
1527 me = ME(ctx->opcode);
1528 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1529 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
1530 gen_op_rotl32_T0_T1();
1531 if (unlikely(mb != 0 || me != 31)) {
1532 #if defined(TARGET_PPC64)
1533 mb += 32;
1534 me += 32;
1535 #endif
1536 gen_op_andi_T0(MASK(mb, me));
1537 }
1538 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1539 if (unlikely(Rc(ctx->opcode) != 0))
1540 gen_set_Rc0(ctx);
1541 }
1542
1543 #if defined(TARGET_PPC64)
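/* The 64-bit rld* forms carry the extra (sixth) bit of their SH and MB/ME
 * fields in separate opcode bits, so GEN_PPC64_R2/GEN_PPC64_R4 register
 * two or four handlers per instruction and pass the extra bit(s) down to
 * the shared gen_rld* helpers. */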
1544 #define GEN_PPC64_R2(name, opc1, opc2) \
1545 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1546 { \
1547 gen_##name(ctx, 0); \
1548 } \
1549 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1550 PPC_64B) \
1551 { \
1552 gen_##name(ctx, 1); \
1553 }
1554 #define GEN_PPC64_R4(name, opc1, opc2) \
1555 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B) \
1556 { \
1557 gen_##name(ctx, 0, 0); \
1558 } \
1559 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
1560 PPC_64B) \
1561 { \
1562 gen_##name(ctx, 0, 1); \
1563 } \
1564 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
1565 PPC_64B) \
1566 { \
1567 gen_##name(ctx, 1, 0); \
1568 } \
1569 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
1570 PPC_64B) \
1571 { \
1572 gen_##name(ctx, 1, 1); \
1573 }
1574
1575 static always_inline void gen_andi_T0_64 (DisasContext *ctx, uint64_t mask)
1576 {
1577 if (mask >> 32)
1578 gen_op_andi_T0_64(mask >> 32, mask & 0xFFFFFFFF);
1579 else
1580 gen_op_andi_T0(mask);
1581 }
1582
1583 static always_inline void gen_andi_T1_64 (DisasContext *ctx, uint64_t mask)
1584 {
1585 if (mask >> 32)
1586 gen_op_andi_T1_64(mask >> 32, mask & 0xFFFFFFFF);
1587 else
1588 gen_op_andi_T1(mask);
1589 }
1590
1591 static always_inline void gen_rldinm (DisasContext *ctx, uint32_t mb,
1592 uint32_t me, uint32_t sh)
1593 {
1594 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1595 if (likely(sh == 0)) {
1596 goto do_mask;
1597 }
1598 if (likely(mb == 0)) {
1599 if (likely(me == 63)) {
1600 gen_op_rotli64_T0(sh);
1601 goto do_store;
1602 } else if (likely(me == (63 - sh))) {
1603 gen_op_sli_T0(sh);
1604 goto do_store;
1605 }
1606 } else if (likely(me == 63)) {
1607 if (likely(sh == (64 - mb))) {
1608 gen_op_srli_T0_64(mb);
1609 goto do_store;
1610 }
1611 }
1612 gen_op_rotli64_T0(sh);
1613 do_mask:
1614 gen_andi_T0_64(ctx, MASK(mb, me));
1615 do_store:
1616 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1617 if (unlikely(Rc(ctx->opcode) != 0))
1618 gen_set_Rc0(ctx);
1619 }
1620 /* rldicl - rldicl. */
1621 static always_inline void gen_rldicl (DisasContext *ctx, int mbn, int shn)
1622 {
1623 uint32_t sh, mb;
1624
1625 sh = SH(ctx->opcode) | (shn << 5);
1626 mb = MB(ctx->opcode) | (mbn << 5);
1627 gen_rldinm(ctx, mb, 63, sh);
1628 }
1629 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
1630 /* rldicr - rldicr. */
1631 static always_inline void gen_rldicr (DisasContext *ctx, int men, int shn)
1632 {
1633 uint32_t sh, me;
1634
1635 sh = SH(ctx->opcode) | (shn << 5);
1636 me = MB(ctx->opcode) | (men << 5);
1637 gen_rldinm(ctx, 0, me, sh);
1638 }
1639 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
1640 /* rldic - rldic. */
1641 static always_inline void gen_rldic (DisasContext *ctx, int mbn, int shn)
1642 {
1643 uint32_t sh, mb;
1644
1645 sh = SH(ctx->opcode) | (shn << 5);
1646 mb = MB(ctx->opcode) | (mbn << 5);
1647 gen_rldinm(ctx, mb, 63 - sh, sh);
1648 }
1649 GEN_PPC64_R4(rldic, 0x1E, 0x04);
1650
1651 static always_inline void gen_rldnm (DisasContext *ctx, uint32_t mb,
1652 uint32_t me)
1653 {
1654 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1655 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
1656 gen_op_rotl64_T0_T1();
1657 if (unlikely(mb != 0 || me != 63)) {
1658 gen_andi_T0_64(ctx, MASK(mb, me));
1659 }
1660 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1661 if (unlikely(Rc(ctx->opcode) != 0))
1662 gen_set_Rc0(ctx);
1663 }
1664
1665 /* rldcl - rldcl. */
1666 static always_inline void gen_rldcl (DisasContext *ctx, int mbn)
1667 {
1668 uint32_t mb;
1669
1670 mb = MB(ctx->opcode) | (mbn << 5);
1671 gen_rldnm(ctx, mb, 63);
1672 }
1673 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
1674 /* rldcr - rldcr. */
1675 static always_inline void gen_rldcr (DisasContext *ctx, int men)
1676 {
1677 uint32_t me;
1678
1679 me = MB(ctx->opcode) | (men << 5);
1680 gen_rldnm(ctx, 0, me);
1681 }
1682 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
1683 /* rldimi - rldimi. */
1684 static always_inline void gen_rldimi (DisasContext *ctx, int mbn, int shn)
1685 {
1686 uint64_t mask;
1687 uint32_t sh, mb, me;
1688
1689 sh = SH(ctx->opcode) | (shn << 5);
1690 mb = MB(ctx->opcode) | (mbn << 5);
1691 me = 63 - sh;
1692 if (likely(sh == 0)) {
1693 if (likely(mb == 0)) {
1694 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1695 goto do_store;
1696 }
1697 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1698 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1699 goto do_mask;
1700 }
1701 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1702 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
1703 gen_op_rotli64_T0(sh);
1704 do_mask:
1705 mask = MASK(mb, me);
1706 gen_andi_T0_64(ctx, mask);
1707 gen_andi_T1_64(ctx, ~mask);
1708 gen_op_or();
1709 do_store:
1710 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1711 if (unlikely(Rc(ctx->opcode) != 0))
1712 gen_set_Rc0(ctx);
1713 }
1714 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
1715 #endif
1716
1717 /*** Integer shift ***/
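/* For srawi/sradi the MASK(mb, me) argument passed to gen_op_srawi or
 * gen_op_sradi covers the SH low-order bits that get shifted out, which
 * presumably lets the micro-op apply the architected carry rule: XER[CA]
 * is set when the source is negative and any of those bits are non-zero. */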
1718 /* slw & slw. */
1719 __GEN_LOGICAL2(slw, 0x18, 0x00, PPC_INTEGER);
1720 /* sraw & sraw. */
1721 __GEN_LOGICAL2(sraw, 0x18, 0x18, PPC_INTEGER);
1722 /* srawi & srawi. */
1723 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER)
1724 {
1725 int mb, me;
1726 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1727 if (SH(ctx->opcode) != 0) {
1728 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
1729 mb = 32 - SH(ctx->opcode);
1730 me = 31;
1731 #if defined(TARGET_PPC64)
1732 mb += 32;
1733 me += 32;
1734 #endif
1735 gen_op_srawi(SH(ctx->opcode), MASK(mb, me));
1736 }
1737 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1738 if (unlikely(Rc(ctx->opcode) != 0))
1739 gen_set_Rc0(ctx);
1740 }
1741 /* srw & srw. */
1742 __GEN_LOGICAL2(srw, 0x18, 0x10, PPC_INTEGER);
1743
1744 #if defined(TARGET_PPC64)
1745 /* sld & sld. */
1746 __GEN_LOGICAL2(sld, 0x1B, 0x00, PPC_64B);
1747 /* srad & srad. */
1748 __GEN_LOGICAL2(srad, 0x1A, 0x18, PPC_64B);
1749 /* sradi & sradi. */
1750 static always_inline void gen_sradi (DisasContext *ctx, int n)
1751 {
1752 uint64_t mask;
1753 int sh, mb, me;
1754
1755 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
1756 sh = SH(ctx->opcode) + (n << 5);
1757 if (sh != 0) {
1758 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
1759 mb = 64 - SH(ctx->opcode);
1760 me = 63;
1761 mask = MASK(mb, me);
1762 gen_op_sradi(sh, mask >> 32, mask);
1763 }
1764 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
1765 if (unlikely(Rc(ctx->opcode) != 0))
1766 gen_set_Rc0(ctx);
1767 }
1768 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B)
1769 {
1770 gen_sradi(ctx, 0);
1771 }
1772 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B)
1773 {
1774 gen_sradi(ctx, 1);
1775 }
1776 /* srd & srd. */
1777 __GEN_LOGICAL2(srd, 0x1B, 0x10, PPC_64B);
1778 #endif
1779
1780 /*** Floating-Point arithmetic ***/
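/* The FP macros below move operands directly between the per-register
 * cpu_fpr[] TCG globals and the FT0-FT2 temporaries with tcg_gen_mov_i64
 * (the change referenced by the commit title, "ppc: Convert FPR moves to
 * TCG").  Each handler also checks ctx->fpu_enabled, calls
 * gen_reset_fpstatus() before the operation and gen_compute_fprf()
 * afterwards to update FPRF/CR1. */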
1781 #define _GEN_FLOAT_ACB(name, op, op1, op2, isfloat, set_fprf, type) \
1782 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x00000000, type) \
1783 { \
1784 if (unlikely(!ctx->fpu_enabled)) { \
1785 GEN_EXCP_NO_FP(ctx); \
1786 return; \
1787 } \
1788 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]); \
1789 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rC(ctx->opcode)]); \
1790 tcg_gen_mov_i64(cpu_FT[2], cpu_fpr[rB(ctx->opcode)]); \
1791 gen_reset_fpstatus(); \
1792 gen_op_f##op(); \
1793 if (isfloat) { \
1794 gen_op_frsp(); \
1795 } \
1796 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1797 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1798 }
1799
1800 #define GEN_FLOAT_ACB(name, op2, set_fprf, type) \
1801 _GEN_FLOAT_ACB(name, name, 0x3F, op2, 0, set_fprf, type); \
1802 _GEN_FLOAT_ACB(name##s, name, 0x3B, op2, 1, set_fprf, type);
1803
1804 #define _GEN_FLOAT_AB(name, op, op1, op2, inval, isfloat, set_fprf, type) \
1805 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
1806 { \
1807 if (unlikely(!ctx->fpu_enabled)) { \
1808 GEN_EXCP_NO_FP(ctx); \
1809 return; \
1810 } \
1811 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]); \
1812 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]); \
1813 gen_reset_fpstatus(); \
1814 gen_op_f##op(); \
1815 if (isfloat) { \
1816 gen_op_frsp(); \
1817 } \
1818 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1819 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1820 }
1821 #define GEN_FLOAT_AB(name, op2, inval, set_fprf, type) \
1822 _GEN_FLOAT_AB(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
1823 _GEN_FLOAT_AB(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
1824
1825 #define _GEN_FLOAT_AC(name, op, op1, op2, inval, isfloat, set_fprf, type) \
1826 GEN_HANDLER(f##name, op1, op2, 0xFF, inval, type) \
1827 { \
1828 if (unlikely(!ctx->fpu_enabled)) { \
1829 GEN_EXCP_NO_FP(ctx); \
1830 return; \
1831 } \
1832 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]); \
1833 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rC(ctx->opcode)]); \
1834 gen_reset_fpstatus(); \
1835 gen_op_f##op(); \
1836 if (isfloat) { \
1837 gen_op_frsp(); \
1838 } \
1839 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1840 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1841 }
1842 #define GEN_FLOAT_AC(name, op2, inval, set_fprf, type) \
1843 _GEN_FLOAT_AC(name, name, 0x3F, op2, inval, 0, set_fprf, type); \
1844 _GEN_FLOAT_AC(name##s, name, 0x3B, op2, inval, 1, set_fprf, type);
1845
1846 #define GEN_FLOAT_B(name, op2, op3, set_fprf, type) \
1847 GEN_HANDLER(f##name, 0x3F, op2, op3, 0x001F0000, type) \
1848 { \
1849 if (unlikely(!ctx->fpu_enabled)) { \
1850 GEN_EXCP_NO_FP(ctx); \
1851 return; \
1852 } \
1853 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]); \
1854 gen_reset_fpstatus(); \
1855 gen_op_f##name(); \
1856 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1857 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1858 }
1859
1860 #define GEN_FLOAT_BS(name, op1, op2, set_fprf, type) \
1861 GEN_HANDLER(f##name, op1, op2, 0xFF, 0x001F07C0, type) \
1862 { \
1863 if (unlikely(!ctx->fpu_enabled)) { \
1864 GEN_EXCP_NO_FP(ctx); \
1865 return; \
1866 } \
1867 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]); \
1868 gen_reset_fpstatus(); \
1869 gen_op_f##name(); \
1870 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
1871 gen_compute_fprf(set_fprf, Rc(ctx->opcode) != 0); \
1872 }
1873
1874 /* fadd - fadds */
1875 GEN_FLOAT_AB(add, 0x15, 0x000007C0, 1, PPC_FLOAT);
1876 /* fdiv - fdivs */
1877 GEN_FLOAT_AB(div, 0x12, 0x000007C0, 1, PPC_FLOAT);
1878 /* fmul - fmuls */
1879 GEN_FLOAT_AC(mul, 0x19, 0x0000F800, 1, PPC_FLOAT);
1880
1881 /* fre */
1882 GEN_FLOAT_BS(re, 0x3F, 0x18, 1, PPC_FLOAT_EXT);
1883
1884 /* fres */
1885 GEN_FLOAT_BS(res, 0x3B, 0x18, 1, PPC_FLOAT_FRES);
1886
1887 /* frsqrte */
1888 GEN_FLOAT_BS(rsqrte, 0x3F, 0x1A, 1, PPC_FLOAT_FRSQRTE);
1889
1890 /* frsqrtes */
1891 static always_inline void gen_op_frsqrtes (void)
1892 {
1893 gen_op_frsqrte();
1894 gen_op_frsp();
1895 }
1896 GEN_FLOAT_BS(rsqrtes, 0x3B, 0x1A, 1, PPC_FLOAT_FRSQRTES);
1897
1898 /* fsel */
1899 _GEN_FLOAT_ACB(sel, sel, 0x3F, 0x17, 0, 0, PPC_FLOAT_FSEL);
1900 /* fsub - fsubs */
1901 GEN_FLOAT_AB(sub, 0x14, 0x000007C0, 1, PPC_FLOAT);
1902 /* Optional: */
1903 /* fsqrt */
1904 GEN_HANDLER(fsqrt, 0x3F, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
1905 {
1906 if (unlikely(!ctx->fpu_enabled)) {
1907 GEN_EXCP_NO_FP(ctx);
1908 return;
1909 }
1910 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
1911 gen_reset_fpstatus();
1912 gen_op_fsqrt();
1913 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
1914 gen_compute_fprf(1, Rc(ctx->opcode) != 0);
1915 }
1916
1917 GEN_HANDLER(fsqrts, 0x3B, 0x16, 0xFF, 0x001F07C0, PPC_FLOAT_FSQRT)
1918 {
1919 if (unlikely(!ctx->fpu_enabled)) {
1920 GEN_EXCP_NO_FP(ctx);
1921 return;
1922 }
1923 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
1924 gen_reset_fpstatus();
1925 gen_op_fsqrt();
1926 gen_op_frsp();
1927 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
1928 gen_compute_fprf(1, Rc(ctx->opcode) != 0);
1929 }
1930
1931 /*** Floating-Point multiply-and-add ***/
1932 /* fmadd - fmadds */
1933 GEN_FLOAT_ACB(madd, 0x1D, 1, PPC_FLOAT);
1934 /* fmsub - fmsubs */
1935 GEN_FLOAT_ACB(msub, 0x1C, 1, PPC_FLOAT);
1936 /* fnmadd - fnmadds */
1937 GEN_FLOAT_ACB(nmadd, 0x1F, 1, PPC_FLOAT);
1938 /* fnmsub - fnmsubs */
1939 GEN_FLOAT_ACB(nmsub, 0x1E, 1, PPC_FLOAT);
1940
1941 /*** Floating-Point round & convert ***/
1942 /* fctiw */
1943 GEN_FLOAT_B(ctiw, 0x0E, 0x00, 0, PPC_FLOAT);
1944 /* fctiwz */
1945 GEN_FLOAT_B(ctiwz, 0x0F, 0x00, 0, PPC_FLOAT);
1946 /* frsp */
1947 GEN_FLOAT_B(rsp, 0x0C, 0x00, 1, PPC_FLOAT);
1948 #if defined(TARGET_PPC64)
1949 /* fcfid */
1950 GEN_FLOAT_B(cfid, 0x0E, 0x1A, 1, PPC_64B);
1951 /* fctid */
1952 GEN_FLOAT_B(ctid, 0x0E, 0x19, 0, PPC_64B);
1953 /* fctidz */
1954 GEN_FLOAT_B(ctidz, 0x0F, 0x19, 0, PPC_64B);
1955 #endif
1956
1957 /* frin */
1958 GEN_FLOAT_B(rin, 0x08, 0x0C, 1, PPC_FLOAT_EXT);
1959 /* friz */
1960 GEN_FLOAT_B(riz, 0x08, 0x0D, 1, PPC_FLOAT_EXT);
1961 /* frip */
1962 GEN_FLOAT_B(rip, 0x08, 0x0E, 1, PPC_FLOAT_EXT);
1963 /* frim */
1964 GEN_FLOAT_B(rim, 0x08, 0x0F, 1, PPC_FLOAT_EXT);
1965
1966 /*** Floating-Point compare ***/
1967 /* fcmpo */
1968 GEN_HANDLER(fcmpo, 0x3F, 0x00, 0x01, 0x00600001, PPC_FLOAT)
1969 {
1970 if (unlikely(!ctx->fpu_enabled)) {
1971 GEN_EXCP_NO_FP(ctx);
1972 return;
1973 }
1974 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
1975 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
1976 gen_reset_fpstatus();
1977 gen_op_fcmpo();
1978 gen_op_store_T0_crf(crfD(ctx->opcode));
1979 gen_op_float_check_status();
1980 }
1981
1982 /* fcmpu */
1983 GEN_HANDLER(fcmpu, 0x3F, 0x00, 0x00, 0x00600001, PPC_FLOAT)
1984 {
1985 if (unlikely(!ctx->fpu_enabled)) {
1986 GEN_EXCP_NO_FP(ctx);
1987 return;
1988 }
1989 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rA(ctx->opcode)]);
1990 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rB(ctx->opcode)]);
1991 gen_reset_fpstatus();
1992 gen_op_fcmpu();
1993 gen_op_store_T0_crf(crfD(ctx->opcode));
1994 gen_op_float_check_status();
1995 }
1996
1997 /*** Floating-point move ***/
1998 /* fabs */
1999 /* XXX: beware that fabs never checks for NaNs nor updates the FPSCR */
2000 GEN_FLOAT_B(abs, 0x08, 0x08, 0, PPC_FLOAT);
2001
2002 /* fmr - fmr. */
2003 /* XXX: beware that fmr never checks for NaNs nor updates the FPSCR */
2004 GEN_HANDLER(fmr, 0x3F, 0x08, 0x02, 0x001F0000, PPC_FLOAT)
2005 {
2006 if (unlikely(!ctx->fpu_enabled)) {
2007 GEN_EXCP_NO_FP(ctx);
2008 return;
2009 }
2010 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
2011 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
2012 gen_compute_fprf(0, Rc(ctx->opcode) != 0);
2013 }
2014
2015 /* fnabs */
2016 /* XXX: beware that fnabs never checks for NaNs nor updates the FPSCR */
2017 GEN_FLOAT_B(nabs, 0x08, 0x04, 0, PPC_FLOAT);
2018 /* fneg */
2019 /* XXX: beware that fneg never checks for NaNs nor updates the FPSCR */
2020 GEN_FLOAT_B(neg, 0x08, 0x01, 0, PPC_FLOAT);
2021
2022 /*** Floating-Point status & ctrl register ***/
2023 /* mcrfs */
2024 GEN_HANDLER(mcrfs, 0x3F, 0x00, 0x02, 0x0063F801, PPC_FLOAT)
2025 {
2026 int bfa;
2027
2028 if (unlikely(!ctx->fpu_enabled)) {
2029 GEN_EXCP_NO_FP(ctx);
2030 return;
2031 }
2032 gen_optimize_fprf();
2033 bfa = 4 * (7 - crfS(ctx->opcode));
2034 gen_op_load_fpscr_T0(bfa);
2035 gen_op_store_T0_crf(crfD(ctx->opcode));
2036 gen_op_fpscr_resetbit(~(0xF << bfa));
2037 }
2038
2039 /* mffs */
2040 GEN_HANDLER(mffs, 0x3F, 0x07, 0x12, 0x001FF800, PPC_FLOAT)
2041 {
2042 if (unlikely(!ctx->fpu_enabled)) {
2043 GEN_EXCP_NO_FP(ctx);
2044 return;
2045 }
2046 gen_optimize_fprf();
2047 gen_reset_fpstatus();
2048 gen_op_load_fpscr_FT0();
2049 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
2050 gen_compute_fprf(0, Rc(ctx->opcode) != 0);
2051 }
2052
2053 /* mtfsb0 */
2054 GEN_HANDLER(mtfsb0, 0x3F, 0x06, 0x02, 0x001FF800, PPC_FLOAT)
2055 {
2056 uint8_t crb;
2057
2058 if (unlikely(!ctx->fpu_enabled)) {
2059 GEN_EXCP_NO_FP(ctx);
2060 return;
2061 }
2062 crb = 32 - (crbD(ctx->opcode) >> 2);
2063 gen_optimize_fprf();
2064 gen_reset_fpstatus();
2065 if (likely(crb != 30 && crb != 29))
2066 gen_op_fpscr_resetbit(~(1 << crb));
2067 if (unlikely(Rc(ctx->opcode) != 0)) {
2068 gen_op_load_fpcc();
2069 gen_op_set_Rc0();
2070 }
2071 }
2072
2073 /* mtfsb1 */
2074 GEN_HANDLER(mtfsb1, 0x3F, 0x06, 0x01, 0x001FF800, PPC_FLOAT)
2075 {
2076 uint8_t crb;
2077
2078 if (unlikely(!ctx->fpu_enabled)) {
2079 GEN_EXCP_NO_FP(ctx);
2080 return;
2081 }
2082 crb = 32 - (crbD(ctx->opcode) >> 2);
2083 gen_optimize_fprf();
2084 gen_reset_fpstatus();
2085 /* XXX: we pretend we can only do IEEE floating-point computations */
2086 if (likely(crb != FPSCR_FEX && crb != FPSCR_VX && crb != FPSCR_NI))
2087 gen_op_fpscr_setbit(crb);
2088 if (unlikely(Rc(ctx->opcode) != 0)) {
2089 gen_op_load_fpcc();
2090 gen_op_set_Rc0();
2091 }
2092 /* We can raise a deferred exception */
2093 gen_op_float_check_status();
2094 }
2095
2096 /* mtfsf */
2097 GEN_HANDLER(mtfsf, 0x3F, 0x07, 0x16, 0x02010000, PPC_FLOAT)
2098 {
2099 if (unlikely(!ctx->fpu_enabled)) {
2100 GEN_EXCP_NO_FP(ctx);
2101 return;
2102 }
2103 gen_optimize_fprf();
2104 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rB(ctx->opcode)]);
2105 gen_reset_fpstatus();
2106 gen_op_store_fpscr(FM(ctx->opcode));
2107 if (unlikely(Rc(ctx->opcode) != 0)) {
2108 gen_op_load_fpcc();
2109 gen_op_set_Rc0();
2110 }
2111 /* We can raise a deferred exception */
2112 gen_op_float_check_status();
2113 }
2114
2115 /* mtfsfi */
2116 GEN_HANDLER(mtfsfi, 0x3F, 0x06, 0x04, 0x006f0800, PPC_FLOAT)
2117 {
2118 int bf, sh;
2119
2120 if (unlikely(!ctx->fpu_enabled)) {
2121 GEN_EXCP_NO_FP(ctx);
2122 return;
2123 }
2124 bf = crbD(ctx->opcode) >> 2;
2125 sh = 7 - bf;
2126 gen_optimize_fprf();
2127 gen_op_set_FT0(FPIMM(ctx->opcode) << (4 * sh));
2128 gen_reset_fpstatus();
2129 gen_op_store_fpscr(1 << sh);
2130 if (unlikely(Rc(ctx->opcode) != 0)) {
2131 gen_op_load_fpcc();
2132 gen_op_set_Rc0();
2133 }
2134 /* We can raise a deferred exception */
2135 gen_op_float_check_status();
2136 }
2137
2138 /*** Addressing modes ***/
2139 /* Register indirect with immediate index: EA = (rA|0) + SIMM */
2140 static always_inline void gen_addr_imm_index (DisasContext *ctx,
2141 target_long maskl)
2142 {
2143 target_long simm = SIMM(ctx->opcode);
2144
2145 simm &= ~maskl;
2146 if (rA(ctx->opcode) == 0) {
2147 tcg_gen_movi_tl(cpu_T[0], simm);
2148 } else {
2149 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
2150 if (likely(simm != 0))
2151 gen_op_addi(simm);
2152 }
2153 #ifdef DEBUG_MEMORY_ACCESSES
2154 gen_op_print_mem_EA();
2155 #endif
2156 }
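/* Worked example (illustrative only): for "lwz r3,8(r0)" rA is 0, so T0 is
 * loaded with the immediate 8 directly; for "lwz r3,8(r4)" T0 gets r4 and the
 * displacement 8 is then added.  The maskl argument strips the low immediate
 * bits that DS-form instructions reuse as opcode bits (e.g. 0x03 for ld/std).
 */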
2157
2158 static always_inline void gen_addr_reg_index (DisasContext *ctx)
2159 {
2160 if (rA(ctx->opcode) == 0) {
2161 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
2162 } else {
2163 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
2164 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
2165 gen_op_add();
2166 }
2167 #ifdef DEBUG_MEMORY_ACCESSES
2168 gen_op_print_mem_EA();
2169 #endif
2170 }
2171
2172 static always_inline void gen_addr_register (DisasContext *ctx)
2173 {
2174 if (rA(ctx->opcode) == 0) {
2175 tcg_gen_movi_tl(cpu_T[0], 0);
2176 } else {
2177 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
2178 }
2179 #ifdef DEBUG_MEMORY_ACCESSES
2180 gen_op_print_mem_EA();
2181 #endif
2182 }
2183
2184 #if defined(TARGET_PPC64)
2185 #define _GEN_MEM_FUNCS(name, mode) \
2186 &gen_op_##name##_##mode, \
2187 &gen_op_##name##_le_##mode, \
2188 &gen_op_##name##_64_##mode, \
2189 &gen_op_##name##_le_64_##mode
2190 #else
2191 #define _GEN_MEM_FUNCS(name, mode) \
2192 &gen_op_##name##_##mode, \
2193 &gen_op_##name##_le_##mode
2194 #endif
2195 #if defined(CONFIG_USER_ONLY)
2196 #if defined(TARGET_PPC64)
2197 #define NB_MEM_FUNCS 4
2198 #else
2199 #define NB_MEM_FUNCS 2
2200 #endif
2201 #define GEN_MEM_FUNCS(name) \
2202 _GEN_MEM_FUNCS(name, raw)
2203 #else
2204 #if defined(TARGET_PPC64)
2205 #define NB_MEM_FUNCS 12
2206 #else
2207 #define NB_MEM_FUNCS 6
2208 #endif
2209 #define GEN_MEM_FUNCS(name) \
2210 _GEN_MEM_FUNCS(name, user), \
2211 _GEN_MEM_FUNCS(name, kernel), \
2212 _GEN_MEM_FUNCS(name, hypv)
2213 #endif
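/* The tables built from GEN_MEM_FUNCS are indexed by ctx->mem_idx.  Reading
 * the macros above, the softmmu 64-bit layout appears to be
 * index = 4 * privilege (user/kernel/hypv) + 2 * (64-bit mode) + little-endian,
 * while user-only builds collapse to the two raw entries; this is an
 * interpretation of the expansion order, not a separately documented contract.
 */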
2214
2215 /*** Integer load ***/
2216 #define op_ldst(name) (*gen_op_##name[ctx->mem_idx])()
2217 /* Byte access routines are endian safe */
2218 #define gen_op_lbz_le_raw gen_op_lbz_raw
2219 #define gen_op_lbz_le_user gen_op_lbz_user
2220 #define gen_op_lbz_le_kernel gen_op_lbz_kernel
2221 #define gen_op_lbz_le_hypv gen_op_lbz_hypv
2222 #define gen_op_lbz_le_64_raw gen_op_lbz_64_raw
2223 #define gen_op_lbz_le_64_user gen_op_lbz_64_user
2224 #define gen_op_lbz_le_64_kernel gen_op_lbz_64_kernel
2225 #define gen_op_lbz_le_64_hypv gen_op_lbz_64_hypv
2226 #define gen_op_stb_le_raw gen_op_stb_raw
2227 #define gen_op_stb_le_user gen_op_stb_user
2228 #define gen_op_stb_le_kernel gen_op_stb_kernel
2229 #define gen_op_stb_le_hypv gen_op_stb_hypv
2230 #define gen_op_stb_le_64_raw gen_op_stb_64_raw
2231 #define gen_op_stb_le_64_user gen_op_stb_64_user
2232 #define gen_op_stb_le_64_kernel gen_op_stb_64_kernel
2233 #define gen_op_stb_le_64_hypv gen_op_stb_64_hypv
2234 #define OP_LD_TABLE(width) \
2235 static GenOpFunc *gen_op_l##width[NB_MEM_FUNCS] = { \
2236 GEN_MEM_FUNCS(l##width), \
2237 };
2238 #define OP_ST_TABLE(width) \
2239 static GenOpFunc *gen_op_st##width[NB_MEM_FUNCS] = { \
2240 GEN_MEM_FUNCS(st##width), \
2241 };
2242
2243 #define GEN_LD(width, opc, type) \
2244 GEN_HANDLER(l##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2245 { \
2246 gen_addr_imm_index(ctx, 0); \
2247 op_ldst(l##width); \
2248 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); \
2249 }
2250
2251 #define GEN_LDU(width, opc, type) \
2252 GEN_HANDLER(l##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2253 { \
2254 if (unlikely(rA(ctx->opcode) == 0 || \
2255 rA(ctx->opcode) == rD(ctx->opcode))) { \
2256 GEN_EXCP_INVAL(ctx); \
2257 return; \
2258 } \
2259 if (type == PPC_64B) \
2260 gen_addr_imm_index(ctx, 0x03); \
2261 else \
2262 gen_addr_imm_index(ctx, 0); \
2263 op_ldst(l##width); \
2264 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); \
2265 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2266 }
2267
2268 #define GEN_LDUX(width, opc2, opc3, type) \
2269 GEN_HANDLER(l##width##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2270 { \
2271 if (unlikely(rA(ctx->opcode) == 0 || \
2272 rA(ctx->opcode) == rD(ctx->opcode))) { \
2273 GEN_EXCP_INVAL(ctx); \
2274 return; \
2275 } \
2276 gen_addr_reg_index(ctx); \
2277 op_ldst(l##width); \
2278 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); \
2279 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2280 }
2281
2282 #define GEN_LDX(width, opc2, opc3, type) \
2283 GEN_HANDLER(l##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2284 { \
2285 gen_addr_reg_index(ctx); \
2286 op_ldst(l##width); \
2287 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]); \
2288 }
2289
2290 #define GEN_LDS(width, op, type) \
2291 OP_LD_TABLE(width); \
2292 GEN_LD(width, op | 0x20, type); \
2293 GEN_LDU(width, op | 0x21, type); \
2294 GEN_LDUX(width, 0x17, op | 0x01, type); \
2295 GEN_LDX(width, 0x17, op | 0x00, type)
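/* Illustrative expansion (a sketch): GEN_LDS(bz, 0x02, PPC_INTEGER) below emits
 *   OP_LD_TABLE(bz);                       -- the mem_idx dispatch table
 *   GEN_LD(bz, 0x22, PPC_INTEGER);         -- lbz   (D form)
 *   GEN_LDU(bz, 0x23, PPC_INTEGER);        -- lbzu  (update form)
 *   GEN_LDUX(bz, 0x17, 0x03, PPC_INTEGER); -- lbzux (indexed update form)
 *   GEN_LDX(bz, 0x17, 0x02, PPC_INTEGER);  -- lbzx  (indexed form)
 * In every handler the loaded value lands in T1 and, for the update forms,
 * the effective address left in T0 is written back to rA.
 */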
2296
2297 /* lbz lbzu lbzux lbzx */
2298 GEN_LDS(bz, 0x02, PPC_INTEGER);
2299 /* lha lhau lhaux lhax */
2300 GEN_LDS(ha, 0x0A, PPC_INTEGER);
2301 /* lhz lhzu lhzux lhzx */
2302 GEN_LDS(hz, 0x08, PPC_INTEGER);
2303 /* lwz lwzu lwzux lwzx */
2304 GEN_LDS(wz, 0x00, PPC_INTEGER);
2305 #if defined(TARGET_PPC64)
2306 OP_LD_TABLE(wa);
2307 OP_LD_TABLE(d);
2308 /* lwaux */
2309 GEN_LDUX(wa, 0x15, 0x0B, PPC_64B);
2310 /* lwax */
2311 GEN_LDX(wa, 0x15, 0x0A, PPC_64B);
2312 /* ldux */
2313 GEN_LDUX(d, 0x15, 0x01, PPC_64B);
2314 /* ldx */
2315 GEN_LDX(d, 0x15, 0x00, PPC_64B);
2316 GEN_HANDLER(ld, 0x3A, 0xFF, 0xFF, 0x00000000, PPC_64B)
2317 {
2318 if (Rc(ctx->opcode)) {
2319 if (unlikely(rA(ctx->opcode) == 0 ||
2320 rA(ctx->opcode) == rD(ctx->opcode))) {
2321 GEN_EXCP_INVAL(ctx);
2322 return;
2323 }
2324 }
2325 gen_addr_imm_index(ctx, 0x03);
2326 if (ctx->opcode & 0x02) {
2327 /* lwa (lwau is undefined) */
2328 op_ldst(lwa);
2329 } else {
2330 /* ld - ldu */
2331 op_ldst(ld);
2332 }
2333 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
2334 if (Rc(ctx->opcode))
2335 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
2336 }
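/* Informal note on the DS form handled above: the two low bits of the
 * displacement are reused as an extended opcode, so gen_addr_imm_index(ctx,
 * 0x03) masks them out of the immediate; bit 1 selects lwa while bit 0 (read
 * via Rc here) selects the update form ldu, in which case rA == 0 or
 * rA == rD is rejected.
 */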
2337 /* lq */
2338 GEN_HANDLER(lq, 0x38, 0xFF, 0xFF, 0x00000000, PPC_64BX)
2339 {
2340 #if defined(CONFIG_USER_ONLY)
2341 GEN_EXCP_PRIVOPC(ctx);
2342 #else
2343 int ra, rd;
2344
2345 /* Restore CPU state */
2346 if (unlikely(ctx->supervisor == 0)) {
2347 GEN_EXCP_PRIVOPC(ctx);
2348 return;
2349 }
2350 ra = rA(ctx->opcode);
2351 rd = rD(ctx->opcode);
2352 if (unlikely((rd & 1) || rd == ra)) {
2353 GEN_EXCP_INVAL(ctx);
2354 return;
2355 }
2356 if (unlikely(ctx->mem_idx & 1)) {
2357 /* Little-endian mode is not handled */
2358 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2359 return;
2360 }
2361 gen_addr_imm_index(ctx, 0x0F);
2362 op_ldst(ld);
2363 tcg_gen_mov_tl(cpu_gpr[rd], cpu_T[1]);
2364 gen_op_addi(8);
2365 op_ldst(ld);
2366 tcg_gen_mov_tl(cpu_gpr[rd + 1], cpu_T[1]);
2367 #endif
2368 }
2369 #endif
2370
2371 /*** Integer store ***/
2372 #define GEN_ST(width, opc, type) \
2373 GEN_HANDLER(st##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2374 { \
2375 gen_addr_imm_index(ctx, 0); \
2376 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); \
2377 op_ldst(st##width); \
2378 }
2379
2380 #define GEN_STU(width, opc, type) \
2381 GEN_HANDLER(st##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2382 { \
2383 if (unlikely(rA(ctx->opcode) == 0)) { \
2384 GEN_EXCP_INVAL(ctx); \
2385 return; \
2386 } \
2387 if (type == PPC_64B) \
2388 gen_addr_imm_index(ctx, 0x03); \
2389 else \
2390 gen_addr_imm_index(ctx, 0); \
2391 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); \
2392 op_ldst(st##width); \
2393 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2394 }
2395
2396 #define GEN_STUX(width, opc2, opc3, type) \
2397 GEN_HANDLER(st##width##ux, 0x1F, opc2, opc3, 0x00000001, type) \
2398 { \
2399 if (unlikely(rA(ctx->opcode) == 0)) { \
2400 GEN_EXCP_INVAL(ctx); \
2401 return; \
2402 } \
2403 gen_addr_reg_index(ctx); \
2404 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); \
2405 op_ldst(st##width); \
2406 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2407 }
2408
2409 #define GEN_STX(width, opc2, opc3, type) \
2410 GEN_HANDLER(st##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2411 { \
2412 gen_addr_reg_index(ctx); \
2413 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]); \
2414 op_ldst(st##width); \
2415 }
2416
2417 #define GEN_STS(width, op, type) \
2418 OP_ST_TABLE(width); \
2419 GEN_ST(width, op | 0x20, type); \
2420 GEN_STU(width, op | 0x21, type); \
2421 GEN_STUX(width, 0x17, op | 0x01, type); \
2422 GEN_STX(width, 0x17, op | 0x00, type)
2423
2424 /* stb stbu stbux stbx */
2425 GEN_STS(b, 0x06, PPC_INTEGER);
2426 /* sth sthu sthux sthx */
2427 GEN_STS(h, 0x0C, PPC_INTEGER);
2428 /* stw stwu stwux stwx */
2429 GEN_STS(w, 0x04, PPC_INTEGER);
2430 #if defined(TARGET_PPC64)
2431 OP_ST_TABLE(d);
2432 GEN_STUX(d, 0x15, 0x05, PPC_64B);
2433 GEN_STX(d, 0x15, 0x04, PPC_64B);
2434 GEN_HANDLER(std, 0x3E, 0xFF, 0xFF, 0x00000000, PPC_64B)
2435 {
2436 int rs;
2437
2438 rs = rS(ctx->opcode);
2439 if ((ctx->opcode & 0x3) == 0x2) {
2440 #if defined(CONFIG_USER_ONLY)
2441 GEN_EXCP_PRIVOPC(ctx);
2442 #else
2443 /* stq */
2444 if (unlikely(ctx->supervisor == 0)) {
2445 GEN_EXCP_PRIVOPC(ctx);
2446 return;
2447 }
2448 if (unlikely(rs & 1)) {
2449 GEN_EXCP_INVAL(ctx);
2450 return;
2451 }
2452 if (unlikely(ctx->mem_idx & 1)) {
2453 /* Little-endian mode is not handled */
2454 GEN_EXCP(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_LE);
2455 return;
2456 }
2457 gen_addr_imm_index(ctx, 0x03);
2458 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rs]);
2459 op_ldst(std);
2460 gen_op_addi(8);
2461 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rs + 1]);
2462 op_ldst(std);
2463 #endif
2464 } else {
2465 /* std / stdu */
2466 if (Rc(ctx->opcode)) {
2467 if (unlikely(rA(ctx->opcode) == 0)) {
2468 GEN_EXCP_INVAL(ctx);
2469 return;
2470 }
2471 }
2472 gen_addr_imm_index(ctx, 0x03);
2473 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rs]);
2474 op_ldst(std);
2475 if (Rc(ctx->opcode))
2476 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
2477 }
2478 }
2479 #endif
2480 /*** Integer load and store with byte reverse ***/
2481 /* lhbrx */
2482 OP_LD_TABLE(hbr);
2483 GEN_LDX(hbr, 0x16, 0x18, PPC_INTEGER);
2484 /* lwbrx */
2485 OP_LD_TABLE(wbr);
2486 GEN_LDX(wbr, 0x16, 0x10, PPC_INTEGER);
2487 /* sthbrx */
2488 OP_ST_TABLE(hbr);
2489 GEN_STX(hbr, 0x16, 0x1C, PPC_INTEGER);
2490 /* stwbrx */
2491 OP_ST_TABLE(wbr);
2492 GEN_STX(wbr, 0x16, 0x14, PPC_INTEGER);
2493
2494 /*** Integer load and store multiple ***/
2495 #define op_ldstm(name, reg) (*gen_op_##name[ctx->mem_idx])(reg)
2496 static GenOpFunc1 *gen_op_lmw[NB_MEM_FUNCS] = {
2497 GEN_MEM_FUNCS(lmw),
2498 };
2499 static GenOpFunc1 *gen_op_stmw[NB_MEM_FUNCS] = {
2500 GEN_MEM_FUNCS(stmw),
2501 };
2502
2503 /* lmw */
2504 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
2505 {
2506 /* NIP cannot be restored if the memory exception comes from a helper */
2507 gen_update_nip(ctx, ctx->nip - 4);
2508 gen_addr_imm_index(ctx, 0);
2509 op_ldstm(lmw, rD(ctx->opcode));
2510 }
2511
2512 /* stmw */
2513 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER)
2514 {
2515 /* NIP cannot be restored if the memory exception comes from a helper */
2516 gen_update_nip(ctx, ctx->nip - 4);
2517 gen_addr_imm_index(ctx, 0);
2518 op_ldstm(stmw, rS(ctx->opcode));
2519 }
2520
2521 /*** Integer load and store strings ***/
2522 #define op_ldsts(name, start) (*gen_op_##name[ctx->mem_idx])(start)
2523 #define op_ldstsx(name, rd, ra, rb) (*gen_op_##name[ctx->mem_idx])(rd, ra, rb)
2524 /* string loads & stores are by definition endian-safe */
2525 #define gen_op_lswi_le_raw gen_op_lswi_raw
2526 #define gen_op_lswi_le_user gen_op_lswi_user
2527 #define gen_op_lswi_le_kernel gen_op_lswi_kernel
2528 #define gen_op_lswi_le_hypv gen_op_lswi_hypv
2529 #define gen_op_lswi_le_64_raw gen_op_lswi_raw
2530 #define gen_op_lswi_le_64_user gen_op_lswi_user
2531 #define gen_op_lswi_le_64_kernel gen_op_lswi_kernel
2532 #define gen_op_lswi_le_64_hypv gen_op_lswi_hypv
2533 static GenOpFunc1 *gen_op_lswi[NB_MEM_FUNCS] = {
2534 GEN_MEM_FUNCS(lswi),
2535 };
2536 #define gen_op_lswx_le_raw gen_op_lswx_raw
2537 #define gen_op_lswx_le_user gen_op_lswx_user
2538 #define gen_op_lswx_le_kernel gen_op_lswx_kernel
2539 #define gen_op_lswx_le_hypv gen_op_lswx_hypv
2540 #define gen_op_lswx_le_64_raw gen_op_lswx_raw
2541 #define gen_op_lswx_le_64_user gen_op_lswx_user
2542 #define gen_op_lswx_le_64_kernel gen_op_lswx_kernel
2543 #define gen_op_lswx_le_64_hypv gen_op_lswx_hypv
2544 static GenOpFunc3 *gen_op_lswx[NB_MEM_FUNCS] = {
2545 GEN_MEM_FUNCS(lswx),
2546 };
2547 #define gen_op_stsw_le_raw gen_op_stsw_raw
2548 #define gen_op_stsw_le_user gen_op_stsw_user
2549 #define gen_op_stsw_le_kernel gen_op_stsw_kernel
2550 #define gen_op_stsw_le_hypv gen_op_stsw_hypv
2551 #define gen_op_stsw_le_64_raw gen_op_stsw_raw
2552 #define gen_op_stsw_le_64_user gen_op_stsw_user
2553 #define gen_op_stsw_le_64_kernel gen_op_stsw_kernel
2554 #define gen_op_stsw_le_64_hypv gen_op_stsw_hypv
2555 static GenOpFunc1 *gen_op_stsw[NB_MEM_FUNCS] = {
2556 GEN_MEM_FUNCS(stsw),
2557 };
2558
2559 /* lswi */
2560 /* PowerPC32 specification says we must generate an exception if
2561 * rA is in the range of registers to be loaded.
2562 * On the other hand, IBM says this is valid, but rA won't be loaded.
2563 * For now, I'll follow the spec...
2564 */
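/* Worked example of the check below (illustrative): "lswi r5,r6,8" gives
 * nr = 2 and loads r5 and r6, so rA = r6 falls inside the loaded range and
 * the handler raises a program exception flagged POWERPC_EXCP_INVAL_LSWX,
 * following the specification quoted above.
 */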
2565 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING)
2566 {
2567 int nb = NB(ctx->opcode);
2568 int start = rD(ctx->opcode);
2569 int ra = rA(ctx->opcode);
2570 int nr;
2571
2572 if (nb == 0)
2573 nb = 32;
2574 nr = nb / 4;
2575 if (unlikely(((start + nr) > 32 &&
2576 start <= ra && (start + nr - 32) > ra) ||
2577 ((start + nr) <= 32 && start <= ra && (start + nr) > ra))) {
2578 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
2579 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_LSWX);
2580 return;
2581 }
2582 /* NIP cannot be restored if the memory exception comes from a helper */
2583 gen_update_nip(ctx, ctx->nip - 4);
2584 gen_addr_register(ctx);
2585 tcg_gen_movi_tl(cpu_T[1], nb);
2586 op_ldsts(lswi, start);
2587 }
2588
2589 /* lswx */
2590 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING)
2591 {
2592 int ra = rA(ctx->opcode);
2593 int rb = rB(ctx->opcode);
2594
2595 /* NIP cannot be restored if the memory exception comes from a helper */
2596 gen_update_nip(ctx, ctx->nip - 4);
2597 gen_addr_reg_index(ctx);
2598 if (ra == 0) {
2599 ra = rb;
2600 }
2601 gen_op_load_xer_bc();
2602 op_ldstsx(lswx, rD(ctx->opcode), ra, rb);
2603 }
2604
2605 /* stswi */
2606 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING)
2607 {
2608 int nb = NB(ctx->opcode);
2609
2610 /* NIP cannot be restored if the memory exception comes from a helper */
2611 gen_update_nip(ctx, ctx->nip - 4);
2612 gen_addr_register(ctx);
2613 if (nb == 0)
2614 nb = 32;
2615 tcg_gen_movi_tl(cpu_T[1], nb);
2616 op_ldsts(stsw, rS(ctx->opcode));
2617 }
2618
2619 /* stswx */
2620 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING)
2621 {
2622 /* NIP cannot be restored if the memory exception comes from a helper */
2623 gen_update_nip(ctx, ctx->nip - 4);
2624 gen_addr_reg_index(ctx);
2625 gen_op_load_xer_bc();
2626 op_ldsts(stsw, rS(ctx->opcode));
2627 }
2628
2629 /*** Memory synchronisation ***/
2630 /* eieio */
2631 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x03FFF801, PPC_MEM_EIEIO)
2632 {
2633 }
2634
2635 /* isync */
2636 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM)
2637 {
2638 GEN_STOP(ctx);
2639 }
2640
2641 #define op_lwarx() (*gen_op_lwarx[ctx->mem_idx])()
2642 #define op_stwcx() (*gen_op_stwcx[ctx->mem_idx])()
2643 static GenOpFunc *gen_op_lwarx[NB_MEM_FUNCS] = {
2644 GEN_MEM_FUNCS(lwarx),
2645 };
2646 static GenOpFunc *gen_op_stwcx[NB_MEM_FUNCS] = {
2647 GEN_MEM_FUNCS(stwcx),
2648 };
2649
2650 /* lwarx */
2651 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000001, PPC_RES)
2652 {
2653 /* NIP cannot be restored if the memory exception comes from a helper */
2654 gen_update_nip(ctx, ctx->nip - 4);
2655 gen_addr_reg_index(ctx);
2656 op_lwarx();
2657 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
2658 }
2659
2660 /* stwcx. */
2661 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES)
2662 {
2663 /* NIP cannot be restored if the memory exception comes from a helper */
2664 gen_update_nip(ctx, ctx->nip - 4);
2665 gen_addr_reg_index(ctx);
2666 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
2667 op_stwcx();
2668 }
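/* Architecturally, lwarx/stwcx. (and ldarx/stdcx. below) form a
 * load-reserve / store-conditional pair: lwarx records a reservation on the
 * loaded address and stwcx. performs the store, and reports success in CR0,
 * only while that reservation still holds.  The helpers behind
 * op_lwarx/op_stwcx are assumed to implement exactly this; dcbz's
 * gen_op_check_reservation() further down presumably cancels a reservation
 * overlapping the zeroed line.
 */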
2669
2670 #if defined(TARGET_PPC64)
2671 #define op_ldarx() (*gen_op_ldarx[ctx->mem_idx])()
2672 #define op_stdcx() (*gen_op_stdcx[ctx->mem_idx])()
2673 static GenOpFunc *gen_op_ldarx[NB_MEM_FUNCS] = {
2674 GEN_MEM_FUNCS(ldarx),
2675 };
2676 static GenOpFunc *gen_op_stdcx[NB_MEM_FUNCS] = {
2677 GEN_MEM_FUNCS(stdcx),
2678 };
2679
2680 /* ldarx */
2681 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000001, PPC_64B)
2682 {
2683 /* NIP cannot be restored if the memory exception comes from a helper */
2684 gen_update_nip(ctx, ctx->nip - 4);
2685 gen_addr_reg_index(ctx);
2686 op_ldarx();
2687 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[1]);
2688 }
2689
2690 /* stdcx. */
2691 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B)
2692 {
2693 /* NIP cannot be restored if the memory exception comes from a helper */
2694 gen_update_nip(ctx, ctx->nip - 4);
2695 gen_addr_reg_index(ctx);
2696 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
2697 op_stdcx();
2698 }
2699 #endif /* defined(TARGET_PPC64) */
2700
2701 /* sync */
2702 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC)
2703 {
2704 }
2705
2706 /* wait */
2707 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT)
2708 {
2709 /* Stop translation, as the CPU is supposed to sleep from now on */
2710 gen_op_wait();
2711 GEN_EXCP(ctx, EXCP_HLT, 1);
2712 }
2713
2714 /*** Floating-point load ***/
2715 #define GEN_LDF(width, opc, type) \
2716 GEN_HANDLER(l##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2717 { \
2718 if (unlikely(!ctx->fpu_enabled)) { \
2719 GEN_EXCP_NO_FP(ctx); \
2720 return; \
2721 } \
2722 gen_addr_imm_index(ctx, 0); \
2723 op_ldst(l##width); \
2724 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2725 }
2726
2727 #define GEN_LDUF(width, opc, type) \
2728 GEN_HANDLER(l##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2729 { \
2730 if (unlikely(!ctx->fpu_enabled)) { \
2731 GEN_EXCP_NO_FP(ctx); \
2732 return; \
2733 } \
2734 if (unlikely(rA(ctx->opcode) == 0)) { \
2735 GEN_EXCP_INVAL(ctx); \
2736 return; \
2737 } \
2738 gen_addr_imm_index(ctx, 0); \
2739 op_ldst(l##width); \
2740 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2741 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2742 }
2743
2744 #define GEN_LDUXF(width, opc, type) \
2745 GEN_HANDLER(l##width##ux, 0x1F, 0x17, opc, 0x00000001, type) \
2746 { \
2747 if (unlikely(!ctx->fpu_enabled)) { \
2748 GEN_EXCP_NO_FP(ctx); \
2749 return; \
2750 } \
2751 if (unlikely(rA(ctx->opcode) == 0)) { \
2752 GEN_EXCP_INVAL(ctx); \
2753 return; \
2754 } \
2755 gen_addr_reg_index(ctx); \
2756 op_ldst(l##width); \
2757 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2758 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2759 }
2760
2761 #define GEN_LDXF(width, opc2, opc3, type) \
2762 GEN_HANDLER(l##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2763 { \
2764 if (unlikely(!ctx->fpu_enabled)) { \
2765 GEN_EXCP_NO_FP(ctx); \
2766 return; \
2767 } \
2768 gen_addr_reg_index(ctx); \
2769 op_ldst(l##width); \
2770 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]); \
2771 }
2772
2773 #define GEN_LDFS(width, op, type) \
2774 OP_LD_TABLE(width); \
2775 GEN_LDF(width, op | 0x20, type); \
2776 GEN_LDUF(width, op | 0x21, type); \
2777 GEN_LDUXF(width, op | 0x01, type); \
2778 GEN_LDXF(width, 0x17, op | 0x00, type)
2779
2780 /* lfd lfdu lfdux lfdx */
2781 GEN_LDFS(fd, 0x12, PPC_FLOAT);
2782 /* lfs lfsu lfsux lfsx */
2783 GEN_LDFS(fs, 0x10, PPC_FLOAT);
2784
2785 /*** Floating-point store ***/
2786 #define GEN_STF(width, opc, type) \
2787 GEN_HANDLER(st##width, opc, 0xFF, 0xFF, 0x00000000, type) \
2788 { \
2789 if (unlikely(!ctx->fpu_enabled)) { \
2790 GEN_EXCP_NO_FP(ctx); \
2791 return; \
2792 } \
2793 gen_addr_imm_index(ctx, 0); \
2794 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
2795 op_ldst(st##width); \
2796 }
2797
2798 #define GEN_STUF(width, opc, type) \
2799 GEN_HANDLER(st##width##u, opc, 0xFF, 0xFF, 0x00000000, type) \
2800 { \
2801 if (unlikely(!ctx->fpu_enabled)) { \
2802 GEN_EXCP_NO_FP(ctx); \
2803 return; \
2804 } \
2805 if (unlikely(rA(ctx->opcode) == 0)) { \
2806 GEN_EXCP_INVAL(ctx); \
2807 return; \
2808 } \
2809 gen_addr_imm_index(ctx, 0); \
2810 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
2811 op_ldst(st##width); \
2812 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2813 }
2814
2815 #define GEN_STUXF(width, opc, type) \
2816 GEN_HANDLER(st##width##ux, 0x1F, 0x17, opc, 0x00000001, type) \
2817 { \
2818 if (unlikely(!ctx->fpu_enabled)) { \
2819 GEN_EXCP_NO_FP(ctx); \
2820 return; \
2821 } \
2822 if (unlikely(rA(ctx->opcode) == 0)) { \
2823 GEN_EXCP_INVAL(ctx); \
2824 return; \
2825 } \
2826 gen_addr_reg_index(ctx); \
2827 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
2828 op_ldst(st##width); \
2829 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]); \
2830 }
2831
2832 #define GEN_STXF(width, opc2, opc3, type) \
2833 GEN_HANDLER(st##width##x, 0x1F, opc2, opc3, 0x00000001, type) \
2834 { \
2835 if (unlikely(!ctx->fpu_enabled)) { \
2836 GEN_EXCP_NO_FP(ctx); \
2837 return; \
2838 } \
2839 gen_addr_reg_index(ctx); \
2840 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]); \
2841 op_ldst(st##width); \
2842 }
2843
2844 #define GEN_STFS(width, op, type) \
2845 OP_ST_TABLE(width); \
2846 GEN_STF(width, op | 0x20, type); \
2847 GEN_STUF(width, op | 0x21, type); \
2848 GEN_STUXF(width, op | 0x01, type); \
2849 GEN_STXF(width, 0x17, op | 0x00, type)
2850
2851 /* stfd stfdu stfdux stfdx */
2852 GEN_STFS(fd, 0x16, PPC_FLOAT);
2853 /* stfs stfsu stfsux stfsx */
2854 GEN_STFS(fs, 0x14, PPC_FLOAT);
2855
2856 /* Optional: */
2857 /* stfiwx */
2858 OP_ST_TABLE(fiw);
2859 GEN_STXF(fiw, 0x17, 0x1E, PPC_FLOAT_STFIWX);
2860
2861 /*** Branch ***/
2862 static always_inline void gen_goto_tb (DisasContext *ctx, int n,
2863 target_ulong dest)
2864 {
2865 TranslationBlock *tb;
2866 tb = ctx->tb;
2867 if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
2868 likely(!ctx->singlestep_enabled)) {
2869 tcg_gen_goto_tb(n);
2870 tcg_gen_movi_tl(cpu_T[1], dest);
2871 #if defined(TARGET_PPC64)
2872 if (ctx->sf_mode)
2873 gen_op_b_T1_64();
2874 else
2875 #endif
2876 gen_op_b_T1();
2877 tcg_gen_exit_tb((long)tb + n);
2878 } else {
2879 tcg_gen_movi_tl(cpu_T[1], dest);
2880 #if defined(TARGET_PPC64)
2881 if (ctx->sf_mode)
2882 gen_op_b_T1_64();
2883 else
2884 #endif
2885 gen_op_b_T1();
2886 if (unlikely(ctx->singlestep_enabled)) {
2887 if ((ctx->singlestep_enabled &
2888 (CPU_BRANCH_STEP | CPU_SINGLE_STEP)) &&
2889 ctx->exception == POWERPC_EXCP_BRANCH) {
2890 target_ulong tmp = ctx->nip;
2891 ctx->nip = dest;
2892 GEN_EXCP(ctx, POWERPC_EXCP_TRACE, 0);
2893 ctx->nip = tmp;
2894 }
2895 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
2896 gen_update_nip(ctx, dest);
2897 gen_op_debug();
2898 }
2899 }
2900 tcg_gen_exit_tb(0);
2901 }
2902 }
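/* Informal summary of gen_goto_tb: direct block chaining via tcg_gen_goto_tb
 * is only used when the branch target lies in the same guest page as the
 * current TB and single-stepping is disabled; otherwise the new NIP is set
 * and we exit to the main loop, which also lets branch/single-step traps and
 * gdbstub breakpoints fire at the right point.
 */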
2903
2904 static always_inline void gen_setlr (DisasContext *ctx, target_ulong nip)
2905 {
2906 #if defined(TARGET_PPC64)
2907 if (ctx->sf_mode != 0 && (nip >> 32))
2908 gen_op_setlr_64(ctx->nip >> 32, ctx->nip);
2909 else
2910 #endif
2911 gen_op_setlr(ctx->nip);
2912 }
2913
2914 /* b ba bl bla */
2915 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
2916 {
2917 target_ulong li, target;
2918
2919 ctx->exception = POWERPC_EXCP_BRANCH;
2920 /* sign extend LI */
2921 #if defined(TARGET_PPC64)
2922 if (ctx->sf_mode)
2923 li = ((int64_t)LI(ctx->opcode) << 38) >> 38;
2924 else
2925 #endif
2926 li = ((int32_t)LI(ctx->opcode) << 6) >> 6;
2927 if (likely(AA(ctx->opcode) == 0))
2928 target = ctx->nip + li - 4;
2929 else
2930 target = li;
2931 #if defined(TARGET_PPC64)
2932 if (!ctx->sf_mode)
2933 target = (uint32_t)target;
2934 #endif
2935 if (LK(ctx->opcode))
2936 gen_setlr(ctx, ctx->nip);
2937 gen_goto_tb(ctx, 0, target);
2938 }
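/* Illustrative arithmetic for the sign extension above: LI() keeps the 26-bit
 * branch displacement (including its two zero low bits), so shifting left and
 * then arithmetically right by 6 in 32-bit mode propagates bit 25 as the sign
 * bit; a raw field of 0x3FFFFFC therefore becomes -4.  The 64-bit path uses
 * 38-bit shifts for the same reason.
 */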
2939
2940 #define BCOND_IM 0
2941 #define BCOND_LR 1
2942 #define BCOND_CTR 2
2943
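/* Informal reminder of the BO decoding used in gen_bcond below: bit 0x10 set
 * means the CR bit is ignored, bit 0x08 selects branching on a true rather
 * than false CR bit, a clear 0x04 bit makes the instruction decrement CTR,
 * and bit 0x02 then chooses between CTR == 0 and CTR != 0.  For example
 * "bc 12,0,target" (BO = 0b01100) branches when CR0[LT] is set and leaves
 * CTR alone.
 */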
2944 static always_inline void gen_bcond (DisasContext *ctx, int type)
2945 {
2946 target_ulong target = 0;
2947 target_ulong li;
2948 uint32_t bo = BO(ctx->opcode);
2949 uint32_t bi = BI(ctx->opcode);
2950 uint32_t mask;
2951
2952 ctx->exception = POWERPC_EXCP_BRANCH;
2953 if ((bo & 0x4) == 0)
2954 gen_op_dec_ctr();
2955 switch(type) {
2956 case BCOND_IM:
2957 li = (target_long)((int16_t)(BD(ctx->opcode)));
2958 if (likely(AA(ctx->opcode) == 0)) {
2959 target = ctx->nip + li - 4;
2960 } else {
2961 target = li;
2962 }
2963 #if defined(TARGET_PPC64)
2964 if (!ctx->sf_mode)
2965 target = (uint32_t)target;
2966 #endif
2967 break;
2968 case BCOND_CTR:
2969 gen_op_movl_T1_ctr();
2970 break;
2971 default:
2972 case BCOND_LR:
2973 gen_op_movl_T1_lr();
2974 break;
2975 }
2976 if (LK(ctx->opcode))
2977 gen_setlr(ctx, ctx->nip);
2978 if (bo & 0x10) {
2979 /* No CR condition */
2980 switch (bo & 0x6) {
2981 case 0:
2982 #if defined(TARGET_PPC64)
2983 if (ctx->sf_mode)
2984 gen_op_test_ctr_64();
2985 else
2986 #endif
2987 gen_op_test_ctr();
2988 break;
2989 case 2:
2990 #if defined(TARGET_PPC64)
2991 if (ctx->sf_mode)
2992 gen_op_test_ctrz_64();
2993 else
2994 #endif
2995 gen_op_test_ctrz();
2996 break;
2997 default:
2998 case 4:
2999 case 6:
3000 if (type == BCOND_IM) {
3001 gen_goto_tb(ctx, 0, target);
3002 return;
3003 } else {
3004 #if defined(TARGET_PPC64)
3005 if (ctx->sf_mode)
3006 gen_op_b_T1_64();
3007 else
3008 #endif
3009 gen_op_b_T1();
3010 goto no_test;
3011 }
3012 break;
3013 }
3014 } else {
3015 mask = 1 << (3 - (bi & 0x03));
3016 gen_op_load_crf_T0(bi >> 2);
3017 if (bo & 0x8) {
3018 switch (bo & 0x6) {
3019 case 0:
3020 #if defined(TARGET_PPC64)
3021 if (ctx->sf_mode)
3022 gen_op_test_ctr_true_64(mask);
3023 else
3024 #endif
3025 gen_op_test_ctr_true(mask);
3026 break;
3027 case 2:
3028 #if defined(TARGET_PPC64)
3029 if (ctx->sf_mode)
3030 gen_op_test_ctrz_true_64(mask);
3031 else
3032 #endif
3033 gen_op_test_ctrz_true(mask);
3034 break;
3035 default:
3036 case 4:
3037 case 6:
3038 gen_op_test_true(mask);
3039 break;
3040 }
3041 } else {
3042 switch (bo & 0x6) {
3043 case 0:
3044 #if defined(TARGET_PPC64)
3045 if (ctx->sf_mode)
3046 gen_op_test_ctr_false_64(mask);
3047 else
3048 #endif
3049 gen_op_test_ctr_false(mask);
3050 break;
3051 case 2:
3052 #if defined(TARGET_PPC64)
3053 if (ctx->sf_mode)
3054 gen_op_test_ctrz_false_64(mask);
3055 else
3056 #endif
3057 gen_op_test_ctrz_false(mask);
3058 break;
3059 default:
3060 case 4:
3061 case 6:
3062 gen_op_test_false(mask);
3063 break;
3064 }
3065 }
3066 }
3067 if (type == BCOND_IM) {
3068 int l1 = gen_new_label();
3069 gen_op_jz_T0(l1);
3070 gen_goto_tb(ctx, 0, target);
3071 gen_set_label(l1);
3072 gen_goto_tb(ctx, 1, ctx->nip);
3073 } else {
3074 #if defined(TARGET_PPC64)
3075 if (ctx->sf_mode)
3076 gen_op_btest_T1_64(ctx->nip >> 32, ctx->nip);
3077 else
3078 #endif
3079 gen_op_btest_T1(ctx->nip);
3080 no_test:
3081 if (ctx->singlestep_enabled & GDBSTUB_SINGLE_STEP) {
3082 gen_update_nip(ctx, ctx->nip);
3083 gen_op_debug();
3084 }
3085 tcg_gen_exit_tb(0);
3086 }
3087 }
3088
3089 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3090 {
3091 gen_bcond(ctx, BCOND_IM);
3092 }
3093
3094 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW)
3095 {
3096 gen_bcond(ctx, BCOND_CTR);
3097 }
3098
3099 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW)
3100 {
3101 gen_bcond(ctx, BCOND_LR);
3102 }
3103
3104 /*** Condition register logical ***/
3105 #define GEN_CRLOGIC(op, opc) \
3106 GEN_HANDLER(cr##op, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) \
3107 { \
3108 uint8_t bitmask; \
3109 int sh; \
3110 gen_op_load_crf_T0(crbA(ctx->opcode) >> 2); \
3111 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \
3112 if (sh > 0) \
3113 gen_op_srli_T0(sh); \
3114 else if (sh < 0) \
3115 gen_op_sli_T0(-sh); \
3116 gen_op_load_crf_T1(crbB(ctx->opcode) >> 2); \
3117 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \
3118 if (sh > 0) \
3119 gen_op_srli_T1(sh); \
3120 else if (sh < 0) \
3121 gen_op_sli_T1(-sh); \
3122 gen_op_##op(); \
3123 bitmask = 1 << (3 - (crbD(ctx->opcode) & 0x03)); \
3124 gen_op_andi_T0(bitmask); \
3125 gen_op_load_crf_T1(crbD(ctx->opcode) >> 2); \
3126 gen_op_andi_T1(~bitmask); \
3127 gen_op_or(); \
3128 gen_op_store_T0_crf(crbD(ctx->opcode) >> 2); \
3129 }
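/* Rough walk-through of the macro above, with purely illustrative operands:
 * for "crand 4*cr1+eq, 4*cr2+so, 4*cr3+so" the source fields cr2 and cr3 are
 * loaded into T0/T1, each shifted so the named bit lines up with the
 * destination bit position, combined with the requested logical op, masked to
 * that single bit and merged back into the destination field cr1.
 */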
3130
3131 /* crand */
3132 GEN_CRLOGIC(and, 0x08);
3133 /* crandc */
3134 GEN_CRLOGIC(andc, 0x04);
3135 /* creqv */
3136 GEN_CRLOGIC(eqv, 0x09);
3137 /* crnand */
3138 GEN_CRLOGIC(nand, 0x07);
3139 /* crnor */
3140 GEN_CRLOGIC(nor, 0x01);
3141 /* cror */
3142 GEN_CRLOGIC(or, 0x0E);
3143 /* crorc */
3144 GEN_CRLOGIC(orc, 0x0D);
3145 /* crxor */
3146 GEN_CRLOGIC(xor, 0x06);
3147 /* mcrf */
3148 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER)
3149 {
3150 gen_op_load_crf_T0(crfS(ctx->opcode));
3151 gen_op_store_T0_crf(crfD(ctx->opcode));
3152 }
3153
3154 /*** System linkage ***/
3155 /* rfi (supervisor only) */
3156 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW)
3157 {
3158 #if defined(CONFIG_USER_ONLY)
3159 GEN_EXCP_PRIVOPC(ctx);
3160 #else
3161 /* Restore CPU state */
3162 if (unlikely(!ctx->supervisor)) {
3163 GEN_EXCP_PRIVOPC(ctx);
3164 return;
3165 }
3166 gen_op_rfi();
3167 GEN_SYNC(ctx);
3168 #endif
3169 }
3170
3171 #if defined(TARGET_PPC64)
3172 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B)
3173 {
3174 #if defined(CONFIG_USER_ONLY)
3175 GEN_EXCP_PRIVOPC(ctx);
3176 #else
3177 /* Restore CPU state */
3178 if (unlikely(!ctx->supervisor)) {
3179 GEN_EXCP_PRIVOPC(ctx);
3180 return;
3181 }
3182 gen_op_rfid();
3183 GEN_SYNC(ctx);
3184 #endif
3185 }
3186
3187 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H)
3188 {
3189 #if defined(CONFIG_USER_ONLY)
3190 GEN_EXCP_PRIVOPC(ctx);
3191 #else
3192 /* Restore CPU state */
3193 if (unlikely(ctx->supervisor <= 1)) {
3194 GEN_EXCP_PRIVOPC(ctx);
3195 return;
3196 }
3197 gen_op_hrfid();
3198 GEN_SYNC(ctx);
3199 #endif
3200 }
3201 #endif
3202
3203 /* sc */
3204 #if defined(CONFIG_USER_ONLY)
3205 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3206 #else
3207 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3208 #endif
3209 GEN_HANDLER(sc, 0x11, 0xFF, 0xFF, 0x03FFF01D, PPC_FLOW)
3210 {
3211 uint32_t lev;
3212
3213 lev = (ctx->opcode >> 5) & 0x7F;
3214 GEN_EXCP(ctx, POWERPC_SYSCALL, lev);
3215 }
3216
3217 /*** Trap ***/
3218 /* tw */
3219 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW)
3220 {
3221 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3222 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3223 /* Update the nip since this might generate a trap exception */
3224 gen_update_nip(ctx, ctx->nip);
3225 gen_op_tw(TO(ctx->opcode));
3226 }
3227
3228 /* twi */
3229 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW)
3230 {
3231 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3232 tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
3233 /* Update the nip since this might generate a trap exception */
3234 gen_update_nip(ctx, ctx->nip);
3235 gen_op_tw(TO(ctx->opcode));
3236 }
3237
3238 #if defined(TARGET_PPC64)
3239 /* td */
3240 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B)
3241 {
3242 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3243 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3244 /* Update the nip since this might generate a trap exception */
3245 gen_update_nip(ctx, ctx->nip);
3246 gen_op_td(TO(ctx->opcode));
3247 }
3248
3249 /* tdi */
3250 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B)
3251 {
3252 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3253 tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
3254 /* Update the nip since this might generate a trap exception */
3255 gen_update_nip(ctx, ctx->nip);
3256 gen_op_td(TO(ctx->opcode));
3257 }
3258 #endif
3259
3260 /*** Processor control ***/
3261 /* mcrxr */
3262 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC)
3263 {
3264 gen_op_load_xer_cr();
3265 gen_op_store_T0_crf(crfD(ctx->opcode));
3266 gen_op_clear_xer_ov();
3267 gen_op_clear_xer_ca();
3268 }
3269
3270 /* mfcr */
3271 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC)
3272 {
3273 uint32_t crm, crn;
3274
3275 if (likely(ctx->opcode & 0x00100000)) {
3276 crm = CRM(ctx->opcode);
3277 if (likely((crm ^ (crm - 1)) == 0)) {
3278 crn = ffs(crm);
3279 gen_op_load_cro(7 - crn);
3280 }
3281 } else {
3282 gen_op_load_cr();
3283 }
3284 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3285 }
3286
3287 /* mfmsr */
3288 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC)
3289 {
3290 #if defined(CONFIG_USER_ONLY)
3291 GEN_EXCP_PRIVREG(ctx);
3292 #else
3293 if (unlikely(!ctx->supervisor)) {
3294 GEN_EXCP_PRIVREG(ctx);
3295 return;
3296 }
3297 gen_op_load_msr();
3298 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3299 #endif
3300 }
3301
3302 #if 1
3303 #define SPR_NOACCESS ((void *)(-1UL))
3304 #else
3305 static void spr_noaccess (void *opaque, int sprn)
3306 {
3307 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
3308 printf("ERROR: try to access SPR %d !\n", sprn);
3309 }
3310 #define SPR_NOACCESS (&spr_noaccess)
3311 #endif
3312
3313 /* mfspr */
3314 static always_inline void gen_op_mfspr (DisasContext *ctx)
3315 {
3316 void (*read_cb)(void *opaque, int sprn);
3317 uint32_t sprn = SPR(ctx->opcode);
3318
3319 #if !defined(CONFIG_USER_ONLY)
3320 if (ctx->supervisor == 2)
3321 read_cb = ctx->spr_cb[sprn].hea_read;
3322 else if (ctx->supervisor)
3323 read_cb = ctx->spr_cb[sprn].oea_read;
3324 else
3325 #endif
3326 read_cb = ctx->spr_cb[sprn].uea_read;
3327 if (likely(read_cb != NULL)) {
3328 if (likely(read_cb != SPR_NOACCESS)) {
3329 (*read_cb)(ctx, sprn);
3330 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3331 } else {
3332 /* Privilege exception */
3333 /* This is a hack to avoid warnings when running Linux:
3334 * this OS breaks the PowerPC virtualisation model,
3335 * allowing userland applications to read the PVR
3336 */
3337 if (sprn != SPR_PVR) {
3338 if (loglevel != 0) {
3339 fprintf(logfile, "Trying to read privileged spr %d %03x at "
3340 ADDRX "\n", sprn, sprn, ctx->nip);
3341 }
3342 printf("Trying to read privileged spr %d %03x at " ADDRX "\n",
3343 sprn, sprn, ctx->nip);
3344 }
3345 GEN_EXCP_PRIVREG(ctx);
3346 }
3347 } else {
3348 /* Not defined */
3349 if (loglevel != 0) {
3350 fprintf(logfile, "Trying to read invalid spr %d %03x at "
3351 ADDRX "\n", sprn, sprn, ctx->nip);
3352 }
3353 printf("Trying to read invalid spr %d %03x at " ADDRX "\n",
3354 sprn, sprn, ctx->nip);
3355 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3356 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
3357 }
3358 }
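/* The callback selection above mirrors the three SPR access levels:
 * uea_read for problem state, oea_read for supervisor and hea_read for
 * hypervisor (ctx->supervisor == 2).  SPR_NOACCESS marks registers that exist
 * but are privileged at the current level, while a NULL entry means the SPR
 * is not implemented on this CPU model at all.
 */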
3359
3360 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC)
3361 {
3362 gen_op_mfspr(ctx);
3363 }
3364
3365 /* mftb */
3366 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB)
3367 {
3368 gen_op_mfspr(ctx);
3369 }
3370
3371 /* mtcrf */
3372 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC)
3373 {
3374 uint32_t crm, crn;
3375
3376 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3377 crm = CRM(ctx->opcode);
3378 if (likely((ctx->opcode & 0x00100000) || (crm ^ (crm - 1)) == 0)) {
3379 crn = ffs(crm);
3380 gen_op_srli_T0(crn * 4);
3381 gen_op_andi_T0(0xF);
3382 gen_op_store_cro(7 - crn);
3383 } else {
3384 gen_op_store_cr(crm);
3385 }
3386 }
3387
3388 /* mtmsr */
3389 #if defined(TARGET_PPC64)
3390 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B)
3391 {
3392 #if defined(CONFIG_USER_ONLY)
3393 GEN_EXCP_PRIVREG(ctx);
3394 #else
3395 if (unlikely(!ctx->supervisor)) {
3396 GEN_EXCP_PRIVREG(ctx);
3397 return;
3398 }
3399 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3400 if (ctx->opcode & 0x00010000) {
3401 /* Special form that does not need any synchronisation */
3402 gen_op_update_riee();
3403 } else {
3404 /* XXX: we need to update nip before the store:
3405 * if we enter power saving mode, we will exit the loop
3406 * directly from ppc_store_msr
3407 */
3408 gen_update_nip(ctx, ctx->nip);
3409 gen_op_store_msr();
3410 /* Must stop the translation as machine state (may have) changed */
3411 /* Note that mtmsrd is not always defined as context-synchronizing */
3412 ctx->exception = POWERPC_EXCP_STOP;
3413 }
3414 #endif
3415 }
3416 #endif
3417
3418 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001FF801, PPC_MISC)
3419 {
3420 #if defined(CONFIG_USER_ONLY)
3421 GEN_EXCP_PRIVREG(ctx);
3422 #else
3423 if (unlikely(!ctx->supervisor)) {
3424 GEN_EXCP_PRIVREG(ctx);
3425 return;
3426 }
3427 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3428 if (ctx->opcode & 0x00010000) {
3429 /* Special form that does not need any synchronisation */
3430 gen_op_update_riee();
3431 } else {
3432 /* XXX: we need to update nip before the store:
3433 * if we enter power saving mode, we will exit the loop
3434 * directly from ppc_store_msr
3435 */
3436 gen_update_nip(ctx, ctx->nip);
3437 #if defined(TARGET_PPC64)
3438 if (!ctx->sf_mode)
3439 gen_op_store_msr_32();
3440 else
3441 #endif
3442 gen_op_store_msr();
3443 /* Must stop the translation as machine state (may have) changed */
3444 /* Note that mtmsr is not always defined as context-synchronizing */
3445 ctx->exception = POWERPC_EXCP_STOP;
3446 }
3447 #endif
3448 }
3449
3450 /* mtspr */
3451 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000001, PPC_MISC)
3452 {
3453 void (*write_cb)(void *opaque, int sprn);
3454 uint32_t sprn = SPR(ctx->opcode);
3455
3456 #if !defined(CONFIG_USER_ONLY)
3457 if (ctx->supervisor == 2)
3458 write_cb = ctx->spr_cb[sprn].hea_write;
3459 else if (ctx->supervisor)
3460 write_cb = ctx->spr_cb[sprn].oea_write;
3461 else
3462 #endif
3463 write_cb = ctx->spr_cb[sprn].uea_write;
3464 if (likely(write_cb != NULL)) {
3465 if (likely(write_cb != SPR_NOACCESS)) {
3466 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3467 (*write_cb)(ctx, sprn);
3468 } else {
3469 /* Privilege exception */
3470 if (loglevel != 0) {
3471 fprintf(logfile, "Trying to write privileged spr %d %03x at "
3472 ADDRX "\n", sprn, sprn, ctx->nip);
3473 }
3474 printf("Trying to write privileged spr %d %03x at " ADDRX "\n",
3475 sprn, sprn, ctx->nip);
3476 GEN_EXCP_PRIVREG(ctx);
3477 }
3478 } else {
3479 /* Not defined */
3480 if (loglevel != 0) {
3481 fprintf(logfile, "Trying to write invalid spr %d %03x at "
3482 ADDRX "\n", sprn, sprn, ctx->nip);
3483 }
3484 printf("Trying to write invalid spr %d %03x at " ADDRX "\n",
3485 sprn, sprn, ctx->nip);
3486 GEN_EXCP(ctx, POWERPC_EXCP_PROGRAM,
3487 POWERPC_EXCP_INVAL | POWERPC_EXCP_INVAL_SPR);
3488 }
3489 }
3490
3491 /*** Cache management ***/
3492 /* dcbf */
3493 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE)
3494 {
3495 /* XXX: specification says this is treated as a load by the MMU */
3496 gen_addr_reg_index(ctx);
3497 op_ldst(lbz);
3498 }
3499
3500 /* dcbi (Supervisor only) */
3501 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE)
3502 {
3503 #if defined(CONFIG_USER_ONLY)
3504 GEN_EXCP_PRIVOPC(ctx);
3505 #else
3506 if (unlikely(!ctx->supervisor)) {
3507 GEN_EXCP_PRIVOPC(ctx);
3508 return;
3509 }
3510 gen_addr_reg_index(ctx);
3511 /* XXX: specification says this should be treated as a store by the MMU */
3512 op_ldst(lbz);
3513 op_ldst(stb);
3514 #endif
3515 }
3516
3517 /* dcbst */
3518 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE)
3519 {
3520 /* XXX: specification says this is treated as a load by the MMU */
3521 gen_addr_reg_index(ctx);
3522 op_ldst(lbz);
3523 }
3524
3525 /* dcbt */
3526 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x02000001, PPC_CACHE)
3527 {
3528 /* interpreted as no-op */
3529 /* XXX: specification says this is treated as a load by the MMU
3530 * but does not generate any exception
3531 */
3532 }
3533
3534 /* dcbtst */
3535 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x02000001, PPC_CACHE)
3536 {
3537 /* interpreted as no-op */
3538 /* XXX: specification says this is treated as a load by the MMU
3539 * but does not generate any exception
3540 */
3541 }
3542
3543 /* dcbz */
3544 #define op_dcbz(n) (*gen_op_dcbz[n][ctx->mem_idx])()
3545 static GenOpFunc *gen_op_dcbz[4][NB_MEM_FUNCS] = {
3546 /* 32-byte cache line size */
3547 {
3548 #define gen_op_dcbz_l32_le_raw gen_op_dcbz_l32_raw
3549 #define gen_op_dcbz_l32_le_user gen_op_dcbz_l32_user
3550 #define gen_op_dcbz_l32_le_kernel gen_op_dcbz_l32_kernel
3551 #define gen_op_dcbz_l32_le_hypv gen_op_dcbz_l32_hypv
3552 #define gen_op_dcbz_l32_le_64_raw gen_op_dcbz_l32_64_raw
3553 #define gen_op_dcbz_l32_le_64_user gen_op_dcbz_l32_64_user
3554 #define gen_op_dcbz_l32_le_64_kernel gen_op_dcbz_l32_64_kernel
3555 #define gen_op_dcbz_l32_le_64_hypv gen_op_dcbz_l32_64_hypv
3556 GEN_MEM_FUNCS(dcbz_l32),
3557 },
3558 /* 64-byte cache line size */
3559 {
3560 #define gen_op_dcbz_l64_le_raw gen_op_dcbz_l64_raw
3561 #define gen_op_dcbz_l64_le_user gen_op_dcbz_l64_user
3562 #define gen_op_dcbz_l64_le_kernel gen_op_dcbz_l64_kernel
3563 #define gen_op_dcbz_l64_le_hypv gen_op_dcbz_l64_hypv
3564 #define gen_op_dcbz_l64_le_64_raw gen_op_dcbz_l64_64_raw
3565 #define gen_op_dcbz_l64_le_64_user gen_op_dcbz_l64_64_user
3566 #define gen_op_dcbz_l64_le_64_kernel gen_op_dcbz_l64_64_kernel
3567 #define gen_op_dcbz_l64_le_64_hypv gen_op_dcbz_l64_64_hypv
3568 GEN_MEM_FUNCS(dcbz_l64),
3569 },
3570 /* 128-byte cache line size */
3571 {
3572 #define gen_op_dcbz_l128_le_raw gen_op_dcbz_l128_raw
3573 #define gen_op_dcbz_l128_le_user gen_op_dcbz_l128_user
3574 #define gen_op_dcbz_l128_le_kernel gen_op_dcbz_l128_kernel
3575 #define gen_op_dcbz_l128_le_hypv gen_op_dcbz_l128_hypv
3576 #define gen_op_dcbz_l128_le_64_raw gen_op_dcbz_l128_64_raw
3577 #define gen_op_dcbz_l128_le_64_user gen_op_dcbz_l128_64_user
3578 #define gen_op_dcbz_l128_le_64_kernel gen_op_dcbz_l128_64_kernel
3579 #define gen_op_dcbz_l128_le_64_hypv gen_op_dcbz_l128_64_hypv
3580 GEN_MEM_FUNCS(dcbz_l128),
3581 },
3582 /* tunable cache line size */
3583 {
3584 #define gen_op_dcbz_le_raw gen_op_dcbz_raw
3585 #define gen_op_dcbz_le_user gen_op_dcbz_user
3586 #define gen_op_dcbz_le_kernel gen_op_dcbz_kernel
3587 #define gen_op_dcbz_le_hypv gen_op_dcbz_hypv
3588 #define gen_op_dcbz_le_64_raw gen_op_dcbz_64_raw
3589 #define gen_op_dcbz_le_64_user gen_op_dcbz_64_user
3590 #define gen_op_dcbz_le_64_kernel gen_op_dcbz_64_kernel
3591 #define gen_op_dcbz_le_64_hypv gen_op_dcbz_64_hypv
3592 GEN_MEM_FUNCS(dcbz),
3593 },
3594 };
3595
3596 static always_inline void handler_dcbz (DisasContext *ctx,
3597 int dcache_line_size)
3598 {
3599 int n;
3600
3601 switch (dcache_line_size) {
3602 case 32:
3603 n = 0;
3604 break;
3605 case 64:
3606 n = 1;
3607 break;
3608 case 128:
3609 n = 2;
3610 break;
3611 default:
3612 n = 3;
3613 break;
3614 }
3615 op_dcbz(n);
3616 }
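/* Example of the dispatch above (illustrative): a CPU model with
 * dcache_line_size == 128 selects row n == 2 of gen_op_dcbz, i.e. the
 * gen_op_dcbz_l128_* helpers, and ctx->mem_idx then picks the privilege and
 * width specific entry inside that row; any other size falls back to the
 * tunable-size helpers in row 3.
 */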
3617
3618 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03E00001, PPC_CACHE_DCBZ)
3619 {
3620 gen_addr_reg_index(ctx);
3621 handler_dcbz(ctx, ctx->dcache_line_size);
3622 gen_op_check_reservation();
3623 }
3624
3625 GEN_HANDLER2(dcbz_970, "dcbz", 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZT)
3626 {
3627 gen_addr_reg_index(ctx);
3628 if (ctx->opcode & 0x00200000)
3629 handler_dcbz(ctx, ctx->dcache_line_size);
3630 else
3631 handler_dcbz(ctx, -1);
3632 gen_op_check_reservation();
3633 }
3634
3635 /* icbi */
3636 #define op_icbi() (*gen_op_icbi[ctx->mem_idx])()
3637 #define gen_op_icbi_le_raw gen_op_icbi_raw
3638 #define gen_op_icbi_le_user gen_op_icbi_user
3639 #define gen_op_icbi_le_kernel gen_op_icbi_kernel
3640 #define gen_op_icbi_le_hypv gen_op_icbi_hypv
3641 #define gen_op_icbi_le_64_raw gen_op_icbi_64_raw
3642 #define gen_op_icbi_le_64_user gen_op_icbi_64_user
3643 #define gen_op_icbi_le_64_kernel gen_op_icbi_64_kernel
3644 #define gen_op_icbi_le_64_hypv gen_op_icbi_64_hypv
3645 static GenOpFunc *gen_op_icbi[NB_MEM_FUNCS] = {
3646 GEN_MEM_FUNCS(icbi),
3647 };
3648
3649 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI)
3650 {
3651 /* NIP cannot be restored if the memory exception comes from a helper */
3652 gen_update_nip(ctx, ctx->nip - 4);
3653 gen_addr_reg_index(ctx);
3654 op_icbi();
3655 }
3656
3657 /* Optional: */
3658 /* dcba */
3659 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA)
3660 {
3661 /* interpreted as no-op */
3662 /* XXX: specification says this is treated as a store by the MMU
3663 * but does not generate any exception
3664 */
3665 }
3666
3667 /*** Segment register manipulation ***/
3668 /* Supervisor only: */
3669 /* mfsr */
3670 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT)
3671 {
3672 #if defined(CONFIG_USER_ONLY)
3673 GEN_EXCP_PRIVREG(ctx);
3674 #else
3675 if (unlikely(!ctx->supervisor)) {
3676 GEN_EXCP_PRIVREG(ctx);
3677 return;
3678 }
3679 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3680 gen_op_load_sr();
3681 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3682 #endif
3683 }
3684
3685 /* mfsrin */
3686 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT)
3687 {
3688 #if defined(CONFIG_USER_ONLY)
3689 GEN_EXCP_PRIVREG(ctx);
3690 #else
3691 if (unlikely(!ctx->supervisor)) {
3692 GEN_EXCP_PRIVREG(ctx);
3693 return;
3694 }
3695 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3696 gen_op_srli_T1(28);
3697 gen_op_load_sr();
3698 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3699 #endif
3700 }
3701
3702 /* mtsr */
3703 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT)
3704 {
3705 #if defined(CONFIG_USER_ONLY)
3706 GEN_EXCP_PRIVREG(ctx);
3707 #else
3708 if (unlikely(!ctx->supervisor)) {
3709 GEN_EXCP_PRIVREG(ctx);
3710 return;
3711 }
3712 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3713 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3714 gen_op_store_sr();
3715 #endif
3716 }
3717
3718 /* mtsrin */
3719 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT)
3720 {
3721 #if defined(CONFIG_USER_ONLY)
3722 GEN_EXCP_PRIVREG(ctx);
3723 #else
3724 if (unlikely(!ctx->supervisor)) {
3725 GEN_EXCP_PRIVREG(ctx);
3726 return;
3727 }
3728 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3729 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3730 gen_op_srli_T1(28);
3731 gen_op_store_sr();
3732 #endif
3733 }
3734
3735 #if defined(TARGET_PPC64)
3736 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
3737 /* mfsr */
3738 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B)
3739 {
3740 #if defined(CONFIG_USER_ONLY)
3741 GEN_EXCP_PRIVREG(ctx);
3742 #else
3743 if (unlikely(!ctx->supervisor)) {
3744 GEN_EXCP_PRIVREG(ctx);
3745 return;
3746 }
3747 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3748 gen_op_load_slb();
3749 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3750 #endif
3751 }
3752
3753 /* mfsrin */
3754 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
3755 PPC_SEGMENT_64B)
3756 {
3757 #if defined(CONFIG_USER_ONLY)
3758 GEN_EXCP_PRIVREG(ctx);
3759 #else
3760 if (unlikely(!ctx->supervisor)) {
3761 GEN_EXCP_PRIVREG(ctx);
3762 return;
3763 }
3764 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3765 gen_op_srli_T1(28);
3766 gen_op_load_slb();
3767 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3768 #endif
3769 }
3770
3771 /* mtsr */
3772 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B)
3773 {
3774 #if defined(CONFIG_USER_ONLY)
3775 GEN_EXCP_PRIVREG(ctx);
3776 #else
3777 if (unlikely(!ctx->supervisor)) {
3778 GEN_EXCP_PRIVREG(ctx);
3779 return;
3780 }
3781 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3782 tcg_gen_movi_tl(cpu_T[1], SR(ctx->opcode));
3783 gen_op_store_slb();
3784 #endif
3785 }
3786
3787 /* mtsrin */
3788 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
3789 PPC_SEGMENT_64B)
3790 {
3791 #if defined(CONFIG_USER_ONLY)
3792 GEN_EXCP_PRIVREG(ctx);
3793 #else
3794 if (unlikely(!ctx->supervisor)) {
3795 GEN_EXCP_PRIVREG(ctx);
3796 return;
3797 }
3798 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
3799 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3800 gen_op_srli_T1(28);
3801 gen_op_store_slb();
3802 #endif
3803 }
3804 #endif /* defined(TARGET_PPC64) */
3805
3806 /*** Lookaside buffer management ***/
3807 /* Optional & supervisor only: */
3808 /* tlbia */
3809 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA)
3810 {
3811 #if defined(CONFIG_USER_ONLY)
3812 GEN_EXCP_PRIVOPC(ctx);
3813 #else
3814 if (unlikely(!ctx->supervisor)) {
3815 GEN_EXCP_PRIVOPC(ctx);
3816 return;
3817 }
3818 gen_op_tlbia();
3819 #endif
3820 }
3821
3822 /* tlbie */
3823 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x03FF0001, PPC_MEM_TLBIE)
3824 {
3825 #if defined(CONFIG_USER_ONLY)
3826 GEN_EXCP_PRIVOPC(ctx);
3827 #else
3828 if (unlikely(!ctx->supervisor)) {
3829 GEN_EXCP_PRIVOPC(ctx);
3830 return;
3831 }
3832 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
3833 #if defined(TARGET_PPC64)
3834 if (ctx->sf_mode)
3835 gen_op_tlbie_64();
3836 else
3837 #endif
3838 gen_op_tlbie();
3839 #endif
3840 }
3841
3842 /* tlbsync */
3843 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC)
3844 {
3845 #if defined(CONFIG_USER_ONLY)
3846 GEN_EXCP_PRIVOPC(ctx);
3847 #else
3848 if (unlikely(!ctx->supervisor)) {
3849 GEN_EXCP_PRIVOPC(ctx);
3850 return;
3851 }
3852 /* This has no effect: it should ensure that all previous
3853 * tlbie operations have completed
3854 */
3855 GEN_STOP(ctx);
3856 #endif
3857 }
3858
3859 #if defined(TARGET_PPC64)
3860 /* slbia */
3861 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x03FFFC01, PPC_SLBI)
3862 {
3863 #if defined(CONFIG_USER_ONLY)
3864 GEN_EXCP_PRIVOPC(ctx);
3865 #else
3866 if (unlikely(!ctx->supervisor)) {
3867 GEN_EXCP_PRIVOPC(ctx);
3868 return;
3869 }
3870 gen_op_slbia();
3871 #endif
3872 }
3873
3874 /* slbie */
3875 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI)
3876 {
3877 #if defined(CONFIG_USER_ONLY)
3878 GEN_EXCP_PRIVOPC(ctx);
3879 #else
3880 if (unlikely(!ctx->supervisor)) {
3881 GEN_EXCP_PRIVOPC(ctx);
3882 return;
3883 }
3884 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
3885 gen_op_slbie();
3886 #endif
3887 }
3888 #endif
3889
3890 /*** External control ***/
3891 /* Optional: */
3892 #define op_eciwx() (*gen_op_eciwx[ctx->mem_idx])()
3893 #define op_ecowx() (*gen_op_ecowx[ctx->mem_idx])()
3894 static GenOpFunc *gen_op_eciwx[NB_MEM_FUNCS] = {
3895 GEN_MEM_FUNCS(eciwx),
3896 };
3897 static GenOpFunc *gen_op_ecowx[NB_MEM_FUNCS] = {
3898 GEN_MEM_FUNCS(ecowx),
3899 };
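/* These NB_MEM_FUNCS-sized tables are filled by GEN_MEM_FUNCS with one
 * micro-op per access mode (raw/user/kernel/hypv, plus little-endian and
 * 64-bit variants, judging by the aliases used further down in this file);
 * ctx->mem_idx picks the matching entry at translation time, which is all
 * the op_*() wrappers above do.
 */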
3900
3901 /* eciwx */
3902 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN)
3903 {
3904 /* Should check EAR[E] & alignment ! */
3905 gen_addr_reg_index(ctx);
3906 op_eciwx();
3907 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3908 }
3909
3910 /* ecowx */
3911 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN)
3912 {
3913 /* Should check EAR[E] & alignment ! */
3914 gen_addr_reg_index(ctx);
3915 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
3916 op_ecowx();
3917 }
3918
3919 /* PowerPC 601 specific instructions */
3920 /* abs - abs. */
3921 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR)
3922 {
3923 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3924 gen_op_POWER_abs();
3925 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3926 if (unlikely(Rc(ctx->opcode) != 0))
3927 gen_set_Rc0(ctx);
3928 }
3929
3930 /* abso - abso. */
3931 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR)
3932 {
3933 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3934 gen_op_POWER_abso();
3935 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3936 if (unlikely(Rc(ctx->opcode) != 0))
3937 gen_set_Rc0(ctx);
3938 }
3939
3940 /* clcs */
3941 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR)
3942 {
3943 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3944 gen_op_POWER_clcs();
3945 /* Rc=1 sets CR0 to an undefined state */
3946 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3947 }
3948
3949 /* div - div. */
3950 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR)
3951 {
3952 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3953 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3954 gen_op_POWER_div();
3955 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3956 if (unlikely(Rc(ctx->opcode) != 0))
3957 gen_set_Rc0(ctx);
3958 }
3959
3960 /* divo - divo. */
3961 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR)
3962 {
3963 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3964 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3965 gen_op_POWER_divo();
3966 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3967 if (unlikely(Rc(ctx->opcode) != 0))
3968 gen_set_Rc0(ctx);
3969 }
3970
3971 /* divs - divs. */
3972 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR)
3973 {
3974 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3975 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3976 gen_op_POWER_divs();
3977 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3978 if (unlikely(Rc(ctx->opcode) != 0))
3979 gen_set_Rc0(ctx);
3980 }
3981
3982 /* divso - divso. */
3983 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR)
3984 {
3985 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3986 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3987 gen_op_POWER_divso();
3988 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
3989 if (unlikely(Rc(ctx->opcode) != 0))
3990 gen_set_Rc0(ctx);
3991 }
3992
3993 /* doz - doz. */
3994 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR)
3995 {
3996 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
3997 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
3998 gen_op_POWER_doz();
3999 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4000 if (unlikely(Rc(ctx->opcode) != 0))
4001 gen_set_Rc0(ctx);
4002 }
4003
4004 /* dozo - dozo. */
4005 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR)
4006 {
4007 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4008 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4009 gen_op_POWER_dozo();
4010 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4011 if (unlikely(Rc(ctx->opcode) != 0))
4012 gen_set_Rc0(ctx);
4013 }
4014
4015 /* dozi */
4016 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
4017 {
4018 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4019 tcg_gen_movi_tl(cpu_T[1], SIMM(ctx->opcode));
4020 gen_op_POWER_doz();
4021 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4022 }
4023
4024 /* As lscbx loads from memory byte after byte, it is always endian-safe.
4025 * The original POWER is 32-bit only, so define the 64-bit ops as the 32-bit ones
4026 */
4027 #define op_POWER_lscbx(start, ra, rb) \
4028 (*gen_op_POWER_lscbx[ctx->mem_idx])(start, ra, rb)
4029 #define gen_op_POWER_lscbx_64_raw gen_op_POWER_lscbx_raw
4030 #define gen_op_POWER_lscbx_64_user gen_op_POWER_lscbx_user
4031 #define gen_op_POWER_lscbx_64_kernel gen_op_POWER_lscbx_kernel
4032 #define gen_op_POWER_lscbx_64_hypv gen_op_POWER_lscbx_hypv
4033 #define gen_op_POWER_lscbx_le_raw gen_op_POWER_lscbx_raw
4034 #define gen_op_POWER_lscbx_le_user gen_op_POWER_lscbx_user
4035 #define gen_op_POWER_lscbx_le_kernel gen_op_POWER_lscbx_kernel
4036 #define gen_op_POWER_lscbx_le_hypv gen_op_POWER_lscbx_hypv
4037 #define gen_op_POWER_lscbx_le_64_raw gen_op_POWER_lscbx_raw
4038 #define gen_op_POWER_lscbx_le_64_user gen_op_POWER_lscbx_user
4039 #define gen_op_POWER_lscbx_le_64_kernel gen_op_POWER_lscbx_kernel
4040 #define gen_op_POWER_lscbx_le_64_hypv gen_op_POWER_lscbx_hypv
4041 static GenOpFunc3 *gen_op_POWER_lscbx[NB_MEM_FUNCS] = {
4042 GEN_MEM_FUNCS(POWER_lscbx),
4043 };
4044
4045 /* lscbx - lscbx. */
4046 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR)
4047 {
4048 int ra = rA(ctx->opcode);
4049 int rb = rB(ctx->opcode);
4050
4051 gen_addr_reg_index(ctx);
4052 if (ra == 0) {
4053 ra = rb;
4054 }
4055 /* NIP cannot be restored if the memory exception comes from a helper */
4056 gen_update_nip(ctx, ctx->nip - 4);
4057 gen_op_load_xer_bc();
4058 gen_op_load_xer_cmp();
4059 op_POWER_lscbx(rD(ctx->opcode), ra, rb);
4060 gen_op_store_xer_bc();
4061 if (unlikely(Rc(ctx->opcode) != 0))
4062 gen_set_Rc0(ctx);
4063 }
4064
4065 /* maskg - maskg. */
4066 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR)
4067 {
4068 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4069 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4070 gen_op_POWER_maskg();
4071 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4072 if (unlikely(Rc(ctx->opcode) != 0))
4073 gen_set_Rc0(ctx);
4074 }
4075
4076 /* maskir - maskir. */
4077 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR)
4078 {
4079 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4080 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4081 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
4082 gen_op_POWER_maskir();
4083 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4084 if (unlikely(Rc(ctx->opcode) != 0))
4085 gen_set_Rc0(ctx);
4086 }
4087
4088 /* mul - mul. */
4089 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR)
4090 {
4091 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4092 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4093 gen_op_POWER_mul();
4094 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4095 if (unlikely(Rc(ctx->opcode) != 0))
4096 gen_set_Rc0(ctx);
4097 }
4098
4099 /* mulo - mulo. */
4100 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR)
4101 {
4102 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4103 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4104 gen_op_POWER_mulo();
4105 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4106 if (unlikely(Rc(ctx->opcode) != 0))
4107 gen_set_Rc0(ctx);
4108 }
4109
4110 /* nabs - nabs. */
4111 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR)
4112 {
4113 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4114 gen_op_POWER_nabs();
4115 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4116 if (unlikely(Rc(ctx->opcode) != 0))
4117 gen_set_Rc0(ctx);
4118 }
4119
4120 /* nabso - nabso. */
4121 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR)
4122 {
4123 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4124 gen_op_POWER_nabso();
4125 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4126 if (unlikely(Rc(ctx->opcode) != 0))
4127 gen_set_Rc0(ctx);
4128 }
4129
4130 /* rlmi - rlmi. */
4131 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR)
4132 {
4133 uint32_t mb, me;
4134
4135 mb = MB(ctx->opcode);
4136 me = ME(ctx->opcode);
4137 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4138 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
4139 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
4140 gen_op_POWER_rlmi(MASK(mb, me), ~MASK(mb, me));
4141 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4142 if (unlikely(Rc(ctx->opcode) != 0))
4143 gen_set_Rc0(ctx);
4144 }
4145
4146 /* rrib - rrib. */
4147 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR)
4148 {
4149 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4150 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rA(ctx->opcode)]);
4151 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rB(ctx->opcode)]);
4152 gen_op_POWER_rrib();
4153 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4154 if (unlikely(Rc(ctx->opcode) != 0))
4155 gen_set_Rc0(ctx);
4156 }
4157
4158 /* sle - sle. */
4159 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR)
4160 {
4161 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4162 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4163 gen_op_POWER_sle();
4164 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4165 if (unlikely(Rc(ctx->opcode) != 0))
4166 gen_set_Rc0(ctx);
4167 }
4168
4169 /* sleq - sleq. */
4170 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR)
4171 {
4172 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4173 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4174 gen_op_POWER_sleq();
4175 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4176 if (unlikely(Rc(ctx->opcode) != 0))
4177 gen_set_Rc0(ctx);
4178 }
4179
4180 /* sliq - sliq. */
4181 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR)
4182 {
4183 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4184 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4185 gen_op_POWER_sle();
4186 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4187 if (unlikely(Rc(ctx->opcode) != 0))
4188 gen_set_Rc0(ctx);
4189 }
4190
4191 /* slliq - slliq. */
4192 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR)
4193 {
4194 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4195 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4196 gen_op_POWER_sleq();
4197 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4198 if (unlikely(Rc(ctx->opcode) != 0))
4199 gen_set_Rc0(ctx);
4200 }
4201
4202 /* sllq - sllq. */
4203 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR)
4204 {
4205 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4206 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4207 gen_op_POWER_sllq();
4208 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4209 if (unlikely(Rc(ctx->opcode) != 0))
4210 gen_set_Rc0(ctx);
4211 }
4212
4213 /* slq - slq. */
4214 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR)
4215 {
4216 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4217 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4218 gen_op_POWER_slq();
4219 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4220 if (unlikely(Rc(ctx->opcode) != 0))
4221 gen_set_Rc0(ctx);
4222 }
4223
4224 /* sraiq - sraiq. */
4225 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR)
4226 {
4227 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4228 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4229 gen_op_POWER_sraq();
4230 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4231 if (unlikely(Rc(ctx->opcode) != 0))
4232 gen_set_Rc0(ctx);
4233 }
4234
4235 /* sraq - sraq. */
4236 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR)
4237 {
4238 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4239 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4240 gen_op_POWER_sraq();
4241 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4242 if (unlikely(Rc(ctx->opcode) != 0))
4243 gen_set_Rc0(ctx);
4244 }
4245
4246 /* sre - sre. */
4247 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR)
4248 {
4249 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4250 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4251 gen_op_POWER_sre();
4252 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4253 if (unlikely(Rc(ctx->opcode) != 0))
4254 gen_set_Rc0(ctx);
4255 }
4256
4257 /* srea - srea. */
4258 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR)
4259 {
4260 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4261 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4262 gen_op_POWER_srea();
4263 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4264 if (unlikely(Rc(ctx->opcode) != 0))
4265 gen_set_Rc0(ctx);
4266 }
4267
4268 /* sreq */
4269 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR)
4270 {
4271 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4272 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4273 gen_op_POWER_sreq();
4274 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4275 if (unlikely(Rc(ctx->opcode) != 0))
4276 gen_set_Rc0(ctx);
4277 }
4278
4279 /* sriq */
4280 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR)
4281 {
4282 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4283 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4284 gen_op_POWER_srq();
4285 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4286 if (unlikely(Rc(ctx->opcode) != 0))
4287 gen_set_Rc0(ctx);
4288 }
4289
4290 /* srliq */
4291 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR)
4292 {
4293 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4295 tcg_gen_movi_tl(cpu_T[1], SH(ctx->opcode));
4296 gen_op_POWER_srlq();
4297 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4298 if (unlikely(Rc(ctx->opcode) != 0))
4299 gen_set_Rc0(ctx);
4300 }
4301
4302 /* srlq */
4303 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR)
4304 {
4305 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4306 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4307 gen_op_POWER_srlq();
4308 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4309 if (unlikely(Rc(ctx->opcode) != 0))
4310 gen_set_Rc0(ctx);
4311 }
4312
4313 /* srq */
4314 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR)
4315 {
4316 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
4317 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
4318 gen_op_POWER_srq();
4319 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
4320 if (unlikely(Rc(ctx->opcode) != 0))
4321 gen_set_Rc0(ctx);
4322 }
4323
4324 /* PowerPC 602 specific instructions */
4325 /* dsa */
4326 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC)
4327 {
4328 /* XXX: TODO */
4329 GEN_EXCP_INVAL(ctx);
4330 }
4331
4332 /* esa */
4333 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC)
4334 {
4335 /* XXX: TODO */
4336 GEN_EXCP_INVAL(ctx);
4337 }
4338
4339 /* mfrom */
4340 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC)
4341 {
4342 #if defined(CONFIG_USER_ONLY)
4343 GEN_EXCP_PRIVOPC(ctx);
4344 #else
4345 if (unlikely(!ctx->supervisor)) {
4346 GEN_EXCP_PRIVOPC(ctx);
4347 return;
4348 }
4349 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4350 gen_op_602_mfrom();
4351 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4352 #endif
4353 }
4354
4355 /* 602 - 603 - G2 TLB management */
4356 /* tlbld */
4357 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB)
4358 {
4359 #if defined(CONFIG_USER_ONLY)
4360 GEN_EXCP_PRIVOPC(ctx);
4361 #else
4362 if (unlikely(!ctx->supervisor)) {
4363 GEN_EXCP_PRIVOPC(ctx);
4364 return;
4365 }
4366 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4367 gen_op_6xx_tlbld();
4368 #endif
4369 }
4370
4371 /* tlbli */
4372 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB)
4373 {
4374 #if defined(CONFIG_USER_ONLY)
4375 GEN_EXCP_PRIVOPC(ctx);
4376 #else
4377 if (unlikely(!ctx->supervisor)) {
4378 GEN_EXCP_PRIVOPC(ctx);
4379 return;
4380 }
4381 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4382 gen_op_6xx_tlbli();
4383 #endif
4384 }
4385
4386 /* 74xx TLB management */
4387 /* tlbld */
4388 GEN_HANDLER2(tlbld_74xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_74xx_TLB)
4389 {
4390 #if defined(CONFIG_USER_ONLY)
4391 GEN_EXCP_PRIVOPC(ctx);
4392 #else
4393 if (unlikely(!ctx->supervisor)) {
4394 GEN_EXCP_PRIVOPC(ctx);
4395 return;
4396 }
4397 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4398 gen_op_74xx_tlbld();
4399 #endif
4400 }
4401
4402 /* tlbli */
4403 GEN_HANDLER2(tlbli_74xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_74xx_TLB)
4404 {
4405 #if defined(CONFIG_USER_ONLY)
4406 GEN_EXCP_PRIVOPC(ctx);
4407 #else
4408 if (unlikely(!ctx->supervisor)) {
4409 GEN_EXCP_PRIVOPC(ctx);
4410 return;
4411 }
4412 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rB(ctx->opcode)]);
4413 gen_op_74xx_tlbli();
4414 #endif
4415 }
4416
4417 /* POWER instructions not in PowerPC 601 */
4418 /* clf */
4419 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER)
4420 {
4421 /* Cache line flush: implemented as no-op */
4422 }
4423
4424 /* cli */
4425 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER)
4426 {
4427 /* Cache line invalidate: privileged and treated as no-op */
4428 #if defined(CONFIG_USER_ONLY)
4429 GEN_EXCP_PRIVOPC(ctx);
4430 #else
4431 if (unlikely(!ctx->supervisor)) {
4432 GEN_EXCP_PRIVOPC(ctx);
4433 return;
4434 }
4435 #endif
4436 }
4437
4438 /* dclst */
4439 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER)
4440 {
4441 /* Data cache line store: treated as no-op */
4442 }
4443
4444 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER)
4445 {
4446 #if defined(CONFIG_USER_ONLY)
4447 GEN_EXCP_PRIVOPC(ctx);
4448 #else
4449 int ra = rA(ctx->opcode);
4450 int rd = rD(ctx->opcode);
4451
4452 if (unlikely(!ctx->supervisor)) {
4453 GEN_EXCP_PRIVOPC(ctx);
4454 return;
4455 }
4456 gen_addr_reg_index(ctx);
4457 gen_op_POWER_mfsri();
4458 tcg_gen_mov_tl(cpu_gpr[rd], cpu_T[0]);
4459 if (ra != 0 && ra != rd)
4460 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[1]);
4461 #endif
4462 }
4463
4464 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER)
4465 {
4466 #if defined(CONFIG_USER_ONLY)
4467 GEN_EXCP_PRIVOPC(ctx);
4468 #else
4469 if (unlikely(!ctx->supervisor)) {
4470 GEN_EXCP_PRIVOPC(ctx);
4471 return;
4472 }
4473 gen_addr_reg_index(ctx);
4474 gen_op_POWER_rac();
4475 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4476 #endif
4477 }
4478
4479 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER)
4480 {
4481 #if defined(CONFIG_USER_ONLY)
4482 GEN_EXCP_PRIVOPC(ctx);
4483 #else
4484 if (unlikely(!ctx->supervisor)) {
4485 GEN_EXCP_PRIVOPC(ctx);
4486 return;
4487 }
4488 gen_op_POWER_rfsvc();
4489 GEN_SYNC(ctx);
4490 #endif
4491 }
4492
4493 /* svc is not implemented for now */
4494
4495 /* POWER2 specific instructions */
4496 /* Quad manipulation (load/store two floats at a time) */
4497 /* The original POWER2 is 32-bit only, so define the 64-bit ops as the 32-bit ones */
4498 #define op_POWER2_lfq() (*gen_op_POWER2_lfq[ctx->mem_idx])()
4499 #define op_POWER2_stfq() (*gen_op_POWER2_stfq[ctx->mem_idx])()
4500 #define gen_op_POWER2_lfq_64_raw gen_op_POWER2_lfq_raw
4501 #define gen_op_POWER2_lfq_64_user gen_op_POWER2_lfq_user
4502 #define gen_op_POWER2_lfq_64_kernel gen_op_POWER2_lfq_kernel
4503 #define gen_op_POWER2_lfq_64_hypv gen_op_POWER2_lfq_hypv
4504 #define gen_op_POWER2_lfq_le_64_raw gen_op_POWER2_lfq_le_raw
4505 #define gen_op_POWER2_lfq_le_64_user gen_op_POWER2_lfq_le_user
4506 #define gen_op_POWER2_lfq_le_64_kernel gen_op_POWER2_lfq_le_kernel
4507 #define gen_op_POWER2_lfq_le_64_hypv gen_op_POWER2_lfq_le_hypv
4508 #define gen_op_POWER2_stfq_64_raw gen_op_POWER2_stfq_raw
4509 #define gen_op_POWER2_stfq_64_user gen_op_POWER2_stfq_user
4510 #define gen_op_POWER2_stfq_64_kernel gen_op_POWER2_stfq_kernel
4511 #define gen_op_POWER2_stfq_64_hypv gen_op_POWER2_stfq_hypv
4512 #define gen_op_POWER2_stfq_le_64_raw gen_op_POWER2_stfq_le_raw
4513 #define gen_op_POWER2_stfq_le_64_user gen_op_POWER2_stfq_le_user
4514 #define gen_op_POWER2_stfq_le_64_kernel gen_op_POWER2_stfq_le_kernel
4515 #define gen_op_POWER2_stfq_le_64_hypv gen_op_POWER2_stfq_le_hypv
4516 static GenOpFunc *gen_op_POWER2_lfq[NB_MEM_FUNCS] = {
4517 GEN_MEM_FUNCS(POWER2_lfq),
4518 };
4519 static GenOpFunc *gen_op_POWER2_stfq[NB_MEM_FUNCS] = {
4520 GEN_MEM_FUNCS(POWER2_stfq),
4521 };
4522
4523 /* lfq */
4524 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4525 {
4526 /* NIP cannot be restored if the memory exception comes from a helper */
4527 gen_update_nip(ctx, ctx->nip - 4);
4528 gen_addr_imm_index(ctx, 0);
4529 op_POWER2_lfq();
4530 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4531 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4532 }
4533
4534 /* lfqu */
4535 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4536 {
4537 int ra = rA(ctx->opcode);
4538
4539 /* NIP cannot be restored if the memory exception comes from a helper */
4540 gen_update_nip(ctx, ctx->nip - 4);
4541 gen_addr_imm_index(ctx, 0);
4542 op_POWER2_lfq();
4543 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4544 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4545 if (ra != 0)
4546 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4547 }
4548
4549 /* lfqux */
4550 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2)
4551 {
4552 int ra = rA(ctx->opcode);
4553
4554 /* NIP cannot be restored if the memory exception comes from a helper */
4555 gen_update_nip(ctx, ctx->nip - 4);
4556 gen_addr_reg_index(ctx);
4557 op_POWER2_lfq();
4558 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4559 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4560 if (ra != 0)
4561 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4562 }
4563
4564 /* lfqx */
4565 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2)
4566 {
4567 /* NIP cannot be restored if the memory exception comes from a helper */
4568 gen_update_nip(ctx, ctx->nip - 4);
4569 gen_addr_reg_index(ctx);
4570 op_POWER2_lfq();
4571 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode)], cpu_FT[0]);
4572 tcg_gen_mov_i64(cpu_fpr[rD(ctx->opcode) + 1], cpu_FT[1]);
4573 }
4574
4575 /* stfq */
4576 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4577 {
4578 /* NIP cannot be restored if the memory exception comes from a helper */
4579 gen_update_nip(ctx, ctx->nip - 4);
4580 gen_addr_imm_index(ctx, 0);
4581 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4582 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4583 op_POWER2_stfq();
4584 }
4585
4586 /* stfqu */
4587 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2)
4588 {
4589 int ra = rA(ctx->opcode);
4590
4591 /* NIP cannot be restored if the memory exception comes from a helper */
4592 gen_update_nip(ctx, ctx->nip - 4);
4593 gen_addr_imm_index(ctx, 0);
4594 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4595 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4596 op_POWER2_stfq();
4597 if (ra != 0)
4598 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4599 }
4600
4601 /* stfqux */
4602 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2)
4603 {
4604 int ra = rA(ctx->opcode);
4605
4606 /* NIP cannot be restored if the memory exception comes from a helper */
4607 gen_update_nip(ctx, ctx->nip - 4);
4608 gen_addr_reg_index(ctx);
4609 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4610 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4611 op_POWER2_stfq();
4612 if (ra != 0)
4613 tcg_gen_mov_tl(cpu_gpr[ra], cpu_T[0]);
4614 }
4615
4616 /* stfqx */
4617 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2)
4618 {
4619 /* NIP cannot be restored if the memory exception comes from a helper */
4620 gen_update_nip(ctx, ctx->nip - 4);
4621 gen_addr_reg_index(ctx);
4622 tcg_gen_mov_i64(cpu_FT[0], cpu_fpr[rS(ctx->opcode)]);
4623 tcg_gen_mov_i64(cpu_FT[1], cpu_fpr[rS(ctx->opcode) + 1]);
4624 op_POWER2_stfq();
4625 }
4626
4627 /* BookE specific instructions */
4628 /* XXX: not implemented on 440? */
4629 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI)
4630 {
4631 /* XXX: TODO */
4632 GEN_EXCP_INVAL(ctx);
4633 }
4634
4635 /* XXX: not implemented on 440? */
4636 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA)
4637 {
4638 #if defined(CONFIG_USER_ONLY)
4639 GEN_EXCP_PRIVOPC(ctx);
4640 #else
4641 if (unlikely(!ctx->supervisor)) {
4642 GEN_EXCP_PRIVOPC(ctx);
4643 return;
4644 }
4645 gen_addr_reg_index(ctx);
4646 /* Use the same micro-ops as for tlbie */
4647 #if defined(TARGET_PPC64)
4648 if (ctx->sf_mode)
4649 gen_op_tlbie_64();
4650 else
4651 #endif
4652 gen_op_tlbie();
4653 #endif
4654 }
4655
4656 /* All 405 MAC instructions are translated here */
4657 static always_inline void gen_405_mulladd_insn (DisasContext *ctx,
4658 int opc2, int opc3,
4659 int ra, int rb, int rt, int Rc)
4660 {
4661 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[ra]);
4662 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rb]);
4663 switch (opc3 & 0x0D) {
4664 case 0x05:
4665 /* macchw - macchw. - macchwo - macchwo. */
4666 /* macchws - macchws. - macchwso - macchwso. */
4667 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
4668 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
4669 /* mulchw - mulchw. */
4670 gen_op_405_mulchw();
4671 break;
4672 case 0x04:
4673 /* macchwu - macchwu. - macchwuo - macchwuo. */
4674 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
4675 /* mulchwu - mulchwu. */
4676 gen_op_405_mulchwu();
4677 break;
4678 case 0x01:
4679 /* machhw - machhw. - machhwo - machhwo. */
4680 /* machhws - machhws. - machhwso - machhwso. */
4681 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
4682 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
4683 /* mulhhw - mulhhw. */
4684 gen_op_405_mulhhw();
4685 break;
4686 case 0x00:
4687 /* machhwu - machhwu. - machhwuo - machhwuo. */
4688 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
4689 /* mulhhwu - mulhhwu. */
4690 gen_op_405_mulhhwu();
4691 break;
4692 case 0x0D:
4693 /* maclhw - maclhw. - maclhwo - maclhwo. */
4694 /* maclhws - maclhws. - maclhwso - maclhwso. */
4695 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
4696 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
4697 /* mullhw - mullhw. */
4698 gen_op_405_mullhw();
4699 break;
4700 case 0x0C:
4701 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
4702 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
4703 /* mullhwu - mullhwu. */
4704 gen_op_405_mullhwu();
4705 break;
4706 }
4707 if (opc2 & 0x02) {
4708 /* nmultiply-and-accumulate (0x0E) */
4709 gen_op_neg();
4710 }
4711 if (opc2 & 0x04) {
4712 /* (n)multiply-and-accumulate (0x0C - 0x0E) */
4713 tcg_gen_mov_tl(cpu_T[2], cpu_gpr[rt]);
4714 tcg_gen_mov_tl(cpu_T[1], cpu_T[0]);
4715 gen_op_405_add_T0_T2();
4716 }
4717 if (opc3 & 0x10) {
4718 /* Check overflow */
4719 if (opc3 & 0x01)
4720 gen_op_check_addo();
4721 else
4722 gen_op_405_check_ovu();
4723 }
4724 if (opc3 & 0x02) {
4725 /* Saturate */
4726 if (opc3 & 0x01)
4727 gen_op_405_check_sat();
4728 else
4729 gen_op_405_check_satu();
4730 }
4731 tcg_gen_mov_tl(cpu_gpr[rt], cpu_T[0]);
4732 if (unlikely(Rc != 0)) {
4733 /* Update Rc0 */
4734 gen_set_Rc0(ctx);
4735 }
4736 }
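/* Decode example, following the bit tests above: macchwso is generated
 * below with opc2 = 0x0C, opc3 = 0x17, so (opc3 & 0x0D) == 0x05 selects the
 * signed cross-half multiply, (opc2 & 0x04) accumulates the product into rT,
 * (opc3 & 0x10) with (opc3 & 0x01) enables the signed overflow check,
 * (opc3 & 0x02) with (opc3 & 0x01) requests signed saturation, and the Rc
 * argument finally updates CR0 for the dotted form.
 */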
4737
4738 #define GEN_MAC_HANDLER(name, opc2, opc3) \
4739 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) \
4740 { \
4741 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \
4742 rD(ctx->opcode), Rc(ctx->opcode)); \
4743 }
4744
4745 /* macchw - macchw. */
4746 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
4747 /* macchwo - macchwo. */
4748 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
4749 /* macchws - macchws. */
4750 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
4751 /* macchwso - macchwso. */
4752 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
4753 /* macchwsu - macchwsu. */
4754 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
4755 /* macchwsuo - macchwsuo. */
4756 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
4757 /* macchwu - macchwu. */
4758 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
4759 /* macchwuo - macchwuo. */
4760 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
4761 /* machhw - machhw. */
4762 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
4763 /* machhwo - machhwo. */
4764 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
4765 /* machhws - machhws. */
4766 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
4767 /* machhwso - machhwso. */
4768 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
4769 /* machhwsu - machhwsu. */
4770 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
4771 /* machhwsuo - machhwsuo. */
4772 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
4773 /* machhwu - machhwu. */
4774 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
4775 /* machhwuo - machhwuo. */
4776 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
4777 /* maclhw - maclhw. */
4778 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
4779 /* maclhwo - maclhwo. */
4780 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
4781 /* maclhws - maclhws. */
4782 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
4783 /* maclhwso - maclhwso. */
4784 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
4785 /* maclhwu - maclhwu. */
4786 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
4787 /* maclhwuo - maclhwuo. */
4788 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
4789 /* maclhwsu - maclhwsu. */
4790 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
4791 /* maclhwsuo - maclhwsuo. */
4792 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
4793 /* nmacchw - nmacchw. */
4794 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
4795 /* nmacchwo - nmacchwo. */
4796 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
4797 /* nmacchws - nmacchws. */
4798 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
4799 /* nmacchwso - nmacchwso. */
4800 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
4801 /* nmachhw - nmachhw. */
4802 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
4803 /* nmachhwo - nmachhwo. */
4804 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
4805 /* nmachhws - nmachhws. */
4806 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
4807 /* nmachhwso - nmachhwso. */
4808 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
4809 /* nmaclhw - nmaclhw. */
4810 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
4811 /* nmaclhwo - nmaclhwo. */
4812 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
4813 /* nmaclhws - nmaclhws. */
4814 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
4815 /* nmaclhwso - nmaclhwso. */
4816 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
4817
4818 /* mulchw - mulchw. */
4819 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
4820 /* mulchwu - mulchwu. */
4821 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
4822 /* mulhhw - mulhhw. */
4823 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
4824 /* mulhhwu - mulhhwu. */
4825 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
4826 /* mullhw - mullhw. */
4827 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
4828 /* mullhwu - mullhwu. */
4829 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
4830
4831 /* mfdcr */
4832 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR)
4833 {
4834 #if defined(CONFIG_USER_ONLY)
4835 GEN_EXCP_PRIVREG(ctx);
4836 #else
4837 uint32_t dcrn = SPR(ctx->opcode);
4838
4839 if (unlikely(!ctx->supervisor)) {
4840 GEN_EXCP_PRIVREG(ctx);
4841 return;
4842 }
4843 tcg_gen_movi_tl(cpu_T[0], dcrn);
4844 gen_op_load_dcr();
4845 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4846 #endif
4847 }
4848
4849 /* mtdcr */
4850 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR)
4851 {
4852 #if defined(CONFIG_USER_ONLY)
4853 GEN_EXCP_PRIVREG(ctx);
4854 #else
4855 uint32_t dcrn = SPR(ctx->opcode);
4856
4857 if (unlikely(!ctx->supervisor)) {
4858 GEN_EXCP_PRIVREG(ctx);
4859 return;
4860 }
4861 tcg_gen_movi_tl(cpu_T[0], dcrn);
4862 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4863 gen_op_store_dcr();
4864 #endif
4865 }
4866
4867 /* mfdcrx */
4868 /* XXX: not implemented on 440? */
4869 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX)
4870 {
4871 #if defined(CONFIG_USER_ONLY)
4872 GEN_EXCP_PRIVREG(ctx);
4873 #else
4874 if (unlikely(!ctx->supervisor)) {
4875 GEN_EXCP_PRIVREG(ctx);
4876 return;
4877 }
4878 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4879 gen_op_load_dcr();
4880 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4881 /* Note: if the Rc bit is set, CR0 is left in an undefined state */
4882 #endif
4883 }
4884
4885 /* mtdcrx */
4886 /* XXX: not implemented on 440? */
4887 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX)
4888 {
4889 #if defined(CONFIG_USER_ONLY)
4890 GEN_EXCP_PRIVREG(ctx);
4891 #else
4892 if (unlikely(!ctx->supervisor)) {
4893 GEN_EXCP_PRIVREG(ctx);
4894 return;
4895 }
4896 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4897 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4898 gen_op_store_dcr();
4899 /* Note: if the Rc bit is set, CR0 is left in an undefined state */
4900 #endif
4901 }
4902
4903 /* mfdcrux (PPC 460) : user-mode access to DCR */
4904 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX)
4905 {
4906 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4907 gen_op_load_dcr();
4908 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4909 /* Note: if the Rc bit is set, CR0 is left in an undefined state */
4910 }
4911
4912 /* mtdcrux (PPC 460) : user-mode access to DCR */
4913 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX)
4914 {
4915 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
4916 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
4917 gen_op_store_dcr();
4918 /* Note: if the Rc bit is set, CR0 is left in an undefined state */
4919 }
4920
4921 /* dccci */
4922 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON)
4923 {
4924 #if defined(CONFIG_USER_ONLY)
4925 GEN_EXCP_PRIVOPC(ctx);
4926 #else
4927 if (unlikely(!ctx->supervisor)) {
4928 GEN_EXCP_PRIVOPC(ctx);
4929 return;
4930 }
4931 /* interpreted as no-op */
4932 #endif
4933 }
4934
4935 /* dcread */
4936 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON)
4937 {
4938 #if defined(CONFIG_USER_ONLY)
4939 GEN_EXCP_PRIVOPC(ctx);
4940 #else
4941 if (unlikely(!ctx->supervisor)) {
4942 GEN_EXCP_PRIVOPC(ctx);
4943 return;
4944 }
4945 gen_addr_reg_index(ctx);
4946 op_ldst(lwz);
4947 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
4948 #endif
4949 }
4950
4951 /* icbt */
4952 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT)
4953 {
4954 /* interpreted as no-op */
4955 /* XXX: the specification says this is treated as a load by the MMU
4956 * but does not generate any exception
4957 */
4958 }
4959
4960 /* iccci */
4961 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON)
4962 {
4963 #if defined(CONFIG_USER_ONLY)
4964 GEN_EXCP_PRIVOPC(ctx);
4965 #else
4966 if (unlikely(!ctx->supervisor)) {
4967 GEN_EXCP_PRIVOPC(ctx);
4968 return;
4969 }
4970 /* interpreted as no-op */
4971 #endif
4972 }
4973
4974 /* icread */
4975 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON)
4976 {
4977 #if defined(CONFIG_USER_ONLY)
4978 GEN_EXCP_PRIVOPC(ctx);
4979 #else
4980 if (unlikely(!ctx->supervisor)) {
4981 GEN_EXCP_PRIVOPC(ctx);
4982 return;
4983 }
4984 /* interpreted as no-op */
4985 #endif
4986 }
4987
4988 /* rfci (supervisor only) */
4989 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP)
4990 {
4991 #if defined(CONFIG_USER_ONLY)
4992 GEN_EXCP_PRIVOPC(ctx);
4993 #else
4994 if (unlikely(!ctx->supervisor)) {
4995 GEN_EXCP_PRIVOPC(ctx);
4996 return;
4997 }
4998 /* Restore CPU state */
4999 gen_op_40x_rfci();
5000 GEN_SYNC(ctx);
5001 #endif
5002 }
5003
5004 GEN_HANDLER(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE)
5005 {
5006 #if defined(CONFIG_USER_ONLY)
5007 GEN_EXCP_PRIVOPC(ctx);
5008 #else
5009 if (unlikely(!ctx->supervisor)) {
5010 GEN_EXCP_PRIVOPC(ctx);
5011 return;
5012 }
5013 /* Restore CPU state */
5014 gen_op_rfci();
5015 GEN_SYNC(ctx);
5016 #endif
5017 }
5018
5019 /* BookE specific */
5020 /* XXX: not implemented on 440? */
5021 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI)
5022 {
5023 #if defined(CONFIG_USER_ONLY)
5024 GEN_EXCP_PRIVOPC(ctx);
5025 #else
5026 if (unlikely(!ctx->supervisor)) {
5027 GEN_EXCP_PRIVOPC(ctx);
5028 return;
5029 }
5030 /* Restore CPU state */
5031 gen_op_rfdi();
5032 GEN_SYNC(ctx);
5033 #endif
5034 }
5035
5036 /* XXX: not implemented on 440? */
5037 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI)
5038 {
5039 #if defined(CONFIG_USER_ONLY)
5040 GEN_EXCP_PRIVOPC(ctx);
5041 #else
5042 if (unlikely(!ctx->supervisor)) {
5043 GEN_EXCP_PRIVOPC(ctx);
5044 return;
5045 }
5046 /* Restore CPU state */
5047 gen_op_rfmci();
5048 GEN_SYNC(ctx);
5049 #endif
5050 }
5051
5052 /* TLB management - PowerPC 405 implementation */
5053 /* tlbre */
5054 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB)
5055 {
5056 #if defined(CONFIG_USER_ONLY)
5057 GEN_EXCP_PRIVOPC(ctx);
5058 #else
5059 if (unlikely(!ctx->supervisor)) {
5060 GEN_EXCP_PRIVOPC(ctx);
5061 return;
5062 }
5063 switch (rB(ctx->opcode)) {
5064 case 0:
5065 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5066 gen_op_4xx_tlbre_hi();
5067 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5068 break;
5069 case 1:
5070 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5071 gen_op_4xx_tlbre_lo();
5072 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5073 break;
5074 default:
5075 GEN_EXCP_INVAL(ctx);
5076 break;
5077 }
5078 #endif
5079 }
5080
5081 /* tlbsx - tlbsx. */
5082 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB)
5083 {
5084 #if defined(CONFIG_USER_ONLY)
5085 GEN_EXCP_PRIVOPC(ctx);
5086 #else
5087 if (unlikely(!ctx->supervisor)) {
5088 GEN_EXCP_PRIVOPC(ctx);
5089 return;
5090 }
5091 gen_addr_reg_index(ctx);
5092 gen_op_4xx_tlbsx();
5093 if (Rc(ctx->opcode))
5094 gen_op_4xx_tlbsx_check();
5095 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5096 #endif
5097 }
5098
5099 /* tlbwe */
5100 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB)
5101 {
5102 #if defined(CONFIG_USER_ONLY)
5103 GEN_EXCP_PRIVOPC(ctx);
5104 #else
5105 if (unlikely(!ctx->supervisor)) {
5106 GEN_EXCP_PRIVOPC(ctx);
5107 return;
5108 }
5109 switch (rB(ctx->opcode)) {
5110 case 0:
5111 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5112 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5113 gen_op_4xx_tlbwe_hi();
5114 break;
5115 case 1:
5116 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5117 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5118 gen_op_4xx_tlbwe_lo();
5119 break;
5120 default:
5121 GEN_EXCP_INVAL(ctx);
5122 break;
5123 }
5124 #endif
5125 }
5126
5127 /* TLB management - PowerPC 440 implementation */
5128 /* tlbre */
5129 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE)
5130 {
5131 #if defined(CONFIG_USER_ONLY)
5132 GEN_EXCP_PRIVOPC(ctx);
5133 #else
5134 if (unlikely(!ctx->supervisor)) {
5135 GEN_EXCP_PRIVOPC(ctx);
5136 return;
5137 }
5138 switch (rB(ctx->opcode)) {
5139 case 0:
5140 case 1:
5141 case 2:
5142 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5143 gen_op_440_tlbre(rB(ctx->opcode));
5144 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5145 break;
5146 default:
5147 GEN_EXCP_INVAL(ctx);
5148 break;
5149 }
5150 #endif
5151 }
5152
5153 /* tlbsx - tlbsx. */
5154 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE)
5155 {
5156 #if defined(CONFIG_USER_ONLY)
5157 GEN_EXCP_PRIVOPC(ctx);
5158 #else
5159 if (unlikely(!ctx->supervisor)) {
5160 GEN_EXCP_PRIVOPC(ctx);
5161 return;
5162 }
5163 gen_addr_reg_index(ctx);
5164 gen_op_440_tlbsx();
5165 if (Rc(ctx->opcode))
5166 gen_op_4xx_tlbsx_check();
5167 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5168 #endif
5169 }
5170
5171 /* tlbwe */
5172 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE)
5173 {
5174 #if defined(CONFIG_USER_ONLY)
5175 GEN_EXCP_PRIVOPC(ctx);
5176 #else
5177 if (unlikely(!ctx->supervisor)) {
5178 GEN_EXCP_PRIVOPC(ctx);
5179 return;
5180 }
5181 switch (rB(ctx->opcode)) {
5182 case 0:
5183 case 1:
5184 case 2:
5185 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5186 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rS(ctx->opcode)]);
5187 gen_op_440_tlbwe(rB(ctx->opcode));
5188 break;
5189 default:
5190 GEN_EXCP_INVAL(ctx);
5191 break;
5192 }
5193 #endif
5194 }
5195
5196 /* wrtee */
5197 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE)
5198 {
5199 #if defined(CONFIG_USER_ONLY)
5200 GEN_EXCP_PRIVOPC(ctx);
5201 #else
5202 if (unlikely(!ctx->supervisor)) {
5203 GEN_EXCP_PRIVOPC(ctx);
5204 return;
5205 }
5206 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rD(ctx->opcode)]);
5207 gen_op_wrte();
5208 /* Stop translation to have a chance to raise an exception
5209 * if we just set msr_ee to 1
5210 */
5211 GEN_STOP(ctx);
5212 #endif
5213 }
5214
5215 /* wrteei */
5216 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000EFC01, PPC_WRTEE)
5217 {
5218 #if defined(CONFIG_USER_ONLY)
5219 GEN_EXCP_PRIVOPC(ctx);
5220 #else
5221 if (unlikely(!ctx->supervisor)) {
5222 GEN_EXCP_PRIVOPC(ctx);
5223 return;
5224 }
5225 tcg_gen_movi_tl(cpu_T[0], ctx->opcode & 0x00010000);
5226 gen_op_wrte();
5227 /* Stop translation to have a chance to raise an exception
5228 * if we just set msr_ee to 1
5229 */
5230 GEN_STOP(ctx);
5231 #endif
5232 }
5233
5234 /* PowerPC 440 specific instructions */
5235 /* dlmzb */
5236 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC)
5237 {
5238 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rS(ctx->opcode)]);
5239 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
5240 gen_op_440_dlmzb();
5241 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], cpu_T[0]);
5242 gen_op_store_xer_bc();
5243 if (Rc(ctx->opcode)) {
5244 gen_op_440_dlmzb_update_Rc();
5245 gen_op_store_T0_crf(0);
5246 }
5247 }
5248
5249 /* mbar replaces eieio on 440 */
5250 GEN_HANDLER(mbar, 0x1F, 0x16, 0x13, 0x001FF801, PPC_BOOKE)
5251 {
5252 /* interpreted as no-op */
5253 }
5254
5255 /* msync replaces sync on 440 */
5256 GEN_HANDLER(msync, 0x1F, 0x16, 0x12, 0x03FFF801, PPC_BOOKE)
5257 {
5258 /* interpreted as no-op */
5259 }
5260
5261 /* icbt */
5262 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, PPC_BOOKE)
5263 {
5264 /* interpreted as no-op */
5265 /* XXX: the specification says this is treated as a load by the MMU
5266 * but does not generate any exception
5267 */
5268 }
5269
5270 /*** Altivec vector extension ***/
5271 /* Altivec registers moves */
5272
5273 static always_inline void gen_load_avr(int t, int reg) {
5274 tcg_gen_mov_i64(cpu_AVRh[t], cpu_avrh[reg]);
5275 tcg_gen_mov_i64(cpu_AVRl[t], cpu_avrl[reg]);
5276 }
5277
5278 static always_inline void gen_store_avr(int reg, int t) {
5279 tcg_gen_mov_i64(cpu_avrh[reg], cpu_AVRh[t]);
5280 tcg_gen_mov_i64(cpu_avrl[reg], cpu_AVRl[t]);
5281 }
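/* Each 128-bit Altivec register is kept as two 64-bit halves (cpu_avrh and
 * cpu_avrl); these helpers simply mirror a whole register into or out of the
 * AVRh/AVRl scratch pair consumed by the vector load/store micro-ops below.
 */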
5282
5283 #define op_vr_ldst(name) (*gen_op_##name[ctx->mem_idx])()
5284 #define OP_VR_LD_TABLE(name) \
5285 static GenOpFunc *gen_op_vr_l##name[NB_MEM_FUNCS] = { \
5286 GEN_MEM_FUNCS(vr_l##name), \
5287 };
5288 #define OP_VR_ST_TABLE(name) \
5289 static GenOpFunc *gen_op_vr_st##name[NB_MEM_FUNCS] = { \
5290 GEN_MEM_FUNCS(vr_st##name), \
5291 };
5292
5293 #define GEN_VR_LDX(name, opc2, opc3) \
5294 GEN_HANDLER(l##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
5295 { \
5296 if (unlikely(!ctx->altivec_enabled)) { \
5297 GEN_EXCP_NO_VR(ctx); \
5298 return; \
5299 } \
5300 gen_addr_reg_index(ctx); \
5301 op_vr_ldst(vr_l##name); \
5302 gen_store_avr(rD(ctx->opcode), 0); \
5303 }
5304
5305 #define GEN_VR_STX(name, opc2, opc3) \
5306 GEN_HANDLER(st##name, 0x1F, opc2, opc3, 0x00000001, PPC_ALTIVEC) \
5307 { \
5308 if (unlikely(!ctx->altivec_enabled)) { \
5309 GEN_EXCP_NO_VR(ctx); \
5310 return; \
5311 } \
5312 gen_addr_reg_index(ctx); \
5313 gen_load_avr(0, rS(ctx->opcode)); \
5314 op_vr_ldst(vr_st##name); \
5315 }
5316
5317 OP_VR_LD_TABLE(vx);
5318 GEN_VR_LDX(vx, 0x07, 0x03);
5319 /* As we don't emulate the cache, lvxl is strictly equivalent to lvx */
5320 #define gen_op_vr_lvxl gen_op_vr_lvx
5321 GEN_VR_LDX(vxl, 0x07, 0x0B);
5322
5323 OP_VR_ST_TABLE(vx);
5324 GEN_VR_STX(vx, 0x07, 0x07);
5325 /* As we don't emulate the cache, stvxl is strictly equivalent to stvx */
5326 #define gen_op_vr_stvxl gen_op_vr_stvx
5327 GEN_VR_STX(vxl, 0x07, 0x0F);
5328
5329 /*** SPE extension ***/
5330 /* Register moves */
5331
5332 static always_inline void gen_load_gpr64(TCGv t, int reg) {
5333 #if defined(TARGET_PPC64)
5334 tcg_gen_mov_i64(t, cpu_gpr[reg]);
5335 #else
5336 TCGv tmp = tcg_temp_local_new(TCG_TYPE_I64);
5337 tcg_gen_extu_i32_i64(t, cpu_gprh[reg]);
5338 tcg_gen_shli_i64(t, t, 32);
5339 tcg_gen_extu_i32_i64(tmp, cpu_gpr[reg]);
5340 tcg_gen_or_i64(t, t, tmp);
5341 tcg_temp_free(tmp);
5342 #endif
5343 }
5344
5345 static always_inline void gen_store_gpr64(int reg, TCGv t) {
5346 #if defined(TARGET_PPC64)
5347 tcg_gen_mov_i64(cpu_gpr[reg], t);
5348 #else
5349 TCGv tmp = tcg_temp_local_new(TCG_TYPE_I64);
5350 tcg_gen_trunc_i64_i32(cpu_gpr[reg], t);
5351 tcg_gen_shri_i64(tmp, t, 32);
5352 tcg_gen_trunc_i64_i32(cpu_gprh[reg], tmp);
5353 tcg_temp_free(tmp);
5354 #endif
5355 }
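/* On 32-bit targets the 64-bit SPE view of a GPR is assembled from the two
 * 32-bit halves: e.g. with gprh[3] = 0x01234567 and gpr[3] = 0x89abcdef,
 * gen_load_gpr64() produces 0x0123456789abcdef, and gen_store_gpr64()
 * splits a 64-bit result back the same way.  On 64-bit targets the GPR is
 * used directly.
 */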
5356
5357 #define GEN_SPE(name0, name1, opc2, opc3, inval, type) \
5358 GEN_HANDLER(name0##_##name1, 0x04, opc2, opc3, inval, type) \
5359 { \
5360 if (Rc(ctx->opcode)) \
5361 gen_##name1(ctx); \
5362 else \
5363 gen_##name0(ctx); \
5364 }
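/* For example, GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE) below
 * expands to a single handler that does:
 *     if (Rc(ctx->opcode))
 *         gen_evneg(ctx);
 *     else
 *         gen_evabs(ctx);
 * i.e. the opcode bit read by Rc() selects the second member of each pair.
 */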
5365
5366 /* Handler for undefined SPE opcodes */
5367 static always_inline void gen_speundef (DisasContext *ctx)
5368 {
5369 GEN_EXCP_INVAL(ctx);
5370 }
5371
5372 /* SPE load and stores */
5373 static always_inline void gen_addr_spe_imm_index (DisasContext *ctx, int sh)
5374 {
5375 target_long simm = rB(ctx->opcode);
5376
5377 if (rA(ctx->opcode) == 0) {
5378 tcg_gen_movi_tl(cpu_T[0], simm << sh);
5379 } else {
5380 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5381 if (likely(simm != 0))
5382 gen_op_addi(simm << sh);
5383 }
5384 }
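/* The displacement is the 5-bit rB field of the opcode scaled by the access
 * size: e.g. for evldd (GEN_SPEOP_LDST(dd, 3) below) an encoded offset of 5
 * yields rA + (5 << 3) = rA + 40, or just 40 when rA is 0.
 */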
5385
5386 #define op_spe_ldst(name) (*gen_op_##name[ctx->mem_idx])()
5387 #define OP_SPE_LD_TABLE(name) \
5388 static GenOpFunc *gen_op_spe_l##name[NB_MEM_FUNCS] = { \
5389 GEN_MEM_FUNCS(spe_l##name), \
5390 };
5391 #define OP_SPE_ST_TABLE(name) \
5392 static GenOpFunc *gen_op_spe_st##name[NB_MEM_FUNCS] = { \
5393 GEN_MEM_FUNCS(spe_st##name), \
5394 };
5395
5396 #define GEN_SPE_LD(name, sh) \
5397 static always_inline void gen_evl##name (DisasContext *ctx) \
5398 { \
5399 if (unlikely(!ctx->spe_enabled)) { \
5400 GEN_EXCP_NO_AP(ctx); \
5401 return; \
5402 } \
5403 gen_addr_spe_imm_index(ctx, sh); \
5404 op_spe_ldst(spe_l##name); \
5405 gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]); \
5406 }
5407
5408 #define GEN_SPE_LDX(name) \
5409 static always_inline void gen_evl##name##x (DisasContext *ctx) \
5410 { \
5411 if (unlikely(!ctx->spe_enabled)) { \
5412 GEN_EXCP_NO_AP(ctx); \
5413 return; \
5414 } \
5415 gen_addr_reg_index(ctx); \
5416 op_spe_ldst(spe_l##name); \
5417 gen_store_gpr64(rD(ctx->opcode), cpu_T64[1]); \
5418 }
5419
5420 #define GEN_SPEOP_LD(name, sh) \
5421 OP_SPE_LD_TABLE(name); \
5422 GEN_SPE_LD(name, sh); \
5423 GEN_SPE_LDX(name)
5424
5425 #define GEN_SPE_ST(name, sh) \
5426 static always_inline void gen_evst##name (DisasContext *ctx) \
5427 { \
5428 if (unlikely(!ctx->spe_enabled)) { \
5429 GEN_EXCP_NO_AP(ctx); \
5430 return; \
5431 } \
5432 gen_addr_spe_imm_index(ctx, sh); \
5433 gen_load_gpr64(cpu_T64[1], rS(ctx->opcode)); \
5434 op_spe_ldst(spe_st##name); \
5435 }
5436
5437 #define GEN_SPE_STX(name) \
5438 static always_inline void gen_evst##name##x (DisasContext *ctx) \
5439 { \
5440 if (unlikely(!ctx->spe_enabled)) { \
5441 GEN_EXCP_NO_AP(ctx); \
5442 return; \
5443 } \
5444 gen_addr_reg_index(ctx); \
5445 gen_load_gpr64(cpu_T64[1], rS(ctx->opcode)); \
5446 op_spe_ldst(spe_st##name); \
5447 }
5448
5449 #define GEN_SPEOP_ST(name, sh) \
5450 OP_SPE_ST_TABLE(name); \
5451 GEN_SPE_ST(name, sh); \
5452 GEN_SPE_STX(name)
5453
5454 #define GEN_SPEOP_LDST(name, sh) \
5455 GEN_SPEOP_LD(name, sh); \
5456 GEN_SPEOP_ST(name, sh)
5457
5458 /* SPE arithmetic and logic */
5459 #define GEN_SPEOP_ARITH2(name) \
5460 static always_inline void gen_##name (DisasContext *ctx) \
5461 { \
5462 if (unlikely(!ctx->spe_enabled)) { \
5463 GEN_EXCP_NO_AP(ctx); \
5464 return; \
5465 } \
5466 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5467 gen_load_gpr64(cpu_T64[1], rB(ctx->opcode)); \
5468 gen_op_##name(); \
5469 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5470 }
5471
5472 #define GEN_SPEOP_ARITH1(name) \
5473 static always_inline void gen_##name (DisasContext *ctx) \
5474 { \
5475 if (unlikely(!ctx->spe_enabled)) { \
5476 GEN_EXCP_NO_AP(ctx); \
5477 return; \
5478 } \
5479 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5480 gen_op_##name(); \
5481 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5482 }
5483
5484 #define GEN_SPEOP_COMP(name) \
5485 static always_inline void gen_##name (DisasContext *ctx) \
5486 { \
5487 if (unlikely(!ctx->spe_enabled)) { \
5488 GEN_EXCP_NO_AP(ctx); \
5489 return; \
5490 } \
5491 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5492 gen_load_gpr64(cpu_T64[1], rB(ctx->opcode)); \
5493 gen_op_##name(); \
5494 gen_op_store_T0_crf(crfD(ctx->opcode)); \
5495 }
5496
5497 /* Logical */
5498 GEN_SPEOP_ARITH2(evand);
5499 GEN_SPEOP_ARITH2(evandc);
5500 GEN_SPEOP_ARITH2(evxor);
5501 GEN_SPEOP_ARITH2(evor);
5502 GEN_SPEOP_ARITH2(evnor);
5503 GEN_SPEOP_ARITH2(eveqv);
5504 GEN_SPEOP_ARITH2(evorc);
5505 GEN_SPEOP_ARITH2(evnand);
5506 GEN_SPEOP_ARITH2(evsrwu);
5507 GEN_SPEOP_ARITH2(evsrws);
5508 GEN_SPEOP_ARITH2(evslw);
5509 GEN_SPEOP_ARITH2(evrlw);
5510 GEN_SPEOP_ARITH2(evmergehi);
5511 GEN_SPEOP_ARITH2(evmergelo);
5512 GEN_SPEOP_ARITH2(evmergehilo);
5513 GEN_SPEOP_ARITH2(evmergelohi);
5514
5515 /* Arithmetic */
5516 GEN_SPEOP_ARITH2(evaddw);
5517 GEN_SPEOP_ARITH2(evsubfw);
5518 GEN_SPEOP_ARITH1(evabs);
5519 GEN_SPEOP_ARITH1(evneg);
5520 GEN_SPEOP_ARITH1(evextsb);
5521 GEN_SPEOP_ARITH1(evextsh);
5522 GEN_SPEOP_ARITH1(evrndw);
5523 GEN_SPEOP_ARITH1(evcntlzw);
5524 GEN_SPEOP_ARITH1(evcntlsw);
5525 static always_inline void gen_brinc (DisasContext *ctx)
5526 {
5527 /* Note: brinc is usable even if SPE is disabled */
5528 tcg_gen_mov_tl(cpu_T[0], cpu_gpr[rA(ctx->opcode)]);
5529 tcg_gen_mov_tl(cpu_T[1], cpu_gpr[rB(ctx->opcode)]);
5530 gen_op_brinc();
5531 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_T[0]);
5532 }
5533
5534 #define GEN_SPEOP_ARITH_IMM2(name) \
5535 static always_inline void gen_##name##i (DisasContext *ctx) \
5536 { \
5537 if (unlikely(!ctx->spe_enabled)) { \
5538 GEN_EXCP_NO_AP(ctx); \
5539 return; \
5540 } \
5541 gen_load_gpr64(cpu_T64[0], rB(ctx->opcode)); \
5542 gen_op_splatwi_T1_64(rA(ctx->opcode)); \
5543 gen_op_##name(); \
5544 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5545 }
5546
5547 #define GEN_SPEOP_LOGIC_IMM2(name) \
5548 static always_inline void gen_##name##i (DisasContext *ctx) \
5549 { \
5550 if (unlikely(!ctx->spe_enabled)) { \
5551 GEN_EXCP_NO_AP(ctx); \
5552 return; \
5553 } \
5554 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode)); \
5555 gen_op_splatwi_T1_64(rB(ctx->opcode)); \
5556 gen_op_##name(); \
5557 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5558 }
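/* Note the asymmetry between the two macros: GEN_SPEOP_ARITH_IMM2 takes the
 * register operand from rB and splats the rA field as the immediate (used by
 * evaddiw and evsubifw), while GEN_SPEOP_LOGIC_IMM2 takes the register from
 * rA and splats the rB field (the shift/rotate counts of evslwi, evsrwiu,
 * evsrwis and evrlwi).
 */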
5559
5560 GEN_SPEOP_ARITH_IMM2(evaddw);
5561 #define gen_evaddiw gen_evaddwi
5562 GEN_SPEOP_ARITH_IMM2(evsubfw);
5563 #define gen_evsubifw gen_evsubfwi
5564 GEN_SPEOP_LOGIC_IMM2(evslw);
5565 GEN_SPEOP_LOGIC_IMM2(evsrwu);
5566 #define gen_evsrwis gen_evsrwsi
5567 GEN_SPEOP_LOGIC_IMM2(evsrws);
5568 #define gen_evsrwiu gen_evsrwui
5569 GEN_SPEOP_LOGIC_IMM2(evrlw);
5570
5571 static always_inline void gen_evsplati (DisasContext *ctx)
5572 {
5573 int32_t imm = (int32_t)(rA(ctx->opcode) << 27) >> 27;
5574
5575 gen_op_splatwi_T0_64(imm);
5576 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
5577 }
5578
5579 static always_inline void gen_evsplatfi (DisasContext *ctx)
5580 {
5581 uint32_t imm = rA(ctx->opcode) << 27;
5582
5583 gen_op_splatwi_T0_64(imm);
5584 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
5585 }
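/* The (value << 27) >> 27 on a signed 32-bit type sign-extends the 5-bit
 * immediate field, so an encoded 0x1F splats -1 into both words; evsplatfi
 * instead left-justifies the same 5 bits into the top of each 32-bit word.
 */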
5586
5587 /* Comparison */
5588 GEN_SPEOP_COMP(evcmpgtu);
5589 GEN_SPEOP_COMP(evcmpgts);
5590 GEN_SPEOP_COMP(evcmpltu);
5591 GEN_SPEOP_COMP(evcmplts);
5592 GEN_SPEOP_COMP(evcmpeq);
5593
5594 GEN_SPE(evaddw, speundef, 0x00, 0x08, 0x00000000, PPC_SPE); ////
5595 GEN_SPE(evaddiw, speundef, 0x01, 0x08, 0x00000000, PPC_SPE);
5596 GEN_SPE(evsubfw, speundef, 0x02, 0x08, 0x00000000, PPC_SPE); ////
5597 GEN_SPE(evsubifw, speundef, 0x03, 0x08, 0x00000000, PPC_SPE);
5598 GEN_SPE(evabs, evneg, 0x04, 0x08, 0x0000F800, PPC_SPE); ////
5599 GEN_SPE(evextsb, evextsh, 0x05, 0x08, 0x0000F800, PPC_SPE); ////
5600 GEN_SPE(evrndw, evcntlzw, 0x06, 0x08, 0x0000F800, PPC_SPE); ////
5601 GEN_SPE(evcntlsw, brinc, 0x07, 0x08, 0x00000000, PPC_SPE); //
5602 GEN_SPE(speundef, evand, 0x08, 0x08, 0x00000000, PPC_SPE); ////
5603 GEN_SPE(evandc, speundef, 0x09, 0x08, 0x00000000, PPC_SPE); ////
5604 GEN_SPE(evxor, evor, 0x0B, 0x08, 0x00000000, PPC_SPE); ////
5605 GEN_SPE(evnor, eveqv, 0x0C, 0x08, 0x00000000, PPC_SPE); ////
5606 GEN_SPE(speundef, evorc, 0x0D, 0x08, 0x00000000, PPC_SPE); ////
5607 GEN_SPE(evnand, speundef, 0x0F, 0x08, 0x00000000, PPC_SPE); ////
5608 GEN_SPE(evsrwu, evsrws, 0x10, 0x08, 0x00000000, PPC_SPE); ////
5609 GEN_SPE(evsrwiu, evsrwis, 0x11, 0x08, 0x00000000, PPC_SPE);
5610 GEN_SPE(evslw, speundef, 0x12, 0x08, 0x00000000, PPC_SPE); ////
5611 GEN_SPE(evslwi, speundef, 0x13, 0x08, 0x00000000, PPC_SPE);
5612 GEN_SPE(evrlw, evsplati, 0x14, 0x08, 0x00000000, PPC_SPE); //
5613 GEN_SPE(evrlwi, evsplatfi, 0x15, 0x08, 0x00000000, PPC_SPE);
5614 GEN_SPE(evmergehi, evmergelo, 0x16, 0x08, 0x00000000, PPC_SPE); ////
5615 GEN_SPE(evmergehilo, evmergelohi, 0x17, 0x08, 0x00000000, PPC_SPE); ////
5616 GEN_SPE(evcmpgtu, evcmpgts, 0x18, 0x08, 0x00600000, PPC_SPE); ////
5617 GEN_SPE(evcmpltu, evcmplts, 0x19, 0x08, 0x00600000, PPC_SPE); ////
5618 GEN_SPE(evcmpeq, speundef, 0x1A, 0x08, 0x00600000, PPC_SPE); ////
5619
5620 static always_inline void gen_evsel (DisasContext *ctx)
5621 {
5622 if (unlikely(!ctx->spe_enabled)) {
5623 GEN_EXCP_NO_AP(ctx);
5624 return;
5625 }
5626 gen_op_load_crf_T0(ctx->opcode & 0x7);
5627 gen_load_gpr64(cpu_T64[0], rA(ctx->opcode));
5628 gen_load_gpr64(cpu_T64[1], rB(ctx->opcode));
5629 gen_op_evsel();
5630 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]);
5631 }
5632
5633 GEN_HANDLER2(evsel0, "evsel", 0x04, 0x1c, 0x09, 0x00000000, PPC_SPE)
5634 {
5635 gen_evsel(ctx);
5636 }
5637 GEN_HANDLER2(evsel1, "evsel", 0x04, 0x1d, 0x09, 0x00000000, PPC_SPE)
5638 {
5639 gen_evsel(ctx);
5640 }
5641 GEN_HANDLER2(evsel2, "evsel", 0x04, 0x1e, 0x09, 0x00000000, PPC_SPE)
5642 {
5643 gen_evsel(ctx);
5644 }
5645 GEN_HANDLER2(evsel3, "evsel", 0x04, 0x1f, 0x09, 0x00000000, PPC_SPE)
5646 {
5647 gen_evsel(ctx);
5648 }
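/* evsel is registered four times (opc2 0x1C..0x1F with the same opc3),
 * presumably because its CR-field operand overlaps the bits used by the
 * decode tables; every slot funnels into gen_evsel(), which re-reads the
 * CR field as ctx->opcode & 0x7.
 */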
5649
5650 /* Load and stores */
5651 #if defined(TARGET_PPC64)
5652 /* In that case, we already have 64-bit loads & stores,
5653 * so spe_ldd is equivalent to ld and spe_stdd is equivalent to std
5654 */
5655 #define gen_op_spe_ldd_raw gen_op_ld_raw
5656 #define gen_op_spe_ldd_user gen_op_ld_user
5657 #define gen_op_spe_ldd_kernel gen_op_ld_kernel
5658 #define gen_op_spe_ldd_hypv gen_op_ld_hypv
5659 #define gen_op_spe_ldd_64_raw gen_op_ld_64_raw
5660 #define gen_op_spe_ldd_64_user gen_op_ld_64_user
5661 #define gen_op_spe_ldd_64_kernel gen_op_ld_64_kernel
5662 #define gen_op_spe_ldd_64_hypv gen_op_ld_64_hypv
5663 #define gen_op_spe_ldd_le_raw gen_op_ld_le_raw
5664 #define gen_op_spe_ldd_le_user gen_op_ld_le_user
5665 #define gen_op_spe_ldd_le_kernel gen_op_ld_le_kernel
5666 #define gen_op_spe_ldd_le_hypv gen_op_ld_le_hypv
5667 #define gen_op_spe_ldd_le_64_raw gen_op_ld_le_64_raw
5668 #define gen_op_spe_ldd_le_64_user gen_op_ld_le_64_user
5669 #define gen_op_spe_ldd_le_64_kernel gen_op_ld_le_64_kernel
5670 #define gen_op_spe_ldd_le_64_hypv gen_op_ld_le_64_hypv
5671 #define gen_op_spe_stdd_raw gen_op_std_raw
5672 #define gen_op_spe_stdd_user gen_op_std_user
5673 #define gen_op_spe_stdd_kernel gen_op_std_kernel
5674 #define gen_op_spe_stdd_hypv gen_op_std_hypv
5675 #define gen_op_spe_stdd_64_raw gen_op_std_64_raw
5676 #define gen_op_spe_stdd_64_user gen_op_std_64_user
5677 #define gen_op_spe_stdd_64_kernel gen_op_std_64_kernel
5678 #define gen_op_spe_stdd_64_hypv gen_op_std_64_hypv
5679 #define gen_op_spe_stdd_le_raw gen_op_std_le_raw
5680 #define gen_op_spe_stdd_le_user gen_op_std_le_user
5681 #define gen_op_spe_stdd_le_kernel gen_op_std_le_kernel
5682 #define gen_op_spe_stdd_le_hypv gen_op_std_le_hypv
5683 #define gen_op_spe_stdd_le_64_raw gen_op_std_le_64_raw
5684 #define gen_op_spe_stdd_le_64_user gen_op_std_le_64_user
5685 #define gen_op_spe_stdd_le_64_kernel gen_op_std_le_64_kernel
5686 #define gen_op_spe_stdd_le_64_hypv gen_op_std_le_64_hypv
5687 #endif /* defined(TARGET_PPC64) */
5688 GEN_SPEOP_LDST(dd, 3);
5689 GEN_SPEOP_LDST(dw, 3);
5690 GEN_SPEOP_LDST(dh, 3);
5691 GEN_SPEOP_LDST(whe, 2);
5692 GEN_SPEOP_LD(whou, 2);
5693 GEN_SPEOP_LD(whos, 2);
5694 GEN_SPEOP_ST(who, 2);
5695
5696 #if defined(TARGET_PPC64)
5697 /* On 64-bit targets, spe_stwwo is equivalent to stw */
5698 #define gen_op_spe_stwwo_raw gen_op_stw_raw
5699 #define gen_op_spe_stwwo_user gen_op_stw_user
5700 #define gen_op_spe_stwwo_kernel gen_op_stw_kernel
5701 #define gen_op_spe_stwwo_hypv gen_op_stw_hypv
5702 #define gen_op_spe_stwwo_le_raw gen_op_stw_le_raw
5703 #define gen_op_spe_stwwo_le_user gen_op_stw_le_user
5704 #define gen_op_spe_stwwo_le_kernel gen_op_stw_le_kernel
5705 #define gen_op_spe_stwwo_le_hypv gen_op_stw_le_hypv
5706 #define gen_op_spe_stwwo_64_raw gen_op_stw_64_raw
5707 #define gen_op_spe_stwwo_64_user gen_op_stw_64_user
5708 #define gen_op_spe_stwwo_64_kernel gen_op_stw_64_kernel
5709 #define gen_op_spe_stwwo_64_hypv gen_op_stw_64_hypv
5710 #define gen_op_spe_stwwo_le_64_raw gen_op_stw_le_64_raw
5711 #define gen_op_spe_stwwo_le_64_user gen_op_stw_le_64_user
5712 #define gen_op_spe_stwwo_le_64_kernel gen_op_stw_le_64_kernel
5713 #define gen_op_spe_stwwo_le_64_hypv gen_op_stw_le_64_hypv
5714 #endif
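/* evstwwe stores the even (most significant) word of the 64-bit source,
 * so shift the value right by 32 bits and reuse the evstwwo (odd word)
 * store op generated above. */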
5715 #define _GEN_OP_SPE_STWWE(suffix) \
5716 static always_inline void gen_op_spe_stwwe_##suffix (void) \
5717 { \
5718 gen_op_srli32_T1_64(); \
5719 gen_op_spe_stwwo_##suffix(); \
5720 }
5721 #define _GEN_OP_SPE_STWWE_LE(suffix) \
5722 static always_inline void gen_op_spe_stwwe_le_##suffix (void) \
5723 { \
5724 gen_op_srli32_T1_64(); \
5725 gen_op_spe_stwwo_le_##suffix(); \
5726 }
5727 #if defined(TARGET_PPC64)
5728 #define GEN_OP_SPE_STWWE(suffix) \
5729 _GEN_OP_SPE_STWWE(suffix); \
5730 _GEN_OP_SPE_STWWE_LE(suffix); \
5731 static always_inline void gen_op_spe_stwwe_64_##suffix (void) \
5732 { \
5733 gen_op_srli32_T1_64(); \
5734 gen_op_spe_stwwo_64_##suffix(); \
5735 } \
5736 static always_inline void gen_op_spe_stwwe_le_64_##suffix (void) \
5737 { \
5738 gen_op_srli32_T1_64(); \
5739 gen_op_spe_stwwo_le_64_##suffix(); \
5740 }
5741 #else
5742 #define GEN_OP_SPE_STWWE(suffix) \
5743 _GEN_OP_SPE_STWWE(suffix); \
5744 _GEN_OP_SPE_STWWE_LE(suffix)
5745 #endif
5746 #if defined(CONFIG_USER_ONLY)
5747 GEN_OP_SPE_STWWE(raw);
5748 #else /* defined(CONFIG_USER_ONLY) */
5749 GEN_OP_SPE_STWWE(user);
5750 GEN_OP_SPE_STWWE(kernel);
5751 GEN_OP_SPE_STWWE(hypv);
5752 #endif /* defined(CONFIG_USER_ONLY) */
5753 GEN_SPEOP_ST(wwe, 2);
5754 GEN_SPEOP_ST(wwo, 2);
5755
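/* Splat loads: load a single element, adjust it as needed (shift it into
 * the even halfword position for evlhhesplat, sign-extend it for
 * evlhhossplat) and then duplicate the resulting word into both halves of
 * the 64-bit destination via gen_op_splatw_T1_64(). */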
5756 #define GEN_SPE_LDSPLAT(name, op, suffix) \
5757 static always_inline void gen_op_spe_l##name##_##suffix (void) \
5758 { \
5759 gen_op_##op##_##suffix(); \
5760 gen_op_splatw_T1_64(); \
5761 }
5762
5763 #define GEN_OP_SPE_LHE(suffix) \
5764 static always_inline void gen_op_spe_lhe_##suffix (void) \
5765 { \
5766 gen_op_spe_lh_##suffix(); \
5767 gen_op_sli16_T1_64(); \
5768 }
5769
5770 #define GEN_OP_SPE_LHX(suffix) \
5771 static always_inline void gen_op_spe_lhx_##suffix (void) \
5772 { \
5773 gen_op_spe_lh_##suffix(); \
5774 gen_op_extsh_T1_64(); \
5775 }
5776
5777 #if defined(CONFIG_USER_ONLY)
5778 GEN_OP_SPE_LHE(raw);
5779 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, raw);
5780 GEN_OP_SPE_LHE(le_raw);
5781 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_raw);
5782 GEN_SPE_LDSPLAT(hhousplat, spe_lh, raw);
5783 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_raw);
5784 GEN_OP_SPE_LHX(raw);
5785 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, raw);
5786 GEN_OP_SPE_LHX(le_raw);
5787 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_raw);
5788 #if defined(TARGET_PPC64)
5789 GEN_OP_SPE_LHE(64_raw);
5790 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_raw);
5791 GEN_OP_SPE_LHE(le_64_raw);
5792 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_raw);
5793 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_raw);
5794 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_raw);
5795 GEN_OP_SPE_LHX(64_raw);
5796 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_raw);
5797 GEN_OP_SPE_LHX(le_64_raw);
5798 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_raw);
5799 #endif
5800 #else
5801 GEN_OP_SPE_LHE(user);
5802 GEN_OP_SPE_LHE(kernel);
5803 GEN_OP_SPE_LHE(hypv);
5804 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, user);
5805 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, kernel);
5806 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, hypv);
5807 GEN_OP_SPE_LHE(le_user);
5808 GEN_OP_SPE_LHE(le_kernel);
5809 GEN_OP_SPE_LHE(le_hypv);
5810 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_user);
5811 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_kernel);
5812 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_hypv);
5813 GEN_SPE_LDSPLAT(hhousplat, spe_lh, user);
5814 GEN_SPE_LDSPLAT(hhousplat, spe_lh, kernel);
5815 GEN_SPE_LDSPLAT(hhousplat, spe_lh, hypv);
5816 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_user);
5817 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_kernel);
5818 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_hypv);
5819 GEN_OP_SPE_LHX(user);
5820 GEN_OP_SPE_LHX(kernel);
5821 GEN_OP_SPE_LHX(hypv);
5822 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, user);
5823 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, kernel);
5824 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, hypv);
5825 GEN_OP_SPE_LHX(le_user);
5826 GEN_OP_SPE_LHX(le_kernel);
5827 GEN_OP_SPE_LHX(le_hypv);
5828 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_user);
5829 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_kernel);
5830 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_hypv);
5831 #if defined(TARGET_PPC64)
5832 GEN_OP_SPE_LHE(64_user);
5833 GEN_OP_SPE_LHE(64_kernel);
5834 GEN_OP_SPE_LHE(64_hypv);
5835 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_user);
5836 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_kernel);
5837 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, 64_hypv);
5838 GEN_OP_SPE_LHE(le_64_user);
5839 GEN_OP_SPE_LHE(le_64_kernel);
5840 GEN_OP_SPE_LHE(le_64_hypv);
5841 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_user);
5842 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_kernel);
5843 GEN_SPE_LDSPLAT(hhesplat, spe_lhe, le_64_hypv);
5844 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_user);
5845 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_kernel);
5846 GEN_SPE_LDSPLAT(hhousplat, spe_lh, 64_hypv);
5847 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_user);
5848 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_kernel);
5849 GEN_SPE_LDSPLAT(hhousplat, spe_lh, le_64_hypv);
5850 GEN_OP_SPE_LHX(64_user);
5851 GEN_OP_SPE_LHX(64_kernel);
5852 GEN_OP_SPE_LHX(64_hypv);
5853 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_user);
5854 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_kernel);
5855 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, 64_hypv);
5856 GEN_OP_SPE_LHX(le_64_user);
5857 GEN_OP_SPE_LHX(le_64_kernel);
5858 GEN_OP_SPE_LHX(le_64_hypv);
5859 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_user);
5860 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_kernel);
5861 GEN_SPE_LDSPLAT(hhossplat, spe_lhx, le_64_hypv);
5862 #endif
5863 #endif
5864 GEN_SPEOP_LD(hhesplat, 1);
5865 GEN_SPEOP_LD(hhousplat, 1);
5866 GEN_SPEOP_LD(hhossplat, 1);
5867 GEN_SPEOP_LD(wwsplat, 2);
5868 GEN_SPEOP_LD(whsplat, 2);
5869
5870 GEN_SPE(evlddx, evldd, 0x00, 0x0C, 0x00000000, PPC_SPE); //
5871 GEN_SPE(evldwx, evldw, 0x01, 0x0C, 0x00000000, PPC_SPE); //
5872 GEN_SPE(evldhx, evldh, 0x02, 0x0C, 0x00000000, PPC_SPE); //
5873 GEN_SPE(evlhhesplatx, evlhhesplat, 0x04, 0x0C, 0x00000000, PPC_SPE); //
5874 GEN_SPE(evlhhousplatx, evlhhousplat, 0x06, 0x0C, 0x00000000, PPC_SPE); //
5875 GEN_SPE(evlhhossplatx, evlhhossplat, 0x07, 0x0C, 0x00000000, PPC_SPE); //
5876 GEN_SPE(evlwhex, evlwhe, 0x08, 0x0C, 0x00000000, PPC_SPE); //
5877 GEN_SPE(evlwhoux, evlwhou, 0x0A, 0x0C, 0x00000000, PPC_SPE); //
5878 GEN_SPE(evlwhosx, evlwhos, 0x0B, 0x0C, 0x00000000, PPC_SPE); //
5879 GEN_SPE(evlwwsplatx, evlwwsplat, 0x0C, 0x0C, 0x00000000, PPC_SPE); //
5880 GEN_SPE(evlwhsplatx, evlwhsplat, 0x0E, 0x0C, 0x00000000, PPC_SPE); //
5881 GEN_SPE(evstddx, evstdd, 0x10, 0x0C, 0x00000000, PPC_SPE); //
5882 GEN_SPE(evstdwx, evstdw, 0x11, 0x0C, 0x00000000, PPC_SPE); //
5883 GEN_SPE(evstdhx, evstdh, 0x12, 0x0C, 0x00000000, PPC_SPE); //
5884 GEN_SPE(evstwhex, evstwhe, 0x18, 0x0C, 0x00000000, PPC_SPE); //
5885 GEN_SPE(evstwhox, evstwho, 0x1A, 0x0C, 0x00000000, PPC_SPE); //
5886 GEN_SPE(evstwwex, evstwwe, 0x1C, 0x0C, 0x00000000, PPC_SPE); //
5887 GEN_SPE(evstwwox, evstwwo, 0x1E, 0x0C, 0x00000000, PPC_SPE); //
5888
5889 /* Multiply and add - TODO */
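/* The multiply-and-accumulate family is not implemented yet; its opcode
 * registrations are kept below but disabled, so the decoder treats these
 * instructions as invalid for now. */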
5890 #if 0
5891 GEN_SPE(speundef, evmhessf, 0x01, 0x10, 0x00000000, PPC_SPE);
5892 GEN_SPE(speundef, evmhossf, 0x03, 0x10, 0x00000000, PPC_SPE);
5893 GEN_SPE(evmheumi, evmhesmi, 0x04, 0x10, 0x00000000, PPC_SPE);
5894 GEN_SPE(speundef, evmhesmf, 0x05, 0x10, 0x00000000, PPC_SPE);
5895 GEN_SPE(evmhoumi, evmhosmi, 0x06, 0x10, 0x00000000, PPC_SPE);
5896 GEN_SPE(speundef, evmhosmf, 0x07, 0x10, 0x00000000, PPC_SPE);
5897 GEN_SPE(speundef, evmhessfa, 0x11, 0x10, 0x00000000, PPC_SPE);
5898 GEN_SPE(speundef, evmhossfa, 0x13, 0x10, 0x00000000, PPC_SPE);
5899 GEN_SPE(evmheumia, evmhesmia, 0x14, 0x10, 0x00000000, PPC_SPE);
5900 GEN_SPE(speundef, evmhesmfa, 0x15, 0x10, 0x00000000, PPC_SPE);
5901 GEN_SPE(evmhoumia, evmhosmia, 0x16, 0x10, 0x00000000, PPC_SPE);
5902 GEN_SPE(speundef, evmhosmfa, 0x17, 0x10, 0x00000000, PPC_SPE);
5903
5904 GEN_SPE(speundef, evmwhssf, 0x03, 0x11, 0x00000000, PPC_SPE);
5905 GEN_SPE(evmwlumi, speundef, 0x04, 0x11, 0x00000000, PPC_SPE);
5906 GEN_SPE(evmwhumi, evmwhsmi, 0x06, 0x11, 0x00000000, PPC_SPE);
5907 GEN_SPE(speundef, evmwhsmf, 0x07, 0x11, 0x00000000, PPC_SPE);
5908 GEN_SPE(speundef, evmwssf, 0x09, 0x11, 0x00000000, PPC_SPE);
5909 GEN_SPE(evmwumi, evmwsmi, 0x0C, 0x11, 0x00000000, PPC_SPE);
5910 GEN_SPE(speundef, evmwsmf, 0x0D, 0x11, 0x00000000, PPC_SPE);
5911 GEN_SPE(speundef, evmwhssfa, 0x13, 0x11, 0x00000000, PPC_SPE);
5912 GEN_SPE(evmwlumia, speundef, 0x14, 0x11, 0x00000000, PPC_SPE);
5913 GEN_SPE(evmwhumia, evmwhsmia, 0x16, 0x11, 0x00000000, PPC_SPE);
5914 GEN_SPE(speundef, evmwhsmfa, 0x17, 0x11, 0x00000000, PPC_SPE);
5915 GEN_SPE(speundef, evmwssfa, 0x19, 0x11, 0x00000000, PPC_SPE);
5916 GEN_SPE(evmwumia, evmwsmia, 0x1C, 0x11, 0x00000000, PPC_SPE);
5917 GEN_SPE(speundef, evmwsmfa, 0x1D, 0x11, 0x00000000, PPC_SPE);
5918
5919 GEN_SPE(evadduiaaw, evaddsiaaw, 0x00, 0x13, 0x0000F800, PPC_SPE);
5920 GEN_SPE(evsubfusiaaw, evsubfssiaaw, 0x01, 0x13, 0x0000F800, PPC_SPE);
5921 GEN_SPE(evaddumiaaw, evaddsmiaaw, 0x04, 0x13, 0x0000F800, PPC_SPE);
5922 GEN_SPE(evsubfumiaaw, evsubfsmiaaw, 0x05, 0x13, 0x0000F800, PPC_SPE);
5923 GEN_SPE(evdivws, evdivwu, 0x06, 0x13, 0x00000000, PPC_SPE);
5924 GEN_SPE(evmra, speundef, 0x07, 0x13, 0x0000F800, PPC_SPE);
5925
5926 GEN_SPE(evmheusiaaw, evmhessiaaw, 0x00, 0x14, 0x00000000, PPC_SPE);
5927 GEN_SPE(speundef, evmhessfaaw, 0x01, 0x14, 0x00000000, PPC_SPE);
5928 GEN_SPE(evmhousiaaw, evmhossiaaw, 0x02, 0x14, 0x00000000, PPC_SPE);
5929 GEN_SPE(speundef, evmhossfaaw, 0x03, 0x14, 0x00000000, PPC_SPE);
5930 GEN_SPE(evmheumiaaw, evmhesmiaaw, 0x04, 0x14, 0x00000000, PPC_SPE);
5931 GEN_SPE(speundef, evmhesmfaaw, 0x05, 0x14, 0x00000000, PPC_SPE);
5932 GEN_SPE(evmhoumiaaw, evmhosmiaaw, 0x06, 0x14, 0x00000000, PPC_SPE);
5933 GEN_SPE(speundef, evmhosmfaaw, 0x07, 0x14, 0x00000000, PPC_SPE);
5934 GEN_SPE(evmhegumiaa, evmhegsmiaa, 0x14, 0x14, 0x00000000, PPC_SPE);
5935 GEN_SPE(speundef, evmhegsmfaa, 0x15, 0x14, 0x00000000, PPC_SPE);
5936 GEN_SPE(evmhogumiaa, evmhogsmiaa, 0x16, 0x14, 0x00000000, PPC_SPE);
5937 GEN_SPE(speundef, evmhogsmfaa, 0x17, 0x14, 0x00000000, PPC_SPE);
5938
5939 GEN_SPE(evmwlusiaaw, evmwlssiaaw, 0x00, 0x15, 0x00000000, PPC_SPE);
5940 GEN_SPE(evmwlumiaaw, evmwlsmiaaw, 0x04, 0x15, 0x00000000, PPC_SPE);
5941 GEN_SPE(speundef, evmwssfaa, 0x09, 0x15, 0x00000000, PPC_SPE);
5942 GEN_SPE(evmwumiaa, evmwsmiaa, 0x0C, 0x15, 0x00000000, PPC_SPE);
5943 GEN_SPE(speundef, evmwsmfaa, 0x0D, 0x15, 0x00000000, PPC_SPE);
5944
5945 GEN_SPE(evmheusianw, evmhessianw, 0x00, 0x16, 0x00000000, PPC_SPE);
5946 GEN_SPE(speundef, evmhessfanw, 0x01, 0x16, 0x00000000, PPC_SPE);
5947 GEN_SPE(evmhousianw, evmhossianw, 0x02, 0x16, 0x00000000, PPC_SPE);
5948 GEN_SPE(speundef, evmhossfanw, 0x03, 0x16, 0x00000000, PPC_SPE);
5949 GEN_SPE(evmheumianw, evmhesmianw, 0x04, 0x16, 0x00000000, PPC_SPE);
5950 GEN_SPE(speundef, evmhesmfanw, 0x05, 0x16, 0x00000000, PPC_SPE);
5951 GEN_SPE(evmhoumianw, evmhosmianw, 0x06, 0x16, 0x00000000, PPC_SPE);
5952 GEN_SPE(speundef, evmhosmfanw, 0x07, 0x16, 0x00000000, PPC_SPE);
5953 GEN_SPE(evmhegumian, evmhegsmian, 0x14, 0x16, 0x00000000, PPC_SPE);
5954 GEN_SPE(speundef, evmhegsmfan, 0x15, 0x16, 0x00000000, PPC_SPE);
5955 GEN_SPE(evmhogumian, evmhogsmian, 0x16, 0x16, 0x00000000, PPC_SPE);
5956 GEN_SPE(speundef, evmhogsmfan, 0x17, 0x16, 0x00000000, PPC_SPE);
5957
5958 GEN_SPE(evmwlusianw, evmwlssianw, 0x00, 0x17, 0x00000000, PPC_SPE);
5959 GEN_SPE(evmwlumianw, evmwlsmianw, 0x04, 0x17, 0x00000000, PPC_SPE);
5960 GEN_SPE(speundef, evmwssfan, 0x09, 0x17, 0x00000000, PPC_SPE);
5961 GEN_SPE(evmwumian, evmwsmian, 0x0C, 0x17, 0x00000000, PPC_SPE);
5962 GEN_SPE(speundef, evmwsmfan, 0x0D, 0x17, 0x00000000, PPC_SPE);
5963 #endif
5964
5965 /*** SPE floating-point extension ***/
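/* SPE FP conversions have a single source (rB) and destination (rD), so a
 * common wrapper loads rB as a 64-bit value into T0_64, runs the
 * conversion op and writes the result back to rD. */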
5966 #define GEN_SPEFPUOP_CONV(name) \
5967 static always_inline void gen_##name (DisasContext *ctx) \
5968 { \
5969 gen_load_gpr64(cpu_T64[0], rB(ctx->opcode)); \
5970 gen_op_##name(); \
5971 gen_store_gpr64(rD(ctx->opcode), cpu_T64[0]); \
5972 }
5973
5974 /* Single precision floating-point vector operations */
5975 /* Arithmetic */
5976 GEN_SPEOP_ARITH2(evfsadd);
5977 GEN_SPEOP_ARITH2(evfssub);
5978 GEN_SPEOP_ARITH2(evfsmul);
5979 GEN_SPEOP_ARITH2(evfsdiv);
5980 GEN_SPEOP_ARITH1(evfsabs);
5981 GEN_SPEOP_ARITH1(evfsnabs);
5982 GEN_SPEOP_ARITH1(evfsneg);
5983 /* Conversion */
5984 GEN_SPEFPUOP_CONV(evfscfui);
5985 GEN_SPEFPUOP_CONV(evfscfsi);
5986 GEN_SPEFPUOP_CONV(evfscfuf);
5987 GEN_SPEFPUOP_CONV(evfscfsf);
5988 GEN_SPEFPUOP_CONV(evfsctui);
5989 GEN_SPEFPUOP_CONV(evfsctsi);
5990 GEN_SPEFPUOP_CONV(evfsctuf);
5991 GEN_SPEFPUOP_CONV(evfsctsf);
5992 GEN_SPEFPUOP_CONV(evfsctuiz);
5993 GEN_SPEFPUOP_CONV(evfsctsiz);
5994 /* Comparison */
5995 GEN_SPEOP_COMP(evfscmpgt);
5996 GEN_SPEOP_COMP(evfscmplt);
5997 GEN_SPEOP_COMP(evfscmpeq);
5998 GEN_SPEOP_COMP(evfststgt);
5999 GEN_SPEOP_COMP(evfststlt);
6000 GEN_SPEOP_COMP(evfststeq);
6001
6002 /* Opcode definitions */
6003 GEN_SPE(evfsadd, evfssub, 0x00, 0x0A, 0x00000000, PPC_SPEFPU); //
6004 GEN_SPE(evfsabs, evfsnabs, 0x02, 0x0A, 0x0000F800, PPC_SPEFPU); //
6005 GEN_SPE(evfsneg, speundef, 0x03, 0x0A, 0x0000F800, PPC_SPEFPU); //
6006 GEN_SPE(evfsmul, evfsdiv, 0x04, 0x0A, 0x00000000, PPC_SPEFPU); //
6007 GEN_SPE(evfscmpgt, evfscmplt, 0x06, 0x0A, 0x00600000, PPC_SPEFPU); //
6008 GEN_SPE(evfscmpeq, speundef, 0x07, 0x0A, 0x00600000, PPC_SPEFPU); //
6009 GEN_SPE(evfscfui, evfscfsi, 0x08, 0x0A, 0x00180000, PPC_SPEFPU); //
6010 GEN_SPE(evfscfuf, evfscfsf, 0x09, 0x0A, 0x00180000, PPC_SPEFPU); //
6011 GEN_SPE(evfsctui, evfsctsi, 0x0A, 0x0A, 0x00180000, PPC_SPEFPU); //
6012 GEN_SPE(evfsctuf, evfsctsf, 0x0B, 0x0A, 0x00180000, PPC_SPEFPU); //
6013 GEN_SPE(evfsctuiz, speundef, 0x0C, 0x0A, 0x00180000, PPC_SPEFPU); //
6014 GEN_SPE(evfsctsiz, speundef, 0x0D, 0x0A, 0x00180000, PPC_SPEFPU); //
6015 GEN_SPE(evfststgt, evfststlt, 0x0E, 0x0A, 0x00600000, PPC_SPEFPU); //
6016 GEN_SPE(evfststeq, speundef, 0x0F, 0x0A, 0x00600000, PPC_SPEFPU); //
6017
6018 /* Single precision floating-point operations */
6019 /* Arithmetic */
6020 GEN_SPEOP_ARITH2(efsadd);
6021 GEN_SPEOP_ARITH2(efssub);
6022 GEN_SPEOP_ARITH2(efsmul);
6023 GEN_SPEOP_ARITH2(efsdiv);
6024 GEN_SPEOP_ARITH1(efsabs);
6025 GEN_SPEOP_ARITH1(efsnabs);
6026 GEN_SPEOP_ARITH1(efsneg);
6027 /* Conversion */
6028 GEN_SPEFPUOP_CONV(efscfui);
6029 GEN_SPEFPUOP_CONV(efscfsi);
6030 GEN_SPEFPUOP_CONV(efscfuf);
6031 GEN_SPEFPUOP_CONV(efscfsf);
6032 GEN_SPEFPUOP_CONV(efsctui);
6033 GEN_SPEFPUOP_CONV(efsctsi);
6034 GEN_SPEFPUOP_CONV(efsctuf);
6035 GEN_SPEFPUOP_CONV(efsctsf);
6036 GEN_SPEFPUOP_CONV(efsctuiz);
6037 GEN_SPEFPUOP_CONV(efsctsiz);
6038 GEN_SPEFPUOP_CONV(efscfd);
6039 /* Comparison */
6040 GEN_SPEOP_COMP(efscmpgt);
6041 GEN_SPEOP_COMP(efscmplt);
6042 GEN_SPEOP_COMP(efscmpeq);
6043 GEN_SPEOP_COMP(efststgt);
6044 GEN_SPEOP_COMP(efststlt);
6045 GEN_SPEOP_COMP(efststeq);
6046
6047 /* Opcode definitions */
6048 GEN_SPE(efsadd, efssub, 0x00, 0x0B, 0x00000000, PPC_SPEFPU); //
6049 GEN_SPE(efsabs, efsnabs, 0x02, 0x0B, 0x0000F800, PPC_SPEFPU); //
6050 GEN_SPE(efsneg, speundef, 0x03, 0x0B, 0x0000F800, PPC_SPEFPU); //
6051 GEN_SPE(efsmul, efsdiv, 0x04, 0x0B, 0x00000000, PPC_SPEFPU); //
6052 GEN_SPE(efscmpgt, efscmplt, 0x06, 0x0B, 0x00600000, PPC_SPEFPU); //
6053 GEN_SPE(efscmpeq, efscfd, 0x07, 0x0B, 0x00600000, PPC_SPEFPU); //
6054 GEN_SPE(efscfui, efscfsi, 0x08, 0x0B, 0x00180000, PPC_SPEFPU); //
6055 GEN_SPE(efscfuf, efscfsf, 0x09, 0x0B, 0x00180000, PPC_SPEFPU); //
6056 GEN_SPE(efsctui, efsctsi, 0x0A, 0x0B, 0x00180000, PPC_SPEFPU); //
6057 GEN_SPE(efsctuf, efsctsf, 0x0B, 0x0B, 0x00180000, PPC_SPEFPU); //
6058 GEN_SPE(efsctuiz, speundef, 0x0C, 0x0B, 0x00180000, PPC_SPEFPU); //
6059 GEN_SPE(efsctsiz, speundef, 0x0D, 0x0B, 0x00180000, PPC_SPEFPU); //
6060 GEN_SPE(efststgt, efststlt, 0x0E, 0x0B, 0x00600000, PPC_SPEFPU); //
6061 GEN_SPE(efststeq, speundef, 0x0F, 0x0B, 0x00600000, PPC_SPEFPU); //
6062
6063 /* Double precision floating-point operations */
6064 /* Arithmetic */
6065 GEN_SPEOP_ARITH2(efdadd);
6066 GEN_SPEOP_ARITH2(efdsub);
6067 GEN_SPEOP_ARITH2(efdmul);
6068 GEN_SPEOP_ARITH2(efddiv);
6069 GEN_SPEOP_ARITH1(efdabs);
6070 GEN_SPEOP_ARITH1(efdnabs);
6071 GEN_SPEOP_ARITH1(efdneg);
6072 /* Conversion */
6073
6074 GEN_SPEFPUOP_CONV(efdcfui);
6075 GEN_SPEFPUOP_CONV(efdcfsi);
6076 GEN_SPEFPUOP_CONV(efdcfuf);
6077 GEN_SPEFPUOP_CONV(efdcfsf);
6078 GEN_SPEFPUOP_CONV(efdctui);
6079 GEN_SPEFPUOP_CONV(efdctsi);
6080 GEN_SPEFPUOP_CONV(efdctuf);
6081 GEN_SPEFPUOP_CONV(efdctsf);
6082 GEN_SPEFPUOP_CONV(efdctuiz);
6083 GEN_SPEFPUOP_CONV(efdctsiz);
6084 GEN_SPEFPUOP_CONV(efdcfs);
6085 GEN_SPEFPUOP_CONV(efdcfuid);
6086 GEN_SPEFPUOP_CONV(efdcfsid);
6087 GEN_SPEFPUOP_CONV(efdctuidz);
6088 GEN_SPEFPUOP_CONV(efdctsidz);
6089 /* Comparison */
6090 GEN_SPEOP_COMP(efdcmpgt);
6091 GEN_SPEOP_COMP(efdcmplt);
6092 GEN_SPEOP_COMP(efdcmpeq);
6093 GEN_SPEOP_COMP(efdtstgt);
6094 GEN_SPEOP_COMP(efdtstlt);
6095 GEN_SPEOP_COMP(efdtsteq);
6096
6097 /* Opcode definitions */
6098 GEN_SPE(efdadd, efdsub, 0x10, 0x0B, 0x00000000, PPC_SPEFPU); //
6099 GEN_SPE(efdcfuid, efdcfsid, 0x11, 0x0B, 0x00180000, PPC_SPEFPU); //
6100 GEN_SPE(efdabs, efdnabs, 0x12, 0x0B, 0x0000F800, PPC_SPEFPU); //
6101 GEN_SPE(efdneg, speundef, 0x13, 0x0B, 0x0000F800, PPC_SPEFPU); //
6102 GEN_SPE(efdmul, efddiv, 0x14, 0x0B, 0x00000000, PPC_SPEFPU); //
6103 GEN_SPE(efdctuidz, efdctsidz, 0x15, 0x0B, 0x00180000, PPC_SPEFPU); //
6104 GEN_SPE(efdcmpgt, efdcmplt, 0x16, 0x0B, 0x00600000, PPC_SPEFPU); //
6105 GEN_SPE(efdcmpeq, efdcfs, 0x17, 0x0B, 0x00600000, PPC_SPEFPU); //
6106 GEN_SPE(efdcfui, efdcfsi, 0x18, 0x0B, 0x00180000, PPC_SPEFPU); //
6107 GEN_SPE(efdcfuf, efdcfsf, 0x19, 0x0B, 0x00180000, PPC_SPEFPU); //
6108 GEN_SPE(efdctui, efdctsi, 0x1A, 0x0B, 0x00180000, PPC_SPEFPU); //
6109 GEN_SPE(efdctuf, efdctsf, 0x1B, 0x0B, 0x00180000, PPC_SPEFPU); //
6110 GEN_SPE(efdctuiz, speundef, 0x1C, 0x0B, 0x00180000, PPC_SPEFPU); //
6111 GEN_SPE(efdctsiz, speundef, 0x1D, 0x0B, 0x00180000, PPC_SPEFPU); //
6112 GEN_SPE(efdtstgt, efdtstlt, 0x1E, 0x0B, 0x00600000, PPC_SPEFPU); //
6113 GEN_SPE(efdtsteq, speundef, 0x1F, 0x0B, 0x00600000, PPC_SPEFPU); //
6114
6115 /* End opcode list */
6116 GEN_OPCODE_MARK(end);
6117
6118 #include "translate_init.c"
6119 #include "helper_regs.h"
6120
6121 /*****************************************************************************/
6122 /* Misc PowerPC helpers */
6123 void cpu_dump_state (CPUState *env, FILE *f,
6124 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
6125 int flags)
6126 {
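/* Number of GPR (RGPL) and FPR (RFPL) values printed per output line */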
6127 #define RGPL 4
6128 #define RFPL 4
6129
6130 int i;
6131
6132 cpu_fprintf(f, "NIP " ADDRX " LR " ADDRX " CTR " ADDRX " XER %08x\n",
6133 env->nip, env->lr, env->ctr, hreg_load_xer(env));
6134 cpu_fprintf(f, "MSR " ADDRX " HID0 " ADDRX " HF " ADDRX " idx %d\n",
6135 env->msr, env->spr[SPR_HID0], env->hflags, env->mmu_idx);
6136 #if !defined(NO_TIMER_DUMP)
6137 cpu_fprintf(f, "TB %08x %08x "
6138 #if !defined(CONFIG_USER_ONLY)
6139 "DECR %08x"
6140 #endif
6141 "\n",
6142 cpu_ppc_load_tbu(env), cpu_ppc_load_tbl(env)
6143 #if !defined(CONFIG_USER_ONLY)
6144 , cpu_ppc_load_decr(env)
6145 #endif
6146 );
6147 #endif
6148 for (i = 0; i < 32; i++) {
6149 if ((i & (RGPL - 1)) == 0)
6150 cpu_fprintf(f, "GPR%02d", i);
6151 cpu_fprintf(f, " " REGX, ppc_dump_gpr(env, i));
6152 if ((i & (RGPL - 1)) == (RGPL - 1))
6153 cpu_fprintf(f, "\n");
6154 }
6155 cpu_fprintf(f, "CR ");
6156 for (i = 0; i < 8; i++)
6157 cpu_fprintf(f, "%01x", env->crf[i]);
6158 cpu_fprintf(f, " [");
6159 for (i = 0; i < 8; i++) {
6160 char a = '-';
6161 if (env->crf[i] & 0x08)
6162 a = 'L';
6163 else if (env->crf[i] & 0x04)
6164 a = 'G';
6165 else if (env->crf[i] & 0x02)
6166 a = 'E';
6167 cpu_fprintf(f, " %c%c", a, env->crf[i] & 0x01 ? 'O' : ' ');
6168 }
6169 cpu_fprintf(f, " ] RES " ADDRX "\n", env->reserve);
6170 for (i = 0; i < 32; i++) {
6171 if ((i & (RFPL - 1)) == 0)
6172 cpu_fprintf(f, "FPR%02d", i);
6173 cpu_fprintf(f, " %016" PRIx64, *((uint64_t *)&env->fpr[i]));
6174 if ((i & (RFPL - 1)) == (RFPL - 1))
6175 cpu_fprintf(f, "\n");
6176 }
6177 #if !defined(CONFIG_USER_ONLY)
6178 cpu_fprintf(f, "SRR0 " ADDRX " SRR1 " ADDRX " SDR1 " ADDRX "\n",
6179 env->spr[SPR_SRR0], env->spr[SPR_SRR1], env->sdr1);
6180 #endif
6181
6182 #undef RGPL
6183 #undef RFPL
6184 }
6185
6186 void cpu_dump_statistics (CPUState *env, FILE*f,
6187 int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
6188 int flags)
6189 {
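/* Only useful when built with DO_PPC_STATISTICS: walk the (up to
 * three-level) opcode tables and print the dynamic use count of every
 * handler that was translated at least once. */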
6190 #if defined(DO_PPC_STATISTICS)
6191 opc_handler_t **t1, **t2, **t3, *handler;
6192 int op1, op2, op3;
6193
6194 t1 = env->opcodes;
6195 for (op1 = 0; op1 < 64; op1++) {
6196 handler = t1[op1];
6197 if (is_indirect_opcode(handler)) {
6198 t2 = ind_table(handler);
6199 for (op2 = 0; op2 < 32; op2++) {
6200 handler = t2[op2];
6201 if (is_indirect_opcode(handler)) {
6202 t3 = ind_table(handler);
6203 for (op3 = 0; op3 < 32; op3++) {
6204 handler = t3[op3];
6205 if (handler->count == 0)
6206 continue;
6207 cpu_fprintf(f, "%02x %02x %02x (%02x %04d) %16s: "
6208 "%016llx %lld\n",
6209 op1, op2, op3, op1, (op3 << 5) | op2,
6210 handler->oname,
6211 handler->count, handler->count);
6212 }
6213 } else {
6214 if (handler->count == 0)
6215 continue;
6216 cpu_fprintf(f, "%02x %02x (%02x %04d) %16s: "
6217 "%016llx %lld\n",
6218 op1, op2, op1, op2, handler->oname,
6219 handler->count, handler->count);
6220 }
6221 }
6222 } else {
6223 if (handler->count == 0)
6224 continue;
6225 cpu_fprintf(f, "%02x (%02x ) %16s: %016llx %lld\n",
6226 op1, op1, handler->oname,
6227 handler->count, handler->count);
6228 }
6229 }
6230 #endif
6231 }
6232
6233 /*****************************************************************************/
6234 static always_inline void gen_intermediate_code_internal (CPUState *env,
6235 TranslationBlock *tb,
6236 int search_pc)
6237 {
6238 DisasContext ctx, *ctxp = &ctx;
6239 opc_handler_t **table, *handler;
6240 target_ulong pc_start;
6241 uint16_t *gen_opc_end;
6242 int supervisor, little_endian;
6243 int j, lj = -1;
6244 int num_insns;
6245 int max_insns;
6246
6247 pc_start = tb->pc;
6248 gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
6249 #if defined(OPTIMIZE_FPRF_UPDATE)
6250 gen_fprf_ptr = gen_fprf_buf;
6251 #endif
6252 ctx.nip = pc_start;
6253 ctx.tb = tb;
6254 ctx.exception = POWERPC_EXCP_NONE;
6255 ctx.spr_cb = env->spr_cb;
6256 supervisor = env->mmu_idx;
6257 #if !defined(CONFIG_USER_ONLY)
6258 ctx.supervisor = supervisor;
6259 #endif
6260 little_endian = env->hflags & (1 << MSR_LE) ? 1 : 0;
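/* mem_idx selects the load/store op variant: bit 0 is the little-endian
 * flag, bit 1 (PPC64 only) the 64-bit address mode, and the remaining
 * bits the privilege level taken from mmu_idx. */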
6261 #if defined(TARGET_PPC64)
6262 ctx.sf_mode = msr_sf;
6263 ctx.mem_idx = (supervisor << 2) | (msr_sf << 1) | little_endian;
6264 #else
6265 ctx.mem_idx = (supervisor << 1) | little_endian;
6266 #endif
6267 ctx.dcache_line_size = env->dcache_line_size;
6268 ctx.fpu_enabled = msr_fp;
6269 if ((env->flags & POWERPC_FLAG_SPE) && msr_spe)
6270 ctx.spe_enabled = msr_spe;
6271 else
6272 ctx.spe_enabled = 0;
6273 if ((env->flags & POWERPC_FLAG_VRE) && msr_vr)
6274 ctx.altivec_enabled = msr_vr;
6275 else
6276 ctx.altivec_enabled = 0;
6277 if ((env->flags & POWERPC_FLAG_SE) && msr_se)
6278 ctx.singlestep_enabled = CPU_SINGLE_STEP;
6279 else
6280 ctx.singlestep_enabled = 0;
6281 if ((env->flags & POWERPC_FLAG_BE) && msr_be)
6282 ctx.singlestep_enabled |= CPU_BRANCH_STEP;
6283 if (unlikely(env->singlestep_enabled))
6284 ctx.singlestep_enabled |= GDBSTUB_SINGLE_STEP;
6285 #if defined (DO_SINGLE_STEP) && 0
6286 /* Single step trace mode */
6287 msr_se = 1;
6288 #endif
6289 num_insns = 0;
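/* When icount is in use, tb->cflags carries the maximum number of guest
 * instructions allowed in this TB; zero means no explicit limit. */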
6290 max_insns = tb->cflags & CF_COUNT_MASK;
6291 if (max_insns == 0)
6292 max_insns = CF_COUNT_MASK;
6293
6294 gen_icount_start();
6295 /* Set env in case of segfault during code fetch */
6296 while (ctx.exception == POWERPC_EXCP_NONE && gen_opc_ptr < gen_opc_end) {
6297 if (unlikely(env->nb_breakpoints > 0)) {
6298 for (j = 0; j < env->nb_breakpoints; j++) {
6299 if (env->breakpoints[j] == ctx.nip) {
6300 gen_update_nip(&ctx, ctx.nip);
6301 gen_op_debug();
6302 break;
6303 }
6304 }
6305 }
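/* When searching for a PC (gen_intermediate_code_pc), record the guest
 * nip and instruction count for each generated micro-op so the CPU state
 * can be rebuilt after a fault. */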
6306 if (unlikely(search_pc)) {
6307 j = gen_opc_ptr - gen_opc_buf;
6308 if (lj < j) {
6309 lj++;
6310 while (lj < j)
6311 gen_opc_instr_start[lj++] = 0;
6312 gen_opc_pc[lj] = ctx.nip;
6313 gen_opc_instr_start[lj] = 1;
6314 gen_opc_icount[lj] = num_insns;
6315 }
6316 }
6317 #if defined PPC_DEBUG_DISAS
6318 if (loglevel & CPU_LOG_TB_IN_ASM) {
6319 fprintf(logfile, "----------------\n");
6320 fprintf(logfile, "nip=" ADDRX " super=%d ir=%d\n",
6321 ctx.nip, supervisor, (int)msr_ir);
6322 }
6323 #endif
6324 if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
6325 gen_io_start();
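/* Instructions are fetched in big-endian order; byte-swap the opcode when
 * the CPU is running in little-endian mode. */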
6326 if (unlikely(little_endian)) {
6327 ctx.opcode = bswap32(ldl_code(ctx.nip));
6328 } else {
6329 ctx.opcode = ldl_code(ctx.nip);
6330 }
6331 #if defined PPC_DEBUG_DISAS
6332 if (loglevel & CPU_LOG_TB_IN_ASM) {
6333 fprintf(logfile, "translate opcode %08x (%02x %02x %02x) (%s)\n",
6334 ctx.opcode, opc1(ctx.opcode), opc2(ctx.opcode),
6335 opc3(ctx.opcode), little_endian ? "little" : "big");
6336 }
6337 #endif
6338 ctx.nip += 4;
6339 table = env->opcodes;
6340 num_insns++;
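/* Decode: walk the up to three-level opcode table (opc1, then opc2, then
 * opc3), following indirect entries until a leaf handler is found. */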
6341 handler = table[opc1(ctx.opcode)];
6342 if (is_indirect_opcode(handler)) {
6343 table = ind_table(handler);
6344 handler = table[opc2(ctx.opcode)];
6345 if (is_indirect_opcode(handler)) {
6346 table = ind_table(handler);
6347 handler = table[opc3(ctx.opcode)];
6348 }
6349 }
6350 /* Is the opcode *REALLY* valid? */
6351 if (unlikely(handler->handler == &gen_invalid)) {
6352 if (loglevel != 0) {
6353 fprintf(logfile, "invalid/unsupported opcode: "
6354 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
6355 opc1(ctx.opcode), opc2(ctx.opcode),
6356 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
6357 } else {
6358 printf("invalid/unsupported opcode: "
6359 "%02x - %02x - %02x (%08x) " ADDRX " %d\n",
6360 opc1(ctx.opcode), opc2(ctx.opcode),
6361 opc3(ctx.opcode), ctx.opcode, ctx.nip - 4, (int)msr_ir);
6362 }
6363 } else {
6364 if (unlikely((ctx.opcode & handler->inval) != 0)) {
6365 if (loglevel != 0) {
6366 fprintf(logfile, "invalid bits: %08x for opcode: "
6367 "%02x - %02x - %02x (%08x) " ADDRX "\n",
6368 ctx.opcode & handler->inval, opc1(ctx.opcode),
6369 opc2(ctx.opcode), opc3(ctx.opcode),
6370 ctx.opcode, ctx.nip - 4);
6371 } else {
6372 printf("invalid bits: %08x for opcode: "
6373 "%02x - %02x - %02x (%08x) " ADDRX "\n",
6374 ctx.opcode & handler->inval, opc1(ctx.opcode),
6375 opc2(ctx.opcode), opc3(ctx.opcode),
6376 ctx.opcode, ctx.nip - 4);
6377 }
6378 GEN_EXCP_INVAL(ctxp);
6379 break;
6380 }
6381 }
6382 (*(handler->handler))(&ctx);
6383 #if defined(DO_PPC_STATISTICS)
6384 handler->count++;
6385 #endif
6386 /* Check trace mode exceptions */
6387 if (unlikely(ctx.singlestep_enabled & CPU_SINGLE_STEP &&
6388 (ctx.nip <= 0x100 || ctx.nip > 0xF00) &&
6389 ctx.exception != POWERPC_SYSCALL &&
6390 ctx.exception != POWERPC_EXCP_TRAP &&
6391 ctx.exception != POWERPC_EXCP_BRANCH)) {
6392 GEN_EXCP(ctxp, POWERPC_EXCP_TRACE, 0);
6393 } else if (unlikely(((ctx.nip & (TARGET_PAGE_SIZE - 1)) == 0) ||
6394 (env->singlestep_enabled) ||
6395 num_insns >= max_insns)) {
6396 /* if we reach a page boundary or are single stepping, stop
6397 * generation
6398 */
6399 break;
6400 }
6401 #if defined (DO_SINGLE_STEP)
6402 break;
6403 #endif
6404 }
6405 if (tb->cflags & CF_LAST_IO)
6406 gen_io_end();
6407 if (ctx.exception == POWERPC_EXCP_NONE) {
6408 gen_goto_tb(&ctx, 0, ctx.nip);
6409 } else if (ctx.exception != POWERPC_EXCP_BRANCH) {
6410 if (unlikely(env->singlestep_enabled)) {
6411 gen_update_nip(&ctx, ctx.nip);
6412 gen_op_debug();
6413 }
6414 /* Generate the return instruction */
6415 tcg_gen_exit_tb(0);
6416 }
6417 gen_icount_end(tb, num_insns);
6418 *gen_opc_ptr = INDEX_op_end;
6419 if (unlikely(search_pc)) {
6420 j = gen_opc_ptr - gen_opc_buf;
6421 lj++;
6422 while (lj <= j)
6423 gen_opc_instr_start[lj++] = 0;
6424 } else {
6425 tb->size = ctx.nip - pc_start;
6426 tb->icount = num_insns;
6427 }
6428 #if defined(DEBUG_DISAS)
6429 if (loglevel & CPU_LOG_TB_CPU) {
6430 fprintf(logfile, "---------------- excp: %04x\n", ctx.exception);
6431 cpu_dump_state(env, logfile, fprintf, 0);
6432 }
6433 if (loglevel & CPU_LOG_TB_IN_ASM) {
6434 int flags;
6435 flags = env->bfd_mach;
6436 flags |= little_endian << 16;
6437 fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
6438 target_disas(logfile, pc_start, ctx.nip - pc_start, flags);
6439 fprintf(logfile, "\n");
6440 }
6441 #endif
6442 }
6443
6444 void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
6445 {
6446 gen_intermediate_code_internal(env, tb, 0);
6447 }
6448
6449 void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
6450 {
6451 gen_intermediate_code_internal(env, tb, 1);
6452 }
6453
6454 void gen_pc_load(CPUState *env, TranslationBlock *tb,
6455 unsigned long searched_pc, int pc_pos, void *puc)
6456 {
6457 int type, c;
6458 /* for PPC, we need to look at the micro operation to get the
6459 * access type */
6460 env->nip = gen_opc_pc[pc_pos];
6461 c = gen_opc_buf[pc_pos];
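/* CASE3 expands to one case label per MMU access mode: a single "raw"
 * variant for user-only builds, or user/kernel/hypv variants otherwise. */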
6462 switch(c) {
6463 #if defined(CONFIG_USER_ONLY)
6464 #define CASE3(op)\
6465 case INDEX_op_ ## op ## _raw
6466 #else
6467 #define CASE3(op)\
6468 case INDEX_op_ ## op ## _user:\
6469 case INDEX_op_ ## op ## _kernel:\
6470 case INDEX_op_ ## op ## _hypv
6471 #endif
6472
6473 CASE3(stfd):
6474 CASE3(stfs):
6475 CASE3(lfd):
6476 CASE3(lfs):
6477 type = ACCESS_FLOAT;
6478 break;
6479 CASE3(lwarx):
6480 type = ACCESS_RES;
6481 break;
6482 CASE3(stwcx):
6483 type = ACCESS_RES;
6484 break;
6485 CASE3(eciwx):
6486 CASE3(ecowx):
6487 type = ACCESS_EXT;
6488 break;
6489 default:
6490 type = ACCESS_INT;
6491 break;
6492 }
6493 env->access_type = type;
6494 }